| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, may be null ⌀) |
|---|---|---|---|---|
CourseTalk/edx-platform
|
refs/heads/master
|
lms/djangoapps/shoppingcart/processors/__init__.py
|
215
|
"""
Public API for payment processor implementations.
The specific implementation is determined at runtime using Django settings:
CC_PROCESSOR_NAME: The name of the Python module (in `shoppingcart.processors`) to use.
CC_PROCESSOR: Dictionary of configuration options for specific processor implementations,
keyed to processor names.
"""
from django.conf import settings
# Import the processor implementation, using `CC_PROCESSOR_NAME`
# as the name of the Python module in `shoppingcart.processors`
PROCESSOR_MODULE = __import__(
'shoppingcart.processors.' + settings.CC_PROCESSOR_NAME,
fromlist=[
'render_purchase_form_html',
'process_postpay_callback',
'get_purchase_endpoint',
'get_signed_purchase_params',
]
)
def render_purchase_form_html(cart, **kwargs):
"""
Render an HTML form with POSTs to the hosted payment processor.
Args:
cart (Order): The order model representing items in the user's cart.
Returns:
unicode: the rendered HTML form
"""
return PROCESSOR_MODULE.render_purchase_form_html(cart, **kwargs)
def process_postpay_callback(params, **kwargs):
"""
Handle a response from the payment processor.
Concrete implementations should:
1) Verify the parameters and determine if the payment was successful.
2) If successful, mark the order as purchased and call `purchased_callbacks` of the cart items.
3) If unsuccessful, try to figure out why and generate a helpful error message.
4) Return a dictionary of the form:
{'success': bool, 'order': Order, 'error_html': str}
Args:
params (dict): Dictionary of parameters received from the payment processor.
Keyword Args:
Can be used to provide additional information to concrete implementations.
Returns:
dict
"""
return PROCESSOR_MODULE.process_postpay_callback(params, **kwargs)
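# Illustrative sketch only: a successful result from a concrete implementation
# would have the shape documented above, e.g.
#
#   {'success': True, 'order': order, 'error_html': ''}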
def get_purchase_endpoint():
"""
Return the URL of the current payment processor's endpoint.
Returns:
unicode
"""
return PROCESSOR_MODULE.get_purchase_endpoint()
def get_signed_purchase_params(cart, **kwargs):
"""
Return the parameters to send to the current payment processor.
Args:
cart (Order): The order model representing items in the user's cart.
Keyword Args:
Can be used to provide additional information to concrete implementations.
Returns:
dict
"""
return PROCESSOR_MODULE.get_signed_purchase_params(cart, **kwargs)
|
outvader/soclone
|
refs/heads/master
|
soclone/views.py
|
5
|
"""SOClone views."""
import datetime
import itertools
from django.contrib.auth.models import User
from django.contrib.auth import views as auth_views
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator, InvalidPage
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.utils.html import strip_tags
from django.utils.safestring import mark_safe
from lxml.html.diff import htmldiff
from markdown2 import Markdown
from soclone import auth
from soclone import diff
from soclone.forms import (AddAnswerForm, AskQuestionForm, CloseQuestionForm,
CommentForm, EditAnswerForm, EditQuestionForm, RetagQuestionForm,
RevisionForm)
from soclone.http import JsonResponse
from soclone.models import (Answer, AnswerRevision, Badge, Comment,
FavouriteQuestion, Question, QuestionRevision, Tag, Vote)
from soclone.questions import (all_question_views, index_question_views,
unanswered_question_views)
from soclone.shortcuts import get_page
from soclone.utils.html import sanitize_html
from soclone.utils.models import populate_foreign_key_caches
markdowner = Markdown(html4tags=True)
AUTO_WIKI_ANSWER_COUNT = 30
def get_questions_per_page(user):
if user.is_authenticated():
return user.questions_per_page
return 10
def question_list(request, question_views, template, questions_per_page=None,
page_number=None, extra_context=None):
"""
Question list generic view.
Allows the user to select from a number of ways of viewing questions,
rendered with the given template.
"""
view_id = request.GET.get('sort', None)
view = dict([(q.id, q) for q in question_views]).get(view_id,
question_views[0])
if questions_per_page is None:
questions_per_page = get_questions_per_page(request.user)
paginator = Paginator(view.get_queryset(), questions_per_page)
if page_number is None:
page = get_page(request, paginator)
else:
page = paginator.page(page_number)
populate_foreign_key_caches(User, ((page.object_list, (view.user,)),),
fields=view.user_fields)
context = {
'title': view.page_title,
'page': page,
'questions': page.object_list,
'current_view': view,
'question_views': question_views,
}
if extra_context is not None:
context.update(extra_context)
return render_to_response(template, context,
context_instance=RequestContext(request))
def index(request):
"""A condensed version of the main Question list."""
extra_context = {
# TODO Retrieve extra context required for index page
}
return question_list(request, index_question_views, 'index.html',
questions_per_page=50, page_number=1,
extra_context=extra_context)
def about(request):
"""About SOClone."""
raise NotImplementedError
def faq(request):
"""Frequently Asked Questions."""
raise NotImplementedError
def search(request):
"""Search Questions and Answers."""
raise NotImplementedError
def login(request):
"""Logs in."""
return auth_views.login(request, template_name='login.html')
def logout(request):
"""Logs out."""
return auth_views.logout(request, template_name='logged_out.html')
def questions(request):
"""All Questions list."""
return question_list(request, all_question_views, 'questions.html')
def unanswered(request):
"""Unanswered Questions list."""
return question_list(request, unanswered_question_views, 'unanswered.html')
ANSWER_SORT = {
'votes': ('-score', '-added_at'),
'newest': ('-added_at',),
'oldest': ('added_at',),
}
DEFAULT_ANSWER_SORT = 'votes'
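# The answer ordering is chosen via the ``sort`` GET parameter; unrecognised
# values fall back to DEFAULT_ANSWER_SORT. Illustrative request (URL pattern
# assumed for the example):
#
#   /questions/42/?sort=oldest   -> answers ordered by ('added_at',)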
def question(request, question_id):
"""Displays a Question."""
if not request.user.is_authenticated():
question = get_object_or_404(Question, id=question_id)
favourite = False
else:
question = get_object_or_404(Question.objects.extra(
select={
'user_favourite_id': (
'SELECT id FROM soclone_favouritequestion '
'WHERE question_id = soclone_question.id '
'AND user_id = %s'),
},
select_params=[request.user.id]
), id=question_id)
favourite = (question.user_favourite_id is not None)
if 'showcomments' in request.GET:
return question_comments(request, question)
answer_sort_type = request.GET.get('sort', DEFAULT_ANSWER_SORT)
if answer_sort_type not in ANSWER_SORT:
answer_sort_type = DEFAULT_ANSWER_SORT
order_by = ANSWER_SORT[answer_sort_type]
paginator = Paginator(Answer.objects.for_question(
question, request.user).order_by(*order_by),
AUTO_WIKI_ANSWER_COUNT)
# Save ourselves a COUNT() query by using the denormalised count
paginator._count = question.answer_count
page = get_page(request, paginator)
answers = page.object_list
populate_foreign_key_caches(User, (
((question,), ('author', 'last_edited_by', 'closed_by')),
(answers, ('author', 'last_edited_by'))
),
fields=('username', 'gravatar', 'reputation', 'gold', 'silver',
'bronze'))
# Look up vote status for the current user
question_vote, answer_votes = Vote.objects.get_for_question_and_answers(
request.user, question, page.object_list)
title = question.title
if question.closed:
title = '%s [closed]' % title
return render_to_response('question.html', {
'title': title,
'question': question,
'question_vote': question_vote,
'favourite': favourite,
'answers': page.object_list,
'answer_votes': answer_votes,
'page': page,
'answer_sort': answer_sort_type,
'answer_form': AddAnswerForm(),
'tags': question.tags.all(),
}, context_instance=RequestContext(request))
def question_comments(request, question, form=None):
"""
Displays a Question and any Comments on it.
This is primarily intended as a fallback for users who can't
dynamically load Comments.
"""
populate_foreign_key_caches(User, (
((question,), ('author', 'last_edited_by', 'closed_by')),
),
fields=('username', 'gravatar', 'reputation', 'gold', 'silver',
'bronze'))
content_type = ContentType.objects.get_for_model(Question)
comments = Comment.objects.filter(content_type=content_type,
object_id=question.id)
if form is None:
form = CommentForm()
return render_to_response('question.html', {
'title': u'Comments on %s' % question.title,
'question': question,
'tags': question.tags.all(),
'comments': comments,
'comment_form': form,
}, context_instance=RequestContext(request))
def ask_question(request):
"""Adds a Question."""
preview = None
if request.method == 'POST':
form = AskQuestionForm(request.POST)
if form.is_valid():
html = sanitize_html(markdowner.convert(form.cleaned_data['text']))
if 'preview' in request.POST:
# The user submitted the form to preview the formatted question
preview = mark_safe(html)
elif 'submit' in request.POST:
added_at = datetime.datetime.now()
# Create the Question
question = Question(
title = form.cleaned_data['title'],
author = request.user,
added_at = added_at,
wiki = form.cleaned_data['wiki'],
last_activity_at = added_at,
last_activity_by = request.user,
tagnames = form.cleaned_data['tags'],
html = html,
summary = strip_tags(html)[:180]
)
if question.wiki:
question.wikified_at = added_at
# When in wiki mode, we always display the last edit
question.last_edited_at = added_at
question.last_edited_by = request.user
question.save()
# Create the initial revision
QuestionRevision.objects.create(
question = question,
revision = 1,
title = question.title,
author = request.user,
revised_at = added_at,
tagnames = question.tagnames,
summary = u'asked question',
text = form.cleaned_data['text']
)
# TODO Badges related to Tag usage
# TODO Badges related to asking Questions
return HttpResponseRedirect(question.get_absolute_url())
else:
form = AskQuestionForm()
return render_to_response('ask_question.html', {
'title': u'Ask a Question',
'form': form,
'preview': preview,
}, context_instance=RequestContext(request))
def edit_question(request, question_id):
"""
Entry point for editing a question.
Fields which can be edited depend on the logged-in user's roles or
reputation, so this view delegates to the appropriate view based on
those criteria.
"""
question = get_object_or_404(Question, id=question_id)
if auth.can_edit_post(request.user, question):
return _edit_question(request, question)
elif auth.can_retag_questions(request.user):
return _retag_question(request, question)
else:
raise Http404
def _edit_question(request, question):
"""
Allows the user to edit a Question's title, text and tags.
If the Question is not already in wiki mode, the user can put it in
wiki mode, or it will automatically be put in wiki mode if the
question has been edited five times by the person who asked it, or
has been edited by four different users.
"""
latest_revision = question.get_latest_revision()
preview = None
revision_form = None
if request.method == 'POST':
if 'select_revision' in request.POST:
# The user submitted to change the revision to start editing from
revision_form = RevisionForm(question, latest_revision, request.POST)
if revision_form.is_valid():
# Replace Question details with those from the selected revision
form = EditQuestionForm(question,
QuestionRevision.objects.get(question=question,
revision=revision_form.cleaned_data['revision']))
else:
# Make sure we keep a hold of the user's other input, even
# though they appear to be messing about.
form = EditQuestionForm(question, latest_revision, request.POST)
else:
# Always check modifications against the latest revision
form = EditQuestionForm(question, latest_revision, request.POST)
if form.is_valid():
html = sanitize_html(
markdowner.convert(form.cleaned_data['text']))
if 'preview' in request.POST:
# The user submitted to preview the formatted question
preview = mark_safe(html)
elif 'submit' in request.POST:
if form.has_changed():
edited_at = datetime.datetime.now()
tags_changed = (latest_revision.tagnames !=
form.cleaned_data['tags'])
tags_updated = False
# Update the Question itself
updated_fields = {
'title': form.cleaned_data['title'],
'last_edited_at': edited_at,
'last_edited_by': request.user,
'last_activity_at': edited_at,
'last_activity_by': request.user,
'tagnames': form.cleaned_data['tags'],
'summary': strip_tags(html)[:180],
'html': html,
}
if ('wiki' in form.cleaned_data and
form.cleaned_data['wiki']):
updated_fields['wiki'] = True
updated_fields['wikified_at'] = edited_at
Question.objects.filter(
id=question.id).update(**updated_fields)
# Update the Question's tag associations
if tags_changed:
tags_updated = Question.objects.update_tags(
question, question.tagnames, request.user)
# Create a new revision
revision = QuestionRevision(
question = question,
title = form.cleaned_data['title'],
author = request.user,
revised_at = edited_at,
tagnames = form.cleaned_data['tags'],
text = form.cleaned_data['text']
)
if form.cleaned_data['summary']:
revision.summary = form.cleaned_data['summary']
else:
revision.summary = \
diff.generate_question_revision_summary(
latest_revision, revision,
('wiki' in updated_fields))
revision.save()
# TODO 5 body edits by the author = automatic wiki mode
# TODO 4 individual editors = automatic wiki mode
# TODO Badges related to Tag usage
# TODO Badges related to editing Questions
return HttpResponseRedirect(question.get_absolute_url())
else:
if 'revision' in request.GET:
revision_form = RevisionForm(question, latest_revision, request.GET)
if revision_form.is_valid():
# Replace Question details with those from the selected revision
form = EditQuestionForm(question,
QuestionRevision.objects.get(question=question,
revision=revision_form.cleaned_data['revision']))
else:
revision_form = RevisionForm(question, latest_revision)
form = EditQuestionForm(question, latest_revision)
if revision_form is None:
# We're about to redisplay after a POST where we didn't care which
# revision was selected - make sure the revision the user started from
# is still selected on redisplay.
revision_form = RevisionForm(question, latest_revision, request.POST)
return render_to_response('edit_question.html', {
'title': u'Edit Question',
'question': question,
'revision_form': revision_form,
'form': form,
'preview': preview,
}, context_instance=RequestContext(request))
def _retag_question(request, question):
"""Allows the user to edit a Question's tags."""
if request.method == 'POST':
form = RetagQuestionForm(question, request.POST)
if form.is_valid():
if form.has_changed():
latest_revision = question.get_latest_revision()
retagged_at = datetime.datetime.now()
# Update the Question itself
Question.objects.filter(id=question.id).update(
tagnames = form.cleaned_data['tags'],
last_edited_at = retagged_at,
last_edited_by = request.user,
last_activity_at = retagged_at,
last_activity_by = request.user
)
# Update the Question's tag associations
tags_updated = Question.objects.update_tags(question,
form.cleaned_data['tags'], request.user)
# Create a new revision
QuestionRevision.objects.create(
question = question,
title = latest_revision.title,
author = request.user,
revised_at = retagged_at,
tagnames = form.cleaned_data['tags'],
summary = u'modified tags',
text = latest_revision.text
)
# TODO Badges related to retagging / Tag usage
# TODO Badges related to editing Questions
return HttpResponseRedirect(question.get_absolute_url())
else:
form = RetagQuestionForm(question)
return render_to_response('retag_question.html', {
'title': u'Edit Tags',
'question': question,
'form': form,
}, context_instance=RequestContext(request))
QUESTION_REVISION_TEMPLATE = ('<h1>%(title)s</h1>\n'
'<div class="text">%(html)s</div>\n'
'<div class="tags">%(tags)s</div>')
def question_revisions(request, question_id):
"""Revision history for a Question."""
question = get_object_or_404(Question, id=question_id)
revisions = list(question.revisions.all())
populate_foreign_key_caches(User, ((revisions, ('author',)),),
fields=('username', 'gravatar', 'reputation', 'gold', 'silver',
'bronze'))
for i, revision in enumerate(revisions):
revision.html = QUESTION_REVISION_TEMPLATE % {
'title': revision.title,
'html': sanitize_html(markdowner.convert(revision.text)),
'tags': ' '.join(['<a class="tag">%s</a>' % tag
for tag in revision.tagnames.split(' ')]),
}
if i > 0:
revisions[i - 1].diff = htmldiff(revision.html,
revisions[i - 1].html)
return render_to_response('question_revisions.html', {
'title': u'Question Revisions',
'question': question,
'revisions': revisions,
}, context_instance=RequestContext(request))
def close_question(request, question_id):
"""Closes or reopens a Question based on its current closed status."""
question = get_object_or_404(Question, id=question_id)
if not auth.can_close_question(request.user, question):
raise Http404
if not question.closed:
return _close_question(request, question)
else:
return _reopen_question(request, question)
def _close_question(request, question):
"""Closes a Question."""
if request.method == 'POST' and 'close' in request.POST:
form = CloseQuestionForm(request.POST)
if form.is_valid():
Question.objects.filter(id=question.id).update(closed=True,
closed_by=request.user, closed_at=datetime.datetime.now(),
close_reason=form.cleaned_data['reason'])
if request.is_ajax():
return JsonResponse({'success': True})
else:
return HttpResponseRedirect(question.get_absolute_url())
elif request.is_ajax():
return JsonResponse({'success': False, 'errors': form.errors})
else:
if request.is_ajax():
raise Http404
form = CloseQuestionForm()
return render_to_response('close_question.html', {
'title': u'Close Question',
'question': question,
'form': form,
}, context_instance=RequestContext(request))
def _reopen_question(request, question):
"""Reopens a Question."""
if request.method == 'POST' and 'reopen' in request.POST:
Question.objects.filter(id=question.id).update(closed=False,
closed_by=None, closed_at=None, close_reason=None)
if request.is_ajax():
return JsonResponse({'success': True})
else:
return HttpResponseRedirect(question.get_absolute_url())
if request.is_ajax():
raise Http404
return render_to_response('reopen_question.html', {
'title': u'Reopen Question',
'question': question,
}, context_instance=RequestContext(request))
def delete_question(request, question_id):
"""Deletes or undeletes a Question."""
raise NotImplementedError
def favourite_question(request, question_id):
"""
Adds or removes a FavouriteQuestion.
Favouriting will not use a confirmation page, as it's an action which
is non-destructive and easily reversible.
"""
if request.method != 'POST':
raise Http404
question = get_object_or_404(Question, id=question_id, deleted=False)
favourite, created = FavouriteQuestion.objects.get_or_create(
user=request.user, question=question)
if not created:
favourite.delete()
if request.is_ajax():
return JsonResponse({'success': True, 'favourited': created})
else:
return HttpResponseRedirect(question.get_absolute_url())
def add_answer(request, question_id):
"""
Adds an Answer to a Question.
Once a certain number of Answers have been added, a Question and all
its Answers will enter wiki mode and all subsequent Answers will be in
wiki mode.
"""
question = get_object_or_404(Question, id=question_id)
preview = None
if request.method == 'POST':
form = AddAnswerForm(request.POST)
if form.is_valid():
html = sanitize_html(markdowner.convert(form.cleaned_data['text']))
if 'preview' in request.POST:
# The user submitted the form to preview the formatted answer
preview = mark_safe(html)
elif 'submit' in request.POST:
added_at = datetime.datetime.now()
# Create the Answer
answer = Answer(
question = question,
author = request.user,
added_at = added_at,
wiki = (form.cleaned_data['wiki'] or
question.answer_count >= AUTO_WIKI_ANSWER_COUNT),
html = html
)
if answer.wiki:
answer.wikified_at = added_at
# When in wiki mode, we always display the last edit
answer.last_edited_at = added_at
answer.last_edited_by = request.user
answer.save()
# Create the initial revision
AnswerRevision.objects.create(
answer = answer,
revision = 1,
author = request.user,
revised_at = added_at,
summary = u'added answer',
text = form.cleaned_data['text']
)
Question.objects.update_answer_count(question)
# TODO Badges related to answering Questions
# TODO If this is answer 30, put question and all answers into
# wiki mode.
# TODO Redirect needs to handle paging
return HttpResponseRedirect(question.get_absolute_url())
else:
form = AddAnswerForm()
return render_to_response('add_answer.html', {
'title': u'Post an Answer',
'question': question,
'form': form,
'preview': preview,
}, context_instance=RequestContext(request))
def answer_comments(request, answer_id, answer=None, form=None):
"""
Displays a single Answer and any Comments on it.
This is primarily intended as a fallback for users who can't
dynamically load Comments.
"""
if answer is None:
answer = get_object_or_404(Answer, id=answer_id)
populate_foreign_key_caches(User, (
((answer,), ('author', 'last_edited_by')),
),
fields=('username', 'gravatar', 'reputation', 'gold', 'silver',
'bronze'))
content_type = ContentType.objects.get_for_model(Answer)
comments = Comment.objects.filter(content_type=content_type,
object_id=answer.id)
if form is None:
form = CommentForm()
return render_to_response('answer.html', {
'title': u'Answer Comments',
'answer': answer,
'comments': comments,
'comment_form': form,
}, context_instance=RequestContext(request))
def edit_answer(request, answer_id):
"""Edits an Answer."""
answer = get_object_or_404(Answer, id=answer_id)
if not auth.can_edit_post(request.user, answer):
raise Http404
latest_revision = answer.get_latest_revision()
preview = None
revision_form = None
if request.method == 'POST':
if 'select_revision' in request.POST:
# The user submitted to change the revision to start editing from
revision_form = RevisionForm(answer, latest_revision, request.POST)
if revision_form.is_valid():
# Replace Question details with those from the selected revision
form = EditAnswerForm(answer,
AnswerRevision.objects.get(answer=answer,
revision=revision_form.cleaned_data['revision']))
else:
# Make sure we keep a hold of the user's other input, even
# though they appear to be messing about.
form = EditAnswerForm(answer, latest_revision, request.POST)
else:
# Always check modifications against the latest revision
form = EditAnswerForm(answer, latest_revision, request.POST)
if form.is_valid():
html = sanitize_html(
markdowner.convert(form.cleaned_data['text']))
if 'preview' in request.POST:
# The user submitted to preview the formatted question
preview = mark_safe(html)
elif 'submit' in request.POST:
if form.has_changed():
edited_at = datetime.datetime.now()
# Update the Answer itself
updated_fields = {
'last_edited_at': edited_at,
'last_edited_by': request.user,
'html': html,
}
if ('wiki' in form.cleaned_data and
form.cleaned_data['wiki']):
updated_fields['wiki'] = True
updated_fields['wikified_at'] = edited_at
Answer.objects.filter(
id=answer.id).update(**updated_fields)
# Create a new revision
revision = AnswerRevision(
answer = answer,
author = request.user,
revised_at = edited_at,
text = form.cleaned_data['text']
)
if form.cleaned_data['summary']:
revision.summary = form.cleaned_data['summary']
else:
revision.summary = \
diff.generate_answer_revision_summary(
latest_revision, revision,
('wiki' in updated_fields))
revision.save()
# TODO 5 body edits by the asker = automatic wiki mode
# TODO 4 individual editors = automatic wiki mode
# TODO Badges related to editing Answers
return HttpResponseRedirect(answer.get_absolute_url())
else:
revision_form = RevisionForm(answer, latest_revision)
form = EditAnswerForm(answer, latest_revision)
if revision_form is None:
# We're about to redisplay after a POST where we didn't care which
# revision was selected - make sure the revision the user started from
# is still selected on redisplay.
revision_form = RevisionForm(answer, latest_revision, request.POST)
return render_to_response('edit_answer.html', {
'title': u'Edit Answer',
'question': answer.question,
'answer': answer,
'revision_form': revision_form,
'form': form,
'preview': preview,
}, context_instance=RequestContext(request))
ANSWER_REVISION_TEMPLATE = '<div class="text">%(html)s</div>'
def answer_revisions(request, answer_id):
"""Revision history for an Answer."""
answer = get_object_or_404(Answer, id=answer_id)
revisions = list(answer.revisions.all())
populate_foreign_key_caches(User, ((revisions, ('author',)),),
fields=('username', 'gravatar', 'reputation', 'gold', 'silver',
'bronze'))
for i, revision in enumerate(revisions):
revision.html = ANSWER_REVISION_TEMPLATE % {
'html': sanitize_html(markdowner.convert(revision.text)),
}
if i > 0:
revisions[i - 1].diff = htmldiff(revision.html,
revisions[i - 1].html)
return render_to_response('answer_revisions.html', {
'title': u'Answer Revisions',
'answer': answer,
'revisions': revisions,
}, context_instance=RequestContext(request))
def accept_answer(request, answer_id):
"""Marks an Answer as accepted."""
raise NotImplementedError
def delete_answer(request, answer_id):
"""Deletes or undeletes an Answer."""
raise NotImplementedError
def vote(request, model, object_id):
"""
Vote on a Question or Answer.
"""
if request.method != 'POST':
raise Http404
vote_type = request.POST.get('type', None)
if vote_type == 'up' and auth.can_vote_up(request.user):
vote_type = Vote.VOTE_UP
elif vote_type == 'down' and auth.can_vote_down(request.user):
vote_type = Vote.VOTE_DOWN
else:
raise Http404
# TODO Ensure users can't vote on their own posts
obj = get_object_or_404(model, id=object_id, deleted=False, locked=False)
content_type = ContentType.objects.get_for_model(model)
try:
existing_vote = Vote.objects.get(content_type=content_type,
object_id=object_id,
user=request.user)
except Vote.DoesNotExist:
existing_vote = None
if existing_vote is None:
Vote.objects.create(content_type=content_type,
object_id=object_id,
user=request.user,
vote=vote_type)
else:
if vote_type == existing_vote.vote:
existing_vote.delete()
else:
existing_vote.vote = vote_type
existing_vote.save()
# TODO Reputation management
if request.is_ajax():
return JsonResponse({
'success': True,
'score': model._default_manager.filter(
id=object_id).values_list('score', flat=True)[0],
})
else:
return HttpResponseRedirect(obj.get_absolute_url())
def flag_item(request, model, object_id):
"""Flag a Question or Answer as containing offensive content."""
raise NotImplementedError
def add_comment(request, model, object_id):
"""Adds a comment to a Question or Answer."""
obj = get_object_or_404(model, id=object_id)
if request.method == "POST":
form = CommentForm(request.POST)
if form.is_valid():
Comment.objects.create(
content_type = ContentType.objects.get_for_model(model),
object_id = object_id,
author = request.user,
added_at = datetime.datetime.now(),
comment = form.cleaned_data['comment']
)
if request.is_ajax():
return JsonResponse({'success': True})
else:
return HttpResponseRedirect(obj.get_absolute_url())
elif request.is_ajax():
return JsonResponse({'success': False, 'errors': form.errors})
else:
form = CommentForm()
# Let the appropriate fallback view take care of display/redisplay
if model is Question:
return question_comments(request, obj, form=form)
elif model is Answer:
return answer_comments(request, object_id, answer=obj, form=form)
def delete_comment(request, comment_id):
"""Deletes a Comment permenantly."""
raise NotImplementedError
TAG_SORT = {
'popular': ('-use_count', 'name'),
'name': ('name',),
}
DEFAULT_TAG_SORT = 'popular'
def tags(request):
"""Searchable Tag list."""
sort_type = request.GET.get('sort', DEFAULT_TAG_SORT)
if sort_type not in TAG_SORT:
sort_type = DEFAULT_TAG_SORT
tags = Tag.objects.all().order_by(*TAG_SORT[sort_type])
name_filter = request.GET.get('filter', '')
if name_filter:
tags = tags.filter(name__icontains=name_filter)
paginator = Paginator(tags, 50)
page = get_page(request, paginator)
return render_to_response('tags.html', {
'title': u'Tags',
'tags': page.object_list,
'page': page,
'sort': sort_type,
'filter': name_filter,
}, context_instance=RequestContext(request))
def tag(request, tag_name):
"""Displayed Questions for a Tag."""
raise NotImplementedError
USER_SORT = {
'reputation': ('-reputation', '-date_joined'),
'newest': ('-date_joined',),
'oldest': ('date_joined',),
'name': ('username',),
}
DEFAULT_USER_SORT = 'reputation'
def users(request):
"""Searchable User list."""
sort_type = request.GET.get('sort', DEFAULT_USER_SORT)
if sort_type not in USER_SORT:
sort_type = DEFAULT_USER_SORT
users = User.objects.all().order_by(*USER_SORT[sort_type])
name_filter = request.GET.get('filter', '')
if name_filter:
users = users.filter(username__icontains=name_filter)
users = users.values('id', 'username', 'gravatar', 'reputation', 'gold',
'silver', 'bronze')
paginator = Paginator(users, 28)
page = get_page(request, paginator)
return render_to_response('users.html', {
'title': u'Users',
'users': page.object_list,
'page': page,
'sort': sort_type,
'filter': name_filter,
}, context_instance=RequestContext(request))
def user(request, user_id):
"""Displays a User and various information about them."""
raise NotImplementedError
def badges(request):
"""Badge list."""
return render_to_response('badges.html', {
'title': u'Badges',
'badges': Badge.objects.all(),
}, context_instance=RequestContext(request))
def badge(request, badge_id):
"""Displays a Badge and any Users who have recently been awarded it."""
badge = get_object_or_404(Badge, id=badge_id)
awarded_to = badge.awarded_to.all().order_by('-award__awarded_at').values(
'id', 'username', 'reputation', 'gold', 'silver', 'bronze')[:500]
return render_to_response('badge.html', {
'title': '%s Badge' % badge.name,
'badge': badge,
'awarded_to': awarded_to,
}, context_instance=RequestContext(request))
|
Kalyzee/edx-platform
|
refs/heads/master
|
lms/djangoapps/verify_student/migrations/0002_auto__add_field_softwaresecurephotoverification_window.py
|
114
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SoftwareSecurePhotoVerification.window'
db.add_column('verify_student_softwaresecurephotoverification', 'window',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['reverification.MidcourseReverificationWindow'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'SoftwareSecurePhotoVerification.window'
db.delete_column('verify_student_softwaresecurephotoverification', 'window_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'reverification.midcoursereverificationwindow': {
'Meta': {'object_name': 'MidcourseReverificationWindow'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'verify_student.softwaresecurephotoverification': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'SoftwareSecurePhotoVerification'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'error_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'error_msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'face_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_key': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
'receipt_id': ('django.db.models.fields.CharField', [], {'default': "'<function uuid4 at 0x21d4398>'", 'max_length': '255', 'db_index': 'True'}),
'reviewing_service': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'reviewing_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'photo_verifications_reviewed'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('model_utils.fields.StatusField', [], {'default': "'created'", 'max_length': '100', u'no_check_for_status': 'True'}),
'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'window': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['reverification.MidcourseReverificationWindow']", 'null': 'True'})
}
}
complete_apps = ['verify_student']
|
coursemdetw/2015cd_midterm
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/color.py
|
603
|
## pygame - Python Game Library
## Copyright (C) 2000-2003 Pete Shinners
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Pete Shinners
## pete@shinners.org
"""Manipulate colors"""
try:
from colordict import THECOLORS
except ImportError:
#the colordict module isn't available
THECOLORS = {}
def Color(colorname):
"""pygame.color.Color(colorname) -> RGBA
Get RGB values from common color names
The color name can be the name of a common English color,
or a "web" style color in the form of 0xFF00FF. The English
color names are defined by the standard 'rgb' colors for X11.
With the hex color formatting you may optionally include an
alpha value, the formatting is 0xRRGGBBAA. You may also specify
a hex formatted color by starting the string with a '#'.
The color name used is case insensitive and whitespace is ignored.
"""
if colorname[:2] == '0x' or colorname[0] == '#': #webstyle
if colorname[0] == '#':
colorname = colorname[1:]
else:
colorname = colorname[2:]
a = 255
try:
r = int('0x' + colorname[0:2], 16)
g = int('0x' + colorname[2:4], 16)
b = int('0x' + colorname[4:6], 16)
if len(colorname) > 6:
a = int('0x' + colorname[6:8], 16)
except ValueError:
raise ValueError("Illegal hex color")
return r, g, b, a
else: #color name
#no spaces and lowercase
name = colorname.replace(' ', '').lower()
try:
return THECOLORS[name]
except KeyError:
raise ValueError("Illegal color name, " + name)
def _splitcolor(color, defaultalpha=255):
try:
second = int(color)
r = g = b = color
a = defaultalpha
except TypeError:
if len(color) == 4:
r, g, b, a = color
elif len(color) == 3:
r, g, b = color
a = defaultalpha
return r, g, b, a
def add(color1, color2):
"""pygame.color.add(color1, color2) -> RGBA
add two colors
Add the RGB values of two colors together. If one of the
colors is only a single numeric value, it is applied to the
RGB components of the first color. Color values will be clamped
to the maximum color value of 255.
"""
r1, g1, b1, a1 = _splitcolor(color1)
r2, g2, b2, a2 = _splitcolor(color2)
m, i = min, int
return m(i(r1+r2), 255), m(i(g1+g2), 255), m(i(b1+b2), 255), m(i(a1+a2), 255)
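# Worked examples (illustrative): values are added per component and clamped
# at 255, and a single number is expanded to all three RGB components:
#
#   add((200, 100, 50), (100, 100, 100))  -> (255, 200, 150, 255)
#   add((10, 20, 30), 5)                  -> (15, 25, 35, 255)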
def subtract(color1, color2):
"""pygame.color.subtract(color1, color2) -> RGBA
subtract two colors
Subtract the RGB values of one color from another. If one of the
colors is only a single numeric value, it is applied to the
RGB components of the first color. Color values will be clamped
to the minimum color value of 0.
"""
r1, g1, b1, a1 = _splitcolor(color1)
r2, g2, b2, a2 = _splitcolor(color2, 0)
m, i = max, int
return m(i(r1-r2), 0), m(i(g1-g2), 0), m(i(b1-b2), 0), m(i(a1-a2), 0)
def multiply(color1, color2):
"""pygame.color.multiply(color1, color2) -> RGBA
multiply two colors
Multiply the RGB values of two colors together. If one of the
colors is only a single numeric value, it is applied to the
RGB components of the first color.
"""
r1, g1, b1, a1 = _splitcolor(color1)
r2, g2, b2, a2 = _splitcolor(color2)
m, i = min, int
return m(i(r1*r2)/255, 255), m(i(g1*g2)/255, 255), m(i(b1*b2)/255, 255), m(i(a1*a2)/255, 255)
|
googleapis/python-debugger-client
|
refs/heads/master
|
google/cloud/debugger_v2/services/controller2/__init__.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import Controller2Client
from .async_client import Controller2AsyncClient
__all__ = (
"Controller2Client",
"Controller2AsyncClient",
)
|
the-zebulan/CodeWars
|
refs/heads/master
|
tests/beta_tests/test_multiples_2.py
|
1
|
import unittest
from katas.beta.multiples_2 import multiples
class MultiplesTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(multiples(49), 'Fizz')
def test_equals_2(self):
self.assertEqual(multiples(147), 'Fang')
def test_equals_3(self):
self.assertEqual(multiples(30), 'Foo')
def test_equals_4(self):
self.assertEqual(multiples(51), 'Far')
|
pipoket/django-guardian
|
refs/heads/master
|
example_project/posts/views.py
|
3
|
def post_detail(request):
return
|
damdam-s/OpenUpgrade
|
refs/heads/8.0
|
addons/report/tests/test_reports.py
|
385
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import openerp
import openerp.tests
_logger = logging.getLogger(__name__)
@openerp.tests.common.at_install(False)
@openerp.tests.common.post_install(True)
class TestReports(openerp.tests.TransactionCase):
def test_reports(self):
registry, cr, uid = self.registry, self.cr, self.uid
r_model = registry('ir.actions.report.xml')
domain = [('report_type', 'like', 'qweb')]
for r in r_model.browse(cr, uid, r_model.search(cr, uid, domain)):
report_model = 'report.%s' % r.report_name
try:
registry(report_model)
except KeyError:
# Only test the generic reports here
_logger.info("testing report %s", r.report_name)
report_model = registry(r.model)
report_model_ids = report_model.search(cr, uid, [], limit=10)
if not report_model_ids:
_logger.info("no record found skipping report %s", r.report_name)
if not r.multi:
report_model_ids = report_model_ids[:1]
# Test report generation
registry('report').get_html(cr, uid, report_model_ids, r.report_name)
else:
continue
|
Markus-Goetz/CDS-Invenio-Authorlist
|
refs/heads/master
|
modules/miscutil/lib/miscutil_config.py
|
8
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Configuration file for miscutil module.
- Contains standard error messages for errorlib
e.g. No error message given, etc.
"""
__revision__ = "$Id$"
# pylint: disable=C0301
CFG_MISCUTIL_ERROR_MESSAGES = \
{ 'ERR_MISCUTIL_BAD_FILE_ARGUMENT_PASSED': '_("Invalid argument %s was passed")',
'ERR_MISCUTIL_WRITE_FAILED': '_("Unable to write to file %s")',
'ERR_MISCUTIL_NO_ERROR_MESSAGE': '_("Trying to write a non-error message to the error log")',
'ERR_MISCUTIL_NO_WARNING_MESSAGE': '_("Trying to write a non-error message or non-warning message to the error log")',
'ERR_MISCUTIL_TOO_MANY_ARGUMENT': '_("Unable to display error: Too many arguments given for error %s")',
'ERR_MISCUTIL_TOO_FEW_ARGUMENT':'_("Unable to display error: Too few arguments given for error %s")',
'ERR_MISCUTIL_IMPORT_ERROR': '_("An undefined error has occurred (%s). \'%s\' does not exist")',
'ERR_MISCUTIL_NO_DICT': '_("An undefined error has occurred (%s). %s does not contain %s")',
'ERR_MISCUTIL_NO_MESSAGE_IN_DICT': '_("An undefined error has occurred. %s not defined in %s")',
'ERR_MISCUTIL_UNDEFINED_ERROR': '_("An undefined error has occurred (%s)")',
'ERR_MISCUTIL_BAD_ARGUMENT_TYPE': '_("Unable to display error: Arguments do not match for error %s")',
'ERR_MISCUTIL_DEBUG': 'Error nb %i',
'ERR_MISCUTIL_NOT_ATTEMPTING_SEND_EMAIL' : '_("The system is not attempting to send an email from %s, to %s, with body %s")',
'ERR_MISCUTIL_CONNECTION_SMTP': '_("Error in connecting to the SMTP server, waiting %s seconds. Exception is %s, while sending email from %s to %s with body %s.")',
'ERR_MISCUTIL_SENDING_EMAIL' : '_("Error in sending email from %s to %s with body %s")'
}
|
ArnossArnossi/django
|
refs/heads/master
|
django/contrib/gis/maps/google/gmap.py
|
526
|
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.gis.maps.google.overlays import (
GMarker, GPolygon, GPolyline,
)
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.six.moves import range
class GoogleMapException(Exception):
pass
# The default Google Maps URL (for the API javascript)
# TODO: Internationalize for Japan, UK, etc.
GOOGLE_MAPS_URL = 'http://maps.google.com/maps?file=api&v=%s&key='
class GoogleMap(object):
"A class for generating Google Maps JavaScript."
# String constants
onunload = mark_safe('onunload="GUnload()"') # Cleans up after Google Maps
vml_css = mark_safe('v\:* {behavior:url(#default#VML);}') # CSS for IE VML
xmlns = mark_safe('xmlns:v="urn:schemas-microsoft-com:vml"') # XML Namespace (for IE VML).
def __init__(self, key=None, api_url=None, version=None,
center=None, zoom=None, dom_id='map',
kml_urls=[], polylines=None, polygons=None, markers=None,
template='gis/google/google-map.js',
js_module='geodjango',
extra_context={}):
# The Google Maps API Key defined in the settings will be used
# if not passed in as a parameter. The use of an API key is
# _required_.
if not key:
try:
self.key = settings.GOOGLE_MAPS_API_KEY
except AttributeError:
raise GoogleMapException(
'Google Maps API Key not found (try adding '
'GOOGLE_MAPS_API_KEY to your settings).'
)
else:
self.key = key
# Getting the Google Maps API version; defaults to using the latest ("2.x"),
# which is not necessarily the most stable.
if not version:
self.version = getattr(settings, 'GOOGLE_MAPS_API_VERSION', '2.x')
else:
self.version = version
# Can specify the API URL in the `api_url` keyword.
if not api_url:
self.api_url = getattr(settings, 'GOOGLE_MAPS_URL', GOOGLE_MAPS_URL) % self.version
else:
self.api_url = api_url
# Setting the DOM id of the map, the load function, the JavaScript
# template, and the KML URLs array.
self.dom_id = dom_id
self.extra_context = extra_context
self.js_module = js_module
self.template = template
self.kml_urls = kml_urls
# Does the user want any GMarker, GPolygon, and/or GPolyline overlays?
overlay_info = [[GMarker, markers, 'markers'],
[GPolygon, polygons, 'polygons'],
[GPolyline, polylines, 'polylines']]
for overlay_class, overlay_list, varname in overlay_info:
setattr(self, varname, [])
if overlay_list:
for overlay in overlay_list:
if isinstance(overlay, overlay_class):
getattr(self, varname).append(overlay)
else:
getattr(self, varname).append(overlay_class(overlay))
# If GMarker, GPolygons, and/or GPolylines are used the zoom will be
# automatically calculated via the Google Maps API. If both a zoom
# level and a center coordinate are provided with polygons/polylines,
# no automatic determination will occur.
self.calc_zoom = False
if self.polygons or self.polylines or self.markers:
if center is None or zoom is None:
self.calc_zoom = True
# Defaults for the zoom level and center coordinates if the zoom
# is not automatically calculated.
if zoom is None:
zoom = 4
self.zoom = zoom
if center is None:
center = (0, 0)
self.center = center
def render(self):
"""
Generates the JavaScript necessary for displaying this Google Map.
"""
params = {'calc_zoom': self.calc_zoom,
'center': self.center,
'dom_id': self.dom_id,
'js_module': self.js_module,
'kml_urls': self.kml_urls,
'zoom': self.zoom,
'polygons': self.polygons,
'polylines': self.polylines,
'icons': self.icons,
'markers': self.markers,
}
params.update(self.extra_context)
return render_to_string(self.template, params)
@property
def body(self):
"Returns HTML body tag for loading and unloading Google Maps javascript."
return format_html('<body {} {}>', self.onload, self.onunload)
@property
def onload(self):
"Returns the `onload` HTML <body> attribute."
return format_html('onload="{}.{}_load()"', self.js_module, self.dom_id)
@property
def api_script(self):
"Returns the <script> tag for the Google Maps API javascript."
return format_html('<script src="{}{}" type="text/javascript"></script>',
self.api_url, self.key)
@property
def js(self):
"Returns only the generated Google Maps JavaScript (no <script> tags)."
return self.render()
@property
def scripts(self):
"Returns all <script></script> tags required with Google Maps JavaScript."
return format_html('{}\n <script type="text/javascript">\n//<![CDATA[\n{}//]]>\n </script>',
self.api_script, mark_safe(self.js))
@property
def style(self):
"Returns additional CSS styling needed for Google Maps on IE."
return format_html('<style type="text/css">{}</style>', self.vml_css)
@property
def xhtml(self):
"Returns XHTML information needed for IE VML overlays."
return format_html('<html xmlns="http://www.w3.org/1999/xhtml" {}>', self.xmlns)
@property
def icons(self):
"Returns a sequence of GIcon objects in this map."
return set(marker.icon for marker in self.markers if marker.icon)
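# Minimal usage sketch (illustrative; assumes GOOGLE_MAPS_API_KEY is defined in
# settings and that the template emits the generated fragments):
#
#   gmap = GoogleMap(zoom=6, center=(0, 0))
#   # In a template: put gmap.style and gmap.scripts in <head>, and use
#   # gmap.body for the <body> tag so the load/unload handlers are attached.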
class GoogleMapSet(GoogleMap):
def __init__(self, *args, **kwargs):
"""
A class for generating sets of Google Maps that will be shown on the
same page together.
Example:
gmapset = GoogleMapSet( GoogleMap( ... ), GoogleMap( ... ) )
gmapset = GoogleMapSet( [ gmap1, gmap2] )
"""
# The `google-multi.js` template is used instead of `google-single.js`
# by default.
template = kwargs.pop('template', 'gis/google/google-multi.js')
# This is the template used to generate the GMap load JavaScript for
# each map in the set.
self.map_template = kwargs.pop('map_template', 'gis/google/google-single.js')
# Running GoogleMap.__init__(), and resetting the template
# value with default obtained above.
super(GoogleMapSet, self).__init__(**kwargs)
self.template = template
# If a tuple/list passed in as first element of args, then assume
if isinstance(args[0], (tuple, list)):
self.maps = args[0]
else:
self.maps = args
# Generating DOM ids for each of the maps in the set.
self.dom_ids = ['map%d' % i for i in range(len(self.maps))]
def load_map_js(self):
"""
Returns JavaScript containing all of the loading routines for each
map in this set.
"""
result = []
for dom_id, gmap in zip(self.dom_ids, self.maps):
# Backup copies the GoogleMap DOM id and template attributes.
# They are overridden on each GoogleMap instance in the set so
# that only the loading JavaScript (and not the header variables)
# is used with the generated DOM ids.
tmp = (gmap.template, gmap.dom_id)
gmap.template = self.map_template
gmap.dom_id = dom_id
result.append(gmap.js)
# Restoring the backup values.
gmap.template, gmap.dom_id = tmp
return mark_safe(''.join(result))
def render(self):
"""
Generates the JavaScript for the collection of Google Maps in
this set.
"""
params = {'js_module': self.js_module,
'dom_ids': self.dom_ids,
'load_map_js': self.load_map_js(),
'icons': self.icons,
}
params.update(self.extra_context)
return render_to_string(self.template, params)
@property
def onload(self):
"Returns the `onload` HTML <body> attribute."
# Overloaded to use the `load` function defined in the
# `google-multi.js`, which calls the load routines for
# each one of the individual maps in the set.
return mark_safe('onload="%s.load()"' % self.js_module)
@property
def icons(self):
"Returns a sequence of all icons in each map of the set."
icons = set()
for map in self.maps:
icons |= map.icons
return icons
|
sdl-static/ircbot-collection
|
refs/heads/master
|
beanbot-client.py
|
4
|
#!/usr/bin/python
import sys, os, re, popen2
from socket import *
serve_addr = ('localhost', 47701)
def popen(cmd):
p = popen2.Popen4(cmd)
p.tochild.close()
val = p.fromchild.read()
p.fromchild.close()
return val.strip()
if __name__ == '__main__':
IRC_BOLD = '\x02'
IRC_ULINE = '\x1f'
IRC_NORMAL = '\x0f'
IRC_RED = '\x034'
IRC_LIME = '\x039'
IRC_BLUE = '\x0312'
repos, rev = sys.argv[1:3]
author = popen(('/usr/local/bin/svnlook', 'author', '-r', rev, repos))
log = popen(('/usr/local/bin/svnlook', 'log', '-r', rev, repos))
log = re.subn(r'\n *', ' ', log)[0]
repos = os.path.basename(repos)
data = (
"%(IRC_LIME)s%(author)s "
"%(IRC_RED)sr%(rev)s "
"%(IRC_BLUE)s%(repos)s "
"%(IRC_NORMAL)s%(log)s" % locals()
)
if len(data) > 400:
data = data[:400] + "..."
#for c in range(0, 20):
# data += " \x030,%s_%s_\x0f" % (c,c)
sock = socket(AF_INET, SOCK_DGRAM)
sock.sendto(data, serve_addr)
sock.close()
|
alrusdi/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/decorators/models.py
|
275
|
# A models.py so that tests run.
|
piotroxp/scibibscan
|
refs/heads/master
|
scib/lib/python3.5/site-packages/astropy/coordinates/tests/test_api_ape5.py
|
1
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
"""
This is the APE5 coordinates API document re-written to work as a series of test
functions.
Note that new tests for coordinates functionality should generally *not* be
added to this file - instead, add them to other appropriate test modules in
this package, like ``test_sky_coord.py``, ``test_frames.py``, or
``test_representation.py``. This file is instead meant mainly to keep track of
deviations from the original APE5 plan.
"""
import numpy as np
from numpy.random import randn
from numpy import testing as npt
from ...tests.helper import (pytest, quantity_allclose as allclose,
assert_quantity_allclose as assert_allclose)
raises = pytest.raises
from ... import units as u
from ... import time
from ... import coordinates as coords
from ..errors import *
try:
import scipy
except ImportError:
HAS_SCIPY = False
else:
HAS_SCIPY = True
def test_representations_api():
from ..representation import SphericalRepresentation, \
UnitSphericalRepresentation, PhysicsSphericalRepresentation, \
CartesianRepresentation
from ... coordinates import Angle, Longitude, Latitude, Distance
#<-----------------Classes for representation of coordinate data--------------->
# These classes inherit from a common base class and internally contain Quantity
# objects, which are arrays (although they may act as scalars, like numpy's
# length-0 "arrays")
# They can be initialized in a variety of ways that make intuitive sense.
# Distance is optional.
UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg)
UnitSphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg)
SphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg, distance=10*u.kpc)
# In the initial implementation, the lat/lon/distance arguments to the
# initializer must be in order. A *possible* future change will be to allow
# smarter guessing of the order. E.g. `Latitude` and `Longitude` objects can be
# given in any order.
UnitSphericalRepresentation(Longitude(8, u.hour), Latitude(5, u.deg))
SphericalRepresentation(Longitude(8, u.hour), Latitude(5, u.deg), Distance(10, u.kpc))
# Arrays of any of the inputs are fine
UnitSphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg)
# Default is to copy arrays, but optionally, it can be a reference
UnitSphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, copy=False)
# strings are parsed by `Latitude` and `Longitude` constructors, so no need to
# implement parsing in the Representation classes
UnitSphericalRepresentation(lon=Angle('2h6m3.3s'), lat=Angle('0.1rad'))
# Or, you can give `Quantity`s with keywords, and they will be internally
# converted to Angle/Distance
c1 = SphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg, distance=10*u.kpc)
# Can also give another representation object with the `reprobj` keyword.
c2 = SphericalRepresentation.from_representation(c1)
# distance, lat, and lon typically will just match in shape
SphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, distance=[10, 11]*u.kpc)
    # if the input shapes are not the same, they will, if possible, be broadcast
    # following numpy's standard broadcasting rules.
c2 = SphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, distance=10*u.kpc)
assert len(c2.distance) == 2
#when they can't be broadcast, it is a ValueError (same as Numpy)
with raises(ValueError):
c2 = UnitSphericalRepresentation(lon=[8, 9, 10]*u.hourangle, lat=[5, 6]*u.deg)
# It's also possible to pass in scalar quantity lists with mixed units. These
# are converted to array quantities following the same rule as `Quantity`: all
# elements are converted to match the first element's units.
c2 = UnitSphericalRepresentation(lon=Angle([8*u.hourangle, 135*u.deg]),
lat=Angle([5*u.deg, (6*np.pi/180)*u.rad]))
assert c2.lat.unit == u.deg and c2.lon.unit == u.hourangle
npt.assert_almost_equal(c2.lon[1].value, 9)
# The Quantity initializer itself can also be used to force the unit even if the
# first element doesn't have the right unit
lon = u.Quantity([120*u.deg, 135*u.deg], u.hourangle)
lat = u.Quantity([(5*np.pi/180)*u.rad, 0.4*u.hourangle], u.deg)
c2 = UnitSphericalRepresentation(lon, lat)
# regardless of how input, the `lat` and `lon` come out as angle/distance
assert isinstance(c1.lat, Angle)
assert isinstance(c1.lat, Latitude) # `Latitude` is an `Angle` subclass
assert isinstance(c1.distance, Distance)
# but they are read-only, as representations are immutable once created
with raises(AttributeError):
c1.lat = Latitude(5, u.deg)
# Note that it is still possible to modify the array in-place, but this is not
# sanctioned by the API, as this would prevent things like caching.
c2.lat[:] = [0] * u.deg # possible, but NOT SUPPORTED
# To address the fact that there are various other conventions for how spherical
# coordinates are defined, other conventions can be included as new classes.
# Later there may be other conventions that we implement - for now just the
# physics convention, as it is one of the most common cases.
c3 = PhysicsSphericalRepresentation(phi=120*u.deg, theta=85*u.deg, r=3*u.kpc)
# first dimension must be length-3 if a lone `Quantity` is passed in.
c1 = CartesianRepresentation(randn(3, 100) * u.kpc)
assert c1.xyz.shape[0] == 3
assert c1.xyz.unit == u.kpc
assert c1.x.shape[0] == 100
assert c1.y.shape[0] == 100
assert c1.z.shape[0] == 100
# can also give each as separate keywords
CartesianRepresentation(x=randn(100)*u.kpc, y=randn(100)*u.kpc, z=randn(100)*u.kpc)
# if the units don't match but are all distances, they will automatically be
# converted to match `x`
xarr, yarr, zarr = randn(3, 100)
c1 = CartesianRepresentation(x=xarr*u.kpc, y=yarr*u.kpc, z=zarr*u.kpc)
c2 = CartesianRepresentation(x=xarr*u.kpc, y=yarr*u.kpc, z=zarr*u.pc)
assert c1.xyz.unit == c2.xyz.unit == u.kpc
assert_allclose((c1.z / 1000) - c2.z, 0*u.kpc, atol=1e-10*u.kpc)
# representations convert into other representations via `represent_as`
srep = SphericalRepresentation(lon=90*u.deg, lat=0*u.deg, distance=1*u.pc)
crep = srep.represent_as(CartesianRepresentation)
assert_allclose(crep.x, 0*u.pc, atol=1e-10*u.pc)
assert_allclose(crep.y, 1*u.pc, atol=1e-10*u.pc)
assert_allclose(crep.z, 0*u.pc, atol=1e-10*u.pc)
# The functions that actually do the conversion are defined via methods on the
# representation classes. This may later be expanded into a full registerable
# transform graph like the coordinate frames, but initially it will be a simpler
# method system
def test_frame_api():
from ..representation import SphericalRepresentation, \
UnitSphericalRepresentation
from ..builtin_frames import ICRS, FK5
#<---------------------Reference Frame/"Low-level" classes--------------------->
# The low-level classes have a dual role: they act as specifiers of coordinate
# frames and they *may* also contain data as one of the representation objects,
# in which case they are the actual coordinate objects themselves.
# They can always accept a representation as a first argument
icrs = ICRS(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg))
# which is stored as the `data` attribute
assert icrs.data.lat == 5*u.deg
assert icrs.data.lon == 8*u.hourangle
    # Frames that require additional information like equinoxes or obstimes get them
# as keyword parameters to the frame constructor. Where sensible, defaults are
# used. E.g., FK5 is almost always J2000 equinox
fk5 = FK5(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg))
J2000 = time.Time('J2000', scale='utc')
fk5_2000 = FK5(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg), equinox=J2000)
assert fk5.equinox == fk5_2000.equinox
# the information required to specify the frame is immutable
J2001 = time.Time('J2001', scale='utc')
with raises(AttributeError):
fk5.equinox = J2001
# Similar for the representation data.
with raises(AttributeError):
fk5.data = UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg)
# There is also a class-level attribute that lists the attributes needed to
# identify the frame. These include attributes like `equinox` shown above.
assert all([nm in ('equinox', 'obstime') for nm in fk5.get_frame_attr_names()])
    # the result of `get_frame_attr_names` is used particularly in the
    # high-level class (discussed below) to allow round-tripping between various
    # frames. It is also part of the public API for developers and other
    # advanced users.
# The actual position information is accessed via the representation objects
assert_allclose(icrs.represent_as(SphericalRepresentation).lat, 5*u.deg)
# shorthand for the above
assert_allclose(icrs.spherical.lat, 5*u.deg)
assert icrs.cartesian.z.value > 0
# Many frames have a "default" representation, the one in which they are
# conventionally described, often with a special name for some of the
# coordinates. E.g., most equatorial coordinate systems are spherical with RA and
# Dec. This works simply as a shorthand for the longer form above
assert_allclose(icrs.dec, 5*u.deg)
assert_allclose(fk5.ra, 8*u.hourangle)
assert icrs.representation == SphericalRepresentation
# low-level classes can also be initialized with names valid for that representation
# and frame:
icrs_2 = ICRS(ra=8*u.hour, dec=5*u.deg, distance=1*u.kpc)
assert_allclose(icrs.ra, icrs_2.ra)
# and these are taken as the default if keywords are not given:
#icrs_nokwarg = ICRS(8*u.hour, 5*u.deg, distance=1*u.kpc)
#assert icrs_nokwarg.ra == icrs_2.ra and icrs_nokwarg.dec == icrs_2.dec
# they also are capable of computing on-sky or 3d separations from each other,
# which will be a direct port of the existing methods:
coo1 = ICRS(ra=0*u.hour, dec=0*u.deg)
coo2 = ICRS(ra=0*u.hour, dec=1*u.deg)
# `separation` is the on-sky separation
assert coo1.separation(coo2).degree == 1.0
# while `separation_3d` includes the 3D distance information
coo3 = ICRS(ra=0*u.hour, dec=0*u.deg, distance=1*u.kpc)
coo4 = ICRS(ra=0*u.hour, dec=0*u.deg, distance=2*u.kpc)
assert coo3.separation_3d(coo4).kpc == 1.0
# The next example fails because `coo1` and `coo2` don't have distances
with raises(ValueError):
assert coo1.separation_3d(coo2).kpc == 1.0
# repr/str also shows info, with frame and data
#assert repr(fk5) == ''
def test_transform_api():
from ..representation import UnitSphericalRepresentation
from ..builtin_frames import ICRS, FK5
from ..baseframe import frame_transform_graph, BaseCoordinateFrame
from ..transformations import DynamicMatrixTransform
#<-------------------------Transformations------------------------------------->
# Transformation functionality is the key to the whole scheme: they transform
# low-level classes from one frame to another.
#(used below but defined above in the API)
fk5 = FK5(ra=8*u.hour, dec=5*u.deg)
# If no data (or `None`) is given, the class acts as a specifier of a frame, but
# without any stored data.
J2001 = time.Time('J2001', scale='utc')
fk5_J2001_frame = FK5(equinox=J2001)
    # if they do not have data, the repr instead shows just the frame specification
assert repr(fk5_J2001_frame) == "<FK5 Frame (equinox=J2001.000)>"
# Note that, although a frame object is immutable and can't have data added, it
# can be used to create a new object that does have data by giving the
# `realize_frame` method a representation:
srep = UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg)
fk5_j2001_with_data = fk5_J2001_frame.realize_frame(srep)
assert fk5_j2001_with_data.data is not None
# Now `fk5_j2001_with_data` is in the same frame as `fk5_J2001_frame`, but it
# is an actual low-level coordinate, rather than a frame without data.
# These frames are primarily useful for specifying what a coordinate should be
# transformed *into*, as they are used by the `transform_to` method
# E.g., this snippet precesses the point to the new equinox
newfk5 = fk5.transform_to(fk5_J2001_frame)
assert newfk5.equinox == J2001
# classes can also be given to `transform_to`, which then uses the defaults for
# the frame information:
samefk5 = fk5.transform_to(FK5)
# `fk5` was initialized using default `obstime` and `equinox`, so:
assert_allclose(samefk5.ra, fk5.ra, atol=1e-10*u.deg)
assert_allclose(samefk5.dec, fk5.dec, atol=1e-10*u.deg)
# transforming to a new frame necessarily loses framespec information if that
# information is not applicable to the new frame. This means transforms are not
# always round-trippable:
fk5_2 = FK5(ra=8*u.hour, dec=5*u.deg, equinox=J2001)
ic_trans = fk5_2.transform_to(ICRS)
# `ic_trans` does not have an `equinox`, so now when we transform back to FK5,
# it's a *different* RA and Dec
fk5_trans = ic_trans.transform_to(FK5)
assert not allclose(fk5_2.ra, fk5_trans.ra, rtol=0, atol=1e-10*u.deg)
# But if you explicitly give the right equinox, all is fine
fk5_trans_2 = fk5_2.transform_to(FK5(equinox=J2001))
assert_allclose(fk5_2.ra, fk5_trans_2.ra, rtol=0, atol=1e-10*u.deg)
    # Trying to transform a frame with no data is of course an error:
with raises(ValueError):
FK5(equinox=J2001).transform_to(ICRS)
# To actually define a new transformation, the same scheme as in the
# 0.2/0.3 coordinates framework can be re-used - a graph of transform functions
# connecting various coordinate classes together. The main changes are:
# 1) The transform functions now get the frame object they are transforming the
# current data into.
# 2) Frames with additional information need to have a way to transform between
# objects of the same class, but with different framespecinfo values
# An example transform function:
class SomeNewSystem(BaseCoordinateFrame):
pass
@frame_transform_graph.transform(DynamicMatrixTransform, SomeNewSystem, FK5)
def new_to_fk5(newobj, fk5frame):
ot = newobj.obstime
eq = fk5frame.equinox
# ... build a *cartesian* transform matrix using `eq` that transforms from
        # the `newobj` frame as observed at `ot` to FK5 at equinox `eq`
matrix = np.eye(3)
return matrix
# Other options for transform functions include one that simply returns the new
# coordinate object, and one that returns a cartesian matrix but does *not*
# require `newobj` or `fk5frame` - this allows optimization of the transform.
def test_highlevel_api():
J2001 = time.Time('J2001', scale='utc')
#<---------------------------"High-level" class-------------------------------->
# The "high-level" class is intended to wrap the lower-level classes in such a
# way that they can be round-tripped, as well as providing a variety of
# convenience functionality. This document is not intended to show *all* of the
# possible high-level functionality, rather how the high-level classes are
# initialized and interact with the low-level classes
# this creates an object that contains an `ICRS` low-level class, initialized
# identically to the first ICRS example further up.
sc = coords.SkyCoord(coords.SphericalRepresentation(lon=8 * u.hour,
lat=5 * u.deg, distance=1 * u.kpc), frame='icrs')
# Other representations and `system` keywords delegate to the appropriate
# low-level class. The already-existing registry for user-defined coordinates
    # will be used by `SkyCoordinate` to figure out what the various `system`
    # keywords actually mean.
sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs')
sc = coords.SkyCoord(l=120 * u.deg, b=5 * u.deg, frame='galactic')
# High-level classes can also be initialized directly from low-level objects
sc = coords.SkyCoord(coords.ICRS(ra=8 * u.hour, dec=5 * u.deg))
# The next example raises an error because the high-level class must always
# have position data.
with pytest.raises(ValueError):
sc = coords.SkyCoord(coords.FK5(equinox=J2001)) # raises ValueError
# similarly, the low-level object can always be accessed
#this is how it's supposed to look, but sometimes the numbers get rounded in
#funny ways
#assert repr(sc.frame) == '<ICRS Coordinate: ra=120.0 deg, dec=5.0 deg>'
rscf = repr(sc.frame)
assert rscf.startswith('<ICRS Coordinate: (ra, dec) in deg')
# and the string representation will be inherited from the low-level class.
    # same deal, should look like this, but different architectures / Python
    # versions may round the numbers differently
#assert repr(sc) == '<SkyCoord (ICRS): ra=120.0 deg, dec=5.0 deg>'
rsc = repr(sc)
assert rsc.startswith('<SkyCoord (ICRS): (ra, dec) in deg')
# Supports a variety of possible complex string formats
sc = coords.SkyCoord('8h00m00s +5d00m00.0s', frame='icrs')
# In the next example, the unit is only needed b/c units are ambiguous. In
# general, we *never* accept ambiguity
sc = coords.SkyCoord('8:00:00 +5:00:00.0', unit=(u.hour, u.deg), frame='icrs')
# The next one would yield length-2 array coordinates, because of the comma
sc = coords.SkyCoord(['8h 5d', '2°2′3″ 0.3rad'], frame='icrs')
# It should also interpret common designation styles as a coordinate
# NOT YET
# sc = coords.SkyCoord('SDSS J123456.89-012345.6', frame='icrs')
# but it should also be possible to provide formats for outputting to strings,
# similar to `Time`. This can be added right away or at a later date.
# transformation is done the same as for low-level classes, which it delegates to
sc_fk5_j2001 = sc.transform_to(coords.FK5(equinox=J2001))
assert sc_fk5_j2001.equinox == J2001
# The key difference is that the high-level class remembers frame information
# necessary for round-tripping, unlike the low-level classes:
sc1 = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, equinox=J2001, frame='fk5')
sc2 = sc1.transform_to('icrs')
# The next assertion succeeds, but it doesn't mean anything for ICRS, as ICRS
# isn't defined in terms of an equinox
assert sc2.equinox == J2001
# But it *is* necessary once we transform to FK5
sc3 = sc2.transform_to('fk5')
assert sc3.equinox == J2001
assert_allclose(sc1.ra, sc3.ra)
# `SkyCoord` will also include the attribute-style access that is in the
# v0.2/0.3 coordinate objects. This will *not* be in the low-level classes
sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs')
scgal = sc.galactic
assert str(scgal).startswith('<SkyCoord (Galactic): (l, b)')
# the existing `from_name` and `match_to_catalog_*` methods will be moved to the
# high-level class as convenience functionality.
#in remote-data test below!
#m31icrs = coords.SkyCoord.from_name('M31', frame='icrs')
#assert str(m31icrs) == '<SkyCoord (ICRS) RA=10.68471 deg, Dec=41.26875 deg>'
if HAS_SCIPY:
cat1 = coords.SkyCoord(ra=[1, 2]*u.hr, dec=[3, 4.01]*u.deg, distance=[5, 6]*u.kpc, frame='icrs')
cat2 = coords.SkyCoord(ra=[1, 2, 2.01]*u.hr, dec=[3, 4, 5]*u.deg, distance=[5, 200, 6]*u.kpc, frame='icrs')
idx1, sep2d1, dist3d1 = cat1.match_to_catalog_sky(cat2)
idx2, sep2d2, dist3d2 = cat1.match_to_catalog_3d(cat2)
assert np.any(idx1 != idx2)
# additional convenience functionality for the future should be added as methods
# on `SkyCoord`, *not* the low-level classes.
@pytest.mark.remote_data
def test_highlevel_api_remote():
m31icrs = coords.SkyCoord.from_name('M31', frame='icrs')
assert str(m31icrs) == '<SkyCoord (ICRS): (ra, dec) in deg\n (10.6847083, 41.26875)>'
m31fk4 = coords.SkyCoord.from_name('M31', frame='fk4')
assert m31icrs.frame != m31fk4.frame
assert np.abs(m31icrs.ra - m31fk4.ra) > .5*u.deg
|
xsmart/bluecherry-client
|
refs/heads/master
|
breakpad/src/tools/gyp/test/lib/TestCmd.py
|
137
|
"""
TestCmd.py: a testing framework for commands and scripts.
The TestCmd module provides a framework for portable automated testing
of executable commands and scripts (in any language, not just Python),
especially commands and scripts that require file system interaction.
In addition to running tests and evaluating conditions, the TestCmd
module manages and cleans up one or more temporary workspace
directories, and provides methods for creating files and directories in
those workspace directories from in-line data (here-documents), allowing
tests to be completely self-contained.
A TestCmd environment object is created via the usual invocation:
import TestCmd
test = TestCmd.TestCmd()
There are a bunch of keyword arguments available at instantiation:
test = TestCmd.TestCmd(description = 'string',
program = 'program_or_script_to_test',
interpreter = 'script_interpreter',
workdir = 'prefix',
subdir = 'subdir',
verbose = Boolean,
match = default_match_function,
diff = default_diff_function,
combine = Boolean)
There are a bunch of methods that let you do different things:
test.verbose_set(1)
test.description_set('string')
test.program_set('program_or_script_to_test')
test.interpreter_set('script_interpreter')
test.interpreter_set(['script_interpreter', 'arg'])
test.workdir_set('prefix')
test.workdir_set('')
test.workpath('file')
test.workpath('subdir', 'file')
test.subdir('subdir', ...)
test.rmdir('subdir', ...)
test.write('file', "contents\n")
test.write(['subdir', 'file'], "contents\n")
test.read('file')
test.read(['subdir', 'file'])
test.read('file', mode)
test.read(['subdir', 'file'], mode)
test.writable('dir', 1)
test.writable('dir', None)
test.preserve(condition, ...)
test.cleanup(condition)
test.command_args(program = 'program_or_script_to_run',
interpreter = 'script_interpreter',
arguments = 'arguments to pass to program')
test.run(program = 'program_or_script_to_run',
interpreter = 'script_interpreter',
arguments = 'arguments to pass to program',
chdir = 'directory_to_chdir_to',
         stdin = 'input to feed to the program\n',
         universal_newlines = True)
p = test.start(program = 'program_or_script_to_run',
interpreter = 'script_interpreter',
arguments = 'arguments to pass to program',
universal_newlines = None)
test.finish(self, p)
test.pass_test()
test.pass_test(condition)
test.pass_test(condition, function)
test.fail_test()
test.fail_test(condition)
test.fail_test(condition, function)
test.fail_test(condition, function, skip)
test.no_result()
test.no_result(condition)
test.no_result(condition, function)
test.no_result(condition, function, skip)
test.stdout()
test.stdout(run)
test.stderr()
test.stderr(run)
test.symlink(target, link)
test.banner(string)
test.banner(string, width)
test.diff(actual, expected)
test.match(actual, expected)
test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
test.match_exact(["actual 1\n", "actual 2\n"],
["expected 1\n", "expected 2\n"])
test.match_re("actual 1\nactual 2\n", regex_string)
test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
test.match_re_dotall("actual 1\nactual 2\n", regex_string)
test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
test.tempdir()
test.tempdir('temporary-directory')
test.sleep()
test.sleep(seconds)
test.where_is('foo')
test.where_is('foo', 'PATH1:PATH2')
test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
test.unlink('file')
test.unlink('subdir', 'file')
The TestCmd module provides pass_test(), fail_test(), and no_result()
unbound functions that report test results for use with the Aegis change
management system. These methods terminate the test immediately,
reporting PASSED, FAILED, or NO RESULT respectively, and exiting with
status 0 (success), 1 or 2 respectively. This allows for a distinction
between an actual failed test and a test that could not be properly
evaluated because of an external condition (such as a full file system
or incorrect permissions).
import TestCmd
TestCmd.pass_test()
TestCmd.pass_test(condition)
TestCmd.pass_test(condition, function)
TestCmd.fail_test()
TestCmd.fail_test(condition)
TestCmd.fail_test(condition, function)
TestCmd.fail_test(condition, function, skip)
TestCmd.no_result()
TestCmd.no_result(condition)
TestCmd.no_result(condition, function)
TestCmd.no_result(condition, function, skip)
The TestCmd module also provides unbound functions that handle matching
in the same way as the match_*() methods described above.
import TestCmd
test = TestCmd.TestCmd(match = TestCmd.match_exact)
test = TestCmd.TestCmd(match = TestCmd.match_re)
test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
The TestCmd module provides unbound functions that can be used for the
"diff" argument to TestCmd.TestCmd instantiation:
import TestCmd
test = TestCmd.TestCmd(match = TestCmd.match_re,
diff = TestCmd.diff_re)
test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
The "diff" argument can also be used with standard difflib functions:
import difflib
test = TestCmd.TestCmd(diff = difflib.context_diff)
test = TestCmd.TestCmd(diff = difflib.unified_diff)
Lastly, the where_is() method also exists in an unbound function
version.
import TestCmd
TestCmd.where_is('foo')
TestCmd.where_is('foo', 'PATH1:PATH2')
TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
"""
# Copyright 2000, 2001, 2002, 2003, 2004 Steven Knight
# This module is free software, and you may redistribute it and/or modify
# it under the same terms as Python itself, so long as this copyright message
# and disclaimer are retained in their original form.
#
# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#
# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
__author__ = "Steven Knight <knight at baldmt dot com>"
__revision__ = "TestCmd.py 0.36.D001 2009/07/24 08:45:26 knight"
__version__ = "0.36"
import errno
import os
import os.path
import re
import shutil
import stat
import string
import sys
import tempfile
import time
import traceback
import types
import UserList
__all__ = [
'diff_re',
'fail_test',
'no_result',
'pass_test',
'match_exact',
'match_re',
'match_re_dotall',
'python_executable',
'TestCmd'
]
try:
import difflib
except ImportError:
__all__.append('simple_diff')
def is_List(e):
return type(e) is types.ListType \
or isinstance(e, UserList.UserList)
try:
from UserString import UserString
except ImportError:
class UserString:
pass
if hasattr(types, 'UnicodeType'):
def is_String(e):
return type(e) is types.StringType \
or type(e) is types.UnicodeType \
or isinstance(e, UserString)
else:
def is_String(e):
return type(e) is types.StringType or isinstance(e, UserString)
tempfile.template = 'testcmd.'
if os.name in ('posix', 'nt'):
tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
else:
tempfile.template = 'testcmd.'
re_space = re.compile('\s')
_Cleanup = []
_chain_to_exitfunc = None
def _clean():
global _Cleanup
cleanlist = filter(None, _Cleanup)
del _Cleanup[:]
cleanlist.reverse()
for test in cleanlist:
test.cleanup()
if _chain_to_exitfunc:
_chain_to_exitfunc()
try:
import atexit
except ImportError:
# TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
try:
_chain_to_exitfunc = sys.exitfunc
except AttributeError:
pass
sys.exitfunc = _clean
else:
atexit.register(_clean)
try:
zip
except NameError:
def zip(*lists):
result = []
for i in xrange(min(map(len, lists))):
result.append(tuple(map(lambda l, i=i: l[i], lists)))
return result
class Collector:
def __init__(self, top):
self.entries = [top]
def __call__(self, arg, dirname, names):
pathjoin = lambda n, d=dirname: os.path.join(d, n)
self.entries.extend(map(pathjoin, names))
def _caller(tblist, skip):
string = ""
arr = []
for file, line, name, text in tblist:
if file[-10:] == "TestCmd.py":
break
arr = [(file, line, name, text)] + arr
atfrom = "at"
for file, line, name, text in arr[skip:]:
if name in ("?", "<module>"):
name = ""
else:
name = " (" + name + ")"
string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
atfrom = "\tfrom"
return string
def fail_test(self = None, condition = 1, function = None, skip = 0):
"""Cause the test to fail.
By default, the fail_test() method reports that the test FAILED
and exits with a status of 1. If a condition argument is supplied,
the test fails only if the condition is true.
"""
if not condition:
return
if not function is None:
function()
of = ""
desc = ""
sep = " "
if not self is None:
if self.program:
of = " of " + self.program
sep = "\n\t"
if self.description:
desc = " [" + self.description + "]"
sep = "\n\t"
at = _caller(traceback.extract_stack(), skip)
sys.stderr.write("FAILED test" + of + desc + sep + at)
sys.exit(1)
def no_result(self = None, condition = 1, function = None, skip = 0):
"""Causes a test to exit with no valid result.
By default, the no_result() method reports NO RESULT for the test
and exits with a status of 2. If a condition argument is supplied,
the test fails only if the condition is true.
"""
if not condition:
return
if not function is None:
function()
of = ""
desc = ""
sep = " "
if not self is None:
if self.program:
of = " of " + self.program
sep = "\n\t"
if self.description:
desc = " [" + self.description + "]"
sep = "\n\t"
at = _caller(traceback.extract_stack(), skip)
sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
sys.exit(2)
def pass_test(self = None, condition = 1, function = None):
"""Causes a test to pass.
By default, the pass_test() method reports PASSED for the test
and exits with a status of 0. If a condition argument is supplied,
the test passes only if the condition is true.
"""
if not condition:
return
if not function is None:
function()
sys.stderr.write("PASSED\n")
sys.exit(0)
def match_exact(lines = None, matches = None):
"""
"""
if not is_List(lines):
lines = string.split(lines, "\n")
if not is_List(matches):
matches = string.split(matches, "\n")
if len(lines) != len(matches):
return
for i in range(len(lines)):
if lines[i] != matches[i]:
return
return 1
def match_re(lines = None, res = None):
"""
"""
if not is_List(lines):
lines = string.split(lines, "\n")
if not is_List(res):
res = string.split(res, "\n")
if len(lines) != len(res):
return
for i in range(len(lines)):
s = "^" + res[i] + "$"
try:
expr = re.compile(s)
except re.error, e:
msg = "Regular expression error in %s: %s"
raise re.error, msg % (repr(s), e[0])
if not expr.search(lines[i]):
return
return 1
def match_re_dotall(lines = None, res = None):
"""
"""
if not type(lines) is type(""):
lines = string.join(lines, "\n")
if not type(res) is type(""):
res = string.join(res, "\n")
s = "^" + res + "$"
try:
expr = re.compile(s, re.DOTALL)
except re.error, e:
msg = "Regular expression error in %s: %s"
raise re.error, msg % (repr(s), e[0])
if expr.match(lines):
return 1
try:
import difflib
except ImportError:
pass
else:
def simple_diff(a, b, fromfile='', tofile='',
fromfiledate='', tofiledate='', n=3, lineterm='\n'):
"""
A function with the same calling signature as difflib.context_diff
(diff -c) and difflib.unified_diff (diff -u) but which prints
        output like the simple, unadorned 'diff' command.
"""
sm = difflib.SequenceMatcher(None, a, b)
def comma(x1, x2):
return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
result = []
for op, a1, a2, b1, b2 in sm.get_opcodes():
if op == 'delete':
result.append("%sd%d" % (comma(a1, a2), b1))
result.extend(map(lambda l: '< ' + l, a[a1:a2]))
elif op == 'insert':
result.append("%da%s" % (a1, comma(b1, b2)))
result.extend(map(lambda l: '> ' + l, b[b1:b2]))
elif op == 'replace':
result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
result.extend(map(lambda l: '< ' + l, a[a1:a2]))
result.append('---')
result.extend(map(lambda l: '> ' + l, b[b1:b2]))
return result
def diff_re(a, b, fromfile='', tofile='',
fromfiledate='', tofiledate='', n=3, lineterm='\n'):
"""
A simple "diff" of two sets of lines when the expected lines
are regular expressions. This is a really dumb thing that
just compares each line in turn, so it doesn't look for
chunks of matching lines and the like--but at least it lets
        you know exactly which line first didn't compare correctly.
"""
result = []
diff = len(a) - len(b)
if diff < 0:
a = a + ['']*(-diff)
elif diff > 0:
b = b + ['']*diff
i = 0
for aline, bline in zip(a, b):
s = "^" + aline + "$"
try:
expr = re.compile(s)
except re.error, e:
msg = "Regular expression error in %s: %s"
raise re.error, msg % (repr(s), e[0])
if not expr.search(bline):
result.append("%sc%s" % (i+1, i+1))
result.append('< ' + repr(a[i]))
result.append('---')
result.append('> ' + repr(b[i]))
i = i+1
return result
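# Illustrative sketch of how the two diff helpers above behave on a small,
# made-up pair of line lists (the sample data is an assumption chosen only
# for demonstration; this helper is defined but never called by the module).
def _example_diff_usage():
    expected = ['hello\n', 'wor.d\n']
    actual   = ['hello\n', 'world\n']
    # simple_diff() mimics plain "diff" output; here it reports a one-line
    # change hunk, e.g. ['2c2', '< wor.d\n', '---', '> world\n'].
    plain = simple_diff(expected, actual)
    # diff_re() treats each expected line as an anchored regular expression,
    # so 'wor.d' matches 'world' and no differences are reported (e.g. []).
    regexes = diff_re(expected, actual)
    return plain, regexes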
if os.name == 'java':
python_executable = os.path.join(sys.prefix, 'jython')
else:
python_executable = sys.executable
if sys.platform == 'win32':
default_sleep_seconds = 2
def where_is(file, path=None, pathext=None):
if path is None:
path = os.environ['PATH']
if is_String(path):
path = string.split(path, os.pathsep)
if pathext is None:
pathext = os.environ['PATHEXT']
if is_String(pathext):
pathext = string.split(pathext, os.pathsep)
for ext in pathext:
if string.lower(ext) == string.lower(file[-len(ext):]):
pathext = ['']
break
for dir in path:
f = os.path.join(dir, file)
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
return fext
return None
else:
def where_is(file, path=None, pathext=None):
if path is None:
path = os.environ['PATH']
if is_String(path):
path = string.split(path, os.pathsep)
for dir in path:
f = os.path.join(dir, file)
if os.path.isfile(f):
try:
st = os.stat(f)
except OSError:
continue
if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
return f
return None
default_sleep_seconds = 1
try:
import subprocess
except ImportError:
# The subprocess module doesn't exist in this version of Python,
# so we're going to cobble up something that looks just enough
# like its API for our purposes below.
import new
subprocess = new.module('subprocess')
subprocess.PIPE = 'PIPE'
subprocess.STDOUT = 'STDOUT'
subprocess.mswindows = (sys.platform == 'win32')
try:
import popen2
popen2.Popen3
except AttributeError:
class Popen3:
universal_newlines = 1
def __init__(self, command, **kw):
if sys.platform == 'win32' and command[0] == '"':
command = '"' + command + '"'
(stdin, stdout, stderr) = os.popen3(' ' + command)
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
def close_output(self):
self.stdout.close()
self.resultcode = self.stderr.close()
def wait(self):
resultcode = self.resultcode
if os.WIFEXITED(resultcode):
return os.WEXITSTATUS(resultcode)
elif os.WIFSIGNALED(resultcode):
return os.WTERMSIG(resultcode)
else:
return None
else:
try:
popen2.Popen4
except AttributeError:
# A cribbed Popen4 class, with some retrofitted code from
# the Python 1.5 Popen3 class methods to do certain things
# by hand.
class Popen4(popen2.Popen3):
childerr = None
def __init__(self, cmd, bufsize=-1):
p2cread, p2cwrite = os.pipe()
c2pread, c2pwrite = os.pipe()
self.pid = os.fork()
if self.pid == 0:
# Child
os.dup2(p2cread, 0)
os.dup2(c2pwrite, 1)
os.dup2(c2pwrite, 2)
for i in range(3, popen2.MAXFD):
try:
os.close(i)
except: pass
try:
os.execvp(cmd[0], cmd)
finally:
os._exit(1)
# Shouldn't come here, I guess
os._exit(1)
os.close(p2cread)
self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
os.close(c2pwrite)
self.fromchild = os.fdopen(c2pread, 'r', bufsize)
popen2._active.append(self)
popen2.Popen4 = Popen4
class Popen3(popen2.Popen3, popen2.Popen4):
universal_newlines = 1
def __init__(self, command, **kw):
if kw.get('stderr') == 'STDOUT':
apply(popen2.Popen4.__init__, (self, command, 1))
else:
apply(popen2.Popen3.__init__, (self, command, 1))
self.stdin = self.tochild
self.stdout = self.fromchild
self.stderr = self.childerr
def wait(self, *args, **kw):
resultcode = apply(popen2.Popen3.wait, (self,)+args, kw)
if os.WIFEXITED(resultcode):
return os.WEXITSTATUS(resultcode)
elif os.WIFSIGNALED(resultcode):
return os.WTERMSIG(resultcode)
else:
return None
subprocess.Popen = Popen3
# From Josiah Carlson,
# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554
PIPE = subprocess.PIPE
if subprocess.mswindows:
from win32file import ReadFile, WriteFile
from win32pipe import PeekNamedPipe
import msvcrt
else:
import select
import fcntl
try: fcntl.F_GETFL
except AttributeError: fcntl.F_GETFL = 3
try: fcntl.F_SETFL
except AttributeError: fcntl.F_SETFL = 4
class Popen(subprocess.Popen):
def recv(self, maxsize=None):
return self._recv('stdout', maxsize)
def recv_err(self, maxsize=None):
return self._recv('stderr', maxsize)
def send_recv(self, input='', maxsize=None):
return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
def get_conn_maxsize(self, which, maxsize):
if maxsize is None:
maxsize = 1024
elif maxsize < 1:
maxsize = 1
return getattr(self, which), maxsize
def _close(self, which):
getattr(self, which).close()
setattr(self, which, None)
if subprocess.mswindows:
def send(self, input):
if not self.stdin:
return None
try:
x = msvcrt.get_osfhandle(self.stdin.fileno())
(errCode, written) = WriteFile(x, input)
except ValueError:
return self._close('stdin')
except (subprocess.pywintypes.error, Exception), why:
if why[0] in (109, errno.ESHUTDOWN):
return self._close('stdin')
raise
return written
def _recv(self, which, maxsize):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
try:
x = msvcrt.get_osfhandle(conn.fileno())
(read, nAvail, nMessage) = PeekNamedPipe(x, 0)
if maxsize < nAvail:
nAvail = maxsize
if nAvail > 0:
(errCode, read) = ReadFile(x, nAvail, None)
except ValueError:
return self._close(which)
except (subprocess.pywintypes.error, Exception), why:
if why[0] in (109, errno.ESHUTDOWN):
return self._close(which)
raise
#if self.universal_newlines:
# read = self._translate_newlines(read)
return read
else:
def send(self, input):
if not self.stdin:
return None
if not select.select([], [self.stdin], [], 0)[1]:
return 0
try:
written = os.write(self.stdin.fileno(), input)
except OSError, why:
if why[0] == errno.EPIPE: #broken pipe
return self._close('stdin')
raise
return written
def _recv(self, which, maxsize):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
try:
flags = fcntl.fcntl(conn, fcntl.F_GETFL)
except TypeError:
flags = None
else:
if not conn.closed:
fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
try:
if not select.select([conn], [], [], 0)[0]:
return ''
r = conn.read(maxsize)
if not r:
return self._close(which)
#if self.universal_newlines:
# r = self._translate_newlines(r)
return r
finally:
if not conn.closed and not flags is None:
fcntl.fcntl(conn, fcntl.F_SETFL, flags)
disconnect_message = "Other end disconnected!"
def recv_some(p, t=.1, e=1, tr=5, stderr=0):
if tr < 1:
tr = 1
x = time.time()+t
y = []
r = ''
pr = p.recv
if stderr:
pr = p.recv_err
while time.time() < x or r:
r = pr()
if r is None:
if e:
raise Exception(disconnect_message)
else:
break
elif r:
y.append(r)
else:
time.sleep(max((x-time.time())/tr, 0))
return ''.join(y)
# TODO(3.0): rewrite to use memoryview()
def send_all(p, data):
while len(data):
sent = p.send(data)
if sent is None:
raise Exception(disconnect_message)
data = buffer(data, sent)
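# Illustrative sketch of the asynchronous-subprocess helpers above: start a
# child with the non-blocking Popen subclass, push input with send_all(), and
# poll for output with recv_some().  The child command and timeout are
# assumptions chosen purely for demonstration; this helper is never called.
def _example_async_subprocess():
    p = Popen([python_executable, '-c',
               'import sys; sys.stdout.write(sys.stdin.read())'],
              stdin=subprocess.PIPE, stdout=subprocess.PIPE,
              stderr=subprocess.PIPE)
    send_all(p, 'hello world\n')      # keeps writing until all data is sent
    p.stdin.close()                   # signal EOF so the child can finish
    output = recv_some(p, t=1, e=0)   # poll stdout for up to ~1 second
    p.wait()
    return output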
class TestCmd(object):
"""Class TestCmd
"""
def __init__(self, description = None,
program = None,
interpreter = None,
workdir = None,
subdir = None,
verbose = None,
match = None,
diff = None,
combine = 0,
universal_newlines = 1):
self._cwd = os.getcwd()
self.description_set(description)
self.program_set(program)
self.interpreter_set(interpreter)
if verbose is None:
try:
verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
except ValueError:
verbose = 0
self.verbose_set(verbose)
self.combine = combine
self.universal_newlines = universal_newlines
if not match is None:
self.match_function = match
else:
self.match_function = match_re
if not diff is None:
self.diff_function = diff
else:
try:
difflib
except NameError:
pass
else:
self.diff_function = simple_diff
#self.diff_function = difflib.context_diff
#self.diff_function = difflib.unified_diff
self._dirlist = []
self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '':
self._preserve['pass_test'] = os.environ['PRESERVE']
self._preserve['fail_test'] = os.environ['PRESERVE']
self._preserve['no_result'] = os.environ['PRESERVE']
else:
try:
self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
except KeyError:
pass
try:
self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
except KeyError:
pass
try:
self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
except KeyError:
pass
self._stdout = []
self._stderr = []
self.status = None
self.condition = 'no_result'
self.workdir_set(workdir)
self.subdir(subdir)
def __del__(self):
self.cleanup()
def __repr__(self):
return "%x" % id(self)
banner_char = '='
banner_width = 80
def banner(self, s, width=None):
if width is None:
width = self.banner_width
return s + self.banner_char * (width - len(s))
if os.name == 'posix':
def escape(self, arg):
"escape shell special characters"
slash = '\\'
special = '"$'
arg = string.replace(arg, slash, slash+slash)
for c in special:
arg = string.replace(arg, c, slash+c)
if re_space.search(arg):
arg = '"' + arg + '"'
return arg
else:
# Windows does not allow special characters in file names
# anyway, so no need for an escape function, we will just quote
# the arg.
def escape(self, arg):
if re_space.search(arg):
arg = '"' + arg + '"'
return arg
def canonicalize(self, path):
if is_List(path):
path = apply(os.path.join, tuple(path))
if not os.path.isabs(path):
path = os.path.join(self.workdir, path)
return path
def chmod(self, path, mode):
"""Changes permissions on the specified file or directory
path name."""
path = self.canonicalize(path)
os.chmod(path, mode)
def cleanup(self, condition = None):
"""Removes any temporary working directories for the specified
TestCmd environment. If the environment variable PRESERVE was
set when the TestCmd environment was created, temporary working
directories are not removed. If any of the environment variables
PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
when the TestCmd environment was created, then temporary working
directories are not removed if the test passed, failed, or had
no result, respectively. Temporary working directories are also
preserved for conditions specified via the preserve method.
Typically, this method is not called directly, but is used when
the script exits to clean up temporary working directories as
appropriate for the exit status.
"""
if not self._dirlist:
return
os.chdir(self._cwd)
self.workdir = None
if condition is None:
condition = self.condition
if self._preserve[condition]:
for dir in self._dirlist:
print "Preserved directory", dir
else:
list = self._dirlist[:]
list.reverse()
for dir in list:
self.writable(dir, 1)
shutil.rmtree(dir, ignore_errors = 1)
self._dirlist = []
try:
global _Cleanup
_Cleanup.remove(self)
except (AttributeError, ValueError):
pass
def command_args(self, program = None,
interpreter = None,
arguments = None):
if program:
if type(program) == type('') and not os.path.isabs(program):
program = os.path.join(self._cwd, program)
else:
program = self.program
if not interpreter:
interpreter = self.interpreter
if not type(program) in [type([]), type(())]:
program = [program]
cmd = list(program)
if interpreter:
if not type(interpreter) in [type([]), type(())]:
interpreter = [interpreter]
cmd = list(interpreter) + cmd
if arguments:
if type(arguments) == type(''):
arguments = string.split(arguments)
cmd.extend(arguments)
return cmd
def description_set(self, description):
"""Set the description of the functionality being tested.
"""
self.description = description
try:
difflib
except NameError:
def diff(self, a, b, name, *args, **kw):
print self.banner('Expected %s' % name)
print a
print self.banner('Actual %s' % name)
print b
else:
def diff(self, a, b, name, *args, **kw):
print self.banner(name)
args = (a.splitlines(), b.splitlines()) + args
lines = apply(self.diff_function, args, kw)
for l in lines:
print l
def fail_test(self, condition = 1, function = None, skip = 0):
"""Cause the test to fail.
"""
if not condition:
return
self.condition = 'fail_test'
fail_test(self = self,
condition = condition,
function = function,
skip = skip)
def interpreter_set(self, interpreter):
"""Set the program to be used to interpret the program
under test as a script.
"""
self.interpreter = interpreter
def match(self, lines, matches):
"""Compare actual and expected file contents.
"""
return self.match_function(lines, matches)
def match_exact(self, lines, matches):
"""Compare actual and expected file contents.
"""
return match_exact(lines, matches)
def match_re(self, lines, res):
"""Compare actual and expected file contents.
"""
return match_re(lines, res)
def match_re_dotall(self, lines, res):
"""Compare actual and expected file contents.
"""
return match_re_dotall(lines, res)
def no_result(self, condition = 1, function = None, skip = 0):
"""Report that the test could not be run.
"""
if not condition:
return
self.condition = 'no_result'
no_result(self = self,
condition = condition,
function = function,
skip = skip)
def pass_test(self, condition = 1, function = None):
"""Cause the test to pass.
"""
if not condition:
return
self.condition = 'pass_test'
pass_test(self = self, condition = condition, function = function)
def preserve(self, *conditions):
"""Arrange for the temporary working directories for the
specified TestCmd environment to be preserved for one or more
conditions. If no conditions are specified, arranges for
the temporary working directories to be preserved for all
conditions.
"""
if conditions is ():
conditions = ('pass_test', 'fail_test', 'no_result')
for cond in conditions:
self._preserve[cond] = 1
def program_set(self, program):
"""Set the executable program or script to be tested.
"""
if program and not os.path.isabs(program):
program = os.path.join(self._cwd, program)
self.program = program
def read(self, file, mode = 'rb'):
"""Reads and returns the contents of the specified file name.
The file name may be a list, in which case the elements are
concatenated with the os.path.join() method. The file is
assumed to be under the temporary working directory unless it
is an absolute path name. The I/O mode for the file may
be specified; it must begin with an 'r'. The default is
'rb' (binary read).
"""
file = self.canonicalize(file)
if mode[0] != 'r':
raise ValueError, "mode must begin with 'r'"
return open(file, mode).read()
def rmdir(self, dir):
"""Removes the specified dir name.
The dir name may be a list, in which case the elements are
concatenated with the os.path.join() method. The dir is
assumed to be under the temporary working directory unless it
is an absolute path name.
The dir must be empty.
"""
dir = self.canonicalize(dir)
os.rmdir(dir)
def start(self, program = None,
interpreter = None,
arguments = None,
universal_newlines = None,
**kw):
"""
Starts a program or script for the test environment.
The specified program will have the original directory
prepended unless it is enclosed in a [list].
"""
cmd = self.command_args(program, interpreter, arguments)
cmd_string = string.join(map(self.escape, cmd), ' ')
if self.verbose:
sys.stderr.write(cmd_string + "\n")
if universal_newlines is None:
universal_newlines = self.universal_newlines
combine = kw.get('combine', self.combine)
if combine:
stderr_value = subprocess.STDOUT
else:
stderr_value = subprocess.PIPE
return Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=stderr_value,
universal_newlines=universal_newlines)
def finish(self, popen, **kw):
"""
Finishes and waits for the process being run under control of
the specified popen argument, recording the exit status,
standard output and error output.
"""
popen.stdin.close()
self.status = popen.wait()
if not self.status:
self.status = 0
self._stdout.append(popen.stdout.read())
if popen.stderr:
stderr = popen.stderr.read()
else:
stderr = ''
self._stderr.append(stderr)
def run(self, program = None,
interpreter = None,
arguments = None,
chdir = None,
stdin = None,
universal_newlines = None):
"""Runs a test of the program or script for the test
environment. Standard output and error output are saved for
future retrieval via the stdout() and stderr() methods.
The specified program will have the original directory
prepended unless it is enclosed in a [list].
"""
if chdir:
oldcwd = os.getcwd()
if not os.path.isabs(chdir):
chdir = os.path.join(self.workpath(chdir))
if self.verbose:
sys.stderr.write("chdir(" + chdir + ")\n")
os.chdir(chdir)
p = self.start(program, interpreter, arguments, universal_newlines)
if stdin:
if is_List(stdin):
for line in stdin:
p.stdin.write(line)
else:
p.stdin.write(stdin)
p.stdin.close()
out = p.stdout.read()
if p.stderr is None:
err = ''
else:
err = p.stderr.read()
try:
close_output = p.close_output
except AttributeError:
p.stdout.close()
if not p.stderr is None:
p.stderr.close()
else:
close_output()
self._stdout.append(out)
self._stderr.append(err)
self.status = p.wait()
if not self.status:
self.status = 0
if chdir:
os.chdir(oldcwd)
if self.verbose >= 2:
write = sys.stdout.write
write('============ STATUS: %d\n' % self.status)
out = self.stdout()
if out or self.verbose >= 3:
write('============ BEGIN STDOUT (len=%d):\n' % len(out))
write(out)
write('============ END STDOUT\n')
err = self.stderr()
if err or self.verbose >= 3:
write('============ BEGIN STDERR (len=%d)\n' % len(err))
write(err)
write('============ END STDERR\n')
def sleep(self, seconds = default_sleep_seconds):
"""Sleeps at least the specified number of seconds. If no
number is specified, sleeps at least the minimum number of
seconds necessary to advance file time stamps on the current
system. Sleeping more seconds is all right.
"""
time.sleep(seconds)
def stderr(self, run = None):
"""Returns the error output from the specified run number.
If there is no specified run number, then returns the error
output of the last run. If the run number is less than zero,
then returns the error output from that many runs back from the
current run.
"""
if not run:
run = len(self._stderr)
elif run < 0:
run = len(self._stderr) + run
run = run - 1
return self._stderr[run]
def stdout(self, run = None):
"""Returns the standard output from the specified run number.
If there is no specified run number, then returns the standard
output of the last run. If the run number is less than zero,
then returns the standard output from that many runs back from
the current run.
"""
if not run:
run = len(self._stdout)
elif run < 0:
run = len(self._stdout) + run
run = run - 1
return self._stdout[run]
def subdir(self, *subdirs):
"""Create new subdirectories under the temporary working
directory, one for each argument. An argument may be a list,
in which case the list elements are concatenated using the
os.path.join() method. Subdirectories multiple levels deep
must be created using a separate argument for each level:
test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
Returns the number of subdirectories actually created.
"""
count = 0
for sub in subdirs:
if sub is None:
continue
if is_List(sub):
sub = apply(os.path.join, tuple(sub))
new = os.path.join(self.workdir, sub)
try:
os.mkdir(new)
except OSError:
pass
else:
count = count + 1
return count
def symlink(self, target, link):
"""Creates a symlink to the specified target.
The link name may be a list, in which case the elements are
concatenated with the os.path.join() method. The link is
assumed to be under the temporary working directory unless it
is an absolute path name. The target is *not* assumed to be
under the temporary working directory.
"""
link = self.canonicalize(link)
os.symlink(target, link)
def tempdir(self, path=None):
"""Creates a temporary directory.
A unique directory name is generated if no path name is specified.
The directory is created, and will be removed when the TestCmd
object is destroyed.
"""
if path is None:
try:
path = tempfile.mktemp(prefix=tempfile.template)
except TypeError:
path = tempfile.mktemp()
os.mkdir(path)
# Symlinks in the path will report things
# differently from os.getcwd(), so chdir there
# and back to fetch the canonical path.
cwd = os.getcwd()
try:
os.chdir(path)
path = os.getcwd()
finally:
os.chdir(cwd)
# Uppercase the drive letter since the case of drive
# letters is pretty much random on win32:
drive,rest = os.path.splitdrive(path)
if drive:
path = string.upper(drive) + rest
#
self._dirlist.append(path)
global _Cleanup
try:
_Cleanup.index(self)
except ValueError:
_Cleanup.append(self)
return path
def touch(self, path, mtime=None):
"""Updates the modification time on the specified file or
directory path name. The default is to update to the
current time if no explicit modification time is specified.
"""
path = self.canonicalize(path)
atime = os.path.getatime(path)
if mtime is None:
mtime = time.time()
os.utime(path, (atime, mtime))
def unlink(self, file):
"""Unlinks the specified file name.
The file name may be a list, in which case the elements are
concatenated with the os.path.join() method. The file is
assumed to be under the temporary working directory unless it
is an absolute path name.
"""
file = self.canonicalize(file)
os.unlink(file)
def verbose_set(self, verbose):
"""Set the verbose level.
"""
self.verbose = verbose
def where_is(self, file, path=None, pathext=None):
"""Find an executable file.
"""
if is_List(file):
file = apply(os.path.join, tuple(file))
if not os.path.isabs(file):
file = where_is(file, path, pathext)
return file
def workdir_set(self, path):
"""Creates a temporary working directory with the specified
path name. If the path is a null string (''), a unique
directory name is created.
"""
if (path != None):
if path == '':
path = None
path = self.tempdir(path)
self.workdir = path
def workpath(self, *args):
"""Returns the absolute path name to a subdirectory or file
within the current temporary working directory. Concatenates
the temporary working directory name with the specified
arguments using the os.path.join() method.
"""
return apply(os.path.join, (self.workdir,) + tuple(args))
def readable(self, top, read=1):
"""Make the specified directory tree readable (read == 1)
or not (read == None).
This method has no effect on Windows systems, which use a
completely different mechanism to control file readability.
"""
if sys.platform == 'win32':
return
if read:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
else:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))
if os.path.isfile(top):
# If it's a file, that's easy, just chmod it.
do_chmod(top)
elif read:
# It's a directory and we're trying to turn on read
# permission, so it's also pretty easy, just chmod the
# directory and then chmod every entry on our walk down the
# tree. Because os.path.walk() is top-down, we'll enable
# read permission on any directories that have it disabled
# before os.path.walk() tries to list their contents.
do_chmod(top)
def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
for n in names:
do_chmod(os.path.join(dirname, n))
os.path.walk(top, chmod_entries, None)
else:
# It's a directory and we're trying to turn off read
            # permission, which means we have to chmod the directories
# in the tree bottom-up, lest disabling read permission from
# the top down get in the way of being able to get at lower
# parts of the tree. But os.path.walk() visits things top
# down, so we just use an object to collect a list of all
# of the entries in the tree, reverse the list, and then
# chmod the reversed (bottom-up) list.
col = Collector(top)
os.path.walk(top, col, None)
col.entries.reverse()
for d in col.entries: do_chmod(d)
def writable(self, top, write=1):
"""Make the specified directory tree writable (write == 1)
or not (write == None).
"""
if sys.platform == 'win32':
if write:
def do_chmod(fname):
try: os.chmod(fname, stat.S_IWRITE)
except OSError: pass
else:
def do_chmod(fname):
try: os.chmod(fname, stat.S_IREAD)
except OSError: pass
else:
if write:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200))
else:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200))
if os.path.isfile(top):
do_chmod(top)
else:
col = Collector(top)
os.path.walk(top, col, None)
for d in col.entries: do_chmod(d)
def executable(self, top, execute=1):
"""Make the specified directory tree executable (execute == 1)
or not (execute == None).
This method has no effect on Windows systems, which use a
completely different mechanism to control file executability.
"""
if sys.platform == 'win32':
return
if execute:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
else:
def do_chmod(fname):
try: st = os.stat(fname)
except OSError: pass
else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))
if os.path.isfile(top):
# If it's a file, that's easy, just chmod it.
do_chmod(top)
elif execute:
# It's a directory and we're trying to turn on execute
# permission, so it's also pretty easy, just chmod the
# directory and then chmod every entry on our walk down the
# tree. Because os.path.walk() is top-down, we'll enable
# execute permission on any directories that have it disabled
# before os.path.walk() tries to list their contents.
do_chmod(top)
def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
for n in names:
do_chmod(os.path.join(dirname, n))
os.path.walk(top, chmod_entries, None)
else:
# It's a directory and we're trying to turn off execute
# permission, which means we have to chmod the directories
# in the tree bottom-up, lest disabling execute permission from
# the top down get in the way of being able to get at lower
# parts of the tree. But os.path.walk() visits things top
# down, so we just use an object to collect a list of all
# of the entries in the tree, reverse the list, and then
# chmod the reversed (bottom-up) list.
col = Collector(top)
os.path.walk(top, col, None)
col.entries.reverse()
for d in col.entries: do_chmod(d)
def write(self, file, content, mode = 'wb'):
"""Writes the specified content text (second argument) to the
specified file name (first argument). The file name may be
a list, in which case the elements are concatenated with the
os.path.join() method. The file is created under the temporary
working directory. Any subdirectories in the path must already
exist. The I/O mode for the file may be specified; it must
begin with a 'w'. The default is 'wb' (binary write).
"""
file = self.canonicalize(file)
if mode[0] != 'w':
raise ValueError, "mode must begin with 'w'"
open(file, mode).write(content)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
schacki/djangocms-cascade
|
refs/heads/master
|
cmsplugin_cascade/south_migrations/0011_auto__add_inlinecascadeelement.py
|
5
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'InlineCascadeElement'
db.create_table(u'cmsplugin_cascade_inline', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cascade_element', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'inline_elements', to=orm['cmsplugin_cascade.CascadeElement'])),
('glossary', self.gf('jsonfield.fields.JSONField')(default={}, blank=True)),
))
db.send_create_signal(u'cmsplugin_cascade', ['InlineCascadeElement'])
def backwards(self, orm):
# Deleting model 'InlineCascadeElement'
db.delete_table(u'cmsplugin_cascade_inline')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
u'cmsplugin_cascade.cascadeelement': {
'Meta': {'object_name': 'CascadeElement', 'db_table': "u'cmsplugin_cascade_element'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'+'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['cms.CMSPlugin']"}),
'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'})
},
u'cmsplugin_cascade.inlinecascadeelement': {
'Meta': {'object_name': 'InlineCascadeElement', 'db_table': "u'cmsplugin_cascade_inline'"},
'cascade_element': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'inline_elements'", 'to': u"orm['cmsplugin_cascade.CascadeElement']"}),
'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'cmsplugin_cascade.pluginextrafields': {
'Meta': {'unique_together': "((u'plugin_type', u'site'),)", 'object_name': 'PluginExtraFields'},
'allow_id_tag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'css_classes': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inline_styles': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"})
},
u'cmsplugin_cascade.segmentation': {
'Meta': {'object_name': 'Segmentation', 'managed': 'False'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'cmsplugin_cascade.sharablecascadeelement': {
'Meta': {'object_name': 'SharableCascadeElement', 'db_table': "u'cmsplugin_cascade_sharableelement'"},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'+'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['cms.CMSPlugin']"}),
'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'shared_glossary': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cmsplugin_cascade.SharedGlossary']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'cmsplugin_cascade.sharedglossary': {
'Meta': {'unique_together': "((u'plugin_type', u'identifier'),)", 'object_name': 'SharedGlossary'},
'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cmsplugin_cascade']
|
zhumengyuan/kallithea
|
refs/heads/master
|
kallithea/lib/dbmigrate/migrate/changeset/databases/sqlite.py
|
2
|
"""
`SQLite`_ database specific implementations of changeset classes.
.. _`SQLite`: http://www.sqlite.org/
"""
from UserDict import DictMixin
from copy import copy
from sqlalchemy.databases import sqlite as sa_base
from kallithea.lib.dbmigrate.migrate import exceptions
from kallithea.lib.dbmigrate.migrate.changeset import ansisql, SQLA_06
SQLiteSchemaGenerator = sa_base.SQLiteDDLCompiler
class SQLiteCommon(object):
def _not_supported(self, op):
raise exceptions.NotSupportedError("SQLite does not support "
"%s; see http://www.sqlite.org/lang_altertable.html" % op)
class SQLiteHelper(SQLiteCommon):
def recreate_table(self,table,column=None,delta=None):
table_name = self.preparer.format_table(table)
# we remove all indexes so as not to have
# problems during copy and re-create
for index in table.indexes:
index.drop()
self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
self.execute()
insertion_string = self._modify_table(table, column, delta)
table.create(bind=self.connection)
self.append(insertion_string % {'table_name': table_name})
self.execute()
self.append('DROP TABLE migration_tmp')
self.execute()
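    # For illustration, recreating a table named "users" emits roughly this SQL
    # sequence (a sketch derived from the statements appended above):
    #
    #   ALTER TABLE users RENAME TO migration_tmp
    #   CREATE TABLE users (...)                        -- new schema, via table.create()
    #   INSERT INTO users SELECT ... FROM migration_tmp -- from _modify_table()
    #   DROP TABLE migration_tmp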
def visit_column(self, delta):
if isinstance(delta, DictMixin):
column = delta.result_column
table = self._to_table(delta.table)
else:
column = delta
table = self._to_table(column.table)
self.recreate_table(table,column,delta)
class SQLiteColumnGenerator(SQLiteSchemaGenerator,
ansisql.ANSIColumnGenerator,
# at the end so we get the normal
# visit_column by default
SQLiteHelper,
SQLiteCommon
):
"""SQLite ColumnGenerator"""
def _modify_table(self, table, column, delta):
columns = ' ,'.join(map(
self.preparer.format_column,
[c for c in table.columns if c.name!=column.name]))
return ('INSERT INTO %%(table_name)s (%(cols)s) '
'SELECT %(cols)s from migration_tmp')%{'cols':columns}
def visit_column(self,column):
if column.foreign_keys:
SQLiteHelper.visit_column(self,column)
else:
super(SQLiteColumnGenerator,self).visit_column(column)
class SQLiteColumnDropper(SQLiteHelper, ansisql.ANSIColumnDropper):
"""SQLite ColumnDropper"""
def _modify_table(self, table, column, delta):
columns = ' ,'.join(map(self.preparer.format_column, table.columns))
return 'INSERT INTO %(table_name)s SELECT ' + columns + \
' from migration_tmp'
def visit_column(self,column):
# For SQLite, we *have* to remove the column here so the table
# is re-created properly.
column.remove_from_table(column.table,unset_table=False)
super(SQLiteColumnDropper,self).visit_column(column)
class SQLiteSchemaChanger(SQLiteHelper, ansisql.ANSISchemaChanger):
"""SQLite SchemaChanger"""
def _modify_table(self, table, column, delta):
return 'INSERT INTO %(table_name)s SELECT * from migration_tmp'
def visit_index(self, index):
"""Does not support ALTER INDEX"""
self._not_supported('ALTER INDEX')
class SQLiteConstraintGenerator(ansisql.ANSIConstraintGenerator, SQLiteHelper, SQLiteCommon):
def visit_migrate_primary_key_constraint(self, constraint):
tmpl = "CREATE UNIQUE INDEX %s ON %s ( %s )"
cols = ', '.join(map(self.preparer.format_column, constraint.columns))
tname = self.preparer.format_table(constraint.table)
name = self.get_constraint_name(constraint)
msg = tmpl % (name, tname, cols)
self.append(msg)
self.execute()
def _modify_table(self, table, column, delta):
return 'INSERT INTO %(table_name)s SELECT * from migration_tmp'
def visit_migrate_foreign_key_constraint(self, *p, **k):
self.recreate_table(p[0].table)
def visit_migrate_unique_constraint(self, *p, **k):
self.recreate_table(p[0].table)
class SQLiteConstraintDropper(ansisql.ANSIColumnDropper,
SQLiteCommon,
ansisql.ANSIConstraintCommon):
def visit_migrate_primary_key_constraint(self, constraint):
tmpl = "DROP INDEX %s "
name = self.get_constraint_name(constraint)
msg = tmpl % (name)
self.append(msg)
self.execute()
def visit_migrate_foreign_key_constraint(self, *p, **k):
self._not_supported('ALTER TABLE DROP CONSTRAINT')
def visit_migrate_check_constraint(self, *p, **k):
self._not_supported('ALTER TABLE DROP CONSTRAINT')
def visit_migrate_unique_constraint(self, *p, **k):
self._not_supported('ALTER TABLE DROP CONSTRAINT')
# TODO: technically primary key is a NOT NULL + UNIQUE constraint, should add NOT NULL to index
class SQLiteDialect(ansisql.ANSIDialect):
columngenerator = SQLiteColumnGenerator
columndropper = SQLiteColumnDropper
schemachanger = SQLiteSchemaChanger
constraintgenerator = SQLiteConstraintGenerator
constraintdropper = SQLiteConstraintDropper
|
akosyakov/intellij-community
|
refs/heads/master
|
python/testData/completion/propertyType.py
|
83
|
class C(object):
def __init__(self):
self._x = []
@property
def x(self):
return self._x
c = C()
c.x.app<caret>
|
sugartom/tensorflow-alien
|
refs/heads/master
|
tensorflow/python/ops/dequantize_op_test.py
|
129
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Dequantize Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class DequantizeOpTest(test.TestCase):
def __init__(self, method_name="runTest"):
super(DequantizeOpTest, self).__init__(method_name)
def _testDequantizeOp(self, inputs, min_range, max_range, dtype):
with self.test_session():
input_op = constant_op.constant(inputs, shape=[len(inputs)], dtype=dtype)
dequantized = array_ops.dequantize(input_op, min_range, max_range)
tf_ans = dequantized.eval()
# TODO(vrv): Add support for DT_QINT32 quantization if needed.
type_dict = {
dtypes.quint8: np.uint8,
dtypes.qint8: np.int8,
dtypes.quint16: np.uint16,
dtypes.qint16: np.int16
}
self.assertTrue(dtype in type_dict.keys())
v_max = np.iinfo(type_dict[dtype]).max
v_min = np.iinfo(type_dict[dtype]).min
self.assertTrue(min_range >= v_min)
self.assertTrue(max_range <= v_max)
type_range = v_max - v_min
if v_min < 0:
half_range = (type_range + 1) / 2
else:
half_range = 0.0
np_ans = ((inputs.astype(np.float32) + half_range) *
(max_range - min_range) / type_range) + min_range
self.assertAllClose(tf_ans, np_ans, rtol=1e-5, atol=1e-5)
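  # Worked example of the reference formula above (illustrative numbers only):
  # for dtypes.quint8 with min_range=0.0, max_range=6.0, type_range=255 and
  # half_range=0.0, the input value 128 maps to
  #   (128 + 0.0) * (6.0 - 0.0) / 255 + 0.0 ≈ 3.012,
  # and 255 maps to exactly 6.0.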
def testBasicQuint8(self):
self._testDequantizeOp(np.array([0, 128, 255]), 0.0, 6.0, dtypes.quint8)
self._testDequantizeOp(np.array([0, 128, 255]), 0.0, 123.456, dtypes.quint8)
self._testDequantizeOp(
np.array([0, 4, 42, 108, 243]), 5.0, 200.2, dtypes.quint8)
def testBasicQint8(self):
self._testDequantizeOp(np.array([-128, 0, 127]), -1.0, 2.0, dtypes.qint8)
self._testDequantizeOp(np.array([-2, 4, -17]), -5.0, -3.0, dtypes.qint8)
self._testDequantizeOp(np.array([0, -4, 42, -108]), 5.0, 40.0, dtypes.qint8)
if __name__ == "__main__":
test.main()
|
tarak/django-password-policies
|
refs/heads/master
|
password_policies/tests/test_models.py
|
2
|
from password_policies.conf import settings
from password_policies.models import PasswordHistory
from password_policies.tests.lib import BaseTest
from password_policies.tests.lib import create_user
from password_policies.tests.lib import create_password_history
from password_policies.tests.lib import passwords
class PasswordHistoryModelTestCase(BaseTest):
def setUp(self):
self.user = create_user()
create_password_history(self.user)
return super(PasswordHistoryModelTestCase, self).setUp()
def test_password_history_expiration_with_offset(self):
offset = settings.PASSWORD_HISTORY_COUNT + 2
PasswordHistory.objects.delete_expired(self.user, offset=offset)
count = PasswordHistory.objects.filter(user=self.user).count()
self.assertEqual(count, offset)
def test_password_history_expiration(self):
PasswordHistory.objects.delete_expired(self.user)
count = PasswordHistory.objects.filter(user=self.user).count()
self.assertEqual(count, settings.PASSWORD_HISTORY_COUNT)
def test_password_history_recent_passwords(self):
self.failIf(PasswordHistory.objects.check_password(self.user,
passwords[-1]))
|
xiaolonginfo/decode-Django
|
refs/heads/master
|
Django-1.5.1/django/views/i18n.py
|
4
|
import os
import gettext as gettext_module
from django import http
from django.conf import settings
from django.utils import importlib
from django.utils.translation import check_for_language, activate, to_locale, get_language
from django.utils.text import javascript_quote
from django.utils.encoding import smart_text
from django.utils.formats import get_format_modules, get_format
from django.utils._os import upath
from django.utils.http import is_safe_url
from django.utils import six
# Internationalization
def set_language(request):
"""
Redirect to a given url while setting the chosen language in the
session or cookie. The url and the language code need to be
specified in the request parameters.
Since this view changes how the user will see the rest of the site, it must
only be accessed as a POST request. If called as a GET request, it will
redirect to the page in the request (the 'next' parameter) without changing
any state.
"""
next = request.REQUEST.get('next')
if not is_safe_url(url=next, host=request.get_host()):
next = request.META.get('HTTP_REFERER')
if not is_safe_url(url=next, host=request.get_host()):
next = '/'
response = http.HttpResponseRedirect(next)
if request.method == 'POST':
lang_code = request.POST.get('language', None)
if lang_code and check_for_language(lang_code):
if hasattr(request, 'session'):
request.session['django_language'] = lang_code
else:
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code)
return response
def get_formats():
"""
Returns all formats strings required for i18n to work
"""
FORMAT_SETTINGS = (
'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT',
'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT',
'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR',
'THOUSAND_SEPARATOR', 'NUMBER_GROUPING',
'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'
)
result = {}
for module in [settings] + get_format_modules(reverse=True):
for attr in FORMAT_SETTINGS:
result[attr] = get_format(attr)
src = []
for k, v in result.items():
if isinstance(v, (six.string_types, int)):
src.append("formats['%s'] = '%s';\n" % (javascript_quote(k), javascript_quote(smart_text(v))))
elif isinstance(v, (tuple, list)):
v = [javascript_quote(smart_text(value)) for value in v]
src.append("formats['%s'] = ['%s'];\n" % (javascript_quote(k), "', '".join(v)))
return ''.join(src)
NullSource = """
/* gettext identity library */
function gettext(msgid) { return msgid; }
function ngettext(singular, plural, count) { return (count == 1) ? singular : plural; }
function gettext_noop(msgid) { return msgid; }
function pgettext(context, msgid) { return msgid; }
function npgettext(context, singular, plural, count) { return (count == 1) ? singular : plural; }
"""
LibHead = """
/* gettext library */
var catalog = new Array();
"""
LibFoot = """
function gettext(msgid) {
var value = catalog[msgid];
if (typeof(value) == 'undefined') {
return msgid;
} else {
return (typeof(value) == 'string') ? value : value[0];
}
}
function ngettext(singular, plural, count) {
value = catalog[singular];
if (typeof(value) == 'undefined') {
return (count == 1) ? singular : plural;
} else {
return value[pluralidx(count)];
}
}
function gettext_noop(msgid) { return msgid; }
function pgettext(context, msgid) {
var value = gettext(context + '\\x04' + msgid);
if (value.indexOf('\\x04') != -1) {
value = msgid;
}
return value;
}
function npgettext(context, singular, plural, count) {
var value = ngettext(context + '\\x04' + singular, context + '\\x04' + plural, count);
if (value.indexOf('\\x04') != -1) {
value = ngettext(singular, plural, count);
}
return value;
}
"""
LibFormatHead = """
/* formatting library */
var formats = new Array();
"""
LibFormatFoot = """
function get_format(format_type) {
var value = formats[format_type];
if (typeof(value) == 'undefined') {
return format_type;
} else {
return value;
}
}
"""
SimplePlural = """
function pluralidx(count) { return (count == 1) ? 0 : 1; }
"""
InterPolate = r"""
function interpolate(fmt, obj, named) {
if (named) {
return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])});
} else {
return fmt.replace(/%s/g, function(match){return String(obj.shift())});
}
}
"""
PluralIdx = r"""
function pluralidx(n) {
var v=%s;
if (typeof(v) == 'boolean') {
return v ? 1 : 0;
} else {
return v;
}
}
"""
def null_javascript_catalog(request, domain=None, packages=None):
"""
Returns "identity" versions of the JavaScript i18n functions -- i.e.,
versions that don't actually do anything.
"""
src = [NullSource, InterPolate, LibFormatHead, get_formats(), LibFormatFoot]
return http.HttpResponse(''.join(src), 'text/javascript')
def javascript_catalog(request, domain='djangojs', packages=None):
"""
Returns the selected language catalog as a javascript library.
Receives the list of packages to check for translations in the
packages parameter either from an infodict or as a +-delimited
string from the request. Default is 'django.conf'.
Additionally you can override the gettext domain for this view,
but usually you don't want to do that, as JavaScript messages
go to the djangojs domain. But this might be needed if you
deliver your JavaScript source from Django templates.
"""
if request.GET:
if 'language' in request.GET:
if check_for_language(request.GET['language']):
activate(request.GET['language'])
if packages is None:
packages = ['django.conf']
if isinstance(packages, six.string_types):
packages = packages.split('+')
packages = [p for p in packages if p == 'django.conf' or p in settings.INSTALLED_APPS]
default_locale = to_locale(settings.LANGUAGE_CODE)
locale = to_locale(get_language())
t = {}
paths = []
en_selected = locale.startswith('en')
en_catalog_missing = True
# paths of requested packages
for package in packages:
p = importlib.import_module(package)
path = os.path.join(os.path.dirname(upath(p.__file__)), 'locale')
paths.append(path)
# add the filesystem paths listed in the LOCALE_PATHS setting
paths.extend(list(reversed(settings.LOCALE_PATHS)))
# first load all english languages files for defaults
for path in paths:
try:
catalog = gettext_module.translation(domain, path, ['en'])
t.update(catalog._catalog)
except IOError:
pass
else:
# 'en' is the selected language and at least one of the packages
# listed in `packages` has an 'en' catalog
if en_selected:
en_catalog_missing = False
# next load the settings.LANGUAGE_CODE translations if it isn't english
if default_locale != 'en':
for path in paths:
try:
catalog = gettext_module.translation(domain, path, [default_locale])
except IOError:
catalog = None
if catalog is not None:
t.update(catalog._catalog)
# last load the currently selected language, if it isn't identical to the default.
if locale != default_locale:
# If the currently selected language is English but it doesn't have a
# translation catalog (presumably due to being the language translated
# from) then a wrong language catalog might have been loaded in the
# previous step. It needs to be discarded.
if en_selected and en_catalog_missing:
t = {}
else:
locale_t = {}
for path in paths:
try:
catalog = gettext_module.translation(domain, path, [locale])
except IOError:
catalog = None
if catalog is not None:
locale_t.update(catalog._catalog)
if locale_t:
t = locale_t
src = [LibHead]
plural = None
if '' in t:
for l in t[''].split('\n'):
if l.startswith('Plural-Forms:'):
plural = l.split(':',1)[1].strip()
if plural is not None:
# this should actually be a compiled function of a typical plural-form:
# Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=',1)[1]
src.append(PluralIdx % plural)
else:
src.append(SimplePlural)
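    # Worked example of the extraction above: for a catalog header line
    #   Plural-Forms: nplurals=2; plural=(n != 1);
    # the code keeps only the expression "(n != 1)", and PluralIdx % plural
    # renders a JavaScript pluralidx(n) that returns 0 for n == 1 and 1 otherwise.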
csrc = []
pdict = {}
for k, v in t.items():
if k == '':
continue
if isinstance(k, six.string_types):
csrc.append("catalog['%s'] = '%s';\n" % (javascript_quote(k), javascript_quote(v)))
elif isinstance(k, tuple):
if k[0] not in pdict:
pdict[k[0]] = k[1]
else:
pdict[k[0]] = max(k[1], pdict[k[0]])
csrc.append("catalog['%s'][%d] = '%s';\n" % (javascript_quote(k[0]), k[1], javascript_quote(v)))
else:
raise TypeError(k)
csrc.sort()
for k, v in pdict.items():
src.append("catalog['%s'] = [%s];\n" % (javascript_quote(k), ','.join(["''"]*(v+1))))
src.extend(csrc)
src.append(LibFoot)
src.append(InterPolate)
src.append(LibFormatHead)
src.append(get_formats())
src.append(LibFormatFoot)
src = ''.join(src)
return http.HttpResponse(src, 'text/javascript')
|
uclouvain/OSIS-Louvain
|
refs/heads/master
|
program_management/forms/education_groups.py
|
1
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from dal import autocomplete
from django import forms
from django.db.models import Case, When, Value, CharField, OuterRef, Subquery
from django.db.models import Q
from django.db.models.functions import Concat
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_filters import OrderingFilter, filters, FilterSet
from base.business.entity import get_entities_ids
from base.forms.utils.filter_field import filter_field_by_regex
from base.models import entity_version
from base.models.academic_year import AcademicYear, starting_academic_year
from base.models.education_group_type import EducationGroupType
from base.models.enums import education_group_categories
from base.models.enums import education_group_types
from base.models.enums.education_group_categories import Categories
from education_group.models.group_year import GroupYear
PARTICULAR = "PARTICULAR"
STANDARD = "STANDARD"
VERSION_CHOICES = (
(STANDARD, _("Standard")),
(PARTICULAR, _("Particulière")),
)
class GroupFilter(FilterSet):
academic_year = filters.ModelChoiceFilter(
queryset=AcademicYear.objects.all(),
required=False,
label=_('Ac yr.'),
empty_label=pgettext_lazy("plural", "All"),
)
category = filters.ChoiceFilter(
choices=list(Categories.choices()),
required=False,
label=_('Category'),
field_name='education_group_type__category',
empty_label=pgettext_lazy("plural", "All")
)
education_group_type = filters.ModelMultipleChoiceFilter(
queryset=EducationGroupType.objects.none(),
required=False,
label=_('Type'),
widget=autocomplete.ModelSelect2Multiple(
url='education_group_type_autocomplete',
forward=['category'],
),
)
management_entity = filters.CharFilter(
method='filter_with_entity_subordinated',
label=_('Entity')
)
with_entity_subordinated = filters.BooleanFilter(
method=lambda queryset, *args, **kwargs: queryset,
label=_('Include subordinate entities'),
widget=forms.CheckboxInput
)
acronym = filters.CharFilter(
field_name="acronym",
method="filter_education_group_year_field",
max_length=40,
required=False,
label=_('Acronym/Short title'),
)
title_fr = filters.CharFilter(
field_name="title_fr",
method='filter_education_group_year_field',
max_length=255,
required=False,
label=_('Title')
)
partial_acronym = filters.CharFilter(
field_name="partial_acronym",
method='filter_education_group_year_field',
max_length=15,
required=False,
label=_('Code'),
)
version = filters.ChoiceFilter(
choices=list(VERSION_CHOICES),
required=False,
label=_('Version'),
field_name='version',
empty_label=pgettext_lazy("plural", "All"),
)
with_entity_transition = filters.BooleanFilter(
method="filter_by_transition",
label=_('Include transition'),
widget=forms.CheckboxInput,
initial='True'
)
order_by_field = 'ordering'
ordering = OrderingFilter(
fields=(
('acronym', 'acronym'),
('partial_acronym', 'code'),
('academic_year__year', 'academic_year'),
('title_fr', 'title_fr'),
('type_ordering', 'type'),
('entity_management_version', 'management_entity')
),
widget=forms.HiddenInput
)
class Meta:
model = GroupYear
fields = [
'acronym',
'partial_acronym',
'title_fr',
'education_group_type__name',
'management_entity',
'with_entity_subordinated',
'version',
'with_entity_transition'
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.queryset = self.get_queryset()
self.form.fields['education_group_type'].queryset = EducationGroupType.objects.all().order_by_translated_name()
self.form.fields['academic_year'].initial = starting_academic_year()
self.form.fields['category'].initial = education_group_categories.TRAINING
self.form.fields["with_entity_subordinated"].initial = kwargs.pop('with_entity_subordinated', True)
self.form.fields["version"].initial = kwargs.pop('version', None)
def filter_with_entity_subordinated(self, queryset, name, value):
with_subordinated = self.form.cleaned_data['with_entity_subordinated']
if value:
entity_ids = get_entities_ids(value, with_subordinated)
queryset = queryset.filter(management_entity__in=entity_ids)
return queryset
@staticmethod
def filter_education_group_year_field(queryset, name, value):
return filter_field_by_regex(queryset, name, value)
def filter_by_transition(self, queryset, name, value):
if not value:
return queryset.exclude(educationgroupversion__is_transition=True)
return queryset
def get_queryset(self):
        # Need this check so as to return an empty queryset by default when the form is unbound
if not self.data:
return GroupYear.objects.none()
management_entity = entity_version.EntityVersion.objects.filter(
entity=OuterRef('management_entity'),
).current(
OuterRef('academic_year__start_date')
).values('acronym')[:1]
return GroupYear.objects.all().annotate(
type_ordering=Case(
*[When(education_group_type__name=key, then=Value(str(_(val))))
for i, (key, val) in enumerate(education_group_types.ALL_TYPES)],
default=Value(''),
output_field=CharField()
)
).annotate(
entity_management_version=Subquery(management_entity)
).annotate(
version=Case(
When(~Q(Q(educationgroupversion__version_name='') | Q(educationgroupversion__isnull=True)),
then=Value(PARTICULAR)),
default=Value(STANDARD),
output_field=CharField(),)
).annotate(
complete_title_fr=Case(
When(
Q(educationgroupversion__isnull=False),
then=Case(
When(Q(educationgroupversion__is_transition=True) &
Q(educationgroupversion__version_name=''),
then=Concat('acronym', Value('[Transition]'))),
When(~Q(educationgroupversion__version_name='') &
Q(educationgroupversion__is_transition=True),
then=Concat('acronym', Value('['), 'educationgroupversion__version_name', Value('-Transition]'))),
When(~Q(educationgroupversion__version_name='') &
Q(educationgroupversion__is_transition=False),
then=Concat('acronym', Value('['), 'educationgroupversion__version_name', Value(']'))),
default='acronym',
output_field=CharField()
)
),
default='acronym',
output_field=CharField()
)
).annotate(
title=Case(
When(Q(educationgroupversion__isnull=False) & ~Q(educationgroupversion__title_fr=''),
then=Concat('title_fr', Value(' ['), 'educationgroupversion__title_fr', Value(']'))),
default='title_fr',
output_field=CharField(),)
)
def filter_queryset(self, queryset):
# Order by id to always ensure same order when objects have same values for order field (ex: title)
qs = super().filter_queryset(queryset)
order_fields = qs.query.order_by + ('id', )
return qs.order_by(*order_fields)
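# A minimal usage sketch (hypothetical GET data; django-filter exposes the
# filtered queryset through the ``qs`` property):
#
#   group_filter = GroupFilter(data={'acronym': 'DROI', 'category': 'TRAINING'})
#   rows = group_filter.qs  # annotated GroupYear queryset, ordered by id last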
|
cloakedcode/CouchPotatoServer
|
refs/heads/master
|
libs/sqlalchemy/engine/ddl.py
|
31
|
# engine/ddl.py
# Copyright (C) 2009-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Routines to handle CREATE/DROP workflow."""
from sqlalchemy import engine, schema
from sqlalchemy.sql import util as sql_util
class DDLBase(schema.SchemaVisitor):
def __init__(self, connection):
self.connection = connection
class SchemaGenerator(DDLBase):
def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
super(SchemaGenerator, self).__init__(connection, **kwargs)
self.checkfirst = checkfirst
self.tables = tables and set(tables) or None
self.preparer = dialect.identifier_preparer
self.dialect = dialect
self.memo = {}
def _can_create_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or \
not self.dialect.has_table(self.connection,
table.name, schema=table.schema)
def _can_create_sequence(self, sequence):
return self.dialect.supports_sequences and \
(
(not self.dialect.sequences_optional or
not sequence.optional) and
(
not self.checkfirst or
not self.dialect.has_sequence(
self.connection,
sequence.name,
schema=sequence.schema)
)
)
def visit_metadata(self, metadata):
if self.tables:
tables = self.tables
else:
tables = metadata.tables.values()
collection = [t for t in sql_util.sort_tables(tables)
if self._can_create_table(t)]
seq_coll = [s for s in metadata._sequences.values()
if s.column is None and self._can_create_sequence(s)]
metadata.dispatch.before_create(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
for seq in seq_coll:
self.traverse_single(seq, create_ok=True)
for table in collection:
self.traverse_single(table, create_ok=True)
metadata.dispatch.after_create(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
def visit_table(self, table, create_ok=False):
if not create_ok and not self._can_create_table(table):
return
table.dispatch.before_create(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)
for column in table.columns:
if column.default is not None:
self.traverse_single(column.default)
self.connection.execute(schema.CreateTable(table))
if hasattr(table, 'indexes'):
for index in table.indexes:
self.traverse_single(index)
table.dispatch.after_create(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)
def visit_sequence(self, sequence, create_ok=False):
if not create_ok and not self._can_create_sequence(sequence):
return
self.connection.execute(schema.CreateSequence(sequence))
def visit_index(self, index):
self.connection.execute(schema.CreateIndex(index))
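# For orientation, a sketch of how this generator is typically driven in this
# SQLAlchemy version (sequencing details are in visit_metadata above):
#
#   from sqlalchemy import MetaData, Table, Column, Integer, create_engine
#   metadata = MetaData()
#   Table('example', metadata, Column('id', Integer, primary_key=True))
#   metadata.create_all(create_engine('sqlite://'))  # walks the SchemaGenerator visitor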
class SchemaDropper(DDLBase):
def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
super(SchemaDropper, self).__init__(connection, **kwargs)
self.checkfirst = checkfirst
self.tables = tables
self.preparer = dialect.identifier_preparer
self.dialect = dialect
self.memo = {}
def visit_metadata(self, metadata):
if self.tables:
tables = self.tables
else:
tables = metadata.tables.values()
collection = [t for t in reversed(sql_util.sort_tables(tables))
if self._can_drop_table(t)]
seq_coll = [s for s in metadata._sequences.values()
if s.column is None and self._can_drop_sequence(s)]
metadata.dispatch.before_drop(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
for table in collection:
self.traverse_single(table, drop_ok=True)
for seq in seq_coll:
self.traverse_single(seq, drop_ok=True)
metadata.dispatch.after_drop(metadata, self.connection,
tables=collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
def _can_drop_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or self.dialect.has_table(self.connection,
table.name, schema=table.schema)
def _can_drop_sequence(self, sequence):
return self.dialect.supports_sequences and \
((not self.dialect.sequences_optional or
not sequence.optional) and
(not self.checkfirst or
self.dialect.has_sequence(
self.connection,
sequence.name,
schema=sequence.schema))
)
def visit_index(self, index):
self.connection.execute(schema.DropIndex(index))
def visit_table(self, table, drop_ok=False):
if not drop_ok and not self._can_drop_table(table):
return
table.dispatch.before_drop(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)
for column in table.columns:
if column.default is not None:
self.traverse_single(column.default)
self.connection.execute(schema.DropTable(table))
table.dispatch.after_drop(table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self)
def visit_sequence(self, sequence, drop_ok=False):
if not drop_ok and not self._can_drop_sequence(sequence):
return
self.connection.execute(schema.DropSequence(sequence))
|
hbenniou/trunk
|
refs/heads/master
|
py/_extraDocs.py
|
7
|
# encoding: utf-8
# 2010 © Václav Šmilauer <eudoxos@arcig.cz>
#
# This module is imported at startup. It is meant to update
# docstrings of wrapper classes, which are not practical to document
# in the c++ source itself, due to the necessity of writing
# \n for newlines and having everything as "string".
#
# PLEASE:
#
# 1. provide at least brief description of the class
# in the c++ code (for those who read it) and
#
# 2. Add something like
#
# "Full documentation of this class is in py/_extraDocs.py."
#
# to the c++ documentation.
import wrapper
# Update docstring of your class/function like this:
#
# wrapper.YourClass.__doc__="""
# This class is documented from _extraDocs.py. Yay!
#
# .. note::
# The c++ documentation will be overwritten by this string.
# """
wrapper.TriaxialTest.__doc__='''
Create a scene for triaxal test.
**Introduction**
Yade includes tools to simulate triaxial tests on particles assemblies. This pre-processor (and variants like e.g. :yref:`CapillaryTriaxialTest`) illustrate how to use them. It generates a scene which will - by default - go through the following steps :
* generate random loose packings in a parallelepiped.
* compress the packing isotropically, either squeezing the packing between moving rigid boxes or expanding the particles while boxes are fixed (depending on flag :yref:`internalCompaction<TriaxialTest.internalCompaction>`). The confining pressure in this stage is defined via :yref:`sigmaIsoCompaction<TriaxialTest.sigmaIsoCompaction>`.
* when the packing is dense and stable, simulate a loading path and get the mechanical response as a result.
The default loading path corresponds to a constant lateral stress (:yref:`sigmaLateralConfinement<TriaxialTest.sigmaLateralConfinement>`) in 2 directions and constant strain rate on the third direction. This default loading path is performed when the flag :yref:`autoCompressionActivation<TriaxialTest.autoCompressionActivation>` it ``True``, otherwise the simulation stops after isotropic compression.
Different loading paths might be performed. In order to define them, the user can modify the flags found in engine :yref:`TriaxialStressController` at any point in the simulation (in c++). If ``TriaxialStressController.wall_X_activated`` is ``true`` boundary X is moved automatically to maintain the defined stress level *sigmaN* (see axis conventions below). If ``false`` the boundary is not controlled by the engine at all. In that case the user is free to prescribe fixed position, constant velocity, or more complex conditions.
.. note:: *Axis conventions.* Boundaries perpendicular to the *x* axis are called "left" and "right", *y* corresponds to "top" and "bottom", and axis *z* to "front" and "back". In the default loading path, strain rate is assigned along *y*, and constant stresses are assigned on *x* and *z*.
**Essential engines**
#. The :yref:`TriaxialCompressionEngine` is used for controlling the state of the sample and simulating loading paths. :yref:`TriaxialCompressionEngine` inherits from :yref:`TriaxialStressController`, which computes stress- and strain-like quantities in the packing and maintain a constant level of stress at each boundary. :yref:`TriaxialCompressionEngine` has few more members in order to impose constant strain rate and control the transition between isotropic compression and triaxial test. Transitions are defined by changing some flags of the :yref:`TriaxialStressController`, switching from/to imposed strain rate to/from imposed stress.
#. The class :yref:`TriaxialStateRecorder` is used to write to a file the history of stresses and strains.
#. :yref:`TriaxialTest` is using :yref:`GlobalStiffnessTimeStepper` to compute an appropriate $\Dt$ for the numerical scheme.
.. note:: ``TriaxialStressController::ComputeUnbalancedForce`` returns a value that can be useful for evaluating the stability of the packing. It is defined as (mean force on particles)/(mean contact force), so that it tends to 0 in a stable packing. This parameter is checked by :yref:`TriaxialCompressionEngine` to switch from one stage of the simulation to the next one (e.g. stop isotropic confinement and start axial loading)
.. admonition:: Frequently Asked Questions
#. How is the packing generated? How can the particle size distribution be changed? Why do I get the message "Exceeded 3000 tries to insert non-overlapping sphere"?
The initial positioning of spheres is done by generating random (x,y,z) in a box and checking if a sphere of radius R (R also randomly generated with respect to a uniform distribution between mean*(1-std_dev) and mean*(1+std_dev)) can be inserted at this location without overlapping with others.
If the sphere overlaps, new (x,y,z)'s are generated until a free position for the new sphere is found. This explains the message you see: after 3000 trial-and-error attempts, the sphere couldn't be placed, and the algorithm stops.
You get the message above if you try to generate an initially dense packing, which is not possible with this algorithm. It can only generate clouds. You should keep the default value of porosity (n~0.7), or even increase it if it is still too low in some cases. The dense state will be obtained in the second step (compaction, see below).
#. How is the compaction done, what are the parameters :yref:`maxWallVelocity<TriaxialTest.maxWallVelocity>` and :yref:`finalMaxMultiplier<TriaxialTest.finalMaxMultiplier>`?
Compaction is done
#. by moving rigid boxes or
#. by increasing the sizes of the particles (decided using the option :yref:`internalCompaction<TriaxialTest.internalCompaction>` ⇒ size increase).
Both algorithms need numerical parameters to prevent instabilities. For instance, with method (1) :yref:`maxWallVelocity<TriaxialTest.maxWallVelocity>` is the maximum wall velocity; with method (2) :yref:`finalMaxMultiplier<TriaxialTest.finalMaxMultiplier>` is the maximum value of the multiplier applied to sizes at each iteration (always something like 1.00001).
#. During the simulation of a triaxial compression test, the wall in one direction moves with an increment of strain while the stresses in the other two directions are adjusted to :yref:`sigma_iso<TriaxialStressController.sigma_iso>`. How are the stresses in the other directions maintained constant at :yref:`sigma_iso<TriaxialStressController.sigma_iso>`? What is the mechanism? Where is it implemented in Yade?
The control of stress on a boundary is based on the total stiffness *K* of all contacts between the packing and this boundary. In short, at each step, displacement=stress_error/K. This algorithm is implemented in :yref:`TriaxialStressController`, and the control itself is in ``TriaxialStressController::ControlExternalStress``. The control can be turned off independently for each boundary, using the flags ``wall_XXX_activated``, with *XXX*\ ∈{*top*, *bottom*, *left*, *right*, *back*, *front*}. The imposed stress is a unique value (:yref:`sigma_iso<TriaxialStressController.sigma_iso>`) for all directions if :yref:`TriaxialStressController.isAxisymetric`, or 3 independent values :yref:`sigma1<TriaxialStressController.sigma1>`, :yref:`sigma2<TriaxialStressController.sigma2>`, :yref:`sigma3<TriaxialStressController.sigma3>`.
#. Which value of friction angle do you use during the compaction phase of the Triaxial Test?
The friction during the compaction (whether you are using the expansion method or the compression one for the specimen generation) can be anything between 0 and the final value used during the Triaxial phase. Note that higher friction than the final one would result in volumetric collapse at the beginning of the test. The purpose of using a different value of friction during this phase is related to the fact that the final porosity you get at the end of the sample generation essentially depends on it as well as on the assumed Particle Size Distribution. Changing the initial value of friction will get to a different value of the final porosity.
#. What is the aim of the ``bool isRadiusControlIteration``?
This internal variable (updated automatically) is true every *N* timesteps (with *N*\ =\ :yref:`radiusControlInterval<TriaxialTest.radiusControlInterval>`). For other timesteps, there is no expansion. Cycling without expanding is just a way to speed up the simulation, based on the idea that a 1% increase every 10 iterations needs fewer operations than 0.1% at each iteration, but will give similar results.
#. Why does the unbalanced force reach a low value only after many timesteps in the compaction phase?
The value of unbalanced force (dimensionless) is expected to reach a low value (i.e. identifying a static-equilibrium condition for the specimen) only at the end of the compaction phase. The code is not aiming at simulating a quasistatic isotropic compaction process; it is only giving a stable packing at the end of it.
'''
wrapper.Peri3dController.__doc__=r'''
Class for controlling independently all 6 components of "engineering" :yref:`stress<Peri3dController.stress>` and :yref:`strain<Peri3dController.strain>` of periodic :yref:`Cell`. :yref:`goal<Peri3dController.goal>` are the goal values, while :yref:`stressMask<Peri3dController.stressMask>` determines which components prescribe stress and which prescribe strain.
If the strain is prescribed, an appropriate strain rate is directly applied. If the stress is prescribed, a strain predictor is used: from the stress values in the two previous steps, the strain rate is prescribed so that the value of stress in the next step is as close as possible to the ideal one. The current algorithm is extremely simple and will probably be changed in the future, but it is robust enough and mostly works fine.
Stress error (difference between actual and ideal stress) is evaluated in current and previous steps ($\mathrm{d}\sigma_i,\mathrm{d}\sigma_{i-1}$). Linear extrapolation is used to estimate error in the next step
.. math:: \mathrm{d}\sigma_{i+1}=2\mathrm{d}\sigma_i - \mathrm{d}\sigma_{i-1}
According to this error, the strain rate is modified by :yref:`mod<Peri3dController.mod>` parameter
.. math:: \mathrm{d}\sigma_{i+1}\left\{\begin{array}{c} >0 \rightarrow \dot{\varepsilon}_{i+1} = \dot{\varepsilon}_i - \max(\mathrm{abs}(\dot{\boldsymbol{\varepsilon}}_i))\cdot\mathrm{mod} \\ <0 \rightarrow \dot{\varepsilon}_{i+1} = \dot{\varepsilon}_i + \max(\mathrm{abs}(\dot{\boldsymbol{\varepsilon}}_i))\cdot\mathrm{mod} \end{array}\right.
As a consequence, the prescribed stress will (almost) never have exactly the prescribed value, but the difference will be very small (and decreasing for increasing :yref:`nSteps<Peri3dController.nSteps>`). This approach works well if one of the dominant strain rates is prescribed. If all stresses are prescribed, or if all goal strains are prescribed as zero, a good estimation is needed for the first step; therefore the compliance matrix is estimated (from user-defined estimations of the macroscopic material parameters :yref:`youngEstimation<Peri3dController.youngEstimation>` and :yref:`poissonEstimation<Peri3dController.poissonEstimation>`) and the respective strain rates are computed from the prescribed stress rates and the compliance matrix (the estimation of the compliance matrix could be computed automatically, avoiding user inputs of this kind).
The simulation on rotated periodic cell is also supported. Firstly, the `polar decomposition <http://en.wikipedia.org/wiki/Polar_decomposition#Matrix_polar_decomposition>`_ is performed on cell's transformation matrix :yref:`trsf<Cell.trsf>` $\mathcal{T}=\mat{U}\mat{P}$, where $\mat{U}$ is orthogonal (unitary) matrix representing rotation and $\mat{P}$ is a positive semi-definite Hermitian matrix representing strain. A logarithm of $\mat{P}$ should be used to obtain realistic values at higher strain values (not implemented yet). A prescribed strain increment in global coordinates $\mathrm{d}t\cdot\dot{\boldsymbol{\varepsilon}}$ is properly rotated to cell's local coordinates and added to $\mat{P}$
.. math:: \mat{P}_{i+1}=\mat{P}+\mat{U}^{\mathsf{T}}\mathrm{d}t\cdot\dot{\boldsymbol{\varepsilon}}\mat{U}
The new value of :yref:`trsf<Cell.trsf>` is computed at $\mat{T}_{i+1}=\mat{UP}_{i+1}$. From current and next :yref:`trsf<Cell.trsf>` the cell's velocity gradient :yref:`velGrad<Cell.velGrad>` is computed (according to its definition) as
.. math:: \mat{V} = (\mat{T}_{i+1}\mat{T}^{-1}-\mat{I})/\mathrm{d}t
The current implementation allows the user to define an independent loading "path" for each prescribed component, i.e. to define the prescribed value as a function of time (or :yref:`progress<Peri3dController.progress>` or steps). See :yref:`Paths<Peri3dController.xxPath>`.
Examples :ysrc:`examples/test/peri3dController_example1.py` and :ysrc:`examples/test/peri3dController_triaxialCompression.py` explain usage and inputs of Peri3dController, :ysrc:`examples/test/peri3dController_shear.py` is an example of using shear components and also simulation on rotated cell.
'''
wrapper.Ig2_Sphere_Sphere_L3Geom.__doc__=r'''Functor for computing incrementally configuration of 2 :yref:`Spheres<Sphere>` stored in :yref:`L3Geom`; the configuration is positioned in global space by local origin $\vec{c}$ (contact point) and rotation matrix $\mat{T}$ (orthonormal transformation matrix), and its degrees of freedom are local displacement $\vec{u}$ (in one normal and two shear directions); with :yref:`Ig2_Sphere_Sphere_L6Geom` and :yref:`L6Geom`, there is additionally $\vec{\phi}$. The first row of $\mat{T}$, i.e. local $x$-axis, is the contact normal noted $\vec{n}$ for brevity. Additionally, quasi-constant values of $\vec{u}_0$ (and $\vec{\phi}_0$) are stored as shifted origins of $\vec{u}$ (and $\vec{\phi}$); therefore, current value of displacement is always $\curr{\vec{u}}-\vec{u}_0$.
Suppose two spheres with radii $r_i$, positions $\vec{x}_i$, velocities $\vec{v}_i$, angular velocities $\vec{\omega}_i$.
When there is not yet contact, it will be created if $u_N=|\curr{\vec{x}}_2-\curr{\vec{x}}_1|-|f_d|(r_1+r_2)<0$, where $f_d$ is :yref:`distFactor<Ig2_Sphere_Sphere_L3Geom.distFactor>` (sometimes also called
\`\`interaction radius''). If $f_d>0$, then $\vec{u}_{0x}$ will be initialized to $u_N$, otherwise to 0. In other words, contact will be created if spheres enlarged by $|f_d|$ touch, and the \`\`equilibrium distance'' (where $\vec{u}_x-\vec{u}_{0x}$ is zero) will be set to the current distance if $f_d$ is positive, and to the geometrically-touching distance if negative.
Local axes (rows of $\mat{T}$) are initially defined as follows:
* local $x$-axis is $\vec{n}=\vec{x}_l=\normalized{\vec{x}_2-\vec{x}_1}$;
* local $y$-axis positioned arbitrarily, but in a deterministic manner: aligned with the $xz$ plane (if $\vec{n}_y<\vec{n}_z$) or $xy$ plane (otherwise);
* local $z$-axis $\vec{z}_l=\vec{x}_l\times\vec{y}_l$.
If there has already been contact between the two spheres, it is updated to keep track of rigid motion of the contact (one that does not change mutual configuration of spheres) and mutual configuration changes. Rigid motion transforms local coordinate system and can be decomposed in rigid translation (affecting $\vec{c}$), and rigid rotation (affecting $\mat{T}$), which can be split in rotation $\vec{o}_r$ perpendicular to the normal and rotation $\vec{o}_t$ (\`\`twist'') parallel with the normal:
.. math:: \pprev{\vec{o}_r}=\prev{\vec{n}}\times\curr{\vec{n}}.
Since velocities are known at previous midstep ($t-\Dt/2$), we consider mid-step normal
.. math:: \pprev{\vec{n}}=\frac{\prev{\vec{n}}+\curr{\vec{n}}}{2}.
For the sake of numerical stability, $\pprev{\vec{n}}$ is re-normalized after being computed, unless prohibited by :yref:`approxMask<Ig2_Sphere_Sphere_L3Geom.approxMask>`. If :yref:`approxMask<Ig2_Sphere_Sphere_L3Geom.approxMask>` has the appropriate bit set, the mid-normal is not computed, and we simply use $\pprev{\vec{n}}\approx\prev{\vec{n}}$.
Rigid rotation parallel with the normal is
.. math:: \pprev{\vec{o}_t}=\pprev{\vec{n}}\left(\pprev{\vec{n}}\cdot\frac{\pprev{\vec{\omega}}_1+\pprev{\vec{\omega}}_2}{2}\right)\Dt.
*Branch vectors* $\vec{b}_1$, $\vec{b}_2$ (connecting $\curr{\vec{x}}_1$, $\curr{\vec{x}}_2$ with $\curr{\vec{c}}$) are computed depending on :yref:`noRatch<Ig2_Sphere_Sphere_L3Geom.noRatch>` (see :yref:`here<Ig2_Sphere_Sphere_ScGeom.avoidGranularRatcheting>`).
.. math::
:nowrap:
\begin{align*}
\vec{b}_1&=\begin{cases} r_1 \curr{\vec{n}} & \mbox{with \texttt{noRatch}} \\ \curr{\vec{c}}-\curr{\vec{x}}_1 & \mbox{otherwise} \end{cases} \\
\vec{b}_2&=\begin{cases} -r_2\curr{\vec{n}} & \mbox{with \texttt{noRatch}} \\ \curr{\vec{c}}-\curr{\vec{x}}_2 & \mbox{otherwise} \end{cases} \\
\end{align*}
Relative velocity at $\curr{\vec{c}}$ can be computed as
.. math:: \pprev{\vec{v}_r}=(\pprev{\vec{\tilde{v}}_2}+\vec{\omega}_2\times\vec{b}_2)-(\vec{v}_1+\vec{\omega}_1\times\vec{b}_1)
where $\vec{\tilde{v}}_2$ is $\vec{v}_2$ without the mean-field velocity gradient in periodic boundary conditions (see :yref:`Cell.homoDeform`). In the numerical implementation, the normal part of incident velocity is removed (since it is computed directly) with $\pprev{\vec{v}_{r2}}=\pprev{\vec{v}_r}-(\pprev{\vec{n}}\cdot\pprev{\vec{v}_r})\pprev{\vec{n}}$.
Any vector $\vec{a}$ expressed in global coordinates transforms during one timestep as
.. math:: \curr{\vec{a}}=\prev{\vec{a}}+\pprev{\vec{v}_r}\Dt-\prev{\vec{a}}\times\pprev{\vec{o}_r}-\prev{\vec{a}}\times{\pprev{\vec{t}_r}}
where the increments have the meaning of relative shear, rigid rotation normal to $\vec{n}$ and rigid rotation parallel with $\vec{n}$. Local coordinate system orientation, rotation matrix $\mat{T}$, is updated by rows, i.e.
.. math:: \curr{\mat{T}}=\begin{pmatrix} \curr{\vec{n}_x} & \curr{\vec{n}_y} & \curr{\vec{n}_z} \\ \multicolumn{3}{c}{\prev{\mat{T}_{1,\bullet}}-\prev{\mat{T}_{1,\bullet}}\times\pprev{\vec{o}_r}-\prev{\mat{T}_{1,\bullet}}\times\pprev{\vec{o}_t}} \\ \multicolumn{3}{c}{\prev{\mat{T}_{2,\bullet}}-\prev{\mat{T}_{2,\bullet}}\times\pprev{\vec{o}_r}-\prev{\mat{T}_{2,\bullet}}\times\pprev{\vec{o}_t}} \end{pmatrix}
This matrix is re-normalized (unless prevented by :yref:`approxMask<Ig2_Sphere_Sphere_L3Geom.approxMask>`) and mid-step transformation is computed using quaternion spherical interpolation as
.. math:: \pprev{\mat{T}}=\mathrm{Slerp}\,\left(\prev{\mat{T}};\curr{\mat{T}};t=1/2\right).
Depending on :yref:`approxMask<Ig2_Sphere_Sphere_L3Geom.approxMask>`, this computation can be avoided by approximating $\pprev{\mat{T}}=\prev{\mat{T}}$.
Finally, current displacement is evaluated as
.. math:: \curr{\vec{u}}=\prev{\vec{u}}+\pprev{\mat{T}}\pprev{\vec{v}_r}\Dt.
For the normal component, non-incremental evaluation is preferred, giving
.. math:: \curr{\vec{u}_x}=|\curr{\vec{x}_2}-\curr{\vec{x}_1}|-(r_1+r_2)
If this functor is called for :yref:`L6Geom`, local rotation is updated as
.. math:: \curr{\vec{\phi}}=\prev{\vec{\phi}}+\pprev{\mat{T}}\Dt(\vec{\omega}_2-\vec{\omega}_1)
.. note: TODO: ``distFactor`` is not yet implemented as described above; some formulas mix values at different times, should be checked carefully.
'''
wrapper.LawTester.__doc__='''Prescribe and apply deformations of an interaction in terms of local mutual displacements and rotations. The loading path is specified either using :yref:`path<LawTester.path>` (as sequence of 6-vectors containing generalized displacements $u_x$, $u_y$, $u_z$, $\phi_x$, $\phi_y$, $\phi_z$) or :yref:`disPath<LawTester.disPath>` ($u_x$, $u_y$, $u_z$) and :yref:`rotPath<LawTester.rotPath>` ($\phi_x$, $\phi_y$, $\phi_z$). Time function with time values (step numbers) corresponding to points on loading path is given by :yref:`pathSteps<LawTester.pathSteps>`. Loading values are linearly interpolated between given loading path points, and starting zero-value (the initial configuration) is assumed for both :yref:`path<LawTester.path>` and :yref:`pathSteps<LawTester.pathSteps>`. :yref:`hooks<LawTester.hooks>` can specify python code to run when respective point on the path is reached; when the path is finished, :yref:`doneHook<LawTester.doneHook>` will be run.
LawTester should be placed between :yref:`InteractionLoop` and :yref:`NewtonIntegrator` in the simulation loop, since it controls motion via setting linear/angular velocities on particles; those velocities are integrated by :yref:`NewtonIntegrator` to yield an actual position change, which in turn causes :yref:`IGeom` to be updated (and :yref:`contact law<LawFunctor>` applied) when :yref:`InteractionLoop` is executed. Constitutive law generating forces on particles will not affect prescribed particle motion, since both particles have all :yref:`DoFs blocked<State.blockedDOFs>` when first used with LawTester.
LawTester uses, as much as possible, :yref:`IGeom` to provide useful data (such as local coordinate system), but is able to compute those independently if absent in the respective :yref:`IGeom`:
=================== ===== ==================================
:yref:`IGeom` #DoFs LawTester support level
=================== ===== ==================================
:yref:`L3Geom` 3 full
:yref:`L6Geom` 6 full
:yref:`ScGeom` 3 emulate local coordinate system
:yref:`ScGeom6D` 6 emulate local coordinate system
=================== ===== ==================================
Depending on :yref:`IGeom`, 3 ($u_x$, $u_y$, $u_z$) or 6 ($u_x$, $u_y$, $u_z$, $\phi_x$, $\phi_y$, $\phi_z$) degrees of freedom (DoFs) are controlled with LawTester, by prescribing linear and angular velocities of both particles in contact. All DoFs controlled with LawTester are orthogonal (fully decoupled) and are controlled independently.
When 3 DoFs are controlled, :yref:`rotWeight<LawTester.rotWeight>` controls whether local shear is applied by moving particle on arc around the other one, or by rotating without changing position; although such rotation induces mutual rotation on the interaction, it is ignored with :yref:`IGeom` with only 3 DoFs. When 6 DoFs are controlled, only arc-displacement is applied for shear, since otherwise mutual rotation would occur.
:yref:`idWeight<LawTester.idWeight>` distributes prescribed motion between both particles (the resulting local deformation is the same whether ``id1`` is moved towards ``id2`` or ``id2`` towards ``id1``). This holds only for $u_x$, $u_y$, $u_z$, $\phi_x$, however; bending rotations $\phi_y$, $\phi_z$ are always distributed to both spheres in inverse proportion to their radii, regardless of ``idWeight``, so that no shear is induced.
LawTester knows the current contact deformation from 2 sources: from its own internal data (which are used for prescribing the displacement at every step), accessible in :yref:`uTest<LawTester.uTest>`, and from :yref:`IGeom` itself (depending on which data it provides), stored in :yref:`uGeom<LawTester.uGeom>`. These two values should be identical (up to numerical precision), which provides a way to test whether :yref:`IGeom` and related functors compute what they are supposed to compute.
LawTester-operated interactions can be rendered with :yref:`GlExtra_LawTester` renderer.
See :ysrc:`scripts/test/law-test.py` for an example.
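A minimal, schematic setup might look like the following (the sphere ids and the surrounding scene setup are assumptions, not part of this documentation; the referenced script is the authoritative example):

.. code-block:: python

    O.engines=[
        ForceResetter(),
        InsertionSortCollider([Bo1_Sphere_Aabb()]),
        InteractionLoop(
            [Ig2_Sphere_Sphere_L3Geom()],
            [Ip2_FrictMat_FrictMat_FrictPhys()],
            [Law2_L3Geom_FrictPhys_ElPerfPl()]
        ),
        # LawTester goes between InteractionLoop and NewtonIntegrator, as required
        LawTester(ids=[0,1],                        # ids of the two spheres in contact (assumed to exist)
            disPath=[(1e-3,0,0),(1e-3,1e-3,0)],     # normal loading, then shear
            pathSteps=[100,200],
            doneHook='O.pause()'),
        NewtonIntegrator()
    ]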
'''
|
fitermay/intellij-community
|
refs/heads/master
|
python/testData/intentions/convertTripleQuotedStringRawStrings.py
|
79
|
S = (<caret>r'''foo
double-only"
single-only'
mix'ed"
'''
"""\r\n"""
r'one line')
|
tersmitten/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_analyticsprofile.py
|
29
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_analyticsprofile
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of AnalyticsProfile Avi RESTful Object
description:
- This module is used to configure AnalyticsProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
apdex_response_threshold:
description:
- If a client receives an http response in less than the satisfactory latency threshold, the request is considered satisfied.
- It is considered tolerated if it is not satisfied and less than tolerated latency factor multiplied by the satisfactory latency threshold.
- Greater than this number and the client's request is considered frustrated.
- Allowed values are 1-30000.
- Default value when not specified in API or module is interpreted by Avi Controller as 500.
- Units(MILLISECONDS).
apdex_response_tolerated_factor:
description:
- Client tolerated response latency factor.
- Client must receive a response within this factor times the satisfactory threshold (apdex_response_threshold) to be considered tolerated.
- Allowed values are 1-1000.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
apdex_rtt_threshold:
description:
- Satisfactory client to avi round trip time(rtt).
- Allowed values are 1-2000.
- Default value when not specified in API or module is interpreted by Avi Controller as 250.
- Units(MILLISECONDS).
apdex_rtt_tolerated_factor:
description:
- Tolerated client to avi round trip time(rtt) factor.
            - Tolerated rtt is this factor multiplied by the satisfactory threshold (apdex_rtt_threshold).
- Allowed values are 1-1000.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
apdex_rum_threshold:
description:
- If a client is able to load a page in less than the satisfactory latency threshold, the pageload is considered satisfied.
            - It is considered tolerated if it is greater than satisfied but less than the tolerated latency factor multiplied by the satisfied latency.
- Greater than this number and the client's request is considered frustrated.
- A pageload includes the time for dns lookup, download of all http objects, and page render time.
- Allowed values are 1-30000.
- Default value when not specified in API or module is interpreted by Avi Controller as 5000.
- Units(MILLISECONDS).
apdex_rum_tolerated_factor:
description:
- Virtual service threshold factor for tolerated page load time (plt) as multiple of apdex_rum_threshold.
- Allowed values are 1-1000.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
apdex_server_response_threshold:
description:
- A server http response is considered satisfied if latency is less than the satisfactory latency threshold.
- The response is considered tolerated when it is greater than satisfied but less than the tolerated latency factor * s_latency.
- Greater than this number and the server response is considered frustrated.
- Allowed values are 1-30000.
- Default value when not specified in API or module is interpreted by Avi Controller as 400.
- Units(MILLISECONDS).
apdex_server_response_tolerated_factor:
description:
- Server tolerated response latency factor.
            - Server must respond within this factor times the satisfactory threshold (apdex_server_response_threshold) to be considered tolerated.
- Allowed values are 1-1000.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
apdex_server_rtt_threshold:
description:
- Satisfactory client to avi round trip time(rtt).
- Allowed values are 1-2000.
- Default value when not specified in API or module is interpreted by Avi Controller as 125.
- Units(MILLISECONDS).
apdex_server_rtt_tolerated_factor:
description:
- Tolerated client to avi round trip time(rtt) factor.
            - Tolerated rtt is this factor multiplied by the satisfactory threshold (apdex_server_rtt_threshold).
- Allowed values are 1-1000.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
client_log_config:
description:
- Configure which logs are sent to the avi controller from ses and how they are processed.
client_log_streaming_config:
description:
- Configure to stream logs to an external server.
- Field introduced in 17.1.1.
version_added: "2.4"
conn_lossy_ooo_threshold:
description:
- A connection between client and avi is considered lossy when more than this percentage of out of order packets are received.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 50.
- Units(PERCENT).
conn_lossy_timeo_rexmt_threshold:
description:
- A connection between client and avi is considered lossy when more than this percentage of packets are retransmitted due to timeout.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 20.
- Units(PERCENT).
conn_lossy_total_rexmt_threshold:
description:
- A connection between client and avi is considered lossy when more than this percentage of packets are retransmitted.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 50.
- Units(PERCENT).
conn_lossy_zero_win_size_event_threshold:
description:
            - A client connection is considered lossy when the percentage of times a packet could not be transmitted due to tcp zero window is above this threshold.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.
- Units(PERCENT).
conn_server_lossy_ooo_threshold:
description:
- A connection between avi and server is considered lossy when more than this percentage of out of order packets are received.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 50.
- Units(PERCENT).
conn_server_lossy_timeo_rexmt_threshold:
description:
- A connection between avi and server is considered lossy when more than this percentage of packets are retransmitted due to timeout.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 20.
- Units(PERCENT).
conn_server_lossy_total_rexmt_threshold:
description:
- A connection between avi and server is considered lossy when more than this percentage of packets are retransmitted.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 50.
- Units(PERCENT).
conn_server_lossy_zero_win_size_event_threshold:
description:
            - A server connection is considered lossy when the percentage of times a packet could not be transmitted due to tcp zero window is above this threshold.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.
- Units(PERCENT).
description:
description:
- User defined description for the object.
disable_se_analytics:
description:
            - Disable node (service engine) level analytics for vs metrics.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
disable_server_analytics:
description:
- Disable analytics on backend servers.
            - This may be desired in container environments when there are a large number of ephemeral servers.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_client_close_before_request_as_error:
description:
- Exclude client closed connection before an http request could be completed from being classified as an error.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_dns_policy_drop_as_significant:
description:
- Exclude dns policy drops from the list of errors.
- Field introduced in 17.2.2.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.5"
type: bool
exclude_gs_down_as_error:
description:
- Exclude queries to gslb services that are operationally down from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_http_error_codes:
description:
- List of http status codes to be excluded from being classified as an error.
            - Error connections or responses impact health score, are included as significant logs, and may be classified as part of a dos attack.
exclude_invalid_dns_domain_as_error:
description:
- Exclude dns queries to domains outside the domains configured in the dns application profile from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_invalid_dns_query_as_error:
description:
- Exclude invalid dns queries from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_no_dns_record_as_error:
description:
- Exclude queries to domains that did not have configured services/records from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_no_valid_gs_member_as_error:
description:
- Exclude queries to gslb services that have no available members from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_persistence_change_as_error:
description:
            - Exclude 'persistence server changed while load balancing' from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_server_dns_error_as_error:
description:
- Exclude server dns error response from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_server_tcp_reset_as_error:
description:
- Exclude server tcp reset from errors.
- It is common for applications like ms exchange.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_syn_retransmit_as_error:
description:
- Exclude 'server unanswered syns' from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_tcp_reset_as_error:
description:
- Exclude tcp resets by client from the list of potential errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
exclude_unsupported_dns_query_as_error:
description:
- Exclude unsupported dns queries from the list of errors.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
hs_event_throttle_window:
description:
- Time window (in secs) within which only unique health change events should occur.
- Default value when not specified in API or module is interpreted by Avi Controller as 1209600.
hs_max_anomaly_penalty:
description:
- Maximum penalty that may be deducted from health score for anomalies.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 10.
hs_max_resources_penalty:
description:
- Maximum penalty that may be deducted from health score for high resource utilization.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 25.
hs_max_security_penalty:
description:
- Maximum penalty that may be deducted from health score based on security assessment.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 100.
hs_min_dos_rate:
description:
- Dos connection rate below which the dos security assessment will not kick in.
- Default value when not specified in API or module is interpreted by Avi Controller as 1000.
hs_performance_boost:
description:
- Adds free performance score credits to health score.
- It can be used for compensating health score for known slow applications.
- Allowed values are 0-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
hs_pscore_traffic_threshold_l4_client:
description:
- Threshold number of connections in 5min, below which apdexr, apdexc, rum_apdex, and other network quality metrics are not computed.
- Default value when not specified in API or module is interpreted by Avi Controller as 10.0.
hs_pscore_traffic_threshold_l4_server:
description:
- Threshold number of connections in 5min, below which apdexr, apdexc, rum_apdex, and other network quality metrics are not computed.
- Default value when not specified in API or module is interpreted by Avi Controller as 10.0.
hs_security_certscore_expired:
description:
- Score assigned when the certificate has expired.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.0.
hs_security_certscore_gt30d:
description:
- Score assigned when the certificate expires in more than 30 days.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.0.
hs_security_certscore_le07d:
description:
- Score assigned when the certificate expires in less than or equal to 7 days.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.0.
hs_security_certscore_le30d:
description:
- Score assigned when the certificate expires in less than or equal to 30 days.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.0.
hs_security_chain_invalidity_penalty:
description:
- Penalty for allowing certificates with invalid chain.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.0.
hs_security_cipherscore_eq000b:
description:
- Score assigned when the minimum cipher strength is 0 bits.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.0.
hs_security_cipherscore_ge128b:
description:
- Score assigned when the minimum cipher strength is greater than equal to 128 bits.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.0.
hs_security_cipherscore_lt128b:
description:
- Score assigned when the minimum cipher strength is less than 128 bits.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 3.5.
hs_security_encalgo_score_none:
description:
- Score assigned when no algorithm is used for encryption.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.0.
hs_security_encalgo_score_rc4:
description:
- Score assigned when rc4 algorithm is used for encryption.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.5.
hs_security_hsts_penalty:
description:
- Penalty for not enabling hsts.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.0.
hs_security_nonpfs_penalty:
description:
- Penalty for allowing non-pfs handshakes.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.0.
hs_security_selfsignedcert_penalty:
description:
- Deprecated.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.0.
hs_security_ssl30_score:
description:
- Score assigned when supporting ssl3.0 encryption protocol.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 3.5.
hs_security_tls10_score:
description:
- Score assigned when supporting tls1.0 encryption protocol.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.0.
hs_security_tls11_score:
description:
- Score assigned when supporting tls1.1 encryption protocol.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.0.
hs_security_tls12_score:
description:
- Score assigned when supporting tls1.2 encryption protocol.
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.0.
hs_security_weak_signature_algo_penalty:
description:
- Penalty for allowing weak signature algorithm(s).
- Allowed values are 0-5.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.0.
name:
description:
- The name of the analytics profile.
required: true
ranges:
description:
- List of http status code ranges to be excluded from being classified as an error.
resp_code_block:
description:
- Block of http response codes to be excluded from being classified as an error.
- Enum options - AP_HTTP_RSP_4XX, AP_HTTP_RSP_5XX.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the analytics profile.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create a custom Analytics profile object
avi_analyticsprofile:
controller: '{{ controller }}'
username: '{{ username }}'
password: '{{ password }}'
apdex_response_threshold: 500
apdex_response_tolerated_factor: 4.0
apdex_rtt_threshold: 250
apdex_rtt_tolerated_factor: 4.0
apdex_rum_threshold: 5000
apdex_rum_tolerated_factor: 4.0
apdex_server_response_threshold: 400
apdex_server_response_tolerated_factor: 4.0
apdex_server_rtt_threshold: 125
apdex_server_rtt_tolerated_factor: 4.0
conn_lossy_ooo_threshold: 50
conn_lossy_timeo_rexmt_threshold: 20
conn_lossy_total_rexmt_threshold: 50
conn_lossy_zero_win_size_event_threshold: 2
conn_server_lossy_ooo_threshold: 50
conn_server_lossy_timeo_rexmt_threshold: 20
conn_server_lossy_total_rexmt_threshold: 50
conn_server_lossy_zero_win_size_event_threshold: 2
disable_se_analytics: false
disable_server_analytics: false
exclude_client_close_before_request_as_error: false
exclude_persistence_change_as_error: false
exclude_server_tcp_reset_as_error: false
exclude_syn_retransmit_as_error: false
exclude_tcp_reset_as_error: false
hs_event_throttle_window: 1209600
hs_max_anomaly_penalty: 10
hs_max_resources_penalty: 25
hs_max_security_penalty: 100
hs_min_dos_rate: 1000
hs_performance_boost: 20
hs_pscore_traffic_threshold_l4_client: 10.0
hs_pscore_traffic_threshold_l4_server: 10.0
hs_security_certscore_expired: 0.0
hs_security_certscore_gt30d: 5.0
hs_security_certscore_le07d: 2.0
hs_security_certscore_le30d: 4.0
hs_security_chain_invalidity_penalty: 1.0
hs_security_cipherscore_eq000b: 0.0
hs_security_cipherscore_ge128b: 5.0
hs_security_cipherscore_lt128b: 3.5
hs_security_encalgo_score_none: 0.0
hs_security_encalgo_score_rc4: 2.5
hs_security_hsts_penalty: 0.0
hs_security_nonpfs_penalty: 1.0
hs_security_selfsignedcert_penalty: 1.0
hs_security_ssl30_score: 3.5
hs_security_tls10_score: 5.0
hs_security_tls11_score: 5.0
hs_security_tls12_score: 5.0
hs_security_weak_signature_algo_penalty: 1.0
name: jason-analytics-profile
tenant_ref: Demo
"""
RETURN = '''
obj:
description: AnalyticsProfile (api/analyticsprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
apdex_response_threshold=dict(type='int',),
apdex_response_tolerated_factor=dict(type='float',),
apdex_rtt_threshold=dict(type='int',),
apdex_rtt_tolerated_factor=dict(type='float',),
apdex_rum_threshold=dict(type='int',),
apdex_rum_tolerated_factor=dict(type='float',),
apdex_server_response_threshold=dict(type='int',),
apdex_server_response_tolerated_factor=dict(type='float',),
apdex_server_rtt_threshold=dict(type='int',),
apdex_server_rtt_tolerated_factor=dict(type='float',),
client_log_config=dict(type='dict',),
client_log_streaming_config=dict(type='dict',),
conn_lossy_ooo_threshold=dict(type='int',),
conn_lossy_timeo_rexmt_threshold=dict(type='int',),
conn_lossy_total_rexmt_threshold=dict(type='int',),
conn_lossy_zero_win_size_event_threshold=dict(type='int',),
conn_server_lossy_ooo_threshold=dict(type='int',),
conn_server_lossy_timeo_rexmt_threshold=dict(type='int',),
conn_server_lossy_total_rexmt_threshold=dict(type='int',),
conn_server_lossy_zero_win_size_event_threshold=dict(type='int',),
description=dict(type='str',),
disable_se_analytics=dict(type='bool',),
disable_server_analytics=dict(type='bool',),
exclude_client_close_before_request_as_error=dict(type='bool',),
exclude_dns_policy_drop_as_significant=dict(type='bool',),
exclude_gs_down_as_error=dict(type='bool',),
exclude_http_error_codes=dict(type='list',),
exclude_invalid_dns_domain_as_error=dict(type='bool',),
exclude_invalid_dns_query_as_error=dict(type='bool',),
exclude_no_dns_record_as_error=dict(type='bool',),
exclude_no_valid_gs_member_as_error=dict(type='bool',),
exclude_persistence_change_as_error=dict(type='bool',),
exclude_server_dns_error_as_error=dict(type='bool',),
exclude_server_tcp_reset_as_error=dict(type='bool',),
exclude_syn_retransmit_as_error=dict(type='bool',),
exclude_tcp_reset_as_error=dict(type='bool',),
exclude_unsupported_dns_query_as_error=dict(type='bool',),
hs_event_throttle_window=dict(type='int',),
hs_max_anomaly_penalty=dict(type='int',),
hs_max_resources_penalty=dict(type='int',),
hs_max_security_penalty=dict(type='int',),
hs_min_dos_rate=dict(type='int',),
hs_performance_boost=dict(type='int',),
hs_pscore_traffic_threshold_l4_client=dict(type='float',),
hs_pscore_traffic_threshold_l4_server=dict(type='float',),
hs_security_certscore_expired=dict(type='float',),
hs_security_certscore_gt30d=dict(type='float',),
hs_security_certscore_le07d=dict(type='float',),
hs_security_certscore_le30d=dict(type='float',),
hs_security_chain_invalidity_penalty=dict(type='float',),
hs_security_cipherscore_eq000b=dict(type='float',),
hs_security_cipherscore_ge128b=dict(type='float',),
hs_security_cipherscore_lt128b=dict(type='float',),
hs_security_encalgo_score_none=dict(type='float',),
hs_security_encalgo_score_rc4=dict(type='float',),
hs_security_hsts_penalty=dict(type='float',),
hs_security_nonpfs_penalty=dict(type='float',),
hs_security_selfsignedcert_penalty=dict(type='float',),
hs_security_ssl30_score=dict(type='float',),
hs_security_tls10_score=dict(type='float',),
hs_security_tls11_score=dict(type='float',),
hs_security_tls12_score=dict(type='float',),
hs_security_weak_signature_algo_penalty=dict(type='float',),
name=dict(type='str', required=True),
ranges=dict(type='list',),
resp_code_block=dict(type='list',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'analyticsprofile',
set([]))
if __name__ == '__main__':
main()
|
Lilywei123/tempest
|
refs/heads/master
|
tempest/api/volume/test_volumes_negative.py
|
2
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest import exceptions
from tempest import test
class VolumesV2NegativeTest(base.BaseVolumeTest):
@classmethod
def resource_setup(cls):
super(VolumesV2NegativeTest, cls).resource_setup()
cls.client = cls.volumes_client
cls.name_field = cls.special_fields['name_field']
# Create a test shared instance and volume for attach/detach tests
cls.volume = cls.create_volume()
cls.mountpoint = "/dev/vdc"
@test.attr(type=['negative', 'gate'])
def test_volume_get_nonexistent_volume_id(self):
# Should not be able to get a non-existent volume
self.assertRaises(exceptions.NotFound, self.client.get_volume,
str(uuid.uuid4()))
@test.attr(type=['negative', 'gate'])
def test_volume_delete_nonexistent_volume_id(self):
# Should not be able to delete a non-existent Volume
self.assertRaises(exceptions.NotFound, self.client.delete_volume,
str(uuid.uuid4()))
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_invalid_size(self):
# Should not be able to create volume with invalid size
# in request
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.BadRequest, self.client.create_volume,
size='#$%', display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_out_passing_size(self):
# Should not be able to create volume without passing size
# in request
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.BadRequest, self.client.create_volume,
size='', display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_size_zero(self):
# Should not be able to create volume with size zero
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.BadRequest, self.client.create_volume,
size='0', display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_size_negative(self):
# Should not be able to create volume with size negative
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.BadRequest, self.client.create_volume,
size='-1', display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_nonexistent_volume_type(self):
# Should not be able to create volume with non-existent volume type
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.create_volume,
size='1', volume_type=str(uuid.uuid4()),
display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_nonexistent_snapshot_id(self):
# Should not be able to create volume with non-existent snapshot
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.create_volume,
size='1', snapshot_id=str(uuid.uuid4()),
display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_create_volume_with_nonexistent_source_volid(self):
# Should not be able to create volume with non-existent source volume
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.create_volume,
size='1', source_volid=str(uuid.uuid4()),
display_name=v_name, metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_update_volume_with_nonexistent_volume_id(self):
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.update_volume,
volume_id=str(uuid.uuid4()), display_name=v_name,
metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_update_volume_with_invalid_volume_id(self):
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.update_volume,
volume_id='#$%%&^&^', display_name=v_name,
metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_update_volume_with_empty_volume_id(self):
v_name = data_utils.rand_name('Volume-')
metadata = {'Type': 'work'}
self.assertRaises(exceptions.NotFound, self.client.update_volume,
volume_id='', display_name=v_name,
metadata=metadata)
@test.attr(type=['negative', 'gate'])
def test_get_invalid_volume_id(self):
# Should not be able to get volume with invalid id
self.assertRaises(exceptions.NotFound, self.client.get_volume,
'#$%%&^&^')
@test.attr(type=['negative', 'gate'])
def test_get_volume_without_passing_volume_id(self):
# Should not be able to get volume when empty ID is passed
self.assertRaises(exceptions.NotFound, self.client.get_volume, '')
@test.attr(type=['negative', 'gate'])
def test_delete_invalid_volume_id(self):
# Should not be able to delete volume when invalid ID is passed
self.assertRaises(exceptions.NotFound, self.client.delete_volume,
'!@#$%^&*()')
@test.attr(type=['negative', 'gate'])
def test_delete_volume_without_passing_volume_id(self):
# Should not be able to delete volume when empty ID is passed
self.assertRaises(exceptions.NotFound, self.client.delete_volume, '')
@test.attr(type=['negative', 'gate'])
@test.services('compute')
def test_attach_volumes_with_nonexistent_volume_id(self):
srv_name = data_utils.rand_name('Instance-')
resp, server = self.servers_client.create_server(srv_name,
self.image_ref,
self.flavor_ref)
self.addCleanup(self.servers_client.delete_server, server['id'])
self.servers_client.wait_for_server_status(server['id'], 'ACTIVE')
self.assertRaises(exceptions.NotFound,
self.client.attach_volume,
str(uuid.uuid4()),
server['id'],
self.mountpoint)
@test.attr(type=['negative', 'gate'])
def test_detach_volumes_with_invalid_volume_id(self):
self.assertRaises(exceptions.NotFound,
self.client.detach_volume,
'xxx')
@test.attr(type=['negative', 'gate'])
def test_volume_extend_with_size_smaller_than_original_size(self):
# Extend volume with smaller size than original size.
extend_size = 0
self.assertRaises(exceptions.BadRequest, self.client.extend_volume,
self.volume['id'], extend_size)
@test.attr(type=['negative', 'gate'])
def test_volume_extend_with_non_number_size(self):
# Extend volume when size is non number.
extend_size = 'abc'
self.assertRaises(exceptions.BadRequest, self.client.extend_volume,
self.volume['id'], extend_size)
@test.attr(type=['negative', 'gate'])
def test_volume_extend_with_None_size(self):
# Extend volume with None size.
extend_size = None
self.assertRaises(exceptions.BadRequest, self.client.extend_volume,
self.volume['id'], extend_size)
@test.attr(type=['negative', 'gate'])
def test_volume_extend_with_nonexistent_volume_id(self):
# Extend volume size when volume is nonexistent.
extend_size = int(self.volume['size']) + 1
self.assertRaises(exceptions.NotFound, self.client.extend_volume,
str(uuid.uuid4()), extend_size)
@test.attr(type=['negative', 'gate'])
def test_volume_extend_without_passing_volume_id(self):
# Extend volume size when passing volume id is None.
extend_size = int(self.volume['size']) + 1
self.assertRaises(exceptions.NotFound, self.client.extend_volume,
None, extend_size)
@test.attr(type=['negative', 'gate'])
def test_reserve_volume_with_nonexistent_volume_id(self):
self.assertRaises(exceptions.NotFound,
self.client.reserve_volume,
str(uuid.uuid4()))
@test.attr(type=['negative', 'gate'])
def test_unreserve_volume_with_nonexistent_volume_id(self):
self.assertRaises(exceptions.NotFound,
self.client.unreserve_volume,
str(uuid.uuid4()))
@test.attr(type=['negative', 'gate'])
def test_reserve_volume_with_negative_volume_status(self):
# Mark volume as reserved.
_, body = self.client.reserve_volume(self.volume['id'])
# Mark volume which is marked as reserved before
self.assertRaises(exceptions.BadRequest,
self.client.reserve_volume,
self.volume['id'])
# Unmark volume as reserved.
_, body = self.client.unreserve_volume(self.volume['id'])
@test.attr(type=['negative', 'gate'])
def test_list_volumes_with_nonexistent_name(self):
v_name = data_utils.rand_name('Volume-')
params = {self.name_field: v_name}
_, fetched_volume = self.client.list_volumes(params)
self.assertEqual(0, len(fetched_volume))
@test.attr(type=['negative', 'gate'])
def test_list_volumes_detail_with_nonexistent_name(self):
v_name = data_utils.rand_name('Volume-')
params = {self.name_field: v_name}
_, fetched_volume = self.client.list_volumes_with_detail(params)
self.assertEqual(0, len(fetched_volume))
@test.attr(type=['negative', 'gate'])
def test_list_volumes_with_invalid_status(self):
params = {'status': 'null'}
_, fetched_volume = self.client.list_volumes(params)
self.assertEqual(0, len(fetched_volume))
@test.attr(type=['negative', 'gate'])
def test_list_volumes_detail_with_invalid_status(self):
params = {'status': 'null'}
_, fetched_volume = self.client.list_volumes_with_detail(params)
self.assertEqual(0, len(fetched_volume))
class VolumesV1NegativeTest(VolumesV2NegativeTest):
_api_version = 1
_name = 'display_name'
|
erudit/eruditorg
|
refs/heads/master
|
tests/unit/core/citations/test_middleware.py
|
1
|
import pytest
from django.contrib.auth.models import AnonymousUser
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import RequestFactory
from erudit.test.factories import ArticleFactory
from erudit.test.factories import IssueFactory
from core.citations.citations import SavedCitationList
from core.citations.middleware import SavedCitationListMiddleware
pytestmark = pytest.mark.django_db
def test_associates_the_citation_list_to_the_request_object():
issue = IssueFactory.create()
article = ArticleFactory.create(issue=issue)
request = RequestFactory().get("/")
request.user = AnonymousUser()
middleware = SessionMiddleware()
middleware.process_request(request)
citation_list = SavedCitationList(request)
citation_list.add(article)
citation_list.save()
middleware = SavedCitationListMiddleware()
middleware.process_request(request)
assert list(request.saved_citations) == [article.solr_id]
|
fishroot/nemoa
|
refs/heads/master
|
nemoa/workspace/imports/text.py
|
1
|
# -*- coding: utf-8 -*-
__author__ = 'Patrick Michl'
__email__ = 'frootlab@gmail.com'
__license__ = 'GPLv3'
import nemoa
import os
def filetypes():
"""Get supported text filetypes for workspace import."""
return {
'ini': 'Nemoa Workspace Description' }
def load(path, **kwds):
"""Import workspace from text file."""
from nemoa.base import env
# extract filetype from path
filetype = env.fileext(path).lower()
# test if filetype is supported
if filetype not in filetypes():
raise ValueError("""could not import graph:
filetype '%s' is not supported.""" % filetype)
if filetype == 'ini':
return Ini(**kwds).load(path)
return False
class Ini:
"""Import workspace configuration from ini file."""
settings = None
default = {}
def __init__(self, **kwds):
self.settings = {**self.default, **kwds}
def load(self, path):
"""Return workspace configuration as dictionary.
Args:
path: configuration file used to generate workspace
configuration dictionary.
"""
from nemoa.file import inifile
structure = {
'workspace': {
'description': str,
'maintainer': str,
'email': str,
'startup_script': str},
'folders': {
'datasets': str,
'networks': str,
'systems': str,
'models': str,
'scripts': str}}
config = inifile.load(path, structure)
config['type'] = 'base.Workspace'
return {'config': config}
|
showgood/YCM_windows
|
refs/heads/master
|
python/ycm/completers/completer_utils.py
|
4
|
#!/usr/bin/env python
#
# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
from copy import deepcopy
import os
DEFAULT_FILETYPE_TRIGGERS = {
'c' : ['->', '.'],
'objc' : ['->', '.'],
'ocaml' : ['.', '#'],
'cpp,objcpp' : ['->', '.', '::'],
'perl' : ['->'],
'php' : ['->', '::'],
'cs,java,javascript,d,vim,python,perl6,scala,vb,elixir,go' : ['.'],
'ruby' : ['.', '::'],
'lua' : ['.', ':'],
'erlang' : [':'],
}
def _FiletypeTriggerDictFromSpec( trigger_dict_spec ):
triggers_for_filetype = defaultdict( set )
for key, value in trigger_dict_spec.iteritems():
filetypes = key.split( ',' )
for filetype in filetypes:
triggers_for_filetype[ filetype ].update( value )
return triggers_for_filetype
def _FiletypeDictUnion( dict_one, dict_two ):
"""Returns a new filetye dict that's a union of the provided two dicts.
Dict params are supposed to be type defaultdict(set)."""
final_dict = deepcopy( dict_one )
for key, value in dict_two.iteritems():
final_dict[ key ].update( value )
return final_dict
def TriggersForFiletype( user_triggers ):
default_triggers = _FiletypeTriggerDictFromSpec(
DEFAULT_FILETYPE_TRIGGERS )
return _FiletypeDictUnion( default_triggers, dict( user_triggers ) )
def _PathToCompletersFolder():
dir_of_current_script = os.path.dirname( os.path.abspath( __file__ ) )
return os.path.join( dir_of_current_script )
def PathToFiletypeCompleterPluginLoader( filetype ):
return os.path.join( _PathToCompletersFolder(), filetype, 'hook.py' )
def FiletypeCompleterExistsForFiletype( filetype ):
return os.path.exists( PathToFiletypeCompleterPluginLoader( filetype ) )
|
leghtas/pyHFSS
|
refs/heads/master
|
hfss.py
|
1
|
from __future__ import division
import atexit
from copy import copy
import os
import tempfile
import types
import numpy
import signal
import pythoncom
import time
from sympy.parsing import sympy_parser
from pint import UnitRegistry # units
from win32com.client import Dispatch, CDispatch
ureg = UnitRegistry()
Q = ureg.Quantity
BASIS_ORDER = {"Zero Order": 0,
"First Order": 1,
"Second Order": 2,
"Mixed Order": -1}
def simplify_arith_expr(expr):
try:
out = repr(sympy_parser.parse_expr(str(expr)))
return out
except:
print "Couldn't parse", expr
raise
def increment_name(base, existing):
if not base in existing:
return base
n = 1
make_name = lambda: base + str(n)
while make_name() in existing:
n += 1
return make_name()
def extract_value_unit(expr, units):
"""
:type expr: str
:type units: str
:return: float
"""
return Q(expr).to(units).magnitude
class VariableString(str):
def __add__(self, other):
return var("(%s) + (%s)" % (self, other))
def __radd__(self, other):
return var("(%s) + (%s)" % (other, self))
def __sub__(self, other):
return var("(%s) - (%s)" % (self, other))
def __rsub__(self, other):
return var("(%s) - (%s)" % (other, self))
def __mul__(self, other):
return var("(%s) * (%s)" % (self, other))
def __rmul__(self, other):
return var("(%s) * (%s)" % (other, self))
def __div__(self, other):
return var("(%s) / (%s)" % (self, other))
def __rdiv__(self, other):
return var("(%s) / (%s)" % (other, self))
def __truediv__(self, other):
return var("(%s) / (%s)" % (self, other))
def __rtruediv__(self, other):
return var("(%s) / (%s)" % (other, self))
def __pow__(self, other):
return var("(%s) ^ (%s)" % (self, other))
def __rpow__(self, other):
return var("(%s) ^ (%s)" % (other, self))
def __neg__(self):
return var("-(%s)" % self)
def __abs__(self):
return var("abs(%s)" % self)
def var(x):
if isinstance(x, str):
return VariableString(simplify_arith_expr(x))
return x
_release_fns = []
def _add_release_fn(fn):
global _release_fns
_release_fns.append(fn)
atexit.register(fn)
signal.signal(signal.SIGTERM, fn)
signal.signal(signal.SIGABRT, fn)
def release():
global _release_fns
for fn in _release_fns:
fn()
time.sleep(0.1)
refcount = pythoncom._GetInterfaceCount()
if refcount > 0:
print "Warning! %d COM references still alive"
print "HFSS will likely refuse to shut down"
class COMWrapper(object):
def __init__(self):
_add_release_fn(self.release)
def release(self):
for k, v in self.__dict__.items():
if isinstance(v, CDispatch):
setattr(self, k, None)
class HfssPropertyObject(COMWrapper):
prop_holder = None
prop_tab = None
prop_server = None
def make_str_prop(name, prop_tab=None, prop_server=None):
return make_prop(name, prop_tab=prop_tab, prop_server=prop_server)
def make_int_prop(name, prop_tab=None, prop_server=None):
return make_prop(name, prop_tab=prop_tab, prop_server=prop_server, prop_args=["MustBeInt:=", True])
def make_float_prop(name, prop_tab=None, prop_server=None):
return make_prop(name, prop_tab=prop_tab, prop_server=prop_server, prop_args=["MustBeInt:=", False])
def make_prop(name, prop_tab=None, prop_server=None, prop_args=None):
def set_prop(self, value, prop_tab=prop_tab, prop_server=prop_server, prop_args=prop_args):
prop_tab = self.prop_tab if prop_tab is None else prop_tab
prop_server = self.prop_server if prop_server is None else prop_server
if isinstance(prop_tab, types.FunctionType):
prop_tab = prop_tab(self)
if isinstance(prop_server, types.FunctionType):
prop_server = prop_server(self)
if prop_args is None:
prop_args = []
self.prop_holder.ChangeProperty(
["NAME:AllTabs",
["NAME:"+prop_tab,
["NAME:PropServers", prop_server],
["NAME:ChangedProps",
["NAME:"+name, "Value:=", value] + prop_args]]])
def get_prop(self, prop_tab=prop_tab, prop_server=prop_server):
prop_tab = self.prop_tab if prop_tab is None else prop_tab
prop_server = self.prop_server if prop_server is None else prop_server
if isinstance(prop_tab, types.FunctionType):
prop_tab = prop_tab(self)
if isinstance(prop_server, types.FunctionType):
prop_server = prop_server(self)
return self.prop_holder.GetPropertyValue(prop_tab, prop_server, name)
return property(get_prop, set_prop)
class HfssApp(COMWrapper):
def __init__(self):
super(HfssApp, self).__init__()
self._app = Dispatch('AnsoftHfss.HfssScriptInterface')
# in v2016 the main object is 'Ansoft.ElectronicsDesktop'
def get_app_desktop(self):
return HfssDesktop(self, self._app.GetAppDesktop())
# in v2016, there is also getApp - which can be called with HFSS
class HfssDesktop(COMWrapper):
def __init__(self, app, desktop):
"""
:type app: HfssApp
:type desktop: Dispatch
"""
super(HfssDesktop, self).__init__()
self.parent = app
self._desktop = desktop
def close_all_windows(self):
self._desktop.CloseAllWindows()
def project_count(self):
return self._desktop.Count()
def get_active_project(self):
return HfssProject(self, self._desktop.GetActiveProject())
def get_projects(self):
return [HfssProject(self, p) for p in self._desktop.GetProjects()]
def get_project_names(self):
return self._desktop.GetProjectList()
def get_version(self):
return self._desktop.GetVersion()
def new_project(self):
return HfssProject(self, self._desktop.NewProject())
def open_project(self, path):
''' returns error if already open '''
return HfssProject(self, self._desktop.OpenProject(path))
def set_active_project(self, name):
self._desktop.SetActiveProject(name)
@property
def project_directory(self):
return self._desktop.GetProjectDirectory()
@project_directory.setter
def project_directory(self, path):
self._desktop.SetProjectDirectory(path)
@property
def library_directory(self):
return self._desktop.GetLibraryDirectory()
@library_directory.setter
def library_directory(self, path):
self._desktop.SetLibraryDirectory(path)
@property
def temp_directory(self):
return self._desktop.GetTempDirectory()
@temp_directory.setter
def temp_directory(self, path):
self._desktop.SetTempDirectory(path)
class HfssProject(COMWrapper):
def __init__(self, desktop, project):
"""
:type desktop: HfssDesktop
:type project: Dispatch
"""
super(HfssProject, self).__init__()
self.parent = desktop
self._project = project
#self.name = project.GetName()
def close(self):
self._project.Close()
def make_active(self):
self.parent.set_active_project(self.name)
def get_designs(self):
return [HfssDesign(self, d) for d in self._project.GetDesigns()]
def save(self, path=None):
if path is None:
self._project.Save()
else:
self._project.SaveAs(path, True)
def simulate_all(self):
self._project.SimulateAll()
def import_dataset(self, path):
self._project.ImportDataset(path)
def rename_design(self, design, rename):
if design in self.get_designs():
design.rename_design(design.name, rename)
else:
raise ValueError('%s design does not exist' % design.name)
def duplicate_design(self, target, source):
src_design = self.get_design(source)
return src_design.duplicate(name=target)
def get_variable_names(self):
return [VariableString(s) for s in self._project.GetVariables()]
def get_variables(self):
return {VariableString(s): self.get_variable_value(s) for s in self._project.GetVariables()}
def get_variable_value(self, name):
return self._project.GetVariableValue(name)
def create_variable(self, name, value):
self._project.ChangeProperty(
["NAME:AllTabs",
["NAME:ProjectVariableTab",
["NAME:PropServers", "ProjectVariables"],
["Name:NewProps",
["NAME:" + name,
"PropType:=", "VariableProp",
"UserDef:=", True,
"Value:=", value]]]])
def set_variable(self, name, value):
if name not in self._project.GetVariables():
self.create_variable(name, value)
else:
self._project.SetVariableValue(name, value)
return VariableString(name)
def get_path(self):
return self._project.GetPath()
def new_design(self, name, type):
name = increment_name(name, [d.GetName() for d in self._project.GetDesigns()])
return HfssDesign(self, self._project.InsertDesign("HFSS", name, type, ""))
def get_design(self, name):
return HfssDesign(self, self._project.GetDesign(name))
def get_active_design(self):
d = self._project.GetActiveDesign()
if d is None:
raise EnvironmentError("No Design Active")
return HfssDesign(self, d)
def new_dm_design(self, name):
return self.new_design(name, "DrivenModal")
def new_em_design(self, name):
return self.new_design(name, "Eigenmode")
@property # v2016
def name(self):
return self._project.GetName()
class HfssDesign(COMWrapper):
def __init__(self, project, design):
super(HfssDesign, self).__init__()
self.parent = project
self._design = design
self.name = design.GetName()
self.solution_type = design.GetSolutionType()
if design is None:
return
self._setup_module = design.GetModule("AnalysisSetup")
self._solutions = design.GetModule("Solutions")
self._fields_calc = design.GetModule("FieldsReporter")
self._output = design.GetModule("OutputVariable")
self._boundaries = design.GetModule("BoundarySetup")
self._reporter = design.GetModule("ReportSetup")
self._modeler = design.SetActiveEditor("3D Modeler")
self._optimetrics = design.GetModule("Optimetrics")
self.modeler = HfssModeler(self, self._modeler, self._boundaries)
def rename_design(self, name):
old_name = self._design.GetName()
self._design.RenameDesignInstance(old_name, name)
def copy_to_project(self, project):
project.make_active()
project._project.CopyDesign(self.name)
project._project.Paste()
return project.get_active_design()
def duplicate(self, name=None):
dup = self.copy_to_project(self.parent)
if name is not None:
dup.rename_design(name)
return dup
def get_setup_names(self):
return self._setup_module.GetSetups()
def get_setup(self, name=None):
"""
:rtype: HfssSetup
"""
setups = self.get_setup_names()
if not setups:
raise EnvironmentError("No Setups Present")
if name is None:
name = setups[0]
elif name not in setups:
raise EnvironmentError("Setup {} not found: {}".format(name, setups))
if self.solution_type == "Eigenmode":
return HfssEMSetup(self, name)
elif self.solution_type == "DrivenModal":
return HfssDMSetup(self, name)
def create_dm_setup(self, freq_ghz=1, name="Setup", max_delta_s=0.1, max_passes=10,
min_passes=1, min_converged=1, pct_refinement=30,
basis_order=-1):
name = increment_name(name, self.get_setup_names())
self._setup_module.InsertSetup(
"HfssDriven", [
"NAME:"+name,
"Frequency:=", str(freq_ghz)+"GHz",
"MaxDeltaS:=", max_delta_s,
"MaximumPasses:=", max_passes,
"MinimumPasses:=", min_passes,
"MinimumConvergedPasses:=", min_converged,
"PercentRefinement:=", pct_refinement,
"IsEnabled:=", True,
"BasisOrder:=", basis_order
])
return HfssDMSetup(self, name)
def create_em_setup(self, name="Setup", min_freq_ghz=1, n_modes=1, max_delta_f=0.1, max_passes=10,
min_passes=1, min_converged=1, pct_refinement=30,
basis_order=-1):
name = increment_name(name, self.get_setup_names())
self._setup_module.InsertSetup(
"HfssEigen", [
"NAME:"+name,
"MinimumFrequency:=", str(min_freq_ghz)+"GHz",
"NumModes:=", n_modes,
"MaxDeltaFreq:=", max_delta_f,
"ConvergeOnRealFreq:=", True,
"MaximumPasses:=", max_passes,
"MinimumPasses:=", min_passes,
"MinimumConvergedPasses:=", min_converged,
"PercentRefinement:=", pct_refinement,
"IsEnabled:=", True,
"BasisOrder:=", basis_order
])
return HfssEMSetup(self, name)
def delete_setup(self, name):
if name in self.get_setup_names():
self._setup_module.DeleteSetups(name)
def get_nominal_variation(self):
return self._design.GetNominalVariation()
def create_variable(self, name, value, postprocessing=False):
if postprocessing==True:
variableprop = "PostProcessingVariableProp"
else:
variableprop = "VariableProp"
self._design.ChangeProperty(
["NAME:AllTabs",
["NAME:LocalVariableTab",
["NAME:PropServers", "LocalVariables"],
["Name:NewProps",
["NAME:" + name,
"PropType:=", variableprop,
"UserDef:=", True,
"Value:=", value]]]])
def set_variable(self, name, value, postprocessing=False):
# TODO: check if variable does not exist and quit if it doesn't?
if name not in self._design.GetVariables()+self._design.GetPostProcessingVariables():
self.create_variable(name, value, postprocessing=postprocessing)
else:
self._design.SetVariableValue(name, value)
return VariableString(name)
def get_variable_value(self, name):
return self._design.GetVariableValue(name)
def get_variable_names(self):
return [VariableString(s) for s in self._design.GetVariables()+self._design.GetPostProcessingVariables()]
def get_variables(self):
local_variables = self._design.GetVariables()+self._design.GetPostProcessingVariables()
return {lv : self.get_variable_value(lv) for lv in local_variables}
def copy_design_variables(self, source_design):
''' does not check that variables are all present '''
# don't care about values
source_variables = source_design.get_variables()
for name, value in source_variables.iteritems():
self.set_variable(name, value)
def get_excitations(self):
self._boundaries.GetExcitations()
def _evaluate_variable_expression(self, expr, units):
"""
:type expr: str
:type units: str
:return: float
"""
try:
sexp = sympy_parser.parse_expr(expr)
except SyntaxError:
return Q(expr).to(units).magnitude
sub_exprs = {fs: self.get_variable_value(fs.name) for fs in sexp.free_symbols}
return float(sexp.subs({fs: self._evaluate_variable_expression(e, units) for fs, e in sub_exprs.items()}))
def eval_expr(self, expr, units="mm"):
return str(self._evaluate_variable_expression(expr, units)) + units
def Clear_Field_Clac_Stack(self):
self._fields_calc.CalcStack("Clear")
class HfssSetup(HfssPropertyObject):
prop_tab = "HfssTab"
passes = make_int_prop("Passes")
pct_refinement = make_float_prop("Percent Refinement")
basis_order = make_str_prop("Basis Order")
def __init__(self, design, setup):
"""
:type design: HfssDesign
:type setup: Dispatch
"""
super(HfssSetup, self).__init__()
self.parent = design
self.prop_holder = design._design
self._setup_module = design._setup_module
self._reporter = design._reporter
self._solutions = design._solutions
self.name = setup
self.solution_name = setup + " : LastAdaptive"
self.prop_server = "AnalysisSetup:" + setup
self.expression_cache_items = []
def analyze(self, name=None):
if name is None:
name = self.name
self.parent._design.Analyze(name)
def insert_sweep(self, start_ghz, stop_ghz, count=None, step_ghz=None,
name="Sweep", type="Fast", save_fields=False):
if (count is None) == (step_ghz is None):
raise ValueError("Exactly one of 'points' and 'delta' must be specified")
name = increment_name(name, self.get_sweep_names())
params = [
"NAME:"+name,
"IsEnabled:=", True,
"StartValue:=", "%fGHz" % start_ghz,
"StopValue:=", "%fGHz" % stop_ghz,
"Type:=", type,
"SaveFields:=", save_fields,
"ExtrapToDC:=", False,
]
if step_ghz is not None:
params.extend([
"SetupType:=", "LinearSetup",
"StepSize:=", "%fGHz" % step_ghz,
])
else:
params.extend([
"SetupType:=", "LinearCount",
"Count:=", count,
])
self._setup_module.InsertFrequencySweep(self.name, params)
return HfssFrequencySweep(self, name)
def delete_sweep(self, name):
self._setup_module.DeleteSweep(self.name, name)
def add_fields_convergence_expr(self, expr, pct_delta, phase=0):
"""note: because of hfss idiocy, you must call "commit_convergence_exprs" after adding all exprs"""
assert isinstance(expr, NamedCalcObject)
self.expression_cache_items.append(
["NAME:CacheItem",
"Title:=", expr.name+"_conv",
"Expression:=", expr.name,
"Intrinsics:=", "Phase='{}deg'".format(phase),
"IsConvergence:=", True,
"UseRelativeConvergence:=", 1,
"MaxConvergenceDelta:=", pct_delta,
"MaxConvergeValue:=", "0.05",
"ReportType:=", "Fields",
["NAME:ExpressionContext"]])
def commit_convergence_exprs(self):
"""note: this will eliminate any convergence expressions not added through this interface"""
args = [
"NAME:"+self.name,
["NAME:ExpressionCache", self.expression_cache_items]
]
self._setup_module.EditSetup(self.name, args)
def get_sweep_names(self):
return self._setup_module.GetSweeps(self.name)
def get_sweep(self, name=None):
sweeps = self.get_sweep_names()
if not sweeps:
raise EnvironmentError("No Sweeps Present")
if name is None:
name = sweeps[0]
elif name not in sweeps:
raise EnvironmentError("Sweep {} not found in {}".format(name, sweeps))
return HfssFrequencySweep(self, name)
def get_convergence(self, variation=""):
fn = tempfile.mktemp()
self.parent._design.ExportConvergence(self.name, variation, fn, False)
return numpy.loadtxt(fn)
def get_mesh_stats(self, variation=""):
        # TODO: seems to be broken in 2016; needs fixing
fn = tempfile.mktemp()
self.parent._design.ExportMeshStats(self.name, variation, fn, False)
return numpy.loadtxt(fn)
def get_profile(self, variation=""):
fn = tempfile.mktemp()
self.parent._design.ExportProfile(self.name, variation, fn, False)
return numpy.loadtxt(fn)
def get_fields(self):
return HfssFieldsCalc(self)
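# Illustrative sketch (not part of the original module): a typical frequency-sweep
# workflow with an existing HfssSetup instance `setup`. The frequency range and
# the "S21" format string below are hypothetical placeholders.
#
#   sweep = setup.insert_sweep(4.0, 8.0, count=401, type="Fast")
#   sweep.analyze_sweep()
#   freqs, (s21,) = sweep.get_network_data("S21")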
class HfssDMSetup(HfssSetup):
solution_freq = make_float_prop("Solution Freq")
delta_s = make_float_prop("Delta S")
solver_type = make_str_prop("Solver Type")
def setup_link(self, linked_setup):
'''
type: linked_setup <HfssSetup>
'''
args = ["NAME:" + self.name,
["NAME:MeshLink",
"Project:=", "This Project*",
"Design:=", linked_setup.parent.name,
"Soln:=", linked_setup.solution_name,
self._map_variables_by_name(),
"ForceSourceToSolve:=", True,
"PathRelativeTo:=", "TargetProject",
],
]
self._setup_module.EditSetup(self.name, args)
def _map_variables_by_name(self):
''' does not check that variables are all present '''
# don't care about values
project_variables = self.parent.parent.get_variable_names()
design_variables = self.parent.get_variable_names()
# build array
args = ["NAME:Params",]
for name in project_variables:
args.extend([str(name)+":=", str(name)])
for name in design_variables:
args.extend([str(name)+":=", str(name)])
return args
def get_solutions(self):
return HfssDMDesignSolutions(self, self.parent._solutions)
class HfssEMSetup(HfssSetup):
min_freq = make_float_prop("Min Freq")
n_modes = make_int_prop("Modes")
delta_f = make_float_prop("Delta F")
def get_solutions(self):
return HfssEMDesignSolutions(self, self.parent._solutions)
class HfssDesignSolutions(COMWrapper):
def __init__(self, setup, solutions):
'''
:type setup: HfssSetup
'''
super(HfssDesignSolutions, self).__init__()
self.parent = setup
self._solutions = solutions
class HfssEMDesignSolutions(HfssDesignSolutions):
def eigenmodes(self, lv=""):
fn = tempfile.mktemp()
self._solutions.ExportEigenmodes(self.parent.solution_name, lv, fn)
data = numpy.loadtxt(fn, dtype='str')
        # A single eigenmode comes back as a 1D array of shape (N,) rather than
        # (N, 1), so wrap it to keep the column indexing below uniform.
        if numpy.size(numpy.shape(data)) == 1:
            data = numpy.array([data])
if numpy.size(data[0,:])==6: # checking if values for Q were saved
kappa_over_2pis = [2*float(ii) for ii in data[:,3]] # eigvalue=(omega-i*kappa/2)/2pi
# so kappa/2pi = 2*Im(eigvalue)
else:
kappa_over_2pis = None
freqs = [float(ii) for ii in data[:,1]]
return freqs, kappa_over_2pis
def set_mode(self, n, phase):
n_modes = int(self.parent.n_modes)
self._solutions.EditSources(
"EigenStoredEnergy",
["NAME:SourceNames", "EigenMode"],
["NAME:Modes", n_modes],
["NAME:Magnitudes"] + [1 if i + 1 == n else 0 for i in range(n_modes)],
["NAME:Phases"] + [phase if i + 1 == n else 0 for i in range(n_modes)],
["NAME:Terminated"], ["NAME:Impedances"]
)
class HfssDMDesignSolutions(HfssDesignSolutions):
pass
class HfssFrequencySweep(COMWrapper):
prop_tab = "HfssTab"
start_freq = make_float_prop("Start")
stop_freq = make_float_prop("Stop")
step_size = make_float_prop("Step Size")
count = make_float_prop("Count")
sweep_type = make_str_prop("Type")
def __init__(self, setup, name):
"""
:type setup: HfssSetup
:type name: str
"""
super(HfssFrequencySweep, self).__init__()
self.parent = setup
self.name = name
self.solution_name = self.parent.name + " : " + name
self.prop_holder = self.parent.prop_holder
self.prop_server = self.parent.prop_server + ":" + name
def analyze_sweep(self):
self.parent.analyze(self.solution_name)
def get_network_data(self, formats):
if isinstance(formats, str):
formats = formats.split(",")
formats = [f.upper() for f in formats]
fmts_lists = {'S': [], 'Y': [], 'Z': []}
for f in formats:
fmts_lists[f[0]].append((int(f[1]), int(f[2])))
ret = [None] * len(formats)
freq = None
for data_type, list in fmts_lists.items():
if list:
fn = tempfile.mktemp()
self.parent._solutions.ExportNetworkData(
[], self.parent.name + " : " + self.name,
2, fn, ["all"], False, 0,
data_type, -1, 1, 15
)
with open(fn) as f:
f.readline()
colnames = f.readline().split()
array = numpy.loadtxt(fn, skiprows=2)
if freq is None:
freq = array[:, 0]
for i, j in list:
real_idx = colnames.index("%s[%d,%d]_Real" % (data_type, i, j))
imag_idx = colnames.index("%s[%d,%d]_Imag" % (data_type, i, j))
c_arr = array[:, real_idx] + 1j*array[:, imag_idx]
ret[formats.index("%s%d%d" % (data_type, i, j))] = c_arr
return freq, ret
def create_report(self, name, expr):
existing = self.parent._reporter.GetAllReportNames()
name = increment_name(name, existing)
var_names = self.parent.parent.get_variable_names()
var_args = sum([["%s:=" % v_name, ["Nominal"]] for v_name in var_names], [])
self.parent._reporter.CreateReport(
name, "Modal Solution Data", "Rectangular Plot",
self.solution_name, ["Domain:=", "Sweep"], ["Freq:=", ["All"]] + var_args,
["X Component:=", "Freq", "Y Component:=", [expr]], [])
return HfssReport(self.parent.parent, name)
def get_report_arrays(self, expr):
r = self.create_report("Temp", expr)
return r.get_arrays()
class HfssReport(COMWrapper):
def __init__(self, design, name):
"""
:type design: HfssDesign
:type name: str
"""
super(HfssReport, self).__init__()
self.parent_design = design
self.name = name
def export_to_file(self, filename):
filepath = os.path.abspath(filename)
self.parent_design._reporter.ExportToFile(self.name, filepath)
def get_arrays(self):
fn = tempfile.mktemp(suffix=".csv")
self.export_to_file(fn)
return numpy.loadtxt(fn, skiprows=1, delimiter=',').transpose()
class HfssModeler(COMWrapper):
def __init__(self, design, modeler, boundaries):
"""
:type design: HfssDesign
"""
super(HfssModeler, self).__init__()
self.parent = design
self._modeler = modeler
self._boundaries = boundaries
def set_units(self, units, rescale=True):
self._modeler.SetModelUnits(["NAME:Units Parameter", "Units:=", units, "Rescale:=", rescale])
def _attributes_array(self, name=None, nonmodel=False, color=None, transparency=0.9, material=None):
arr = ["NAME:Attributes", "PartCoordinateSystem:=", "Global"]
if name is not None:
arr.extend(["Name:=", name])
if nonmodel:
arr.extend(["Flags:=", "NonModel"])
if color is not None:
arr.extend(["Color:=", "(%d %d %d)" % color])
if transparency is not None:
arr.extend(["Transparency:=", transparency])
if material is not None:
arr.extend(["MaterialName:=", material])
return arr
def _selections_array(self, *names):
return ["NAME:Selections", "Selections:=", ",".join(names)]
def draw_box_corner(self, pos, size, **kwargs):
name = self._modeler.CreateBox(
["NAME:BoxParameters",
"XPosition:=", pos[0],
"YPosition:=", pos[1],
"ZPosition:=", pos[2],
"XSize:=", size[0],
"YSize:=", size[1],
"ZSize:=", size[2]],
self._attributes_array(**kwargs)
)
return Box(name, self, pos, size)
def draw_box_center(self, pos, size, **kwargs):
corner_pos = [var(p) - var(s)/2 for p, s in zip(pos, size)]
return self.draw_box_corner(corner_pos, size, **kwargs)
def draw_rect_corner(self, pos, x_size=0, y_size=0, z_size=0, **kwargs):
size = [x_size, y_size, z_size]
assert 0 in size
axis = "XYZ"[size.index(0)]
w_idx, h_idx = {
'X': (1, 2),
'Y': (2, 0),
'Z': (0, 1)
}[axis]
name = self._modeler.CreateRectangle(
["NAME:RectangleParameters",
"XStart:=", pos[0],
"YStart:=", pos[1],
"ZStart:=", pos[2],
"Width:=", size[w_idx],
"Height:=", size[h_idx],
"WhichAxis:=", axis],
self._attributes_array(**kwargs)
)
return Rect(name, self, pos, size)
def draw_rect_center(self, pos, x_size=0, y_size=0, z_size=0, **kwargs):
corner_pos = [var(p) - var(s)/2 for p, s in zip(pos, [x_size, y_size, z_size])]
return self.draw_rect_corner(corner_pos, x_size, y_size, z_size, **kwargs)
def draw_cylinder(self, pos, radius, height, axis, **kwargs):
assert axis in "XYZ"
return self._modeler.CreateCylinder(
["NAME:CylinderParameters",
"XCenter:=", pos[0],
"YCenter:=", pos[1],
"ZCenter:=", pos[2],
"Radius:=", radius,
"Height:=", height,
"WhichAxis:=", axis,
"NumSides:=", 0],
self._attributes_array(**kwargs))
def draw_cylinder_center(self, pos, radius, height, axis, **kwargs):
axis_idx = ["X", "Y", "Z"].index(axis)
edge_pos = copy(pos)
edge_pos[axis_idx] = var(pos[axis_idx]) - var(height)/2
return self.draw_cylinder(edge_pos, radius, height, axis, **kwargs)
def unite(self, names, keep_originals=False):
self._modeler.Unite(
self._selections_array(*names),
["NAME:UniteParameters", "KeepOriginals:=", keep_originals]
)
return names[0]
def intersect(self, names, keep_originals=False):
self._modeler.Intersect(
self._selections_array(*names),
["NAME:IntersectParameters", "KeepOriginals:=", keep_originals]
)
return names[0]
def translate(self, name, vector):
self._modeler.Move(
self._selections_array(name),
["NAME:TranslateParameters",
"TranslateVectorX:=", vector[0],
"TranslateVectorY:=", vector[1],
"TranslateVectorZ:=", vector[2]]
)
def make_perfect_E(self, *objects):
name = increment_name("PerfE", self._boundaries.GetBoundaries())
self._boundaries.AssignPerfectE(["NAME:"+name, "Objects:=", objects, "InfGroundPlane:=", False])
def _make_lumped_rlc(self, r, l, c, start, end, obj_arr, name="LumpLRC"):
name = increment_name(name, self._boundaries.GetBoundaries())
params = ["NAME:"+name]
params += obj_arr
params.append(["NAME:CurrentLine", "Start:=", start, "End:=", end])
params += ["UseResist:=", r != 0, "Resistance:=", r,
"UseInduct:=", l != 0, "Inductance:=", l,
"UseCap:=", c != 0, "Capacitance:=", c]
self._boundaries.AssignLumpedRLC(params)
def _make_lumped_port(self, start, end, obj_arr, z0="50ohm", name="LumpPort"):
name = increment_name(name, self._boundaries.GetBoundaries())
params = ["NAME:"+name]
params += obj_arr
params += ["RenormalizeAllTerminals:=", True, "DoDeembed:=", False,
["NAME:Modes", ["NAME:Mode1", "ModeNum:=", 1, "UseIntLine:=", True,
["NAME:IntLine", "Start:=", start, "End:=", end],
"CharImp:=", "Zpi", "AlignmentGroup:=", 0, "RenormImp:=", "50ohm"]],
"ShowReporterFilter:=", False, "ReporterFilter:=", [True],
"FullResistance:=", "50ohm", "FullReactance:=", "0ohm"]
self._boundaries.AssignLumpedPort(params)
def get_face_ids(self, obj):
        return self._modeler.GetFaceIDs(obj)
def eval_expr(self, expr, units="mm"):
if not isinstance(expr, str):
return expr
return self.parent.eval_expr(expr, units)
class ModelEntity(str, HfssPropertyObject):
prop_tab = "Geometry3DCmdTab"
model_command = None
transparency = make_float_prop("Transparent", prop_tab="Geometry3DAttributeTab", prop_server=lambda self: self)
material = make_str_prop("Material", prop_tab="Geometry3DAttributeTab", prop_server=lambda self: self)
coordinate_system = make_str_prop("Coordinate System")
def __new__(self, val, *args, **kwargs):
return str.__new__(self, val)
def __init__(self, val, modeler):
"""
:type val: str
:type modeler: HfssModeler
"""
        super(ModelEntity, self).__init__()  # 'val' is intentionally not passed on: str.__init__ takes no extra arguments
self.modeler = modeler
self.prop_server = self + ":" + self.model_command + ":1"
class Box(ModelEntity):
model_command = "CreateBox"
position = make_float_prop("Position")
x_size = make_float_prop("XSize")
y_size = make_float_prop("YSize")
z_size = make_float_prop("ZSize")
def __init__(self, name, modeler, corner, size):
"""
:type name: str
:type modeler: HfssModeler
        :type corner: [(VariableString, VariableString, VariableString)]
        :type size: [(VariableString, VariableString, VariableString)]
"""
super(Box, self).__init__(name, modeler)
self.modeler = modeler
self.prop_holder = modeler._modeler
self.corner = corner
self.size = size
self.center = [c + s/2 for c, s in zip(corner, size)]
faces = modeler.get_face_ids(self)
self.z_back_face, self.z_front_face = faces[0], faces[1]
self.y_back_face, self.y_front_face = faces[2], faces[4]
self.x_back_face, self.x_front_face = faces[3], faces[5]
class Rect(ModelEntity):
model_command = "CreateRectangle"
def __init__(self, name, modeler, corner, size):
super(Rect, self).__init__(name, modeler)
self.corner = corner
self.size = size
self.center = [c + s/2 if s else c for c, s in zip(corner, size)]
def make_center_line(self, axis):
axis_idx = ["x", "y", "z"].index(axis.lower())
start = [c for c in self.center]
start[axis_idx] -= self.size[axis_idx]/2
start = [self.modeler.eval_expr(s) for s in start]
end = [c for c in self.center]
end[axis_idx] += self.size[axis_idx]/2
end = [self.modeler.eval_expr(s) for s in end]
return start, end
def make_rlc_boundary(self, axis, r=0, l=0, c=0, name="LumpLRC"):
start, end = self.make_center_line(axis)
self.modeler._make_lumped_rlc(r, l, c, start, end, ["Objects:=", [self]], name=name)
def make_lumped_port(self, axis, z0="50ohm", name="LumpPort"):
start, end = self.make_center_line(axis)
self.modeler._make_lumped_port(start, end, ["Objects:=", [self]], z0=z0, name=name)
class HfssFieldsCalc(COMWrapper):
def __init__(self, setup):
"""
:type setup: HfssSetup
"""
super(HfssFieldsCalc, self).__init__()
self.parent = setup
self.Mag_E = NamedCalcObject("Mag_E", setup)
self.Mag_H = NamedCalcObject("Mag_H", setup)
self.Mag_Jsurf = NamedCalcObject("Mag_Jsurf", setup)
self.Mag_Jvol = NamedCalcObject("Mag_Jvol", setup)
self.Vector_E = NamedCalcObject("Vector_E", setup)
self.Vector_H = NamedCalcObject("Vector_H", setup)
self.Vector_Jsurf = NamedCalcObject("Vector_Jsurf", setup)
self.Vector_Jvol = NamedCalcObject("Vector_Jvol", setup)
self.ComplexMag_E = NamedCalcObject("ComplexMag_E", setup)
self.ComplexMag_H = NamedCalcObject("ComplexMag_H", setup)
self.ComplexMag_Jsurf = NamedCalcObject("ComplexMag_Jsurf", setup)
self.ComplexMag_Jvol = NamedCalcObject("ComplexMag_Jvol", setup)
self.P_J = NamedCalcObject("P_J", setup)
def clear_named_expressions(self):
self.parent.parent._fields_calc.ClearAllNamedExpr()
class CalcObject(COMWrapper):
def __init__(self, stack, setup):
"""
:type stack: [(str, str)]
:type setup: HfssSetup
"""
super(CalcObject, self).__init__()
self.stack = stack
self.setup = setup
self.calc_module = setup.parent._fields_calc
def _bin_op(self, other, op):
if isinstance(other, (int, float)):
other = ConstantCalcObject(other, self.setup)
stack = self.stack + other.stack
stack.append(("CalcOp", op))
return CalcObject(stack, self.setup)
def _unary_op(self, op):
stack = self.stack[:]
stack.append(("CalcOp", op))
return CalcObject(stack, self.setup)
def __add__(self, other):
return self._bin_op(other, "+")
def __radd__(self, other):
return self + other
def __sub__(self, other):
return self._bin_op(other, "-")
def __rsub__(self, other):
return (-self) + other
def __mul__(self, other):
return self._bin_op(other, "*")
def __rmul__(self, other):
return self*other
def __div__(self, other):
return self._bin_op(other, "/")
def __rdiv__(self, other):
other = ConstantCalcObject(other, self.setup)
return other/self
def __pow__(self, other):
return self._bin_op(other, "Pow")
def dot(self, other):
return self._bin_op(other,"Dot")
def __neg__(self):
return self._unary_op("Neg")
def __abs__(self):
return self._unary_op("Abs")
def __mag__(self):
return self._unary_op("Mag")
def mag(self):
return self._unary_op("Mag")
def conj(self):
return self._unary_op("Conj") # make this right
def scalar_x(self):
return self._unary_op("ScalarX")
def scalar_y(self):
return self._unary_op("ScalarY")
def scalar_z(self):
return self._unary_op("ScalarZ")
def norm_2(self):
return (self.__mag__()).__pow__(2)
#return self._unary_op("ScalarX")**2+self._unary_op("ScalarY")**2+self._unary_op("ScalarZ")**2
def real(self):
return self._unary_op("Real")
def imag(self):
return self._unary_op("Imag")
def _integrate(self, name, type):
stack = self.stack + [(type, name), ("CalcOp", "Integrate")]
return CalcObject(stack, self.setup)
def getQty(self, name):
stack = self.stack + [("EnterQty", name)]
return CalcObject(stack, self.setup)
def integrate_line(self, name):
return self._integrate(name, "EnterLine")
def integrate_line_tangent(self, name):
        ''' Integrate the component of a vector expression tangent to a line.
        name: name of the line to integrate over. '''
self.stack = self.stack + [("EnterLine", name),
("CalcOp", "Tangent"),
("CalcOp", "Dot")]
return self.integrate_line(name)
def integrate_surf(self, name="AllObjects"):
return self._integrate(name, "EnterSurf")
def integrate_vol(self, name="AllObjects"):
return self._integrate(name, "EnterVol")
def times_eps(self):
stack = self.stack + [("ClcMaterial", ("Permittivity (epsi)", "mult"))]
return CalcObject(stack, self.setup)
def times_mu(self):
stack = self.stack + [("ClcMaterial", ("Permeability (mu)", "mult"))]
return CalcObject(stack, self.setup)
def write_stack(self):
for fn, arg in self.stack:
if numpy.size(arg)>1:
getattr(self.calc_module, fn)(*arg)
else:
getattr(self.calc_module, fn)(arg)
def save_as(self, name):
"""if the object already exists, try clearing your
named expressions first with fields.clear_named_expressions"""
self.write_stack()
self.calc_module.AddNamedExpr(name)
return NamedCalcObject(name, self.setup)
def evaluate(self, phase=0, lv=None, print_debug = False):#, n_mode=1):
self.write_stack()
if print_debug:
print '---------------------'
print 'writing to stack: OK'
print '-----------------'
#self.calc_module.set_mode(n_mode, 0)
setup_name = self.setup.solution_name
if lv is not None:
args = lv
else:
args = []
args.append("Phase:=")
args.append(str(int(phase)) + "deg")
if isinstance(self.setup, HfssDMSetup):
args.extend(["Freq:=", self.setup.solution_freq])
self.calc_module.ClcEval(setup_name, args)
return float(self.calc_module.GetTopEntryValue(setup_name, args)[0])
class NamedCalcObject(CalcObject):
def __init__(self, name, setup):
self.name = name
stack = [("CopyNamedExprToStack", name)]
super(NamedCalcObject, self).__init__(stack, setup)
class ConstantCalcObject(CalcObject):
def __init__(self, num, setup):
stack = [("EnterScalar", num)]
super(ConstantCalcObject, self).__init__(stack, setup)
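# Illustrative sketch (not part of the original module): computing an electric
# field energy integral with the stack-based calculator, assuming `setup` is an
# HfssSetup for an already-solved design. The variable names are hypothetical.
#
#   fields = setup.get_fields()
#   U_E = (fields.ComplexMag_E ** 2).times_eps().integrate_vol(name="AllObjects")
#   print U_E.evaluate()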
def get_active_project():
''' If you see the error:
"The requested operation requires elevation."
then you need to run your python as an admin.
'''
import ctypes, os
try:
is_admin = os.getuid() == 0
except AttributeError:
is_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0
if not is_admin:
        print '\033[93m WARNING: you are not running as an admin! You need to run as an admin, otherwise you will probably get an error next. \033[0m'
app = HfssApp()
desktop = app.get_app_desktop()
return desktop.get_active_project()
def get_active_design():
project = get_active_project()
return project.get_active_design()
def get_report_arrays(name):
d = get_active_design()
r = HfssReport(d, name)
return r.get_arrays()
def load_HFSS_project(proj_name, project_path, extension = '.aedt'): #2016 version
    ''' proj_name == None => use the currently active project.
    (Make sure to run as admin.) '''
project_path += proj_name + extension
app = HfssApp()
desktop = app.get_app_desktop()
if proj_name is not None:
if proj_name in desktop.get_project_names():
desktop.set_active_project(proj_name)
project = desktop.get_active_project()
else:
project = desktop.open_project(project_path)
else:
project = desktop.get_active_project()
return app, desktop, project
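if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module. It assumes a running
    # HFSS desktop with an open, already-solved active project; the report name
    # "S21 Plot" is a hypothetical placeholder.
    data = get_report_arrays("S21 Plot")
    print 'report array shape:', data.shape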
|
ldoktor/avocado-vt
|
refs/heads/master
|
virttest/yumrepo.py
|
38
|
'''
This module implements classes that allow a user to create, enable and disable
YUM repositories on the system.
'''
import os
__all__ = ['REPO_DIR', 'YumRepo']
REPO_DIR = '/etc/yum.repos.d'
class YumRepo(object):
'''
Represents a YUM repository
The goal of this class is not to give access to all features of a YUM
Repository, but to provide a simple way to configure a valid one during
a test run.
Sample usage:
>>> mainrepo = YumRepo("main", "http://download.project.org/repo",
"/etc/yum.repos.d/main.repo")
Or to use a default path:
>>> mainrepo = YumRepo("main", 'http://download.project.org/repo')
And then:
>>> mainrepo.save()
When it comes to the repo URL, currently there's no support for setting a
mirrorlist, only a baseurl.
'''
def __init__(self, name, baseurl, path=None):
'''
        Initializes a new YumRepo object
        If path is not given, it defaults to "%(name)s.repo" inside
        the default YUM repo directory.
        :param name: the repository name
        :param baseurl: the repository base URL
        :param path: the full path of the file that defines this repository
'''
self.name = name
self.path = path
if self.path is None:
self.path = self._get_path_from_name(self.name)
self.baseurl = baseurl
self.enabled = True
self.gpgcheck = False
self.gpgkey = ''
@classmethod
def _get_path_from_name(cls, name):
'''
        Returns the default path for a repo of a given name
:param name: the repository name
:return: the default repo file path for the given name
'''
return os.path.join(REPO_DIR, "%s.repo" % name)
@classmethod
def _yum_value_for_boolean(cls, boolean):
'''
Returns a boolean in YUM acceptable syntax
'''
if boolean:
return '1'
else:
return '0'
def render(self):
'''
Renders the repo file
Yes, we could use ConfigParser for this, but it produces files with
        spaces between keys and values, which looks awkward compared to YUM defaults.
'''
template = ("[%(name)s]\n"
"name=%(name)s\n"
"baseurl=%(baseurl)s\n"
"enabled=%(enabled)s\n"
"gpgcheck=%(gpgcheck)s\n"
"gpgkey=%(gpgkey)s\n")
values = {'name': self.name,
'baseurl': self.baseurl,
'enabled': self._yum_value_for_boolean(self.enabled),
'gpgcheck': self._yum_value_for_boolean(self.gpgcheck),
'gpgkey': self.gpgkey}
return template % values
def save(self):
'''
Saves the repo file
'''
        with open(self.path, 'w') as output_file:
            output_file.write(self.render())
def remove(self):
'''
Removes the repo file
'''
if os.path.exists(self.path):
os.unlink(self.path)
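if __name__ == '__main__':
    # Illustrative sketch, not part of the original module: the repository name
    # and URL below are made-up placeholders, and save()/remove() require root
    # privileges because they touch /etc/yum.repos.d.
    repo = YumRepo("example", "http://download.example.org/repo")
    print(repo.render())
    # repo.save()    # would write /etc/yum.repos.d/example.repo
    # repo.remove()  # would delete it again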
|
jmarcelino/pycom-micropython
|
refs/heads/master
|
tests/basics/set_symmetric_difference.py
|
118
|
print(sorted({1,2}.symmetric_difference({2,3})))
print(sorted({1,2}.symmetric_difference([2,3])))
s = {1,2}
print(s.symmetric_difference_update({2,3}))
print(sorted(s))
|
ahmadiga/min_edx
|
refs/heads/master
|
common/test/acceptance/pages/lms/login.py
|
205
|
"""
Login page for the LMS.
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from . import BASE_URL
from .dashboard import DashboardPage
class LoginPage(PageObject):
"""
Login page for the LMS.
"""
url = BASE_URL + "/login"
def is_browser_on_page(self):
return any([
'log in' in title.lower()
for title in self.q(css='span.title-super').text
])
def login(self, email, password):
"""
Attempt to log in using `email` and `password`.
"""
self.provide_info(email, password)
self.submit()
def provide_info(self, email, password):
"""
Fill in login info.
`email` and `password` are the user's credentials.
"""
EmptyPromise(self.q(css='input#email').is_present, "Click ready").fulfill()
EmptyPromise(self.q(css='input#password').is_present, "Click ready").fulfill()
self.q(css='input#email').fill(email)
self.q(css='input#password').fill(password)
self.wait_for_ajax()
def submit(self):
"""
        Submit the login form and wait for the dashboard to load.
"""
self.q(css='button#submit').first.click()
# The next page is the dashboard; make sure it loads
dashboard = DashboardPage(self.browser)
dashboard.wait_for_page()
return dashboard
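# Illustrative usage within a bok_choy test (sketch only, not part of this page
# object): `browser` is the WebDriver instance supplied by the test harness, and
# the credentials are hypothetical placeholders.
#
#   page = LoginPage(browser)
#   page.visit()
#   dashboard = page.login('staff@example.com', 'edx')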
|
moertoe1/moertoe123.github.io
|
refs/heads/master
|
src/siism2015/wsgi.py
|
1
|
"""
WSGI config for siism2015 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "siism2015.settings.production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Wrap werkzeug debugger if DEBUG is on
from django.conf import settings
if settings.DEBUG:
try:
import django.views.debug
import six
from werkzeug.debug import DebuggedApplication
def null_technical_500_response(request, exc_type, exc_value, tb):
six.reraise(exc_type, exc_value, tb)
django.views.debug.technical_500_response = null_technical_500_response
application = DebuggedApplication(application, evalex=True)
except ImportError:
pass
|
taarifa/taarifa_backend
|
refs/heads/master
|
data/query_waterpoints.py
|
1
|
#!/usr/bin/env python
import requests
import json
def get_all_reports():
url = 'http://localhost:5000/reports'
params = {'service_code': 'wp001'}
response = requests.get(url, params=params)
print response.url
print response.ok
data = json.loads(response.text)
reports = data['result']
print reports[0]
print len(reports)
if __name__ == '__main__':
get_all_reports()
|
ericholscher/django
|
refs/heads/master
|
django/contrib/auth/tests/test_signals.py
|
227
|
from django.contrib.auth import signals
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
@skipIfCustomUser
@override_settings(USE_TZ=False, PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SignalTestCase(TestCase):
urls = 'django.contrib.auth.tests.urls'
fixtures = ['authtestdata.json']
def listener_login(self, user, **kwargs):
self.logged_in.append(user)
def listener_logout(self, user, **kwargs):
self.logged_out.append(user)
def listener_login_failed(self, sender, credentials, **kwargs):
self.login_failed.append(credentials)
def setUp(self):
"""Set up the listeners and reset the logged in/logged out counters"""
self.logged_in = []
self.logged_out = []
self.login_failed = []
signals.user_logged_in.connect(self.listener_login)
signals.user_logged_out.connect(self.listener_logout)
signals.user_login_failed.connect(self.listener_login_failed)
def tearDown(self):
"""Disconnect the listeners"""
signals.user_logged_in.disconnect(self.listener_login)
signals.user_logged_out.disconnect(self.listener_logout)
signals.user_login_failed.disconnect(self.listener_login_failed)
def test_login(self):
# Only a successful login will trigger the success signal.
self.client.login(username='testclient', password='bad')
self.assertEqual(len(self.logged_in), 0)
self.assertEqual(len(self.login_failed), 1)
self.assertEqual(self.login_failed[0]['username'], 'testclient')
# verify the password is cleansed
self.assertTrue('***' in self.login_failed[0]['password'])
# Like this:
self.client.login(username='testclient', password='password')
self.assertEqual(len(self.logged_in), 1)
self.assertEqual(self.logged_in[0].username, 'testclient')
# Ensure there were no more failures.
self.assertEqual(len(self.login_failed), 1)
def test_logout_anonymous(self):
# The log_out function will still trigger the signal for anonymous
# users.
self.client.get('/logout/next_page/')
self.assertEqual(len(self.logged_out), 1)
self.assertEqual(self.logged_out[0], None)
def test_logout(self):
self.client.login(username='testclient', password='password')
self.client.get('/logout/next_page/')
self.assertEqual(len(self.logged_out), 1)
self.assertEqual(self.logged_out[0].username, 'testclient')
def test_update_last_login(self):
"""Ensure that only `last_login` is updated in `update_last_login`"""
user = User.objects.get(pk=3)
old_last_login = user.last_login
user.username = "This username shouldn't get saved"
request = RequestFactory().get('/login')
signals.user_logged_in.send(sender=user.__class__, request=request,
user=user)
user = User.objects.get(pk=3)
self.assertEqual(user.username, 'staff')
self.assertNotEqual(user.last_login, old_last_login)
|
crackerhead/nemio
|
refs/heads/master
|
lib/python2.7/site-packages/werkzeug/useragents.py
|
148
|
# -*- coding: utf-8 -*-
"""
werkzeug.useragents
~~~~~~~~~~~~~~~~~~~
This module provides a helper to inspect user agent strings. This module
is far from complete but should work for most of the currently available
browsers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
class UserAgentParser(object):
"""A simple user agent parser. Used by the `UserAgent`."""
platforms = (
('cros', 'chromeos'),
('iphone|ios', 'iphone'),
('ipad', 'ipad'),
(r'darwin|mac|os\s*x', 'macos'),
('win', 'windows'),
(r'android', 'android'),
(r'x11|lin(\b|ux)?', 'linux'),
('(sun|i86)os', 'solaris'),
(r'nintendo\s+wii', 'wii'),
('irix', 'irix'),
('hp-?ux', 'hpux'),
('aix', 'aix'),
('sco|unix_sv', 'sco'),
('bsd', 'bsd'),
('amiga', 'amiga'),
('blackberry|playbook', 'blackberry'),
        ('symbian', 'symbian')
)
browsers = (
('googlebot', 'google'),
('msnbot', 'msn'),
('yahoo', 'yahoo'),
('ask jeeves', 'ask'),
(r'aol|america\s+online\s+browser', 'aol'),
('opera', 'opera'),
('chrome', 'chrome'),
('firefox|firebird|phoenix|iceweasel', 'firefox'),
('galeon', 'galeon'),
('safari', 'safari'),
('webkit', 'webkit'),
('camino', 'camino'),
('konqueror', 'konqueror'),
('k-meleon', 'kmeleon'),
('netscape', 'netscape'),
(r'msie|microsoft\s+internet\s+explorer|trident/.+? rv:', 'msie'),
('lynx', 'lynx'),
('links', 'links'),
('seamonkey|mozilla', 'seamonkey')
)
_browser_version_re = r'(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?(?i)'
_language_re = re.compile(
r'(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|'
r'(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)'
)
def __init__(self):
self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms]
self.browsers = [(b, re.compile(self._browser_version_re % a))
for a, b in self.browsers]
def __call__(self, user_agent):
for platform, regex in self.platforms:
match = regex.search(user_agent)
if match is not None:
break
else:
platform = None
for browser, regex in self.browsers:
match = regex.search(user_agent)
if match is not None:
version = match.group(1)
break
else:
browser = version = None
match = self._language_re.search(user_agent)
if match is not None:
language = match.group(1) or match.group(2)
else:
language = None
return platform, browser, version, language
class UserAgent(object):
"""Represents a user agent. Pass it a WSGI environment or a user agent
string and you can inspect some of the details from the user agent
string via the attributes. The following attributes exist:
.. attribute:: string
the raw user agent string
.. attribute:: platform
the browser platform. The following platforms are currently
recognized:
- `aix`
- `amiga`
- `android`
- `bsd`
- `chromeos`
- `hpux`
- `iphone`
- `ipad`
- `irix`
- `linux`
- `macos`
- `sco`
- `solaris`
- `wii`
- `windows`
.. attribute:: browser
the name of the browser. The following browsers are currently
recognized:
- `aol` *
- `ask` *
- `camino`
- `chrome`
- `firefox`
- `galeon`
- `google` *
- `kmeleon`
- `konqueror`
- `links`
- `lynx`
- `msie`
- `msn`
- `netscape`
- `opera`
- `safari`
- `seamonkey`
- `webkit`
- `yahoo` *
    (Browsers marked with a star (``*``) are crawlers.)
.. attribute:: version
the version of the browser
.. attribute:: language
the language of the browser
"""
_parser = UserAgentParser()
def __init__(self, environ_or_string):
if isinstance(environ_or_string, dict):
environ_or_string = environ_or_string.get('HTTP_USER_AGENT', '')
self.string = environ_or_string
self.platform, self.browser, self.version, self.language = \
self._parser(environ_or_string)
def to_header(self):
return self.string
def __str__(self):
return self.string
def __nonzero__(self):
return bool(self.browser)
__bool__ = __nonzero__
def __repr__(self):
return '<%s %r/%s>' % (
self.__class__.__name__,
self.browser,
self.version
)
# conceptionally this belongs in this module but because we want to lazily
# load the user agent module (which happens in wrappers.py) we have to import
# it afterwards. The class itself has the module set to this module so
# pickle, inspect and similar modules treat the object as if it was really
# implemented here.
from werkzeug.wrappers import UserAgentMixin
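if __name__ == '__main__':
    # Quick illustrative check, not part of Werkzeug itself: parse a typical
    # desktop Chrome user agent string and print the fields detected above.
    _ua = UserAgent('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                    '(KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36')
    print('%s / %s %s (language=%s)' % (_ua.platform, _ua.browser, _ua.version, _ua.language))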
|
googleapis/googleapis-gen
|
refs/heads/master
|
google/cloud/gkehub/v1alpha2/gkehub-v1alpha2-py/google/cloud/gkehub_v1alpha2/types/membership.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.gkehub.v1alpha2',
manifest={
'Membership',
'MembershipEndpoint',
'KubernetesResource',
'ResourceOptions',
'GkeCluster',
'KubernetesMetadata',
'Authority',
'MembershipState',
'ListMembershipsRequest',
'ListMembershipsResponse',
'GetMembershipRequest',
'CreateMembershipRequest',
'DeleteMembershipRequest',
'UpdateMembershipRequest',
'GenerateConnectManifestRequest',
'GenerateConnectManifestResponse',
'ConnectAgentResource',
'ResourceManifest',
'TypeMeta',
'InitializeHubRequest',
'InitializeHubResponse',
'OperationMetadata',
},
)
class Membership(proto.Message):
r"""Membership contains information about a member cluster.
Attributes:
name (str):
Output only. The full, unique name of this Membership
resource in the format
``projects/*/locations/*/memberships/{membership_id}``, set
during creation.
``membership_id`` must be a valid RFC 1123 compliant DNS
label:
1. At most 63 characters in length
2. It must consist of lower case alphanumeric characters or
``-``
3. It must start and end with an alphanumeric character
Which can be expressed as the regex:
``[a-z0-9]([-a-z0-9]*[a-z0-9])?``, with a maximum length of
63 characters.
labels (Sequence[google.cloud.gkehub_v1alpha2.types.Membership.LabelsEntry]):
Optional. GCP labels for this membership.
description (str):
Output only. Description of this membership, limited to 63
characters. Must match the regex:
``[a-zA-Z0-9][a-zA-Z0-9_\-\.\ ]*``
This field is present for legacy purposes.
endpoint (google.cloud.gkehub_v1alpha2.types.MembershipEndpoint):
Optional. Endpoint information to reach this
member.
state (google.cloud.gkehub_v1alpha2.types.MembershipState):
Output only. State of the Membership
resource.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the Membership was created.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the Membership was last
updated.
delete_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the Membership was deleted.
external_id (str):
Optional. An externally-generated and managed ID for this
Membership. This ID may be modified after creation, but this
is not recommended. For GKE clusters, external_id is managed
by the Hub API and updates will be ignored.
The ID must match the regex:
``[a-zA-Z0-9][a-zA-Z0-9_\-\.]*``
If this Membership represents a Kubernetes cluster, this
value should be set to the UID of the ``kube-system``
namespace object.
authority (google.cloud.gkehub_v1alpha2.types.Authority):
Optional. How to identify workloads from this
Membership. See the documentation on Workload
Identity for more details:
https://cloud.google.com/kubernetes-
engine/docs/how-to/workload-identity
last_connection_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. For clusters using Connect, the
timestamp of the most recent connection
established with Google Cloud. This time is
updated every several minutes, not continuously.
For clusters that do not use GKE Connect, or
that have never connected successfully, this
field will be unset.
unique_id (str):
Output only. Google-generated UUID for this resource. This
is unique across all Membership resources. If a Membership
resource is deleted and another resource with the same name
is created, it gets a different unique_id.
infrastructure_type (google.cloud.gkehub_v1alpha2.types.Membership.InfrastructureType):
Optional. The infrastructure type this
Membership is running on.
"""
class InfrastructureType(proto.Enum):
r"""Specifies the infrastructure type of a Membership.
Infrastructure type is used by Hub to control infrastructure-
specific behavior, including pricing.
Each GKE distribution (on-GCP, on-Prem, on-X,...) will set this
field automatically, but Attached Clusters customers should
specify a type during registration.
"""
INFRASTRUCTURE_TYPE_UNSPECIFIED = 0
ON_PREM = 1
MULTI_CLOUD = 2
name = proto.Field(
proto.STRING,
number=1,
)
labels = proto.MapField(
proto.STRING,
proto.STRING,
number=2,
)
description = proto.Field(
proto.STRING,
number=3,
)
endpoint = proto.Field(
proto.MESSAGE,
number=4,
oneof='type',
message='MembershipEndpoint',
)
state = proto.Field(
proto.MESSAGE,
number=5,
message='MembershipState',
)
create_time = proto.Field(
proto.MESSAGE,
number=6,
message=timestamp_pb2.Timestamp,
)
update_time = proto.Field(
proto.MESSAGE,
number=7,
message=timestamp_pb2.Timestamp,
)
delete_time = proto.Field(
proto.MESSAGE,
number=8,
message=timestamp_pb2.Timestamp,
)
external_id = proto.Field(
proto.STRING,
number=9,
)
authority = proto.Field(
proto.MESSAGE,
number=10,
message='Authority',
)
last_connection_time = proto.Field(
proto.MESSAGE,
number=11,
message=timestamp_pb2.Timestamp,
)
unique_id = proto.Field(
proto.STRING,
number=12,
)
infrastructure_type = proto.Field(
proto.ENUM,
number=13,
enum=InfrastructureType,
)
class MembershipEndpoint(proto.Message):
r"""MembershipEndpoint contains information needed to contact a
Kubernetes API, endpoint and any additional Kubernetes metadata.
Attributes:
gke_cluster (google.cloud.gkehub_v1alpha2.types.GkeCluster):
Optional. GKE-specific information. Only
present if this Membership is a GKE cluster.
kubernetes_metadata (google.cloud.gkehub_v1alpha2.types.KubernetesMetadata):
Output only. Useful Kubernetes-specific
metadata.
kubernetes_resource (google.cloud.gkehub_v1alpha2.types.KubernetesResource):
Optional. The in-cluster Kubernetes Resources that should be
applied for a correctly registered cluster, in the steady
state. These resources:
- Ensure that the cluster is exclusively registered to one
and only one Hub Membership.
- Propagate Workload Pool Information available in the
Membership Authority field.
- Ensure proper initial configuration of default Hub
Features.
"""
gke_cluster = proto.Field(
proto.MESSAGE,
number=1,
message='GkeCluster',
)
kubernetes_metadata = proto.Field(
proto.MESSAGE,
number=2,
message='KubernetesMetadata',
)
kubernetes_resource = proto.Field(
proto.MESSAGE,
number=3,
message='KubernetesResource',
)
class KubernetesResource(proto.Message):
r"""KubernetesResource contains the YAML manifests and
configuration for Membership Kubernetes resources in the
cluster. After CreateMembership or UpdateMembership, these
resources should be re-applied in the cluster.
Attributes:
membership_cr_manifest (str):
Input only. The YAML representation of the
Membership CR. This field is ignored for GKE
clusters where Hub can read the CR directly.
Callers should provide the CR that is currently
present in the cluster during Create or Update,
or leave this field empty if none exists. The CR
manifest is used to validate the cluster has not
been registered with another Membership.
membership_resources (Sequence[google.cloud.gkehub_v1alpha2.types.ResourceManifest]):
Output only. Additional Kubernetes resources
that need to be applied to the cluster after
Membership creation, and after every update.
This field is only populated in the Membership
returned from a successful long-running
operation from CreateMembership or
UpdateMembership. It is not populated during
normal GetMembership or ListMemberships
requests. To get the resource manifest after the
initial registration, the caller should make a
UpdateMembership call with an empty field mask.
connect_resources (Sequence[google.cloud.gkehub_v1alpha2.types.ResourceManifest]):
Output only. The Kubernetes resources for
installing the GKE Connect agent.
This field is only populated in the Membership
returned from a successful long-running
operation from CreateMembership or
UpdateMembership. It is not populated during
normal GetMembership or ListMemberships
requests. To get the resource manifest after the
initial registration, the caller should make a
UpdateMembership call with an empty field mask.
resource_options (google.cloud.gkehub_v1alpha2.types.ResourceOptions):
Optional. Options for Kubernetes resource
generation.
"""
membership_cr_manifest = proto.Field(
proto.STRING,
number=1,
)
membership_resources = proto.RepeatedField(
proto.MESSAGE,
number=3,
message='ResourceManifest',
)
connect_resources = proto.RepeatedField(
proto.MESSAGE,
number=4,
message='ResourceManifest',
)
resource_options = proto.Field(
proto.MESSAGE,
number=5,
message='ResourceOptions',
)
class ResourceOptions(proto.Message):
r"""ResourceOptions represent options for Kubernetes resource
generation.
Attributes:
connect_version (str):
Optional. The Connect agent version to use for
connect_resources. Defaults to the latest GKE Connect
version. The version must be a currently supported version,
obsolete versions will be rejected.
v1beta1_crd (bool):
Optional. Use ``apiextensions/v1beta1`` instead of
``apiextensions/v1`` for CustomResourceDefinition resources.
This option should be set for clusters with Kubernetes
apiserver versions <1.16.
"""
connect_version = proto.Field(
proto.STRING,
number=1,
)
v1beta1_crd = proto.Field(
proto.BOOL,
number=2,
)
class GkeCluster(proto.Message):
r"""GkeCluster contains information specific to GKE clusters.
Attributes:
resource_link (str):
Immutable. Self-link of the GCP resource for
the GKE cluster. For example:
//container.googleapis.com/projects/my-
project/locations/us-west1-a/clusters/my-cluster
Zonal clusters are also supported.
"""
resource_link = proto.Field(
proto.STRING,
number=1,
)
class KubernetesMetadata(proto.Message):
r"""KubernetesMetadata provides informational metadata for
Memberships that are created from Kubernetes Endpoints
(currently, these are equivalent to Kubernetes clusters).
Attributes:
kubernetes_api_server_version (str):
Output only. Kubernetes API server version
string as reported by '/version'.
node_provider_id (str):
Output only. Node providerID as reported by the first node
in the list of nodes on the Kubernetes endpoint. On
Kubernetes platforms that support zero-node clusters (like
GKE-on-GCP), the node_count will be zero and the
node_provider_id will be empty.
node_count (int):
Output only. Node count as reported by
Kubernetes nodes resources.
vcpu_count (int):
Output only. vCPU count as reported by
Kubernetes nodes resources.
memory_mb (int):
Output only. The total memory capacity as
reported by the sum of all Kubernetes nodes
resources, defined in MB.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which these details were last
updated. This update_time is different from the
Membership-level update_time since EndpointDetails are
updated internally for API consumers.
"""
kubernetes_api_server_version = proto.Field(
proto.STRING,
number=1,
)
node_provider_id = proto.Field(
proto.STRING,
number=2,
)
node_count = proto.Field(
proto.INT32,
number=3,
)
vcpu_count = proto.Field(
proto.INT32,
number=4,
)
memory_mb = proto.Field(
proto.INT32,
number=5,
)
update_time = proto.Field(
proto.MESSAGE,
number=100,
message=timestamp_pb2.Timestamp,
)
class Authority(proto.Message):
r"""Authority encodes how Google will recognize identities from
this Membership. See the workload identity documentation for
more details: https://cloud.google.com/kubernetes-
engine/docs/how-to/workload-identity
Attributes:
issuer (str):
Optional. A JSON Web Token (JWT) issuer URI. ``issuer`` must
start with ``https://`` and be a valid URL with length <2000
characters.
If set, then Google will allow valid OIDC tokens from this
issuer to authenticate within the workload_identity_pool.
OIDC discovery will be performed on this URI to validate
tokens from the issuer, unless ``oidc_jwks`` is set.
Clearing ``issuer`` disables Workload Identity. ``issuer``
cannot be directly modified; it must be cleared (and
Workload Identity disabled) before using a new issuer (and
re-enabling Workload Identity).
oidc_jwks (bytes):
Optional. OIDC verification keys for this Membership in JWKS
format (RFC 7517).
When this field is set, OIDC discovery will NOT be performed
on ``issuer``, and instead OIDC tokens will be validated
using this field.
identity_provider (str):
Output only. An identity provider that reflects the
``issuer`` in the workload identity pool.
workload_identity_pool (str):
Output only. The name of the workload identity pool in which
``issuer`` will be recognized.
There is a single Workload Identity Pool per Hub that is
shared between all Memberships that belong to that Hub. For
a Hub hosted in {PROJECT_ID}, the workload pool format is
``{PROJECT_ID}.hub.id.goog``, although this is subject to
change in newer versions of this API.
"""
issuer = proto.Field(
proto.STRING,
number=1,
)
oidc_jwks = proto.Field(
proto.BYTES,
number=5,
)
identity_provider = proto.Field(
proto.STRING,
number=3,
)
workload_identity_pool = proto.Field(
proto.STRING,
number=4,
)
class MembershipState(proto.Message):
r"""MembershipState describes the state of a Membership resource.
Attributes:
code (google.cloud.gkehub_v1alpha2.types.MembershipState.Code):
Output only. The current state of the
Membership resource.
"""
class Code(proto.Enum):
r"""Code describes the state of a Membership resource."""
CODE_UNSPECIFIED = 0
CREATING = 1
READY = 2
DELETING = 3
UPDATING = 4
SERVICE_UPDATING = 5
code = proto.Field(
proto.ENUM,
number=1,
enum=Code,
)
class ListMembershipsRequest(proto.Message):
r"""Request message for ``GkeHub.ListMemberships`` method.
Attributes:
parent (str):
Required. The parent (project and location) where the
Memberships will be listed. Specified in the format
``projects/*/locations/*``.
page_size (int):
Optional. When requesting a 'page' of resources,
``page_size`` specifies number of resources to return. If
unspecified or set to 0, all resources will be returned.
page_token (str):
Optional. Token returned by previous call to
``ListMemberships`` which specifies the position in the list
from where to continue listing the resources.
filter (str):
Optional. Lists Memberships that match the filter
expression, following the syntax outlined in
https://google.aip.dev/160.
Examples:
- Name is ``bar`` in project ``foo-proj`` and location
``global``:
name =
"projects/foo-proj/locations/global/membership/bar"
- Memberships that have a label called ``foo``:
labels.foo:\*
- Memberships that have a label called ``foo`` whose value
is ``bar``:
labels.foo = bar
- Memberships in the CREATING state:
state = CREATING
order_by (str):
Optional. One or more fields to compare and
use to sort the output. See
https://google.aip.dev/132#ordering.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
page_size = proto.Field(
proto.INT32,
number=2,
)
page_token = proto.Field(
proto.STRING,
number=3,
)
filter = proto.Field(
proto.STRING,
number=4,
)
order_by = proto.Field(
proto.STRING,
number=5,
)
class ListMembershipsResponse(proto.Message):
r"""Response message for the ``GkeHub.ListMemberships`` method.
Attributes:
resources (Sequence[google.cloud.gkehub_v1alpha2.types.Membership]):
The list of matching Memberships.
next_page_token (str):
A token to request the next page of resources from the
``ListMemberships`` method. The value of an empty string
means that there are no more resources to return.
unreachable (Sequence[str]):
List of locations that could not be reached
while fetching this list.
"""
@property
def raw_page(self):
return self
resources = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='Membership',
)
next_page_token = proto.Field(
proto.STRING,
number=2,
)
unreachable = proto.RepeatedField(
proto.STRING,
number=3,
)
class GetMembershipRequest(proto.Message):
r"""Request message for ``GkeHub.GetMembership`` method.
Attributes:
name (str):
Required. The Membership resource name in the format
``projects/*/locations/*/memberships/*``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class CreateMembershipRequest(proto.Message):
r"""Request message for the ``GkeHub.CreateMembership`` method.
Attributes:
parent (str):
Required. The parent (project and location) where the
Memberships will be created. Specified in the format
``projects/*/locations/*``.
membership_id (str):
Required. Client chosen ID for the membership.
``membership_id`` must be a valid RFC 1123 compliant DNS
label:
1. At most 63 characters in length
2. It must consist of lower case alphanumeric characters or
``-``
3. It must start and end with an alphanumeric character
Which can be expressed as the regex:
``[a-z0-9]([-a-z0-9]*[a-z0-9])?``, with a maximum length of
63 characters.
resource (google.cloud.gkehub_v1alpha2.types.Membership):
Required. The membership to create.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
membership_id = proto.Field(
proto.STRING,
number=2,
)
resource = proto.Field(
proto.MESSAGE,
number=3,
message='Membership',
)
class DeleteMembershipRequest(proto.Message):
r"""Request message for ``GkeHub.DeleteMembership`` method.
Attributes:
name (str):
Required. The Membership resource name in the format
``projects/*/locations/*/memberships/*``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class UpdateMembershipRequest(proto.Message):
r"""Request message for ``GkeHub.UpdateMembership`` method.
Attributes:
name (str):
Required. The Membership resource name in the format
``projects/*/locations/*/memberships/*``.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. Mask of fields to update.
resource (google.cloud.gkehub_v1alpha2.types.Membership):
Required. Only fields specified in update_mask are updated.
If you specify a field in the update_mask but don't specify
its value here that field will be deleted. If you are
updating a map field, set the value of a key to null or
empty string to delete the key from the map. It's not
possible to update a key's value to the empty string.
"""
name = proto.Field(
proto.STRING,
number=1,
)
update_mask = proto.Field(
proto.MESSAGE,
number=2,
message=field_mask_pb2.FieldMask,
)
resource = proto.Field(
proto.MESSAGE,
number=3,
message='Membership',
)
class GenerateConnectManifestRequest(proto.Message):
r"""Request message for ``GkeHub.GenerateConnectManifest`` method. .
Attributes:
name (str):
Required. The Membership resource name the Agent will
associate with, in the format
``projects/*/locations/*/memberships/*``.
namespace (str):
Optional. Namespace for GKE Connect agent resources.
Defaults to ``gke-connect``.
The Connect Agent is authorized automatically when run in
the default namespace. Otherwise, explicit authorization
must be granted with an additional IAM binding.
proxy (bytes):
Optional. URI of a proxy if connectivity from the agent to
gkeconnect.googleapis.com requires the use of a proxy.
Format must be in the form ``http(s)://{proxy_address}``,
depending on the HTTP/HTTPS protocol supported by the proxy.
This will direct the connect agent's outbound traffic
through a HTTP(S) proxy.
version (str):
Optional. The Connect agent version to use.
Defaults to the most current version.
is_upgrade (bool):
Optional. If true, generate the resources for
upgrade only. Some resources generated only for
installation (e.g. secrets) will be excluded.
registry (str):
Optional. The registry to fetch the connect
agent image from. Defaults to gcr.io/gkeconnect.
image_pull_secret_content (bytes):
Optional. The image pull secret content for
the registry, if not public.
"""
name = proto.Field(
proto.STRING,
number=1,
)
namespace = proto.Field(
proto.STRING,
number=2,
)
proxy = proto.Field(
proto.BYTES,
number=3,
)
version = proto.Field(
proto.STRING,
number=4,
)
is_upgrade = proto.Field(
proto.BOOL,
number=5,
)
registry = proto.Field(
proto.STRING,
number=6,
)
image_pull_secret_content = proto.Field(
proto.BYTES,
number=7,
)
class GenerateConnectManifestResponse(proto.Message):
r"""GenerateConnectManifestResponse contains manifest information
for installing/upgrading a Connect agent.
Attributes:
manifest (Sequence[google.cloud.gkehub_v1alpha2.types.ConnectAgentResource]):
The ordered list of Kubernetes resources that
need to be applied to the cluster for GKE
Connect agent installation/upgrade.
"""
manifest = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='ConnectAgentResource',
)
class ConnectAgentResource(proto.Message):
r"""ConnectAgentResource represents a Kubernetes resource
manifest for Connect Agent deployment.
Attributes:
type_ (google.cloud.gkehub_v1alpha2.types.TypeMeta):
Kubernetes type of the resource.
manifest (str):
YAML manifest of the resource.
"""
type_ = proto.Field(
proto.MESSAGE,
number=1,
message='TypeMeta',
)
manifest = proto.Field(
proto.STRING,
number=2,
)
class ResourceManifest(proto.Message):
r"""ResourceManifest represents a single Kubernetes resource to
be applied to the cluster.
Attributes:
manifest (str):
YAML manifest of the resource.
cluster_scoped (bool):
Whether the resource provided in the manifest is
``cluster_scoped``. If unset, the manifest is assumed to be
namespace scoped.
This field is used for REST mapping when applying the
resource in a cluster.
"""
manifest = proto.Field(
proto.STRING,
number=1,
)
cluster_scoped = proto.Field(
proto.BOOL,
number=2,
)
class TypeMeta(proto.Message):
r"""TypeMeta is the type information needed for content
unmarshalling of Kubernetes resources in the manifest.
Attributes:
kind (str):
Kind of the resource (e.g. Deployment).
api_version (str):
APIVersion of the resource (e.g. v1).
"""
kind = proto.Field(
proto.STRING,
number=1,
)
api_version = proto.Field(
proto.STRING,
number=2,
)
class InitializeHubRequest(proto.Message):
r"""Request message for the InitializeHub method.
Attributes:
project (str):
Required. The Hub to initialize, in the format
``projects/*/locations/*/memberships/*``.
"""
project = proto.Field(
proto.STRING,
number=1,
)
class InitializeHubResponse(proto.Message):
r"""Response message for the InitializeHub method.
Attributes:
service_identity (str):
Name of the Hub default service identity, in the format:
::
service-<project-number>@gcp-sa-gkehub.iam.gserviceaccount.com
The service account has ``roles/gkehub.serviceAgent`` in the
Hub project.
workload_identity_pool (str):
The Workload Identity Pool used for Workload
Identity-enabled clusters registered with this Hub. Format:
``<project-id>.hub.id.goog``
"""
service_identity = proto.Field(
proto.STRING,
number=1,
)
workload_identity_pool = proto.Field(
proto.STRING,
number=2,
)
class OperationMetadata(proto.Message):
r"""Represents the metadata of the long-running operation.
Attributes:
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time the operation was
created.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time the operation finished
running.
target (str):
Output only. Server-defined resource path for
the target of the operation.
verb (str):
Output only. Name of the verb executed by the
operation.
status_detail (str):
Output only. Human-readable status of the
operation, if any.
cancel_requested (bool):
Output only. Identifies whether the user has requested
cancellation of the operation. Operations that have
successfully been cancelled have [Operation.error][] value
with a [google.rpc.Status.code][google.rpc.Status.code] of
1, corresponding to ``Code.CANCELLED``.
api_version (str):
Output only. API version used to start the
operation.
"""
create_time = proto.Field(
proto.MESSAGE,
number=1,
message=timestamp_pb2.Timestamp,
)
end_time = proto.Field(
proto.MESSAGE,
number=2,
message=timestamp_pb2.Timestamp,
)
target = proto.Field(
proto.STRING,
number=3,
)
verb = proto.Field(
proto.STRING,
number=4,
)
status_detail = proto.Field(
proto.STRING,
number=5,
)
cancel_requested = proto.Field(
proto.BOOL,
number=6,
)
api_version = proto.Field(
proto.STRING,
number=7,
)
__all__ = tuple(sorted(__protobuf__.manifest))
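if __name__ == '__main__':
    # Illustrative sketch only, not part of the generated API surface: the
    # proto-plus messages defined above are constructed with keyword arguments.
    # The project, location and membership id below are hypothetical placeholders.
    _request = CreateMembershipRequest(
        parent='projects/my-project/locations/global',
        membership_id='my-cluster',
        resource=Membership(description='example registration'),
    )
    print(_request)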
|
Typecraft/norsourceparser
|
refs/heads/develop
|
norsourceparser/core/config.py
|
1
|
class Config(object):
DEBUG = False
MAX_PHRASES_PER_TEXT = -1
config = Config()
|
bovarysme/whisper
|
refs/heads/master
|
whisper/commands.py
|
1
|
import os
import click
from flask.cli import FlaskGroup
from . import create_app, db
def create_cli_app(info):
return create_app()
@click.group(cls=FlaskGroup, create_app=create_cli_app)
@click.option('--debug', is_flag=True, default=False)
def cli(debug):
if debug:
os.environ['FLASK_DEBUG'] = '1'
@cli.command()
def initdb():
"""Initialize the database."""
db.create_all()
@cli.command()
def dropdb():
"""Drop the database."""
db.drop_all()
if __name__ == '__main__':
cli()
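# Usage sketch (assumption, not from the original repo): with the package
# importable, the command group above is typically driven from a shell, e.g.
#   python -m whisper.commands initdb          # create all tables
#   python -m whisper.commands dropdb          # drop all tables
#   python -m whisper.commands --debug run     # FlaskGroup also provides `run`
# The exact entry point depends on how the project is packaged.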
|
nvoron23/brython
|
refs/heads/master
|
site/tests/unittests/test/test_pickletools.py
|
161
|
import pickle
import pickletools
from test import support
from test.pickletester import AbstractPickleTests
from test.pickletester import AbstractPickleModuleTests
class OptimizedPickleTests(AbstractPickleTests, AbstractPickleModuleTests):
def dumps(self, arg, proto=None):
return pickletools.optimize(pickle.dumps(arg, proto))
def loads(self, buf, **kwds):
return pickle.loads(buf, **kwds)
# Test relies on precise output of dumps()
test_pickle_to_2x = None
def test_main():
support.run_unittest(OptimizedPickleTests)
support.run_doctest(pickletools)
if __name__ == "__main__":
test_main()
|
apinski-cavium/mcrouter
|
refs/heads/master
|
mcrouter/test/test_mcrouter_errors.py
|
2
|
# Copyright (c) 2015, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
from mcrouter.test.MCProcess import Memcached
from mcrouter.test.McrouterTestCase import McrouterTestCase
from mcrouter.test.mock_servers import ConnectionErrorServer
from mcrouter.test.mock_servers import CustomErrorServer
from mcrouter.test.mock_servers import SleepServer
class TestMcrouterForwardedErrors(McrouterTestCase):
config = './mcrouter/test/mcrouter_test_basic_1_1_1.json'
get_cmd = 'get test_key\r\n'
set_cmd = 'set test_key 0 0 3\r\nabc\r\n'
delete_cmd = 'delete test_key\r\n'
server_errors = [
'SERVER_ERROR out of order',
'SERVER_ERROR timeout',
'SERVER_ERROR connection timeout',
'SERVER_ERROR connection error',
'SERVER_ERROR 307 busy',
'SERVER_ERROR 302 try again',
'SERVER_ERROR unavailable',
'SERVER_ERROR bad value',
'SERVER_ERROR aborted',
'SERVER_ERROR local error',
'SERVER_ERROR remote error',
'SERVER_ERROR waiting'
]
client_errors = [
'CLIENT_ERROR bad command',
'CLIENT_ERROR bad key',
'CLIENT_ERROR bad flags',
'CLIENT_ERROR bad exptime',
'CLIENT_ERROR bad lease_id',
'CLIENT_ERROR bad cas_id',
'CLIENT_ERROR malformed request',
'CLIENT_ERROR out of memory'
]
def setUp(self):
self.server = self.add_server(CustomErrorServer())
# server returned: SERVER_ERROR
def test_server_replied_server_error_for_set(self):
cmd = self.set_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.server_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
def test_server_replied_server_error_for_get(self):
cmd = self.get_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.server_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual('END\r\n', res)
def test_server_replied_server_error_for_get_with_no_miss_on_error(self):
# With --disable-miss-on-get-errors, errors should be forwarded
# to client
cmd = self.get_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.server_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config,
extra_args=['--disable-miss-on-get-errors'])
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
def test_server_replied_server_error_for_delete(self):
cmd = self.delete_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.server_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual('NOT_FOUND\r\n', res)
def test_server_replied_server_error_for_delete_with_no_asynclog(self):
# With --asynclog-disable, errors should be forwarded to client
cmd = self.delete_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.server_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config,
extra_args=['--asynclog-disable'])
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
# server returned: CLIENT_ERROR
def test_server_replied_client_error_for_set(self):
cmd = self.set_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.client_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
def test_server_replied_client_error_for_get(self):
cmd = self.get_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.client_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual('END\r\n', res)
def test_server_replied_client_error_for_get_with_no_miss_on_error(self):
# With --disable-miss-on-get-errors, errors should be forwarded
# to client
cmd = self.get_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.client_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config,
extra_args=['--disable-miss-on-get-errors'])
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
def test_server_replied_client_error_for_delete(self):
cmd = self.delete_cmd
self.server.setExpectedBytes(len(cmd))
for error in self.client_errors:
self.server.setError(error)
mcrouter = self.add_mcrouter(self.config)
res = mcrouter.issue_command(cmd)
self.assertEqual(error + '\r\n', res)
class TestMcrouterGeneratedErrors(McrouterTestCase):
config = './mcrouter/test/mcrouter_test_basic_1_1_1.json'
get_cmd = 'get test_key\r\n'
set_cmd = 'set test_key 0 0 3\r\nabc\r\n'
delete_cmd = 'delete test_key\r\n'
def getMcrouter(self, server, args=[]):
self.add_server(server)
return self.add_mcrouter(self.config, extra_args=args)
# mcrouter generated: timeout
def test_timeout_set(self):
mcrouter = self.getMcrouter(SleepServer())
res = mcrouter.issue_command(self.set_cmd)
self.assertEqual('SERVER_ERROR timeout\r\n', res)
def test_timeout_get(self):
mcrouter = self.getMcrouter(SleepServer())
res = mcrouter.issue_command(self.get_cmd)
self.assertEqual('END\r\n', res)
def test_timeout_delete(self):
mcrouter = self.getMcrouter(SleepServer())
res = mcrouter.issue_command(self.delete_cmd)
self.assertEqual('NOT_FOUND\r\n', res)
# mcrouter generated: connection error
def test_connection_error_set(self):
mcrouter = self.getMcrouter(ConnectionErrorServer())
res = mcrouter.issue_command(self.set_cmd)
self.assertTrue(re.match('SERVER_ERROR (connection|remote) error', res))
def test_connection_error_get(self):
mcrouter = self.getMcrouter(ConnectionErrorServer())
res = mcrouter.issue_command(self.get_cmd)
self.assertEqual('END\r\n', res)
def test_connection_error_delete(self):
mcrouter = self.getMcrouter(ConnectionErrorServer())
res = mcrouter.issue_command(self.delete_cmd)
self.assertEqual('NOT_FOUND\r\n', res)
# mcrouter generated: TKO
def test_tko_set(self):
mcrouter = self.getMcrouter(SleepServer(),
args=['--timeouts-until-tko', '1'])
res = mcrouter.issue_command(self.set_cmd)
res = mcrouter.issue_command(self.set_cmd)
self.assertEqual('SERVER_ERROR unavailable\r\n', res)
def test_tko_get(self):
mcrouter = self.getMcrouter(SleepServer(),
args=['--timeouts-until-tko', '1'])
res = mcrouter.issue_command(self.set_cmd)
res = mcrouter.issue_command(self.get_cmd)
self.assertEqual('END\r\n', res)
def test_tko_delete(self):
mcrouter = self.getMcrouter(SleepServer(),
args=['--timeouts-until-tko', '1'])
res = mcrouter.issue_command(self.set_cmd)
res = mcrouter.issue_command(self.delete_cmd)
self.assertEqual('NOT_FOUND\r\n', res)
# mcrouter generated: bad key
def test_bad_key_set(self):
mcrouter = self.getMcrouter(Memcached())
cmd = 'set test.key' + ('x' * 10000) + ' 0 0 3\r\nabc\r\n'
res = mcrouter.issue_command(cmd)
self.assertEqual('CLIENT_ERROR bad key\r\n', res)
def test_bad_key_get(self):
mcrouter = self.getMcrouter(Memcached())
cmd = 'get test.key' + ('x' * 10000) + '\r\n'
res = mcrouter.issue_command(cmd)
self.assertEqual('CLIENT_ERROR bad key\r\n', res)
def test_bad_key_delete(self):
mcrouter = self.getMcrouter(Memcached())
cmd = 'delete test.key' + ('x' * 10000) + '\r\n'
res = mcrouter.issue_command(cmd)
self.assertEqual('CLIENT_ERROR bad key\r\n', res)
# mcrouter generated: remote error
def test_remote_error_command_not_supported(self):
mcrouter = self.getMcrouter(Memcached())
cmd = 'flush_all\r\n'
res = mcrouter.issue_command(cmd)
self.assertEqual('SERVER_ERROR Command disabled\r\n', res)
|
AMOboxTV/AMOBox.LegoBuild
|
refs/heads/master
|
plugin.video.sembilhete.tv/resources/lib/requests/auth.py
|
174
|
# -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
import threading
from base64 import b64encode
from .compat import urlparse, str
from .cookies import extract_cookies_to_jar
from .utils import parse_dict_header, to_native_string
from .status_codes import codes
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
authstr = 'Basic ' + to_native_string(
b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
)
return authstr
class AuthBase(object):
"""Base class that all auth implementations derive from"""
def __call__(self, r):
raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
"""Attaches HTTP Basic Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
def __call__(self, r):
r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPProxyAuth(HTTPBasicAuth):
"""Attaches HTTP Proxy Authentication to a given Request object."""
def __call__(self, r):
r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPDigestAuth(AuthBase):
"""Attaches HTTP Digest Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
# Keep state in per-thread local storage
self._thread_local = threading.local()
def init_per_thread_state(self):
# Ensure state is initialized just once per-thread
if not hasattr(self._thread_local, 'init'):
self._thread_local.init = True
self._thread_local.last_nonce = ''
self._thread_local.nonce_count = 0
self._thread_local.chal = {}
self._thread_local.pos = None
self._thread_local.num_401_calls = None
def build_digest_header(self, method, url):
realm = self._thread_local.chal['realm']
nonce = self._thread_local.chal['nonce']
qop = self._thread_local.chal.get('qop')
algorithm = self._thread_local.chal.get('algorithm')
opaque = self._thread_local.chal.get('opaque')
hash_utf8 = None  # default so the "is None" check below works for unsupported algorithms
if algorithm is None:
_algorithm = 'MD5'
else:
_algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
def md5_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
hash_utf8 = md5_utf8
elif _algorithm == 'SHA':
def sha_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
hash_utf8 = sha_utf8
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
if hash_utf8 is None:
return None
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
#: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
if p_parsed.query:
path += '?' + p_parsed.query
A1 = '%s:%s:%s' % (self.username, realm, self.password)
A2 = '%s:%s' % (method, path)
HA1 = hash_utf8(A1)
HA2 = hash_utf8(A2)
if nonce == self._thread_local.last_nonce:
self._thread_local.nonce_count += 1
else:
self._thread_local.nonce_count = 1
ncvalue = '%08x' % self._thread_local.nonce_count
s = str(self._thread_local.nonce_count).encode('utf-8')
s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8')
s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16])
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if not qop:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
respdig = KD(HA1, noncebit)
else:
# XXX handle auth-int.
return None
self._thread_local.last_nonce = nonce
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
if algorithm:
base += ', algorithm="%s"' % algorithm
if entdig:
base += ', digest="%s"' % entdig
if qop:
base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return 'Digest %s' % (base)
def handle_redirect(self, r, **kwargs):
"""Reset num_401_calls counter on redirects."""
if r.is_redirect:
self._thread_local.num_401_calls = 1
def handle_401(self, r, **kwargs):
"""Takes the given response and tries digest-auth, if needed."""
if self._thread_local.pos is not None:
# Rewind the file position indicator of the body to where
# it was to resend the request.
r.request.body.seek(self._thread_local.pos)
s_auth = r.headers.get('www-authenticate', '')
if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
self._thread_local.num_401_calls += 1
pat = re.compile(r'digest ', flags=re.IGNORECASE)
self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
r.close()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
prep.headers['Authorization'] = self.build_digest_header(
prep.method, prep.url)
_r = r.connection.send(prep, **kwargs)
_r.history.append(r)
_r.request = prep
return _r
self._thread_local.num_401_calls = 1
return r
def __call__(self, r):
# Initialize per-thread state, if needed
self.init_per_thread_state()
# If we have a saved nonce, skip the 401
if self._thread_local.last_nonce:
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
try:
self._thread_local.pos = r.body.tell()
except AttributeError:
# In the case of HTTPDigestAuth being reused and the body of
# the previous request was a file-like object, pos has the
# file position of the previous body. Ensure it's set to
# None.
self._thread_local.pos = None
r.register_hook('response', self.handle_401)
r.register_hook('response', self.handle_redirect)
self._thread_local.num_401_calls = 1
return r
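# --- Usage sketch (illustrative; not part of this module). Auth objects are
# --- callables that mutate a prepared request; with the `requests` package
# --- they are normally passed via the `auth` keyword. The URLs and
# --- credentials below are placeholders.
#
#   import requests
#   requests.get('https://example.com/basic', auth=HTTPBasicAuth('user', 'pass'))
#   requests.get('https://example.com/digest', auth=HTTPDigestAuth('user', 'pass'))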
|
boompieman/iim_project
|
refs/heads/master
|
project_python2/lib/python2.7/site-packages/nbformat/v2/nbbase.py
|
12
|
"""The basic dict based notebook format.
The Python representation of a notebook is a nested structure of
dictionary subclasses that support attribute access
(ipython_genutils.ipstruct.Struct). The functions in this module are merely
helpers to build the structs in the right form.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import pprint
import uuid
from ipython_genutils.ipstruct import Struct
from ipython_genutils.py3compat import unicode_type
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class NotebookNode(Struct):
pass
def from_dict(d):
if isinstance(d, dict):
newd = NotebookNode()
for k,v in d.items():
newd[k] = from_dict(v)
return newd
elif isinstance(d, (tuple, list)):
return [from_dict(i) for i in d]
else:
return d
def new_output(output_type=None, output_text=None, output_png=None,
output_html=None, output_svg=None, output_latex=None, output_json=None,
output_javascript=None, output_jpeg=None, prompt_number=None,
etype=None, evalue=None, traceback=None):
"""Create a new code cell with input and output"""
output = NotebookNode()
if output_type is not None:
output.output_type = unicode_type(output_type)
if output_type != 'pyerr':
if output_text is not None:
output.text = unicode_type(output_text)
if output_png is not None:
output.png = bytes(output_png)
if output_jpeg is not None:
output.jpeg = bytes(output_jpeg)
if output_html is not None:
output.html = unicode_type(output_html)
if output_svg is not None:
output.svg = unicode_type(output_svg)
if output_latex is not None:
output.latex = unicode_type(output_latex)
if output_json is not None:
output.json = unicode_type(output_json)
if output_javascript is not None:
output.javascript = unicode_type(output_javascript)
if output_type == u'pyout':
if prompt_number is not None:
output.prompt_number = int(prompt_number)
if output_type == u'pyerr':
if etype is not None:
output.etype = unicode_type(etype)
if evalue is not None:
output.evalue = unicode_type(evalue)
if traceback is not None:
output.traceback = [unicode_type(frame) for frame in list(traceback)]
return output
def new_code_cell(input=None, prompt_number=None, outputs=None,
language=u'python', collapsed=False):
"""Create a new code cell with input and output"""
cell = NotebookNode()
cell.cell_type = u'code'
if language is not None:
cell.language = unicode_type(language)
if input is not None:
cell.input = unicode_type(input)
if prompt_number is not None:
cell.prompt_number = int(prompt_number)
if outputs is None:
cell.outputs = []
else:
cell.outputs = outputs
if collapsed is not None:
cell.collapsed = bool(collapsed)
return cell
def new_text_cell(cell_type, source=None, rendered=None):
"""Create a new text cell."""
cell = NotebookNode()
if source is not None:
cell.source = unicode_type(source)
if rendered is not None:
cell.rendered = unicode_type(rendered)
cell.cell_type = cell_type
return cell
def new_worksheet(name=None, cells=None):
"""Create a worksheet by name with with a list of cells."""
ws = NotebookNode()
if name is not None:
ws.name = unicode_type(name)
if cells is None:
ws.cells = []
else:
ws.cells = list(cells)
return ws
def new_notebook(metadata=None, worksheets=None):
"""Create a notebook by name, id and a list of worksheets."""
nb = NotebookNode()
nb.nbformat = 2
if worksheets is None:
nb.worksheets = []
else:
nb.worksheets = list(worksheets)
if metadata is None:
nb.metadata = new_metadata()
else:
nb.metadata = NotebookNode(metadata)
return nb
def new_metadata(name=None, authors=None, license=None, created=None,
modified=None, gistid=None):
"""Create a new metadata node."""
metadata = NotebookNode()
if name is not None:
metadata.name = unicode_type(name)
if authors is not None:
metadata.authors = list(authors)
if created is not None:
metadata.created = unicode_type(created)
if modified is not None:
metadata.modified = unicode_type(modified)
if license is not None:
metadata.license = unicode_type(license)
if gistid is not None:
metadata.gistid = unicode_type(gistid)
return metadata
def new_author(name=None, email=None, affiliation=None, url=None):
"""Create a new author."""
author = NotebookNode()
if name is not None:
author.name = unicode_type(name)
if email is not None:
author.email = unicode_type(email)
if affiliation is not None:
author.affiliation = unicode_type(affiliation)
if url is not None:
author.url = unicode_type(url)
return author
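# Illustrative demo (not part of the original module): build a minimal v2
# notebook from the helpers above. Guarded so it only runs when the module
# is executed directly.
if __name__ == '__main__':
    cell = new_code_cell(input=u'print(1)', prompt_number=1,
                         outputs=[new_output(output_type=u'stream',
                                             output_text=u'1\n')])
    nb = new_notebook(metadata=new_metadata(name=u'example'),
                      worksheets=[new_worksheet(name=u'ws0', cells=[cell])])
    pprint.pprint(nb)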
|
jagguli/intellij-community
|
refs/heads/master
|
python/testData/mover/multiLineSelection6_afterUp.py
|
83
|
class Test(object):
def q(self):
<caret><selection> a = 1
b = 2
</selection> c = 3
|
alistairlow/tensorflow
|
refs/heads/master
|
tensorflow/contrib/timeseries/python/timeseries/test_utils.py
|
86
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing time series models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.timeseries.python.timeseries import estimators
from tensorflow.contrib.timeseries.python.timeseries import input_pipeline
from tensorflow.contrib.timeseries.python.timeseries import state_management
from tensorflow.contrib.timeseries.python.timeseries.feature_keys import TrainEvalFeatures
from tensorflow.python.client import session
from tensorflow.python.estimator import estimator_lib
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import adam
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import coordinator as coordinator_lib
from tensorflow.python.training import queue_runner_impl
from tensorflow.python.util import nest
class AllWindowInputFn(input_pipeline.TimeSeriesInputFn):
"""Returns all contiguous windows of data from a full dataset.
In contrast to WholeDatasetInputFn, which does basic shape checking but
maintains the flat sequencing of data, this `TimeSeriesInputFn` creates
batches of windows. However, unlike `RandomWindowInputFn` these windows are
deterministic, starting at every possible offset (i.e. batches of size
series_length - window_size + 1 are produced).
"""
def __init__(self, time_series_reader, window_size):
"""Initialize the input_pipeline.
Args:
time_series_reader: A `input_pipeline.TimeSeriesReader` object.
window_size: The size of contiguous windows of data to produce.
"""
self._window_size = window_size
self._reader = time_series_reader
super(AllWindowInputFn, self).__init__()
def create_batch(self):
features = self._reader.read_full()
times = features[TrainEvalFeatures.TIMES]
num_windows = array_ops.shape(times)[0] - self._window_size + 1
indices = array_ops.reshape(math_ops.range(num_windows), [num_windows, 1])
# indices contains the starting point for each window. We now extend these
# indices to include the elements inside the windows as well by doing a
# broadcast addition.
increments = array_ops.reshape(math_ops.range(self._window_size), [1, -1])
all_indices = array_ops.reshape(indices + increments, [-1])
# Select the appropriate elements in the batch and reshape the output to 3D.
features = {
key: array_ops.reshape(
array_ops.gather(value, all_indices),
array_ops.concat(
[[num_windows, self._window_size], array_ops.shape(value)[1:]],
axis=0))
for key, value in features.items()
}
return (features, None)
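# Worked example (illustrative): for a series of length 10 and window_size=3,
# create_batch() above produces num_windows = 10 - 3 + 1 = 8 overlapping
# windows, so each feature tensor comes back with leading shape [8, 3, ...].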
class _SavingTensorHook(basic_session_run_hooks.LoggingTensorHook):
"""A hook to save Tensors during training."""
def __init__(self, tensors, every_n_iter=None, every_n_secs=None):
self.tensor_values = {}
super(_SavingTensorHook, self).__init__(
tensors=tensors, every_n_iter=every_n_iter,
every_n_secs=every_n_secs)
def after_run(self, run_context, run_values):
del run_context
if self._should_trigger:
for tag in self._current_tensors.keys():
self.tensor_values[tag] = run_values.results[tag]
self._timer.update_last_triggered_step(self._iter_count)
self._iter_count += 1
def _train_on_generated_data(
generate_fn, generative_model, train_iterations, seed,
learning_rate=0.1, ignore_params_fn=lambda _: (),
derived_param_test_fn=lambda _: (),
train_input_fn_type=input_pipeline.WholeDatasetInputFn,
train_state_manager=state_management.PassthroughStateManager()):
"""The training portion of parameter recovery tests."""
random_seed.set_random_seed(seed)
generate_graph = ops.Graph()
with generate_graph.as_default():
with session.Session(graph=generate_graph):
generative_model.initialize_graph()
time_series_reader, true_parameters = generate_fn(generative_model)
true_parameters = {
tensor.name: value for tensor, value in true_parameters.items()}
eval_input_fn = input_pipeline.WholeDatasetInputFn(time_series_reader)
eval_state_manager = state_management.PassthroughStateManager()
true_parameter_eval_graph = ops.Graph()
with true_parameter_eval_graph.as_default():
generative_model.initialize_graph()
ignore_params = ignore_params_fn(generative_model)
feature_dict, _ = eval_input_fn()
eval_state_manager.initialize_graph(generative_model)
feature_dict[TrainEvalFeatures.VALUES] = math_ops.cast(
feature_dict[TrainEvalFeatures.VALUES], generative_model.dtype)
model_outputs = eval_state_manager.define_loss(
model=generative_model,
features=feature_dict,
mode=estimator_lib.ModeKeys.EVAL)
with session.Session(graph=true_parameter_eval_graph) as sess:
variables.global_variables_initializer().run()
coordinator = coordinator_lib.Coordinator()
queue_runner_impl.start_queue_runners(sess, coord=coordinator)
true_param_loss = model_outputs.loss.eval(feed_dict=true_parameters)
true_transformed_params = {
param: param.eval(feed_dict=true_parameters)
for param in derived_param_test_fn(generative_model)}
coordinator.request_stop()
coordinator.join()
saving_hook = _SavingTensorHook(
tensors=true_parameters.keys(),
every_n_iter=train_iterations - 1)
class _RunConfig(estimator_lib.RunConfig):
@property
def tf_random_seed(self):
return seed
estimator = estimators.TimeSeriesRegressor(
model=generative_model,
config=_RunConfig(),
state_manager=train_state_manager,
optimizer=adam.AdamOptimizer(learning_rate))
train_input_fn = train_input_fn_type(time_series_reader=time_series_reader)
trained_loss = (estimator.train(
input_fn=train_input_fn,
max_steps=train_iterations,
hooks=[saving_hook]).evaluate(
input_fn=eval_input_fn, steps=1))["loss"]
logging.info("Final trained loss: %f", trained_loss)
logging.info("True parameter loss: %f", true_param_loss)
return (ignore_params, true_parameters, true_transformed_params,
trained_loss, true_param_loss, saving_hook,
true_parameter_eval_graph)
def test_parameter_recovery(
generate_fn, generative_model, train_iterations, test_case, seed,
learning_rate=0.1, rtol=0.2, atol=0.1, train_loss_tolerance_coeff=0.99,
ignore_params_fn=lambda _: (),
derived_param_test_fn=lambda _: (),
train_input_fn_type=input_pipeline.WholeDatasetInputFn,
train_state_manager=state_management.PassthroughStateManager()):
"""Test that a generative model fits generated data.
Args:
generate_fn: A function taking a model and returning a `TimeSeriesReader`
object and a dictionary mapping parameters to their
values. model.initialize_graph() will have been called on the model
before it is passed to this function.
generative_model: A timeseries.model.TimeSeriesModel instance to test.
train_iterations: Number of training steps.
test_case: A tf.test.TestCase to run assertions on.
seed: Same as for TimeSeriesModel.unconditional_generate().
learning_rate: Step size for optimization.
rtol: Relative tolerance for tests.
atol: Absolute tolerance for tests.
train_loss_tolerance_coeff: Trained loss times this value must be less
than the loss evaluated using the generated parameters.
ignore_params_fn: Function mapping from a Model to a list of parameters
which are not tested for accurate recovery.
derived_param_test_fn: Function returning a list of derived parameters
(Tensors) which are checked for accurate recovery (comparing the value
evaluated with trained parameters to the value under the true
parameters).
As an example, for VARMA, in addition to checking AR and MA parameters,
this function can be used to also check lagged covariance. See
varma_ssm.py for details.
train_input_fn_type: The `TimeSeriesInputFn` type to use when training
(likely `WholeDatasetInputFn` or `RandomWindowInputFn`). If None, use
`WholeDatasetInputFn`.
train_state_manager: The state manager to use when training (likely
`PassthroughStateManager` or `ChainingStateManager`). If None, use
`PassthroughStateManager`.
"""
(ignore_params, true_parameters, true_transformed_params,
trained_loss, true_param_loss, saving_hook, true_parameter_eval_graph
) = _train_on_generated_data(
generate_fn=generate_fn, generative_model=generative_model,
train_iterations=train_iterations, seed=seed, learning_rate=learning_rate,
ignore_params_fn=ignore_params_fn,
derived_param_test_fn=derived_param_test_fn,
train_input_fn_type=train_input_fn_type,
train_state_manager=train_state_manager)
trained_parameter_substitutions = {}
for param in true_parameters.keys():
evaled_value = saving_hook.tensor_values[param]
trained_parameter_substitutions[param] = evaled_value
true_value = true_parameters[param]
logging.info("True %s: %s, learned: %s",
param, true_value, evaled_value)
with session.Session(graph=true_parameter_eval_graph):
for transformed_param, true_value in true_transformed_params.items():
trained_value = transformed_param.eval(
feed_dict=trained_parameter_substitutions)
logging.info("True %s [transformed parameter]: %s, learned: %s",
transformed_param, true_value, trained_value)
test_case.assertAllClose(true_value, trained_value,
rtol=rtol, atol=atol)
if ignore_params is None:
ignore_params = []
else:
ignore_params = nest.flatten(ignore_params)
ignore_params = [tensor.name for tensor in ignore_params]
if trained_loss > 0:
test_case.assertLess(trained_loss * train_loss_tolerance_coeff,
true_param_loss)
else:
test_case.assertLess(trained_loss / train_loss_tolerance_coeff,
true_param_loss)
for param in true_parameters.keys():
if param in ignore_params:
continue
evaled_value = saving_hook.tensor_values[param]
true_value = true_parameters[param]
test_case.assertAllClose(true_value, evaled_value,
rtol=rtol, atol=atol)
def parameter_recovery_dry_run(
generate_fn, generative_model, seed,
learning_rate=0.1,
train_input_fn_type=input_pipeline.WholeDatasetInputFn,
train_state_manager=state_management.PassthroughStateManager()):
"""Test that a generative model can train on generated data.
Args:
generate_fn: A function taking a model and returning a
`input_pipeline.TimeSeriesReader` object and a dictionary mapping
parameters to their values. model.initialize_graph() will have been
called on the model before it is passed to this function.
generative_model: A timeseries.model.TimeSeriesModel instance to test.
seed: Same as for TimeSeriesModel.unconditional_generate().
learning_rate: Step size for optimization.
train_input_fn_type: The type of `TimeSeriesInputFn` to use when training
(likely `WholeDatasetInputFn` or `RandomWindowInputFn`). If None, use
`WholeDatasetInputFn`.
train_state_manager: The state manager to use when training (likely
`PassthroughStateManager` or `ChainingStateManager`). If None, use
`PassthroughStateManager`.
"""
_train_on_generated_data(
generate_fn=generate_fn, generative_model=generative_model,
seed=seed, learning_rate=learning_rate,
train_input_fn_type=train_input_fn_type,
train_state_manager=train_state_manager,
train_iterations=2)
|
mdibaiee/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/pywebsocket/src/example/bench_wsh.py
|
495
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple load tester for WebSocket clients.
A client program sends a message formatted as "<time> <count> <message>" to
this handler. The handler then sends a total of <count> WebSocket messages
containing <message> every <time> seconds. <time> can be a floating point
value. <count> must be an integer value.
"""
import time
def web_socket_do_extra_handshake(request):
pass # Always accept.
def web_socket_transfer_data(request):
line = request.ws_stream.receive_message()
parts = line.split(' ')
if len(parts) != 3:
raise ValueError('Bad parameter format')
wait = float(parts[0])
count = int(parts[1])
message = parts[2]
for i in xrange(count):
request.ws_stream.send_message(message)
time.sleep(wait)
# vi:sts=4 sw=4 et
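# Illustrative example (not part of the handler): a client that sends the
# single text message "0.5 10 hello" will receive "hello" back 10 times,
# one message roughly every 0.5 seconds.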
|
AutorestCI/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/application_gateway_firewall_rule_set.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class ApplicationGatewayFirewallRuleSet(Resource):
"""A web application firewall rule set.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param provisioning_state: The provisioning state of the web application
firewall rule set.
:type provisioning_state: str
:param rule_set_type: The type of the web application firewall rule set.
:type rule_set_type: str
:param rule_set_version: The version of the web application firewall rule
set type.
:type rule_set_version: str
:param rule_groups: The rule groups of the web application firewall rule
set.
:type rule_groups:
list[~azure.mgmt.network.v2017_06_01.models.ApplicationGatewayFirewallRuleGroup]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
'rule_groups': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'rule_set_type': {'key': 'properties.ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'properties.ruleSetVersion', 'type': 'str'},
'rule_groups': {'key': 'properties.ruleGroups', 'type': '[ApplicationGatewayFirewallRuleGroup]'},
}
def __init__(self, rule_set_type, rule_set_version, rule_groups, id=None, location=None, tags=None, provisioning_state=None):
super(ApplicationGatewayFirewallRuleSet, self).__init__(id=id, location=location, tags=tags)
self.provisioning_state = provisioning_state
self.rule_set_type = rule_set_type
self.rule_set_version = rule_set_version
self.rule_groups = rule_groups
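# --- Usage sketch (values are illustrative, not taken from SDK docs). The
# --- three required arguments mirror the _validation map above.
#
#   rule_set = ApplicationGatewayFirewallRuleSet(
#       rule_set_type='OWASP',
#       rule_set_version='3.0',
#       rule_groups=[],
#       location='westus',
#   )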
|
Renji/weevely3
|
refs/heads/master
|
core/messages.py
|
11
|
version = '3.2.0'
class generic:
file_s_not_found = "File '%s' not found"
error_creating_file_s_s = "Error creating file '%s': %s"
error_loading_file_s_s = 'Error loading file \'%s\': %s'
error_file_s_already_exists = 'Error file \'%s\' already exists'
error_url_format = 'URL error format \'http(s)://host/agent.php\' is expected'
error_parsing_command_s = 'Error parsing command: %s'
weevely_s_error_s_usage = """
[+] weevely %s
[!] Error: %s
[+] Run terminal to the target
weevely <URL> <password> [cmd]
[+] Load session file
weevely session <path> [cmd]
[+] Generate backdoor agent
weevely generate <password> <path>
"""
class sessions:
set_module_s_s_s = '%s.%s = %s'
set_s_s = '%s = %s'
unset_module_s_s = '%s.%s is now unset'
unset_s = '%s is now unset'
error_loading_sessions = 'Session can not be loaded'
error_session_s_not_modified = 'Error, session variable \'%s\' can\'t be modified.'
connection_info = """<%!
import urlparse
%><%
if not host:
urlparsed = urlparse.urlparse(url)
if urlparsed and urlparsed.netloc:
hostname = urlparsed.netloc
else:
hostname = 'undefined host'
else:
hostname = host
%>${'%s@' % user if user else ''}${hostname}${':%s' % path if path and path != '.' else ''}"""
class channels:
error_loading_channel_s = 'Error loading channel \'%s\''
error_proxy_format = 'Proxy format error, use \'http|https|socks5|sock4://host:port\''
class terminal:
backdoor_unavailable = 'Backdoor communication failed: please check URL reachability and password'
welcome_no_shell = """
The system shell interpreter is not available in this session.
PHP code and modules execution are available. Use the following
command replacements to simulate an unrestricted shell.
"""
help_no_shell = """
The system shell interpreter is not available in this session, use the
following command replacements to simulate an unrestricted shell.
"""
welcome_to_s = """
[+] weevely ${version}
[+] Target:\t${conn_info}
[+] Session:\t${path}
% if default_shell:
[+] Shell:\t${ 'System shell' if default_shell == 'shell_sh' else 'PHP interpreter'}
% endif
[+] Browse the filesystem or execute commands starts the connection
[+] to the target. Type :help for more information.
"""
set_usage = 'Set session variable (run :show to print). Usage:\n:set <variable> \'<value>\''
unset_usage = 'Unset session variable (run :show to print). Usage:\n:unset <variable>'
class stegareferrer:
error_generating_id = 'Error generating id, payload too long?'
error_password_hash = 'Error generating trigger, please use another password'
error_language_start_letter_s = 'Error, at least one language must start with the letter \'%s\''
error_chunk_position_i_s = 'Error chunk position %i is not indexable, delete template \'%s\''
class vectors:
wrong_target_type = 'Wrong target operating system type'
wrong_arguments_type = 'Wrong formatting argument type, a dictionary is required'
wrong_postprocessing_type = 'Wrong postprocessing argument type, a callable function is required'
wrong_payload_type = 'Wrong payload argument type, a string or a list of strings is required'
wrong_condition_type = 'Wrong condition argument type, a callable function is required'
wrong_store_name_type = 'Wrong argument type, a string with an argument name is required'
class vectorlist:
vector_s_triggers_an_exc = 'Vector \'%s\' execution triggers an exception'
class module:
error_setting_arguments_s = 'Error setting arguments: %s'
argument_s_must_be_a_vector = 'Argument \'%s\' must be a vector name'
error_module_missing_description = 'Error, module description is missing'
error_module_exec_error_s = 'Error, module execution triggered error \'%s\''
error_init_method_required = 'Error, the init() method definition is required in Modules'
module_s_exec_terminated = 'Module \'%s\' execution terminated'
module_s_inactive = 'Module \'%s\' is inactive, skipped'
error_choices_s_s_empty = 'Choices for \'%s\' argument \'%s\' is empty. Please check if vectors are declared before arguments.'
running_the_alias_s = 'Shell interpreter unavailable, running the alias \'%s\''
vector_s_not_support_arg_s_s = 'Vector \'%s\' does not support argument \'%s\' set to \'%s\''
class module_file_cd:
failed_directory_change_to_s = "Failed cd '%s': no such directory or permission denied"
error_getting_ossep = "Error getting remote directory separator"
class module_file_ls:
failed_list_file_in_directory_s = "Failed list file in directory '%s': no such directory or permission denied"
failed_list_file_in_directory_s_unknown = "Failed list file in directory '%s': unknown error"
class module_file_download:
failed_download_file = "File download failed, please check remote path and permissions"
skipping_md5_check = "Skipping MD5 check, the file integrity can't be checked"
class module_file_upload:
error_content_lpath_required = "Error, argument 'lpath' or 'content' is required"
failed_upload_file = "File upload failed, please check remote path and permissions"
failed_md5_check = "Failed MD5 check, the integrity check is wrong or not available"
class module_file_edit:
unmodified_file = "File unmodified, skipping upload"
class module_file_touch:
error_invalid_timestamp_format = "Error, invalid timestamp format"
error_source_timestamp_required = "Error, source timestamp is required."
failed_touch_file = "File touch failed, please check remote path and permissions"
class module_sql_console:
unexpected_response = "Unexpected response"
check_credentials = "Check credentials and DB availability"
no_data = "No data returned"
missing_sql_trailer_s = 'Is the trailing comma missing at the end of the SQL statement \'%s\'?'
class module_sql_dump:
sql_dump_failed_check_credentials = "SQL dump failed, check credentials and DB availability"
sql_dump_saved_s = "SQL dump saved to '%s'"
class module_file_grep:
failed_retrieve_info = "Failed retrieve file information, please check if the remote readable files exist"
class module_file_upload2web:
failed_retrieve_info = "Failed retrieve web root information"
failed_resolve_path = "Failed resolve path, please check remote path and permissions"
error_s_not_under_webroot_s = "Error, \'%s\' is not under the web root folder \'%s\'"
failed_search_writable_starting_s = "Failed search first writable folder starting from '%s'."
class module_shell_php:
error_404_remote_backdoor = 'The remote backdoor request triggers an error 404, please verify its availability'
error_500_executing = 'The remote script execution triggers an error 500, please verify script integrity and sent payload correctness'
error_URLError_network = 'Network error, unable to connect to the remote backdoor'
error_proxy = 'Proxy error, unable to connect to the remote backdoor'
missing_php_trailer_s = 'Is the trailing comma missing at the end of the PHP code \'%s\'?'
error_i_executing = 'The request triggers the error %i, please verify running code'
class module_net_ifconfig:
error_no_s_execution_result = 'Error, no \'%s\' execution result'
error_parsing_s_execution_result = 'Error parsing \'%s\' execution result'
error_interpeting_s_execution_result_s = 'Error interpreting \'%s\' execution result: \'%s\''
failed_retrieve_info = "Failed retrieve ifconfig information"
class module_backdoor_tcp:
error_parsing_connect_s = 'Error parsing hostname, connect manually to the shell on port %s'
error_connecting_to_s_s_s = 'Error connecting to %s:%s: %s'
class module_backdoor_reversetcp:
error_binding_socket_s = 'Error binding socket: \'%s\''
error_timeout = 'Timeout error'
reverse_shell_connected = 'Reverse shell connected, insert commands. Append semi-colon help to get the commands accepted.'
class module_audit_phpconf:
not_enabled = 'Not enabled'
enabled = 'Enabled'
error = 'Error getting information'
basedir_unrestricted = 'Unrestricted'
basedir_dot = 'Set to \'.\', bypassable'
basedir_no_slash = 'No trailing \'/\', bypassable'
user_win_admin = 'Check this is not an administrative user'
user_nix_root = 'User \'root\' can be abused'
feat_expose_php = 'PHP configuration information exposed'
feat_file_uploads = 'File upload enabled'
feat_register_globals = 'Insecure variable manipulation enabled'
feat_display_errors = 'Information display on error enabled'
feat_enable_dl = 'Function dl() can be used to bypass restrictions'
feat_safe_mode = 'Safe mode restrictions enabled'
feat_magic_quotes_gpc = 'Insecure SQL injection protection enabled'
feat_allow_url_include = 'Insecure inclusion of remote resources enabled'
feat_session_use_trans_sid = 'Session IDs insecurely passed via URL'
class_splFileObject = 'Class splFileObject can be used to bypass restrictions'
class_COM = 'Class COM can be used to bypass restrictions'
class_Java = 'Class Java can be used to bypass restrictions'
func_info = 'Configuration exposed'
func_files = 'Filesystem manipulation'
func_log = 'Log tampering'
func_proc_execution = 'Process execution'
func_proc_manipulation = 'Process manipulation'
class module_net_curl:
unexpected_response = 'Unexpected response, please check URL reachability'
class module_net_proxy:
https_not_implemented = 'HTTPS connection is not implemented, use module :net_phpproxy'
request_timed_out_s = 'Request timed out: %s'
proxy_set_address_s_i = 'Set proxy address to your browser as \'http://%s:%i\' and start browsing'
proxy_started_background = 'Background proxy started, will shutdown at weevely exit'
proxy_started_foreground = 'Foreground proxy started, press Ctrl-C to force shutdown'
class module_net_phpproxy:
phpproxy_installed_to_s_browser_to_s = 'PHP proxy installed to \'%s\', browse \'%s\' with your browser'
proxy_script_removed = 'The PHP proxy script will be removed at weevely exit'
proxy_script_manually_remove_s = 'Remove manually the PHP proxy script \'%s\''
class module_net_scan:
unexpected_response = 'Unexpected response'
class module_file_archive:
archive_type_not_set = 'Archive type not set and file extension not supported'
error_extracting_s_file_needed = 'Error extracting %s, first remote file argument must contain the output file path'
error_extracting_s_folder_needed = 'Error extracting %s, first remote file argument must contain the output folder path'
remote_path_check_failed = "Remote path check failed, please check if exists and is readable"
class module_file_mount:
httpfs_s_not_found = "HTTPfs binary '%s' not found, specify binary path or install it from 'https://github.com/cyrus-and/httpfs'"
error_generating_agent = "Error generating HTTPfs PHP agent"
failed_agent_upload = "Failed PHP agent upload"
agent_installed_tutorial = """HTTPfs PHP agent installed remotely as ${agent_abs_path}.
Run the following commands in another console outside weevely to mount
and unmount the remote filesystem.
Mount locally a remote folder:
${httpfs_binary} mount '${agent_url}' <local folder> <remote folder>
Unmount it:
fusermount -u <local folder>
"""
httpfs_agent_removed = 'The PHP agent will be removed at weevely exit'
httpfs_agent_manually_remove_s = 'Remove manually the PHP agent \'%s\''
class module_shell_su:
error_su_executing = 'Error executing commands with su'
class module_shell_sh:
error_sh_remote_shell = 'Error loading Sh remote shell'
class generate:
error_agent_template_s_s = 'Error with agent template \'%s\': %s'
error_obfuscator_template_s_s = 'Error with obfuscator template \'%s\': %s'
generated_backdoor_with_password_s_in_s_size_i = 'Generated backdoor with password \'%s\' in \'%s\' of %i byte size.'
class utils_code:
minify_php_missing_binary = 'Missing PHP binary, skipping PHP code minify'
minify_php_error_minifying = 'Error minifying PHP code, skipping'
|
cs591B1-Project/Social-Media-Impact-on-Stock-Market-and-Price
|
refs/heads/master
|
data/20 nike/nikeAnalyze2.py
|
1
|
from readStockData import readClosingPrice
from readStockData import readVolume
from dataAnalysis import calCorrelation
s = readClosingPrice('nike.xls')
v = readVolume('nike.xls')
calCorrelation(s,v)
|
SuperMass/distOS-lab3
|
refs/heads/master
|
src/integrated/client4/client_config.py
|
1
|
remote_server_ips = ('127.0.0.1', '127.0.0.1')
remote_server_ports = (8005, 8006)
assigned_server_index = 1  # in a real system, clients are assigned by a load-balancing server; here we just simulate that balancing policy.
process_id = 4
client_addr = ('127.0.0.1', 7004)
poisson_lambda = 5
simu_len = 60
get_score_pb = 0.8
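# Note (assumption, not from the original repo): a client loop would typically
# consume these settings along the lines of
#   gap = random.expovariate(poisson_lambda)   # Poisson inter-request gap
#   server = (remote_server_ips[assigned_server_index],
#             remote_server_ports[assigned_server_index])
# for roughly simu_len seconds of simulated traffic.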
|
dims/cinder
|
refs/heads/master
|
cinder/tests/unit/volume/drivers/netapp/eseries/test_utils.py
|
31
|
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Mock unit tests for the NetApp E-series driver utility module
"""
import six
from cinder import test
from cinder.volume.drivers.netapp.eseries import utils
class NetAppEseriesDriverUtilsTestCase(test.TestCase):
def test_convert_uuid_to_es_fmt(self):
value = 'e67e931a-b2ed-4890-938b-3acc6a517fac'
result = utils.convert_uuid_to_es_fmt(value)
self.assertEqual('4Z7JGGVS5VEJBE4LHLGGUUL7VQ', result)
def test_convert_es_fmt_to_uuid(self):
value = '4Z7JGGVS5VEJBE4LHLGGUUL7VQ'
result = six.text_type(utils.convert_es_fmt_to_uuid(value))
self.assertEqual('e67e931a-b2ed-4890-938b-3acc6a517fac', result)
|
gymnasium/edx-platform
|
refs/heads/open-release/hawthorn.master
|
openedx/core/djangoapps/credit/exceptions.py
|
24
|
"""Exceptions raised by the credit API. """
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
# TODO: Cleanup this mess! ECOM-2908
class CreditApiBadRequest(Exception):
"""
Could not complete a request to the credit API because
there was a problem with the request (as opposed to an internal error).
"""
pass
class InvalidCreditRequirements(CreditApiBadRequest):
"""
The requirement dictionary provided has invalid format.
"""
pass
class InvalidCreditCourse(CreditApiBadRequest):
"""
The course is not configured for credit.
"""
pass
class UserIsNotEligible(CreditApiBadRequest):
"""
The user has not satisfied eligibility requirements for credit.
"""
pass
class CreditProviderNotConfigured(CreditApiBadRequest):
"""
The requested credit provider is not configured correctly for the course.
"""
pass
class RequestAlreadyCompleted(CreditApiBadRequest):
"""
The user has already submitted a request and received a response from the credit provider.
"""
pass
class CreditRequestNotFound(CreditApiBadRequest):
"""
The request does not exist.
"""
pass
class InvalidCreditStatus(CreditApiBadRequest):
"""
The status is neither "approved" nor "rejected".
"""
pass
class InvalidCreditRequest(APIException):
""" API request is invalid. """
status_code = status.HTTP_400_BAD_REQUEST
class UserNotEligibleException(InvalidCreditRequest):
""" User not eligible for credit for a given course. """
def __init__(self, course_key, username):
detail = _('[{username}] is not eligible for credit for [{course_key}].').format(username=username,
course_key=course_key)
super(UserNotEligibleException, self).__init__(detail)
class InvalidCourseKey(InvalidCreditRequest):
""" Course key is invalid. """
def __init__(self, course_key):
detail = _('[{course_key}] is not a valid course key.').format(course_key=course_key)
super(InvalidCourseKey, self).__init__(detail)
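# Usage sketch (illustrative, not from the original module): API views would
# raise these exceptions and let the REST framework handler turn the
# APIException subclass into a 400 response with the translated detail, e.g.
#   raise UserNotEligibleException(course_key, request.user.username)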
|
dahlstrom-g/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyNoneFunctionAssignmentInspection/methodWithInheritors.py
|
83
|
class C(object):
def foo(self):
pass
def bar(self):
pass
def test(self):
x = self.foo()
<weak_warning descr="Function 'bar' doesn't return anything">y = self.bar()</weak_warning>
class D(C):
def foo(self):
return 2
|
faywong/FFPlayer
|
refs/heads/trunk
|
project/jni/python/src/Tools/i18n/msgfmt.py
|
61
|
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Written by Martin v. Löwis <loewis@informatik.hu-berlin.de>
"""Generate binary message catalog from textual translation description.
This program converts a textual Uniforum-style message catalog (.po file) into
a binary GNU catalog (.mo file). This is essentially the same function as the
GNU msgfmt program; however, it is a simpler implementation.
Usage: msgfmt.py [OPTIONS] filename.po
Options:
-o file
--output-file=file
Specify the output file to write to. If omitted, output will go to a
file named filename.mo (based off the input file name).
-h
--help
Print this message and exit.
-V
--version
Display version information and exit.
"""
import sys
import os
import getopt
import struct
import array
__version__ = "1.1"
MESSAGES = {}
def usage(code, msg=''):
print >> sys.stderr, __doc__
if msg:
print >> sys.stderr, msg
sys.exit(code)
def add(id, str, fuzzy):
"Add a non-fuzzy translation to the dictionary."
global MESSAGES
if not fuzzy and str:
MESSAGES[id] = str
def generate():
"Return the generated output."
global MESSAGES
keys = MESSAGES.keys()
# the keys are sorted in the .mo file
keys.sort()
offsets = []
ids = strs = ''
for id in keys:
# For each string, we need size and file offset. Each string is NUL
# terminated; the NUL does not count into the size.
offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
ids += id + '\0'
strs += MESSAGES[id] + '\0'
output = ''
# The header is 7 32-bit unsigned integers. We don't use hash tables, so
# the keys start right after the index tables.
# translated string.
keystart = 7*4+16*len(keys)
# and the values start after the keys
valuestart = keystart + len(ids)
koffsets = []
voffsets = []
# The string table first has the list of keys, then the list of values.
# Each entry has first the size of the string, then the file offset.
for o1, l1, o2, l2 in offsets:
koffsets += [l1, o1+keystart]
voffsets += [l2, o2+valuestart]
offsets = koffsets + voffsets
output = struct.pack("Iiiiiii",
0x950412deL, # Magic
0, # Version
len(keys), # # of entries
7*4, # start of key index
7*4+len(keys)*8, # start of value index
0, 0) # size and offset of hash table
output += array.array("i", offsets).tostring()
output += ids
output += strs
return output
def make(filename, outfile):
ID = 1
STR = 2
# Compute .mo name from .po name and arguments
if filename.endswith('.po'):
infile = filename
else:
infile = filename + '.po'
if outfile is None:
outfile = os.path.splitext(infile)[0] + '.mo'
try:
lines = open(infile).readlines()
except IOError, msg:
print >> sys.stderr, msg
sys.exit(1)
section = None
fuzzy = 0
# Parse the catalog
lno = 0
for l in lines:
lno += 1
# If we get a comment line after a msgstr, this is a new entry
if l[0] == '#' and section == STR:
add(msgid, msgstr, fuzzy)
section = None
fuzzy = 0
# Record a fuzzy mark
if l[:2] == '#,' and 'fuzzy' in l:
fuzzy = 1
# Skip comments
if l[0] == '#':
continue
# Now we are in a msgid section, output previous section
if l.startswith('msgid'):
if section == STR:
add(msgid, msgstr, fuzzy)
section = ID
l = l[5:]
msgid = msgstr = ''
# Now we are in a msgstr section
elif l.startswith('msgstr'):
section = STR
l = l[6:]
# Skip empty lines
l = l.strip()
if not l:
continue
# XXX: Does this always follow Python escape semantics?
l = eval(l)
if section == ID:
msgid += l
elif section == STR:
msgstr += l
else:
print >> sys.stderr, 'Syntax error on %s:%d' % (infile, lno), \
'before:'
print >> sys.stderr, l
sys.exit(1)
# Add last entry
if section == STR:
add(msgid, msgstr, fuzzy)
# Compute output
output = generate()
try:
open(outfile,"wb").write(output)
except IOError,msg:
print >> sys.stderr, msg
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], 'hVo:',
['help', 'version', 'output-file='])
except getopt.error, msg:
usage(1, msg)
outfile = None
# parse options
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-V', '--version'):
print >> sys.stderr, "msgfmt.py", __version__
sys.exit(0)
elif opt in ('-o', '--output-file'):
outfile = arg
# do it
if not args:
print >> sys.stderr, 'No input file given'
print >> sys.stderr, "Try `msgfmt --help' for more information."
return
for filename in args:
make(filename, outfile)
if __name__ == '__main__':
main()
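# A minimal sketch, separate from msgfmt.py itself, that reads back the
# 7-integer header generate() writes; it only illustrates the layout described
# in the comments above (the path passed in is whatever .mo file make()
# produced, so it is a placeholder here).
def read_mo_header(path):
    import struct
    with open(path, "rb") as fh:
        header = fh.read(7 * 4)
    (magic, version, nentries,
     keyidx, validx,
     hash_size, hash_offset) = struct.unpack("Iiiiiii", header)
    assert magic == 0x950412de               # same magic number packed by generate()
    assert keyidx == 7 * 4                   # key index starts right after the header
    assert validx == 7 * 4 + 8 * nentries    # value index follows the key index
    return nentries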
|
aniketshukla/werkzeug
|
refs/heads/master
|
examples/coolmagic/utils.py
|
44
|
# -*- coding: utf-8 -*-
"""
coolmagic.utils
~~~~~~~~~~~~~~~
This module contains the subclasses of the base request and response
objects provided by werkzeug. The subclasses know about their charset
and implement some additional functionality like the ability to link
to view functions.
:copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from os.path import dirname, join
from jinja import Environment, FileSystemLoader
from werkzeug.local import Local, LocalManager
from werkzeug.utils import redirect
from werkzeug.wrappers import BaseRequest, BaseResponse
local = Local()
local_manager = LocalManager([local])
template_env = Environment(
loader=FileSystemLoader(join(dirname(__file__), 'templates'),
use_memcache=False)
)
exported_views = {}
def export(string, template=None, **extra):
"""
Decorator for registering view functions and adding
templates to them.
"""
def wrapped(f):
endpoint = (f.__module__ + '.' + f.__name__)[16:]
if template is not None:
old_f = f
def f(**kwargs):
rv = old_f(**kwargs)
if not isinstance(rv, Response):
rv = TemplateResponse(template, **(rv or {}))
return rv
f.__name__ = old_f.__name__
f.__doc__ = old_f.__doc__
exported_views[endpoint] = (f, string, extra)
return f
return wrapped
def url_for(endpoint, **values):
"""
Build a URL
"""
return local.request.url_adapter.build(endpoint, values)
class Request(BaseRequest):
"""
The concrete request object used in the WSGI application.
It has some helper functions that can be used to build URLs.
"""
charset = 'utf-8'
def __init__(self, environ, url_adapter):
BaseRequest.__init__(self, environ)
self.url_adapter = url_adapter
local.request = self
class ThreadedRequest(object):
"""
A pseudo request object that always points to the request that is
active in the current context.
"""
def __getattr__(self, name):
if name == '__members__':
return [x for x in dir(local.request) if not
x.startswith('_')]
return getattr(local.request, name)
def __setattr__(self, name, value):
return setattr(local.request, name, value)
class Response(BaseResponse):
"""
The concrete response object for the WSGI application.
"""
charset = 'utf-8'
default_mimetype = 'text/html'
class TemplateResponse(Response):
"""
Render a template to a response.
"""
def __init__(self, template_name, **values):
from coolmagic import helpers
values.update(
request=local.request,
h=helpers
)
template = template_env.get_template(template_name)
Response.__init__(self, template.render(values))
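# A hypothetical usage sketch for the export decorator defined above. In the
# coolmagic example such a view would live in a module under coolmagic.views
# (which the [16:] endpoint slice assumes); the URL rule, view name and
# template file here are invented for illustration.
@export('/hello', template='hello.html')
def _example_hello(name='World'):
    # Returning a dict (or None) makes the wrapper build a TemplateResponse
    # rendered with the given template instead of a plain Response.
    return {'name': name}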
|
jarvys/django-1.7-jdb
|
refs/heads/master
|
tests/delete_regress/tests.py
|
19
|
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import transaction, DEFAULT_DB_ALIAS, models
from django.db.utils import ConnectionHandler
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import (Book, Award, AwardNote, Person, Child, Toy, PlayedWith,
PlayedWithNote, Email, Researcher, Food, Eaten, Policy, Version, Location,
Item, Image, File, Photo, FooFile, FooImage, FooPhoto, FooFileProxy, Login,
OrgUnit, OrderedPerson, House)
# Can't run this test under SQLite, because you can't
# get two connections to an in-memory database.
class DeleteLockingTest(TransactionTestCase):
available_apps = ['delete_regress']
def setUp(self):
# Create a second connection to the default database
new_connections = ConnectionHandler(settings.DATABASES)
self.conn2 = new_connections[DEFAULT_DB_ALIAS]
# Put both DB connections into managed transaction mode
transaction.enter_transaction_management()
self.conn2.enter_transaction_management()
def tearDown(self):
# Close down the second connection.
transaction.leave_transaction_management()
self.conn2.abort()
self.conn2.close()
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_concurrent_delete(self):
"Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
# Create some dummy data
b1 = Book(id=1, pagecount=100)
b2 = Book(id=2, pagecount=200)
b3 = Book(id=3, pagecount=300)
b1.save()
b2.save()
b3.save()
transaction.commit()
self.assertEqual(3, Book.objects.count())
# Delete something using connection 2.
cursor2 = self.conn2.cursor()
cursor2.execute('DELETE from delete_regress_book WHERE id=1')
self.conn2._commit()
# Now perform a queryset delete that covers the object
# deleted in connection 2. This causes an infinite loop
# under MySQL InnoDB unless we keep track of already
# deleted objects.
Book.objects.filter(pagecount__lt=250).delete()
transaction.commit()
self.assertEqual(1, Book.objects.count())
transaction.commit()
class DeleteCascadeTests(TestCase):
def test_generic_relation_cascade(self):
"""
Django cascades deletes through generic-related objects to their
reverse relations.
"""
person = Person.objects.create(name='Nelson Mandela')
award = Award.objects.create(name='Nobel', content_object=person)
AwardNote.objects.create(note='a peace prize',
award=award)
self.assertEqual(AwardNote.objects.count(), 1)
person.delete()
self.assertEqual(Award.objects.count(), 0)
# first two asserts are just sanity checks, this is the kicker:
self.assertEqual(AwardNote.objects.count(), 0)
def test_fk_to_m2m_through(self):
"""
If an M2M relationship has an explicitly-specified through model, and
some other model has an FK to that through model, deletion is cascaded
from one of the participants in the M2M, to the through model, to its
related model.
"""
juan = Child.objects.create(name='Juan')
paints = Toy.objects.create(name='Paints')
played = PlayedWith.objects.create(child=juan, toy=paints,
date=datetime.date.today())
PlayedWithNote.objects.create(played=played,
note='the next Jackson Pollock')
self.assertEqual(PlayedWithNote.objects.count(), 1)
paints.delete()
self.assertEqual(PlayedWith.objects.count(), 0)
# first two asserts just sanity checks, this is the kicker:
self.assertEqual(PlayedWithNote.objects.count(), 0)
def test_15776(self):
policy = Policy.objects.create(pk=1, policy_number="1234")
version = Version.objects.create(policy=policy)
location = Location.objects.create(version=version)
Item.objects.create(version=version, location=location)
policy.delete()
class DeleteCascadeTransactionTests(TransactionTestCase):
available_apps = ['delete_regress']
def test_inheritance(self):
"""
Auto-created many-to-many through tables referencing a parent model are
correctly found by the delete cascade when a child of that parent is
deleted.
Refs #14896.
"""
r = Researcher.objects.create()
email = Email.objects.create(
label="office-email", email_address="carl@science.edu"
)
r.contacts.add(email)
email.delete()
def test_to_field(self):
"""
Cascade deletion works with ForeignKey.to_field set to non-PK.
"""
apple = Food.objects.create(name="apple")
Eaten.objects.create(food=apple, meal="lunch")
apple.delete()
self.assertFalse(Food.objects.exists())
self.assertFalse(Eaten.objects.exists())
class LargeDeleteTests(TestCase):
def test_large_deletes(self):
"Regression for #13309 -- if the number of objects > chunk size, deletion still occurs"
for x in range(300):
Book.objects.create(pagecount=x + 100)
# attach a signal to make sure we will not fast-delete
def noop(*args, **kwargs):
pass
models.signals.post_delete.connect(noop, sender=Book)
Book.objects.all().delete()
models.signals.post_delete.disconnect(noop, sender=Book)
self.assertEqual(Book.objects.count(), 0)
class ProxyDeleteTest(TestCase):
"""
Tests on_delete behavior for proxy models.
See #16128.
"""
def create_image(self):
"""Return an Image referenced by both a FooImage and a FooFile."""
# Create an Image
test_image = Image()
test_image.save()
foo_image = FooImage(my_image=test_image)
foo_image.save()
# Get the Image instance as a File
test_file = File.objects.get(pk=test_image.pk)
foo_file = FooFile(my_file=test_file)
foo_file.save()
return test_image
def test_delete_proxy(self):
"""
Deleting the *proxy* instance bubbles through to its non-proxy and
*all* referring objects are deleted.
"""
self.create_image()
Image.objects.all().delete()
# An Image deletion == File deletion
self.assertEqual(len(Image.objects.all()), 0)
self.assertEqual(len(File.objects.all()), 0)
# The Image deletion cascaded and *all* references to it are deleted.
self.assertEqual(len(FooImage.objects.all()), 0)
self.assertEqual(len(FooFile.objects.all()), 0)
def test_delete_proxy_of_proxy(self):
"""
Deleting a proxy-of-proxy instance should bubble through to its proxy
and non-proxy parents, deleting *all* referring objects.
"""
test_image = self.create_image()
# Get the Image as a Photo
test_photo = Photo.objects.get(pk=test_image.pk)
foo_photo = FooPhoto(my_photo=test_photo)
foo_photo.save()
Photo.objects.all().delete()
# A Photo deletion == Image deletion == File deletion
self.assertEqual(len(Photo.objects.all()), 0)
self.assertEqual(len(Image.objects.all()), 0)
self.assertEqual(len(File.objects.all()), 0)
# The Photo deletion should have cascaded and deleted *all*
# references to it.
self.assertEqual(len(FooPhoto.objects.all()), 0)
self.assertEqual(len(FooFile.objects.all()), 0)
self.assertEqual(len(FooImage.objects.all()), 0)
def test_delete_concrete_parent(self):
"""
Deleting an instance of a concrete model should also delete objects
referencing its proxy subclass.
"""
self.create_image()
File.objects.all().delete()
# A File deletion == Image deletion
self.assertEqual(len(File.objects.all()), 0)
self.assertEqual(len(Image.objects.all()), 0)
# The File deletion should have cascaded and deleted *all* references
# to it.
self.assertEqual(len(FooFile.objects.all()), 0)
self.assertEqual(len(FooImage.objects.all()), 0)
def test_delete_proxy_pair(self):
"""
If a pair of proxy models are linked by an FK from one concrete parent
to the other, deleting one proxy model cascade-deletes the other, and
the deletion happens in the right order (not triggering an
IntegrityError on databases unable to defer integrity checks).
Refs #17918.
"""
# Create an Image (proxy of File) and FooFileProxy (proxy of FooFile,
# which has an FK to File)
image = Image.objects.create()
as_file = File.objects.get(pk=image.pk)
FooFileProxy.objects.create(my_file=as_file)
Image.objects.all().delete()
self.assertEqual(len(FooFileProxy.objects.all()), 0)
def test_19187_values(self):
with self.assertRaises(TypeError):
Image.objects.values().delete()
with self.assertRaises(TypeError):
Image.objects.values_list().delete()
class Ticket19102Tests(TestCase):
"""
Test different queries which alter the SELECT clause of the query. We
also must be using a subquery for the deletion (that is, the original
query has a join in it). The deletion should be done as "fast-path"
deletion (that is, just one query for the .delete() call).
Note that .values() is not tested here on purpose. .values().delete()
doesn't work for non fast-path deletes at all.
"""
def setUp(self):
self.o1 = OrgUnit.objects.create(name='o1')
self.o2 = OrgUnit.objects.create(name='o2')
self.l1 = Login.objects.create(description='l1', orgunit=self.o1)
self.l2 = Login.objects.create(description='l2', orgunit=self.o2)
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_annotate(self):
with self.assertNumQueries(1):
Login.objects.order_by('description').filter(
orgunit__name__isnull=False
).annotate(
n=models.Count('description')
).filter(
n=1, pk=self.l1.pk
).delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_extra(self):
with self.assertNumQueries(1):
Login.objects.order_by('description').filter(
orgunit__name__isnull=False
).extra(
select={'extraf': '1'}
).filter(
pk=self.l1.pk
).delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
@skipUnlessDBFeature('can_distinct_on_fields')
def test_ticket_19102_distinct_on(self):
# Both Login objects should have the same description so that only the one
# with the smaller PK will be deleted.
Login.objects.update(description='description')
with self.assertNumQueries(1):
Login.objects.distinct('description').order_by('pk').filter(
orgunit__name__isnull=False
).delete()
# Assumes that l1, which was created first, has the smaller PK.
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_select_related(self):
with self.assertNumQueries(1):
Login.objects.filter(
pk=self.l1.pk
).filter(
orgunit__name__isnull=False
).order_by(
'description'
).select_related('orgunit').delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_defer(self):
with self.assertNumQueries(1):
Login.objects.filter(
pk=self.l1.pk
).filter(
orgunit__name__isnull=False
).order_by(
'description'
).only('id').delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
class OrderedDeleteTests(TestCase):
def test_meta_ordered_delete(self):
# When a subquery is performed by deletion code, the subquery must be
# cleared of all ordering. There was a bug that caused _meta ordering
# to be used. Refs #19720.
h = House.objects.create(address='Foo')
OrderedPerson.objects.create(name='Jack', lives_in=h)
OrderedPerson.objects.create(name='Bob', lives_in=h)
OrderedPerson.objects.filter(lives_in__address='Foo').delete()
self.assertEqual(OrderedPerson.objects.count(), 0)
|
massot/odoo
|
refs/heads/8.0
|
addons/l10n_sg/__init__.py
|
669
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Tech Receptives (<http://techreceptives.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kensho-technologies/graphql-compiler
|
refs/heads/main
|
graphql_compiler/tests/schema_generation_tests/__init__.py
|
7
|
# Copyright 2019-present Kensho Technologies, LLC.
|
ximion/dak-dep11
|
refs/heads/master
|
dak/dakdb/update49.py
|
6
|
#!/usr/bin/env python
# coding=utf8
"""
Permission fixups
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2011 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from socket import gethostname
################################################################################
def do_update(self):
"""
Fix up permissions
"""
print __doc__
try:
c = self.db.cursor()
c.execute("GRANT SELECT, UPDATE, INSERT ON binaries_metadata TO ftpmaster")
c.execute("GRANT SELECT ON binaries_metadata TO public")
c.execute("GRANT USAGE ON metadata_keys_key_id_seq TO ftpmaster")
c.execute("GRANT SELECT, UPDATE, INSERT ON source_metadata TO ftpmaster")
c.execute("GRANT SELECT ON source_metadata TO public")
c.execute("GRANT SELECT, UPDATE, INSERT ON metadata_keys TO ftpmaster")
c.execute("GRANT SELECT ON metadata_keys TO public")
c.execute("GRANT SELECT, UPDATE, INSERT ON extra_src_references TO ftpmaster")
c.execute("GRANT SELECT ON extra_src_references TO public")
c.execute("GRANT SELECT, UPDATE, INSERT ON src_contents TO ftpmaster")
c.execute("GRANT SELECT ON src_contents TO public")
c.execute("GRANT USAGE ON changelogs_text_id_seq TO ftpmaster")
c.execute("GRANT SELECT ON changes_pending_files_map TO public")
c.execute("GRANT SELECT ON config TO public")
c.execute("UPDATE config SET value = '49' WHERE name = 'db_revision'")
self.db.commit()
except psycopg2.ProgrammingError as msg:
self.db.rollback()
raise DBUpdateError('Unable to apply sick update 49, rollback issued. Error message : %s' % (str(msg)))
|
rpmcpp/Audacity
|
refs/heads/master
|
lib-src/lv2/lilv/waflib/Tools/gcc.py
|
64
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib import Configure,Options,Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_gcc(conf):
cc=conf.find_program(['gcc','cc'],var='CC')
cc=conf.cmd_to_list(cc)
conf.get_cc_version(cc,gcc=True)
conf.env.CC_NAME='gcc'
conf.env.CC=cc
@conf
def gcc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']='-Wl,-rpath,%s'
v['SONAME_ST']='-Wl,-h,%s'
v['SHLIB_MARKER']='-Wl,-Bdynamic'
v['STLIB_MARKER']='-Wl,-Bstatic'
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['-shared']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=['-Wl,-Bstatic']
v['cstlib_PATTERN']='lib%s.a'
v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
v['CFLAGS_MACBUNDLE']=['-fPIC']
v['macbundle_PATTERN']='%s.bundle'
@conf
def gcc_modifier_win32(conf):
v=conf.env
v['cprogram_PATTERN']='%s.exe'
v['cshlib_PATTERN']='%s.dll'
v['implib_PATTERN']='lib%s.dll.a'
v['IMPLIB_ST']='-Wl,--out-implib,%s'
v['CFLAGS_cshlib']=[]
v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
@conf
def gcc_modifier_cygwin(conf):
gcc_modifier_win32(conf)
v=conf.env
v['cshlib_PATTERN']='cyg%s.dll'
v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
v['CFLAGS_cshlib']=[]
@conf
def gcc_modifier_darwin(conf):
v=conf.env
v['CFLAGS_cshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
v['LINKFLAGS_cshlib']=['-dynamiclib']
v['cshlib_PATTERN']='lib%s.dylib'
v['FRAMEWORKPATH_ST']='-F%s'
v['FRAMEWORK_ST']=['-framework']
v['ARCH_ST']=['-arch']
v['LINKFLAGS_cstlib']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['SONAME_ST']=[]
@conf
def gcc_modifier_aix(conf):
v=conf.env
v['LINKFLAGS_cprogram']=['-Wl,-brtl']
v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull']
v['SHLIB_MARKER']=[]
@conf
def gcc_modifier_hpux(conf):
v=conf.env
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']='-Bstatic'
v['CFLAGS_cshlib']=['-fPIC','-DPIC']
v['cshlib_PATTERN']='lib%s.sl'
@conf
def gcc_modifier_platform(conf):
gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
if gcc_modifier_func:
gcc_modifier_func()
def configure(conf):
conf.find_gcc()
conf.find_ar()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
|
kosz85/django
|
refs/heads/master
|
tests/admin_utils/test_logentry.py
|
22
|
import json
from datetime import datetime
from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry
from django.contrib.admin.utils import quote
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from django.utils.encoding import force_bytes
from django.utils.html import escape
from .models import Article, ArticleProxy, Site
@override_settings(ROOT_URLCONF='admin_utils.urls')
class LogEntryTests(TestCase):
def setUp(self):
self.user = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
self.site = Site.objects.create(domain='example.org')
self.a1 = Article.objects.create(
site=self.site,
title="Title",
created=datetime(2008, 3, 12, 11, 54),
)
content_type_pk = ContentType.objects.get_for_model(Article).pk
LogEntry.objects.log_action(
self.user.pk, content_type_pk, self.a1.pk, repr(self.a1), CHANGE,
change_message='Changed something'
)
self.client.force_login(self.user)
def test_logentry_save(self):
"""
LogEntry.action_time is a timestamp of the date when the entry was
created. It shouldn't be updated on a subsequent save().
"""
logentry = LogEntry.objects.get(content_type__model__iexact="article")
action_time = logentry.action_time
logentry.save()
self.assertEqual(logentry.action_time, action_time)
def test_logentry_change_message(self):
"""
LogEntry.change_message is stored as a dumped JSON structure to be able
to get the message dynamically translated at display time.
"""
post_data = {
'site': self.site.pk, 'title': 'Changed', 'hist': 'Some content',
'created_0': '2008-03-12', 'created_1': '11:54',
}
change_url = reverse('admin:admin_utils_article_change', args=[quote(self.a1.pk)])
response = self.client.post(change_url, post_data)
self.assertRedirects(response, reverse('admin:admin_utils_article_changelist'))
logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id')
self.assertEqual(logentry.get_change_message(), 'Changed title and hist.')
with translation.override('fr'):
self.assertEqual(logentry.get_change_message(), 'Modification de title et hist.')
add_url = reverse('admin:admin_utils_article_add')
post_data['title'] = 'New'
response = self.client.post(add_url, post_data)
self.assertRedirects(response, reverse('admin:admin_utils_article_changelist'))
logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id')
self.assertEqual(logentry.get_change_message(), 'Added.')
with translation.override('fr'):
self.assertEqual(logentry.get_change_message(), 'Ajout.')
def test_logentry_change_message_not_json(self):
"""LogEntry.change_message was a string before Django 1.10."""
logentry = LogEntry(change_message='non-JSON string')
self.assertEqual(logentry.get_change_message(), logentry.change_message)
@override_settings(USE_L10N=True)
def test_logentry_change_message_localized_datetime_input(self):
"""
Localized date/time inputs shouldn't affect changed form data detection.
"""
post_data = {
'site': self.site.pk, 'title': 'Changed', 'hist': 'Some content',
'created_0': '12/03/2008', 'created_1': '11:54',
}
with translation.override('fr'):
change_url = reverse('admin:admin_utils_article_change', args=[quote(self.a1.pk)])
response = self.client.post(change_url, post_data)
self.assertRedirects(response, reverse('admin:admin_utils_article_changelist'))
logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id')
self.assertEqual(logentry.get_change_message(), 'Changed title and hist.')
def test_logentry_change_message_formsets(self):
"""
All messages for changed formsets are logged in a change message.
"""
a2 = Article.objects.create(
site=self.site,
title="Title second article",
created=datetime(2012, 3, 18, 11, 54),
)
post_data = {
'domain': 'example.com', # domain changed
'admin_articles-TOTAL_FORMS': '5',
'admin_articles-INITIAL_FORMS': '2',
'admin_articles-MIN_NUM_FORMS': '0',
'admin_articles-MAX_NUM_FORMS': '1000',
# Changed title for 1st article
'admin_articles-0-id': str(self.a1.pk),
'admin_articles-0-site': str(self.site.pk),
'admin_articles-0-title': 'Changed Title',
# Second article is deleted
'admin_articles-1-id': str(a2.pk),
'admin_articles-1-site': str(self.site.pk),
'admin_articles-1-title': 'Title second article',
'admin_articles-1-DELETE': 'on',
# A new article is added
'admin_articles-2-site': str(self.site.pk),
'admin_articles-2-title': 'Added article',
}
change_url = reverse('admin:admin_utils_site_change', args=[quote(self.site.pk)])
response = self.client.post(change_url, post_data)
self.assertRedirects(response, reverse('admin:admin_utils_site_changelist'))
self.assertQuerysetEqual(Article.objects.filter(pk=a2.pk), [])
logentry = LogEntry.objects.filter(content_type__model__iexact='site').latest('action_time')
self.assertEqual(
json.loads(logentry.change_message),
[
{"changed": {"fields": ["domain"]}},
{"added": {"object": "Added article", "name": "article"}},
{"changed": {"fields": ["title"], "object": "Changed Title", "name": "article"}},
{"deleted": {"object": "Title second article", "name": "article"}},
]
)
self.assertEqual(
logentry.get_change_message(),
'Changed domain. Added article "Added article". '
'Changed title for article "Changed Title". Deleted article "Title second article".'
)
with translation.override('fr'):
self.assertEqual(
logentry.get_change_message(),
"Modification de domain. Ajout de article « Added article ». "
"Modification de title pour l'objet article « Changed Title ». "
"Suppression de article « Title second article »."
)
def test_logentry_get_edited_object(self):
"""
LogEntry.get_edited_object() returns the edited object of a LogEntry
object.
"""
logentry = LogEntry.objects.get(content_type__model__iexact="article")
edited_obj = logentry.get_edited_object()
self.assertEqual(logentry.object_id, str(edited_obj.pk))
def test_logentry_get_admin_url(self):
"""
LogEntry.get_admin_url returns a URL to edit the entry's object or
None for nonexistent (possibly deleted) models.
"""
logentry = LogEntry.objects.get(content_type__model__iexact='article')
expected_url = reverse('admin:admin_utils_article_change', args=(quote(self.a1.pk),))
self.assertEqual(logentry.get_admin_url(), expected_url)
self.assertIn('article/%d/change/' % self.a1.pk, logentry.get_admin_url())
logentry.content_type.model = "nonexistent"
self.assertIsNone(logentry.get_admin_url())
def test_logentry_unicode(self):
log_entry = LogEntry()
log_entry.action_flag = ADDITION
self.assertTrue(str(log_entry).startswith('Added '))
log_entry.action_flag = CHANGE
self.assertTrue(str(log_entry).startswith('Changed '))
log_entry.action_flag = DELETION
self.assertTrue(str(log_entry).startswith('Deleted '))
# Make sure custom action_flags works
log_entry.action_flag = 4
self.assertEqual(str(log_entry), 'LogEntry Object')
def test_logentry_repr(self):
logentry = LogEntry.objects.first()
self.assertEqual(repr(logentry), str(logentry.action_time))
def test_log_action(self):
content_type_pk = ContentType.objects.get_for_model(Article).pk
log_entry = LogEntry.objects.log_action(
self.user.pk, content_type_pk, self.a1.pk, repr(self.a1), CHANGE,
change_message='Changed something else',
)
self.assertEqual(log_entry, LogEntry.objects.latest('id'))
def test_recentactions_without_content_type(self):
"""
If a LogEntry is missing content_type, it will not be displayed in a
span tag under the hyperlink.
"""
response = self.client.get(reverse('admin:index'))
link = reverse('admin:admin_utils_article_change', args=(quote(self.a1.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(repr(self.a1)))
self.assertContains(response, should_contain)
should_contain = "Article"
self.assertContains(response, should_contain)
logentry = LogEntry.objects.get(content_type__model__iexact='article')
# If the log entry doesn't have a content type it should still be
# possible to view the Recent Actions part (#10275).
logentry.content_type = None
logentry.save()
counted_presence_before = response.content.count(force_bytes(should_contain))
response = self.client.get(reverse('admin:index'))
counted_presence_after = response.content.count(force_bytes(should_contain))
self.assertEqual(counted_presence_before - 1, counted_presence_after)
def test_proxy_model_content_type_is_used_for_log_entries(self):
"""
Log entries for proxy models should have the proxy model's contenttype
(#21084).
"""
proxy_content_type = ContentType.objects.get_for_model(ArticleProxy, for_concrete_model=False)
post_data = {
'site': self.site.pk, 'title': "Foo", 'hist': "Bar",
'created_0': '2015-12-25', 'created_1': '00:00',
}
changelist_url = reverse('admin:admin_utils_articleproxy_changelist')
# add
proxy_add_url = reverse('admin:admin_utils_articleproxy_add')
response = self.client.post(proxy_add_url, post_data)
self.assertRedirects(response, changelist_url)
proxy_addition_log = LogEntry.objects.latest('id')
self.assertEqual(proxy_addition_log.action_flag, ADDITION)
self.assertEqual(proxy_addition_log.content_type, proxy_content_type)
# change
article_id = proxy_addition_log.object_id
proxy_change_url = reverse('admin:admin_utils_articleproxy_change', args=(article_id,))
post_data['title'] = 'New'
response = self.client.post(proxy_change_url, post_data)
self.assertRedirects(response, changelist_url)
proxy_change_log = LogEntry.objects.latest('id')
self.assertEqual(proxy_change_log.action_flag, CHANGE)
self.assertEqual(proxy_change_log.content_type, proxy_content_type)
# delete
proxy_delete_url = reverse('admin:admin_utils_articleproxy_delete', args=(article_id,))
response = self.client.post(proxy_delete_url, {'post': 'yes'})
self.assertRedirects(response, changelist_url)
proxy_delete_log = LogEntry.objects.latest('id')
self.assertEqual(proxy_delete_log.action_flag, DELETION)
self.assertEqual(proxy_delete_log.content_type, proxy_content_type)
|
plotly/plotly.py
|
refs/heads/master
|
packages/python/plotly/plotly/validators/layout/scene/annotation/_startstandoff.py
|
1
|
import _plotly_utils.basevalidators
class StartstandoffValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name="startstandoff",
parent_name="layout.scene.annotation",
**kwargs
):
super(StartstandoffValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 0),
**kwargs
)
|
dishants/Land-Records-on-Blockchain
|
refs/heads/master
|
testing.py
|
1
|
import pyqtree
import random, time
class Item:
def __init__(self, x, y):
left = x-1
right = x+1
top = y-1
bottom = y+1
self.bbox = [left,top,right,bottom]
#setup and populate index
items = [Item(random.randrange(5,95),random.randrange(5,95)) for _ in range(10000)]
spindex = pyqtree.Index(bbox=[-11,-33,100,100])
for item in items:
spindex.insert(item, item.bbox)
print("{0} members in this index.".format(len(spindex)))
#test intersection
print("testing hit")
testitem = (51,51,86,86)
t = time.time()
matches = spindex.intersect(testitem)
print("{0} seconds".format(time.time()-t))
#test countmembers()
# trivial list of items
items = [Item(0.5, 0.5), Item(-0.5, 0.5), Item(-0.5, -0.5), Item(0.5, -0.5)]
# populate: maxindex=3, so we must split
spindex = pyqtree.Index(bbox=[-1, -1, 1, 1], max_items=3)
for item in items:
spindex.insert(item, item.bbox)
# check result
members = len(spindex)
assert(members == 4)
print("{0} nodes in this Index.".format(members))
|
Philips14171/qt-creator-opensource-src-4.2.1
|
refs/heads/master
|
tests/system/suite_debugger/tst_qml_js_console/test.py
|
1
|
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
source("../../shared/qtcreator.py")
def typeToDebuggerConsole(expression):
editableIndex = getQModelIndexStr("text=''",
":DebugModeWidget_Debugger::Internal::ConsoleView")
mouseClick(editableIndex, 5, 5, 0, Qt.LeftButton)
type(waitForObject(":Debugger::Internal::ConsoleEdit"), expression)
type(waitForObject(":Debugger::Internal::ConsoleEdit"), "<Return>")
def useDebuggerConsole(expression, expectedOutput, check=None, checkOutp=None):
typeToDebuggerConsole(expression)
if expectedOutput == None:
result = getQmlJSConsoleOutput()[-1]
clickButton(":*Qt Creator.Clear_QToolButton")
return result
expected = getQModelIndexStr("text='%s'" % expectedOutput,
":DebugModeWidget_Debugger::Internal::ConsoleView")
try:
obj = waitForObject(expected, 3000)
test.compare(obj.text, expectedOutput, "Verifying whether expected output appeared.")
except:
test.fail("Expected output (%s) missing - got '%s'."
% (expectedOutput, getQmlJSConsoleOutput()[-1]))
clickButton(":*Qt Creator.Clear_QToolButton")
if check:
if checkOutp == None:
checkOutp = expectedOutput
useDebuggerConsole(check, checkOutp)
def debuggerHasStopped():
stopDebugger = findObject(":Debugger Toolbar.Exit Debugger_QToolButton")
fancyDebugButton = findObject(":*Qt Creator.Start Debugging_Core::Internal::FancyToolButton")
result = test.verify(not stopDebugger.enabled and fancyDebugButton.enabled,
"Verifying whether debugger buttons are in correct state.")
ensureChecked(":Qt Creator_AppOutput_Core::Internal::OutputPaneToggleButton")
output = waitForObject("{type='Core::OutputWindow' visible='1' "
"windowTitle='Application Output Window'}")
result &= test.verify(waitFor("'Debugging has finished' in str(output.plainText)", 2000),
"Verifying whether Application output contains 'Debugging has finished'.")
return result
def getQmlJSConsoleOutput():
try:
result = []
consoleView = waitForObject(":DebugModeWidget_Debugger::Internal::ConsoleView")
model = consoleView.model()
# old input, output, new input > 2
waitFor("model.rowCount() > 2", 2000)
return dumpItems(model)[:-1]
except:
return [""]
def runChecks(elementProps, parent, checks):
mouseClick(getQModelIndexStr(elementProps, parent), 5, 5, 0, Qt.LeftButton)
for check in checks:
useDebuggerConsole(*check)
def testLoggingFeatures():
expressions = ("console.log('info message'); console.info('info message2'); console.debug()",
'console.warn("warning message")',
"console.error('error message')")
expected = (["info message", "info message2", "", "<undefined>"],
["warning message", "<undefined>"],
["error message", "<undefined>"])
filterToolTips = ("Show debug, log, and info messages.",
"Show warning messages.",
"Show error messages.",
)
for expression, expect, tooltip in zip(expressions, expected, filterToolTips):
typeToDebuggerConsole(expression)
output = getQmlJSConsoleOutput()[1:]
test.compare(output, expect, "Verifying expected output.")
filterButton = waitForObject("{container=':Qt Creator.DebugModeWidget_QSplitter' "
"toolTip='%s' type='QToolButton' unnamed='1' visible='1'}"
% tooltip)
ensureChecked(filterButton, False)
output = getQmlJSConsoleOutput()[1:]
test.compare(output, ["<undefined>"], "Verifying expected filtered output.")
ensureChecked(filterButton, True)
output = getQmlJSConsoleOutput()[1:]
test.compare(output, expect, "Verifying unfiltered output is displayed again.")
clickButton(":*Qt Creator.Clear_QToolButton")
def main():
test.warning("This test must be rewritten (QTCREATORBUG-15831)") # QmlJS Console has changed
return
projName = "simpleQuickUI2.qmlproject"
projFolder = os.path.dirname(findFile("testdata", "simpleQuickUI2/%s" % projName))
if not neededFilePresent(os.path.join(projFolder, projName)):
return
qmlProjDir = prepareTemplate(projFolder)
if qmlProjDir == None:
test.fatal("Could not prepare test files - leaving test")
return
qmlProjFile = os.path.join(qmlProjDir, projName)
# start Creator by passing a .qmlproject file
startApplication('qtcreator' + SettingsPath + ' "%s"' % qmlProjFile)
if not startedWithoutPluginError():
return
# if Debug is enabled - 1 valid kit is assigned - real check for this is done in tst_qml_locals
fancyDebugButton = findObject(":*Qt Creator.Start Debugging_Core::Internal::FancyToolButton")
if test.verify(waitFor('fancyDebugButton.enabled', 5000), "Start Debugging is enabled."):
# make sure QML Debugging is enabled
switchViewTo(ViewConstants.PROJECTS)
switchToBuildOrRunSettingsFor(1, 0, ProjectSettings.RUN)
ensureChecked("{container=':Qt Creator.scrollArea_QScrollArea' text='Enable QML' "
"type='QCheckBox' unnamed='1' visible='1'}")
switchViewTo(ViewConstants.EDIT)
# start debugging
clickButton(fancyDebugButton)
locAndExprTV = waitForObject(":Locals and Expressions_Debugger::Internal::WatchTreeView")
rootIndex = getQModelIndexStr("text='Rectangle'",
":Locals and Expressions_Debugger::Internal::WatchTreeView")
# make sure the items inside the root item are visible
doubleClick(waitForObject(rootIndex))
if not object.exists(":DebugModeWidget_Debugger::Internal::ConsoleView"):
invokeMenuItem("Window", "Output Panes", "Debugger Console")
progressBarWait()
# color and float values have additional ZERO WIDTH SPACE (\u200b), different usage of
# whitespaces inside expressions is part of the test
checks = [("color", u"#\u200b008000"), ("width", "50"),
("color ='silver'", "silver", "color", u"#\u200bc0c0c0"),
("width=66", "66", "width"), ("anchors.centerIn", "<unnamed object>"),
("opacity", "1"), ("opacity = .1875", u"0.\u200b1875", "opacity")]
# check red inner Rectangle
runChecks("text='Rectangle' occurrence='2'", rootIndex, checks)
checks = [("color", u"#\u200bff0000"), ("width", "100"), ("height", "100"),
("radius = Math.min(width, height) / 2", "50", "radius"),
("parent.objectName= 'mainRect'", "mainRect")]
# check green inner Rectangle
runChecks("text='Rectangle'", rootIndex, checks)
checks = [("color", u"#\u200b000000"), ("font.pointSize=14", "14", "font.pointSize"),
("font.bold", "false"), ("font.weight=Font.Bold", "75", "font.bold", "true"),
("rotation", "0"), ("rotation = 180", "180", "rotation")]
# check Text element
runChecks("text='Text'", rootIndex, checks)
# extended check must be done separately
originalVal = useDebuggerConsole("x", None)
if originalVal:
# Text element uses anchors.centerIn, so modification of x should not do anything
useDebuggerConsole("x=0", "0", "x", originalVal)
useDebuggerConsole("anchors.centerIn", "mainRect")
# ignore output as it has none
useDebuggerConsole("anchors.centerIn = null", None)
useDebuggerConsole("x = 0", "0", "x")
testLoggingFeatures()
test.log("Calling Qt.quit() from inside Qml/JS Console - inferior should quit.")
useDebuggerConsole("Qt.quit()", "<undefined>")
if not debuggerHasStopped():
__stopDebugger__()
invokeMenuItem("File", "Exit")
|
unho/pootle
|
refs/heads/master
|
pootle/apps/pootle_project/migrations/0012_set_pootle_fs_treestyle.py
|
8
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-06 13:51
from __future__ import unicode_literals
from django.db import migrations
def set_pootle_fs_style(apps, schema_editor):
Project = apps.get_model('pootle_project', 'Project')
for project in Project.objects.filter(treestyle='none'):
project.treestyle = 'pootle_fs'
project.save()
class Migration(migrations.Migration):
dependencies = [
('pootle_project', '0011_add_project_checker_validator'),
]
operations = [
migrations.RunPython(set_pootle_fs_style),
]
|
catapult-project/catapult
|
refs/heads/master
|
third_party/cloudstorage/cloudstorage/test_utils.py
|
248
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Utils for testing."""
class MockUrlFetchResult(object):
def __init__(self, status, headers, body):
self.status_code = status
self.headers = headers
self.content = body
self.content_was_truncated = False
self.final_url = None
|
anhstudios/swganh
|
refs/heads/develop
|
data/scripts/templates/object/static/particle/shared_pt_flocking_bees.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/particle/shared_pt_flocking_bees.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
sephii/django
|
refs/heads/master
|
tests/migrations/test_migrations_backwards_deps_1/0002_second.py
|
416
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('migrations', '0001_initial')]
operations = []
|
telwertowski/Books-Mac-OS-X
|
refs/heads/master
|
Versions/Books_3.0b3/OPAC SBN.plugin/Contents/Resources/PyZ3950/SRWDiagnostics.py
|
30
|
# Base Class
class SRWDiagnostic (Exception):
""" Base Diagnostic Class"""
code = 0
uri = "info:srw/diagnostic/1/"
details = ""
message = ""
surrogate = 0
fatal = 1
def __str__(self):
return "%s [%s]: %s" % (self.uri, self.message, self.details)
# NB 'Need' name for serialization in SRW
def __init__(self, name=None):
if (self.code):
self.uri = "%s%d" % (self.uri, self.code)
Exception.__init__(self)
# Diagnostic Types
class GeneralDiagnostic (SRWDiagnostic):
pass
class CQLDiagnostic (SRWDiagnostic):
pass
class RecordDiagnostic (SRWDiagnostic):
pass
class ResultSetDiagnostic (SRWDiagnostic):
pass
class SortDiagnostic (SRWDiagnostic):
pass
class StyleDiagnostic (SRWDiagnostic):
pass
class ScanDiagnostic (SRWDiagnostic):
pass
class DeprecatedDiagnostic(SRWDiagnostic):
def __init__(self, name=None):
print "WARNING: Use of deprecated diagnostic %s" % (self)
SRWDiagnostic.__init__(self)
class ExplainDiagnostic (DeprecatedDiagnostic):
pass
# Rob's (empty) diagnostic set
class RobDiagnostic (SRWDiagnostic):
uri = "info:srw/diagnostic/2/"
# Individual Diagnostics
class Diagnostic1 (GeneralDiagnostic):
code = 1
message = "General system error"
class Diagnostic2 (GeneralDiagnostic):
code = 2
message = "System temporarily unavailable"
class Diagnostic3 (GeneralDiagnostic):
code = 3
message = "Authentication error"
class Diagnostic4 (GeneralDiagnostic):
code = 4
message = "Unsupported operation"
class Diagnostic5 (GeneralDiagnostic):
code = 5
message = "Unsupported version"
class Diagnostic6 (GeneralDiagnostic):
code = 6
message = "Unsupported parameter value"
class Diagnostic7 (GeneralDiagnostic):
code = 7
message = "Mandatory parameter not supplied"
class Diagnostic8 (GeneralDiagnostic):
code = 8
message = "Unknown parameter"
class Diagnostic10 (CQLDiagnostic):
code = 10
message = "Malformed query"
class Diagnostic13 (CQLDiagnostic):
code = 13
message = "Unsupported use of parentheses"
class Diagnostic14 (CQLDiagnostic):
code = 14
message = "Unsupported use of quotes"
class Diagnostic15 (CQLDiagnostic):
code = 15
message = "Unsupported context set"
class Diagnostic16 (CQLDiagnostic):
code = 16
message = "Unsupported index"
class Diagnostic18 (CQLDiagnostic):
code = 18
message = "Unsupported combination of indexes"
class Diagnostic19 (CQLDiagnostic):
code = 19
message = "Unsupported relation"
class Diagnostic20 (CQLDiagnostic):
code = 20
message = "Unsupported relation modifier"
class Diagnostic21 (CQLDiagnostic):
code = 21
message = "Unsupported combination of relation modifiers"
class Diagnostic22 (CQLDiagnostic):
code = 22
message = "Unsupported combination of relation and index"
class Diagnostic23 (CQLDiagnostic):
code = 23
message = "Too many characters in term"
class Diagnostic24 (CQLDiagnostic):
code = 24
message = "Unsupported combination of relation and term"
class Diagnostic26 (CQLDiagnostic):
code = 26
message = "Non special character escaped in term"
class Diagnostic27 (CQLDiagnostic):
code = 27
message = "Empty term unsupported"
class Diagnostic28 (CQLDiagnostic):
code = 28
message = "Masking character not supported"
class Diagnostic29 (CQLDiagnostic):
code = 29
message = "Masked words too short"
class Diagnostic30 (CQLDiagnostic):
code = 30
message = "Too many masking characters in term"
class Diagnostic31 (CQLDiagnostic):
code = 31
message = "Anchoring character not supported"
class Diagnostic32 (CQLDiagnostic):
code = 32
message = "Anchoring character in unsupported position."
class Diagnostic33 (CQLDiagnostic):
code = 33
message = "Combination of proximity/adjacency and masking characters not supported"
class Diagnostic34 (CQLDiagnostic):
code = 34
message = "Combination of proximity/adjacency and anchoring characters not supported"
class Diagnostic35 (CQLDiagnostic):
code = 35
message = "Term only stopwords"
class Diagnostic36 (CQLDiagnostic):
code = 36
message = "Term in invalid format for index or relation"
class Diagnostic37 (CQLDiagnostic):
code = 37
message = "Unsupported boolean operator"
class Diagnostic38 (CQLDiagnostic):
code = 38
message = "Too many boolean operators"
class Diagnostic39 (CQLDiagnostic):
code = 39
message = "Proximity not supported"
class Diagnostic40 (CQLDiagnostic):
code = 40
message = "Unsupported proximity relation"
class Diagnostic41 (CQLDiagnostic):
code = 41
message = "Unsupported proximity distance"
class Diagnostic42 (CQLDiagnostic):
code = 42
message = "Unsupported proximity unit"
class Diagnostic43 (CQLDiagnostic):
code = 43
message = "Unsupported proximity ordering"
class Diagnostic44 (CQLDiagnostic):
code = 44
message = "Unsupported combination of proximity modifiers"
class Diagnostic50 (ResultSetDiagnostic):
code = 50
message = "Result sets not supported"
class Diagnostic51 (ResultSetDiagnostic):
code = 51
message = "Result set does not exist"
class Diagnostic52 (ResultSetDiagnostic):
code = 52
message = "Result set temporarily unavailable"
class Diagnostic53 (ResultSetDiagnostic):
code = 53
message = "Result sets only supported for retrieval"
class Diagnostic55 (ResultSetDiagnostic):
code = 55
message = "Combination of result sets with search terms not supported"
class Diagnostic58 (ResultSetDiagnostic):
code = 58
message = "Result set created with unpredictable partial results available"
class Diagnostic59 (ResultSetDiagnostic):
code = 59
message = "Result set created with valid partial results available"
class Diagnostic60 (RecordDiagnostic):
code = 60
message = "Too many records retrieved"
class Diagnostic61 (RecordDiagnostic):
code = 61
message = "First record position out of range"
class Diagnostic64 (RecordDiagnostic):
code = 64
message = "Record temporarily unavailable"
surrogate = 1
class Diagnostic65 (RecordDiagnostic):
code = 65
message = "Record does not exist"
surrogate = 1
class Diagnostic66 (RecordDiagnostic):
code = 66
message = "Unknown schema for retrieval"
class Diagnostic67 (RecordDiagnostic):
code = 67
message = "Record not available in this schema"
surrogate = 1
class Diagnostic68 (RecordDiagnostic):
code = 68
message = "Not authorised to send record"
surrogate = 1
class Diagnostic69 (RecordDiagnostic):
code = 69
message = "Not authorised to send record in this schema"
surrogate = 1
class Diagnostic70 (RecordDiagnostic):
code = 70
message = "Record too large to send"
surrogate = 1
class Diagnostic71 (RecordDiagnostic):
code = 71
message = "Unsupported record packing"
class Diagnostic72 (RecordDiagnostic):
code = 72
message = "XPath retrieval unsupported"
class Diagnostic73 (RecordDiagnostic):
code = 73
message = "XPath expression contains unsupported feature"
class Diagnostic74 (RecordDiagnostic):
code = 74
message = "Unable to evaluate XPath expression"
class Diagnostic80 (SortDiagnostic):
code = 80
message = "Sort not supported"
class Diagnostic82 (SortDiagnostic):
code = 82
message = "Unsupported sort sequence"
class Diagnostic83 (SortDiagnostic):
code = 83
message = "Too many records to sort"
class Diagnostic84 (SortDiagnostic):
code = 84
message = "Too many sort keys"
class Diagnostic86 (SortDiagnostic):
code = 86
message = "Incompatible record formats"
class Diagnostic87 (SortDiagnostic):
code = 87
message = "Unsupported schema for sort"
class Diagnostic88 (SortDiagnostic):
code = 88
message = "Unsupported tag path for sort"
class Diagnostic89 (SortDiagnostic):
code = 89
message = "Tag path unsupported for schema"
class Diagnostic90 (SortDiagnostic):
code = 90
message = "Unsupported direction value"
class Diagnostic91 (SortDiagnostic):
code = 91
message = "Unsupported case value"
class Diagnostic92 (SortDiagnostic):
code = 92
message = "Unsupported missing value action"
class Diagnostic110 (StyleDiagnostic):
code = 110
message = "Stylesheets not supported"
class Diagnostic111 (StyleDiagnostic):
code = 111
message = "Unsupported stylesheet"
class Diagnostic120 (ScanDiagnostic):
code = 120
message = "Response position out of range"
class Diagnostic121 (ScanDiagnostic):
code = 121
message = "Too many terms requested"
# Deprecated diagnostics
class Diagnostic11 (DeprecatedDiagnostic):
code = 11
message = "Unsupported query type"
class Diagnostic12 (DeprecatedDiagnostic):
code = 12
message = "Too many characters in query"
class Diagnostic17 (DeprecatedDiagnostic):
code = 17
message = "Illegal or unsupported combination of index and index set."
class Diagnostic25 (DeprecatedDiagnostic):
code = 25
message = "Special characters not quoted in term"
class Diagnostic45 (DeprecatedDiagnostic):
code = 45
message = "Index set name (prefix) assigned to multiple identifiers"
class Diagnostic54 (DeprecatedDiagnostic):
code = 54
message = "Retrieval may only occur from an existing result set"
class Diagnostic56 (DeprecatedDiagnostic):
code = 56
message = "Only combination of single result set with search terms supported"
class Diagnostic57 (DeprecatedDiagnostic):
code = 57
message = "Result set created but no records available"
class Diagnostic62 (DeprecatedDiagnostic):
code = 62
message = "Negative number of records requested"
class Diagnostic63 (DeprecatedDiagnostic):
code = 63
message = "System error in retrieving records"
class Diagnostic81 (DeprecatedDiagnostic):
code = 81
message = "Unsupported sort type"
class Diagnostic85 (DeprecatedDiagnostic):
code = 85
message = "Duplicate sort keys"
class Diagnostic100 (ExplainDiagnostic):
code = 100
message = "Explain not supported"
class Diagnostic101 (ExplainDiagnostic):
code = 101
message = "Explain request type not supported"
class Diagnostic102 (ExplainDiagnostic):
code = 102
message = "Explain record temporarily unavailable"
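# A minimal usage sketch, not part of this module: raising one of the
# diagnostics above and rendering its SRW diagnostic URI. The index name put
# into `details` is invented for illustration.
def _example_diagnostic():
    try:
        raise Diagnostic16()
    except SRWDiagnostic as diag:
        diag.details = "dc.creator"
        # __init__ appended the numeric code to the base URI, so this returns
        # "info:srw/diagnostic/1/16 [Unsupported index]: dc.creator"
        return str(diag)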
|
lduarte1991/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/util/management/commands/reset_db.py
|
16
|
# -*- coding: utf-8 -*-
"""
reset_db
========
Django command to drop and recreate a database.
Useful when running tests against a database which may previously have
had different migrations applied to it.
This handles the one specific use case of the "reset_db" command from
django-extensions that we were actually using.
originally from http://www.djangosnippets.org/snippets/828/ by dnordberg
"""
import logging
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from six.moves import configparser
class Command(BaseCommand):
help = "Resets the database for this project."
def add_arguments(self, parser):
parser.add_argument(
'-R', '--router', action='store', dest='router', default='default',
help='Use this database router instead of the default defined in settings.py')
def handle(self, *args, **options):
"""
Resets the database for this project.
Note: Transaction wrappers are in reverse as a work around for
autocommit, anybody know how to do this the right way?
"""
router = options.get('router')
dbinfo = settings.DATABASES.get(router)
if dbinfo is None:
raise CommandError("Unknown database router %s" % router)
engine = dbinfo.get('ENGINE').split('.')[-1]
user = password = database_name = database_host = database_port = ''
if engine == 'mysql':
(user, password, database_name, database_host, database_port) = parse_mysql_cnf(dbinfo)
user = dbinfo.get('USER') or user
password = dbinfo.get('PASSWORD') or password
owner = user
database_name = dbinfo.get('NAME') or database_name
if database_name == '':
raise CommandError("You need to specify DATABASE_NAME in your Django settings file.")
database_host = dbinfo.get('HOST') or database_host
database_port = dbinfo.get('PORT') or database_port
verbosity = int(options.get('verbosity', 1))
if engine in ('sqlite3', 'spatialite'):
import os
try:
logging.info("Unlinking %s database" % engine)
os.unlink(database_name)
except OSError:
pass
elif engine in ('mysql',):
import MySQLdb as Database
kwargs = {
'user': user,
'passwd': password,
}
if database_host.startswith('/'):
kwargs['unix_socket'] = database_host
else:
kwargs['host'] = database_host
if database_port:
kwargs['port'] = int(database_port)
connection = Database.connect(**kwargs)
drop_query = 'DROP DATABASE IF EXISTS `%s`' % database_name
utf8_support = 'CHARACTER SET utf8'
create_query = 'CREATE DATABASE `%s` %s' % (database_name, utf8_support)
logging.info('Executing... "' + drop_query + '"')
connection.query(drop_query)
logging.info('Executing... "' + create_query + '"')
connection.query(create_query)
elif engine in ('postgresql', 'postgresql_psycopg2', 'postgis'):
if engine == 'postgresql' and django.VERSION < (1, 9):
import psycopg as Database # NOQA
elif engine in ('postgresql', 'postgresql_psycopg2', 'postgis'):
import psycopg2 as Database # NOQA
conn_params = {'database': 'template1'}
if user:
conn_params['user'] = user
if password:
conn_params['password'] = password
if database_host:
conn_params['host'] = database_host
if database_port:
conn_params['port'] = database_port
connection = Database.connect(**conn_params)
connection.set_isolation_level(0)  # 0 = ISOLATION_LEVEL_AUTOCOMMIT, needed to drop/create databases
cursor = connection.cursor()
drop_query = "DROP DATABASE \"%s\";" % database_name
logging.info('Executing... "' + drop_query + '"')
try:
cursor.execute(drop_query)
except Database.ProgrammingError as e:
logging.exception("Error: %s" % str(e))
create_query = "CREATE DATABASE \"%s\"" % database_name
if owner:
create_query += " WITH OWNER = \"%s\" " % owner
create_query += " ENCODING = 'UTF8'"
if engine == 'postgis' and django.VERSION < (1, 9):
# For PostGIS 1.5, fetch template name if it exists
from django.contrib.gis.db.backends.postgis.base import DatabaseWrapper
postgis_template = DatabaseWrapper(dbinfo).template_postgis
if postgis_template is not None:
create_query += ' TEMPLATE = %s' % postgis_template
if settings.DEFAULT_TABLESPACE:
create_query += ' TABLESPACE = %s;' % settings.DEFAULT_TABLESPACE
else:
create_query += ';'
logging.info('Executing... "' + create_query + '"')
cursor.execute(create_query)
else:
raise CommandError("Unknown database engine %s" % engine)
if verbosity >= 2:
print("Reset successful.")
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse the mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs,
so we have to emulate the behaviour.
Settings that are missing are returned as ''.
Returns (user, password, database_name, database_host, database_port).
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
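# Editor's note (hedged usage sketch, not part of the original file):
# parse_mysql_cnf() only consults OPTIONS['read_default_file'] in the database
# settings dict; everything else is ignored, and missing keys come back as ''
# rather than raising. For example:
#
#     dbinfo = {'OPTIONS': {'read_default_file': '~/.my.cnf'}}
#     user, password, name, host, port = parse_mysql_cnf(dbinfo)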
|
cloudera/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.11.29/tests/i18n/patterns/urls/default.py
|
384
|
from django.conf.urls import include, url
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = [
url(r'^not-prefixed/$', view, name='not-prefixed'),
url(r'^not-prefixed-include/', include('i18n.patterns.urls.included')),
url(_(r'^translated/$'), view, name='no-prefix-translated'),
url(_(r'^translated/(?P<slug>[\w-]+)/$'), view, name='no-prefix-translated-slug'),
]
urlpatterns += i18n_patterns(
url(r'^prefixed/$', view, name='prefixed'),
url(r'^prefixed\.xml$', view, name='prefixed_xml'),
url(_(r'^users/$'), view, name='users'),
url(_(r'^account/'), include('i18n.patterns.urls.namespace', namespace='account')),
)
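# Editor's note (hedged sketch, not part of the original test fixture): URLs
# declared inside i18n_patterns() above are served under a language prefix,
# while the first urlpatterns list is not. Assuming 'en' and 'nl' are enabled
# in the test settings, reversing behaves roughly like this:
#
#     from django.urls import reverse
#     from django.utils import translation
#     with translation.override('nl'):
#         reverse('prefixed')      # -> '/nl/prefixed/'
#         reverse('not-prefixed')  # -> '/not-prefixed/'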
|
erkanay/django
|
refs/heads/master
|
tests/many_to_one/tests.py
|
5
|
from copy import deepcopy
import datetime
from django.core.exceptions import MultipleObjectsReturned, FieldError
from django.db import models, transaction
from django.test import TestCase
from django.utils import six
from django.utils.translation import ugettext_lazy
from .models import (Article, Reporter, First, Third, Parent, Child,
ToFieldChild, Category, Record, Relation, School, Student)
class ManyToOneTests(TestCase):
def setUp(self):
# Create a few Reporters.
self.r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
self.r.save()
self.r2 = Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
self.r2.save()
# Create an Article.
self.a = Article(id=None, headline="This is a test",
pub_date=datetime.date(2005, 7, 27), reporter=self.r)
self.a.save()
def test_get(self):
# Article objects have access to their related Reporter objects.
r = self.a.reporter
self.assertEqual(r.id, self.r.id)
# These are strings instead of unicode strings because that's what was used in
# the creation of this reporter (and we haven't refreshed the data from the
# database, which always returns unicode strings).
self.assertEqual((r.first_name, self.r.last_name), ('John', 'Smith'))
def test_create(self):
# You can also instantiate an Article by passing the Reporter's ID
# instead of a Reporter object.
a3 = Article(id=None, headline="Third article",
pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
a3.save()
self.assertEqual(a3.reporter.id, self.r.id)
# Similarly, the reporter ID can be a string.
a4 = Article(id=None, headline="Fourth article",
pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
a4.save()
self.assertEqual(repr(a4.reporter), "<Reporter: John Smith>")
def test_add(self):
# Create an Article via the Reporter object.
new_article = self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.assertEqual(repr(new_article), "<Article: John's second story>")
self.assertEqual(new_article.reporter.id, self.r.id)
# Create a new article, and add it to the article set.
new_article2 = Article(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
self.r.article_set.add(new_article2)
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
# Add the same article to a different article set - check that it moves.
self.r2.article_set.add(new_article2)
self.assertEqual(new_article2.reporter.id, self.r2.id)
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Adding an object of the wrong type raises TypeError.
with transaction.atomic():
with six.assertRaisesRegex(self, TypeError,
"'Article' instance expected, got <Reporter.*"):
self.r.article_set.add(self.r2)
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
def test_assign(self):
new_article = self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
new_article2 = self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
# Assign the article to the reporter directly using the descriptor.
new_article2.reporter = self.r
new_article2.save()
self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), [])
# Set the article back again using set descriptor.
self.r2.article_set = [new_article, new_article2]
self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
self.assertQuerysetEqual(self.r2.article_set.all(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
])
# Funny case - assignment notation can only go so far; because the
# ForeignKey cannot be null, existing members of the set must remain.
self.r.article_set = [new_article]
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Reporter cannot be null - there should not be a clear or remove method
self.assertFalse(hasattr(self.r2.article_set, 'remove'))
self.assertFalse(hasattr(self.r2.article_set, 'clear'))
def test_selects(self):
self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
# Reporter objects have access to their related Article objects.
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r.article_set.filter(headline__startswith='This'),
["<Article: This is a test>"])
self.assertEqual(self.r.article_set.count(), 2)
self.assertEqual(self.r2.article_set.count(), 1)
# Get articles by id
self.assertQuerysetEqual(Article.objects.filter(id__exact=self.a.id),
["<Article: This is a test>"])
self.assertQuerysetEqual(Article.objects.filter(pk=self.a.id),
["<Article: This is a test>"])
# Query on an article property
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='This'),
["<Article: This is a test>"])
# The API automatically follows relationships as far as you need.
# Use double underscores to separate relationships.
# This works as many levels deep as you want. There's no limit.
# Find all Articles for any Reporter whose first name is "John".
self.assertQuerysetEqual(Article.objects.filter(reporter__first_name__exact='John'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Check that implied __exact also works
self.assertQuerysetEqual(Article.objects.filter(reporter__first_name='John'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Query twice over the related field.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John',
reporter__last_name__exact='Smith'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# The underlying query only makes one join when a related table is referenced twice.
queryset = Article.objects.filter(reporter__first_name__exact='John',
reporter__last_name__exact='Smith')
self.assertNumQueries(1, list, queryset)
self.assertEqual(queryset.query.get_compiler(queryset.db).as_sql()[0].count('INNER JOIN'), 1)
# The automatically joined table has a predictable name.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John').extra(
where=["many_to_one_reporter.last_name='Smith'"]),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# ... and should work fine with the unicode that comes out of forms.Form.cleaned_data
self.assertQuerysetEqual(
(Article.objects
.filter(reporter__first_name__exact='John')
.extra(where=["many_to_one_reporter.last_name='%s'" % 'Smith'])),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Find all Articles for a Reporter.
# Use direct ID check, pk check, and object comparison
self.assertQuerysetEqual(
Article.objects.filter(reporter__id__exact=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__pk=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r.id, self.r2.id]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r, self.r2]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
# You can also use a queryset instead of a literal list of instances.
# The queryset must be reduced to a list of values using values(),
# then converted into a query
self.assertQuerysetEqual(
Article.objects.filter(
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
).distinct(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
def test_reverse_selects(self):
a3 = Article.objects.create(id=None, headline="Third article",
pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
Article.objects.create(id=None, headline="Fourth article",
pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
# Reporters can be queried
self.assertQuerysetEqual(Reporter.objects.filter(id__exact=self.r.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(pk=self.r.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(first_name__startswith='John'),
["<Reporter: John Smith>"])
# Reporters can query in opposite direction of ForeignKey definition
self.assertQuerysetEqual(Reporter.objects.filter(article__id__exact=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article__pk=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a.id, a3.id]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a.id, a3]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a, a3]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__headline__startswith='T'),
["<Reporter: John Smith>", "<Reporter: John Smith>"],
ordered=False
)
self.assertQuerysetEqual(
Reporter.objects.filter(article__headline__startswith='T').distinct(),
["<Reporter: John Smith>"])
# Counting in the opposite direction works in conjunction with distinct()
self.assertEqual(
Reporter.objects.filter(article__headline__startswith='T').count(), 2)
self.assertEqual(
Reporter.objects.filter(article__headline__startswith='T').distinct().count(), 1)
# Queries can go round in circles.
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John'),
[
"<Reporter: John Smith>",
"<Reporter: John Smith>",
"<Reporter: John Smith>",
],
ordered=False
)
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__exact=self.r).distinct(),
["<Reporter: John Smith>"])
# Check that implied __exact also works.
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter=self.r).distinct(),
["<Reporter: John Smith>"])
# It's possible to use values() calls across many-to-one relations.
# (Note, too, that we clear the ordering here so as not to drag the
# 'headline' field into the columns being used to determine uniqueness)
d = {'reporter__first_name': 'John', 'reporter__last_name': 'Smith'}
self.assertEqual([d],
list(Article.objects.filter(reporter=self.r).distinct().order_by()
.values('reporter__first_name', 'reporter__last_name')))
def test_select_related(self):
# Check that Article.objects.select_related().dates() works properly when
# there are multiple Articles with the same date but different foreign-key
# objects (Reporters).
r1 = Reporter.objects.create(first_name='Mike', last_name='Royko', email='royko@suntimes.com')
r2 = Reporter.objects.create(first_name='John', last_name='Kass', email='jkass@tribune.com')
Article.objects.create(headline='First', pub_date=datetime.date(1980, 4, 23), reporter=r1)
Article.objects.create(headline='Second', pub_date=datetime.date(1980, 4, 23), reporter=r2)
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'day')),
[
datetime.date(1980, 4, 23),
datetime.date(2005, 7, 27),
])
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'month')),
[
datetime.date(1980, 4, 1),
datetime.date(2005, 7, 1),
])
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'year')),
[
datetime.date(1980, 1, 1),
datetime.date(2005, 1, 1),
])
def test_delete(self):
self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
Article.objects.create(id=None, headline="Third article",
pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
Article.objects.create(id=None, headline="Fourth article",
pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
# If you delete a reporter, his articles will be deleted.
self.assertQuerysetEqual(Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: Third article>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
[
"<Reporter: John Smith>",
"<Reporter: Paul Jones>",
])
self.r2.delete()
self.assertQuerysetEqual(Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Third article>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
["<Reporter: John Smith>"])
# You can delete using a JOIN in the query.
Reporter.objects.filter(article__headline__startswith='This').delete()
self.assertQuerysetEqual(Reporter.objects.all(), [])
self.assertQuerysetEqual(Article.objects.all(), [])
def test_explicit_fk(self):
# Create a new Article with get_or_create using an explicit value
# for a ForeignKey.
a2, created = Article.objects.get_or_create(id=None,
headline="John's second test",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r.id)
self.assertTrue(created)
self.assertEqual(a2.reporter.id, self.r.id)
# You can specify filters containing the explicit FK value.
self.assertQuerysetEqual(
Article.objects.filter(reporter_id__exact=self.r.id),
[
"<Article: John's second test>",
"<Article: This is a test>",
])
# Create an Article by Paul for the same date.
a3 = Article.objects.create(id=None, headline="Paul's commentary",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r2.id)
self.assertEqual(a3.reporter.id, self.r2.id)
# Get should respect explicit foreign keys as well.
self.assertRaises(MultipleObjectsReturned,
Article.objects.get, reporter_id=self.r.id)
self.assertEqual(repr(a3),
repr(Article.objects.get(reporter_id=self.r2.id,
pub_date=datetime.date(2011, 5, 7))))
def test_deepcopy_and_circular_references(self):
# Regression for #12876 -- Model methods that include queries that
# recurse don't cause recursion depth problems under deepcopy.
self.r.cached_query = Article.objects.filter(reporter=self.r)
self.assertEqual(repr(deepcopy(self.r)), "<Reporter: John Smith>")
def test_manager_class_caching(self):
r1 = Reporter.objects.create(first_name='Mike')
r2 = Reporter.objects.create(first_name='John')
# Same twice
self.assertTrue(r1.article_set.__class__ is r1.article_set.__class__)
# Same as each other
self.assertTrue(r1.article_set.__class__ is r2.article_set.__class__)
def test_create_relation_with_ugettext_lazy(self):
reporter = Reporter.objects.create(first_name='John',
last_name='Smith',
email='john.smith@example.com')
lazy = ugettext_lazy('test')
reporter.article_set.create(headline=lazy,
pub_date=datetime.date(2011, 6, 10))
notlazy = six.text_type(lazy)
article = reporter.article_set.get()
self.assertEqual(article.headline, notlazy)
def test_values_list_exception(self):
expected_message = "Cannot resolve keyword 'notafield' into field. Choices are: %s"
self.assertRaisesMessage(FieldError,
expected_message % ', '.join(Reporter._meta.get_all_field_names()),
Article.objects.values_list,
'reporter__notafield')
self.assertRaisesMessage(FieldError,
expected_message % ', '.join(['EXTRA'] + Article._meta.get_all_field_names()),
Article.objects.extra(select={'EXTRA': 'EXTRA_SELECT'}).values_list,
'notafield')
def test_fk_assignment_and_related_object_cache(self):
# Tests of ForeignKey assignment and the related-object cache (see #6886).
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
# Look up the object again so that we get a "fresh" object.
c = Child.objects.get(name="Child")
p = c.parent
# Accessing the related object again returns the exactly same object.
self.assertTrue(c.parent is p)
# But if we kill the cache, we get a new object.
del c._parent_cache
self.assertFalse(c.parent is p)
# Assigning a new object results in that object getting cached immediately.
p2 = Parent.objects.create(name="Parent 2")
c.parent = p2
self.assertTrue(c.parent is p2)
# Assigning None succeeds if field is null=True.
p.bestchild = None
self.assertTrue(p.bestchild is None)
# bestchild should still be None after saving.
p.save()
self.assertTrue(p.bestchild is None)
# bestchild should still be None after fetching the object again.
p = Parent.objects.get(name="Parent")
self.assertTrue(p.bestchild is None)
# Assigning None fails: Child.parent is null=False.
self.assertRaises(ValueError, setattr, c, "parent", None)
# You also can't assign an object of the wrong type here
self.assertRaises(ValueError, setattr, c, "parent", First(id=1, second=1))
# Nor can you explicitly assign None to Child.parent during object
# creation (regression for #9649).
self.assertRaises(ValueError, Child, name='xyzzy', parent=None)
self.assertRaises(ValueError, Child.objects.create, name='xyzzy', parent=None)
# Creation using keyword argument should cache the related object.
p = Parent.objects.get(name="Parent")
c = Child(parent=p)
self.assertTrue(c.parent is p)
# Creation using keyword argument and unsaved related instance (#8070).
p = Parent()
with self.assertRaisesMessage(ValueError,
'Cannot assign "%r": "%s" instance isn\'t saved in the database.'
% (p, Child.parent.field.rel.to._meta.object_name)):
Child(parent=p)
with self.assertRaisesMessage(ValueError,
'Cannot assign "%r": "%s" instance isn\'t saved in the database.'
% (p, Child.parent.field.rel.to._meta.object_name)):
ToFieldChild(parent=p)
# Creation using attname keyword argument and an id will cause the
# related object to be fetched.
p = Parent.objects.get(name="Parent")
c = Child(parent_id=p.id)
self.assertFalse(c.parent is p)
self.assertEqual(c.parent, p)
def test_multiple_foreignkeys(self):
# Test of multiple ForeignKeys to the same model (bug #7125).
c1 = Category.objects.create(name='First')
c2 = Category.objects.create(name='Second')
c3 = Category.objects.create(name='Third')
r1 = Record.objects.create(category=c1)
r2 = Record.objects.create(category=c1)
r3 = Record.objects.create(category=c2)
r4 = Record.objects.create(category=c2)
r5 = Record.objects.create(category=c3)
Relation.objects.create(left=r1, right=r2)
Relation.objects.create(left=r3, right=r4)
Relation.objects.create(left=r1, right=r3)
Relation.objects.create(left=r5, right=r2)
Relation.objects.create(left=r3, right=r2)
q1 = Relation.objects.filter(left__category__name__in=['First'], right__category__name__in=['Second'])
self.assertQuerysetEqual(q1, ["<Relation: First - Second>"])
q2 = Category.objects.filter(record__left_set__right__category__name='Second').order_by('name')
self.assertQuerysetEqual(q2, ["<Category: First>", "<Category: Second>"])
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
self.assertRaises(ValueError, Child.objects.create, name="Grandchild", parent=c)
def test_fk_instantiation_outside_model(self):
# Regression for #12190 -- Should be able to instantiate a FK outside
# of a model, and interrogate its related field.
cat = models.ForeignKey(Category)
self.assertEqual('id', cat.rel.get_related_field().name)
def test_relation_unsaved(self):
# Test that the <field>_set manager does not join on Null value fields (#17541)
Third.objects.create(name='Third 1')
Third.objects.create(name='Third 2')
th = Third(name="testing")
# The object isn't saved, and thus the relation field is null - we won't even
# execute a query in this case.
with self.assertNumQueries(0):
self.assertEqual(th.child_set.count(), 0)
th.save()
# Now the model is saved, so we will need to execute a query.
with self.assertNumQueries(1):
self.assertEqual(th.child_set.count(), 0)
def test_related_object(self):
public_school = School.objects.create(is_public=True)
public_student = Student.objects.create(school=public_school)
private_school = School.objects.create(is_public=False)
private_student = Student.objects.create(school=private_school)
# Only one school is available via all() due to the custom default manager.
self.assertQuerysetEqual(
School.objects.all(),
["<School: School object>"]
)
self.assertEqual(public_student.school, public_school)
# Make sure the base manager is used so that a student can still access
# its related school even if the default manager doesn't normally
# allow it.
self.assertEqual(private_student.school, private_school)
# If the manager is marked "use_for_related_fields", it'll get used instead
# of the "bare" queryset. Usually you'd define this as a property on the class,
# but this approximates that in a way that's easier in tests.
School.objects.use_for_related_fields = True
try:
private_student = Student.objects.get(pk=private_student.pk)
self.assertRaises(School.DoesNotExist, lambda: private_student.school)
finally:
School.objects.use_for_related_fields = False
def test_hasattr_related_object(self):
# The exception raised on attribute access when a related object
# doesn't exist should be an instance of a subclass of `AttributeError`
# refs #21563
self.assertFalse(hasattr(Article(), 'reporter'))
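# Editor's note (hedged sketch, not part of the original test module): the
# tests above import Reporter and Article from .models, which is not shown
# here. Their minimal shape, inferred from the fields used in the queries
# (details such as Meta options are assumptions), is roughly:
#
#     class Reporter(models.Model):
#         first_name = models.CharField(max_length=30)
#         last_name = models.CharField(max_length=30)
#         email = models.EmailField()
#
#     class Article(models.Model):
#         headline = models.CharField(max_length=100)
#         pub_date = models.DateField()
#         reporter = models.ForeignKey(Reporter)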
|
mariopro/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/minhateca.py
|
127
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
int_or_none,
parse_duration,
parse_filesize,
)
class MinhatecaIE(InfoExtractor):
_VALID_URL = r'https?://minhateca\.com\.br/[^?#]+,(?P<id>[0-9]+)\.'
_TEST = {
'url': 'http://minhateca.com.br/pereba/misc/youtube-dl+test+video,125848331.mp4(video)',
'info_dict': {
'id': '125848331',
'ext': 'mp4',
'title': 'youtube-dl test video',
'thumbnail': 're:^https?://.*\.jpg$',
'filesize_approx': 1530000,
'duration': 9,
'view_count': int,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
token = self._html_search_regex(
r'<input name="__RequestVerificationToken".*?value="([^"]+)"',
webpage, 'request token')
token_data = [
('fileId', video_id),
('__RequestVerificationToken', token),
]
req = compat_urllib_request.Request(
'http://minhateca.com.br/action/License/Download',
data=compat_urllib_parse.urlencode(token_data))
req.add_header('Content-Type', 'application/x-www-form-urlencoded')
data = self._download_json(
req, video_id, note='Downloading metadata')
video_url = data['redirectUrl']
title_str = self._html_search_regex(
r'<h1.*?>(.*?)</h1>', webpage, 'title')
title, _, ext = title_str.rpartition('.')
filesize_approx = parse_filesize(self._html_search_regex(
r'<p class="fileSize">(.*?)</p>',
webpage, 'file size approximation', fatal=False))
duration = parse_duration(self._html_search_regex(
r'(?s)<p class="fileLeng[ht][th]">.*?class="bold">(.*?)<',
webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<p class="downloadsCounter">([0-9]+)</p>',
webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'title': title,
'ext': ext,
'filesize_approx': filesize_approx,
'duration': duration,
'view_count': view_count,
'thumbnail': self._og_search_thumbnail(webpage),
}
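# Editor's note (hedged usage sketch, not part of the original extractor):
# extractors are normally dispatched on _VALID_URL by youtube-dl rather than
# instantiated directly. A rough illustration using the test URL above:
#
#     import youtube_dl
#     with youtube_dl.YoutubeDL() as ydl:
#         info = ydl.extract_info(
#             'http://minhateca.com.br/pereba/misc/youtube-dl+test+video,125848331.mp4(video)',
#             download=False)
#         print(info['title'], info.get('duration'))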
|
ARISE-Initiative/robosuite
|
refs/heads/master
|
robosuite/wrappers/demo_sampler_wrapper.py
|
1
|
"""
This file contains a wrapper for sampling environment states
from a set of demonstrations on every reset. The main use case is for
altering the start state distribution of training episodes for
learning RL policies.
"""
import random
import os
import h5py
import time
import numpy as np
from robosuite.utils.mjcf_utils import postprocess_model_xml
from robosuite.wrappers import Wrapper
class DemoSamplerWrapper(Wrapper):
"""
Initializes a wrapper that provides support for resetting the environment
state to one from a demonstration. It also supports curriculums for
altering how often to sample from demonstration vs. sampling a reset
state from the environment.
Args:
env (MujocoEnv): The environment to wrap.
demo_path (str): The path to the folder containing the demonstrations.
There should be a `demo.hdf5` file and a folder named `models` with
all of the stored model xml files from the demonstrations.
need_xml (bool): If True, the mujoco model needs to be reloaded when
sampling a state from a demonstration. This could be because every
demonstration was taken under varied object properties, for example.
In this case, every sampled state comes with a corresponding xml to
be used for the environment reset.
num_traj (int): If provided, subsample @num_traj demonstrations from the
provided set of demonstrations instead of using all of them.
sampling_schemes (list of str): A list of sampling schemes
to be used. The following strings are valid schemes:
`'random'`: sample a reset state directly from the wrapped environment
`'uniform'`: sample a state from a demonstration uniformly at random
`'forward'`: sample a state from a window that grows progressively from
the start of demonstrations
`'reverse'`: sample a state from a window that grows progressively from
the end of demonstrations
scheme_ratios (list of float --> np.array): A list of probability values to
assign to each member of @sampling_schemes. Must be non-negative and
sum to 1.
open_loop_increment_freq (int): How frequently to increase
the window size in open loop schemes ("forward" and "reverse"). The
window size will increase by @open_loop_window_increment every
@open_loop_increment_freq samples. Only samples that are generated
by open loop schemes contribute to this count.
open_loop_initial_window_width (int): The width of the initial sampling
window, in terms of number of demonstration time steps, for
open loop schemes.
open_loop_window_increment (int): The window size will increase by
@open_loop_window_increment every @open_loop_increment_freq samples.
This number is in terms of number of demonstration time steps.
Raises:
AssertionError: [Incompatible envs]
AssertionError: [Invalid sampling scheme]
AssertionError: [Invalid scheme ratio]
"""
def __init__(
self,
env,
demo_path,
need_xml=False,
num_traj=-1,
sampling_schemes=("uniform", "random"),
scheme_ratios=(0.9, 0.1),
open_loop_increment_freq=100,
open_loop_initial_window_width=25,
open_loop_window_increment=25,
):
super().__init__(env)
self.demo_path = demo_path
hdf5_path = os.path.join(self.demo_path, "demo.hdf5")
self.demo_file = h5py.File(hdf5_path, "r")
# ensure that wrapped env matches the env on which demonstrations were collected
env_name = self.demo_file["data"].attrs["env"]
assert (
env_name == self.unwrapped.__class__.__name__
), "Wrapped env {} does not match env on which demos were collected ({})".format(
env.__class__.__name__, env_name
)
# list of all demonstrations episodes
self.demo_list = list(self.demo_file["data"].keys())
# subsample a selection of demonstrations if requested
if num_traj > 0:
random.seed(3141) # ensure that the same set is sampled every time
self.demo_list = random.sample(self.demo_list, num_traj)
self.need_xml = need_xml
self.demo_sampled = 0
self.sample_method_dict = {
"random": "_random_sample",
"uniform": "_uniform_sample",
"forward": "_forward_sample_open_loop",
"reverse": "_reverse_sample_open_loop",
}
self.sampling_schemes = sampling_schemes
self.scheme_ratios = np.asarray(scheme_ratios)
# make sure the list of schemes is valid
schemes = self.sample_method_dict.keys()
assert np.all([(s in schemes) for s in self.sampling_schemes])
# make sure the distribution is the correct size
assert len(self.sampling_schemes) == len(self.scheme_ratios)
# make sure the distribution lies in the probability simplex
assert np.all(self.scheme_ratios > 0.)
assert sum(self.scheme_ratios) == 1.0
# open loop configuration
self.open_loop_increment_freq = open_loop_increment_freq
self.open_loop_window_increment = open_loop_window_increment
# keep track of window size
self.open_loop_window_size = open_loop_initial_window_width
def reset(self):
"""
Logic for sampling a state from the demonstration and resetting
the simulation to that state.
Returns:
OrderedDict: Environment observation space after reset occurs
"""
state = self.sample()
if state is None:
# None indicates that a normal env reset should occur
return self.env.reset()
else:
if self.need_xml:
# reset the simulation from the model if necessary
state, xml = state
self.env.reset_from_xml_string(xml)
if isinstance(state, tuple):
state = state[0]
# force simulator state to one from the demo
self.sim.set_state_from_flattened(state)
self.sim.forward()
return self.env._get_observation()
def sample(self):
"""
This is the core sampling method. Samples a state from a
demonstration, in accordance with the configuration.
Returns:
None or np.array or 2-tuple: If np.array, is the state sampled from a demo file. If 2-tuple, additionally
includes the model xml file
"""
# chooses a sampling scheme randomly based on the mixing ratios
seed = random.uniform(0, 1)
ratio = np.cumsum(self.scheme_ratios)
ratio = ratio > seed
for i, v in enumerate(ratio):
if v:
break
sample_method = getattr(self, self.sample_method_dict[self.sampling_schemes[i]])
return sample_method()
def _random_sample(self):
"""
Sampling method.
Return None to indicate that the state should be sampled directly
from the environment.
"""
return None
def _uniform_sample(self):
"""
Sampling method.
First uniformly sample a demonstration from the set of demonstrations.
Then uniformly sample a state from the selected demonstration.
Returns:
np.array or 2-tuple: If np.array, is the state sampled from a demo file. If 2-tuple, additionally
includes the model xml file
"""
# get a random episode index
ep_ind = random.choice(self.demo_list)
# select a flattened mujoco state uniformly from this episode
states = self.demo_file["data/{}/states".format(ep_ind)][()]
state = random.choice(states)
if self.need_xml:
model_xml = self._xml_for_episode_index(ep_ind)
xml = postprocess_model_xml(model_xml)
return state, xml
return state
def _reverse_sample_open_loop(self):
"""
Sampling method.
Open loop reverse sampling from demonstrations. Starts by
sampling from states near the end of the demonstrations.
Increases the window backwards as the number of calls to
this sampling method increases at a fixed rate.
Returns:
np.array or 2-tuple: If np.array, is the state sampled from a demo file. If 2-tuple, additionally
includes the model xml file
"""
# get a random episode index
ep_ind = random.choice(self.demo_list)
# sample uniformly in a window that grows backwards from the end of the demos
states = self.demo_file["data/{}/states".format(ep_ind)][()]
eps_len = states.shape[0]
index = np.random.randint(max(eps_len - self.open_loop_window_size, 0), eps_len)
state = states[index]
# increase window size at a fixed frequency (open loop)
self.demo_sampled += 1
if self.demo_sampled >= self.open_loop_increment_freq:
if self.open_loop_window_size < eps_len:
self.open_loop_window_size += self.open_loop_window_increment
self.demo_sampled = 0
if self.need_xml:
model_xml = self._xml_for_episode_index(ep_ind)
xml = postprocess_model_xml(model_xml)
return state, xml
return state
def _forward_sample_open_loop(self):
"""
Sampling method.
Open loop forward sampling from demonstrations. Starts by
sampling from states near the beginning of the demonstrations.
Increases the window forwards as the number of calls to
this sampling method increases at a fixed rate.
Returns:
np.array or 2-tuple: If np.array, is the state sampled from a demo file. If 2-tuple, additionally
includes the model xml file
"""
# get a random episode index
ep_ind = random.choice(self.demo_list)
# sample uniformly in a window that grows forwards from the beginning of the demos
states = self.demo_file["data/{}/states".format(ep_ind)][()]
eps_len = states.shape[0]
index = np.random.randint(0, min(self.open_loop_window_size, eps_len))
state = states[index]
# increase window size at a fixed frequency (open loop)
self.demo_sampled += 1
if self.demo_sampled >= self.open_loop_increment_freq:
if self.open_loop_window_size < eps_len:
self.open_loop_window_size += self.open_loop_window_increment
self.demo_sampled = 0
if self.need_xml:
model_xml = self._xml_for_episode_index(ep_ind)
xml = postprocess_model_xml(model_xml)
return state, xml
return state
def _xml_for_episode_index(self, ep_ind):
"""
Helper method to retrieve the corresponding model xml string
for the passed episode index.
Args:
ep_ind (int): Episode index to pull from demo file
Returns:
str: model xml as a string
"""
# read the model xml, using the metadata stored in the attribute for this episode
model_file = self.demo_file["data/{}".format(ep_ind)].attrs["model_file"]
model_path = os.path.join(self.demo_path, "models", model_file)
with open(model_path, "r") as model_f:
model_xml = model_f.read()
return model_xml
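# Editor's note (hedged usage sketch, not part of the original wrapper): the
# environment name "Lift", the robot choice, and the demo_path below are
# assumptions; demo_path must contain a demo.hdf5 file and a models/ folder
# as described in the class docstring:
#
#     import robosuite as suite
#     env = DemoSamplerWrapper(
#         suite.make("Lift", robots="Panda", has_renderer=False),
#         demo_path="/path/to/demonstrations",
#         need_xml=True,
#         sampling_schemes=["uniform", "random"],
#         scheme_ratios=[0.9, 0.1],
#     )
#     obs = env.reset()  # may restore a state sampled from a demonstration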
|
tuxcanfly/flashbox
|
refs/heads/master
|
downloadr_lib/downloadrconfig.py
|
1
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2012 Javed Khan <tuxcanfly@gmail.com>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
# THIS IS THE Downloadr CONFIGURATION FILE
# YOU CAN PUT SOME GLOBAL VALUES HERE
# Do not touch unless you know what you're doing.
# you're warned :)
__all__ = [
'project_path_not_found',
'get_data_file',
'get_data_path',
]
# Where your project will look for your data (for instance, images and ui
# files). By default, this is ../data, relative to your trunk layout
__downloadr_data_directory__ = '../data/'
__license__ = ''
__version__ = 'VERSION'
import os
import gettext
from gettext import gettext as _
gettext.textdomain('downloadr')
class project_path_not_found(Exception):
"""Raised when we can't find the project directory."""
def get_data_file(*path_segments):
"""Get the full path to a data file.
Returns the path to a file underneath the data directory (as defined by
`get_data_path`). Equivalent to os.path.join(get_data_path(),
*path_segments).
"""
return os.path.join(get_data_path(), *path_segments)
def get_data_path():
"""Retrieve downloadr data path
This path is by default <downloadr_lib_path>/../data/ in trunk
and /usr/share/downloadr in an installed version but this path
is specified at installation time.
"""
# Get pathname absolute or relative.
path = os.path.join(
os.path.dirname(__file__), __downloadr_data_directory__)
abs_data_path = os.path.abspath(path)
if not os.path.exists(abs_data_path):
raise project_path_not_found
return abs_data_path
def get_version():
return __version__
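# Editor's note (hedged usage sketch, not part of the original module): the
# file name 'ui/DownloadrWindow.ui' below is an assumption, purely to show the
# intended call pattern:
#
#     from downloadr_lib import downloadrconfig
#     ui_file = downloadrconfig.get_data_file('ui', 'DownloadrWindow.ui')
#     # raises project_path_not_found if the data directory does not exist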
|
RydrDojo/Ridr
|
refs/heads/master
|
pylotVenv/lib/python2.7/site-packages/wheel/test/complex-dist/complexdist/__init__.py
|
566
|
def main():
return
|
follow99/django
|
refs/heads/master
|
tests/i18n/patterns/urls/path_unused.py
|
452
|
from django.conf.urls import url
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = [
url(r'^nl/foo/', view, name='not-translated'),
]
|
schlos/eden
|
refs/heads/master
|
modules/s3db/po.py
|
7
|
# -*- coding: utf-8 -*-
""" Sahana Eden Population Outreach Models
@copyright: 2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("OutreachAreaModel",
"OutreachHouseholdModel",
"OutreachReferralModel",
"po_rheader",
"po_organisation_onaccept",
"po_due_followups",
)
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
class OutreachAreaModel(S3Model):
""" Model representing a mesh area for outreach work """
names = ("po_area",
"po_area_id",
)
def model(self):
T = current.T
db = current.db
auth = current.auth
define_table = self.define_table
super_link = self.super_link
s3 = current.response.s3
crud_strings = s3.crud_strings
root_org = auth.root_org()
ADMIN = current.session.s3.system_roles.ADMIN
is_admin = auth.s3_has_role(ADMIN)
# ---------------------------------------------------------------------
# Area
#
tablename = "po_area"
define_table(tablename,
super_link("doc_id", "doc_entity"),
super_link("pe_id", "pr_pentity"),
Field("name",
requires = IS_NOT_EMPTY(),
),
# @todo: link demographics?
self.gis_location_id(
widget = S3LocationSelector(points = False,
polygons = True,
feature_required = True,
),
),
# Only included to set realm entity:
self.org_organisation_id(default = root_org,
readable = is_admin,
writable = is_admin,
),
Field("attempted_visits", "integer",
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Attempted Visits"),
T("Number of households in the area where nobody was at home at the time of visit"))),
default = 0,
label = T("Attempted Visits"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(minimum=0)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Area"),
title_display = T("Area Details"),
title_list = T("Areas"),
title_update = T("Edit Area"),
label_list_button = T("List Areas"),
label_delete_button = T("Delete Area"),
msg_record_created = T("Area created"),
msg_record_modified = T("Area updated"),
msg_record_deleted = T("Area deleted"),
msg_list_empty = T("No Areas currently registered"),
)
# Reusable field
represent = S3Represent(lookup=tablename, show_link=True)
area_id = S3ReusableField("area_id", "reference %s" % tablename,
label = T("Area"),
represent = represent,
requires = IS_ONE_OF(db, "po_area.id",
represent,
),
sortby = "name",
comment = S3AddResourceLink(f="area",
tooltip=T("Create a new area"),
),
)
# Components
self.add_components(tablename,
po_household = "area_id",
org_organisation = {"link": "po_organisation_area",
"joinby": "area_id",
"key": "organisation_id",
"actuate": "hide",
},
)
levels = current.gis.get_relevant_hierarchy_levels()
# Filters
filter_widgets = [S3TextFilter(["name"]),
S3LocationFilter("location_id", levels = levels),
]
# @todo: reports
# Table Configuration
self.configure(tablename,
filter_widgets = filter_widgets,
summary = ({"common": True,
"name": "add",
"widgets": [{"method": "create"}],
},
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "map",
"label": "Map",
"widgets": [{"method": "map",
"ajax_init": True}],
},
),
super_entity = ("doc_entity", "pr_pentity"),
onaccept = self.area_onaccept,
ondelete = self.area_ondelete,
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return {"po_area_id": area_id,
}
# -------------------------------------------------------------------------
@staticmethod
def defaults():
""" Safe defaults for names in case the module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False,
)
return {"po_area_id": lambda **attr: dummy("area_id"),
}
# -------------------------------------------------------------------------
@classmethod
def area_onaccept(cls, form):
""" Onaccept actions for po_area """
try:
record_id = form.vars.id
except AttributeError:
return
cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
@classmethod
def area_ondelete(cls, row):
""" Ondelete actions for po_area """
try:
record_id = row.id
except AttributeError:
return
cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
@staticmethod
def area_update_affiliations(record_id):
"""
Update affiliations for an area
@param record_id: the po_area record ID
"""
ROLE = "Areas"
db = current.db
s3db = current.s3db
table = s3db.po_area
row = db(table.id == record_id).select(table.pe_id,
table.deleted,
table.deleted_fk,
table.organisation_id,
limitby=(0, 1),
).first()
if not row:
return
# Get the organisation_id
if row.deleted:
try:
fk = json.loads(row.deleted_fk)
organisation_id = fk["organisation_id"]
except ValueError:
organisation_id = None
else:
organisation_id = row.organisation_id
# Get the PE ids
area_pe_id = row.pe_id
organisation_pe_id = s3db.pr_get_pe_id("org_organisation",
organisation_id,
)
# Remove obsolete affiliations
rtable = s3db.pr_role
atable = s3db.pr_affiliation
query = (atable.pe_id == row.pe_id) & \
(atable.deleted != True) & \
(atable.role_id == rtable.id) & \
(rtable.role == ROLE) & \
(rtable.pe_id != organisation_pe_id)
rows = db(query).select(rtable.pe_id)
for row in rows:
s3db.pr_remove_affiliation(row.pe_id, area_pe_id, role=ROLE)
# Add current affiliation
from pr import OU
s3db.pr_add_affiliation(organisation_pe_id, area_pe_id, role=ROLE, role_type=OU)
# =============================================================================
class OutreachHouseholdModel(S3Model):
names = ("po_household",
"po_household_id",
"po_household_dwelling",
"po_age_group",
"po_household_member",
"po_household_followup",
"po_household_social",
)
def model(self):
T = current.T
db = current.db
define_table = self.define_table
super_link = self.super_link
configure = self.configure
s3 = current.response.s3
crud_strings = s3.crud_strings
settings = current.deployment_settings
# ---------------------------------------------------------------------
# Household
#
tablename = "po_household"
define_table(tablename,
super_link("doc_id", "doc_entity"),
super_link("pe_id", "pr_pentity"),
self.po_area_id(),
# @todo: inherit Lx from area and hide Lx (in area prep)
self.gis_location_id(
label = T("Address"),
widget = S3LocationSelector(show_address=True,
show_map=settings.get_gis_map_selector(),
show_postcode=settings.get_gis_postcode_selector(),
prevent_duplicate_addresses = True,
),
),
s3_date("date_visited",
default = "now",
empty = False,
label = T("Date visited"),
),
Field("followup", "boolean",
default = False,
label = T("Follow up"),
represent = s3_yes_no_represent,
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Household"),
title_display = T("Household Details"),
title_list = T("Households"),
title_update = T("Edit Household"),
label_list_button = T("List Households"),
label_delete_button = T("Delete Household"),
msg_record_created = T("Household created"),
msg_record_modified = T("Household updated"),
msg_record_deleted = T("Household deleted"),
msg_list_empty = T("No Households currently registered"),
)
# Reusable Field
represent = po_HouseholdRepresent()
household_id = S3ReusableField("household_id", "reference %s" % tablename,
label = T("Household"),
represent = represent,
requires = IS_ONE_OF(db, "po_household.id",
represent,
),
sortby = "name",
comment = S3AddResourceLink(f="household",
tooltip=T("Create a new household"),
),
)
# Filter Widgets
filter_widgets = [S3TextFilter(("household_member.person_id$first_name",
"household_member.person_id$middle_name",
"household_member.person_id$last_name",
"location_id$addr_street",
),
label = T("Search"),
comment = T("Search by Address or Name of Household Member"),
),
S3OptionsFilter("area_id",
#hidden = True,
),
S3DateFilter("date_visited",
label = T("Date visited"),
hidden = True,
),
S3OptionsFilter("followup",
cols = 2,
hidden = True,
),
S3DateFilter("household_followup.followup_date",
label = T("Follow-up Date"),
hidden = True,
),
S3OptionsFilter("household_followup.completed",
cols = 2,
hidden = True,
),
S3OptionsFilter("organisation_household.organisation_id",
hidden = True,
),
]
# List fields
list_fields = ("area_id",
"location_id",
"date_visited",
"followup",
"household_followup.followup_date",
"household_followup.completed",
"organisation_household.organisation_id",
"comments",
)
# Reports
report_axes = ["area_id",
"followup",
"organisation_household.organisation_id",
"household_followup.completed",
"household_followup.evaluation",
]
reports = ((T("Number of Households Visited"), "count(id)"),
)
# Custom Form
crud_form = S3SQLCustomForm("area_id",
"location_id",
"date_visited",
"followup",
S3SQLInlineComponent("contact",
label = T("Contact Information"),
fields = ["priority",
(T("Type"), "contact_method"),
(T("Number"), "value"),
"comments",
],
orderby = "priority",
),
"household_social.language",
"household_social.community",
"household_dwelling.dwelling_type",
"household_dwelling.type_of_use",
"household_dwelling.repair_status",
"comments",
)
configure(tablename,
create_next = self.household_create_next,
crud_form = crud_form,
filter_widgets = filter_widgets,
list_fields = list_fields,
onaccept = self.household_onaccept,
report_options = {"rows": report_axes,
"cols": report_axes,
"fact": reports,
"defaults": {
"rows": "area_id",
"cols": "followup",
"fact": "count(id)",
}
},
super_entity = ("doc_entity", "pr_pentity"),
)
# Components
self.add_components(tablename,
pr_person = {"link": "po_household_member",
"joinby": "household_id",
"key": "person_id",
"actuate": "replace",
},
po_household_dwelling = {"joinby": "household_id",
"multiple": False,
},
po_household_social = {"joinby": "household_id",
"multiple": False,
},
po_household_followup = {"joinby": "household_id",
"multiple": False,
},
po_organisation_household = "household_id",
)
# ---------------------------------------------------------------------
# Household Members
#
tablename = "po_household_member"
define_table(tablename,
household_id(),
self.pr_person_id(),
s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Household Member Age Groups (under 18,18-30,30-55,56-75,75+)
#
age_groups = ("<18", "18-30", "30-55", "56-75", "75+")
tablename = "po_age_group"
define_table(tablename,
self.pr_person_id(),
Field("age_group",
label = T("Age Group"),
requires = IS_EMPTY_OR(IS_IN_SET(age_groups)),
),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Dwelling
#
dwelling_type = {"U": T("Unit"),
"H": T("House"),
"A": T("Apartment"),
"S": T("Supervised House"),
"O": T("Other"),
}
type_of_use = {"S": T("Owner-occupied"),
"R": T("Renting"),
"B": T("Boarding"),
"O": T("Other"),
}
repair_status = {"W": T("waiting"),
"R": T("rebuild"),
"C": T("completed"),
"N": T("not required"),
"O": T("other"),
}
tablename = "po_household_dwelling"
define_table(tablename,
household_id(),
Field("dwelling_type",
label = T("Type of Dwelling"),
represent = S3Represent(options=dwelling_type),
requires = IS_EMPTY_OR(IS_IN_SET(dwelling_type)),
),
Field("type_of_use",
label = T("Type of Use"),
represent = S3Represent(options=type_of_use),
requires = IS_EMPTY_OR(IS_IN_SET(type_of_use)),
),
Field("repair_status",
label = T("Stage of Repair"),
represent = S3Represent(options=repair_status),
requires = IS_EMPTY_OR(IS_IN_SET(repair_status)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Dwelling Data"),
)
# ---------------------------------------------------------------------
# Social Information
#
languages = dict(IS_ISO639_2_LANGUAGE_CODE.language_codes())
tablename = "po_household_social"
define_table(tablename,
household_id(),
Field("language",
label = T("Main Language"),
represent = S3Represent(options=languages),
requires = IS_EMPTY_OR(
IS_ISO639_2_LANGUAGE_CODE(select=None,
sort=True,
)),
),
Field("community", "text",
label = T("Community Connections"),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Social Information"),
)
# ---------------------------------------------------------------------
# Follow-up Details
#
evaluation = {"B": T("better"),
"S": T("same"),
"W": T("worse"),
}
twoweeks = current.request.utcnow + datetime.timedelta(days=14)
tablename = "po_household_followup"
define_table(tablename,
household_id(),
Field("followup_required",
label = T("Follow-up required"),
),
s3_date("followup_date",
label = T("Date for Follow-up"),
default = twoweeks,
past = 0,
),
Field("followup", "text",
label = T("Follow-up made"),
),
Field("completed", "boolean",
default = False,
label = "Follow-up completed",
represent = s3_yes_no_represent,
),
Field("evaluation",
label = T("Evaluation"),
represent = S3Represent(options=evaluation),
requires = IS_EMPTY_OR(IS_IN_SET(evaluation)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Follow-up Details"),
)
configure(tablename,
deletable = False,
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return {"po_household_id": household_id,
}
# -------------------------------------------------------------------------
@staticmethod
def defaults():
""" Safe defaults for names in case the module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False)
return {"po_household_id": lambda **attr: dummy("household_id"),
}
# -------------------------------------------------------------------------
@staticmethod
def household_create_next(r):
""" Determine where to go next after creating a new household """
post_vars = r.post_vars
next_vars = S3Method._remove_filters(r.get_vars)
next_vars.pop("w", None)
follow_up = "followup" in post_vars and post_vars["followup"]
if r.function == "area":
if follow_up:
return URL(f="household",
args=["[id]", "person"],
vars=next_vars,
)
else:
return r.url(method="",
component="household",
vars=next_vars,
)
else:
if follow_up:
return r.url(id="[id]",
component="person",
method="",
vars=next_vars,
)
else:
return r.url(method="summary",
id="",
vars=next_vars,
)
# -------------------------------------------------------------------------
@staticmethod
def household_onaccept(form):
""" Onaccept-routine for households """
formvars = form.vars
try:
record_id = formvars.id
except AttributeError:
return
# Auto-create a followup component if household.followup is True
s3db = current.s3db
htable = s3db.po_household
ftable = s3db.po_household_followup
left = ftable.on((ftable.household_id == htable.id) & \
(ftable.deleted != True))
row = current.db(htable.id == record_id).select(htable.id,
htable.followup,
htable.realm_entity,
ftable.id,
left=left,
limitby=(0, 1)).first()
if row and row[htable.followup] and not row[ftable.id]:
ftable.insert(household_id=row[htable.id],
realm_entity=row[htable.realm_entity],
)
# =============================================================================
class OutreachReferralModel(S3Model):
""" Model to track referrals of households to organisations """
names = ("po_referral_organisation",
"po_organisation_area",
"po_organisation_household",
)
def model(self):
T = current.T
db = current.db
define_table = self.define_table
configure = self.configure
s3 = current.response.s3
crud_strings = s3.crud_strings
organisation_id = self.org_organisation_id
# Organisation Represent should link to po/organisation
org_link = URL(c="po", f="organisation", args="[id]")
org_represent = self.org_OrganisationRepresent(show_link=True,
linkto=org_link,
)
# Organisation AddResourceLink should go to po/organisation
ADD_ORGANISATION = T("Create Agency")
tooltip = T("If you don't see the Agency in the list, you can add a new one by clicking link 'Create Agency'.")
org_comment = S3AddResourceLink(c="po", f="organisation",
label=ADD_ORGANISATION,
title=ADD_ORGANISATION,
tooltip=tooltip,
)
# ---------------------------------------------------------------------
# Referral Agency (context link table), currently not visible
#
tablename = "po_referral_organisation"
define_table(tablename,
organisation_id(represent=org_represent,
comment=org_comment,
),
#s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Areas Served by a Referral Agency
#
tablename = "po_organisation_area"
define_table(tablename,
# @todo: AddResourceLink should go to po/organisation
organisation_id(label=T("Agency"),
represent=org_represent,
comment=org_comment,
),
self.po_area_id(),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Agency"),
title_update = T("Edit Referral Agency"),
label_list_button = T("List Agencies"),
label_delete_button = T("Remove Agency"),
)
# ---------------------------------------------------------------------
# Referral Household=>Agency
#
tablename = "po_organisation_household"
define_table(tablename,
# @todo: AddResourceLink should go to po/organisation
organisation_id(label=T("Referral Agency"),
represent=org_represent,
comment=org_comment,
),
self.po_household_id(),
s3_date(default="now",
label=T("Date Referral Made"),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Referral"),
title_update = T("Edit Referral Details"),
label_delete_button = T("Delete Referral"),
)
# Table Configuration
configure(tablename,
orderby = "%s.date desc" % tablename,
list_fields = ("date",
"organisation_id",
"household_id",
"comments",
),
)
# =============================================================================
class po_HouseholdRepresent(S3Represent):
def __init__(self, show_link=True):
"""
Constructor
@param show_link: whether to add a URL to representations
"""
super(po_HouseholdRepresent, self).__init__(
lookup = "po_household",
show_link = show_link)
self.location_represent = \
current.s3db.gis_LocationRepresent(address_only=True,
show_link=False,
)
# -------------------------------------------------------------------------
def lookup_rows(self, key, values, fields=[]):
"""
Custom rows lookup
@param key: the key Field
@param values: the values
@param fields: unused (retained for API compatibility)
"""
s3db = current.s3db
table = self.table
count = len(values)
if count == 1:
query = (key == values[0])
else:
query = key.belongs(values)
rows = current.db(query).select(table.id,
table.location_id,
limitby = (0, count),
)
self.queries += 1
# Bulk-represent locations
location_id = str(table.location_id)
location_ids = [row[location_id] for row in rows]
if location_ids:
self.location_represent.bulk(location_ids, show_link=False)
return rows
# -------------------------------------------------------------------------
def represent_row(self, row):
"""
Represent a row
@param row: the Row
"""
# Represent household as its address
return self.location_represent(row.location_id)
# =============================================================================
def po_rheader(r, tabs=[]):
if r.representation != "html":
# RHeaders only used in interactive views
return None
tablename = r.tablename
record = r.record
rheader = None
rheader_fields = []
if record:
T = current.T
if tablename == "po_area":
# @todo: hide "Referral Agencies" per deployment setting
if not tabs:
tabs = [(T("Basic Details"), ""),
(T("Households"), "household"),
(T("Referral Agencies"), "organisation"),
(T("Documents"), "document"),
]
rheader_fields = [["name"],
]
elif tablename == "po_household":
if not tabs:
tabs = [(T("Basic Details"), "")]
if record.followup:
tabs.extend([#(T("Contact Information"), "contact"),
#(T("Social Information"), "household_social"),
#(T("Dwelling"), "household_dwelling"),
(T("Members"), "person"),
(T("Follow-up Details"), "household_followup"),
(T("Referrals"), "organisation_household"),
])
rheader_fields = [["area_id"],
["location_id"],
]
elif tablename == "org_organisation":
# @todo: hide "Areas Served" per deployment setting
if not tabs:
tabs = [(T("Basic Details"), ""),
(T("Areas Served"), "area"),
(T("Referrals"), "organisation_household"),
]
rheader_fields = [["name"],
]
rheader = S3ResourceHeader(rheader_fields, tabs)(r)
return rheader
# =============================================================================
def po_organisation_onaccept(form):
"""
Create a po_referral_organisation record onaccept of
an org_organisation to link it to this module.
@param form: the form
"""
formvars = form.vars
try:
organisation_id = formvars.id
except AttributeError:
return
rtable = current.s3db.po_referral_organisation
query = (rtable.organisation_id == organisation_id) & \
(rtable.deleted != True)
row = current.db(query).select(rtable.id, limitby=(0, 1)).first()
if not row:
rtable.insert(organisation_id=organisation_id)
# =============================================================================
def po_due_followups():
""" Number of due follow-ups """
query = (FS("followup_date") <= datetime.datetime.utcnow().date()) & \
(FS("completed") != True)
resource = current.s3db.resource("po_household_followup", filter=query)
return resource.count()
# END =========================================================================
|
apdjustino/DRCOG_Urbansim
|
refs/heads/master
|
src/opus_core/misc.py
|
1
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
"""Collection of useful miscellaneous functions and definitions"""
import os.path
import re
import shutil
import socket
import sys
import tempfile
import numpy
from numpy import ma, array, ndarray, divide, seterr, inf, allclose
from inspect import getmembers, ismethod
from opus_core.logger import logger
from opus_core.ndimage import sum
from pkg_resources import parse_version
DEFAULT_BYTEORDER = 'little'
class DebugPrinter(object):
"""Class for printing debugging messages on different levels."""
def __init__(self, flag):
self.flag = flag
def print_debug(self, message, level):
if self.flag >= level:
logger.log_status(message)
# Functions:
def copytree(src, dst, skip_subdirectories=[]):
"""recursively copy the directory tree rooted at src to the destination directory dst.
Skip any subdirectories in skip_subdirectories."""
# This function is adapted from the shutil.copytree function. It adds the optional
# skip_subdirectories parameter, and omits the symlinks parameter.
names = os.listdir(src)
os.mkdir(dst)
for name in names:
if name not in skip_subdirectories:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
copytree(srcname, dstname, skip_subdirectories=skip_subdirectories)
else:
shutil.copy2(srcname, dstname)
def ematch (list, str):
""" Exact match of a string in a 1D-array of strings. Returns an array of matches.
"""
from numpy import where
imatches = (list == str)
return where(imatches)[0]
def get_indices_of_matched_items(valuearray, items_to_match):
"""Returns indices of matched items from items_to_match within valuearray.
"""
from numpy import array
return array(map(lambda x: ematch(valuearray, x)[0], items_to_match))
def byteswap_if_needed(data, byteorder):
"""
Ensures that data is read and written in the correct byteorder.
To be used with the fromfile and tofile methods provided in numpy,
which assume native byteorder rather than letting you specify.
"""
if sys.byteorder != byteorder:
data.byteswap(True)
def module_path_from_opus_path(opus_path):
"""Returns the operating system path to this fully-specified opus module name,
e.g. 'opus_core.misc' might be translated to 'C:\workspace\opus_core\misc.py'
"""
return '%s.py' % directory_path_from_opus_path(opus_path)
def directory_path_from_opus_path(opus_path):
"""Returns the operating system path to this fully-specified opus directory name,
e.g. 'opus_core.docs.database_tables' might be translated to 'C:\workspace\opus_core\docs\database_tables'
"""
parts = opus_path.split('.')
exec('import %s as _start_package' % parts[0])
path = _start_package.__path__[0]
if len(parts) > 1:
for part in parts[1:]:
path = os.path.join(path, part)
return path
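# Illustrative example (not executed; it mirrors the unit test further below):
# with opus_core installed at <opus_core_path>,
#   directory_path_from_opus_path('opus_core.tests')
# returns os.path.join(<opus_core_path>, 'tests').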
def opus_path_for_variable_from_module_path(file_path):
"""Given the os path to a variable's module, return the opus path for that variable"""
# abspath will be the absolute path to the variable (throwing away the .py extension -- that's the _ part in the pair)
(abspath, _) = os.path.splitext(os.path.abspath(file_path))
# the last 3 things in the path will be the package name, dataset name, and variable name
(a, variable_name) = os.path.split(abspath)
(b, dataset_name) = os.path.split(a)
(_, package_name) = os.path.split(b)
# return opus_path
return '.'.join((package_name, dataset_name, variable_name))
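# Illustrative example (not executed; it mirrors the unit test further below):
# a module path such as .../package_name/dataset_name/variable_name.py
# maps to the opus path 'package_name.dataset_name.variable_name'.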
def read_dataset_from_flt(dataset_name, file_path='.'):
"""Reads a dataset from a binary storage. Return a dictionary with attribute names as keys and data arrays as values."""
from opus_core.store.file_flt_storage import file_flt_storage
storage = file_flt_storage(file_path)
return storage.load_table(dataset_name)
def write_to_file(filename, data, byteorder=DEFAULT_BYTEORDER):
"""Writes float data to a file."""
byteswap_if_needed(data, byteorder)
float_file = file(filename, mode="wb")
data.tofile(float_file)
float_file.close()
byteswap_if_needed(data, byteorder)
def write_to_text_file(filename, data, mode="wb", delimiter="\n", end_delimiter="\n"):
"""Writes data to a text file."""
import os
logger.log_status(os.getcwd())
#from IPython import embed
#embed()
text_file = file(filename, mode=mode)
try:
if isinstance(data, list):
n = len(data)
else:
n = data.size
if n > 0:
for index in xrange(n-1):
text_file.write(str(data[index]) + delimiter)
text_file.write(str(data[n-1]) + end_delimiter)
else:
text_file.write(end_delimiter)
finally:
text_file.close()
def write_table_to_text_file(filename, table, mode="wb", delimiter=' '):
"""Writes table (2D array) into a text file. Entries in each row are delimited by 'delimiter'"""
write_to_text_file(filename, table[0,:], mode=mode, delimiter=delimiter)
for k in range(1,table.shape[0]):
write_to_text_file(filename, table[k,:], delimiter=delimiter, mode="ab")
def load_table_from_text_file(filename, convert_to_float=False, split_delimiter=' ', header=False, comment=None):
"""Reads table from a file. It returns a tuple, where the first element
is an array of the values in file, the second element is the header.
If 'convert_to_float' is False, the value array is an array of strings.
Otherwise the value array is a 2D numpy array of floats; in that case each row is split using the 'split_delimiter'.
If header is False, no header is assumed and the second element of the resulting tuple is None.
If comment is not None, lines that start with that character are ignored.
"""
from numpy import array, reshape
def readlineandignorecomments():
line = text_file.readline()
while (line != ''):
if (comment is None) or not line.startswith(comment):
break
line = text_file.readline()
return line
text_file = file(filename, "rb")
line = readlineandignorecomments()
header_line = None
if header:
header_line = re.split('\s+', line)[0:-1]
line = readlineandignorecomments()
line_list = re.split('\s+', line)
ncol = len(line_list)-1
data = []
nrow=0
while (line != ''):
nrow += 1
for column_number in range(ncol):
data.append(line_list[column_number])
line = readlineandignorecomments()
line_list = re.split('\s+', line)
text_file.close()
if convert_to_float:
def split_and_convert(row):
splitted_row = row.split(split_delimiter)
if len(splitted_row) == 1:
return float(splitted_row[0])
return map(lambda x: float(x), splitted_row)
return (reshape(array(map(lambda x: split_and_convert(x), data)), (nrow, ncol)), header_line)
return (reshape(array(data), (nrow, ncol)), header_line)
def load_from_text_file(filename, convert_to_float=False, split_delimiter=' ', comment=None):
"""Reads character data from a file. If 'convert_to_float' is False, it returns a strings array.
Otherwise it returns a numpy array. In the latter case, a row is splitted using the 'split_delimiter'.
If comment is not None, lines that start with that character are ignored.
"""
from numpy import array
text_file = file(filename, "rb")
line = text_file.readline()
data = []
while (line != ''):
while line.endswith('\n') or line.endswith('\r'):
line = line[:-1]
if (comment is None) or not line.startswith(comment):
data.append(line)
line = text_file.readline()
text_file.close()
if convert_to_float:
def split_and_convert(row):
splitted_row = row.split(split_delimiter)
if len(splitted_row) == 1:
return float(splitted_row[0])
return map(lambda x: float(x), splitted_row)
return array(map(lambda x: split_and_convert(x), data))
return array(data)
def remove_directories_with_this_name(top, dir_name):
"""Removes any directory contained in top whose name is dir_name.
"""
for root, dirs, files in os.walk(top, topdown=False):
for name in dirs:
if dir_name in name:
shutil.rmtree(os.path.join(root, dir_name))
def replace_string_in_files(directory, find, replace):
"""
Replace (recursively) all instances of find with replace for any file
in directory.
"""
from opus_core.third_party.path import path
dir = path(directory)
for file in dir.walkfiles():
f = open(file)
in_text = f.read()
f.close()
out_text = in_text.replace(find, replace)
if in_text != out_text:
file.write_text(out_text)
def get_distinct_list(list):
"""Returns a list of distinct elements of the argument list."""
newlist = []
for item in list:
if not(item in newlist):
newlist = newlist + [item]
return newlist
def create_string_list(prefix, n):
"""Create a list of strings 'prefix+number' with number=1,...,n.
"""
name_list = []
for i in range(n):
name_list.append(prefix + `i+1`)
return name_list
def list2string(l, sep=' '):
"""Return a string created from the elements of the list 'l' separated by 'sep'."""
return sep.join(["%s" % x for x in l])
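# Illustrative examples (not executed; they mirror the unit tests below):
#   list2string([42, 900.4, 20.333])             -> "42 900.4 20.333"
#   list2string(["aaa", 5, "xx", 6.8], sep=', ') -> "aaa, 5, xx, 6.8"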
def check_dimensions(array1, array2):
"""Return True if all dimensions of array1 correspond to all dimensions of array2, otherwise False.
Both arrays have to be numpy arrays.
"""
return array1.shape == array2.shape
def remove_all(valuelist, element):
"""Remove all occurences of element in the list.
"""
result = []
for item in valuelist:
if element != item:
result.append(item)
return result
def remove_elements_with_matched_prefix_from_list(valuelist, prefixlist):
"""Remove all occurences of elements with one of the prefix in the prefixlist
from valuelist.
"""
from numpy import array, reshape, compress, apply_along_axis, logical_and
def match_prefix(prefix, arr):
def match_single_element(item, prefix):
q = re.match('^'+prefix, item[0])
if q:
return 0
return 1
t = apply_along_axis(match_single_element, 1, arr.reshape(arr.size,1), prefix[0])
return t
result = valuelist
pl = array(prefixlist)
m = apply_along_axis(match_prefix, 1, pl.reshape(pl.size,1), array(valuelist))
l = logical_and(m[0,:], m[1,:])
return compress(l, result)
def type_convert(valuelist):
"""Convert numerical values of the given list to float. Nonnumerical values are unchanged."""
def tryconvert(x):
try:
return float(x)
except:
return x
return map(lambda x: tryconvert(x), valuelist)
def get_distinct_names(namesarray):
"""Return a list of distinct names from a list of names"""
from numpy import array, reshape, sort, asarray
if len(namesarray) <= 0:
return array([], dtype='int32')
allnames = asarray(namesarray).copy()
allnames.sort()
n = allnames.shape[0]
names = []
i = 0
while i < n:
matches = ematch(allnames, allnames[i])
names.append(allnames[i])
i += len(matches)
return reshape(array(names), (len(names),))
def take_choices(what, choices):
"""'what' is a 1D or a 2D array, 'choices' is a 1D array. It returns a 1D array of what[choices] or what[:,choices].
"""
from numpy import zeros
result = zeros((what.shape[0],), dtype="int32")
if what.ndim > 1:
for i in range(what.shape[0]):
result[i] = what[i,choices[i]]
else:
result = what[choices]
return result
def sample_1d(population, k, probabilities):
"""Called from sample, here probabilities must be 1D array."""
from numpy import searchsorted
from numpy.random import random
n = population.size
if n != probabilities.shape[0]:
raise StandardError, "Mismatch in size of population and probabilities."
cumulative_probability = probabilities.cumsum()
if cumulative_probability[n-1]<=0:
raise StandardError, "Sum of probabilities must be > 0."
cumulative_probability = cumulative_probability/float(cumulative_probability[n-1])
draw = random([k])
choices = searchsorted(cumulative_probability, draw)
return population[choices]
def sample(population, k, probabilities=None):
"""Return a sample (array) of size k from the 'population' according to the given 'probabilities' (sampling with replacement).
'probabilities' can be 'None', 1D or 2D array. In case of 2D array, i-th row is considered as probabilities
for i-th draw. The probabilities do not need to sum to 1, they are rescaled within the procedure. They should
be positive and their sum should be > 0.
"""
from numpy import array, reshape, where, alltrue
from numpy.random import random
if isinstance(population, list):
population = array(population)
n = population.size
if probabilities is None:
probabilities = array(n*[1.0/n])
if isinstance(probabilities, list):
probabilities = array(probabilities)
if probabilities.ndim <= 1:
return sample_1d(population, k, probabilities)
if (n != probabilities.shape[1]) or (k != probabilities.shape[0]):
raise StandardError, "Mismatch in size of population and probabilities."
cumulative_probability = probabilities.cumsum(axis=1)
if not alltrue(cumulative_probability[:,n-1]):
raise StandardError, "Sums of probabilities must be > 0."
cumulative_probability = cumulative_probability/reshape(cumulative_probability[:,n-1],(k,1)).astype('float32')
draw = random([k])
choices = reshape(n-where(reshape(draw, (k,1)) <= cumulative_probability,1,0).sum(axis=1), (k,))
if choices.max() >= n:
raise StandardError, "Something is wrong with the probabilities."
return population[choices]
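# Illustrative example (not executed, and the result is random): drawing two
# items with replacement, favouring the middle elements:
#   sample(array([1, 2, 3, 4]), 2, probabilities=array([0.1, 0.4, 0.4, 0.1]))
# A 2D probabilities argument of shape (k, n) supplies one probability row per draw.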
def intersect1d(ar1, ar2, **kwargs):
""" wrapper for numpy.intersect1d and numpy.intersect1d_nu for different version of numpy
"""
import numpy
ver = parse_version(numpy.__version__)
if ver < parse_version('1.4.0'):
f = numpy.intersect1d_nu
return f(ar1, ar2)
else:
f = numpy.intersect1d
return f(ar1, ar2, **kwargs)
def unique(arr, return_index=False, **kwargs):
""" unique function that return unsorted unique elements
"""
need_index = True
unique_results = np_unique(arr, return_index=need_index, **kwargs)
unique_elements, unique_index = unique_results[0], unique_results[1]
unique_index_unsorted = unique_index.argsort()
unique_elements = unique_elements[unique_index_unsorted]
unique_index = unique_index[unique_index_unsorted]
if len(unique_results)>2:
# reverse unique index is returned
return (unique_elements, unique_index) + unique_results[2:]
if return_index:
return unique_elements, unique_index
else:
return unique_elements
def np_unique(arr, return_index=False, **kwargs):
""" wrapper for numpy.unique and numpy.unique1d for different version of numpy
"""
ver = parse_version(numpy.__version__)
if ver < parse_version('1.2.0'): #numpy 1.0 and 1.1 don't accept extra argument
f = numpy.unique1d
unique_results = f(arr, return_index=return_index)[::-1]
elif ver < parse_version('1.3.0'): #numpy 1.2+ accepts return_inverse argument
f = numpy.unique1d
unique_results = f(arr, return_index=return_index, **kwargs)[::-1]
elif ver < parse_version('1.4.0'): #numpy 1.3 reverses the order of outputs from unique1d
f = numpy.unique1d
unique_results = f(arr, return_index=return_index, **kwargs)
else: #unique1d is deprecated in numpy 1.4+, use unique instead
f = numpy.unique
unique_results = f(arr, return_index=return_index, **kwargs)
return unique_results
def uniquend(arr):
"""Returns unique elements of arr; works with n-dimension array
"""
# from numpy import array, reshape, where, concatenate
#
# new_array = reshape(array(arr[0]), (1,arr.shape[1]))
# if arr.shape[0] > 1:
# for item in arr[1:]:
# s = new_array.sum(axis=1)
# w = where(s == arr.shape[1])[0]
# if w.size <= 0:
# new_array = concatenate((new_array,reshape(item, (1,arr.shape[1]))))
# if arr.ndim == 1:
# return reshape(new_array,(arr.size,))
# return new_array
## LW: was above, which I think is incorrect:
#from numpy.randon import randint
#arr=randint(0, 3, size=100)
#arr.resize((50,2))
#unique(arr)
#>>>array([[2, 0]]) #depending on the values of b, sometimes unique gives correct results by chance
if arr.ndim == 1:
return unique(arr)
d = {}
for element in arr:
k = totuple(element)
if k not in d.keys():
d[k] = 1
else:
d[k] += 1
return array(d.keys())
def has_this_method(object, method_name):
"""Does this object have a method named method_name?"""
members = getmembers(object)
method_found = map(lambda x: method_name in x, members)
return (1 in method_found) and ismethod(getattr(object, method_name))
def all_in_list(list1, list2):
""" Return 1 if all elements of list1 are contained in list2, otherwise 0."""
return set(list1) <= set(list2)
def do_id_mapping(recarray, fieldname):
return do_id_mapping_dict_from_array(recarray.field(fieldname))
def do_id_mapping_dict_from_array(id_array):
from numpy import ndarray
if isinstance(id_array, ndarray) and id_array.ndim > 1: # transfer to tuples, since dict does not work with arrays as keys
new_id_array = map(lambda x: tuple(x.tolist()), id_array)
else:
new_id_array = id_array
id_mapping = {}
i = 0
for row in new_id_array:
id_mapping[row] = i
i += 1
return id_mapping
def do_id_mapping_array_from_array(id_array, minid=None, maxid=None):
# id_array must be a 1D numpy array
from numpy import resize, array, arange, ones
if id_array.size <= 0:
return array([], dtype="int32")
if maxid is None:
maxid = long(id_array.max())
if minid is None:
minid = long(id_array.min())
id_mapping = -1 * ones(maxid-minid+1, dtype='i')
id_mapping[id_array-minid] = arange(id_array.size).astype(id_mapping.dtype)
return id_mapping
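# Illustrative example (not executed): for id_array = array([10, 12, 15]) the
# returned mapping covers ids 10..15 and is array([0, -1, 1, -1, -1, 2]),
# i.e. id_mapping[id - minid] gives the position of id in id_array (or -1).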
def remove_from_array(arr, index):
"""Remove elements given by index from a numpy 'arr'."""
from numpy import compress, ones
a = ones(arr.shape, dtype="int32")
a[index] = 0
return compress(a,arr)
def flatten_list(list):
"""Flattens a nested 'list' (of 2 levels)."""
return [element for sublist in list for element in sublist]
def ncumsum(prob_array, axis=0, dtype='float64'):
"""n(ormalized)-cumsum that normalizes the cumsum result by dividing the array
by the last item in cumsum result"""
from numpy import take, ma
if not ma.allclose(prob_array.sum(axis=axis, dtype=dtype), 1.0, rtol=1.e-2):
raise ValueError, "The probability array must sum up to 1. It is " + \
str(prob_array.sum(axis=axis, dtype=dtype))
cum_prob = prob_array.cumsum(axis=axis, dtype=dtype)
return cum_prob / take(cum_prob, (-1,), axis=axis)
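# Illustrative example (not executed): ncumsum(array([0.2, 0.3, 0.5]))
# returns array([0.2, 0.5, 1.0]); the input must already sum to (about) 1.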
def corr(var_array, *var_arrays):
"""return the correlation between vars"""
from numpy import zeros, concatenate, newaxis, dot, transpose
try:
if var_array.ndim == 2:
X = var_array
elif var_array.ndim == 1:
X = var_array[:, newaxis]
for var in var_arrays:
if var_array.ndim == 2:
Y = var
elif var_array.ndim == 1:
Y = var[:, newaxis]
X = concatenate((X, Y), axis=1)
except:
raise ValueError, "Input variable arrays must have the same number of observations"
#from scipy.stats import corrcoef
from numpy import corrcoef
return corrcoef(X, rowvar=0)
def quantile(values, probs):
"""Return quantiles of given probability from the values. 'values' and probs are numpys.
The function returns an array of the same shape as 'probs'. Quantile is here defined as
the nearest order statistic."""
from numpy import sort, maximum
sorted_values = sort(values)
return sorted_values[maximum((probs*values.size-1).astype("int32"),0)]
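# Illustrative example (not executed; it mirrors the unit test below):
#   quantile(array([35, 6, 22, 1, 60]), array([0.2, 0.9, 0.5])) -> array([1, 35, 6])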
def is_masked_array(a):
"""test whether the argument is a masked array. (This is a function because ma.array used
to be a class, and now it's a function, and the actual class changed subpackage, so we can't just
test it directly.)"""
ma_array_type = type(ma.array([3]))
return isinstance(a, ma_array_type)
def unique_values(arr, return_index=False, **kwargs):
import warnings
warnings.warn("opus_core.misc.unique_values is deprecated; use opus_core.misc.unique instead.", DeprecationWarning)
return unique(arr, return_index=return_index, **kwargs)
#def unique_values(input_array, sort_values=True):
# """return unique elements of input_array
# input_array - a sortable numpy array or list object
# """
# from numpy import array, ndarray, sort, where
# import copy
# if isinstance(input_array, ndarray):
# if input_array.ndim <> 1:
# input_array = input_array.ravel()
# raise RuntimeWarning, "input_array is converted into a rank-1 array"
# elif not isinstance(input_array, list):
# raise TypeError, "input_array must be of type ndarray or list."
#
# n = len(input_array)
# if n == 0:
# return array([], dtype='int32')
#
# t = copy.copy(input_array)
# try:
# t.sort()
# except TypeError:
# del t
# raise RuntimeError, "input_array is not sortable; unique_values fails."
# else:
# assert n > 0
# last = t[0]
# lasti = i = 1
# while i < n:
# if t[i] != last:
# t[lasti] = last = t[i]
# lasti += 1
# i += 1
# if sort_values:
# return t[:lasti]
# else:
# if isinstance(input_array, ndarray):
# unsorted_index = [where(input_array==v)[0][0] for v in t[:lasti]]
# unsorted_index.sort()
# return input_array[unsorted_index]
# else:
# unsorted_index = [input_array.index(v) for v in t[:lasti]]
# unsorted_index.sort()
# return [input_array[n] for n in unsorted_index]
def ismember(ar1, ar2) :
"""Return a Boolean 1-d array of the length of ar1 which is True whenever that
element is contained in ar2 and False when it is not.
(The numpy function setmember1d claims to do the same but works only on ar1 with unique values.)
"""
import numpy
if parse_version(numpy.__version__) >= parse_version('1.4.0'):
return numpy.in1d(ar1, ar2)
a = numpy.sort(ar2)
il = a.searchsorted(ar1, side='left')
ir = a.searchsorted(ar1, side='right')
return ir != il
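# Illustrative example (not executed):
#   ismember(array([0, 3, 2, 9, 7, 10]), array([1, 9, 2, 7, 3, 5, 6]))
# returns array([False, True, True, True, True, False]).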
def get_host_name():
"""Get the host name of this computer in a platform-independent manner."""
fullname = socket.gethostname()
# Depending on the operating system, fullname might be just the machine name,
# or might also have the full internet address; and it might be in lower or
# upper case. Normalize to be just the machine name, in lower case.
return fullname.split('.')[0].lower()
def clip_to_zero_if_needed(values, function=""):
from numpy import clip
global_min = values.min()
if global_min >= 0:
return values
global_max = values.max()
logger.log_warning("Negative values detected in function/method '%s'" % function)
logger.log_warning("Minimum: %s. Negative values clipped to zero." % global_min)
return clip(values, 0, global_max)
def convert_lower_case_with_underscores_into_camel_case(name):
"""Creates CamelCase name from this lower_case_with_underscores name.
"""
return ''.join(map(lambda s: s.capitalize(), name.split('_')))
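# Illustrative example (not executed):
#   convert_lower_case_with_underscores_into_camel_case('a_class') -> 'AClass'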
def get_camel_case_class_name_from_opus_path(opus_path):
"""return CamelCase class name from opus_path.
"""
class_name = opus_path.split('.')[-1]
class_name = convert_lower_case_with_underscores_into_camel_case(class_name)
return class_name
def create_import_for_camel_case_class(opus_path, import_as=None):
"""Creates statement to import this class.
opus_path is full Opus path.
Module name is lower_case_with_underscores.
Class name is CamelCase version of module name.
To do import, exec returned statement.
"""
class_name = get_camel_case_class_name_from_opus_path(opus_path)
if import_as is not None:
return 'from %s import %s as %s' % (opus_path, class_name, import_as)
else:
return 'from %s import %s' % (opus_path, class_name)
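# Illustrative example (not executed; it mirrors the unit test below):
#   create_import_for_camel_case_class('a_package.a_dir.a_class', import_as='alias')
# returns 'from a_package.a_dir.a_class import AClass as alias'.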
def get_config_from_opus_path(opus_path):
class_name = get_camel_case_class_name_from_opus_path(opus_path)
import_stmt = 'from %s import %s' % (opus_path, class_name)
exec(import_stmt)
# Create a local variable 'config' with the configuration.
stmt = 'config = %s()' % class_name
exec(stmt)
# Return the config that we just created.
return config
def create_import_for_class(classpath, class_name):
"""Creates an import statement that may be exec'ed to import the given class
from the Opus path specified by the config_key.
"""
return 'from %s import %s' % (classpath, class_name)
def is_file_in_directory(file, directory):
"""Return True if 'file' in 'directory', otherwise False."""
files = os.listdir(directory)
if file in files:
return True
return False
def safe_array_divide(numerator, denominator, return_value_if_denominator_is_zero=0.0, type='float32', warning='ignore'):
"""If denominator == 0, return return_value_if_denominator_is_zero.
Else return numerator / denominator. numerator and denominator must be numpy arrays.
warning = {'ignore', 'warn', 'raise', 'call', 'print', 'log'}, optional, treatment for division by zero
"""
seterr(all=warning)
results = divide(numerator, denominator.astype(type))
#results > inf is there to force broadcasting of denominator
results[(results > inf) | (denominator == 0)] = return_value_if_denominator_is_zero
return results
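# Illustrative example (not executed; it mirrors the unit test below):
#   safe_array_divide(array([10, 20, 30, 40]), array([4, 0, 10, 0]))
# returns array([2.5, 0., 3., 0.]) -- zero denominators yield the fallback value.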
def safe_array_divide_ma(numerator, denominator, return_value_if_denominator_is_zero=0.0, type='float32', warning='ignore'):
"""If denominator == 0, return return_value_if_denominator_is_zero.
Else return numerator / denominator. numerator and denominator must be numpy arrays.
warning = {'ignore', 'warn', 'raise', 'call', 'print', 'log'}, optional, treatment for division by zero
"""
seterr(all=warning)
results = ma.filled(numerator/ma.masked_where(denominator == 0, denominator.astype(type)),
return_value_if_denominator_is_zero)
return results
def try_transformation(data, transformation):
"""Performs 'transformation' on 'data'. Transformation is a character string naming a function
implemented in numpy, e.g. 'sqrt', 'log'. It can be also '**2' for power, or '* 10' for simple
multiplication (the same holds for addition etc.). The 'data' is a numpy array.
If the procedure fails, a warning is issued and
the raw data are returned. Otherwise the transformed data are returned.
"""
if transformation is None:
tdata = data
else:
try:
exec("from numpy import %s" % transformation)
tdata = eval("%s(data)" % transformation)
except:
try:
tdata = eval("data%s" % transformation) # would work for e.g. **2, * 10, ...
except:
logger.log_warning("Transformation not successful. Using raw data instead.")
tdata = data
return tdata
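# Illustrative examples (not executed; they mirror the unit tests below), with
# a = array([9, 4, 25, 36]):
#   try_transformation(a, "sqrt") -> array([3., 2., 5., 6.])
#   try_transformation(a, "*2")   -> array([18, 8, 50, 72])
#   try_transformation(a, "**2")  -> a**2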
def create_combination_indices(shape):
"""Creates an index array which is a combination of all posibilities in shape. shape is a tuple."""
from numpy import product, indices, zeros, array
lshape = len(shape)
if lshape == 0:
return array([])
tindices = indices(shape)
index = zeros((product(shape), lshape), dtype="int32")
for idim in range(lshape):
index[:,idim] = tindices[idim].ravel()
return index
def get_string_or_None(arg):
if arg is not None:
return "'%s'" % arg
return None
def digits(int_scalar_or_int_sequence):
""" return number of digits """
if hasattr(int_scalar_or_int_sequence, '__iter__'):
return [digits(scalar) for scalar in int_scalar_or_int_sequence]
n = int_scalar_or_int_sequence
if n < 0:
n = -n
d = 0
step = 1
while (step <= n):
d += 1
step *= 10
return max(d, 1)
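# Illustrative examples (not executed; they mirror the unit tests below):
#   digits(382795)                     -> 6
#   digits([0, -777, 777, 328795, 23]) -> [1, 3, 3, 6, 2]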
def djbhash(sequence):
"""Hash function from D J Bernstein
References:
http://www.gossamer-threads.com/lists/python/python/679002#679002
http://eternallyconfuzzled.com/tuts/algorithms/jsw_tut_hashing.aspx
"""
h = 5381L
for i in sequence:
t = (h * 33) & 0xffffffffL
h = t ^ i
return h
def fnvhash(sequence):
"""Fowler, Noll, Vo Hash function
References:
http://www.gossamer-threads.com/lists/python/python/679002#679002
http://eternallyconfuzzled.com/tuts/algorithms/jsw_tut_hashing.aspx
http://www.isthe.com/chongo/tech/comp/fnv/
"""
h = 2166136261
for i in sequence:
t = (h * 16777619) & 0xffffffffL
h = t ^ i
return h
def ndsum(input, labels, index=None):
""" extend scipy.ndimage.sum to handle labels with multi-array
index argument is not used
e.g.
input = array([3, 7, 4, 6, 2, 5 ])
attr_a = array([0, 0, 1, 0, 1, 1])
attr_b = array([3, 1, 2, 1, 2, 0])
result = ndsum(input, labels=column_stack([attr_a, attr_b]))
print result
>>> (array([13, 3, 5, 6]), (array([0, 0, 1, 1]), array([1, 3, 0, 2])) )
"""
if labels is None or not isinstance(labels, ndarray):
return sum(input, labels=labels, index=index)
assert input.size == labels.shape[0]
#labels = column_stack(labels)
hash_table = {}
def hashlabel(label):
hash_value = djbhash(label)
hash_table.update({hash_value:label})
return hash_value
labels_hash = array(map(hashlabel, labels)).astype("int32")
index = array(hash_table.keys()).astype("int32")
value = array(hash_table.values())
result = sum(input, labels=labels_hash, index=index)
return array(result), [value[:, col] for col in range(value.shape[-1])]
def get_dataset_from_storage(dataset_name, directory, storage_type, package_order=['opus_core'], dataset_args=None):
""" Returns an object of class Dataset (or its child) with data stored in a storage of type 'storage_type' in 'directory'. If the child class is defined in a specific package,
this package must be included in 'package_order'. If there is no child class definition for this 'dataset_name', set 'dataset_args' to a dictionary
(possibly empty) and a generic Dataset will be returned. 'dataset_args' should contain entries needed as arguments for the Dataset class, e.g. 'in_table_name', 'id_name'.
"""
from opus_core.storage_factory import StorageFactory
from opus_core.datasets.dataset_pool import DatasetPool
from opus_core.datasets.dataset_factory import DatasetFactory
from opus_core.datasets.dataset import Dataset
storage = StorageFactory().get_storage(storage_type, storage_location = directory)
if dataset_args is None:
pool = DatasetPool(storage=storage, package_order=package_order)
return pool.get_dataset(dataset_name)
dataset_args.update({'in_storage':storage})
try:
return DatasetFactory().search_for_dataset(dataset_name, package_order, arguments=dataset_args)
except: # take generic dataset
return Dataset(dataset_name=dataset_name, **dataset_args)
def get_dataset_from_tab_storage(dataset_name, directory, package_order=['opus_core'], dataset_args=None):
"""See doc string to get_dataset_from_storage which is called with storage_type='tab_storage'."""
return get_dataset_from_storage(dataset_name, directory, 'tab_storage', package_order=package_order, dataset_args=dataset_args)
def lookup(subarray, fullarray, index_if_not_found=-1):
"""
look up subarray in fullarray, return the index of
subarray's elements in fullarray; fill index with
index_if_not_found for elements not found in fullarray
>>> a = array([1, 9, 2, 7, 3, 5, 6])
>>> b = array([0, 3, 2, 9, 7, 10])
>>> lookup(b, a)
array([-1, 4, 2, 1, 3, -1])
"""
from numpy import arange, searchsorted, not_equal, rank
if rank(subarray)!=1 or rank(fullarray)!=1:
raise ValueError, "lookup only works with 1-d input arrays."
array_size = fullarray.size
index_all = arange(array_size)
index_argsort = fullarray.argsort()
index_sorted = searchsorted(fullarray[index_argsort], subarray)
## to avoid "index out of range" error
index_sorted[index_sorted == array_size] = array_size - 1
index_unsorted = index_all[index_argsort][index_sorted]
index_unsorted[not_equal(fullarray[index_unsorted], subarray)] = index_if_not_found
return index_unsorted
def totuple(arr):
"""
equivalent of tolist() for ndarray
"""
return tuple(map(totuple, arr)) if arr.ndim>1 else tuple(arr)
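# Illustrative example (not executed; it mirrors the unit test below):
#   totuple(array([[1, 2], [3, 5], [7, 9]])) -> ((1, 2), (3, 5), (7, 9))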
from opus_core.tests import opus_unittest
import opus_core
class MiscellaneousTests(opus_unittest.OpusTestCase):
def setUp(self):
# make a temp directory for use by multiple tests
self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
def tearDown(self):
# remove the temp directory (note that tearDown gets called even if a unit tests fails)
shutil.rmtree(self.temp_dir)
def test_digits(self):
import copy
a = 382795
d1 = digits(a)
self.assertEqual(d1, 6)
self.assertEqual(a, 382795)
b = [0, -777, 777, 328795, 23]
c = copy.copy(b)
d2 = digits(b)
self.assertEqual(d2, [1, 3, 3, 6, 2])
self.assertEqual(b, c)
def test_ndsum(self):
from numpy import array, column_stack, argsort, allclose, zeros
input = array([3, 7, 4, 6, 2, 5 ])
attr_a = array([0, 0, 1, 0, 1, 1])
attr_b = array([3, 1, 2, 1, 2, 0])
result = ndsum(input, labels=column_stack([attr_a, attr_b]))
result_mat = zeros( (1+result[1][0].max(), 1+result[1][1].max()) )
result_mat[result[1]] = result[0]
expected = ( array([13, 3, 5, 6]), (array([0, 0, 1, 1]), array([1, 3, 0, 2])) )
expected_mat = zeros( (2, 4))
expected_mat[expected[1]] = expected[0]
self.assert_(allclose(result_mat, expected_mat))
def test_opus_path_for_variable_from_module_path(self):
file_path = os.path.join('C:', 'foo', 'workspace', 'package_name', 'dataset_name', 'variable_name.py')
self.assertEqual(opus_path_for_variable_from_module_path(file_path),
'package_name.dataset_name.variable_name')
def test_create_import_for_camel_case_class(self):
self.assertEqual(create_import_for_camel_case_class('a_package.a_dir.a_class', import_as='alias'),
'from a_package.a_dir.a_class import AClass as alias')
self.assertEqual(create_import_for_camel_case_class('a_package.a_dir.a_class'),
'from a_package.a_dir.a_class import AClass')
def test_remove_directories_with_this_name(self):
files = [
['CVS', 'f1'],
['d1', 'f2'],
['d1', 'CVS', 'f3'],
['f4'],
]
for t in files:
path = self.temp_dir
for n in t:
path = os.path.join(path, n)
os.makedirs(path)
self.assert_(os.path.exists(path))
remove_directories_with_this_name(self.temp_dir, 'CVS')
for t in files:
path = self.temp_dir
for n in t:
path = os.path.join(path, n)
if 'CVS' in t:
self.assert_(not os.path.exists(path))
else:
self.assert_(os.path.exists(path))
# make sure we didn't accidentally delete the temp directory itself
self.assert_(os.path.exists(self.temp_dir))
def test_concatenate_on_strings(self):
from numpy import array, concatenate, alltrue
a = array(['a','bb','ccc'])
b = array(['ddd','ee'])
self.assert_(alltrue(concatenate([a,b]) == array(['a','bb','ccc','ddd','ee'])))
def test_concatenate_on_ints(self):
from numpy import array, concatenate, alltrue
a = array([1,2,3])
b = array([44,55])
self.assert_(alltrue(concatenate([a,b]) == array([1,2,3,44,55])))
def test_concatenate_on_mix_of_ints_and_floats(self):
from numpy import array, concatenate, alltrue
a = array([1,2,3])
b = array([4.4,5.5])
self.assert_(alltrue(concatenate([a,b]) == array([1,2,3,4.4,5.5])))
#def test_concatenate_on_mix_of_ints_and_strings(self):
#a = array(['a','bb','ccc'])
#b = array([44,55])
#threw_exception = False
#try:
#concatenate([a,b])
#except Exception:
#threw_exception = True
#self.assert_(threw_exception)
def test_concatenate_on_2d_ints(self):
from numpy import array, reshape, concatenate
a = array([['1','2','3'],['4','5','6']])
b = array(['44','55']).reshape(2,1)
try:
concatenate([a,b])
except Exception:
threw_exception = True
self.assert_(threw_exception)
def test_clip_to_zero_if_needed(self):
from numpy import array, ma
logger.enable_hidden_error_and_warning_words()
result = clip_to_zero_if_needed(array([0,3,6,-4,8]), "test1")
logger.disable_hidden_error_and_warning_words()
self.assertEqual(ma.allequal(result, array([0,3,6,0,8])), True, msg = "Error in test_clip_to_zero_if_needed" )
result = clip_to_zero_if_needed(array([0,3,6,4,8.5]), "test2")
self.assertEqual(ma.allequal(result, array([0,3,6,4,8.5])), True, msg = "Error in test_clip_to_zero_if_needed" )
def test_module_path_from_opus_path(self):
opus_core_path = opus_core.__path__[0]
self.assertEqual(module_path_from_opus_path('opus_core.__init__'),
os.path.join(opus_core_path, '__init__.py'))
def test_create_import_for_class(self):
config = {'model':'opus_core.model'}
expected_import_statement = 'from opus_core.model import Model'
import_statement = create_import_for_class(config['model'], 'Model')
self.assertEqual(import_statement, expected_import_statement)
def test_is_file_in_directory(self):
opus_core_path = opus_core.__path__[0]
self.assertEqual(is_file_in_directory('data', opus_core_path), True)
self.assertEqual(is_file_in_directory('dataxxx', opus_core_path), False)
def test_safe_array_divide(self):
from numpy import array, ma
a = array([10, 20, 30, 40])
b = array([4, 0, 10, 0])
self.assertEqual(ma.allequal(safe_array_divide(a,b), array([2.5, 0, 3, 0])), True)
def test_safe_array_divide_broadcast(self):
a = array([10, 0, 30, 40])
b = array([0])
self.assert_(allclose(safe_array_divide(a,b), array([0, 0, 0, 0])))
def test_transformation(self):
from numpy import array, ma
a = array([9, 4, 25, 36])
self.assertEqual(ma.allequal(try_transformation(a, "sqrt"), array([3, 2, 5, 6])), True)
self.assertEqual(ma.allequal(try_transformation(a, "*2"), array([18, 8, 50, 72])), True)
self.assertEqual(ma.allequal(try_transformation(a, "**2"), a**2), True)
def test_quantile(self):
from numpy import array, ma
a = array([35, 6, 22, 1, 60])
b = array([6, 3, 5, 9, 1, 7, 10, 2, 8, 4, 0])
self.assertEqual(ma.allequal(quantile(a, array([0.2, 0.9, 0.5])), array([1, 35, 6])), True)
self.assertEqual(ma.allequal(quantile(b, array([0, 0.2, 0.9, 0.5, 1])), array([0, 1, 8, 4, 10])), True)
def test_remove_elements_with_matched_prefix(self):
from numpy import array, alltrue
a = array(["max_attribute1", "min_attribute1", "attribute1", "attribute2_max", "attribute2"])
prefix_list = ["min", "max"]
result = remove_elements_with_matched_prefix_from_list(a, prefix_list)
self.assertEqual(result.size == 3, True, msg = "Error in test_remove_elements_with_matched_prefix: Size of the resulting array must be 3.")
self.assertEqual(alltrue(result == array(["attribute1", "attribute2_max", "attribute2"])), True,
msg = "Error in test_remove_elements_with_matched_prefix" )
def test_remove_elements_with_matched_prefix_with_constraints_header(self):
from numpy import array, alltrue
a = array(["constraint_id", "city_id", "is_in_wetland", "min_units", "max_units", "min_commercial_sqft",
"max_commercial_sqft", "min_industrial_sqft", "max_industrial_sqft"])
prefix_list = ["min", "max"]
result = remove_elements_with_matched_prefix_from_list(a, prefix_list)
self.assertEqual(result.size == 3, True, msg = "Error in test_remove_elements_with_matched_prefix_with_constraints_header: Size of the resulting array must be 3.")
self.assertEqual(alltrue(result == array(["constraint_id", "city_id", "is_in_wetland"])), True,
msg = "Error in test_remove_elements_with_matched_prefix_with_constraints_header" )
def test_ematch(self):
from numpy import array, ma
self.assertEqual(ma.allequal(ematch(array(["abcde", "abcd"]), "abcd"), array([1])), True,
msg = "Error in ematch.")
self.assertEqual(ma.allequal(ematch(array(["ab(c]de", "abcd"]), "ab(c]de"), array([0])), True,
msg = "Error in ematch.")
def test_get_indices_of_matched_items(self):
from numpy import array, ma
self.assertEqual(ma.allequal(get_indices_of_matched_items(array(["abcde", "abcd", "vvv"]),
array(["abcd", "vvv"])), array([1,2])), True,
msg = "Error in get_indices_of_matched_items.")
def test_directory_path_from_opus_path(self):
path_to_opus_core = opus_core.__path__[0]
input_output = (
('opus_core.tests', os.path.join(path_to_opus_core, 'tests')),
('opus_core.a_directory_that_does_not_exist', os.path.join(path_to_opus_core, 'a_directory_that_does_not_exist')),
('opus_core', os.path.join(path_to_opus_core)),
)
for input, output in input_output:
result = directory_path_from_opus_path(input)
self.assertEqual(result, output)
def test_is_masked_array(self):
import numpy
a1 = ma.array([3])
self.assert_(is_masked_array(a1))
a2 = numpy.array([4])
self.assert_(not is_masked_array(a2))
def test_copytree(self):
dest = os.path.join(self.temp_dir, 'dest')
os.mkdir(dest)
dirs = [
['d1', 'd2', 'd3', 'CVS', 'sub'],
['d2', 'd3'],
['d4', 'CVS', 'd1'],
['d5'],
['d6', '.svn', 'd1', 'd2'],
]
for t in dirs:
path = self.temp_dir
for n in t:
path = os.path.join(path, n)
os.makedirs(path)
source = os.path.join(self.temp_dir, t[0])
sub = os.path.join(dest, t[0])
copytree(source, sub, skip_subdirectories=['CVS', '.svn'])
for t in dirs:
path = dest
for n in t:
path = os.path.join(path, n)
if 'CVS' in t or '.svn' in t:
self.assert_(not os.path.exists(path))
else:
self.assert_(os.path.exists(path))
def test_unique(self):
from numpy import array, all
a = array([0.01, 0.1, 0.01, 0.2, 0.1, 0.5, 0.08])
self.assertTrue(all(ismember(unique(a), array([0.01, 0.08, 0.1, 0.2, 0.5]))))
self.assertTrue(all(ismember(unique(a, return_index=True)[0], array([0.01, 0.08, 0.1, 0.2, 0.5]))))
self.assertTrue(all(ismember(unique(a, return_index=True)[1], array([0, 6, 1, 3, 5]))))
# check that ordering is retained
self.assertArraysEqual(unique(a), array([0.01, 0.1, 0.2, 0.5, 0.08]))
#self.assertArraysEqual(unique(a, return_index=True)[0], array([0.01, 0.08, 0.1, 0.2, 0.5]))
#self.assertArraysEqual(unique(a, return_index=True)[1], array([0, 6, 1, 3, 5]))
#list
b = [0.01, 0.1, 0.01, 0.2, 0.1, 0.5, 0.08]
self.assertTrue(all(ismember(unique(b), array([0.01, 0.08, 0.1, 0.2, 0.5]))))
self.assertTrue(all(ismember(unique(b, return_index=True)[0], array([0.01, 0.08, 0.1, 0.2, 0.5]))))
self.assertTrue(all(ismember(unique(b, return_index=True)[1], array([0, 6, 1, 3, 5]))))
def test_get_dataset_from_tab_storage(self):
import opus_core
attribute = 'g2'
location = os.path.join(opus_core.__path__[0], 'data', 'tab')
dataset = get_dataset_from_tab_storage('test', directory=location)
self.assertAlmostEqual(21, dataset.attribute_sum(attribute))
def test_list2string(self):
self.assertEqual(list2string([42, 900.4, 20.333]), "42 900.4 20.333")
self.assertEqual(list2string(["aaa", 5, "xx", 6.8], sep=', '), "aaa, 5, xx, 6.8")
def test_get_distinct_list(self):
self.assertEquals(get_distinct_list([]), [])
self.assertEquals(get_distinct_list(['zxq', 'zxq', 5.4, 9, ['3', 'a'], 5.4, 5.4, ['3', 'a']]), ['zxq', 5.4, 9, ['3', 'a']] )
def test_flatten_list(self):
nestedList = [3, 4.0, 'five']
testList = [nestedList]
self.assertEquals(flatten_list(testList), nestedList)
def test_ismember(self):
from numpy import array
a = array([1, 2])
b = array([1, 2])
c = array([1, 1])
d = array([2, 1, 2, 2, 1])
e = array([])
f = array([3,1])
self.assertEqual(ismember(a,a).all(), ismember(a,b).all(), array([True, True]).all())
self.assertEqual(ismember(c,a).all(), ismember(a,d).all(), array([True, True]).all())
self.assertEqual(ismember(d,a).all(), array([True, True, True, True, True]).all())
self.assertEqual(ismember(d,c).all(), array([False, True, False, False, True]).all())
self.assertEqual(ismember(a,c).all(), array([True, False]).all())
self.assertEqual(ismember(a,f).all(), array([False, True]).all())
self.assertEqual(ismember(a,e).all(), ismember(a,f).all(), array([False, False]).all())
# also tests the load_from_text_file function
def test_write_to_text_file(self):
from numpy import array
file_name = os.path.join(self.temp_dir,'misc_test_file')
arr = array(['a', 'b', 'c'])
delim = '|'
write_to_text_file(file_name, arr, 'wb', delim)
written_data = ''
i=0
for i in range(len(arr)-1):
written_data += (arr[i] + delim)
written_data += arr[i+1]
self.assertEqual(load_from_text_file(file_name), written_data)
def test_create_string_list(self):
self.assertEqual(create_string_list('prefix',0), [])
self.assertEqual(create_string_list('prefix',3), ['prefix1','prefix2','prefix3'])
def test_remove_all(self):
self.assertEqual(remove_all((),()), [])
self.assertEqual(remove_all((1,'a'),'b'), [1,'a'])
self.assertEqual(remove_all(('a', 1,'a', 'a'),'a'), [1])
def test_lookup(self):
from numpy import array, alltrue
a = array([1, 9, 2, 7, 3, 5, 6])
b = array([0, 3, 2, 9, 7, 10])
expected = array([-1, 4, 2, 1, 3, -1])
self.assert_(alltrue(lookup(b, a)==expected))
def test_totuple(self):
a = array([1, 2, 3, 5, 7, 9])
result = totuple(a)
self.assertEqual(result, (1,2,3,5,7,9))
a = array([[1, 2], [3, 5], [7,9]])
result = totuple(a)
self.assertEqual(result, ((1,2),(3,5),(7,9)))
a = array([[[1, 2], [3, 5]], [[7, 9], [4, 6]]])
result = totuple(a)
self.assertEqual(result, (((1,2),(3,5)),((7,9), (4,6))))
def test_uniquend(self):
from numpy.random import randint
b = randint(0, 5, size=100)
result = uniquend(b)
self.assertTrue( all([i in b for i in result]) )
self.assertTrue( all([i in result for i in b]) )
self.assertTrue( set(b) == set(result) )
b = randint(0, 3, size=100)
b.resize((50,2))
result = uniquend(b)
self.assertTrue( all([i in b for i in result]) )
self.assertTrue( all([i in result for i in b]) )
self.assertTrue( set(totuple(b)) == set(totuple(result)) )
b = randint(0, 2, size=54)
b.resize((9,3,2))
result = uniquend(b)
self.assertTrue( all([i in b for i in result]) )
self.assertTrue( all([i in result for i in b]) )
self.assertTrue( set(totuple(b)) == set(totuple(result)) )
if __name__ == "__main__":
opus_unittest.main()
|
kreegr/PolyPasswordHasher
|
refs/heads/master
|
python-reference-implementation/setup.py
|
3
|
#! /usr/bin/env python
from distutils.core import setup, Extension
import sys
print "This is currently test code... You have been warned!"
# Must have Python >= 2.5 and < 3.0. If Python version == 2.5.X, then
# simplejson is required.
if sys.version_info[0] != 2 or sys.version_info[1] < 5:
print "Requires Python >= 2.5 and < 3.0"
sys.exit(1)
fastpolymath_c = Extension("fastpolymath_c",
sources=["fastpolymath.c"])
setup( name="PolyPasswordHasher",
version="0.0-prealpha",
ext_modules=[fastpolymath_c],
description="""An early version of PolyPasswordHasher.""",
author="Justin Cappos",
author_email="jcappos@poly.edu",
)
|
youfoh/webkit-efl
|
refs/heads/tizen
|
Tools/CodeCoverage/cov.py
|
27
|
# Copyright (C) 2004, 2005, 2006 Nathaniel Smith
# Copyright (C) 2006, 2007 Holger Hans Peter Freyther
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import csv
import time
import os.path
import shutil
def analyze_coverage(possible_gcov_files, source_files, runid, data_dir, base):
if not os.path.exists(data_dir):
os.makedirs(data_dir)
output = open(os.path.join(data_dir, runid + ".csv"), "w")
w = csv.writer(output)
# First row: id and time
w.writerow([runid, time.time()])
results = scan_gcov_files(possible_gcov_files, source_files)
annotated_dir = os.path.join(data_dir, runid + ".annotated")
if os.path.exists(annotated_dir):
shutil.rmtree(annotated_dir)
keys = results.keys()
keys.sort()
for path in keys:
(total, covered, annotated_data) = results[path]
path = path[path.find(base)+len(base):]
# Rest of the rows: filename, total_lines, covered_lines
w.writerow([path, total, covered])
if path[:1] == "/":
path = path[1:]
annotated_path = os.path.join(annotated_dir, path)
try:
os.makedirs(os.path.dirname(annotated_path))
except OSError:
pass
a = open(annotated_path, "w")
a.write(annotated_data)
a.close()
# zecke's rewrite
STATE_NOT_CODE = -1
STATE_NOT_SEEN = -2
STATE_TEST_CODE = -3
def find_gcov(f, possible_gcovs):
"""
Find .gcov files that could be of interest for us
"""
try:
return possible_gcovs[f]
except KeyError:
return []
def parse_source_file(file):
"""
Parse one source file and return a list of lines
"""
f_source_list = []
init_state = STATE_NOT_SEEN
in_test_code = False
nesting = 0
for line in open(file, "r"):
code = line.split(":", 2)[-1]
if not in_test_code and code.startswith("#ifdef BUILD_UNIT_TESTS"):
in_test_code = 1
if in_test_code and code.startswith("#if"):
nesting += 1
if in_test_code and code.startswith("#endif"):
nesting -= 1
if not nesting:
in_test_code = False  # closing the outermost #ifdef BUILD_UNIT_TESTS ends the test-code region
if in_test_code:
init_state = STATE_TEST_CODE
else:
init_state = STATE_NOT_SEEN
f_source_list.append([init_state, line.split(":", 1)[1]])
return f_source_list
# Runner-up, 3rd annual "write Python that looks like Perl" competition,
# Well, not really. It doesn't even use regexps.
# He is right so I'm cleaning it up (zecke)
def scan_gcov_files(possible_gcov_files, source_files):
"""Takes a list of gcov filenames and a list of source filenames.
The gcov files should have names of the form foo.o##foo.cc.gcov, as
created by 'gcov -l'.
Returns a dict mapping source filenames to tuples
(total_lines, tested_lines, gcov_annotated_source)
which are a number, a number, and a very long string, respectively.
The fun bit is that we merge .gcov output generated by different object
files; this way we can provide accurate information for header files and
for monotone's current unit test system."""
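# For example (illustrative only), a gcov file named 'foo.o##foo.cc.gcov', as
# produced by 'gcov -l', contributes coverage lines for foo.cc; several such
# files for the same source file are merged below.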
results = {}
for f in source_files:
possible_gcovs = find_gcov(f, possible_gcov_files)
base_name = os.path.splitext(os.path.basename(f))[0]
if len(possible_gcovs) == 0:
print "No gcov files found for: '%s' but it was compiled" % f
continue
(garbage,extension) = os.path.splitext(f)
if extension in [".cc", ".c", ".moc", ".cpp", ".cxx", ".m", ".mm"]:
lines = open(f, "r").readlines()
results[f] = (len(lines), 0, "".join(lines))
continue
elif len(possible_gcovs) > 1:
print "More than one gcov file for %s %d" % (f,len(possible_gcovs))
base_gcov_lines = parse_source_file(possible_gcovs[0])
# Now we will try hard to merge the results with others
# Our requirement is that we have the same number of lines as
# the original file
for cov_file in possible_gcovs:
lines = open(cov_file, "r").readlines()
# e.g. with phonon we have visualisation.h and we cannot know
# which header file (folder name) it is referring to. This is a gcov
# limitation and I have no workaround yet. We just hope we will pick
# the right header file...
if len(lines) != len(base_gcov_lines):
print "Error Base: %s and Target: %s have different amount of lines" % (possible_gcovs[0],cov_file)
continue
# now do the merging of the file. If it has the same basename
# and the same number of lines things might work out
# In the future take a look at the header of the file
i = 0
for line in lines:
accumulator = base_gcov_lines[i]
if accumulator[0] != STATE_TEST_CODE:
info = line.split(":", 1)[0]
if info.endswith("-"):
if accumulator[0] == STATE_NOT_SEEN:
accumulator[0] = STATE_NOT_CODE
else:
if info.endswith("#"):
num = 0
else:
num = int(info)
if accumulator[0] in (STATE_NOT_SEEN, STATE_NOT_CODE):
accumulator[0] = 0
accumulator[0] += num
i += 1
# post processing of this file
(total_lines, total_covered) = (0, 0)
annotated_lines = []
for state, line in base_gcov_lines:
if state == STATE_NOT_SEEN:
desc = "?????"
elif state == STATE_TEST_CODE:
desc = "+"
elif state == STATE_NOT_CODE:
desc = "-"
elif state == 0:
desc = "#####"
total_lines += 1
else:
desc = str(state)
total_lines += 1
total_covered += 1
annotated_lines.append(":".join([desc.rjust(9), line]))
results[f] = (total_lines, total_covered, "".join(annotated_lines))
return results
|
jojohappy/falcon-plus
|
refs/heads/master
|
modules/transfer/scripts/query.py
|
3
|
import requests
# Copyright 2017 Xiaomi, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import json
end = int(time.time())
start = end - 3600
d = {
"start": start,
"end": end,
"cf": "AVERAGE",
"endpoint_counters": [
{
"endpoint": "lg-op-mon-onebox01.bj",
"counter": "load.1min",
},
{
"endpoint": "lg-op-mon-onebox01.bj",
"counter": "load.5min",
},
{
"endpoint": "lg-op-mon-onebox01.bj",
"counter": "load.15min",
},
],
}
url = "http://localhost:3060/graph/history"
r = requests.post(url, data=json.dumps(d))
print r.text
#curl "http://query.falcon.miliao.srv:9966/graph/history/one?cf=AVERAGE&endpoint=`hostname`&start=`date -d '1 hours ago' +%s`&counter=load.1min" |python -m json.tool
|
kielr/aquaquest
|
refs/heads/master
|
data/constants.py
|
1
|
"""
This module contains constants used by the game.
"""
__author__ = "kiel.regusters"
# Game Info
SCREEN_SIZE = (800, 600)
TILE_SIZE = 16
# Player info
MAX_VEL_X = 10
MAX_VEL_Y = 20
GRAVITY = 1
JUMP_GRAVITY = 0.5
JUMP_VEL = -7
DOUBLE_JUMP_VEL = -7
LEVEL_CAP = 20
STAT_CAP = 9
# Player states
WALKING = "walking"
IDLE = "idle"
FALLING = "falling"
JUMP = "jump"
DEAD = "dead"
DOUBLEJUMP = "djump"
NOTATTACKING = "noattack"
ATTACKING = "attack"
LEVEL_AVAILABLE = "levelavailable"
NO_SP = "nosp"
# Player forces
ACCEL = 0.5
# Game States
MAIN_MENU = "main menu"
OPTIONS_MENU = "optionsmenu"
LOAD_SCREEN = "load screen"
MAP = "map"
CONTINUE = "continue"
LOADMAP = "loadmap"
GAMEOVER = "gameover"
MUTE = False
# Real Time States
PAUSE = "pause"
UNPAUSE = "unpause"
# SoundManager states
NORMAL = "normal"
s_MENU = "sound menu"
# Level states
LEVEL1 = "level1"
LEVEL2 = "level2"
# Cursor states
PLAY = "play"
LOAD = "load"
OPTIONS = "options"
QUIT = "quit"
BACK = "back"
MUSIC_MUTE = "mute"
# Colors
BLACK = (0, 0, 0)
# ZOOM
ZOOM = 3
|
hkchenhongyi/django
|
refs/heads/master
|
tests/properties/models.py
|
542
|
"""
Using properties on models
Use properties on models just like on any other Python object.
"""
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
def _get_full_name(self):
return "%s %s" % (self.first_name, self.last_name)
def _set_full_name(self, combined_name):
self.first_name, self.last_name = combined_name.split(' ', 1)
full_name = property(_get_full_name)
full_name_2 = property(_get_full_name, _set_full_name)
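# Illustrative usage of the properties above (shown as a comment because
# executing model code at import time does not belong in models.py):
#     p = Person(first_name="Mary", last_name="Shelley")
#     p.full_name                       # -> "Mary Shelley" (read-only property)
#     p.full_name_2 = "Percy Shelley"   # setter splits into first_name/last_name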
|
mozilla/ichnaea
|
refs/heads/main
|
ichnaea/taskapp/app.py
|
1
|
"""
Holds global celery application state and startup / shutdown handlers.
"""
from celery import Celery
from celery.app import app_or_default
from celery.signals import (
beat_init,
worker_process_init,
worker_process_shutdown,
setup_logging,
)
from ichnaea.log import configure_logging
from ichnaea.taskapp.config import (
configure_celery,
init_beat,
init_worker,
shutdown_worker,
)
@setup_logging.connect
def setup_logging_process(loglevel, logfile, format, colorize, **kwargs):
"""Called at scheduler and worker setup.
Configures logging using the same configuration as the webapp.
"""
configure_logging()
@beat_init.connect
def init_beat_process(signal, sender, **kw):
"""
Called automatically when `celery beat` is started.
Calls :func:`ichnaea.taskapp.config.init_beat`.
"""
celery_app = app_or_default()
init_beat(sender, celery_app)
@worker_process_init.connect
def init_worker_process(signal, sender, **kw):
"""
Called automatically when `celery worker` is started. This is executed
inside each forked worker process.
Calls :func:`ichnaea.taskapp.config.init_worker`.
"""
# get the app in the current worker process
celery_app = app_or_default()
init_worker(celery_app)
@worker_process_shutdown.connect
def shutdown_worker_process(signal, sender, **kw):
"""
Called automatically when `celery worker` is stopped. This is executed
inside each forked worker process.
Calls :func:`ichnaea.taskapp.config.shutdown_worker`.
"""
celery_app = app_or_default()
shutdown_worker(celery_app)
celery_app = Celery("ichnaea.taskapp.app")
configure_celery(celery_app)
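# Hedged usage note (not part of the original module): with the signal handlers
# above connected, worker and scheduler processes would typically be launched
# with the standard Celery CLI, e.g.
#     celery -A ichnaea.taskapp.app worker
#     celery -A ichnaea.taskapp.app beat
# so that init_worker_process / init_beat_process run at startup.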
|
grundprinzip/Impala
|
refs/heads/cdh5-trunk
|
thirdparty/hive-0.13.1-cdh5.4.0-SNAPSHOT/lib/py/thrift/protocol/TProtocol.py
|
83
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import *
class TProtocolException(TException):
"""Custom Protocol Exception class"""
UNKNOWN = 0
INVALID_DATA = 1
NEGATIVE_SIZE = 2
SIZE_LIMIT = 3
BAD_VERSION = 4
def __init__(self, type=UNKNOWN, message=None):
TException.__init__(self, message)
self.type = type
class TProtocolBase:
"""Base class for Thrift protocol driver."""
def __init__(self, trans):
self.trans = trans
def writeMessageBegin(self, name, type, seqid):
pass
def writeMessageEnd(self):
pass
def writeStructBegin(self, name):
pass
def writeStructEnd(self):
pass
def writeFieldBegin(self, name, type, id):
pass
def writeFieldEnd(self):
pass
def writeFieldStop(self):
pass
def writeMapBegin(self, ktype, vtype, size):
pass
def writeMapEnd(self):
pass
def writeListBegin(self, etype, size):
pass
def writeListEnd(self):
pass
def writeSetBegin(self, etype, size):
pass
def writeSetEnd(self):
pass
def writeBool(self, bool):
pass
def writeByte(self, byte):
pass
def writeI16(self, i16):
pass
def writeI32(self, i32):
pass
def writeI64(self, i64):
pass
def writeDouble(self, dub):
pass
def writeString(self, str):
pass
def readMessageBegin(self):
pass
def readMessageEnd(self):
pass
def readStructBegin(self):
pass
def readStructEnd(self):
pass
def readFieldBegin(self):
pass
def readFieldEnd(self):
pass
def readMapBegin(self):
pass
def readMapEnd(self):
pass
def readListBegin(self):
pass
def readListEnd(self):
pass
def readSetBegin(self):
pass
def readSetEnd(self):
pass
def readBool(self):
pass
def readByte(self):
pass
def readI16(self):
pass
def readI32(self):
pass
def readI64(self):
pass
def readDouble(self):
pass
def readString(self):
pass
def skip(self, type):
if type == TType.STOP:
return
elif type == TType.BOOL:
self.readBool()
elif type == TType.BYTE:
self.readByte()
elif type == TType.I16:
self.readI16()
elif type == TType.I32:
self.readI32()
elif type == TType.I64:
self.readI64()
elif type == TType.DOUBLE:
self.readDouble()
elif type == TType.STRING:
self.readString()
elif type == TType.STRUCT:
name = self.readStructBegin()
while True:
(name, type, id) = self.readFieldBegin()
if type == TType.STOP:
break
self.skip(type)
self.readFieldEnd()
self.readStructEnd()
elif type == TType.MAP:
(ktype, vtype, size) = self.readMapBegin()
for i in range(size):
self.skip(ktype)
self.skip(vtype)
self.readMapEnd()
elif type == TType.SET:
(etype, size) = self.readSetBegin()
for i in range(size):
self.skip(etype)
self.readSetEnd()
elif type == TType.LIST:
(etype, size) = self.readListBegin()
for i in range(size):
self.skip(etype)
self.readListEnd()
class TProtocolFactory:
def getProtocol(self, trans):
pass
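# Hedged note (not part of the original file): concrete protocols such as
# TBinaryProtocol subclass TProtocolBase and override the read*/write* methods,
# while their companion factories subclass TProtocolFactory and build a
# protocol instance from the given transport in getProtocol(trans).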
|
AlekhyaMallina-Vedams/CI
|
refs/heads/master
|
nodepool/scripts/cache_git_repos.py
|
2
|
#!/usr/bin/env python
# Copyright (C) 2011-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import re
import shutil
import sys
import urllib2
from common import run_local
URL = ('https://git.openstack.org/cgit/openstack-infra/project-config/'
'plain/gerrit/projects.yaml')
PROJECT_RE = re.compile('^-?\s+project:\s+(.*)$')
# Not using an argument-parsing library in order to avoid module imports that
# are not available across all python versions
if len(sys.argv) > 1:
GIT_BASE = sys.argv[1]
else:
GIT_BASE = 'https://git.openstack.org'
def clone_repo(project):
remote = '%s/%s.git' % (GIT_BASE, project)
# Clear out any existing target directory first, in case of a retry.
try:
shutil.rmtree(os.path.join('/opt/git', project))
except OSError:
pass
# Try to clone the requested git repository.
(status, out) = run_local(['git', 'clone', remote, project],
status=True, cwd='/opt/git')
# If it claims to have worked, make sure we can list branches.
if status == 0:
(status, moreout) = run_local(['git', 'branch', '-a'], status=True,
cwd=os.path.join('/opt/git', project))
out = '\n'.join((out, moreout))
# If that worked, try resetting to HEAD to make sure it's there.
if status == 0:
(status, moreout) = run_local(['git', 'reset', '--hard', 'HEAD'],
status=True,
cwd=os.path.join('/opt/git', project))
out = '\n'.join((out, moreout))
    # Status of 0 implies all the above worked, 1 means something failed.
return (status, out)
def main():
# TODO(jeblair): use gerrit rest api when available
data = urllib2.urlopen(URL).read()
for line in data.split('\n'):
# We're regex-parsing YAML so that we don't have to depend on the
# YAML module which is not in the stdlib.
m = PROJECT_RE.match(line)
if m:
project = m.group(1)
dirname = os.path.dirname(project)
# Skip repos that are inactive
if not ('attic' in dirname or dirname == 'stackforge'):
(status, out) = clone_repo(project)
print out
if status != 0:
print 'Retrying to clone %s' % m.group(1)
(status, out) = clone_repo(m.group(1))
print out
if status != 0:
raise Exception('Failed to clone %s' % m.group(1))
if __name__ == '__main__':
main()
|
samuelmaudo/yepes
|
refs/heads/master
|
yepes/contrib/datamigrations/importation_plans/direct.py
|
1
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from yepes.contrib.datamigrations.importation_plans import ModelImportationPlan
class DirectPlan(ModelImportationPlan):
updates_data = False
def import_batch(self, batch):
model = self.migration.model
manager = model._base_manager
manager.bulk_create(
model(**row)
for row
in batch
)
|
damdam-s/OpenUpgrade
|
refs/heads/8.0
|
addons/portal/tests/__init__.py
|
261
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_portal
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sundream/gamesrv
|
refs/heads/master
|
daobiao/parser/parse_achievement.py
|
1
|
# -*- coding: utf-8 -*-
from base import *
import os
import sys
def parse_achievement(sheet_name,sheet_data,dstpath):
cfg = {
"linefmt" :
"""
[%(id)d] = {
id = %(id)d,
name = "%(name)s",
event = "%(event)s",
target = %(target)d,
award = %(award)s,
desc = "%(desc)s",
},
["%(name)s"] = {
id = %(id)d,
name = "%(name)s",
event = "%(event)s",
target = %(target)d,
award = %(award)s,
desc = "%(desc)s",
},
""",
}
sheet = CSheet(sheet_name,sheet_data)
daobiao(sheet,"data_achievement",cfg,dstpath)
parses = {
"成就" : parse_achievement,
}
if __name__ == "__main__":
if len(sys.argv) != 3:
print("usage: python parse_award.py xlsfilename dstpath")
exit(0)
xlsfilename = sys.argv[1]
dstpath = sys.argv[2]
myparsexls(xlsfilename,dstpath,parses)
|
pubudu538/product-private-paas
|
refs/heads/master
|
components/org.wso2.ppaas.python.cartridge.agent/src/main/python/cartridge.agent/cartridge.agent/constants.py
|
2
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
PARAM_FILE_PATH = "param.file.path"
PLUGINS_DIR = "plugins.dir"
EXTENSIONS_DIR = "extensions.dir"
MB_URLS = "mb.urls"
MB_IP = "mb.ip"
MB_PORT = "mb.port"
MB_USERNAME = "mb.username"
MB_PASSWORD = "mb.password"
MB_PUBLISHER_TIMEOUT = "mb.publisher.timeout"
CARTRIDGE_KEY = "CARTRIDGE_KEY"
APPLICATION_ID = "APPLICATION_ID"
APPLICATION_PATH = "APPLICATION_PATH"
SERVICE_GROUP = "SERIVCE_GROUP"
SERVICE_NAME = "SERVICE_NAME"
CLUSTER_ID = "CLUSTER_ID"
CLUSTER_INSTANCE_ID = "CLUSTER_INSTANCE_ID"
MEMBER_ID = "MEMBER_ID"
INSTANCE_ID = "INSTANCE_ID"
LB_CLUSTER_ID = "LB_CLUSTER_ID"
NETWORK_PARTITION_ID = "NETWORK_PARTITION_ID"
PARTITION_ID = "PARTITION_ID"
TENANT_ID = "TENANT_ID"
REPO_URL = "REPO_URL"
PORTS = "PORTS"
PERSISTENCE_MAPPING = "PERSISTENCE_MAPPING"
DEPENDENCY_CLUSTER_IDS = "DEPENDENCY_CLUSTER_IDS"
EXPORT_METADATA_KEYS = "EXPORT_METADATA_KEYS"
IMPORT_METADATA_KEYS = "IMPORT_METADATA_KEYS"
CARTRIDGE_ALIAS = "CARTRIDGE_ALIAS"
TOKEN = "TOKEN"
LVS_VIRTUAL_IP = "LVS_VIRTUAL_IP"
# stratos.sh environment variables keys
LOG_FILE_PATHS = "log.file.paths"
MEMORY_CONSUMPTION = "memory_consumption"
LOAD_AVERAGE = "load_average"
PORTS_NOT_OPEN = "ports_not_open"
MULTITENANT = "MULTITENANT"
CLUSTERING = "CLUSTERING"
MIN_INSTANCE_COUNT = "MIN_COUNT"
ENABLE_ARTIFACT_UPDATE = "enable.artifact.update"
ARTIFACT_UPDATE_INTERVAL = "artifact.update.interval"
ARTIFACT_CLONE_RETRIES = "artifact.clone.retries"
ARTIFACT_CLONE_INTERVAL = "artifact.clone.interval"
COMMIT_ENABLED = "COMMIT_ENABLED"
AUTO_COMMIT = "auto.commit"
AUTO_CHECKOUT = "auto.checkout"
LISTEN_ADDRESS = "listen.address"
PROVIDER = "PROVIDER"
INTERNAL = "INTERNAL"
LB_PRIVATE_IP = "lb.private.ip"
LB_PUBLIC_IP = "lb.public.ip"
METADATA_SERVICE_URL = "metadata.service.url"
SERVICE_GROUP_TOPOLOGY_KEY = "payload_parameter.SERIVCE_GROUP"
CLUSTERING_TOPOLOGY_KEY = "payload_parameter.CLUSTERING"
CLUSTERING_PRIMARY_KEY = "PRIMARY"
SUPERTENANT_TEMP_PATH = "/tmp/-1234/"
SUPER_TENANT_REPO_PATH = "super.tenant.repository.path"
TENANT_REPO_PATH = "tenant.repository.path"
# topic names to subscribe
INSTANCE_NOTIFIER_TOPIC = "instance/#"
HEALTH_STAT_TOPIC = "health/#"
TOPOLOGY_TOPIC = "topology/#"
INITIALIZER_TOPIC = "initializer/"
TENANT_TOPIC = "tenant/#"
INSTANCE_STATUS_TOPIC = "instance/status/"
APPLICATION_SIGNUP = "application/signup/#"
# Messaging Model
TENANT_RANGE_DELIMITER = "-"
# MB events
ARTIFACT_UPDATED_EVENT = "ArtifactUpdatedEvent"
INSTANCE_STARTED_EVENT = "InstanceStartedEvent"
INSTANCE_ACTIVATED_EVENT = "InstanceActivatedEvent"
INSTANCE_MAINTENANCE_MODE_EVENT = "InstanceMaintenanceModeEvent"
INSTANCE_READY_TO_SHUTDOWN_EVENT = "InstanceReadyToShutdownEvent"
INSTANCE_CLEANUP_CLUSTER_EVENT = "InstanceCleanupClusterEvent"
INSTANCE_CLEANUP_MEMBER_EVENT = "InstanceCleanupMemberEvent"
COMPLETE_TOPOLOGY_EVENT = "CompleteTopologyEvent"
COMPLETE_TOPOLOGY_REQUEST_EVENT = "CompleteTopologyRequestEvent"
COMPLETE_TENANT_REQUEST_EVENT = "CompleteTenantRequestEvent"
COMPLETE_TENANT_EVENT = "CompleteTenantEvent"
DOMAIN_MAPPING_ADDED_EVENT = "DomainMappingAddedEvent"
DOMAIN_MAPPING_REMOVED_EVENT = "DomainMappingRemovedEvent"
MEMBER_INITIALIZED_EVENT = "MemberInitializedEvent"
MEMBER_ACTIVATED_EVENT = "MemberActivatedEvent"
MEMBER_TERMINATED_EVENT = "MemberTerminatedEvent"
MEMBER_SUSPENDED_EVENT = "MemberSuspendedEvent"
MEMBER_STARTED_EVENT = "MemberStartedEvent"
TENANT_SUBSCRIBED_EVENT = "TenantSubscribedEvent"
APPLICATION_SIGNUP_REMOVAL_EVENT = "ApplicationSignUpRemovedEvent"
# ADC related extensions
ARTIFACT_CHECKOUT_JOB = "ArtifactCheckoutJob"
ARTIFACT_COMMIT_JOB = "ArtifactCommitJob"
CREATE_LVS_DUMMY_INTERFACE = "CreateLVSDummyInterface"
PRIMARY = "PRIMARY"
MIN_COUNT = "MIN_COUNT"
# multi tenant constants
INVALID_TENANT_ID = "-1"
SUPER_TENANT_ID = "-1234"
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
PORT_CHECK_TIMEOUT = "port.check.timeout"
CEP_PUBLISHER_ENABLED = "cep.stats.publisher.enabled"
CEP_RECEIVER_URLS = "thrift.receiver.urls"
CEP_SERVER_ADMIN_USERNAME = "thrift.server.admin.username"
CEP_SERVER_ADMIN_PASSWORD = "thrift.server.admin.password"
MONITORING_PUBLISHER_ENABLED = "enable.data.publisher"
MONITORING_RECEIVER_IP = "monitoring.server.ip"
MONITORING_RECEIVER_PORT = "monitoring.server.port"
MONITORING_RECEIVER_SECURE_PORT = "monitoring.server.secure.port"
MONITORING_SERVER_ADMIN_USERNAME = "monitoring.server.admin.username"
MONITORING_SERVER_ADMIN_PASSWORD = "monitoring.server.admin.password"
LOG_ANALYZER_ENABLED = "enable.http.log.publisher"
LOG_ANALYZER_URL = "log.analyzer.url"
LOG_ANALYZER_USERNAME = "log.analyzer.username"
LOG_ANALYZER_PASSWORD = "log.analyzer.password"
|
bearstech/ansible
|
refs/heads/devel
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_launch.py
|
9
|
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_launch
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: Launch an Ansible Job.
description:
    - Launch an Ansible Tower job. See
U(https://www.ansible.com/tower) for an overview.
options:
job_template:
description:
- Name of the job_template to use.
required: True
job_explanation:
description:
- Job explanation field.
default: null
job_type:
description:
- Job_type to use for the job, only used if prompt for job_type is set.
choices: ["run", "check", "scan"]
default: null
inventory:
description:
- Inventory to use for the job, only used if prompt for inventory is set.
default: null
credential:
description:
- Credential to use for job, only used if prompt for credential is set.
default: null
extra_vars:
description:
- Extra_vars to use for the job_template. Prepend '@' if a file.
default: null
limit:
description:
- Limit to use for the job_template.
default: null
tags:
description:
        - Specific tags to use from the playbook.
default: null
use_job_endpoint:
description:
- Disable launching jobs from job template.
default: False
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Launch a job
tower_job_launch:
job_template: "My Job Template"
register: job
- name: Wait for job max 120s
tower_job_wait:
    job_id: "{{ job.id }}"
timeout: 120
'''
RETURN = '''
id:
description: job id of the newly launched job
returned: success
type: int
sample: 86
status:
description: status of newly launched job
returned: success
type: string
sample: pending
'''
from ansible.module_utils.basic import AnsibleModule
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import (
tower_auth_config,
tower_check_mode,
tower_argument_spec,
)
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
argument_spec = tower_argument_spec()
argument_spec.update(dict(
job_template=dict(required=True),
job_type=dict(choices=['run', 'check', 'scan']),
inventory=dict(),
credential=dict(),
limit=dict(),
tags=dict(type='list'),
extra_vars=dict(type='list'),
))
module = AnsibleModule(
argument_spec,
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
json_output = {}
tags = module.params.get('tags')
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
try:
params = module.params.copy()
if isinstance(tags, list):
params['tags'] = ','.join(tags)
job = tower_cli.get_resource('job')
lookup_fields = ('job_template', 'inventory', 'credential')
for field in lookup_fields:
try:
name = params.pop(field)
result = tower_cli.get_resource(field).get(name=name)
params[field] = result['id']
except exc.NotFound as excinfo:
module.fail_json(msg='Unable to launch job, {0}/{1} was not found: {2}'.format(field, name, excinfo), changed=False)
result = job.launch(no_input=True, **params)
json_output['id'] = result['id']
json_output['status'] = result['status']
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Unable to launch job: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
|
LMSlay/wiper
|
refs/heads/master
|
modules/rats/clientmesh.py
|
6
|
# Originally written by Kevin Breen (@KevTheHermit):
# https://github.com/kevthehermit/RATDecoders/blob/master/ClientMesh.py
import re
import string
from base64 import b64decode
def stringPrintable(line):
return filter(lambda x: x in string.printable, line)
def first_split(data):
splits = data.split('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7e')
if len(splits) == 2:
return splits[1]
def base64_deocde(b64_string):
return b64decode(b64_string)
def conf_extract(coded_config):
conf_list = []
decoded_conf = base64_deocde(coded_config)
split_list = decoded_conf.split('``')
for conf in split_list:
conf_list.append(conf)
return conf_list
def process_config(raw_config):
conf_dict = {}
conf_dict['Domain'] = raw_config[0]
conf_dict['Port'] = raw_config[1]
conf_dict['Password'] = raw_config[2]
conf_dict['CampaignID'] = raw_config[3]
conf_dict['MsgBoxFlag'] = raw_config[4]
conf_dict['MsgBoxTitle'] = raw_config[5]
conf_dict['MsgBoxText'] = raw_config[6]
conf_dict['Startup'] = raw_config[7]
conf_dict['RegistryKey'] = raw_config[8]
conf_dict['RegistryPersistance'] = raw_config[9]
conf_dict['LocalKeyLogger'] = raw_config[10]
conf_dict['VisibleFlag'] = raw_config[11]
conf_dict['Unknown'] = raw_config[12]
return conf_dict
def config(data):
coded_config = first_split(data)
raw_config = conf_extract(coded_config)
final_config = process_config(raw_config)
return final_config
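# Illustrative call sequence (an assumption, not part of the original module):
#     with open("sample_clientmesh.bin", "rb") as f:
#         conf = config(f.read())
# `config()` returns the dict built by process_config() when the expected
# delimiter and base64 blob are present; the file name above is hypothetical.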
|
eonpatapon/contrail-controller
|
refs/heads/master
|
src/config/device-manager/device_manager/plugins/juniper/qfx/qfx_conf.py
|
1
|
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
"""
This file contains the implementation of the netconf interface for the QFX
physical router configuration manager.
"""
from db import *
from dm_utils import DMUtils
from juniper_conf import JuniperConf
from juniper_conf import JunosInterface
from device_api.juniper_common_xsd import *
import abc
class QfxConf(JuniperConf):
_FAMILY_MAP = {
'route-target': '',
'e-vpn': FamilyEvpn(signaling='')
}
@classmethod
def is_product_supported(cls, name, role):
if role and role.lower().startswith('e2-'):
return False
for product in cls._products or []:
if name.lower().startswith(product.lower()):
return True
return False
# end is_product_supported
def __init__(self):
super(QfxConf, self).__init__()
self.evpn = None
self.global_switch_options_config = None
self.vlans_config = None
# end __init__
def is_spine(self):
if self.physical_router.physical_router_role == 'spine':
return True
return False
# end is_spine
def initialize(self):
self.evpn = None
self.global_switch_options_config = None
self.chassis_config = None
self.vlans_config = None
self.irb_interfaces = []
self.internal_vn_ris = []
super(QfxConf, self).initialize()
# end initialize
def add_families(self, parent, params):
if params.get('address_families') is None:
return
families = params['address_families'].get('family', [])
if not families:
return
family_etree = Family()
parent.set_family(family_etree)
for family in families:
fam = family.replace('-', '_')
if family in ['e-vpn', 'e_vpn']:
fam = 'evpn'
if family in self._FAMILY_MAP:
getattr(family_etree, "set_" + fam)(self._FAMILY_MAP[family])
else:
self._logger.info("DM does not support address family: %s on QFX" % fam)
# end add_families
def attach_irb(self, ri_conf, ri):
if not self.is_spine():
return
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
if (is_l2 and vni is not None and
self.is_family_configured(self.bgp_params, "e-vpn")):
if is_l2_l3:
self.irb_interfaces.append("irb." + str(network_id))
# end attach_irb
def set_internal_vn_irb_config(self):
if self.internal_vn_ris and self.irb_interfaces:
for int_ri in self.internal_vn_ris:
lr_uuid = DMUtils.extract_lr_uuid_from_internal_vn_name(int_ri.name)
lr = LogicalRouterDM.get(lr_uuid)
if not lr:
continue
vn_list = lr.get_connected_networks(include_internal=False)
for vn in vn_list:
vn_obj = VirtualNetworkDM.get(vn)
irb_name = "irb." + str(vn_obj.vn_network_id)
if irb_name in self.irb_interfaces:
int_ri.add_interface(Interface(name=irb_name))
# end set_internal_vn_irb_config
def add_irb_config(self, ri_conf):
vn = ri_conf.get("vn")
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
gateways = ri_conf.get("gateways", [])
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
self.interfaces_config = interfaces_config
irb_intf = Interface(name='irb', gratuitous_arp_reply='')
interfaces_config.add_interface(irb_intf)
self._logger.info("Vn=" + vn.name + ", IRB: " + str(gateways) + ", pr=" + self.physical_router.name)
if gateways is not None:
intf_unit = Unit(name=str(network_id),
comment=DMUtils.vn_irb_comment(vn, False, is_l2_l3))
irb_intf.add_unit(intf_unit)
if self.is_spine():
intf_unit.set_proxy_macip_advertisement('')
family = Family()
intf_unit.set_family(family)
inet = None
inet6 = None
for (irb_ip, gateway) in gateways:
if ':' in irb_ip:
if not inet6:
inet6 = FamilyInet6()
family.set_inet6(inet6)
addr = Address()
inet6.add_address(addr)
else:
if not inet:
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
addr.set_name(irb_ip)
addr.set_comment(DMUtils.irb_ip_comment(irb_ip))
if len(gateway) and gateway != '0.0.0.0':
addr.set_virtual_gateway_address(gateway)
# end add_irb_config
    # lo0 interface in RI so that route lookup happens for inter-VN traffic
    # (qfx10k PFE limitation)
def add_bogus_lo0(self, ri, network_id, vn):
if not self.is_spine():
return
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
ifl_num = str(1000 + int(network_id))
lo_intf = Interface(name="lo0")
interfaces_config.add_interface(lo_intf)
intf_unit = Unit(name=ifl_num, comment=DMUtils.l3_bogus_lo_intf_comment(vn))
lo_intf.add_unit(intf_unit)
family = Family()
intf_unit.set_family(family)
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
lo_ip = "127.0.0.1/32"
addr.set_name(lo_ip)
ri.add_interface(Interface(name="lo0." + ifl_num))
self.interfaces_config = interfaces_config
# end add_bogus_lo0
'''
ri_name: routing instance name to be configured on mx
is_l2: a flag used to indicate routing instance type, i.e : l2 or l3
is_l2_l3: VN forwarding mode is of type 'l2_l3' or not
import/export targets: routing instance import, export targets
prefixes: for l3 vrf static routes
gateways: for l2 evpn
interfaces: logical interfaces to be part of vrf
    network_id : this is used for configuring irb interfaces
'''
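    # A hypothetical ri_conf matching the docstring above (values are purely
    # illustrative assumptions, not taken from a real deployment):
    #     ri_conf = {
    #         'ri_name': '_contrail_vn1_l2', 'vn': vn_obj, 'is_l2': True,
    #         'is_l2_l3': True, 'import_targets': {'target:64512:8000001'},
    #         'export_targets': {'target:64512:8000001'}, 'prefixes': [],
    #         'gateways': [('10.0.0.1/24', '10.0.0.1')], 'interfaces': [],
    #         'vni': 5001, 'network_id': 5,
    #     }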
def add_routing_instance(self, ri_conf):
ri_name = ri_conf.get("ri_name")
vn = ri_conf.get("vn")
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
import_targets = ri_conf.get("import_targets", set())
export_targets = ri_conf.get("export_targets", set())
prefixes = ri_conf.get("prefixes", [])
gateways = ri_conf.get("gateways", [])
interfaces = ri_conf.get("interfaces", [])
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
is_internal_vn = True if '_contrail_lr_internal_vn_' in vn.name else False
self.routing_instances[ri_name] = ri_conf
ri_config = None
policy_config = self.policy_config or \
PolicyOptions(comment=DMUtils.policy_options_comment())
ri = None
ri_opt = None
ri_config = self.ri_config or \
RoutingInstances(comment=DMUtils.routing_instances_comment())
ri = Instance(name=ri_name)
if not is_l2:
ri_config.add_instance(ri)
ri.set_vrf_import(DMUtils.make_import_name(ri_name))
ri.set_vrf_export(DMUtils.make_export_name(ri_name))
has_ipv6_prefixes = DMUtils.has_ipv6_prefixes(prefixes)
has_ipv4_prefixes = DMUtils.has_ipv4_prefixes(prefixes)
if not is_l2:
if ri_opt is None:
ri_opt = RoutingInstanceRoutingOptions()
ri.set_routing_options(ri_opt)
ri.set_instance_type("vrf")
for interface in interfaces:
ri.add_interface(Interface(name=interface.name))
family = Family()
if has_ipv4_prefixes:
family.set_inet(FamilyInet(unicast=''))
if has_ipv6_prefixes:
family.set_inet6(FamilyInet6(unicast=''))
if has_ipv4_prefixes or has_ipv6_prefixes:
auto_export = AutoExport(family=family)
ri_opt.set_auto_export(auto_export)
if is_internal_vn:
self.internal_vn_ris.append(ri)
self.add_bogus_lo0(ri, network_id, vn)
if self.is_spine() and is_l2_l3:
self.add_irb_config(ri_conf)
self.attach_irb(ri_conf, ri)
lr_uuid = None
if is_internal_vn:
lr_uuid = DMUtils.extract_lr_uuid_from_internal_vn_name(ri_name)
# add policies for export route targets
if self.is_spine():
ps = PolicyStatement(name=DMUtils.make_export_name(ri_name))
ps.set_comment(DMUtils.vn_ps_comment(vn, "Export"))
then = Then()
ps.add_term(Term(name="t1", then=then))
for route_target in export_targets:
comm = Community(add='',
community_name=DMUtils.make_community_name(route_target))
then.add_community(comm)
then.set_accept('')
policy_config.add_policy_statement(ps)
self.add_to_global_switch_opts(DMUtils.make_export_name(ri_name), False)
# add policies for import route targets
ps = PolicyStatement(name=DMUtils.make_import_name(ri_name))
ps.set_comment(DMUtils.vn_ps_comment(vn, "Import"))
# add term switch policy
from_ = From()
term = Term(name=DMUtils.get_switch_policy_name(), fromxx=from_)
ps.add_term(term)
from_.add_community(DMUtils.get_switch_policy_name())
term.set_then(Then(accept=''))
from_ = From()
term = Term(name="t1", fromxx=from_)
ps.add_term(term)
for route_target in import_targets:
from_.add_community(DMUtils.make_community_name(route_target))
if not is_internal_vn:
self.add_vni_option(vni or network_id, route_target)
term.set_then(Then(accept=''))
policy_config.add_policy_statement(ps)
self.add_to_global_switch_opts(DMUtils.make_import_name(ri_name), True)
# add L2 EVPN and BD config
interfaces_config = self.interfaces_config
if (is_l2 and vni is not None and
self.is_family_configured(self.bgp_params, "e-vpn")):
# add vlan config
vlan_conf = self.add_vlan_config(ri_name, vni, is_l2_l3, "irb." + str(network_id))
interfaces_config = self.interfaces_config or Interfaces(comment=DMUtils.interfaces_comment())
self.build_l2_evpn_interface_config(interfaces_config,
interfaces, vn, vlan_conf)
if (not is_l2 and (vni is not None or (is_internal_vn and lr_uuid)) and \
self.is_family_configured(self.bgp_params, "e-vpn")):
evpn = self.build_evpn_config(int_vn = is_internal_vn)
if evpn:
ri.set_protocols(RoutingInstanceProtocols(evpn=evpn))
if is_internal_vn and lr_uuid:
ip_prefix_support = IpPrefixSupport()
#ip_prefix_support.set_forwarding_mode("symmetric")
ip_prefix_support.set_encapsulation("vxlan")
ip_prefix_support.set_vni(str(vni))
ip_prefix_support.set_advertise("direct-nexthop")
evpn.set_ip_prefix_support(ip_prefix_support)
else:
ri.set_vtep_source_interface("lo0.0")
if not is_internal_vn:
#add vlans
self.add_ri_vlan_config(ri, vni)
if (not is_l2 and not is_l2_l3 and gateways):
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
ifl_num = str(1000 + int(network_id))
lo_intf = Interface(name="lo0")
interfaces_config.add_interface(lo_intf)
intf_unit = Unit(name=ifl_num, comment=DMUtils.l3_lo_intf_comment(vn))
lo_intf.add_unit(intf_unit)
family = Family()
intf_unit.set_family(family)
inet = None
inet6 = None
for (lo_ip, _) in gateways:
subnet = lo_ip
(ip, _) = lo_ip.split('/')
if ':' in lo_ip:
if not inet6:
inet6 = FamilyInet6()
family.set_inet6(inet6)
addr = Address()
inet6.add_address(addr)
lo_ip = ip + '/' + '128'
else:
if not inet:
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
lo_ip = ip + '/' + '32'
addr.set_name(lo_ip)
addr.set_comment(DMUtils.lo0_ip_comment(subnet))
ri.add_interface(Interface(name="lo0." + ifl_num,
comment=DMUtils.lo0_ri_intf_comment(vn)))
self.policy_config = policy_config
self.interfaces_config = interfaces_config
self.route_targets |= import_targets | export_targets
self.ri_config = ri_config
# end add_routing_instance
def attach_acls(self, interface, unit):
if self.is_spine() or not interface.li_uuid:
return
interface = LogicalInterfaceDM.find_by_name_or_uuid(interface.li_uuid)
if not interface:
return
sg_list = interface.get_attached_sgs()
filter_list = []
for sg in sg_list:
flist = self.get_configured_filters(sg)
filter_list += flist
if filter_list:
ethernet = FamilyEthernet()
efilter = EthernetFilter()
for fname in filter_list:
efilter.add_input_list(fname)
ethernet.set_filter(efilter)
unit.set_family(Family(ethernet_switching=ethernet))
# end attach_acls
def build_l2_evpn_interface_config(self, interfaces_config, interfaces, vn, vlan_conf):
ifd_map = {}
for interface in interfaces:
ifd_map.setdefault(interface.ifd_name, []).append(interface)
for ifd_name, interface_list in ifd_map.items():
intf = Interface(name=ifd_name)
interfaces_config.add_interface(intf)
intf.set_flexible_vlan_tagging('')
intf.set_encapsulation("extended-vlan-bridge")
if interface_list[0].is_untagged():
if (len(interface_list) > 1):
self._logger.error(
"invalid logical interfaces config for ifd %s" % (
ifd_name))
continue
unit = Unit(name=interface_list[0].unit,
comment=DMUtils.l2_evpn_intf_unit_comment(vn, False),
vlan_id="4094")
# attach acls
self.attach_acls(interface_list[0], unit)
intf.add_unit(unit)
intf.set_native_vlan_id("4094")
vlan_conf.add_interface(Interface(name=ifd_name + ".0"))
else:
for interface in interface_list:
unit = Unit(name=interface.unit,
comment=DMUtils.l2_evpn_intf_unit_comment(vn,
True, interface.vlan_tag),
vlan_id=str(interface.vlan_tag))
# attach acls
self.attach_acls(interface, unit)
intf.add_unit(unit)
vlan_conf.add_interface(Interface(name=ifd_name + "." + str(interface.unit)))
# end build_l2_evpn_interface_config
@abc.abstractmethod
def build_evpn_config(self):
"""build evpn config depending on qfx model"""
# end build_evpn_config
def init_evpn_config(self):
if not self.routing_instances:
# no vn config then no need to configure evpn
return
if self.evpn:
# evpn init done
return
self.evpn = self.build_evpn_config()
self.evpn.set_extended_vni_list('all')
if not self.is_spine():
self.evpn.set_multicast_mode("ingress-replication")
if not self.proto_config:
self.proto_config = Protocols(comment=DMUtils.protocols_comment())
self.proto_config.set_evpn(self.evpn)
# end init_evpn_config
def add_vni_option(self, vni, vrf_target):
if not self.evpn:
self.init_evpn_config()
vni_options = self.evpn.get_vni_options()
if not vni_options:
vni_options = VniOptions()
self.evpn.set_extended_vni_list("all")
vni_options.add_vni(Vni(name=str(vni), vrf_target=VniTarget(community=vrf_target)))
self.evpn.set_vni_options(vni_options)
def init_global_switch_opts(self):
if self.global_switch_options_config is None:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.set_vtep_source_interface("lo0.0")
if not self.routing_instances:
# no vn config then no need to configure vrf target
return
self.global_switch_options_config.add_vrf_target(VniTarget(auto=''))
switch_options_community = DMUtils.get_switch_vrf_import(self.get_asn())
self.global_switch_options_config.add_vrf_target(VniTarget(community=switch_options_community))
self.set_global_export_policy()
# end init_global_switch_opts
def set_global_export_policy(self):
if self.is_spine():
return
export_policy = DMUtils.get_switch_export_policy_name()
ps = PolicyStatement(name=export_policy)
ps.set_comment(DMUtils.switch_export_policy_comment())
export_community = DMUtils.get_switch_export_community_name()
then = Then()
comm = Community(add='', community_name=export_community)
then.add_community(comm)
ps.add_term(Term(name="t1", then=then))
if not self.policy_config:
self.policy_config = PolicyOptions(comment=DMUtils.policy_options_comment())
self.policy_config.add_policy_statement(ps)
if not self.global_switch_options_config:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.add_vrf_export(export_policy)
# end set_global_export_policy
def add_to_global_switch_opts(self, policy, is_import):
if not self.global_switch_options_config:
self.init_global_switch_opts()
if is_import:
self.global_switch_options_config.add_vrf_import(policy)
else:
self.global_switch_options_config.add_vrf_export(policy)
# end add_to_global_switch_opts
def set_route_targets_config(self):
if self.policy_config is None:
self.policy_config = PolicyOptions(comment=DMUtils.policy_options_comment())
# add export community
export_comm = CommunityType(name=DMUtils.get_switch_export_community_name())
for route_target in self.route_targets:
comm = CommunityType(name=DMUtils.make_community_name(route_target))
comm.add_members(route_target)
self.policy_config.add_community(comm)
# add route-targets to export community
export_comm.add_members(route_target)
# if no members, no need to add community
if export_comm.get_members():
self.policy_config.add_community(export_comm)
# add community for switch options
comm = CommunityType(name=DMUtils.get_switch_policy_name())
comm.add_members(DMUtils.get_switch_vrf_import(self.get_asn()))
self.policy_config.add_community(comm)
# end set_route_targets_config
def add_vlan_config(self, vrf_name, vni, is_l2_l3=False, irb_intf=None):
if not self.vlans_config:
self.vlans_config = Vlans(comment=DMUtils.vlans_comment())
vxlan = VXLan(vni=vni)
vlan = Vlan(name=vrf_name[1:], vxlan=vxlan)
if is_l2_l3 and self.is_spine():
if not irb_intf:
self._logger.error("Missing irb interface config l3 vlan: %s" % vrf_name)
else:
vlan.set_vlan_id(str(vni))
vlan.set_l3_interface(irb_intf)
self.vlans_config.add_vlan(vlan)
return vlan
# end add_vlan_config
def add_ri_vlan_config(self, ri, vni):
vxlan = VXLan(vni=vni)
vlan = Vlan(name=vrf_name[1:], vlan_id=str(vni), vxlan=vxlan)
vlans = ri.get_vlans()
if not vlans:
vlans = Vlans()
vlans.add_vlan(vlan)
ri.set_vlans(vlans)
# end add_ri_vlan_config
# Product Specific configuration, called from parent class
def add_product_specific_config(self, groups):
groups.set_switch_options(self.global_switch_options_config)
if self.vlans_config:
groups.set_vlans(self.vlans_config)
if self.chassis_config:
groups.set_chassis(self.chassis_config)
# end add_product_specific_config
def set_route_distinguisher_config(self):
if not self.routing_instances or not self.bgp_params.get('identifier'):
# no vn config then no need to configure route distinguisher
return
if self.global_switch_options_config is None:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.set_route_distinguisher(
RouteDistinguisher(rd_type=self.bgp_params['identifier'] + ":1"))
# end set_route_distinguisher_config
def build_esi_config(self):
pr = self.physical_router
if not pr or self.is_spine():
return
if not self.interfaces_config:
self.interfaces_config = Interfaces(comment=DMUtils.interfaces_comment())
for pi_uuid in pr.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if not pi or not pi.esi or pi.esi == "0" or pi.get_parent_ae_id():
continue
esi_conf = Esi(identifier=pi.esi, all_active='')
intf = Interface(name=pi.name, esi=esi_conf)
self.interfaces_config.add_interface(intf)
# add ae interfaces
# self.ae_id_map should have all esi => ae_id mapping
for esi, ae_id in self.physical_router.ae_id_map.items():
esi_conf = Esi(identifier=esi, all_active='')
intf = Interface(name="ae" + str(ae_id), esi=esi_conf)
self.interfaces_config.add_interface(intf)
# end build_esi_config
def get_vn_li_map(self):
pr = self.physical_router
vn_list = []
# get all logical router connected networks
for lr_id in pr.logical_routers or []:
lr = LogicalRouterDM.get(lr_id)
if not lr:
continue
vn_list += lr.get_connected_networks(include_internal=True)
vn_dict = {}
for vn_id in vn_list:
vn_dict[vn_id] = []
li_set = pr.logical_interfaces
for pi_uuid in pr.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if pi is None:
continue
li_set |= pi.logical_interfaces
for li_uuid in li_set:
li = LogicalInterfaceDM.get(li_uuid)
if li is None:
continue
vmi_id = li.virtual_machine_interface
vmi = VirtualMachineInterfaceDM.get(vmi_id)
if vmi is None:
continue
vn_id = vmi.virtual_network
if li.physical_interface:
pi = PhysicalInterfaceDM.get(li.physical_interface)
ae_id = pi.get_parent_ae_id()
if ae_id and li.physical_interface:
_, unit= li.name.split('.')
ae_name = "ae" + str(ae_id) + "." + unit
vn_dict.setdefault(vn_id, []).append(
JunosInterface(ae_name, li.li_type, li.vlan_tag))
continue
vn_dict.setdefault(vn_id, []).append(
JunosInterface(li.name, li.li_type, li.vlan_tag, li_uuid=li.uuid))
return vn_dict
# end
def get_vn_associated_physical_interfaces(self):
pr = self.physical_router
li_set = set()
pi_list = []
for pi_uuid in pr.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if pi is None or not pi.esi or pi.esi == "0":
continue
if self.has_vmi(pi.logical_interfaces):
pi_list.append(pi)
return pi_list
# end get_vn_associated_physical_interfaces
def has_vmi(self, li_set):
if not li_set:
return False
for li_uuid in li_set:
li = LogicalInterfaceDM.get(li_uuid)
if not li or not li.virtual_machine_interface \
or not VirtualMachineInterfaceDM.get(li.virtual_machine_interface):
continue
return True
return False
# end has_vmi
def get_ae_alloc_esi_map(self):
pi_list = self.get_vn_associated_physical_interfaces()
esi_map = {}
for pi in pi_list:
if not pi.name.startswith("ae") and pi.esi:
esi_map.setdefault(pi.esi, []).append(pi)
return esi_map
# end get_ae_alloc_esi_map
def is_l2_supported(self, vn):
""" Check l2 capability """
return True
# end is_l2_supported
@abc.abstractmethod
def is_l3_supported(self, vn):
""" Check l3 capability """
return False
# end is_l3_supported
def set_resolve_bgp_route_target_family_config(self):
""" configure resolution config in global routing options if needed """
if not self.global_routing_options_config:
self.global_routing_options_config = RoutingOptions(
comment=DMUtils.routing_options_comment())
resolve = Resolution(rib=RIB(name="bgp.rtarget.0",
resolution_ribs="inet.0"))
self.global_routing_options_config.set_resolution(resolve)
# end set_resolve_bgp_route_target_family_config
def set_chassis_config(self):
device_count = DMUtils.get_max_ae_device_count()
aggr_devices = AggregatedDevices(Ethernet(device_count=device_count))
if not self.chassis_config:
self.chassis_config = Chassis()
self.chassis_config.set_aggregated_devices(aggr_devices)
# end set_chassis_config
def build_ae_config(self, esi_map):
if esi_map:
self.set_chassis_config()
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
# self.ae_id_map should have all esi => ae_id mapping
# esi_map should have esi => interface memberships
for esi, ae_id in self.physical_router.ae_id_map.items():
# config ae interface
ae_name = "ae" + str(ae_id)
intf = Interface(name=ae_name)
interfaces_config.add_interface(intf)
priority = DMUtils.lacp_system_priority()
            system_id = esi[-17:]  # last 17 characters from esi, for ex: 00:00:00:00:00:05
lacp = Lacp(active='', system_priority=priority, \
system_id=system_id, admin_key=1)
intf.set_aggregated_ether_options(AggregatedEtherOptions(lacp=lacp))
# associate 'ae' membership
pi_list = esi_map.get(esi)
for pi in pi_list or []:
intf = Interface(name=pi.name)
interfaces_config.add_interface(intf)
etherOptions = EtherOptions(ieee_802_3ad=Ieee802(bundle=ae_name))
intf.set_gigether_options(etherOptions)
self.interfaces_config = interfaces_config
# end build_ae_config
def add_addr_term(self, term, addr_match, is_src):
if not addr_match:
return None
subnet = addr_match.get_subnet()
if not subnet:
return None
subnet_ip = subnet.get_ip_prefix()
subnet_len = subnet.get_ip_prefix_len()
if not subnet_ip or not subnet_len:
return None
from_ = term.get_from() or From()
term.set_from(from_)
if is_src:
from_.add_ip_source_address(str(subnet_ip) + "/" + str(subnet_len))
else:
from_.add_ip_destination_address(str(subnet_ip) + "/" + str(subnet_len))
# end add_addr_term
def add_port_term(self, term, port_match, is_src):
if not port_match:
return None
start_port = port_match.get_start_port()
end_port = port_match.get_end_port()
if not start_port or not end_port:
return None
port_str = str(start_port) + "-" + str(end_port)
from_ = term.get_from() or From()
term.set_from(from_)
if is_src:
from_.add_source_port(port_str)
else:
from_.add_destination_port(port_str)
# end add_port_term
def add_filter_term(self, ff, name):
term = Term()
term.set_name(name)
ff.add_term(term)
term.set_then(Then(accept=''))
return term
def add_protocol_term(self, term, protocol_match):
if not protocol_match or protocol_match == 'any':
return None
from_ = term.get_from() or From()
term.set_from(from_)
from_.set_ip_protocol(protocol_match)
# end add_protocol_term
def add_dns_dhcp_terms(self, ff):
port_list = [67, 68, 53]
term = Term()
term.set_name("allow-dns-dhcp")
from_ = From()
from_.set_ip_protocol("udp")
term.set_from(from_)
for port in port_list:
from_.add_source_port(str(port))
term.set_then(Then(accept=''))
ff.add_term(term)
# end add_dns_dhcp_terms
def add_ether_type_term(self, ff, ether_type_match):
if not ether_type_match:
return None
term = Term()
from_ = From()
term.set_from(from_)
term.set_name("ether-type")
from_.set_ether_type(ether_type_match.lower())
term.set_then(Then(accept=''))
ff.add_term(term)
# end add_ether_type_term
def build_firewall_filters(self, sg, acl, is_egress=False):
if self.is_spine():
return
if not sg or not acl or not acl.vnc_obj:
return
acl = acl.vnc_obj
entries = acl.get_access_control_list_entries()
if not entries:
return
rules = entries.get_acl_rule() or []
if not rules:
return
firewall_config = self.firewall_config or Firewall(DMUtils.firewall_comment())
ff = firewall_config.get_family() or FirewallFamily()
firewall_config.set_family(ff)
eswitching = ff.get_ethernet_switching() or FirewallEthernet()
ff.set_ethernet_switching(eswitching)
for rule in rules:
if not self.has_terms(rule):
continue
match = rule.get_match_condition()
if not match:
continue
rule_uuid = rule.get_rule_uuid()
dst_addr_match = match.get_dst_address()
dst_port_match = match.get_dst_port()
ether_type_match = match.get_ethertype()
protocol_match = match.get_protocol()
src_addr_match = match.get_src_address()
src_port_match = match.get_src_port()
filter_name = DMUtils.make_sg_filter_name(sg.name, ether_type_match, rule_uuid)
f = FirewallFilter(name=filter_name)
f.set_comment(DMUtils.sg_firewall_comment(sg.name, ether_type_match, rule_uuid))
# allow arp ether type always
self.add_ether_type_term(f, 'arp')
# allow dhcp/dns always
self.add_dns_dhcp_terms(f)
default_term = self.add_filter_term(f, "default-term")
self.add_addr_term(default_term, dst_addr_match, False)
self.add_addr_term(default_term, src_addr_match, True)
self.add_port_term(default_term, dst_port_match, False)
# source port match is not needed for now (BMS source port)
#self.add_port_term(default_term, src_port_match, True)
self.add_protocol_term(default_term, protocol_match)
eswitching.add_filter(f)
if not eswitching.get_filter():
ff.set_ethernet_switching(None)
self.firewall_config = firewall_config
# end build_firewall_filters
def build_firewall_config(self):
if self.is_spine():
return
sg_list = LogicalInterfaceDM.get_sg_list()
for sg in sg_list or []:
acls = sg.access_control_lists
for acl in acls or []:
acl = AccessControlListDM.get(acl)
if acl and not acl.is_ingress:
self.build_firewall_filters(sg, acl)
# end build_firewall_config
def is_default_sg(self, match):
if (not match.get_dst_address()) or \
(not match.get_dst_port()) or \
(not match.get_ethertype()) or \
(not match.get_src_address()) or \
(not match.get_src_port()) or \
(not match.get_protocol()):
return False
if not match.get_dst_address().get_subnet():
return False
if ((str(match.get_dst_address().get_subnet().get_ip_prefix()) == "0.0.0.0") or \
(str(match.get_dst_address().get_subnet().get_ip_prefix()) == "::")) and \
(str(match.get_dst_address().get_subnet().get_ip_prefix_len()) == "0") and \
(str(match.get_dst_port().get_start_port()) == "0") and \
(str(match.get_dst_port().get_end_port()) == "65535") and \
((str(match.get_ethertype()) == "IPv4") or \
(str(match.get_ethertype()) == "IPv6")) and \
(not match.get_src_address().get_subnet()) and \
(not match.get_src_address().get_subnet_list()) and \
(str(match.get_src_port().get_start_port()) == "0") and \
(str(match.get_src_port().get_end_port()) == "65535") and \
(str(match.get_protocol()) == "any"):
return True
return False
# end is_default_sg
def has_terms(self, rule):
match = rule.get_match_condition()
if not match:
return False
# return False if it is default SG, no filter is applied
if self.is_default_sg(match):
return False
return match.get_dst_address() or match.get_dst_port() or \
match.get_ethertype() or match.get_src_address() or match.get_src_port() or \
(match.get_protocol() and match.get_protocol() != 'any')
def get_firewall_filters(self, sg, acl, is_egress=False):
if not sg or not acl or not acl.vnc_obj:
return []
acl = acl.vnc_obj
entries = acl.get_access_control_list_entries()
if not entries:
return []
rules = entries.get_acl_rule() or []
if not rules:
return []
filter_names = []
for rule in rules:
if not self.has_terms(rule):
continue
match = rule.get_match_condition()
if not match:
continue
rule_uuid = rule.get_rule_uuid()
ether_type_match = match.get_ethertype()
if not ether_type_match:
continue
if 'ipv6' in ether_type_match.lower():
continue
filter_name = DMUtils.make_sg_filter_name(sg.name, ether_type_match, rule_uuid)
filter_names.append(filter_name)
return filter_names
# end get_firewall_filters
def get_configured_filters(self, sg):
if not sg:
return []
filter_names = []
acls = sg.access_control_lists
for acl in acls or []:
acl = AccessControlListDM.get(acl)
if acl and not acl.is_ingress:
fnames = self.get_firewall_filters(sg, acl)
filter_names += fnames
return filter_names
# end get_configured_filters
def build_ri_config(self):
if not self.is_spine():
esi_map = self.get_ae_alloc_esi_map()
self.physical_router.evaluate_ae_id_map(esi_map)
self.build_ae_config(esi_map)
vn_dict = self.get_vn_li_map()
vn_irb_ip_map = None
if self.is_spine():
self.physical_router.evaluate_vn_irb_ip_map(set(vn_dict.keys()), 'l2_l3', 'irb', False)
self.physical_router.evaluate_vn_irb_ip_map(set(vn_dict.keys()), 'l3', 'lo0', True)
vn_irb_ip_map = self.physical_router.get_vn_irb_ip_map()
for vn_id, interfaces in vn_dict.items():
vn_obj = VirtualNetworkDM.get(vn_id)
if (vn_obj is None or
vn_obj.get_vxlan_vni() is None or
vn_obj.vn_network_id is None):
continue
export_set = None
import_set = None
for ri_id in vn_obj.routing_instances:
# Find the primary RI by matching the name
ri_obj = RoutingInstanceDM.get(ri_id)
if ri_obj is None:
continue
if ri_obj.fq_name[-1] == vn_obj.fq_name[-1]:
vrf_name_l2 = DMUtils.make_vrf_name(vn_obj.fq_name[-1],
vn_obj.vn_network_id, 'l2')
vrf_name_l3 = DMUtils.make_vrf_name(vn_obj.fq_name[-1],
vn_obj.vn_network_id, 'l3')
export_set = copy.copy(ri_obj.export_targets)
import_set = copy.copy(ri_obj.import_targets)
if self.is_spine():
for ri2_id in ri_obj.routing_instances:
ri2 = RoutingInstanceDM.get(ri2_id)
if ri2 is None:
continue
import_set |= ri2.export_targets
if vn_obj.get_forwarding_mode() in ['l2', 'l2_l3']:
irb_ips = None
if vn_obj.get_forwarding_mode() == 'l2_l3' and self.is_spine():
irb_ips = vn_irb_ip_map['irb'].get(vn_id, [])
ri_conf = { 'ri_name': vrf_name_l2, 'vn': vn_obj }
ri_conf['is_l2'] = True
ri_conf['is_l2_l3'] = (vn_obj.get_forwarding_mode() == 'l2_l3')
ri_conf['import_targets'] = import_set
if self.is_spine():
ri_conf['export_targets'] = export_set
ri_conf['prefixes'] = vn_obj.get_prefixes()
ri_conf['gateways'] = irb_ips
ri_conf['interfaces'] = interfaces
ri_conf['vni'] = vn_obj.get_vxlan_vni()
ri_conf['network_id'] = vn_obj.vn_network_id
self.add_routing_instance(ri_conf)
is_internal_vn = True if '_contrail_lr_internal_vn_' in vn_obj.name else False
if vn_obj.get_forwarding_mode() in ['l3'] and self.is_l3_supported(vn_obj):
interfaces = []
lo0_ips = vn_irb_ip_map['lo0'].get(vn_id, [])
ri_conf = { 'ri_name': vrf_name_l3, 'vn': vn_obj }
ri_conf['is_l2'] = False
ri_conf['is_l2_l3'] = False
ri_conf['import_targets'] = import_set
ri_conf['export_targets'] = export_set
ri_conf['prefixes'] = vn_obj.get_prefixes()
ri_conf['interfaces'] = interfaces
if is_internal_vn:
ri_conf['vni'] = vn_obj.get_vxlan_vni(is_internal_vn = is_internal_vn)
ri_conf['gateways'] = lo0_ips
ri_conf['network_id'] = vn_obj.vn_network_id
self.add_routing_instance(ri_conf)
break
return
# end build_ri_config
def set_qfx_common_config(self):
self.build_bgp_config()
self.build_ri_config()
self.set_internal_vn_irb_config()
self.init_evpn_config()
self.build_firewall_config()
self.init_global_switch_opts()
self.set_resolve_bgp_route_target_family_config()
self.build_esi_config()
self.set_route_targets_config()
self.set_route_distinguisher_config()
# end set_qfx_common_config
# end QfxConf
|
barnsnake351/nova
|
refs/heads/master
|
nova/tests/unit/scheduler/test_rpcapi.py
|
59
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.scheduler.rpcapi
"""
from mox3 import mox
from oslo_config import cfg
from nova import context
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova import test
CONF = cfg.CONF
class SchedulerRpcAPITestCase(test.NoDBTestCase):
def _test_scheduler_api(self, method, rpc_method, **kwargs):
ctxt = context.RequestContext('fake_user', 'fake_project')
rpcapi = scheduler_rpcapi.SchedulerAPI()
self.assertIsNotNone(rpcapi.client)
self.assertEqual(rpcapi.client.target.topic, CONF.scheduler_topic)
expected_retval = 'foo' if rpc_method == 'call' else None
expected_version = kwargs.pop('version', None)
expected_fanout = kwargs.pop('fanout', None)
expected_kwargs = kwargs.copy()
self.mox.StubOutWithMock(rpcapi, 'client')
rpcapi.client.can_send_version(
mox.IsA(str)).MultipleTimes().AndReturn(True)
prepare_kwargs = {}
if expected_fanout:
prepare_kwargs['fanout'] = True
if expected_version:
prepare_kwargs['version'] = expected_version
rpcapi.client.prepare(**prepare_kwargs).AndReturn(rpcapi.client)
rpc_method = getattr(rpcapi.client, rpc_method)
rpc_method(ctxt, method, **expected_kwargs).AndReturn(expected_retval)
self.mox.ReplayAll()
# NOTE(markmc): MultipleTimes() is OnceOrMore() not ZeroOrMore()
rpcapi.client.can_send_version('I fool you mox')
retval = getattr(rpcapi, method)(ctxt, **kwargs)
self.assertEqual(retval, expected_retval)
def test_select_destinations(self):
self._test_scheduler_api('select_destinations', rpc_method='call',
request_spec='fake_request_spec',
filter_properties='fake_prop',
version='4.0')
def test_update_aggregates(self):
self._test_scheduler_api('update_aggregates', rpc_method='cast',
aggregates='aggregates',
version='4.1',
fanout=True)
def test_delete_aggregate(self):
self._test_scheduler_api('delete_aggregate', rpc_method='cast',
aggregate='aggregate',
version='4.1',
fanout=True)
def test_update_instance_info(self):
self._test_scheduler_api('update_instance_info', rpc_method='cast',
host_name='fake_host',
instance_info='fake_instance',
fanout=True,
version='4.2')
def test_delete_instance_info(self):
self._test_scheduler_api('delete_instance_info', rpc_method='cast',
host_name='fake_host',
instance_uuid='fake_uuid',
fanout=True,
version='4.2')
def test_sync_instance_info(self):
self._test_scheduler_api('sync_instance_info', rpc_method='cast',
host_name='fake_host',
instance_uuids=['fake1', 'fake2'],
fanout=True,
version='4.2')
|
JioCloud/tempest
|
refs/heads/master
|
tempest/stress/cleanup.py
|
14
|
#!/usr/bin/env python
# Copyright 2013 Quanta Research Cambridge, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from tempest import clients
LOG = logging.getLogger(__name__)
def cleanup():
admin_manager = clients.AdminManager()
body = admin_manager.servers_client.list_servers({"all_tenants": True})
LOG.info("Cleanup::remove %s servers" % len(body['servers']))
for s in body['servers']:
try:
admin_manager.servers_client.delete_server(s['id'])
except Exception:
pass
for s in body['servers']:
try:
admin_manager.servers_client.wait_for_server_termination(s['id'])
except Exception:
pass
keypairs = admin_manager.keypairs_client.list_keypairs()
LOG.info("Cleanup::remove %s keypairs" % len(keypairs))
for k in keypairs:
try:
admin_manager.keypairs_client.delete_keypair(k['name'])
except Exception:
pass
secgrp_client = admin_manager.security_groups_client
secgrp = secgrp_client.list_security_groups({"all_tenants": True})
secgrp_del = [grp for grp in secgrp if grp['name'] != 'default']
LOG.info("Cleanup::remove %s Security Group" % len(secgrp_del))
for g in secgrp_del:
try:
secgrp_client.delete_security_group(g['id'])
except Exception:
pass
floating_ips = admin_manager.floating_ips_client.list_floating_ips()
LOG.info("Cleanup::remove %s floating ips" % len(floating_ips))
for f in floating_ips:
try:
admin_manager.floating_ips_client.delete_floating_ip(f['id'])
except Exception:
pass
users = admin_manager.identity_client.get_users()
LOG.info("Cleanup::remove %s users" % len(users))
for user in users:
if user['name'].startswith("stress_user"):
admin_manager.identity_client.delete_user(user['id'])
tenants = admin_manager.identity_client.list_tenants()
LOG.info("Cleanup::remove %s tenants" % len(tenants))
for tenant in tenants:
if tenant['name'].startswith("stress_tenant"):
admin_manager.identity_client.delete_tenant(tenant['id'])
# We have to delete snapshots first or
# volume deletion may block
_, snaps = admin_manager.snapshots_client.\
list_snapshots(params={"all_tenants": True})
LOG.info("Cleanup::remove %s snapshots" % len(snaps))
for v in snaps:
try:
admin_manager.snapshots_client.\
wait_for_snapshot_status(v['id'], 'available')
admin_manager.snapshots_client.delete_snapshot(v['id'])
except Exception:
pass
for v in snaps:
try:
admin_manager.snapshots_client.wait_for_resource_deletion(v['id'])
except Exception:
pass
vols = admin_manager.volumes_client.list_volumes(
params={"all_tenants": True})
LOG.info("Cleanup::remove %s volumes" % len(vols))
for v in vols:
try:
admin_manager.volumes_client.\
wait_for_volume_status(v['id'], 'available')
admin_manager.volumes_client.delete_volume(v['id'])
except Exception:
pass
for v in vols:
try:
admin_manager.volumes_client.wait_for_resource_deletion(v['id'])
except Exception:
pass
|
Rostlab/LocText
|
refs/heads/develop
|
tests/test_evaluations.py
|
2
|
# Be able to call directly such as `python test_evaluations.py`
try:
from .context import loctext
except SystemError: # Parent module '' not loaded, cannot perform relative import
pass
from pytest import raises
from loctext.util import PRO_ID, LOC_ID, REL_PRO_LOC_ID, repo_path, UNIPROT_NORM_ID, GO_NORM_ID
from loctext.util.ncbi_global_align import global_align
from loctext.learning.evaluations import accept_relation_uniprot_go, accept_entity_uniprot_go_taxonomy, GO_TREE, _accept_go_ids_multiple
from nalaf import print_verbose, print_debug
#
# Note: GO:0005575 is the root of the cellular_component GO hierarchy
#
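#
# The accept_* helpers exercised below return True (accept the prediction),
# False (reject it), or None (ignore it when evaluating).
#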
def test_accept_relation_uniprot_go_basic_eq():
accept_prediction = accept_relation_uniprot_go
assert accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_7|xxx|n_9|yyy")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|P04637|n_9|yyy",
"r_5|n_7|P04637|n_9|yyy")
# Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P04637|n_9|GO:0000123",
"r_5|n_7|P04637|n_9|GO:0000123")
def test_accept_relation_uniprot_go_basic_ne():
accept_prediction = accept_relation_uniprot_go
assert not accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_7|bbb|n_9|yyy")
with raises(KeyError):
assert not accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_7|xxx|n_9|yyy_DIFERENT")
with raises(KeyError):
assert not accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_7|bbb|n_9|yyy_DIFERENT")
def test_unknowns_on_accept_relation_uniprot_go():
assert None is accept_relation_uniprot_go(
"r_5|n_7|UNKNOWN:|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0000123")
# assert None is accept_relation_uniprot_go(
# "r_5|n_7|xxx|n_9|UNKNOWN:",
# "r_5|n_7|xxx|n_9|GO:0000123")
assert False is _accept_go_ids_multiple(
"GO:0031248",
"GO:0044451")
assert None is accept_relation_uniprot_go(
"r_5|n_7|UNKNOWN:|n_9|GO:0031248",
"r_5|n_7|zzz|n_9|GO:0044451")
assert False is accept_relation_uniprot_go(
"r_5|n_7|yyy|n_9|GO:0031248",
"r_5|n_7|zzz|n_9|GO:0005575")
assert False is accept_relation_uniprot_go(
"r_5|n_7|yyy|n_9|GO:0031248",
"r_5|n_7|zzz|n_9|GO:0044451")
# assert None is accept_relation_uniprot_go(
# "r_5|n_7|xxx|n_9|UNKNOWN:",
# "r_5|n_7|zzz|n_9|GO:0000123")
# assert None is accept_relation_uniprot_go(
# "r_5|n_7|UNKNOWN:|n_9|UNKNOWN:",
# "r_5|n_7|xxx|n_9|GO:0000123")
# More basic, always false if gold is known but not predicted
assert False is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|UNKNOWN:|n_9|GO:0000123")
def test_relation_accept_uniprot_rel_type_is_not_compared():
accept_prediction = accept_relation_uniprot_go
assert accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_DIFFERENT|n_7|xxx|n_9|yyy")
def test_accept_relation_uniprot_go_exceptions():
accept_prediction = accept_relation_uniprot_go
    with raises(Exception):
assert accept_prediction(
"",
"")
with raises(Exception):
assert accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_INVALID|xxx|n_9|yyy")
with raises(Exception):
assert accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_INVALID|xxx|n_9|yyy")
with raises(Exception):
assert accept_prediction(
"r_5|n_7|xxx|n_9|yyy",
"r_5|n_7|xxx|n_INVALID|yyy")
with raises(Exception):
assert accept_prediction(
"r_5|n_INVALID|xxx|n_9|yyy",
"r_5|n|xxx|n_9|yyy")
with raises(Exception):
assert accept_prediction(
"r_5|n_7|xxx|n_INVALID|yyy",
"r_5|n|xxx|n_9|yyy")
def test_accept_relation_uniprot_go_direct_children_ORDER_DOES_MATTER():
    # gold must be the parent to accept the prediction, not the other way around
# see: http://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0000123#term=ancchart
# GO:0000123 (histone acetyltransferase complex) is a:
# * direct child of GO:0044451 -- nucleoplasm part
# * direct child of GO:0031248 -- protein acetyltransferase complex
accept_prediction = accept_relation_uniprot_go
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0044451",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0031248",
"r_5|n_7|xxx|n_9|GO:0000123")
# but...
assert None is accept_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0044451")
assert None is accept_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0031248")
# and not related at all with with one another as parent or child...
assert False is accept_prediction(
"r_5|n_7|xxx|n_9|GO:0031248",
"r_5|n_7|xxx|n_9|GO:0044451")
assert False is accept_prediction(
"r_5|n_7|xxx|n_9|GO:0044451",
"r_5|n_7|xxx|n_9|GO:0031248")
def test_accept_relation_uniprot_go_indirect_children():
# see: http://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0000123#term=ancchart
# GO:0000123 (histone acetyltransferase complex) is a:
# * direct child of GO:0044451 -- nucleoplasm part
# * direct child of GO:0031248 -- protein acetyltransferase complex
#
# * indirect child of GO:0044428 (nuclear part) through GO:0044451
# * indirect child of GO:0005634 (nucleus) through GO:0044451 --> GO:0044428
#
# * indirect child of GO:0005575 (cellular_component) since this is the root of the cellular component ontology
accept_prediction = accept_relation_uniprot_go
ignore_prediction = (lambda gold, pred: accept_relation_uniprot_go(gold, pred) is None)
    # Accept when the prediction is more detailed than gold (that is, the prediction is an in-/direct child of gold)
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0044451",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0031248",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0031248",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0031248",
"r_5|n_7|xxx|n_9|GO:0000123")
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0005575",
"r_5|n_7|xxx|n_9|GO:0000123")
    # Ignore when the prediction is above gold (that is, the prediction is an in-/direct parent of gold)
assert ignore_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0044451")
assert ignore_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0031248")
assert ignore_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0031248")
assert ignore_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0031248")
assert ignore_prediction(
"r_5|n_7|xxx|n_9|GO:0000123",
"r_5|n_7|xxx|n_9|GO:0005575")
def test_accept_relation_uniprot_go_all_children_of_root():
# all go terms are indirect children of the root, cellular_component=GO:0005575, including the root itself
# Therefore:
# 1) if gold=root, all predictions are True (accept)
# 2) if pred=root, all predictions are None (ignore)
accept_prediction = accept_relation_uniprot_go
assert 0 == len(GO_TREE['GO:0005575'].parents)
for go_term in GO_TREE:
pred_parents = GO_TREE[go_term].parents
no_parent_in_ontology = all(p not in GO_TREE for p in pred_parents)
is_root = len(pred_parents) == 0
not_in_ontology = no_parent_in_ontology and not is_root
# or not_in_ontology, go_term + " < " + ','.join(pred_parents)
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0005575",
"r_5|n_7|xxx|n_9|" + go_term), go_term + " < " + ','.join(pred_parents)
if not go_term == "GO:0005575":
assert None is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|" + go_term,
"r_5|n_7|xxx|n_9|GO:0005575"), (go_term, GO_TREE[go_term])
# The following tests the root with itself
assert accept_prediction(
"r_5|n_7|xxx|n_9|GO:0005575",
"r_5|n_7|xxx|n_9|GO:0005575")
    # The following tests check that the root is only handled specially when it is the real GO id, not an arbitrary/random/fake string
    # Note, here the fake go term in the prediction IS checked and that's why the error is expected
with raises(KeyError):
assert not accept_prediction(
"r_5|n_7|xxx|n_9|GO:0005575",
"r_5|n_7|xxx|n_9|FAKE")
    # Note, here the gold fake go term IS checked and that's why the error is expected
with raises(KeyError):
assert not accept_prediction(
"r_5|n_7|xxx|n_9|FAKE",
"r_5|n_7|xxx|n_9|GO:0005575")
def test_accept_relation_uniprot_go_uniprots_as_list():
accept_prediction = accept_relation_uniprot_go
# Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P04637|n_9|yyy",
"r_5|n_7|P04637,P02340|n_9|yyy")
    # Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P04637|n_9|yyy",
"r_5|n_7|P02340,P04637|n_9|yyy")
# Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P04637|n_9|yyy",
"r_5|n_7|in_the_middle:,P04637,P02340|n_9|yyy")
    # Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P04637,P02340|n_9|yyy",
"r_5|n_7|P04637|n_9|yyy")
    # Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P02340,P04637|n_9|yyy",
"r_5|n_7|P04637|n_9|yyy")
    # Note, the following is a stub test relation and does not have to be biologically true
assert accept_prediction(
"r_5|n_7|P02340,P04637,:in_the_middle|n_9|yyy",
"r_5|n_7|P04637|n_9|yyy")
def test_accept_relation_uniprot_go_uniprots_as_list_do_not_have_to_be_valid():
accept_prediction = accept_relation_uniprot_go
assert accept_prediction(
"r_5|n_7|a|n_9|yyy",
"r_5|n_7|a,b|n_9|yyy")
assert accept_prediction(
"r_5|n_7|a,b|n_9|yyy",
"r_5|n_7|a|n_9|yyy")
assert accept_prediction(
"r_5|n_7| a ,b,,|n_9|yyy",
"r_5|n_7| a |n_9|yyy")
assert accept_prediction(
"r_5|n_7| a |n_9|yyy",
"r_5|n_7| a ,b,,|n_9|yyy")
def test_accept_relation_uniprot_go_uniprots_do_not_create_spurious_ignores_Nones():
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0005737#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0044444#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0005783#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0043231#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0012505#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0044424#term=ancchart
# https://www.ebi.ac.uk/QuickGO/GTerm?id=GO:0005622#term=ancchart
assert True is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0005737")
assert True is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0044444")
assert True is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0005783")
assert False is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0043231")
assert False is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0012505")
assert None is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0044424")
assert None is accept_relation_uniprot_go(
"r_5|n_7|xxx|n_9|GO:0005737",
"r_5|n_7|xxx|n_9|GO:0005622")
def test_sequences_identity():
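    # Descriptive note (added): global_align compares two UniProt sequences; column=2
    # of its output is read as a percent identity, so unrelated proteins score low
    # and near-identical ones score high, as the assertions below rely on.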
a = float(global_align("P35638", "P10145", column=2))
b = float(global_align("P10145", "P35638", column=2))
assert a < 15 # 10.651, last time I checked
assert a == b # order should not matter
a = float(global_align("P08100", "P02699", column=2))
b = float(global_align("P02699", "P08100", column=2))
assert a > 90 # 93.391, last time I checked
assert a == b # order should not matter
def test_accept_relation_uniprot_go_if_similar_sequence():
assert False is accept_relation_uniprot_go(
"r_5|n_7|P35638|n_9|GO:0005737",
"r_5|n_7|P10145|n_9|GO:0005737")
assert False is accept_relation_uniprot_go(
"r_5|n_7|P08100|n_9|GO:0005737",
"r_5|n_7|P02699|n_9|GO:0005737",
        # by default, we do not check sequence similarity, so this fails
)
assert True is accept_relation_uniprot_go(
"r_5|n_7|P08100|n_9|GO:0005737",
"r_5|n_7|P02699|n_9|GO:0005737",
min_seq_identity=90)
assert False is accept_relation_uniprot_go(
"r_5|n_7|P08100|n_9|GO:0005737",
"r_5|n_7|P02699|n_9|GO:0005737",
min_seq_identity=95) # 95, too much
def test_overlapping_cases_for_proteins_on_accept_entity_uniprot_go_taxonomy():
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|1,2|n_7|P08100")
assert True is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,1|n_7|P08100")
assert True is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,2|n_7|P08100")
def test_seq_identity_for_proteins_on_accept_entity_uniprot_go_taxonomy():
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,1|n_7|P02699")
assert True is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,1|n_7|P02699",
min_seq_identity=90)
assert True is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P02699",
"e_1|0,1|n_7|P08100",
min_seq_identity=90)
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,1|n_7|P02699",
min_seq_identity=95)
def test_overlapping_unnormalizations_for_proteins_on_accept_entity_uniprot_go_taxonomy():
# Must always be false if gold is known but not the prediction
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|P08100",
"e_1|0,1|n_7|UNKNOWN:")
# Gold UNKNOWN AND overlapping --> None
assert None is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:",
"e_1|0,1|n_7|P08100")
# Gold UNKNOWN AND overlapping --> None
assert None is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:aaa",
"e_1|0,1|n_7|UNKNOWN:bbb")
# One gold is known and matches the prediction --> True
assert True is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:,P08100",
"e_1|0,1|n_7|P08100")
# One gold is known and does not match the prediction --> False
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:,P08100",
"e_1|0,1|n_7|xxx")
def test_nonoverlapping_unnormalizations_for_proteins_on_accept_entity_uniprot_go_taxonomy():
# Gold UNKNOWN BUT NO overlapping --> False
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:",
"e_1|1,2|n_7|P08100")
# Gold UNKNOWN BUT NO overlapping --> False
assert False is accept_entity_uniprot_go_taxonomy(
"e_1|0,1|n_7|UNKNOWN:",
"e_1|1,2|n_7|UNKNOWN:")
if __name__ == "__main__":
# selected tests:
test_accept_relation_uniprot_go_direct_children_ORDER_DOES_MATTER()
test_accept_relation_uniprot_go_all_children_of_root()
    test_accept_relation_uniprot_go_uniprots_as_list()
|
datacratic/rtbkit
|
refs/heads/master
|
rtbkit/core/banker/banker_backup.py
|
20
|
#!/usr/bin/python
# Wolfgang Sourdeau - march 2013
# Copyright (c) 2013 Datacratic. All rights reserved.
# backup script for the Banker database
# FIXME: this script does not perform the backup atomically, which can lead to
# small inconsistencies between the states of the accounts
import redis
import json
import datetime
r = redis.Redis(host='localhost')
d = {}
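# Walk every key of the local Redis instance and copy it into a plain dict;
# hashes and plain strings are the only value types expected, anything else raises below.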
for key in r.keys():
val_type = r.type(key)
if val_type == "hash":
d[key] = r.hgetall(key)
elif val_type == "string":
d[key] = r.get(key)
else:
raise Exception("unhandled value type: %s" % val_type)
filename = "banker_backup_%s.json" % datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")
writer = open(filename, "w")
writer.write(json.dumps(d))
writer.close()
|
Sorsly/subtle
|
refs/heads/master
|
google-cloud-sdk/platform/gsutil/third_party/boto/boto/file/connection.py
|
153
|
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# File representation of connection, for use with "file://" URIs.
from boto.file.bucket import Bucket
class FileConnection(object):
def __init__(self, file_storage_uri):
# FileConnections are per-file storage URI.
self.file_storage_uri = file_storage_uri
def get_bucket(self, bucket_name, validate=True, headers=None):
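        # validate and headers are accepted for interface compatibility with
        # other storage connections, but they are ignored for file:// URIs.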
return Bucket(bucket_name, self.file_storage_uri.object_name)
|