code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 09:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the room app: creates the Room table."""

    # First migration of this app, so there are no dependencies.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Room',
            fields=[
                # Default auto-increment primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Whether the room is open; no default, so a value is required.
                ('roomIsOpen', models.BooleanField()),
            ],
        ),
    ]
| batebates/L3ProjetWeb | BDR/room/migrations/0001_initial.py | Python | mit | 544 |
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from __future__ import unicode_literals
from django.db import models
from django.db.models import Count
from django.utils import timezone
from django.db.utils import DataError
from datetime import timedelta
import json
import logging
import os
from crashsubmit import models as submit_models
from base.models import Version
logger = logging.getLogger(__name__)
# Modules whose frames should be skipped when deriving a crash signature.
# Stored as a set for O(1) membership tests in ProcessedCrash._find_frame
# (the original stored a list, making each lookup O(n)).
module_blacklist = set()
with open(os.path.join(os.path.dirname(__file__), 'module_blacklist.txt'), 'r') as blacklist:
    module_blacklist = set(blacklist.read().splitlines())
class CrashCountManager(models.Manager):
    """Manager with helpers to query daily crash counts per version."""

    def get_crash_count_processed(self, versions=None, time=None):
        """Return (sorted date strings, {version string: {date: count}}).

        :param versions: optional iterable of Version rows to restrict to
        :param time: optional number of days to look back
        """
        query_set = self.get_crash_count(versions=versions, time=time)
        keys = set()
        data = {}
        for entry in query_set:
            date = str(entry.date)
            keys.add(date)
            version_string = entry.version.str_without_product()
            if version_string not in data:
                data[version_string] = {}
            data[version_string][date] = entry.count
        return sorted(keys), data

    def get_crash_count(self, versions=None, time=None):
        """Return a queryset of CrashCount rows ordered by date.

        :param versions: optional iterable of Version rows to restrict to
        :param time: optional number of days to look back from now
        """
        res = self.get_queryset()
        if versions is not None:
            res = res.filter(version__in=versions)
        if time is not None:
            now = timezone.now()
            before = now - timedelta(days=time)
            res = res.filter(date__range=[before, now])
        # BUG FIX: order_by() returns a *new* queryset; the original called
        # it without assigning the result, so the ordering was silently lost.
        res = res.order_by('date')
        return res
class CrashCount(models.Model):
    """Number of crash reports observed for one version on one day."""
    version = models.ForeignKey(submit_models.Version)
    date = models.DateField()
    # Crashes counted for this (version, date) pair.
    count = models.IntegerField(default = 0)

    # custom manager
    objects = CrashCountManager()

    class Meta:
        # Exactly one counter row per version per day.
        unique_together = ('version', 'date')
class BugReport(models.Model):
    """Reference to an external bug report associated with a signature."""
    # TODO: moggi: support different bug trackers
    # Bug number in the tracker (currently TDF Bugzilla only).
    bug_nr = models.IntegerField()
    fixed = models.BooleanField(default=False)

    def __str__(self):
        return "tdf#" + str(self.bug_nr)

    def get_url(self):
        """Return the TDF Bugzilla URL for this bug."""
        return "https://bugs.documentfoundation.org/show_bug.cgi?id=" + str(self.bug_nr)
class Signature(models.Model):
    """A crash signature (top relevant frame) shared by many crash reports."""
    # The signature text itself is the primary key; capped at 255 chars.
    signature = models.CharField(max_length=255,
            primary_key=True)
    first_observed = models.DateTimeField()
    last_observed = models.DateTimeField()
    # Bug reports believed to be caused by this signature.
    bugs = models.ManyToManyField(BugReport)

    def __str__(self):
        return self.signature
class CrashByVersionData(object):
    """Per-signature crash counts aggregated across operating systems."""

    def __init__(self, id):
        # Total crashes across all OSes.
        self.all = 0
        # Per-OS counters (windows / linux).
        self.win = 0
        self.lin = 0
        # Resolve the signature string to its Signature row.
        self.id = Signature.objects.get(signature=id)

    # BUG FIX: was declared as an instance method without `self`; it only
    # worked by accident through Python 2 unbound-method semantics when
    # called as `CrashByVersionData.getKey(item)`.
    @staticmethod
    def getKey(item):
        """Sort key used by ProcessedCrashManager: total crash count."""
        return item.all
class ProcessedCrashManager(models.Manager):
    """Query helpers for processed crash reports."""

    def get_top_crashes(self, version=None, time=None, limit=None):
        """Aggregate crashes by signature, optionally filtered and limited.

        :param version: optional version descriptor (see Version.get_filter_params)
        :param time: optional number of days to look back
        :param limit: optional maximum number of entries to return
        :return: CrashByVersionData entries sorted by total count (ascending)
        """
        res = self.get_queryset()
        if version is not None:
            version_filter_params = Version.get_filter_params(version, prefix='version__')
            res = res.filter(**version_filter_params)
        if time is not None:
            target = timezone.now() - timedelta(days=time)
            res = res.filter(upload_time__gte=target)
        res = res.values('os_name', 'signature').annotate(Count('os_name'))
        data = {}
        for entry in res:
            signature = entry['signature']
            if signature not in data:
                data[signature] = CrashByVersionData(signature)
            count = entry['os_name__count']
            data[signature].all += count
            if entry['os_name'] == 'linux':
                data[signature].lin = count
            elif entry['os_name'] == 'windows':
                data[signature].win = count
        # CONSISTENCY FIX: the original returned the values *unsorted* when
        # no limit was given but sorted when a limit applied; always sort.
        sorted_values = sorted(data.values(), key=CrashByVersionData.getKey)
        if limit is not None and len(sorted_values) > limit:
            # Keep the `limit` entries with the highest counts.
            sorted_values = sorted_values[len(sorted_values) - limit:]
        return sorted_values

    def get_crashes_for_day(self, day, version):
        """Crashes processed on *day*, optionally restricted to *version*."""
        res = self.get_queryset()
        if version is not None:
            res = res.filter(version=version)
        if day is None:
            return res
        return res.filter(process_time__date=day)

    def get_crashes_to_process(self):
        """Uploaded crashes that do not have a ProcessedCrash entry yet."""
        processed = ProcessedCrash.objects.values_list('crash_id')
        return submit_models.UploadedCrash.objects.all().exclude(crash_id__in=processed)

    def get_crashes_for_version(self, version):
        """All processed crashes belonging to *version*."""
        res = self.get_queryset()
        version_filter_params = Version.get_filter_params(version, prefix='version__')
        res = res.filter(**version_filter_params)
        return res
class ProcessedCrash(models.Model):
    """A crash report after minidump processing, with derived metadata."""

    # custom manager
    objects = ProcessedCrashManager()

    crash_id = models.CharField(max_length=100,
            unique=True)
    upload_time = models.DateTimeField()
    process_time = models.DateTimeField(auto_now_add=True)
    version = models.ForeignKey(Version,
            null=True)

    # Look for better solution to store dictionary
    additional_data = models.TextField(default='{}')

    # OS info
    LINUX = 'linux'
    WINDOWS = 'windows'
    OSX = 'osx'
    ANDROID = 'android'
    IOS = 'ios'
    OS_NAMES = (
        (LINUX, 'Linux'),
        (WINDOWS, 'Windows'),
        (OSX, 'OSX'),
        (ANDROID, 'Android'),
        (IOS, 'IOS')
    )
    os_name = models.CharField(max_length=10,
            choices=OS_NAMES)
    os_detail = models.CharField(max_length=100,
            default='')

    # CPU info
    cpu_architecture = models.CharField(max_length=20)
    cpu_info = models.CharField(max_length=100,
            default='')

    signature = models.ForeignKey(Signature, on_delete=models.CASCADE)

    # crash info
    crash_cause = models.CharField(max_length=100,
            default='SIGSEGV')
    crash_address = models.CharField(max_length=100,
            default='0x0')
    crash_thread = models.SmallIntegerField(
            default=0,
            help_text='The id of the thread that caused the crash')

    # modules
    modules = models.TextField()

    # TODO: moggi: look for better solutions
    # threads
    crashing_thread = models.TextField()
    threads = models.TextField()
    raw = models.TextField()

    def __str__(self):
        return self.crash_id

    def set_view_os_name_to_model(self, view_os_name):
        """Map a user-visible OS name onto one of the OS_NAMES constants."""
        if view_os_name.lower() == ProcessedCrash.LINUX:
            self.os_name = ProcessedCrash.LINUX
        elif view_os_name.lower().startswith(ProcessedCrash.WINDOWS):
            self.os_name = ProcessedCrash.WINDOWS
        elif view_os_name.lower() == ProcessedCrash.OSX:
            self.os_name = ProcessedCrash.OSX
        else:
            # BUG FIX: the original referenced the undefined name
            # 'view_is_name' here, raising NameError on unknown OSes.
            logger.warning("could not determine the os: " + view_os_name)

    def _convert_frames(self, frame_list):
        # NOTE(review): self._convert_frame is not defined in this module;
        # calling this helper raises AttributeError — confirm whether this
        # is dead code or a method was lost.
        text = ""
        for frame in frame_list:
            text += self._convert_frame(frame) + "\n"
        return text

    def _find_frame(self, json_frame_list):
        """Return the first frame not in a blacklisted module, else frame 0."""
        for frame in json_frame_list:
            lib_name = frame['lib_name']
            # BUG FIX: the original used `is not ""` (identity comparison),
            # which is implementation-dependent; use value equality.
            if lib_name not in module_blacklist and lib_name != "":
                return frame
        return json_frame_list[0]

    def _set_signature(self, frame_list):
        """Derive a Signature from the crashing thread's frames and save it."""
        text = ""
        json_frame_list = json.loads(frame_list)
        if len(json_frame_list) > 0:
            frame = self._find_frame(json_frame_list)
            function = frame['function']
            if len(function) > 0:
                text = function
            else:
                text = frame['lib_name']
        # BUG FIX: the original used `len(text) is 0` (identity comparison).
        if len(text) == 0:
            text = "Invalid signature"
            # logger.warn is a deprecated alias of logger.warning.
            logger.warning("could not create a valid signature for %s" % self.crash_id)
        # Signature.signature is capped at 255 characters.
        text = text[:255]
        signatures = Signature.objects.filter(signature=text)
        if len(signatures) < 1:
            signature = Signature()
            signature.signature = text
            signature.first_observed = self.upload_time
            signature.last_observed = self.upload_time
        else:
            signature = signatures[0]
            if signature.last_observed < self.upload_time:
                signature.last_observed = self.upload_time
        try:
            signature.save()
        except DataError as e:
            logger.error("error trying to save signature %s" % text)
            logger.error(str(e))
        self.signature = signature

    def set_thread_to_model(self, threads):
        """Split per-thread frame lists into the crashing thread and the rest."""
        other_threads = {}
        # .items() works on both Python 2 and 3 (the original used the
        # Python-2-only .iteritems()).
        for thread_id, frame_list in threads.items():
            if int(thread_id) == int(self.crash_thread):
                self._set_signature(frame_list)
                self.crashing_thread = frame_list
            else:
                other_threads[thread_id] = json.loads(frame_list)
        self.threads = json.dumps(other_threads)

    def set_modules_to_model(self, modules):
        """Store the module list as newline-separated text."""
        self.modules = "\n".join(modules)

    def get_split_module_list(self):
        """Parse the stored '|'-separated module lines into dicts."""
        ret = []
        for module in self.modules.splitlines():
            line = module.split('|')
            module_name = line[1]
            # presumably skips macOS load-command pseudo entries — confirm
            if module_name.startswith('LC_'):
                continue
            version = line[2]
            debug_id = line[4]
            ret.append({'name': module_name,
                        'version': version, 'id': debug_id})
        return ret
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
| Liongold/crash | django/crashreport/processor/models.py | Python | mpl-2.0 | 9,729 |
from amqpstorm import management
if __name__ == '__main__':
    # Connect to the RabbitMQ management API and print a broker summary.
    # If using a self-signed certificate, change verify=True to point at your CA bundle.
    # You can disable certificate verification for testing by passing in verify=False.
    API = management.ManagementApi('https://rmq.amqpstorm.io:15671', 'guest',
                                   'guest', verify=True)
    try:
        # Fetch the broker overview and print a few interesting fields.
        result = API.overview()
        print('%s: %s' % (result.get('product_name'), result.get('product_version')))
        print('Erlang Version: %s' % result.get('erlang_full_version'))
        print('Cluster Name: %s' % result.get('cluster_name'))
        print('Total Messages: %s' % result.get('queue_totals').get('messages'))
    except management.ApiConnectionError as why:
        # Could not reach the management API at all.
        print('Connection Error: %s' % why)
    except management.ApiError as why:
        # Reached the API, but the request itself failed.
        print('ApiError: %s' % why)
| eandersson/amqpstorm | examples/management/overview.py | Python | mit | 883 |
#!/usr/bin/env python3
import re, random, sqlite3
from os import path
from ..utilities import BasePlugin
from .markov import Markov
# Path of the pre-trained Markov chain database, next to this module
# (built by the generate_chains_db.py script).
SQLITE_DATABASE = path.join(path.dirname(path.realpath(__file__)), "chains.db")
# Number of preceding tokens used as the Markov chain key.
LOOKBEHIND_LENGTH = 2
def speak_db(db_connection, lookbehind_length, initial_state=()):
    """Generate a token list by walking the Markov chain stored in SQLite.

    :param db_connection: sqlite3 connection with `counts(key, count)` and
        `chain(key, next_word, occurrences)` tables; keys are tokens joined
        with newlines, and a NULL next_word marks the end of a message
    :param lookbehind_length: number of tokens forming a chain key
    :param initial_state: tokens to seed the chain with
    :raises KeyError: if the current key is not present in the chain
    :raises ValueError: if the stored occurrence counts are inconsistent
    """
    # generate a message based on probability chains
    current_key = tuple(initial_state)[-lookbehind_length:]
    token_list = []
    while True:
        row = db_connection.execute("SELECT count FROM counts WHERE key = ?", ("\n".join(current_key),)).fetchone()
        if row is None:
            raise KeyError("Key not in chain: {}".format(current_key))
        count = row[0]
        # Pick the next token weighted by how often it followed this key.
        random_choice = random.randrange(0, count)
        choices = db_connection.execute("SELECT next_word, occurrences FROM chain WHERE key = ?", ("\n".join(current_key),))
        for current_choice, occurrences in choices:
            random_choice -= occurrences
            if random_choice < 0:
                new_token = current_choice
                break
        else:  # couldn't find the choice somehow
            raise ValueError("Bad choice for key: {}".format(current_key))  # this should never happen but would otherwise be hard to detect if it did
        # add the token to the message; a NULL token ends the message
        # (was `new_token == None`; identity comparison is the idiom)
        if new_token is None:
            break
        token_list.append(new_token)
        if len(current_key) < lookbehind_length:
            current_key += (new_token,)  # grow the key while just starting
        else:
            current_key = current_key[1:] + (new_token,)  # slide the window
    return token_list
class GenerateTextPlugin(BasePlugin):
    """
    Text generation plugin for Botty.

    Completes phrases addressed to "botty" using a Markov chain with
    2 token lookbehind, backed by a pre-trained SQLite database.

    Example invocations:

        #general    | Me: botty gotta
        #general    | Botty: gotta be frustrating of course...
        #general    | Me: botty
        #general    | Botty: my friend doens't do that
        #general    | Me: botty don't
        #general    | Botty: don't think i saw the ride
    """
    def __init__(self, bot):
        super().__init__(bot)
        assert path.exists(SQLITE_DATABASE), "Markov chain must be trained by running the `src/plugins/generate_text/generate_chains_db.py` script."
        self.connection = sqlite3.connect(SQLITE_DATABASE)

    def on_message(self, message):
        """Respond to messages addressed to botty; return True if handled."""
        text = self.get_message_text(message)
        if text is None:
            return False
        match = re.search(r"\bbotty(?:[\s,\.]+(.*)|$)", text, re.IGNORECASE)
        if match is None:
            return False
        query = self.sendable_text_to_text(match.group(1) or "")
        # Complete the given phrase; fall back to a fresh sentence when the
        # phrase is not a known chain key.
        try:
            self.respond_raw(self.generate_sentence_starting_with(query))
        except KeyError:
            self.respond_raw(self.generate_sentence_starting_with())
        return True

    def generate_sentence_starting_with(self, first_part=""):
        """Return a generated sentence seeded with *first_part* (may be empty)."""
        stripped = first_part.strip()
        seed_words = Markov.tokenize_text(stripped) if stripped != "" else []
        completion = speak_db(self.connection, LOOKBEHIND_LENGTH, seed_words)
        return Markov.format_words(seed_words + completion)
| DanielHopper/botty-bot-bot-bot | src/plugins/generate_text/__init__.py | Python | mit | 3,146 |
#!/usr/bin/env python3
# The MIT License (MIT)
#
# Copyright (c) 2016 Benedikt Schmitt
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
jsonapi.asyncio.handler.related
===============================
"""
# std
import asyncio
from collections import OrderedDict
# local
from jsonapi.base import errors
from jsonapi.base.serializer import serialize_many
from .base import BaseHandler
class RelatedHandler(BaseHandler):
    """
    Returns the related resources for the resource.
    """

    def __init__(self, api, db, request):
        """
        Extracts the type name, relationship name and resource id from the
        request URI arguments.
        """
        super().__init__(api, db, request)
        self.typename = request.japi_uri_arguments.get("type")
        self.relname = request.japi_uri_arguments.get("relname")

        # Initialised after the resource has been loaded.
        self.real_typename = None

        # The resource is loaded in *prepare()*
        self.resource_id = request.japi_uri_arguments.get("id")
        self.resource = None
        return None

    @asyncio.coroutine
    def prepare(self):
        """
        Validates the request content type and the requested type, then
        loads the resource; raises NotFound when either lookup fails.
        """
        if self.request.content_type[0] != "application/vnd.api+json":
            raise errors.UnsupportedMediaType()
        if not self.api.has_type(self.typename):
            raise errors.NotFound()

        # Load the resource.
        self.resource = yield from self.db.get((self.typename, self.resource_id))
        if self.resource is None:
            raise errors.NotFound()
        self.real_typename = self.api.get_typename(self.resource)
        return None

    @asyncio.coroutine
    def get(self):
        """
        Handles a GET request.

        http://jsonapi.org/format/#fetching-relationships
        """
        # Fetch the relatives for the requested relationship, then the
        # resources to be included per the request's `include` parameter.
        resources = yield from self.db.get_relatives([self.resource], [[self.relname]])
        resources = resources.values()
        included_resources = yield from self.db.get_relatives(
            resources, self.request.japi_include
        )

        # Build the document.
        data = serialize_many(resources, fields=self.request.japi_fields)
        included = serialize_many(
            included_resources.values(), fields=self.request.japi_fields
        )
        # No meta or link information is added at the moment.
        meta = OrderedDict()
        links = OrderedDict()

        # Create the response
        self.response.headers["content-type"] = "application/vnd.api+json"
        self.response.status_code = 200
        self.response.body = self.api.dump_json(OrderedDict([
            ("data", data),
            ("included", included),
            ("meta", meta),
            ("links", links),
            ("jsonapi", self.api.jsonapi_object)
        ]))
        return None
| benediktschmitt/py-jsonapi | jsonapi/asyncio/handler/related.py | Python | mit | 3,640 |
#!/usr/bin/env python
# Copyright 2015, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
# Emit canned "healthy" output for the argument patterns the HP monitoring
# plugin is expected to send; fail loudly on anything else so tests notice.
args = sys.argv[1:]
if args == ['-s', 'show server']:
    print 'Status Ok'
elif args == ['-s', 'show dimm']:
    print 'Status Ok'
elif args == ['ctrl', 'all', 'show', 'config']:
    print 'logicaldrive OK)'
else:
    # Unexpected invocation: exit non-zero with a descriptive message.
    sys.exit('fake_hp_monitoring.py has received the following unexpected arguments - "%s".' % str(args))
| nrb/rpc-openstack | maas/testing/fake_hp_monitoring.py | Python | apache-2.0 | 940 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The code is partially taken (and modified) from django rest framework
# that is licensed under the following terms:
#
# Copyright (c) 2011-2014, Tom Christie
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from functools import update_wrapper
from django.utils.decorators import classonlymethod
from django.utils.translation import ugettext as _
from . import views
from . import mixins
from . import generics
class ViewSetMixin(object):
    """
    This is the magic.

    Overrides `.as_view()` so that it takes an `actions` keyword that performs
    the binding of HTTP methods to actions on the Resource.

    For example, to create a concrete view binding the 'GET' and 'POST' methods
    to the 'list' and 'create' actions...

    view = MyViewSet.as_view({'get': 'list', 'post': 'create'})
    """

    @classonlymethod
    def as_view(cls, actions=None, **initkwargs):
        """
        Because of the way class based views create a closure around the
        instantiated view, we need to totally reimplement `.as_view`,
        and slightly modify the view function that is created and returned.
        """
        # The suffix initkwarg is reserved for identifing the viewset type
        # eg. 'List' or 'Instance'.
        cls.suffix = None

        # sanitize keyword arguments
        for key in initkwargs:
            if key in cls.http_method_names:
                raise TypeError("You tried to pass in the %s method name as a "
                                "keyword argument to %s(). Don't do that."
                                % (key, cls.__name__))
            if not hasattr(cls, key):
                raise TypeError("%s() received an invalid keyword %r"
                                % (cls.__name__, key))

        def view(request, *args, **kwargs):
            # A fresh viewset instance is created for every request.
            self = cls(**initkwargs)
            # We also store the mapping of request methods to actions,
            # so that we can later set the action attribute.
            # eg. `self.action = 'list'` on an incoming GET request.
            self.action_map = actions

            # Bind methods to actions
            # This is the bit that's different to a standard view
            for method, action in actions.items():
                handler = getattr(self, action)
                setattr(self, method, handler)

            # Patch this in as it's otherwise only present from 1.5 onwards
            if hasattr(self, 'get') and not hasattr(self, 'head'):
                self.head = self.get

            # And continue as usual
            return self.dispatch(request, *args, **kwargs)

        # take name and docstring from class
        update_wrapper(view, cls, updated=())

        # and possible attributes set by decorators
        # like csrf_exempt from dispatch
        update_wrapper(view, cls.dispatch, assigned=())

        # We need to set these on the view function, so that breadcrumb
        # generation can pick out these bits of information from a
        # resolved URL.
        view.cls = cls
        view.suffix = initkwargs.get('suffix', None)
        return view

    def initialize_request(self, request, *args, **kargs):
        """
        Set the `.action` attribute on the view,
        depending on the request method.
        """
        request = super(ViewSetMixin, self).initialize_request(request, *args, **kargs)
        self.action = self.action_map.get(request.method.lower())
        return request

    def check_permissions(self, request, action:str=None, obj:object=None):
        """
        Check permissions for the given (or current) action on *obj*.
        """
        # Default to the action resolved for this request when the caller
        # does not name one explicitly.
        if action is None:
            action = self.action
        return super().check_permissions(request, action=action, obj=obj)
class NestedViewSetMixin(object):
    """Filter the base queryset by the parent-lookup URL kwargs."""

    def get_queryset(self):
        base_queryset = super().get_queryset()
        return self._filter_queryset_by_parents_lookups(base_queryset)

    def _filter_queryset_by_parents_lookups(self, queryset):
        # Apply the URL kwargs as queryset filters; an empty kwarg dict
        # leaves the queryset untouched.
        lookup_params = self._get_parents_query_dict()
        if not lookup_params:
            return queryset
        return queryset.filter(**lookup_params)

    def _get_parents_query_dict(self):
        # One filter entry per URL kwarg captured for this view.
        return {name: self.kwargs.get(name) for name in self.kwargs}
class ViewSet(ViewSetMixin, views.APIView):
    """
    The base ViewSet class does not provide any actions by default.
    """
    pass
class GenericViewSet(ViewSetMixin, generics.GenericAPIView):
    """
    The GenericViewSet class does not provide any actions by default,
    but does include the base set of generic view behavior, such as
    the `get_object` and `get_queryset` methods.
    """
    pass
class ReadOnlyListViewSet(GenericViewSet):
    """
    A viewset that provides default `list()` action.
    """
    pass


class ReadOnlyModelViewSet(mixins.RetrieveModelMixin,
                           mixins.ListModelMixin,
                           GenericViewSet):
    """
    A viewset that provides default `list()` and `retrieve()` actions.
    """
    pass


class ModelViewSet(mixins.CreateModelMixin,
                   mixins.RetrieveModelMixin,
                   mixins.UpdateModelMixin,
                   mixins.DestroyModelMixin,
                   mixins.ListModelMixin,
                   GenericViewSet):
    """
    A viewset that provides default `create()`, `retrieve()`, `update()`,
    `partial_update()`, `destroy()` and `list()` actions.
    """
    pass


class ModelCrudViewSet(ModelViewSet):
    """A viewset identical to ModelViewSet; kept as a named alias."""
    pass


class ModelListViewSet(mixins.RetrieveModelMixin,
                       mixins.ListModelMixin,
                       GenericViewSet):
    """A viewset that provides only `retrieve()` and `list()` actions."""
    pass


class ModelUpdateRetrieveViewSet(mixins.UpdateModelMixin,
                                 mixins.RetrieveModelMixin,
                                 GenericViewSet):
    """A viewset that provides only update and `retrieve()` actions."""
    pass


class ModelRetrieveViewSet(mixins.RetrieveModelMixin,
                           GenericViewSet):
    """A viewset that provides only the `retrieve()` action."""
    pass
| taigaio/taiga-back | taiga/base/api/viewsets.py | Python | agpl-3.0 | 7,937 |
# -*- coding: utf-8 -*-
from datetime import datetime
from .channel import Channel
import json, time, base64
def loggedIn(func):
    """Decorator: run *func* only when the client session is logged in.

    When not logged in, the wrapped call is skipped (returns None) and the
    client's callback is notified instead.
    """
    from functools import wraps

    # FIX: preserve the wrapped function's name/docstring; the original
    # replaced every decorated method's metadata with `checkLogin`.
    @wraps(func)
    def checkLogin(*args, **kwargs):
        if args[0].isLogin:
            return func(*args, **kwargs)
        else:
            args[0].callback.other('You want to call the function, you must login to LINE')
    return checkLogin
class Timeline(Channel):
    """Client for the LINE Timeline channel.

    Logs into the timeline channel on construction and exposes helpers for
    feeds, posts, comments, likes, group posts and group albums.  All HTTP
    helpers require a logged-in session (enforced by @loggedIn).
    """

    def __init__(self):
        Channel.__init__(self, self.channel, self.server.CHANNEL_ID['LINE_TIMELINE'], False)
        self.tl = self.getChannelResult()
        self.__loginTimeline()

    def __loginTimeline(self):
        # Install the timeline-specific headers, including the channel token
        # obtained from the channel login.
        self.server.setTimelineHeadersWithDict({
            'Content-Type': 'application/json',
            'User-Agent': self.server.USER_AGENT,
            'X-Line-Mid': self.profile.mid,
            'X-Line-Carrier': self.server.CARRIER,
            'X-Line-Application': self.server.APP_NAME,
            'X-Line-ChannelToken': self.tl.channelAccessToken
        })
        self.profileDetail = self.getProfileDetail()

    # --- Timeline ---

    @loggedIn
    def getFeed(self, postLimit=10, commentLimit=1, likeLimit=1, order='TIME'):
        """Fetch the timeline feed."""
        params = {'postLimit': postLimit, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'order': order}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/feed/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getHomeProfile(self, mid=None, postLimit=10, commentLimit=1, likeLimit=1):
        """Fetch the home (profile) posts of *mid*; defaults to own profile."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'postLimit': postLimit, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'sourceType': 'LINE_PROFILE_COVER'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/post/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getProfileDetail(self, mid=None):
        """Fetch the profile popup details of *mid*; defaults to own profile."""
        if mid is None:
            mid = self.profile.mid
        params = {'userMid': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v1/userpopup/getDetail.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def updateProfileCoverById(self, objId):
        """Set the profile cover to an already-uploaded object id."""
        params = {'coverImageId': objId}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/home/updateCover.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getProfileCoverId(self, mid=None):
        """Return the object id of the profile cover of *mid*."""
        if mid is None:
            mid = self.profile.mid
        home = self.getProfileDetail(mid)
        return home['result']['objectId']

    @loggedIn
    def getProfileCoverURL(self, mid=None):
        """Return a download URL for the profile cover of *mid*."""
        if mid is None:
            mid = self.profile.mid
        home = self.getProfileDetail(mid)
        params = {'userid': mid, 'oid': home['result']['objectId']}
        return self.server.urlEncode(self.server.LINE_OBS_DOMAIN, '/myhome/c/download.nhn', params)

    # --- Post ---

    @loggedIn
    def createPost(self, text, holdingTime=None):
        """Create a public timeline post containing *text*."""
        params = {'homeId': self.profile.mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/post/create.json', params)
        payload = {'postInfo': {'readPermission': {'type': 'ALL'}}, 'sourceType': 'TIMELINE', 'contents': {'text': text}}
        if holdingTime is not None:
            payload["postInfo"]["holdingTime"] = holdingTime
        data = json.dumps(payload)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def sendPostToTalk(self, mid, postId):
        """Share an existing post into the talk (chat) with *mid*."""
        if mid is None:
            mid = self.profile.mid
        params = {'receiveMid': mid, 'postId': postId}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/post/sendPostToTalk.json', params)
        # BUG FIX: the original passed `data=data` although no `data`
        # variable exists in this method, raising NameError on every call.
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def createComment(self, mid, postId, text):
        """Add a comment to a post on the home of *mid*."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/comment/create.json', params)
        data = {'commentText': text, 'activityExternalId': postId, 'actorId': mid}
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def deleteComment(self, mid, postId, commentId):
        """Delete a comment from a post on the home of *mid*."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/comment/delete.json', params)
        data = {'commentId': commentId, 'activityExternalId': postId, 'actorId': mid}
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def likePost(self, mid, postId, likeType=1001):
        """Like a post; likeType selects one of the six LINE reactions."""
        if mid is None:
            mid = self.profile.mid
        if likeType not in [1001, 1002, 1003, 1004, 1005, 1006]:
            raise Exception('Invalid parameter likeType')
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/like/create.json', params)
        data = {'likeType': likeType, 'activityExternalId': postId, 'actorId': mid}
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def unlikePost(self, mid, postId):
        """Remove a previously added like from a post."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/like/cancel.json', params)
        data = {'activityExternalId': postId, 'actorId': mid}
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    # --- Group Post ---

    @loggedIn
    def createGroupPost(self, mid, text):
        """Create a post visible only to group *mid*."""
        payload = {'postInfo': {'readPermission': {'homeId': mid}}, 'sourceType': 'TIMELINE', 'contents': {'text': text}}
        data = json.dumps(payload)
        r = self.server.postContent(self.server.LINE_TIMELINE_API + '/v39/post/create.json', data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def createGroupAlbum(self, mid, name):
        """Create a new image album in group *mid*."""
        data = json.dumps({'title': name, 'type': 'image'})
        params = {'homeId': mid, 'count': '1', 'auto': '0'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album.json', params)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        if r.status_code != 201:
            raise Exception('Create a new album failure.')
        return True

    @loggedIn
    def deleteGroupAlbum(self, mid, albumId):
        """Delete an album from group *mid*."""
        params = {'homeId': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album/%s' % albumId, params)
        r = self.server.deleteContent(url, headers=self.server.timelineHeaders)
        # NOTE(review): treats anything but 201 as failure even for DELETE —
        # confirm the API really answers 201 here.
        if r.status_code != 201:
            raise Exception('Delete album failure.')
        return True

    @loggedIn
    def getGroupPost(self, mid, postLimit=10, commentLimit=1, likeLimit=1):
        """List the posts of group *mid*."""
        params = {'homeId': mid, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'sourceType': 'TALKROOM'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v39/post/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    # --- Group Album ---

    @loggedIn
    def getGroupAlbum(self, mid):
        """List the albums of group *mid*."""
        params = {'homeId': mid, 'type': 'g', 'sourceType': 'TALKROOM'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/albums.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def changeGroupAlbumName(self, mid, albumId, name):
        """Rename an album in group *mid*."""
        data = json.dumps({'title': name})
        params = {'homeId': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album/%s' % albumId, params)
        r = self.server.putContent(url, data=data, headers=self.server.timelineHeaders)
        if r.status_code != 201:
            raise Exception('Change album name failure.')
        return True

    @loggedIn
    def addImageToAlbum(self, mid, albumId, path):
        """Upload the image file at *path* into a group album."""
        # FIX: read the file with a context manager so the handle is always
        # closed (the original leaked it and shadowed the builtin `file`).
        with open(path, 'rb') as image_file:
            image_data = image_file.read()
        params = {
            'oid': int(time.time()),
            'quality': '90',
            'range': len(image_data),
            'type': 'image'
        }
        hr = self.server.additionalHeaders(self.server.timelineHeaders, {
            'Content-Type': 'image/jpeg',
            'X-Line-Mid': mid,
            'X-Line-Album': albumId,
            'x-obs-params': self.genOBSParams(params, 'b64')
        })
        r = self.server.getContent(self.server.LINE_OBS_DOMAIN + '/album/a/upload.nhn', data=image_data, headers=hr)
        if r.status_code != 201:
            raise Exception('Add image to album failure.')
        return r.json()

    @loggedIn
    def getImageGroupAlbum(self, mid, albumId, objId, returnAs='path', saveAs=''):
        """Download an album image; return a path, a bool or raw bytes."""
        # Validate the requested return mode before doing any work.
        if returnAs not in ['path', 'bool', 'bin']:
            raise Exception('Invalid returnAs value')
        if saveAs == '':
            saveAs = self.genTempFile('path')
        hr = self.server.additionalHeaders(self.server.timelineHeaders, {
            'Content-Type': 'image/jpeg',
            'X-Line-Mid': mid,
            'X-Line-Album': albumId
        })
        params = {'ver': '1.0', 'oid': objId}
        url = self.server.urlEncode(self.server.LINE_OBS_DOMAIN, '/album/a/download.nhn', params)
        r = self.server.getContent(url, headers=hr)
        if r.status_code == 200:
            self.saveFile(saveAs, r.raw)
            if returnAs == 'path':
                return saveAs
            elif returnAs == 'bool':
                return True
            elif returnAs == 'bin':
                return r.raw
        else:
            raise Exception('Download image album failure.')
| fadhiilrachman/line-py | linepy/timeline.py | Python | bsd-3-clause | 10,410 |
from argon import rgba
from box import Box
blackish = rgba(0x00, 0x00, 0x00, 0x40)
whiteish = rgba(0xf6, 0xf3, 0xe8)
blueish = rgba(0x00, 0xf3, 0xe8, 0x80)
def clamp(low, high, value):
    """Return *value* constrained to the inclusive range [low, high]."""
    return min(high, max(value, low))
class Segment(object):
    """A single word prepared for line breaking.

    ``width`` is the rendered pixel width of ``text``; ``text`` is the
    word itself (no spaces).
    """
    def __init__(self, width, text):
        self.width = width
        self.text = text
def line_break(width, words, space_width):
    """Break *words* (objects with a ``width`` attribute) into lines.

    Dynamic-programming line breaking: minimises the total squared
    leftover space per line.  Returns a list of lines, each a list of
    consecutive words from the input.
    """
    # table[j] = (best cost of laying out words[:j],
    #             start index of the line that ends at j)
    table = [(0.0, 0)]
    for stop in range(1, len(words)+1):
        start = stop - 1
        c = words[start].width
        # Cost of placing words[start] alone on the final line.
        best = (width-c)**2.0 + table[start][0], start
        start -= 1
        # Widen the candidate final line leftwards while it still fits.
        while start >= 0 and c <= width:
            c += words[start].width + space_width
            p = (width-c)**2.0 + table[start][0]
            if p <= best[0] and c <= width:
                best = p, start
            start -= 1
        table.append(best)
    # Walk the table backwards to recover the chosen break points.
    lines = []
    j = len(words)
    while j > 0:
        _, i = table[j]
        lines.append(words[i:j])
        j = i
    lines.reverse()
    return lines
def line_offsets(lines):
    """Yield the character offset at the start of each line.

    The first value is always 0; each subsequent value adds the line's
    character count, the single spaces between its words, and one
    trailing separator character.
    """
    total = 0
    yield total
    for line in lines:
        chars = sum(len(word.text) for word in line)
        spaces = max(0, len(line) - 1)
        total += chars + spaces + 1
        yield total
class Paragraph(Box):
    """An editable, selectable block of word-wrapped text.

    The caret and selection are character offsets into ``text``:
    ``head`` is the moving end, ``tail`` the anchor.
    """
    def __init__(self, font, text, tags, head=0, tail=0):
        self.font = font
        self.text = text
        self.tags = tags
        # NOTE(review): the +11/+40 offsets look like baked-in demo values
        # rather than a real invariant -- confirm before relying on them.
        self.head = head + 11
        self.tail = tail + 40
        Box.__init__(self)
        self.width = 300
        self.line_height = font.height * 1.2
        self.dragging = False
    def update(self):
        # Re-run line breaking and cache layout metrics for rendering.
        font = self.font
        space_width = font.measure(' ')[-1]
        self.lines = line_break(self.width, [
            Segment(font.measure(word)[-1], word)
            for word in self.text.split(' ')
        ], space_width)
        self.height = self.line_height * len(self.lines)
        # bases[i] = character offset at the start of line i.
        self.bases = list(line_offsets(self.lines))
        # offsets[i] = cumulative pixel x-offset of character i.
        self.offsets = font.measure(self.text)
    def getline(self, offset):
        # Index of the line containing the given character offset.
        for j, base in enumerate(reversed(self.bases), 1):
            if base <= offset:
                return len(self.bases) -j
        return len(self.bases) - j
    def getlox(self, offset):
        # (line index, pixel x within that line) for a character offset.
        line = self.getline(offset)
        base = self.bases[line]
        return line, self.offsets[offset] - self.offsets[base]
    @property
    def start(self):
        # Selection start: smaller of caret and anchor.
        return min(self.head, self.tail)
    @property
    def stop(self):
        # Selection end: larger of caret and anchor.
        return max(self.head, self.tail)
    def textgeometry(self, argon):
        # Yield one (position, text, color, font) tuple per visual line.
        x, y = self.left, self.top + self.font.baseline
        for line in self.lines:
            text = ' '.join(word.text for word in line)
            yield (x, y), text, whiteish, self.font
            y += self.line_height
    def selgeometry(self, argon, start, stop):
        # Build 1-3 highlight rectangles covering [start, stop).
        x0, y = self.left, self.top
        x1 = x0 + self.width
        l0, o0 = self.getlox(start)
        l1, o1 = self.getlox(stop)
        x2, x3 = x0+o0, x0+o1
        if l0 == l1:
            # Selection confined to a single line.
            rect = x2, y + self.line_height * l0, x3-x2, self.line_height
            return [(rect, blueish, argon.plain)]
        elif l0+1 == l1:
            # Two adjacent lines: tail of the first, head of the second.
            rect0 = x2, y + self.line_height * l0, x1-x2, self.line_height
            rect1 = x0, y + self.line_height * l1, x3-x0, self.line_height
            return [
                (rect0, blueish, argon.plain),
                (rect1, blueish, argon.plain)
            ]
        else:
            # First-line tail, last-line head, and a full-width middle block.
            rect0 = x2, y + self.line_height * l0, x1-x2, self.line_height
            rect1 = x0, y + self.line_height * l1, x3-x0, self.line_height
            rect2 = x0, y + self.line_height * (l0+1), x1-x0, self.line_height*(l1-l0-1)
            return [
                (rect0, blueish, argon.plain),
                (rect1, blueish, argon.plain),
                (rect2, blueish, argon.plain)
            ]
    def render(self, argon):
        # Recompute layout, then draw background, caret, text and selection.
        self.update()
        font = self.font
        x, y = self.left, self.top + font.baseline
        l, o = self.getlox(self.head)
        argon.render([
            (self.rect, blackish, argon.plain),
            ((x+o-1, self.top+self.line_height*l, 2, self.line_height), blueish, argon.plain),
        ] + list(self.textgeometry(argon))
        + list(self.selgeometry(argon, self.start, self.stop))
        )
    def pick_offset(self, (x, y)):
        # Python 2 tuple parameter: (x, y) is a point in window coordinates.
        # Returns the character offset closest to that point.
        line = clamp(0, len(self.lines)-1, int((y - self.top) / self.line_height))
        base = self.bases[line]
        x = (x - self.left)
        best = base, abs(x)
        for i in range(base, self.bases[line+1]):
            if len(self.offsets) <= i:
                continue
            o = abs(self.offsets[i] - self.offsets[base] - x)
            if o <= best[1]:
                best = i, o
        return best[0]
    def mousedown(self, buttons, pos):
        # Start a drag-selection: collapse caret and anchor to the click.
        self.head = self.tail = self.pick_offset(pos)
        self.dragging = True
    def mouseup(self, buttons, pos):
        self.head = self.pick_offset(pos)
        self.dragging = False
    def mousemotion(self, pos, vel):
        if self.dragging:
            self.head = self.pick_offset(pos)
    def replace(self, text, start, stop):
        # Splice *text* over the character range [start, stop).
        self.text = self.text[:start] + text + self.text[stop:]
    def keydown(self, name, mod, text):
        if name in ('backspace', 'delete') and self.start < self.stop:
            # Non-empty selection: the keystroke replaces it.
            self.replace(text, self.start, self.stop)
            self.head = self.tail = self.start
        elif name == 'backspace':
            last = clamp(0, len(self.text), self.head-1)
            self.replace('', last, self.head)
            self.head = self.tail = last
        elif name == 'delete':
            nxt = clamp(0, len(self.text), self.head+1)
            self.replace('', self.head, nxt)
            self.head = self.tail = self.head
        elif len(text) > 0:
            self.replace(text, self.start, self.stop)
            self.head = self.tail = self.start + len(text)
| cheery/essence | richtext.py | Python | gpl-3.0 | 5,887 |
from tehbot.plugins import *
class DeathlistPlugin(StandardPlugin):
    """Bot command plugin registered under the name "deathlist"."""
    def execute(self, connection, event, extra, dbconn):
        # NOTE(review): placeholder implementation -- always replies "blanky";
        # no deathlist logic exists yet.
        return "blanky"
register_plugin("deathlist", DeathlistPlugin())
| spaceone/tehbot | tehbot/plugins/deathlist/__init__.py | Python | mit | 199 |
# Copyright (c) 2015 Kevin Nygaard
# See LICENSE.txt for details.
"""A gui display of stroke suggestions """
import wx
import re
from wx.lib.utils import AdjustRectToScreen
PAT = re.compile(r'[-\'"\w]+|[^\w\s]')
TITLE = 'Plover: Suggestions Display'
ON_TOP_TEXT = "Always on top"
UI_BORDER = 4
MAX_STROKE_LINES = 38
LAST_WORD_TEXT = 'Last Word: %s'
DEFAULT_LAST_WORD = '[]'
class SuggestionsDisplayDialog(wx.Dialog):
    """Dialog showing stroke suggestions for the most recent word(s).

    A single instance is kept alive at a time (see ``close_all``); stroke
    updates arrive via the static ``stroke_handler`` and are forwarded to
    every live instance on the wx event thread.
    """
    # All currently open dialogs; shared across the class.
    other_instances = []
    def __init__(self, parent, config, engine):
        self.config = config
        self.engine = engine
        # Rolling buffer of recently translated output text.
        self.words = ''
        on_top = config.get_suggestions_display_on_top()
        style = wx.DEFAULT_DIALOG_STYLE
        style |= wx.RESIZE_BORDER
        if on_top:
            style |= wx.STAY_ON_TOP
        pos = (config.get_suggestions_display_x(), config.get_suggestions_display_y())
        wx.Dialog.__init__(self, parent, title=TITLE, style=style, pos=pos)
        self.SetBackgroundColour(wx.WHITE)
        sizer = wx.BoxSizer(wx.VERTICAL)
        self.on_top = wx.CheckBox(self, label=ON_TOP_TEXT)
        self.on_top.SetValue(config.get_suggestions_display_on_top())
        self.on_top.Bind(wx.EVT_CHECKBOX, self.handle_on_top)
        sizer.Add(self.on_top, flag=wx.ALL, border=UI_BORDER)
        # NOTE(review): `box` is created but never added to the sizer.
        box = wx.BoxSizer(wx.HORIZONTAL)
        self.header = MyStaticText(self, label=LAST_WORD_TEXT % DEFAULT_LAST_WORD)
        font = self.header.GetFont()
        font.SetFaceName("Courier")
        self.header.SetFont(font)
        sizer.Add(self.header, flag=wx.LEFT | wx.RIGHT | wx.BOTTOM,
                  border=UI_BORDER)
        sizer.Add(wx.StaticLine(self), flag=wx.EXPAND)
        self.listbox = wx.ListBox(self, size=wx.Size(210, 500))
        font = self.listbox.GetFont()
        font.SetFaceName("Courier")
        self.listbox.SetFont(font)
        sizer.Add(self.listbox,
                  proportion=1,
                  flag=wx.ALL | wx.FIXED_MINSIZE | wx.EXPAND,
                  border=3)
        self.SetSizer(sizer)
        self.SetAutoLayout(True)
        sizer.Layout()
        sizer.Fit(self)
        self.Show()
        # Only one suggestions window at a time.
        self.close_all()
        self.other_instances.append(self)
        self.SetRect(AdjustRectToScreen(self.GetRect()))
        self.Bind(wx.EVT_MOVE, self.on_move)
    def on_move(self, event):
        # Persist the window position so it is restored next session.
        pos = self.GetScreenPositionTuple()
        self.config.set_suggestions_display_x(pos[0])
        self.config.set_suggestions_display_y(pos[1])
        event.Skip()
    def on_close(self, event):
        self.other_instances.remove(self)
        event.Skip()
    def lookup_suggestions(self, phrase):
        # NOTE(review): `found` is assigned but never used, and the string
        # below is NOT a docstring (it follows a statement, so it is a no-op).
        found = False
        ''' Return stroke suggestions for a given phrase, if it exists
        If we can't find an entry, we start manipulating the phrase to see if we
        can come up with something for the user. This allows for suggestions to
        be given for prefixes/suffixes.
        '''
        # Try the phrase verbatim and with affix/glue operators applied.
        mods = ['%s', '{^%s}', '{^%s^}', '{%s^}', '{&%s}']
        d = self.engine.get_dictionary()
        for x in [mod % phrase for mod in mods]:
            strokes_list = d.reverse_lookup(x)
            if not strokes_list:
                continue
            else:
                # Return list of suggestions, sorted by amount of keys used
                return True, x, sorted(strokes_list, lambda x, y: cmp(sum(map(len, x)), sum(map(len, y))))
        return False, phrase, []
    def show_stroke(self, old, new):
        # Undo the text of `old` actions, then apply `new` ones, keeping
        # self.words consistent with what was actually output.
        for action in old:
            remove = len(action.text)
            self.words = self.words[:-remove]
            self.words = self.words + action.replace
        for action in new:
            remove = len(action.replace)
            if remove > 0:
                self.words = self.words[:-remove]
            self.words = self.words + action.text
        # Limit phrasing memory to 100 characters, because most phrases probably
        # don't exceed this length
        self.words = self.words[-100:]
        split_words = PAT.findall(self.words)
        interp_phrase = split_words[-1:]
        self.listbox.Clear()
        # Try progressively shorter word suffixes until one has suggestions.
        for phrase in SuggestionsDisplayDialog.tails(split_words):
            phrase = ' '.join(phrase)
            found, interp_phrase, suggestions = self.lookup_suggestions(phrase)
            if found:
                # Limit arbitrarily to 10 suggestions
                for suggestion in suggestions[:10]:
                    self.listbox.Append('/'.join(suggestion))
                break
        else:
            self.listbox.Append('No suggestions')
        self.header.SetLabel(LAST_WORD_TEXT % interp_phrase)
    def handle_on_top(self, event):
        # Recreate the dialog so the STAY_ON_TOP style takes effect.
        self.config.set_suggestions_display_on_top(event.IsChecked())
        self.display(self.GetParent(), self.config, self.engine)
    @staticmethod
    def tails(ls):
        ''' Return all tail combinations (a la Haskell)
        tails :: [x] -> [[x]]
        >>> tails('abcd')
        ['abcd', 'bcd', 'cd', d']
        '''
        for i in xrange(0, len(ls)):
            yield ls[i:]
    @staticmethod
    def close_all():
        # Close every open instance and forget them.
        for instance in SuggestionsDisplayDialog.other_instances:
            instance.Close()
        del SuggestionsDisplayDialog.other_instances[:]
    @staticmethod
    def stroke_handler(old, new):
        # Called from the steno engine thread; marshal onto the UI thread.
        for instance in SuggestionsDisplayDialog.other_instances:
            wx.CallAfter(instance.show_stroke, old, new)
    @staticmethod
    def display(parent, config, engine):
        # SuggestionsDisplayDialog shows itself.
        SuggestionsDisplayDialog(parent, config, engine)
# This class exists solely so that the text doesn't get grayed out when the
# window is not in focus.
class MyStaticText(wx.PyControl):
    """Owner-drawn StaticText that always paints black-on-white text,
    regardless of window focus (a plain wx.StaticText grays out)."""
    def __init__(self, parent, id=wx.ID_ANY, label="",
                 pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style=0, validator=wx.DefaultValidator,
                 name="MyStaticText"):
        wx.PyControl.__init__(self, parent, id, pos, size, style|wx.NO_BORDER,
                              validator, name)
        wx.PyControl.SetLabel(self, label)
        self.InheritAttributes()
        self.SetInitialSize(size)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
    def OnPaint(self, event):
        # Double-buffered paint to avoid flicker.
        dc = wx.BufferedPaintDC(self)
        self.Draw(dc)
    def Draw(self, dc):
        width, height = self.GetClientSize()
        if not width or not height:
            return
        backBrush = wx.Brush(wx.WHITE, wx.SOLID)
        dc.SetBackground(backBrush)
        dc.Clear()
        dc.SetTextForeground(wx.BLACK)
        dc.SetFont(self.GetFont())
        label = self.GetLabel()
        dc.DrawText(label, 0, 0)
    def OnEraseBackground(self, event):
        # Intentionally empty: all drawing happens in OnPaint.
        pass
    def SetLabel(self, label):
        wx.PyControl.SetLabel(self, label)
        self.InvalidateBestSize()
        self.SetSize(self.GetBestSize())
        self.Refresh()
    def SetFont(self, font):
        wx.PyControl.SetFont(self, font)
        self.InvalidateBestSize()
        self.SetSize(self.GetBestSize())
        self.Refresh()
    def DoGetBestSize(self):
        # Best size is exactly the extent of the label in the current font.
        label = self.GetLabel()
        font = self.GetFont()
        if not font:
            font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        dc = wx.ClientDC(self)
        dc.SetFont(font)
        textWidth, textHeight = dc.GetTextExtent(label)
        best = wx.Size(textWidth, textHeight)
        self.CacheBestSize(best)
        return best
    def AcceptsFocus(self):
        # Static text should never take keyboard focus.
        return False
    def SetForegroundColour(self, colour):
        wx.PyControl.SetForegroundColour(self, colour)
        self.Refresh()
    def SetBackgroundColour(self, colour):
        wx.PyControl.SetBackgroundColour(self, colour)
        self.Refresh()
    def GetDefaultAttributes(self):
        return wx.StaticText.GetClassDefaultAttributes()
    def ShouldInheritColours(self):
        return True
| blockbomb/plover | plover/gui/suggestions.py | Python | gpl-2.0 | 8,019 |
#! /usr/bin/env python2
#
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: khmer-project@idyll.org
#
import sys
import khmer
def main():
files = sys.argv[2:]
total_reads = len(files) * [0]
n_consumed = len(files) * [0]
n_seq_kept = len(files) * [0]
print 'loading ht'
ht = khmer.new_counting_hash(1, 1, 1)
ht.load(sys.argv[1])
for i, infile in enumerate(files):
print 'outputting', infile + '.freq'
ht.output_fasta_kmer_pos_freq(infile, infile + ".freq")
if __name__ == '__main__':
main()
| kdmurray91/khmer | sandbox/fasta-to-abundance-hist.py | Python | bsd-3-clause | 703 |
"""Unit test for plugin manager.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import unittest
import mock
import pkg_resources
from treadmill import plugin_manager
# pylint: disable=protected-access
_EntryPoint = collections.namedtuple('EntryPoint', ['name'])
class PluginManagerTest(unittest.TestCase):
    """Tests plugin manager."""
    def setUp(self):
        # Save the module-level filter so each test can mutate it freely.
        self.saved = plugin_manager._FILTER
    def tearDown(self):
        plugin_manager._FILTER = self.saved
    @mock.patch('pkg_resources.iter_entry_points', mock.Mock())
    def test_whitelist(self):
        """Tests plugin manager whitelist."""
        pkg_resources.iter_entry_points.return_value = [
            _EntryPoint(x) for x in [
                'aaa',
                'bbb',
                'aaa.foo'
            ]
        ]
        # No whitelist - load all.
        plugin_manager._FILTER = {}
        self.assertEqual(
            set(['aaa', 'bbb', 'aaa.foo']),
            set(plugin_manager.names('foo.bar'))
        )
        plugin_manager._FILTER = {
            'x': ['aaa*']
        }
        # Section in the whitelist, will be filtered.
        self.assertEqual(
            set(['aaa', 'aaa.foo']),
            set(plugin_manager.names('x'))
        )
        # Section not in the whitelist, will load all.
        self.assertEqual(
            set(['aaa', 'bbb', 'aaa.foo']),
            set(plugin_manager.names('y'))
        )
    def test_load(self):
        """Test parsing filter string."""
        # Sections are colon-separated, patterns comma-separated.
        self.assertEqual(
            {
                'x': ['aaa'],
                'y': ['bbb'],
            },
            plugin_manager._load_filter('x=aaa:y=bbb')
        )
        self.assertEqual(
            {
                'x': ['aaa', 'ccc'],
                'y': ['bbb'],
            },
            plugin_manager._load_filter('x=aaa,ccc:y=bbb')
        )
if __name__ == '__main__':
    unittest.main()
| Morgan-Stanley/treadmill | lib/python/treadmill/tests/plugin_manager_test.py | Python | apache-2.0 | 2,048 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''yans
Yet Another Network Simulator
Usage:
yans [-V] [-t --topo=<topo_path>] (up|stop|destroy)
yans [-V] [-t --topo=<topo_path>] console <node_name>
yans -h | --help
yans --version
Options:
-h --help Show this screen.
--version Show version.
-t --topo=<topo_path> Network topology YAML [default: ./topo.yaml].
-V --verbose Verbose mode
'''
from __future__ import unicode_literals, print_function
from docopt import docopt
import logging
import sys
from docker_command import destroy_links, create_nodes, create_links, ensure_docker_machine, destroy_nodes, bind_interface, attach_node
from topology import Topology, TopologySpecError
__version__ = "0.1.0"
__author__ = "Kenneth Jiang"
__license__ = "MIT"
def main():
    '''Main entry point for the yans CLI.

    Parses the docopt arguments, loads the topology YAML, and runs the
    requested action: up, destroy, or console.
    '''
    args = docopt(__doc__, version=__version__)
    ensure_docker_machine()
    if args['--verbose']:
        logging.getLogger().setLevel(logging.DEBUG)
    topo_path = args['--topo']
    try:
        topo = Topology(topo_path)
    except TopologySpecError as spec_err:
        sys.exit(spec_err)
    if args['up']:
        # Links first, then nodes, then bind each link interface.
        create_links(topo.links)
        create_nodes(topo.nodes)
        for link in topo.links:
            for iface in link.interfaces:
                bind_interface(iface)
        topo.draw()
        print('To log into each node:')
        for node in topo.nodes:
            print('`$ yans -t ' + topo_path + ' console ' + node.name + '`')
    if args['destroy']:
        destroy_nodes(topo.nodes)
        destroy_links(topo.links)
    if args['console']:
        target = args['<node_name>']
        node = topo.node_by_name(target)
        if node:
            attach_node(node)
        else:
            sys.exit('Node named "' + target + '" is not found in ' + topo_path)
if __name__ == '__main__':
    main()
| kennethjiang/YANS | yans.py | Python | mit | 1,933 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Describes an optional resource needed for a build.
Typically a bunch of sources that can be built in-tree within another
package to enable optional features.
"""
class Resource(object):
    """An optional resource fetched alongside a package.

    Bundles the resource's name, the fetch strategy used to retrieve
    it, the destination directory, and the placement within that
    destination.
    """
    def __init__(self, name, fetcher, destination, placement):
        # Plain value object: every argument is stored verbatim.
        (self.name,
         self.fetcher,
         self.destination,
         self.placement) = name, fetcher, destination, placement
| TheTimmy/spack | lib/spack/spack/resource.py | Python | lgpl-2.1 | 1,762 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq show role --role`."""
from aquilon.aqdb.model import Role
from aquilon.worker.broker import BrokerCommand
class CommandShowRoleRole(BrokerCommand):
    """Implements ``aq show role --role``: look up and return one Role."""
    required_parameters = ["role"]
    def render(self, session, role, **_):
        # get_unique with compel=True raises if the role does not exist,
        # so the return value is always a valid Role.
        return Role.get_unique(session, role, compel=True)
| quattor/aquilon | lib/aquilon/worker/commands/show_role_role.py | Python | apache-2.0 | 1,061 |
#!/usr/bin/python
#
# just show env
#
# Tong Zhang <zhangt@frib.msu.edu>
# 2016-10-16 20:23:35 PM EDT
#
import numpy as np
import time
from flame import Machine
import matplotlib.pyplot as plt
# Load the lattice, propagate once through the whole machine, and record
# the transverse beam-envelope centroids at every BPM.
lat_fid = open('../lattice/test_392.lat', 'r')
m = Machine(lat_fid)
s = m.allocState({})
# Observe every element so readings are available after propagation.
r = m.propagate(s, 0, len(m), observe=range(len(m)))
bpms = m.find(type='bpm')
# moment0_env indices: 0 is the x centroid, 2 is the y centroid.
x, y = np.array([[r[i][1].moment0_env[j] for i in bpms]
    for j in [0,2]])
pos = np.array([r[i][1].pos for i in bpms])
# Save (z position, x, y) columns, timestamped in the file comment.
np.savetxt('orbit0.dat',
        np.vstack((pos, x, y)).T,
        fmt="%22.14e",
        comments='# orbit data saved at ' + time.ctime() + '\n',
        header="#{0:^22s} {1:^22s} {2:^22s}".format(
            "zpos [m]", "x [mm]", "y [mm]"),
        delimiter=' ')
# Plot only x; the y subplot is intentionally commented out.
fig = plt.figure()
ax1 = fig.add_subplot(211)
linex, = ax1.plot(pos, x, 'r-', label='$x$')
#ax2 = fig.add_subplot(212)
#lineryx, = ax2.plot(pos, y, 'b-', label='$y$')
ax1.legend()
#ax2.legend()
plt.show()
| archman/genopt | contrib/show_env.py | Python | mit | 998 |
from django import forms
class CommentForm(forms.Form):
    """Form for submitting a comment: free-form text plus the author's name."""
    text = forms.CharField(widget=forms.Textarea)
    name = forms.CharField(max_length=100)
| Lukasa/minimalog | posts/forms.py | Python | mit | 150 |
'''
Observer pattern lab
"Lab: Watching a File Change Size"
'''
import os
import time
class FileWatcher:
    """Subject in an observer pattern that polls a file's size.

    Observers subscribe to one of two channels -- 'increase' or
    'decrease' -- and are notified with the new size whenever the
    watched file grows or shrinks.
    """
    def __init__(self, path_of_file_to_watch):
        self.path = path_of_file_to_watch
        self.observers_for_channel = {name: set() for name in ('increase', 'decrease')}
        self._last_size = 0
    def register(self, channel, observer):
        """Subscribe *observer* to 'increase' or 'decrease' events."""
        self.observers_for_channel[channel].add(observer)
    def unregister(self, channel, observer):
        """Unsubscribe; a no-op when the observer was not registered."""
        self.observers_for_channel[channel].discard(observer)
    def check_forever(self):
        """Poll the file roughly ten times per second, forever."""
        while True:
            self.check_file()
            time.sleep(0.1)
    def check_file(self):
        """Stat the file once and notify observers if its size changed."""
        size = os.stat(self.path).st_size
        if size == self._last_size:
            return
        channel = 'increase' if size > self._last_size else 'decrease'
        self._last_size = size
        self.dispatch(channel, size)
    def dispatch(self, channel, size):
        """Send update(size) to every observer on the given channel."""
        for observer in self.observers_for_channel[channel]:
            observer.update(size)
class FileObserver:
    """Observer that reports file-size changes to stdout."""
    def __init__(self, name):
        self.name = name
    def update(self, size):
        """Called by FileWatcher with the file's new size in bytes."""
        message = '{} noticed that the file is now {} bytes'.format(self.name, size)
        print(message)
# Demo: three observers watching 'watched.txt' for size changes.
bob = FileObserver('Bob')
john = FileObserver('John')
stacy = FileObserver('Stacy')
watcher = FileWatcher('watched.txt')
watcher.register('increase', bob)
watcher.register('decrease', john)
watcher.register('increase', stacy)
watcher.register('decrease', stacy)
# Blocks forever polling the file; requires watched.txt to exist.
watcher.check_forever()
# Copyright 2015-2018 Aaron Maxwell. All rights reserved.
| ketan-analytics/learnpython | Safaribookonline-Python/courseware-btb/solutions/py3/patterns/filewatch_extra.py | Python | gpl-2.0 | 1,637 |
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Filter import Filter
from PLC.Auth import Auth
from PLC.Persons import Persons
from PLC.Sites import Sites
from PLC.Nodes import Nodes
from PLC.Slices import Slices
from PLC.Keys import Keys
from PLC.Peers import Peers
from PLC.Faults import *
class UnBindObjectFromPeer(Method):
    """
    This method is a hopefully temporary hack to let the sfa correctly
    detach the objects it creates from a remote peer object. This is
    needed so that the sfa federation link can work in parallel with
    RefreshPeer, as RefreshPeer depends on remote objects being
    correctly marked.
    UnBindObjectFromPeer is allowed to admins only.
    """
    roles = ['admin']
    known_types = ['site','person','slice','node','key']
    types_doc = ",".join(["'%s'"%type for type in known_types])
    accepts = [
        Auth(),
        Parameter(str,"Object type, among "+types_doc),
        Parameter(int,"object_id"),
        Parameter(str,"peer shortname"),
        Parameter(int,"remote object_id, set to 0 if unknown"),
    ]
    returns = Parameter (int, '1 if successful')
    def locate_object (self, object_type, object_id):
        """Return the single local object of the given type/id, or raise."""
        # locate e.g. the Nodes symbol
        class_obj = globals()[object_type.capitalize()+'s']
        id_name=object_type+'_id'
        # invoke e.g. Nodes ({'node_id':node_id})
        objs=class_obj(self.api,{id_name:object_id})
        if len(objs) != 1:
            # BUG FIX: the original interpolated the builtin ``type`` into
            # the message instead of the requested object type.
            raise PLCInvalidArgument("Cannot locate object, type=%s id=%d"%\
                                     (object_type,object_id))
        return objs[0]
    def call(self, auth, object_type, object_id, shortname, remote_object_id=0):
        # BUG FIX: ``accepts`` declares five arguments (including the remote
        # object_id) but the original ``call`` only took four, so invoking
        # the method as documented raised a TypeError.  The extra argument
        # is now accepted (and unused here); it defaults to 0 so callers
        # that omitted it keep working.
        object_type = object_type.lower()
        if object_type not in self.known_types:
            raise PLCInvalidArgument('Unrecognized object type %s'%object_type)
        peers=Peers(self.api,{'shortname':shortname.upper()})
        if len(peers) !=1:
            raise PLCInvalidArgument('No such peer with shortname %s'%shortname)
        peer=peers[0]
        # Renamed from ``object`` to avoid shadowing the builtin.
        obj = self.locate_object (object_type, object_id)
        remover_name = 'remove_'+object_type
        remove_function = getattr(type(peer),remover_name)
        remove_function(peer,obj)
        return 1
| dreibh/planetlab-lxc-plcapi | PLC/Methods/UnBindObjectFromPeer.py | Python | bsd-3-clause | 2,262 |
# Tai Sakuma <tai.sakuma@gmail.com>
import pytest
from alphatwirl.selection.factories.expand import expand_path_cfg
from alphatwirl.selection.factories.factory import FactoryDispatcher
from alphatwirl.selection.modules.LambdaStr import LambdaStr
from alphatwirl.selection.modules import All, Any, Not
##__________________________________________________________________||
@pytest.fixture()
def alias_dict():
    """Alias table used by every test: plain strings, (string, kwargs)
    tuples, chained aliases, and lambda strings with format placeholders."""
    return {
        'alias1': 'ev : ev.var1[0] >= 10',
        'alias2': ('ev : ev.var2[0] >= 20', dict(name='name2')),
        'alias3': 'alias1',
        'alias4': 'alias3',
        'alias5': 'ev : ev.var4[0] == {n}',
        'alias6': ('ev : {low} <= ev.var5[0] < {high}', dict(low=11, high=20))
    }
##__________________________________________________________________||
# path_cfg, expanded, obj
# Each pytest.param is a triple: (input path_cfg, its expanded dict form,
# the selection object the factory is expected to build).
params = [
    pytest.param(
        'alias1',
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var1[0] >= 10',
            name='alias1'
        ),
        LambdaStr(
            name='alias1',
            lambda_str='ev : ev.var1[0] >= 10'
        ),
        id='alias1'
    ),
    pytest.param(
        ('alias1', dict(name='name1')),
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var1[0] >= 10',
            name='name1'
        ),
        LambdaStr(name='name1', lambda_str='ev : ev.var1[0] >= 10'),
        id='alias1:with-name'
    ),
    pytest.param(
        'alias2',
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var2[0] >= 20',
            name='name2' # name has priority over alias
        ),
        LambdaStr(name='name2', lambda_str='ev : ev.var2[0] >= 20'),
        id='alias2:name-priority-over-alias'
    ),
    pytest.param(
        ('alias2', dict(name='new_name2')),
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var2[0] >= 20',
            name='new_name2' # name can be overridden
        ),
        LambdaStr(name='new_name2', lambda_str='ev : ev.var2[0] >= 20'),
        id='alias2:name-overridden'
    ),
    pytest.param(
        'alias3',
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var1[0] >= 10',
            name='alias3' # the outermost alias has priority
        ),
        LambdaStr(name='alias3', lambda_str='ev : ev.var1[0] >= 10'),
        id='alias3:alias-of-alias'
    ),
    pytest.param(
        'alias4',
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var1[0] >= 10',
            name='alias4' # the outermost alias has priority
        ),
        LambdaStr(name='alias4', lambda_str='ev : ev.var1[0] >= 10'),
        id='alias4:alias-of-alias-of-alias'
    ),
    pytest.param(
        ('alias5', dict(n=30)),
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : ev.var4[0] == {n}', # not formatted
            n=30,
            name='alias5'
        ),
        LambdaStr(name='alias5', lambda_str='ev : ev.var4[0] == 30'),
        id='alias5:not-formatted'
    ),
    pytest.param(
        'alias6',
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : {low} <= ev.var5[0] < {high}',
            low=11,
            high=20,
            name='alias6',
        ),
        LambdaStr(name='alias6', lambda_str='ev : 11 <= ev.var5[0] < 20'),
        id='alias6:not-formatted-with-default-values'
    ),
    pytest.param(
        ('alias6', dict(high=30)),
        dict(
            factory='LambdaStrFactory',
            components=(),
            lambda_str='ev : {low} <= ev.var5[0] < {high}',
            low=11,
            high=30,
            name='alias6'
        ),
        LambdaStr(name='alias6', lambda_str='ev : 11 <= ev.var5[0] < 30'),
        id='alias6:not-formatted-with-default-values-overridden'
    ),
]
##__________________________________________________________________||
@pytest.mark.parametrize('path_cfg, expected, _', params)
def test_alias(alias_dict, path_cfg, expected, _):
    """Aliases expand to the expected dict; expansion is idempotent."""
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    assert expected == actual
    # give expanded one
    actual = expand_path_cfg(path_cfg=actual, alias_dict=alias_dict)
    assert expected == actual
@pytest.mark.parametrize('path_cfg, expected, _', params)
def test_nested(alias_dict, path_cfg, expected, _):
    """Wrapping in All/Any/Not wraps the expansion in the matching factory."""
    path_cfg = dict(All=(path_cfg, ))
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='AllFactory', components=(expected, ))
    assert expected == actual
    path_cfg = dict(All=(path_cfg, ))
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='AllFactory', components=(expected, ))
    assert expected == actual
    path_cfg = dict(Not=path_cfg)
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='NotFactory', components=(expected, ))
    assert expected == actual
    path_cfg = dict(Any=(path_cfg, ))
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='AnyFactory', components=(expected, ))
    assert expected == actual
    path_cfg = dict(Any=(path_cfg, ))
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='AnyFactory', components=(expected, ))
    assert expected == actual
    path_cfg = dict(Not=path_cfg)
    actual = expand_path_cfg(path_cfg=path_cfg, alias_dict=alias_dict)
    expected = dict(factory='NotFactory', components=(expected, ))
    assert expected == actual
    # give expanded one
    actual = expand_path_cfg(path_cfg=actual, alias_dict=alias_dict)
    assert expected == actual
##__________________________________________________________________||
@pytest.mark.parametrize('path_cfg, _, expected', params)
def test_factory(alias_dict, path_cfg, _, expected):
    """FactoryDispatcher builds the expected selection object directly."""
    kargs = dict(
        AllClass=All, AnyClass=Any, NotClass=Not,
        LambdaStrClass=LambdaStr, aliasDict=alias_dict,
    )
    obj = FactoryDispatcher(path_cfg=path_cfg, **kargs)
    # Compare by repr/str since selection objects define no __eq__ here.
    assert repr(expected) == repr(obj)
    assert str(expected) == str(obj)
@pytest.mark.parametrize('path_cfg, _, expected', params)
def test_factory_nested(alias_dict, path_cfg, _, expected):
    """Nested All/Any/Not configs build matching nested selection objects."""
    kargs = dict(
        AllClass=All, AnyClass=Any, NotClass=Not,
        LambdaStrClass=LambdaStr, aliasDict=alias_dict,
    )
    path_cfg = dict(All=(path_cfg, ))
    expected = All(name='All', selections=[expected])
    path_cfg = dict(All=(path_cfg, ))
    expected = All(name='All', selections=[expected])
    path_cfg = dict(Not=path_cfg)
    expected = Not(name='Not', selection=expected)
    path_cfg = dict(Any=(path_cfg, ))
    expected = Any(name='Any', selections=[expected])
    path_cfg = dict(Any=(path_cfg, ))
    expected = Any(name='Any', selections=[expected])
    path_cfg = dict(Not=path_cfg)
    expected = Not(name='Not', selection=expected)
    obj = FactoryDispatcher(path_cfg=path_cfg, **kargs)
    # Compare by repr/str since selection objects define no __eq__ here.
    assert repr(expected) == repr(obj)
    assert str(expected) == str(obj)
##__________________________________________________________________||
| alphatwirl/alphatwirl | tests/unit/selection/factories/test_expand_path_cfg_alias.py | Python | bsd-3-clause | 7,417 |
from django.shortcuts import render, redirect
from users import forms
from django.contrib import auth
def signup(request):
    """Register a new account.

    GET renders an empty signup form.  POST validates it, redirects
    existing usernames to the login page, rejects mismatched passwords,
    and otherwise creates the user, logs it in and redirects to the
    beverages landing page.
    """
    errors = []
    if request.method == 'POST':
        form = forms.SignupForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            email = form.cleaned_data['email']
            password = form.cleaned_data['password']
            re_password = form.cleaned_data['re_password']
            try:
                # Existence check only (EAFP); the fetched user object was
                # bound but never used in the original.
                auth.models.User.objects.get(username=username)
                return redirect('accounts:login')
            except auth.models.User.DoesNotExist:
                if password != re_password:
                    errors.append('The passwords did not match, try again!')
                else:
                    auth.models.User.objects.create_user(username, email, password)
                    user = auth.authenticate(username=username, password=password)
                    auth.login(request, user)
                    return redirect('beverages:hello')
    else:
        form = forms.SignupForm()
    return render(request, 'users/signup.html', {'form': form, 'errors': errors})
def login(request):
    """Authenticate a user and start a session.

    GET renders the login form; POST checks the credentials and either
    logs the user in (redirecting to the beverages landing page) or
    re-renders the form with an explanatory error.
    """
    errors = []
    if request.method == 'POST':
        form = forms.LoginForm(request.POST)
        if form.is_valid():
            user = auth.authenticate(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password'],
            )
            if user is None:
                errors.append('The username and password were incorrect')
            elif not user.is_active:
                errors.append('The password is valid, but the account has been disabled!')
            else:
                auth.login(request, user)
                return redirect('beverages:hello')
    else:
        form = forms.LoginForm()
    return render(request, 'users/login.html', {'form': form, 'errors': errors})
def logout(request):
    """End the current session and send the user to the login page."""
    auth.logout(request)
    return redirect('accounts:login')
| ananko/web_collections | users/views.py | Python | mit | 2,095 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyDendropy(PythonPackage):
    """DendroPy is a Python library for phylogenetic computing. It provides
    classes and functions for the simulation, processing, and manipulation of
    phylogenetic trees and character matrices, and supports the reading and
    writing of phylogenetic data in a range of formats, such as NEXUS, NEWICK,
    NeXML, Phylip, FASTA, etc."""
    homepage = "https://www.dendropy.org"
    pypi = "dendropy/DendroPy-4.3.0.tar.gz"
    # sha256 checksums of the release tarballs.
    version('4.3.0', sha256='bd5b35ce1a1c9253209b7b5f3939ac22beaa70e787f8129149b4f7ffe865d510')
    version('3.12.0', sha256='38a0f36f2f7aae43ec5599408b0d0a4c80996b749589f025940d955a70fc82d4')
    depends_on('python@2.7:,3.4:')
    depends_on('py-setuptools', type='build')
| LLNL/spack | var/spack/repos/builtin/packages/py-dendropy/package.py | Python | lgpl-2.1 | 960 |
"""Line-like geometrical entities.
Contains
========
LinearEntity3D
Line3D
Ray3D
Segment3D
"""
from __future__ import print_function, division
from sympy.core import S, Dummy, nan
from sympy.functions.elementary.trigonometric import acos
from sympy.simplify.simplify import simplify
from sympy.solvers import solve
from sympy.geometry.exceptions import GeometryError
from .entity import GeometryEntity
from .point3d import Point3D
from .util import _symbol
from sympy.core.compatibility import is_sequence, range
class LinearEntity3D(GeometryEntity):
"""An base class for all linear entities (line, ray and segment)
in a 3-dimensional Euclidean space.
Attributes
==========
p1
p2
direction_ratio
direction_cosine
points
Notes
=====
This is a base class and is not meant to be instantiated.
"""
def __new__(cls, p1, p2, **kwargs):
    """Coerce both arguments to Point3D and build the entity.

    Raises ValueError when the two points coincide, because two
    distinct points are needed to fix a line, ray or segment.
    """
    a = Point3D(p1)
    b = Point3D(p2)
    if a == b:
        # if it makes sense to return a Point, handle in subclass
        raise ValueError(
            "%s.__new__ requires two unique Points." % cls.__name__)
    return GeometryEntity.__new__(cls, a, b, **kwargs)
@property
def p1(self):
    """The first defining point of the entity.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 3, 1)).p1
    Point3D(0, 0, 0)
    """
    return self.args[0]
@property
def p2(self):
    """The second defining point of the entity.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 3, 1)).p2
    Point3D(5, 3, 1)
    """
    return self.args[1]
@property
def direction_ratio(self):
    """Direction ratio ``[dx, dy, dz]`` from ``p1`` to ``p2``.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 3, 1)).direction_ratio
    [5, 3, 1]
    """
    start, end = self.points
    return start.direction_ratio(end)
@property
def direction_cosine(self):
    """Unit-normalized direction ratio of the entity.

    The three components are the cosines of the angles the line makes
    with the coordinate axes; their squares sum to 1.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 3, 1)).direction_cosine
    [sqrt(35)/7, 3*sqrt(35)/35, sqrt(35)/35]
    """
    start, end = self.points
    return start.direction_cosine(end)
@property
def length(self):
    """Length of the entity; infinite for the base class (a full line).

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(3, 5, 1)).length
    oo
    """
    return S.Infinity
@property
def points(self):
    """The two defining points as a ``(p1, p2)`` tuple.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 11, 1)).points
    (Point3D(0, 0, 0), Point3D(5, 11, 1))
    """
    return self.p1, self.p2
@staticmethod
def are_concurrent(*lines):
    """Return True when all given linear entities meet at one point.

    Strategy: intersect the first two entities.  If they do not meet
    in a point, concurrency is impossible; otherwise every remaining
    entity must contain that intersection point.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> p1, p2 = Point3D(0, 0, 0), Point3D(3, 5, 2)
    >>> p3, p4 = Point3D(-2, -2, -2), Point3D(0, 2, 1)
    >>> l1, l2, l3 = Line3D(p1, p2), Line3D(p1, p3), Line3D(p1, p4)
    >>> Line3D.are_concurrent(l1, l2, l3)
    True
    """
    # A single entity (or none) cannot be concurrent.
    if len(lines) <= 1:
        return False
    try:
        common = lines[0].intersection(lines[1])
        if not common:
            return False
        pt = common[0]
        # Every remaining entity must pass through the common point.
        return all(pt in entity for entity in lines[2:])
    except AttributeError:
        return False
def is_parallel(l1, l2):
    """Return True when ``l1`` and ``l2`` share a common direction.

    Two entities are parallel when their direction cosines are
    componentwise proportional by a single constant; a component that
    vanishes in one but not the other rules parallelism out.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(3, 4, 5))
    >>> l2 = Line3D(Point3D(2, 1, 1), Point3D(8, 9, 11))
    >>> Line3D.is_parallel(l1, l2)
    True
    """
    if l1 == l2:
        return True
    u = l1.direction_cosine
    v = l2.direction_cosine
    ratios = set()
    for ui, vi in zip(u, v):
        if ui and vi:
            ratios.add(ui/vi)
            # More than one distinct ratio means not proportional.
            if len(ratios) > 1:
                return False
        elif ui or vi:
            # One component is zero while the other is not.
            return False
    return True
def is_perpendicular(l1, l2):
    """Return True when the direction vectors have zero dot product.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(1, 1, 1))
    >>> l2 = Line3D(Point3D(1, 1, 1), Point3D(-1, 2, 0))
    >>> l1.is_perpendicular(l2)
    False
    """
    dot = sum(i*j for i, j in zip(l1.direction_ratio, l2.direction_ratio))
    return dot == 0
def angle_between(l1, l2):
    """Return the angle (in radians) between the two entities.

    Derived from ``dot(v1, v2) = |v1|*|v2|*cos(A)`` applied to the
    entities' direction vectors.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(1, 1, 1))
    >>> l2 = Line3D(Point3D(1, 1, 1), Point3D(-1, 2, 0))
    >>> l1.angle_between(l2)
    acos(-sqrt(2)/3)
    """
    u = l1.p2 - l1.p1
    w = l2.p2 - l2.p1
    return acos(u.dot(w)/(abs(u)*abs(w)))
def parallel_line(self, p):
    """Return the Line3D through ``p`` with the same direction as self.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(2, 3, 4))
    >>> l2 = l1.parallel_line(Point3D(-2, 2, 0))
    >>> l1.is_parallel(l2)
    True
    """
    return Line3D(p, direction_ratio=self.direction_ratio)
def perpendicular_line(self, p):
    """Return the Line3D through ``p`` perpendicular to self.

    Raises NotImplementedError when ``p`` lies on self (the
    perpendicular through a point of the line is not unique).

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(2, 3, 4))
    >>> l2 = l1.perpendicular_line(Point3D(-2, 2, 0))
    >>> l1.is_perpendicular(l2)
    True
    """
    p = Point3D(p)
    if p in self:
        raise NotImplementedError("Given point should not be on the line")
    param = Dummy()
    moving = self.arbitrary_point(param)
    # Foot of the perpendicular: solve for the parameter at which the
    # vector from the line to p is orthogonal to the line's direction.
    offset = [u - v for u, v in zip(p.args, moving.args)]
    ortho = sum(d*r for d, r in zip(offset, self.direction_ratio))
    sol = solve(ortho, param)
    foot = moving.subs(param, sol[0])
    return Line3D(p, foot)
def perpendicular_segment(self, p):
    """Return the Segment3D from ``p`` to its perpendicular foot on self.

    The endpoints are ``p`` and the closest point of the line carrying
    self (which may fall outside a ray or segment).  Raises
    NotImplementedError when ``p`` already lies on self.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(1, 1, 1))
    >>> l1.perpendicular_segment(Point3D(4, 0, 0))
    Segment3D(Point3D(4/3, 4/3, 4/3), Point3D(4, 0, 0))
    """
    p = Point3D(p)
    if p in self:
        raise NotImplementedError("Given point should not be on the line")
    param = Dummy()
    moving = self.arbitrary_point(param)
    # Same foot-of-perpendicular computation as perpendicular_line,
    # but the result is returned as a bounded segment.
    offset = [u - v for u, v in zip(p.args, moving.args)]
    ortho = sum(d*r for d, r in zip(offset, self.direction_ratio))
    sol = solve(ortho, param)
    foot = moving.subs(param, sol[0])
    return Segment3D(p, foot)
def projection(self, o):
    """Project a point, line, ray, or segment onto this linear entity.

    Parameters
    ==========

    o : Point3D or LinearEntity3D (Line3D, Ray3D, Segment3D)

    Returns
    =======

    projection : Point3D, or the result of intersecting self with the
        projected entity (hence the list in the example below).

    Raises
    ======

    GeometryError
        When the method is unable to perform the projection.

    Notes
    =====

    Each defining point of ``o`` is projected onto the infinite line
    carrying self (the foot of the perpendicular through that point);
    the projected entity is then rebuilt from those images and finally
    intersected with self, so the result is clipped to rays/segments.

    Examples
    ========

    >>> from sympy import Point3D, Line3D, Segment3D
    >>> p1, p2, p3 = Point3D(0, 0, 1), Point3D(1, 1, 2), Point3D(2, 0, 1)
    >>> l1 = Line3D(p1, p2)
    >>> l1.projection(p3)
    Point3D(2/3, 2/3, 5/3)
    >>> l1.projection(Segment3D(Point3D(10, 0, 1), Point3D(12, 1, 3)))
    [Segment3D(Point3D(10/3, 10/3, 13/3), Point3D(5, 5, 6))]
    """
    # Work on the full (infinite) line so feet of perpendiculars exist
    # even when self is a ray or segment.
    tline = Line3D(self.p1, self.p2)

    def _project(p):
        """Project a point onto the line representing self."""
        if p in tline:
            return p
        l1 = tline.perpendicular_line(p)
        return tline.intersection(l1)[0]

    projected = None
    if isinstance(o, Point3D):
        # A single point projects directly; no clipping needed.
        return _project(o)
    elif isinstance(o, LinearEntity3D):
        n_p1 = _project(o.p1)
        n_p2 = _project(o.p2)
        if n_p1 == n_p2:
            # Entity is perpendicular to self: it collapses to a point.
            projected = n_p1
        else:
            projected = o.__class__(n_p1, n_p2)

    # Didn't know how to project so raise an error
    if projected is None:
        n1 = self.__class__.__name__
        n2 = o.__class__.__name__
        raise GeometryError(
            "Do not know how to project %s onto %s" % (n2, n1))

    # Clip the projected entity to self (relevant for rays/segments).
    return self.intersection(projected)
def intersection(self, o):
    """The intersection with another geometrical entity.

    Parameters
    ==========

    o : Point3D or LinearEntity3D

    Returns
    =======

    intersection : list of geometrical entities (empty when disjoint;
        a single Point3D, Segment3D, or self/o when overlapping)

    Examples
    ========

    >>> from sympy import Point3D, Line3D, Segment3D
    >>> p1, p2, p3 = Point3D(0, 0, 0), Point3D(1, 1, 1), Point3D(7, 7, 7)
    >>> l1 = Line3D(p1, p2)
    >>> l1.intersection(p3)
    [Point3D(7, 7, 7)]
    >>> l1 = Line3D(Point3D(4,19,12), Point3D(5,25,17))
    >>> l2 = Line3D(Point3D(-3, -15, -19), direction_ratio=[2,8,8])
    >>> l1.intersection(l2)
    [Point3D(1, 1, -3)]
    """
    if isinstance(o, Point3D):
        if o in self:
            return [o]
        else:
            return []
    elif isinstance(o, LinearEntity3D):
        if self == o:
            return [self]
        elif self.is_parallel(o):
            # Parallel (possibly collinear) entities: the overlap is
            # decided case by case on the concrete subclass of self.
            if isinstance(self, Line3D):
                if o.p1 in self:
                    return [o]
                return []
            elif isinstance(self, Ray3D):
                if isinstance(o, Ray3D):
                    # case 1, rays in the same direction
                    if self.xdirection == o.xdirection and \
                            self.ydirection == o.ydirection and \
                            self.zdirection == o.zdirection:
                        # The ray starting further along contains the other.
                        return [self] if (self.source in o) else [o]
                    # case 2, rays in the opposite directions
                    else:
                        if o.source in self:
                            if self.source == o.source:
                                return [self.source]
                            return [Segment3D(o.source, self.source)]
                        return []
                elif isinstance(o, Segment3D):
                    if o.p1 in self:
                        if o.p2 in self:
                            return [o]
                        return [Segment3D(o.p1, self.source)]
                    elif o.p2 in self:
                        return [Segment3D(o.p2, self.source)]
                    return []
            elif isinstance(self, Segment3D):
                if isinstance(o, Segment3D):
                    # A reminder that the points of Segments are ordered
                    # in such a way that the following works. See
                    # Segment3D.__new__ for details on the ordering.
                    if self.p1 not in o:
                        if self.p2 not in o:
                            # Neither of the endpoints are in o so either
                            # o is contained in this segment or it isn't
                            if o in self:
                                return [o]
                            return []
                        else:
                            # p1 not in o but p2 is. Either there is a
                            # segment as an intersection, or they only
                            # intersect at an endpoint
                            if self.p2 == o.p1:
                                return [o.p1]
                            return [Segment3D(o.p1, self.p2)]
                    elif self.p2 not in o:
                        # p2 not in o but p1 is. Either there is a
                        # segment as an intersection, or they only
                        # intersect at an endpoint
                        if self.p1 == o.p2:
                            return [o.p2]
                        return [Segment3D(o.p2, self.p1)]
                    # Both points of self in o so the whole segment
                    # is in o
                    return [self]
            else:  # unrecognized LinearEntity
                raise NotImplementedError
        else:
            # If the lines are not parallel then solve their arbitrary points
            # to obtain the point of intersection
            t = t1, t2 = Dummy(), Dummy()
            a = self.arbitrary_point(t1)
            b = o.arbitrary_point(t2)
            dx = a.x - b.x
            # Intersect the x/y and x/z projections separately; skew
            # lines yield inconsistent (single-variable) solutions.
            c = solve([dx, a.y - b.y], t)
            d = solve([dx, a.z - b.z], t)
            if len(c) == 1 and len(d) == 1:
                return []
            e = a.subs(t1, c[t1])
            # Containment check clips the candidate to rays/segments.
            if e in self and e in o:
                return [e]
            else:
                return []
    return o.intersection(self)
def arbitrary_point(self, parameter='t'):
    """Return a Point3D parameterized along the entity.

    At ``parameter`` = 0 the result is ``p1``; at 1 it is ``p2``.
    Raises ValueError when the chosen symbol already appears in the
    entity's definition.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(1, 0, 0), Point3D(5, 3, 1)).arbitrary_point()
    Point3D(4*t + 1, 3*t, t)
    """
    t = _symbol(parameter)
    if t.name in (f.name for f in self.free_symbols):
        raise ValueError('Symbol %s already appears in object '
            'and cannot be used as a parameter.' % t.name)
    # Linear interpolation p1 + t*(p2 - p1), coordinatewise.
    coords = [simplify(a + t*(b - a))
              for a, b in zip(self.p1.args, self.p2.args)]
    return Point3D(*coords)
def is_similar(self, other):
    """Return True if self and ``other`` lie on the same infinite line.

    Only Line3D arguments are supported; anything else raises
    NotImplementedError.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> l1 = Line3D(Point3D(0, 0, 0), Point3D(1, 1, 1))
    >>> l2 = Line3D(Point3D(0, 0, 0), Point3D(2, 2, 2))
    >>> l1.is_similar(l2)
    True
    """
    if not isinstance(other, Line3D):
        raise NotImplementedError()
    return (self.direction_cosine == other.direction_cosine
            and other.p1 in self)
def __contains__(self, other):
    """Return a definitive answer or else raise an error if it cannot
    be determined that other is on the boundaries of self."""
    result = self.contains(other)
    if result is not None:
        return result
    else:
        # NOTE(review): ``Undecidable`` is not imported anywhere in this
        # module, so reaching this branch raises NameError instead of
        # the intended exception -- confirm the missing import upstream.
        raise Undecidable(
            "can't decide whether '%s' contains '%s'" % (self, other))
def contains(self, other):
    """Subclasses should implement this method and should return
    True if other is on the boundaries of self;
    False if not on the boundaries of self;
    None if a determination cannot be made.

    This base implementation always raises NotImplementedError.
    """
    raise NotImplementedError()
class Line3D(LinearEntity3D):
"""An infinite 3D line in space.
A line is declared with two distinct points or a point and direction_ratio
as defined using keyword `direction_ratio`.
Parameters
==========
p1 : Point3D
pt : Point3D
direction_ratio : list
See Also
========
sympy.geometry.point3d.Point3D
Examples
========
>>> import sympy
>>> from sympy import Point3D
>>> from sympy.abc import L
>>> from sympy.geometry import Line3D, Segment3D
>>> L = Line3D(Point3D(2, 3, 4), Point3D(3, 5, 1))
>>> L
Line3D(Point3D(2, 3, 4), Point3D(3, 5, 1))
>>> L.points
(Point3D(2, 3, 4), Point3D(3, 5, 1))
"""
def __new__(cls, p1, pt=None, direction_ratio=[], **kwargs):
    """Build a Line3D from two points, or a point plus direction_ratio.

    Exactly one of ``pt`` / ``direction_ratio`` must be supplied.
    Note: the ``[]`` default is never mutated here, only inspected.
    """
    if isinstance(p1, LinearEntity3D):
        # Copy-construct from any existing linear entity.
        p1, pt = p1.args
    else:
        p1 = Point3D(p1)
    if pt is not None and len(direction_ratio) == 0:
        try:
            pt = Point3D(pt)
        except NotImplementedError:
            raise ValueError('The 2nd argument was not a valid Point. '
            'If it was the direction_ratio of the desired line, enter it '
            'with keyword "direction_ratio".')
    elif len(direction_ratio) == 3 and pt is None:
        # Second point obtained by stepping once along the direction.
        pt = Point3D(p1.x + direction_ratio[0], p1.y + direction_ratio[1],
            p1.z + direction_ratio[2])
    else:
        raise ValueError('A 2nd Point or keyword "direction_ratio" must '
            'be used.')
    return LinearEntity3D.__new__(cls, p1, pt, **kwargs)
def plot_interval(self, parameter='t'):
    """Default plot interval ``[t, -5, 5]`` for a line.

    Produces a drawn line +/- 5 units long, where one unit is the
    distance between the two defining points.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> Line3D(Point3D(0, 0, 0), Point3D(5, 3, 1)).plot_interval()
    [t, -5, 5]
    """
    return [_symbol(parameter), -5, 5]
def equation(self, x='x', y='y', z='z', k='k'):
    """The symmetric-form equation of the line in 3D.

    Parameters
    ==========

    x : str, optional
        The name to use for the x-axis, default value is 'x'.
    y : str, optional
        The name to use for the y-axis, default value is 'y'.
    z : str, optional
        The name to use for the z-axis, default value is 'z'.
    k : str, optional
        The name of the common parameter symbol returned as the last
        tuple element, default value is 'k'.

    Returns
    =======

    equation : tuple of the three ratio expressions plus the symbol k.
        A zero direction component yields a ``zoo``-scaled term, as in
        the example below.

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> p1, p2 = Point3D(1, 0, 0), Point3D(5, 3, 0)
    >>> l1 = Line3D(p1, p2)
    >>> l1.equation()
    (x/4 - 1/4, y/3, zoo*z, k)
    """
    x, y, z, k = _symbol(x), _symbol(y), _symbol(z), _symbol(k)
    p1, p2 = self.points
    a = p1.direction_ratio(p2)
    return (((x - p1.x)/a[0]), ((y - p1.y)/a[1]),
        ((z - p1.z)/a[2]), k)
def contains(self, o):
    """Return True if o is on this Line, or False otherwise.

    Accepts a coordinate sequence, a Point3D, or a Line3D.  For other
    LinearEntity3D subclasses (Ray3D, Segment3D) this implementation
    falls through and implicitly returns None -- __contains__ then
    treats that as "undecidable".  TODO confirm that is intended.

    Examples
    ========

    >>> from sympy import Line3D
    >>> a = (0, 0, 0)
    >>> b = (1, 1, 1)
    >>> l1 = Line3D(a, b)
    >>> l2 = Line3D(b, a)
    >>> l1 == l2
    False
    >>> l1 in l2
    True
    """
    if is_sequence(o):
        o = Point3D(o)
    if isinstance(o, Point3D):
        # Substitute the point into each symmetric-form ratio; nan
        # entries correspond to zero direction components and carry
        # no constraint, so they are discarded.
        sym = list(map(Dummy, 'xyz'))
        eq = self.equation(*sym)
        a = [eq[i].subs(sym[i], o.args[i]) for i in range(3)]
        a = [i for i in a if i != nan]
        if len(a) == 1:
            return True
        first = a.pop(0)
        # All remaining ratios must agree with the first.
        for i in a:
            rv = first.equals(i)
            if not rv:
                return rv
        return True
    elif not isinstance(o, LinearEntity3D):
        return False
    elif isinstance(o, Line3D):
        return all(i in self for i in o.points)
def distance(self, o):
    """
    Finds the shortest distance between a line and a point.

    Raises
    ======

    NotImplementedError is raised if o is not an instance of Point3D
    (NOTE(review): the code never actually raises; a non-point,
    non-sequence argument falls through to the containment check --
    confirm intended behavior.)

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> p1, p2 = Point3D(0, 0, 0), Point3D(1, 1, 1)
    >>> s = Line3D(p1, p2)
    >>> s.distance(Point3D(-1, 1, 1))
    2*sqrt(6)/3
    >>> s.distance((-1, 1, 1))
    2*sqrt(6)/3
    """
    if not isinstance(o, Point3D):
        if is_sequence(o):
            o = Point3D(o)
    if o in self:
        return S.Zero
    # Perpendicular distance from the point to the (infinite) line.
    a = self.perpendicular_segment(o).length
    return a
def equals(self, other):
    """Return True when ``other`` is a Line3D describing the same
    infinite line (all four defining points collinear)."""
    return (isinstance(other, Line3D) and
            Point3D.are_collinear(self.p1, other.p1, self.p2, other.p2))
class Ray3D(LinearEntity3D):
"""
A Ray is a semi-line in the space with a source point and a direction.
Parameters
==========
p1 : Point3D
The source of the Ray
p2 : Point or a direction vector
direction_ratio: Determines the direction in which the Ray propagates.
Attributes
==========
source
xdirection
ydirection
zdirection
See Also
========
sympy.geometry.point3d.Point3D, Line3D
Examples
========
>>> import sympy
>>> from sympy import Point3D, pi
>>> from sympy.abc import r
>>> from sympy.geometry import Ray3D
>>> r = Ray3D(Point3D(2, 3, 4), Point3D(3, 5, 0))
>>> r
Ray3D(Point3D(2, 3, 4), Point3D(3, 5, 0))
>>> r.points
(Point3D(2, 3, 4), Point3D(3, 5, 0))
>>> r.source
Point3D(2, 3, 4)
>>> r.xdirection
oo
>>> r.ydirection
oo
>>> r.direction_ratio
[1, 2, -4]
"""
def __new__(cls, p1, pt=None, direction_ratio=[], **kwargs):
    """Build a Ray3D from a source plus either a second point or a
    ``direction_ratio`` keyword (exactly one of the two).

    Note: the ``[]`` default is only inspected, never mutated, so the
    shared-mutable-default pitfall does not apply here.
    """
    if isinstance(p1, LinearEntity3D):
        # Copy-construct from any existing linear entity.
        p1, pt = p1.args
    else:
        p1 = Point3D(p1)
    if pt is not None and len(direction_ratio) == 0:
        try:
            pt = Point3D(pt)
        except NotImplementedError:
            raise ValueError('The 2nd argument was not a valid Point. '
            'If it was the direction_ratio of the desired line, enter it '
            'with keyword "direction_ratio".')
    elif len(direction_ratio) == 3 and pt is None:
        # Second point obtained by stepping once along the direction.
        pt = Point3D(p1.x + direction_ratio[0], p1.y + direction_ratio[1],
            p1.z + direction_ratio[2])
    else:
        # Fixed missing space: the two adjacent literals previously
        # rendered as "mustbe used." (Line3D.__new__ has the space).
        raise ValueError('A 2nd Point or keyword "direction_ratio" must '
            'be used.')
    return LinearEntity3D.__new__(cls, p1, pt, **kwargs)
@property
def source(self):
    """The Point3D from which the ray emanates (alias for ``p1``).

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> Ray3D(Point3D(0, 0, 0), Point3D(4, 1, 5)).source
    Point3D(0, 0, 0)
    """
    return self.p1
@property
def xdirection(self):
    """Sign of the ray's x direction.

    ``oo`` when the ray points toward positive x, ``-oo`` toward
    negative x, and 0 when the x coordinate is constant.

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> Ray3D(Point3D(0, 0, 0), Point3D(1, 1, 1)).xdirection
    oo
    >>> Ray3D(Point3D(0, 0, 0), Point3D(0, -1, 0)).xdirection
    0
    """
    if self.p2.x > self.p1.x:
        return S.Infinity
    if self.p2.x == self.p1.x:
        return S.Zero
    return S.NegativeInfinity
@property
def ydirection(self):
    """Sign of the ray's y direction.

    ``oo`` when the ray points toward positive y, ``-oo`` toward
    negative y, and 0 when the y coordinate is constant.

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> Ray3D(Point3D(0, 0, 0), Point3D(-1, -1, -1)).ydirection
    -oo
    >>> Ray3D(Point3D(0, 0, 0), Point3D(-1, 0, 0)).ydirection
    0
    """
    if self.p2.y > self.p1.y:
        return S.Infinity
    if self.p2.y == self.p1.y:
        return S.Zero
    return S.NegativeInfinity
@property
def zdirection(self):
    """The z direction of the ray.

    Positive infinity if the ray points in the positive z direction,
    negative infinity if the ray points in the negative z direction,
    or 0 if the z coordinate is constant along the ray.

    See Also
    ========

    xdirection, ydirection

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> p1, p2, p3 = Point3D(0, 0, 0), Point3D(-1, -1, -1), Point3D(-1, 0, 0)
    >>> r1, r2 = Ray3D(p1, p2), Ray3D(p1, p3)
    >>> r1.zdirection
    -oo
    >>> r2.zdirection
    0
    """
    if self.p1.z < self.p2.z:
        return S.Infinity
    elif self.p1.z == self.p2.z:
        return S.Zero
    else:
        return S.NegativeInfinity
def distance(self, o):
    """
    Finds the shortest distance between the ray and a point.

    When the foot of the perpendicular from ``o`` falls on the ray the
    perpendicular distance is returned; otherwise the distance to the
    ray's source.

    Raises
    ======

    NotImplementedError is raised if o is not a Point

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> p1, p2 = Point3D(0, 0, 0), Point3D(1, 1, 2)
    >>> s = Ray3D(p1, p2)
    >>> s.distance(Point3D(-1, -1, 2))
    4*sqrt(3)/3
    >>> s.distance((-1, -1, 2))
    4*sqrt(3)/3
    """
    if not isinstance(o, Point3D):
        if is_sequence(o):
            o = Point3D(o)
    if o in self:
        return S.Zero
    s = self.perpendicular_segment(o)
    if not isinstance(s, Point3D):
        # Bug fix: select the endpoint that is NOT o -- i.e. the foot
        # of the perpendicular.  The original used ``==``, which bound
        # non_o to o itself whenever s.p1 == o, so the on-ray check
        # below always failed and the source distance was returned
        # even when the perpendicular foot lies on the ray.
        non_o = s.p1 if s.p1 != o else s.p2
        if self.contains(non_o):
            return Line3D(self).distance(o)  # = s.length but simpler
    # the following applies when neither of the above apply
    return self.source.distance(o)
def plot_interval(self, parameter='t'):
    """Default plot interval ``[t, 0, 10]`` for a ray.

    Produces a drawn ray 10 units long, where one unit is the distance
    between the two defining points.

    Examples
    ========

    >>> from sympy import Point3D, Ray3D
    >>> Ray3D(Point3D(0, 0, 0), Point3D(1, 1, 1)).plot_interval()
    [t, 0, 10]
    """
    return [_symbol(parameter), 0, 10]
def contains(self, o):
    """Is other GeometryEntity contained in this Ray?

    A Point3D must be collinear with the defining points and lie on
    the source's forward side; a Ray3D must be collinear with the same
    orientation; a Segment3D must have both endpoints contained.
    """
    if isinstance(o, Ray3D):
        return (Point3D.are_collinear(self.p1, self.p2, o.p1, o.p2) and
            self.xdirection == o.xdirection and
            self.ydirection == o.ydirection and
            self.zdirection == o.zdirection)
    elif isinstance(o, Segment3D):
        return o.p1 in self and o.p2 in self
    elif is_sequence(o):
        o = Point3D(o)
    if isinstance(o, Point3D):
        if Point3D.are_collinear(self.p1, self.p2, o):
            # Compare against the first axis along which the ray
            # actually moves.
            if self.xdirection is S.Infinity:
                rv = o.x >= self.source.x
            elif self.xdirection is S.NegativeInfinity:
                rv = o.x <= self.source.x
            elif self.ydirection is S.Infinity:
                rv = o.y >= self.source.y
            elif self.ydirection is S.NegativeInfinity:
                rv = o.y <= self.source.y
            elif self.zdirection is S.Infinity:
                # Bug fix: a ray pointing toward +z contains points at
                # or beyond the source's z (the original used ``<=``,
                # mirroring the -z branch, and rejected them).
                rv = o.z >= self.source.z
            else:
                rv = o.z <= self.source.z
            if rv == True or rv == False:
                return bool(rv)
            # NOTE(review): ``Undecidable`` is not imported in this
            # module, so this raise is currently a NameError -- confirm
            # the missing import upstream.
            raise Undecidable(
                'Cannot determine if %s is in %s' % (o, self))
        else:
            # Points are not collinear, so the rays are not parallel
            # and hence it is impossible for self to contain o
            return False

    # No other known entity can be contained in a Ray
    return False
def equals(self, other):
    """Return True when ``other`` is a Ray3D with the same source and
    the same direction (its second point lies on self)."""
    return (isinstance(other, Ray3D) and
            self.source == other.source and other.p2 in self)
class Segment3D(LinearEntity3D):
"""A undirected line segment in a 3D space.
Parameters
==========
p1 : Point3D
p2 : Point3D
Attributes
==========
length : number or sympy expression
midpoint : Point3D
See Also
========
sympy.geometry.point.Point3D, Line3D
Examples
========
>>> import sympy
>>> from sympy import Point3D
>>> from sympy.abc import s
>>> from sympy.geometry import Segment3D
>>> Segment3D((1, 0, 0), (1, 1, 1)) # tuples are interpreted as pts
Segment3D(Point3D(1, 0, 0), Point3D(1, 1, 1))
>>> s = Segment3D(Point3D(4, 3, 9), Point3D(1, 1, 7))
>>> s
Segment3D(Point3D(1, 1, 7), Point3D(4, 3, 9))
>>> s.points
(Point3D(1, 1, 7), Point3D(4, 3, 9))
>>> s.length
sqrt(17)
>>> s.midpoint
Point3D(5/2, 2, 8)
"""
def __new__(cls, p1, p2, **kwargs):
    """Build a Segment3D with canonically ordered endpoints.

    Note: coincident endpoints degenerate to a plain Point3D return,
    not a Segment3D.  The ordering invariant below is relied upon by
    LinearEntity3D.intersection.
    """
    # Reorder the two points under the following ordering:
    #   if p1.x != p2.x then p1.x < p2.x
    #   if p1.x == p2.x then p1.y < p2.y
    # The z-coordinate will not come into picture while ordering
    p1 = Point3D(p1)
    p2 = Point3D(p2)
    if p1 == p2:
        return Point3D(p1)
    # ``== True`` guards against symbolic comparisons that do not
    # evaluate to a plain bool.
    if (p1.x > p2.x) == True:
        p1, p2 = p2, p1
    elif (p1.x == p2.x) == True and (p1.y > p2.y) == True:
        p1, p2 = p2, p1
    return LinearEntity3D.__new__(cls, p1, p2, **kwargs)
def plot_interval(self, parameter='t'):
    """Default plot interval ``[t, 0, 1]`` covering the full segment.

    Examples
    ========

    >>> from sympy import Point3D, Segment3D
    >>> Segment3D(Point3D(0, 0, 0), Point3D(5, 3, 0)).plot_interval()
    [t, 0, 1]
    """
    return [_symbol(parameter), 0, 1]
@property
def length(self):
    """Euclidean distance between the segment's endpoints.

    Examples
    ========

    >>> from sympy import Point3D, Segment3D
    >>> Segment3D(Point3D(0, 0, 0), Point3D(4, 3, 3)).length
    sqrt(34)
    """
    return self.p1.distance(self.p2)
@property
def midpoint(self):
    """The point halfway between the segment's endpoints.

    Examples
    ========

    >>> from sympy import Point3D, Segment3D
    >>> Segment3D(Point3D(0, 0, 0), Point3D(4, 3, 3)).midpoint
    Point3D(2, 3/2, 3/2)
    """
    return self.p1.midpoint(self.p2)
def distance(self, o):
    """
    Finds the shortest distance between a line segment and a point.

    The projection parameter ``t`` locates the foot of the
    perpendicular on the carrier line; when it falls outside [0, 1]
    the nearest endpoint is used instead.

    Raises
    ======

    NotImplementedError is raised if o is not a Point3D

    Examples
    ========

    >>> from sympy import Point3D, Segment3D
    >>> p1, p2 = Point3D(0, 0, 3), Point3D(1, 1, 4)
    >>> s = Segment3D(p1, p2)
    >>> s.distance(Point3D(10, 15, 12))
    sqrt(341)
    >>> s.distance((10, 15, 12))
    sqrt(341)
    """
    if is_sequence(o):
        o = Point3D(o)
    if isinstance(o, Point3D):
        seg_vector = self.p2 - self.p1
        pt_vector = o - self.p1
        t = seg_vector.dot(pt_vector)/self.length**2
        if t >= 1:
            distance = Point3D.distance(self.p2, o)
        elif t <= 0:
            distance = Point3D.distance(self.p1, o)
        else:
            # Bug fix: the z component previously reused
            # ``t*seg_vector.y``, giving wrong distances whenever the
            # perpendicular foot lies strictly inside the segment.
            distance = Point3D.distance(
                self.p1 + Point3D(t*seg_vector.x, t*seg_vector.y,
                    t*seg_vector.z), o)
        return distance
    raise NotImplementedError()
def contains(self, other):
    """
    Is the other GeometryEntity contained within this Segment?

    A Segment3D is contained when both endpoints are; a Point3D is
    contained when it is collinear with the endpoints and the two
    endpoint distances sum to the segment length.

    Examples
    ========

    >>> from sympy import Point3D, Segment3D
    >>> p1, p2 = Point3D(0, 1, 1), Point3D(3, 4, 5)
    >>> s = Segment3D(p1, p2)
    >>> s2 = Segment3D(p2, p1)
    >>> s.contains(s2)
    True
    """
    if is_sequence(other):
        other = Point3D(other)
    if isinstance(other, Segment3D):
        return other.p1 in self and other.p2 in self
    elif isinstance(other, Point3D):
        if Point3D.are_collinear(self.p1, self.p2, other):
            # NOTE(review): this structural ``==`` may fail to
            # simplify for symbolic coordinates, reporting False for
            # points that are actually on the segment -- confirm.
            if other.distance(self.p1) + other.distance(self.p2) == self.length:
                return True
            else:
                return False
    return False
| sahilshekhawat/sympy | sympy/geometry/line3d.py | Python | bsd-3-clause | 40,855 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import functools
import tornado.web
import etc
__author__ = 'f0x11'
class BaseHandler(tornado.web.RequestHandler):
    """Common request handler gating all pages behind one shared cookie.

    Authentication is a single secure cookie compared against the value
    configured in ``etc.user_cookie``.
    """

    def get_current_user(self):
        # get_secure_cookie returns bytes (or None); etc.user_cookie
        # must be the same type for this equality to hold -- TODO confirm.
        user_cookie = self.get_secure_cookie("user")
        if user_cookie == etc.user_cookie:
            return user_cookie
        else:
            # NOTE(review): Tornado's contract is to return None for an
            # anonymous user; raising 401 here aborts any request that
            # merely reads ``current_user`` -- confirm this is intended.
            raise tornado.web.HTTPError(401)
def user_auth(func):
    """Decorator requiring a logged-in user on a handler method.

    Raises ``tornado.web.HTTPError(401)`` when ``handler.current_user``
    is falsy; otherwise delegates to the wrapped method and returns its
    result.  (The original wrapper never called ``func`` at all, so
    every decorated handler silently returned None.)
    """
    @functools.wraps(func)
    def wrapper(handler, *args, **kwargs):
        if not handler.current_user:
            raise tornado.web.HTTPError(401)
        # Bug fix: actually invoke the wrapped handler method.
        return func(handler, *args, **kwargs)
    return wrapper
| f0x11/Merak | handlers/base.py | Python | mit | 578 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# C R E D I T S
##################################################################
# (C) 2016 Gernot Ziegler, - released under Blender Artistic License - www.blender.org
# (C) 2016 Marco Alesiani, - released under Blender Artistic License - www.blender.org
# Date: February 9th, 2016
# Ver: 1.1
#-----------------------------------------------------------------
# U S A G E D E T A I L S
##################################################################
# Usage: File > Export > GeoCast file
#
# NOTICE: A valid camera must be selected in the scene
#
#-----------------------------------------------------------------
bl_info = {
"name": "GeoCast Exporter",
"author": "Gernot Ziegler, Marco Alesiani",
"version": (1, 0, 2),
"blender": (2, 76, 0),
"location": "File > Export > GeoCast Thingy",
"description": "Exports depth data and camera .geocast files",
"warning": "",
"wiki_url": "http://www.geofront.eu/",
"category": "Import-Export",
}
import bpy
from bpy.types import Operator
from bpy.props import (StringProperty, IntProperty, BoolProperty, EnumProperty)
from bpy_extras.io_utils import (ExportHelper)
# The main exporter class which creates the 'GeoCast Exporter' panel in the export window
# and starts the file saving process
class ExportGeoCast(bpy.types.Operator, ExportHelper):
    """Export the current scene to a GeoCast file """
    bl_idname = "export_geocast.folder"
    bl_label = "GeoCast Exporter"
    # Target directory chosen in the file browser; the filename itself is unused.
    directory = StringProperty(subtype='DIR_PATH', name = "", default = "", description = "Path where to save GeoCast data")
    check_extension = False
    filename_ext = '.' # Needed, bug?
    use_filter_folder = True # Export into a directory
    # filter_glob = StringProperty(default="*.geocast", options={'HIDDEN'}) # File filters
    # (identifier, name, description) tuples for the combo box
    export_sizes = (
        ("256", "256 x 256", ""),
        ("512", "512 x 512", ""),
        ("1024", "1024 x 1024", "")
    )
    export_size = EnumProperty(
        name="Export Size",
        description="Export size for all the output images",
        items=export_sizes,
        default='256'
    )
    frame_start = IntProperty(name="Start Frame",
        description="Start frame for exporting",
        default=1, min=1, max=300000)
    frame_end = IntProperty(name="End Frame",
        description="End frame for exporting (not included in the export data)",
        default=40, min=1, max=300000)
    export_colormap = BoolProperty(
        description="Export a color map together with the geocast file",
        name="Export color map",
        default = False)
    export_depthmap = BoolProperty(
        description="Export a depth map together with the geocast file",
        name="Export depth map",
        default = False)
    @classmethod
    def poll(cls, context):
        """Operator is available only while some object is active."""
        return context.active_object != None
    def invoke(self, context, event):
        """Open the directory-selection file browser."""
        self.filepath = "" # Clears the filename since we're not using it
        # Scene frame range is overridden with [1;40] by design
        # self.frame_start = context.scene.frame_start
        # self.frame_end = context.scene.frame_end
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
    def execute(self, context):
        """Validate the chosen path is a directory, then run the exporter."""
        import os
        directory = self.properties.directory
        if os.path.isdir(directory) == False:
            msg = "Please select a directory, not a file (" + directory + ")"
            self.report({'WARNING'}, msg)
            return {'FINISHED'}
        # filepath = bpy.path.ensure_ext(filepath, self.filename_ext)
        return exportToGeoCastFile(self, context, directory, self.export_size, (self.frame_start, self.frame_end))
    def draw(self, context): # UI setup
        """Lay out the operator options in the export side panel."""
        layout = self.layout
        layout.label("Camera Exporter options")
        layout.separator()
        layout.prop(self, "frame_start")
        layout.prop(self, "frame_end")
        layout.prop(self, "export_size", expand=True)
        layout.separator()
        layout.prop(self, "export_colormap")
        layout.prop(self, "export_depthmap")
# A simple progressbar in the console inspired by unix terminals
def updateProgressBar(task_title, percentage): # e.g. ("saving", 34 / 100)
    """Draw/refresh a one-line console progress bar.

    task_title -- short label shown in front of the bar.
    percentage -- completion ratio in [0, 1].
    """
    import sys
    bar_length = 20
    block_length = int(round(bar_length * percentage))
    # Bug fix: the original appended "\n" to every update (and "DONE" after
    # that newline), so each refresh printed a new line despite the comment
    # describing carriage-return overwriting. A leading "\r" rewinds to the
    # start of the line so successive calls overwrite the same line.
    msg = "\r[{0}] [{1}] {2}%".format(task_title, "#" * block_length + "-" * (bar_length - block_length), round(percentage * 100, 2))
    if percentage >= 1: msg += " DONE\n"
    sys.stdout.write(msg)
    sys.stdout.flush()
# Exporting routine
def exportToGeoCastFile(self, context, output_path, export_size, export_frame_range):
    """Render every selected camera over [frame_start, frame_end) and write,
    per frame, an OpenEXR depth render plus a .geocast camera-description
    file (and optionally PNG color/depth maps) into a per-camera folder.

    Raises Exception if any selected object is not a camera. Scene render
    settings are temporarily overridden and restored per camera afterwards.
    """
    import os
    print ("@@@@@@@@@@ START EXPORTING ROUTINE @@@@@@@@@@@@@@\n")
    print ('\n')
    print ('|| GeoCast exporter script V1.03 ||\n')
    print ('|| February 2016, Marco Alesiani ||\n')
    version = bl_info["version"]
    # Debug info to be displayed in the terminal
    print ('Width and height is: ', export_size)
    # Set output path
    print ('Output path is: ', output_path)
    # Cycle through all selected objects (i.e. through all selected cameras)
    for camera_object in context.selected_objects:
        # Check if this camera_object is actually a camera
        if camera_object.type != 'CAMERA':
            raise Exception("[ERROR] - Object with name '" + camera_object.name + "' is not a camera")
        print ("Found camera with name " + camera_object.name);
        # Create subdirectory "CameraName/" where to store OpenEXR and GeoCast files
        output_camera_path = output_path + camera_object.name
        if not os.path.exists(output_camera_path):
            os.makedirs(output_camera_path)
        context.scene.render.filepath = output_camera_path + os.sep;
        # Set up nodes to export the colormap and/or depthmap if asked to do so, plus a bunch of other safety options
        nodes_to_cleanup = []
        # Snapshot the scene/render settings we override; restored after the loop.
        old_use_nodes = context.scene.use_nodes
        old_use_compositing = context.scene.render.use_compositing
        old_sequencer_colorspace_settings_name = context.scene.sequencer_colorspace_settings.name
        old_camera_sensor_height = camera_object.data.sensor_height
        old_resolution_percentage = context.scene.render.resolution_percentage
        if camera_object.data.sensor_height != camera_object.data.sensor_width:
            print ("[WARNING] - Camera '" + camera_object.name + "'' has sensor width different than sensor height. " +
                   "Different sensor dimensions aren't supported. Overriding sensor height to match sensor width.")
            camera_object.data.sensor_height = camera_object.data.sensor_width
        context.scene.render.use_compositing = True
        context.scene.use_nodes = True
        context.scene.sequencer_colorspace_settings.name = "Raw"
        context.scene.render.resolution_percentage = 100
        nodes = context.scene.node_tree.nodes
        render_layers = nodes['Render Layers']
        if self.export_colormap == True:
            # Compositor chain: Render Layers -> SetAlpha -> PNG file output.
            set_alpha = nodes.new("CompositorNodeSetAlpha")
            nodes_to_cleanup.append(set_alpha)
            color_file_output = nodes.new("CompositorNodeOutputFile")
            nodes_to_cleanup.append(color_file_output)
            color_file_output.base_path = context.scene.render.filepath + "colormap"
            color_file_output.format.file_format = 'PNG'
            color_file_output.format.color_mode = 'RGBA'
            color_file_output.format.color_depth = '8' # Needed by geofront webgl viewers
            color_file_output.file_slots[0].path = '' # No prefix to filenames
            # Connections
            context.scene.node_tree.links.new(
                render_layers.outputs['Image'],
                set_alpha.inputs['Image']
            )
            context.scene.node_tree.links.new(
                render_layers.outputs['Alpha'],
                set_alpha.inputs['Alpha']
            )
            context.scene.node_tree.links.new(
                set_alpha.outputs['Image'],
                color_file_output.inputs['Image']
            )
        if self.export_depthmap == True:
            # Compositor chain: Z pass -> MapRange [0, clip_end]->[0,1] -> ramp -> PNG.
            map_range = nodes.new("CompositorNodeMapRange")
            nodes_to_cleanup.append(map_range)
            map_range.inputs[1].default_value = 0
            map_range.inputs[2].default_value = camera_object.data.clip_end
            map_range.inputs[3].default_value = 0
            map_range.inputs[4].default_value = 1
            color_ramp = nodes.new("CompositorNodeValToRGB")
            nodes_to_cleanup.append(color_ramp)
            color_ramp.color_ramp.color_mode = 'RGB'
            color_ramp.color_ramp.interpolation = 'LINEAR'
            depth_file_output = nodes.new("CompositorNodeOutputFile")
            nodes_to_cleanup.append(depth_file_output)
            depth_file_output.base_path = context.scene.render.filepath + "depthmap"
            depth_file_output.format.file_format = 'PNG'
            depth_file_output.format.color_mode = 'BW'
            depth_file_output.format.color_depth = '8' # Needed by geofront webgl viewers
            depth_file_output.file_slots[0].path = '' # No prefix to filenames
            # Connections
            context.scene.node_tree.links.new(
                render_layers.outputs['Z'],
                map_range.inputs['Value']
            )
            context.scene.node_tree.links.new(
                map_range.outputs['Value'],
                color_ramp.inputs['Fac']
            )
            context.scene.node_tree.links.new(
                color_ramp.outputs['Image'],
                depth_file_output.inputs['Image']
            )
        for frameNr in range(export_frame_range[0], export_frame_range[1]):
            updateProgressBar("Exporting GeoCast data", frameNr / export_frame_range[1])
            # Save OpenEXR with depth data (color channels are only used for debugging purposes)
            context.scene.camera = camera_object
            context.scene.frame_start = frameNr
            context.scene.frame_end = frameNr
            context.scene.frame_step = 1
            context.scene.frame_current = frameNr
            context.scene.render.pixel_aspect_x = 1
            context.scene.render.pixel_aspect_y = 1
            context.scene.render.use_file_extension = True
            context.scene.render.image_settings.color_mode ='RGB' # Alpha might not be present, pick RGB
            context.scene.render.image_settings.file_format = 'OPEN_EXR'
            context.scene.render.image_settings.exr_codec = 'ZIP'
            # context.scene.render.image_settings.color_depth = '16' # Half float
            context.scene.render.image_settings.use_zbuffer = True
            context.scene.render.resolution_x = int(export_size)
            context.scene.render.resolution_y = int(export_size)
            context.scene.render.use_raytrace = False # Speeds things up considerably
            # Handles markers in a timeline animation (i.e. if there's a marker, set our current camera
            # as the rendering one - restore afterwards)
            markersForThisFrame = [x for x in context.scene.timeline_markers if x.frame == frameNr]
            previousCameras = []
            if len(markersForThisFrame) > 0:
                for marker in markersForThisFrame:
                    previousCameras.append(marker.camera)
                    marker.camera = camera_object
            else:
                context.scene.timeline_markers.new('GEOCASTMARKER', frameNr)
                context.scene.timeline_markers['GEOCASTMARKER'].camera = camera_object
            # Update the scene before rendering or gathering camera data
            context.scene.update()
            bpy.ops.render.render(animation=True) # Render
            # Write the geocast file corresponding to this frame
            cm = camera_object.matrix_world
            #print ("Camera Location is", cm)
            loc = camera_object.location.to_tuple()
            #print ("Camera Position is", loc)
            geocastFilename = context.scene.render.filepath + str(frameNr).zfill(4) + ".geocast"
            FILE = open(geocastFilename, "w")
            FILE.write('GeoCast V1.0\n')
            FILE.write('# Made with GeoCast Exporter Blender Addon V%d.%d.%d\n' % (version[0], version[1], version[2]))
            if camera_object.animation_data is None:
                FILE.write("StaticCamera\n")
            else:
                FILE.write("DynamicCamera\n")
            locstr = 'Pos %.02f %.02f %.02f\n' % loc
            #print (locstr)
            FILE.write(locstr)
            viewslicestr = 'ViewSlice FODAngle %.02f Size %.02f\n' % (145, 1000)
            #print (viewslicestr)
            FILE.write(viewslicestr)
            # World matrix is column-major stored
            cam_modelmat_str = 'ModelviewMatrix\n%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n' % \
                (cm[0][0], cm[1][0], cm[2][0], cm[3][0], \
                 cm[0][1], cm[1][1], cm[2][1], cm[3][1], \
                 cm[0][2], cm[1][2], cm[2][2], cm[3][2], \
                 cm[0][3], cm[1][3], cm[2][3], cm[3][3])
            FILE.write(cam_modelmat_str)
            #print (cam_modelmat_str)
            clipstart = camera_object.data.clip_start
            clipend = camera_object.data.clip_end
            # print("Camera type is " + camera_object.data.type + "\n")
            if camera_object.data.type == 'ORTHO': # Orthogonal
                scale = camera_object.data.ortho_scale
                dataprojstr = 'DataProject Ortho WindowSize %.02f %.02f ProjRange %.02f %.02f\n' % (scale, scale, clipstart, clipend)
                #print (dataprojstr)
                FILE.write(dataprojstr)
            else: # Perspective
                # lens = camera_object.data.lens
                # Obsolete: dataprojstr = 'DataProject BlenderPerspective Aspect %.02f Lens %.04f ClipRange %.02f %.02f\n' % (1.0, lens, clipstart, clipend)
                fovy = camera_object.data.angle_y
                fovx = camera_object.data.angle_x
                fov = camera_object.data.angle
                pi = 3.14159265358979323846
                fovy_deg = fovy/pi*180
                fovx_deg = fovx/pi*180
                dataprojstr = 'DataProject Perspective Fovy %f Aspect %f ClipRange %.05f %.05f\n' % (fovy_deg, fovx/fovy, clipstart, clipend)
                #print (dataprojstr)
                FILE.write(dataprojstr)
            FILE.write("WorldSpaceDepth\n")
            # NOTE(review): fov/fovx/fovx_deg/fovy/fovy_deg are only assigned in
            # the Perspective branch above, so an ORTHO camera raises NameError
            # on the next line; fovstr is also built but never written to FILE.
            fovstr = 'FoV %.03f FoVx %.03f Fovx_deg %f Fovy %.03f Fovy_deg %f\n' % (fov, fovx, fovx_deg, fovy, fovy_deg)
            # print(fovstr)
            rangestr = 'ZDataRange 0.0 1.0\n'
            #print (rangestr)
            FILE.write(rangestr)
            FILE.close()
            print ("Saved: ", geocastFilename)
            # Restore markers, if any
            if len(markersForThisFrame) > 0:
                for idx, val in enumerate(previousCameras):
                    markersForThisFrame[idx].camera = val
            else:
                context.scene.timeline_markers.remove(context.scene.timeline_markers['GEOCASTMARKER'])
        for node in nodes_to_cleanup: # Cleanup nodes from the original graph
            nodes.remove(node)
        context.scene.render.use_compositing = old_use_compositing
        context.scene.use_nodes = old_use_nodes
        context.scene.sequencer_colorspace_settings.name = old_sequencer_colorspace_settings_name
        context.scene.render.resolution_percentage = old_resolution_percentage
        camera_object.data.sensor_height = old_camera_sensor_height
    print ("@@@@@@@@@@ END EXPORTING ROUTINE @@@@@@@@@@@@@@\n")
    print("This was geocast exporter plugin V%d.%d.%d" % (version[0], version[1], version[2]))
    return {'FINISHED'}
def menu_export(self, context):
    """File > Export menu entry that launches the GeoCast exporter operator."""
    layout = self.layout
    layout.operator(ExportGeoCast.bl_idname, text="GeoCast data (.geocast)")
def register():
    """Register the add-on's classes and hook the File > Export menu entry."""
    bpy.utils.register_module(__name__)
    bpy.types.INFO_MT_file_export.append(menu_export)
def unregister():
    """Unregister the add-on's classes and remove the export menu entry."""
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_export.remove(menu_export)
# Allow running the script directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
| geofront-eu/geocast_export | geocast_export.py | Python | gpl-3.0 | 17,459 |
import sys
from invoke import ctask as task
@task(help={
    'module': "Just runs tests/STRING.py.",
    'runner': "Use STRING to run tests instead of 'spec'.",
    'opts': "Extra flags for the test runner",
    'pty': "Whether to run tests under a pseudo-tty",
})
def test(c, module=None, runner=None, opts=None, pty=True):
    """
    Run a Spec or Nose-powered internal test suite.
    """
    cmd = runner or 'spec'
    # Restrict the run to a single submodule when one was requested.
    flags = " --tests=tests/%s.py" % module if module else ""
    if opts:
        flags = "%s %s" % (flags, opts)
    # Timing info is always on. OPINIONATED
    flags += " --with-timing"
    # A pty by default lets the spec/nose/Python process buffer "correctly".
    c.run(cmd + flags, pty=pty)
@task
def coverage(c, package=None):
    """
    Run tests w/ coverage enabled, generating HTML, & opening it.

    Honors the 'coverage.package' config path, which supplies a default value
    for the ``package`` kwarg if given.
    """
    # Fail fast when the coverage tool is not installed.
    if not c.run("which coverage", hide=True, warn=True).ok:
        sys.exit("You need to 'pip install coverage' to use this task!")
    opts = ""
    # The configured package name takes precedence over the kwarg.
    package = c.config.get('coverage', {}).get('package', package)
    if package is not None:
        # TODO: make omission list more configurable
        opts = "--include='{0}/*' --omit='{0}/vendor/*'".format(package)
    test(c, opts="--with-coverage --cover-branches")
    c.run("coverage html {0}".format(opts))
    c.run("open htmlcov/index.html")
| singingwolfboy/invocations | invocations/testing.py | Python | bsd-2-clause | 1,547 |
import sys
import math
# region Classes
class FrequencyChart(object):
    """A grouped frequency table parsed from "frequency,lower,upper" strings."""

    def __init__(self, args):
        self.groups = [FrequencyGroup(raw) for raw in args]

    def mean(self):
        """Frequency-weighted mean of the group midpoints."""
        weighted_sum = sum(g.frequency * g.midpoint() for g in self.groups)
        return weighted_sum / self.total()

    def total(self):
        """Sum of all group frequencies."""
        return sum(g.frequency for g in self.groups)

    def standard_deviation(self):
        """Sample standard deviation for the grouped data."""
        n = self.total()
        sum_fm = sum(g.frequency * g.midpoint() for g in self.groups)
        sum_fm2 = sum(g.frequency * g.midpoint() ** 2 for g in self.groups)
        numerator = n * sum_fm2 - sum_fm ** 2
        return math.sqrt(numerator / (n * (n - 1)))
class FrequencyGroup(object):
    """One row of a frequency table, parsed from a "frequency,lower,upper" string."""

    def __init__(self, raw):
        parts = raw.split(',')
        self.frequency = float(parts[0])
        self.lower = float(parts[1])
        self.upper = float(parts[2])

    def midpoint(self):
        """Class-interval midpoint (delegates to the module-level helper)."""
        return midpoint([self.lower, self.upper])
# endregion
# region Functions
def alpha_to_table(cl, precision=2):
    """Convert a confidence level to its z/t-table lookup value (0.95 -> 0.975)."""
    tail = (1.0 - cl) / precision
    return 1.0 - tail
def bound(value, r, direction=1):
    """Shift *value* by *r* in the given direction (+1 upward, -1 downward)."""
    offset = r * direction
    return value + offset
def find_fractile(subscript, parts, samplesize):
    """Position (rounded up) of the subscript-th fractile out of *parts*."""
    ratio = subscript / parts
    return math.ceil(ratio * samplesize)
def find_sample_size(z, precision, error):
    """Sample size needed: z^2 * precision / error^2."""
    z_squared = z ** 2
    return z_squared * precision / (error ** 2)
def lower_bound(value, r=0.5):
    """Lower class boundary: *value* shifted down by *r*."""
    return bound(value, r, direction=-1)
def margin_error(z_alpha, p, n):
    """Margin of error for a sample proportion p with sample size n."""
    variance_term = (p * (1.0 - p)) / n
    return z_alpha * math.sqrt(variance_term)
def margin_error_t(t_alpha, n, s):
    """Margin of error using the t distribution: t * s / sqrt(n)."""
    standard_error = s / math.sqrt(n)
    return t_alpha * standard_error
def mean(values):
    """Arithmetic mean of *values* (raises ZeroDivisionError on empty input)."""
    total = sum(values)
    return total / len(values)
def median_class(values):
    """Index of the class holding the median, given class frequencies; -1 if none."""
    half_total = sum(values) / 2
    running = 0.0
    for index, frequency in enumerate(values):
        running += frequency
        if running >= half_total:
            return index
    return -1
def median_value(sum_of_all_classes, sum_of_preceding_classes, frequency_of_median_class):
    """Interpolated position of the median inside its median class."""
    half_position = (sum_of_all_classes + 1) / 2
    offset = sum_of_preceding_classes + 1
    return (half_position - offset) / frequency_of_median_class
def midpoint(values):
    """Midpoint of a class interval given its [lower, upper] stated limits."""
    low = lower_bound(values[0])
    high = upper_bound(values[1])
    return (low + high) / 2
def mu(values):
    # NOTE(review): this divides the sum by the data *range* (max - min), not
    # by the sample count, so it is not the textbook population mean --
    # confirm this is intentional before reusing it elsewhere.
    val_range = stat_range(values)
    sum_x = sum(values)
    return sum_x / val_range
# NOTE: the public name contains a typo ("freqeuncy") but is kept as-is for
# backward compatibility with existing callers.
def parse_freqeuncy_group(raw):
    return FrequencyGroup(raw)
def percentile_value(valuesLess, totalValues, parts=100):
    """Percentile rank of a value given how many observations fall below it."""
    fraction = valuesLess / totalValues
    return fraction * parts
def pop_standard_deviation_comp(values):
    # NOTE(review): both this function and mu() use the data *range*
    # (max - min) where the sample count n belongs in the textbook population
    # formula sqrt(sum(x^2)/n - mean^2) -- verify before relying on it.
    val_range = stat_range(values)
    _mu = mu(values)
    sum_x_power_2 = power_sum(values)
    left_side = sum_x_power_2 / val_range
    right_side = math.pow(_mu, 2)
    return math.sqrt(left_side - right_side)
def range_rule_thumb(hi, lo):
    """Rough standard-deviation estimate: the range divided by four."""
    spread = hi - lo
    return spread / 4.0
def required_sample_size(z_alpha, s, e):
    """Sample size needed for margin of error *e* given z and std deviation s."""
    ratio = (z_alpha * s) / e
    return ratio ** 2
def power_sum(values, power=2):
    """Sum of each value raised to *power* (floats, via math.pow)."""
    return sum(math.pow(v, power) for v in values)
def stat_range(values):
    """Difference between the largest and smallest value."""
    ordered = sorted(values)
    return ordered[-1] - ordered[0]
def standard_deviation_comp(values):
    # NOTE(review): `val_range` here is max - min, yet it is used where the
    # sample count n belongs in the textbook sample-deviation formula
    # sqrt((n*sum(x^2) - (sum x)^2) / (n*(n-1))) -- verify before relying on it.
    val_range = stat_range(values)
    def top_left():
        return val_range * sum(math.pow(x, 2) for x in values)
    def top_right():
        return math.pow(sum(values), 2)
    def bottom():
        return val_range * (val_range - 1)
    left_and_right = top_left() - top_right()
    return math.sqrt(left_and_right / bottom())
def upper_bound(value, r=0.5):
    """Upper class boundary: *value* shifted up by *r*."""
    return bound(value, r, direction=1)
def variance(values):
    """Sample variance: the square of standard_deviation_comp."""
    return standard_deviation_comp(values) ** 2
def variance_pop(values):
    """Population variance: the square of pop_standard_deviation_comp."""
    return pop_standard_deviation_comp(values) ** 2
def z_score(value, mean_value, sd):
    """Number of standard deviations *value* lies from *mean_value*."""
    deviation = value - mean_value
    return deviation / sd
# endregion
| Chronotron/chaos-beast | MA120/statistics/statistical_functions.py | Python | mit | 4,050 |
import os
from pathlib import Path
from sys import platform as _platform
def main():
    """Pick the Dropbox music folder for the current OS and purge playlists.

    NOTE(review): the folder locations are hard-coded, machine-specific paths.
    """
    print('Platform: {}'.format(_platform))
    if _platform == "linux":
        music_folder = "/home/harish/Dropbox/Music"
    elif _platform == "win32":
        music_folder = "J:\Dropbox\Music"
    else:
        # Fallback branch (covers e.g. macOS, where _platform is "darwin").
        music_folder = "/Users/Harish/Dropbox/Music"
    # music_folder = "/home/harish/test/Mixed"
    print("Music folder path: {}".format(music_folder))
    print("Deleting all playlist files..........")
    clean_m3u(music_folder)
def clean_m3u(music_folder):
    """Recursively delete every playlist (*.m3u) file under *music_folder*.

    The extension check is case-insensitive; each removed path is printed.
    """
    for dir_name, sub_dir_list, file_list in os.walk(music_folder):
        for file_name in file_list:
            # Bug fix: os.path.join keeps the path portable. The original
            # hard-coded '/' separators, which is wrong on the win32 branch
            # that main() explicitly supports.
            full_path = os.path.join(dir_name, file_name)
            if Path(full_path).suffix.lower() == '.m3u':
                os.remove(full_path)
                print(full_path)
    print("Removed all playlist(*.m3u) files..........")
# Script entry point.
if __name__ == '__main__':
    main()
| harishdinne/mp3_watermark_remover | remove_m3u_files.py | Python | gpl-3.0 | 951 |
# coding: utf-8
from src.util import Observable
from wrapt import synchronized
import math
class Cell(Observable):
    """One Sudoku cell: keeps its remaining candidate values and notifies
    observing peer cells (same row, column or 3x3 box) once it is solved."""
    def __init__(self, row, col):
        super(Cell, self).__init__()
        # Start with all nine candidates; peers eliminate them via update().
        self.values = [i+1 for i in range(9)]
        self.is_solved = False
        self.row = row
        self.col = col
    def __iter__(self):
        """Iterate over the remaining candidate values."""
        for val in self.values:
            yield val
    def set_value(self, value):
        """Fix the cell to *value* and broadcast it to observing peers."""
        self.values = [value]
        self.is_solved = True
        self.notify_observers(value)
    def get_value(self):
        """Solved value, or 0 while the cell is still open."""
        return self.values[0] if self.is_solved else 0
    def add_observer(self, cells):
        """Register every row/column/box peer in the NxN grid *cells* as an
        observer of this cell, so solving it propagates eliminations."""
        N = len(cells)
        for i in range(N):
            for j in range(N):
                if i == self.row and j == self.col: continue
                is_sline = (i == self.row) or (j == self.col)
                is_sbox = (i//3 == self.row//3) and (j//3 == self.col//3)
                if is_sline or is_sbox:
                    #if self.row == self.col == 5:
                    #    print(self.row, self.col, i, j)
                    super(Cell, self).add_observer(cells[i][j])
    @synchronized
    def update(self, value):
        """Observer callback: drop *value* from the candidates; when exactly
        one candidate remains, solve the cell (cascading to its peers).
        Guarded by wrapt's @synchronized for thread-safe notification."""
        if self.is_solved or value == 0: return
        #if self.row == 5 and self.col == 5:
        #    print(value, self.values)
        if value in self.values: self.values.remove(value)
        if not self.is_solved and len(self.values) == 1:
            self.set_value(self.values[0])
'''
Representa el tablero del Sudoku
'''
class Board(object):
    """The Sudoku board: an NxN grid of Cells wired up as mutual observers."""
    def __init__(self, N = 9):
        self.cells = [[Cell(i,j) for j in range(N)] for i in range(N)]
        # Register observers: each cell watches its row/column/box peers.
        for i in range(N):
            for j in range(N):
                self.cells[i][j].add_observer(self.cells)
    def __str__(self):
        """Pretty-print the grid, followed by the per-cell candidate lists."""
        N = len(self)
        str = '+---------+---------+---------+\n'
        for i in range(N):
            str += '|'
            for j in range(N):
                value = self.cells[i][j].get_value()
                str += ' {} '.format(value if value != 0 else '.')
                if (j + 1) % 3 == 0:
                    str += '|'
            str += '\n'
            if (i + 1) % 3 == 0:
                str += '+---------+---------+---------+\n'
        str += '\n' + self.candidates()
        return str
    def __len__(self):
        return len(self.cells)
    # The input puzzle may be a list of rows or a flat string.
    def setup(self, puzzle):
        """Load a puzzle given as a list of row lists or an N*N-char string
        ('.' marks an empty cell in the string form)."""
        if isinstance(puzzle, list):
            self._setup_from_list(puzzle)
        elif isinstance(puzzle, str):
            self._setup_from_str(puzzle)
    def _setup_from_list(self, puzzle):
        N = len(self)
        assert len(puzzle) == N
        for i in range(N):
            for j in range(N):
                if puzzle[i][j] == 0: continue
                # Bug fix: Cell's method is set_value, not setValue; the
                # original raised AttributeError for any list input.
                self.cells[i][j].set_value(puzzle[i][j])
    def _setup_from_str(self, puzzle):
        N = len(self)
        #assert math.sqrt(len(puzzle)) == N
        for i in range(N):
            for j in range(N):
                c = puzzle[i*N+j]
                if c == '.': continue
                self.cells[i][j].set_value(int(c))
    def set_value(self, row, col, value):
        """Solve the cell at (row, col) with *value*."""
        self.cells[row][col].set_value(value)
    def candidates(self):
        """Return a multi-line string listing the remaining candidates per cell."""
        N = len(self)
        str = ''
        for i in range(N):
            for j in range(N):
                str += 'cells[{}][{}] : '.format(i, j)
                for k in self.cells[i][j]:
                    str += '{} '.format(k)
                str += '\n'
        return str
# Demo driver: read the first puzzle line from sudoku.txt (skipping the two
# header lines) and print the board after constraint propagation.
fd = open('sudoku.txt')
fd.readline()
fd.readline()
p = fd.readline()
b = Board()
b.setup(p)
print(b)
| h3ct0rjs/ProgrammingIVassignments | Lab5Sudoku/src/sudoku1.py | Python | mpl-2.0 | 3,142 |
# -*- encoding: utf-8 -*-
import sqlite3, sys
# Подготавливаем данные БД
# Prepare the database test data: the letters a..z (Python 2 script: note
# xrange and print statements below).
alphabet=[chr(i) for i in xrange(97,123)]
# Create the first (in-memory) database and populate it
con=sqlite3.connect(':memory:')
cur=con.cursor()
cur.execute('create table test(s)')
cur.executemany('insert into test values (?)',alphabet)
con.commit()
# Dump the first database's contents to an SQL file
with open('/tmp/dump.sql', 'w') as f:
    for line in con.iterdump():
        f.write('%s\n' % line)
con.close()
# Create the second (in-memory) database
con=sqlite3.connect(':memory:')
cur=con.cursor()
# Replay the dump of the first database into the second one
dump=open('/tmp/dump.sql')
for i in dump:
    try:
        cur.execute(i)
    except:
        print sys.exc_info()
dump.close()
# Verify the data made it across
cur.execute('select * from test')
print cur.fetchall() | janusnic/21v-pyqt | unit_06/func7.py | Python | mit | 976 |
# ! /usr/bin/env python
# _*_ coding:utf-8 _*_
"""
@author = lucas.wang
@create_time = 2018-02-07
"""
# coding=utf-8
import requests
import time
from lxml import etree
import sys
sys.path.append(r'D:\CodeWorkspace\python\GeneralTools')
from Ip import Get_Proxies
# Desktop-browser User-Agent so the target site serves its normal pages.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
}
def getUrl():
    """Crawl 33 listing pages, retrying each page with a fresh random proxy
    until it succeeds.

    NOTE(review): the broad Exception catch retries forever if a page can
    never be fetched; ip_error_number is counted but never used to cap the
    retries -- consider adding a limit.
    """
    ip = Get_Proxies.Get_proxies()
    ip_url = 'http://www.xicidaili.com/nn/'
    ip_list = ip.get_ip_list(ip_url, headers=headers)
    ip_len = len(ip_list)
    ip_error_number = 0
    for i in range(33):
        is_success = True
        while is_success:
            is_success = False
            try:
                proxy_ip = ip.get_random_ip(ip_list)
                print(proxy_ip)
                for_spider_page(i, proxy_ip)
            except Exception as e:
                # Any failure: pick another proxy and retry the same page.
                ip_error_number += 1
                is_success = True
        ip_error_number = 0
        # url = 'http://task.zbj.com/t-ppsj/p{}s5.html'.format(i+1)
        #
        # proxy_ip = ip.get_random_ip(ip_list)
        # print(proxy_ip)
        #
        # spiderPage(url, proxy_ip)
def for_spider_page(index, proxy_ip):
    """Build the listing URL for page *index* (0-based) and crawl it."""
    page_number = index + 1
    url = 'http://task.zbj.com/t-ppsj/p{}s5.html'.format(page_number)
    # Alternative categories kept from the original for reference:
    # url = 'http://task.zbj.com/t-rjkf/p{}s5.html'.format(page_number)
    # url = 'http://task.zbj.com/t-wxptkf/p{}s5.html'.format(page_number)
    spiderPage(url, proxy_ip)
def spiderPage(url, proxy_ip):
    """Fetch one listing page through *proxy_ip* and print each task row,
    then follow every task link via spiderDetail. Best effort: any failure
    is printed and swallowed."""
    if url is None:
        return None
    try:
        proxies = {
            'http': proxy_ip,
        }
        htmlText = requests.get(url, headers=headers, proxies=proxies).text
        selector = etree.HTML(htmlText)
        tds = selector.xpath('//*[@class="tab-switch tab-progress"]/table/tr')
        for td in tds:
            price = td.xpath('./td/p/em/text()')
            href = td.xpath('./td/p/a/@href')
            title = td.xpath('./td/p/a/text()')
            subTitle = td.xpath('./td/p/text()')
            deadline = td.xpath('./td/span/text()')
            price = price[0] if len(price)>0 else '' # conditional expression: first match or ''
            title = title[0] if len(title)>0 else ''
            href = href[0] if len(href)>0 else ''
            subTitle = subTitle[0] if len(subTitle)>0 else ''
            deadline = deadline[0] if len(deadline)>0 else ''
            print(price,title,href,subTitle,deadline)
            print('---------------------------------------------------------------------------------------')
            spiderDetail(href)
    except Exception as e:
        print('出错', e)
def spiderDetail(url):
    """Fetch a task detail page and print its key fields.

    Best-effort scraper: any failure is reported and swallowed, matching
    spiderPage's error-handling style.
    """
    if url is None:
        return None
    try:
        htmlText = requests.get(url).text
        selector = etree.HTML(htmlText)
        aboutHref = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/div/p[1]/a/@href')
        price = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/div/p[1]/text()')
        title = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/h2/text()')
        contentDetail = selector.xpath('//*[@id="utopia_widget_10"]/div[2]/div/div[1]/div[1]/text()')
        publishDate = selector.xpath('//*[@id="utopia_widget_10"]/div[2]/div/div[1]/p/text()')
        # Conditional expression: keep the first match or fall back to ''.
        aboutHref = aboutHref[0] if len(aboutHref) > 0 else ''
        price = price[0] if len(price) > 0 else ''
        title = title[0] if len(title) > 0 else ''
        contentDetail = contentDetail[0] if len(contentDetail) > 0 else ''
        publishDate = publishDate[0] if len(publishDate) > 0 else ''
        print(aboutHref, price, title, contentDetail, publishDate)
    except Exception as e:
        # Bug fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt and hid the error; narrow it and surface the
        # exception, consistent with spiderPage.
        print('出错', e)
if __name__ == '__main__':
getUrl() | Lucas-Wong/ToolsProject | Crawler/zhibajie.py | Python | gpl-3.0 | 4,021 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point: select the project settings
    # module, then dispatch the command-line arguments to Django.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "udbproject.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| BFriedland/UserDataBase | manage.py | Python | mit | 253 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the autocomplete example."""
from __future__ import absolute_import
import unittest
from nose.plugins.attrib import attr
import apache_beam as beam
from apache_beam.examples.complete import autocomplete
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_utils import compute_hash
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
class AutocompleteTest(unittest.TestCase):
  """Tests for the autocomplete Beam pipeline example."""

  # Small fixed corpus for the unit test.
  WORDS = ['this', 'this', 'that', 'to', 'to', 'to']
  # Expected checksum of the kinglear top-10 results (integration test).
  KINGLEAR_HASH_SUM = 268011785062540
  KINGLEAR_INPUT = 'gs://dataflow-samples/shakespeare/kinglear.txt'

  def test_top_prefixes(self):
    """Checks the top-5 completions per prefix over the fixed word list."""
    with TestPipeline() as p:
      words = p | beam.Create(self.WORDS)
      result = words | autocomplete.TopPerPrefix(5)
      # values must be hashable for now
      result = result | beam.Map(lambda k_vs: (k_vs[0], tuple(k_vs[1])))
      assert_that(result, equal_to(
          [
              ('t', ((3, 'to'), (2, 'this'), (1, 'that'))),
              ('to', ((3, 'to'), )),
              ('th', ((2, 'this'), (1, 'that'))),
              ('thi', ((2, 'this'), )),
              ('this', ((2, 'this'), )),
              ('tha', ((1, 'that'), )),
              ('that', ((1, 'that'), )),
          ]))

  @attr('IT')
  def test_autocomplete_it(self):
    """Integration test: compares a checksum of the top-10 kinglear results."""
    with TestPipeline(is_integration_test=True) as p:
      words = p | beam.io.ReadFromText(self.KINGLEAR_INPUT)
      result = words | autocomplete.TopPerPrefix(10)
      # values must be hashable for now
      result = result | beam.Map(lambda k_vs: [k_vs[0],
                                               k_vs[1][0][0], k_vs[1][0][1]])
      checksum = (result
                  | beam.Map(lambda x: int(compute_hash(x)[:8], 16))
                  | beam.CombineGlobally(sum))
      assert_that(checksum, equal_to([self.KINGLEAR_HASH_SUM]))
# Standard unittest entry point.
if __name__ == '__main__':
  unittest.main()
| markflyhigh/incubator-beam | sdks/python/apache_beam/examples/complete/autocomplete_test.py | Python | apache-2.0 | 2,702 |
from setuptools import setup
# Minimal placeholder package definition; exists only so the directory is
# installable as a no-op distribution.
setup(name='pre-commit-dummy-package', version='0.0.0')
| philipgian/pre-commit | pre_commit/resources/empty_template/setup.py | Python | mit | 87 |
# -*- coding: utf-8 -*-
"""
Toolbox for interstellar Reddening of models or dereddening of data, and
interstellar extinction laws.
Author: R. Lombaert
How does reddening in ComboCode work?
First and foremost, we define the interstellar extinction in the Johnson K band.
The magnitude of extinction, Ak, is derived from extinction models. Either from
Marshall et al. 2006, or from Schlegel et al. 1998 if not available from the
former. Ak requires the distance to be given, before it can be applied to a
model. All of our modeling requires a distance as input, so this is not an issue
in practice. If needed, the extinction model can be changed to a different
3d galactic map from marshall (or Schlegel), see getAk.
A note on the extinction models. Schlegel et al say that they could not verify
their values at |bb| < 5 degrees, so they should not be trusted. However, Sans
Fuentes et al 2014 checked those values by comparing them with OGLE maps for the
|ll| < 10 degrees region and found a very good match. Common practice is to
compare model values with measurements, in -- for instance -- the IRSA catalog.
http://irsa.ipac.caltech.edu/applications/DUST/ (based on COBE/DIRBE maps)
Here, you can find total extinction in several bands. This then needs to be
converted to the distance you're looking at for your source. It is more
convenient to use galactic 3D models and do this exercise once for a given
direction to see how accurate the models are.
The interstellar extinction law of preference is that of Chiar and Tielens 2006.
This law is given normalized per Ak and can be directly combined with the
interstellar extinction given from Marshall or Schlegel. We use the curve for the
local ISM. The alternative is a curve for the galactic center, but even in the
direction of the galactic center the local ISM doesn't change much in terms of
dust extinction, except at very large distances on the order of 5kpc or more. We
don't work with sources at those distances for now, so we can safely ignore it.
For completeness, the GC curve is made as well and provided as an option in the
reddening module of IvS repo.
However, while Marshall gives Ak and presents no issue, other maps give Av. To
convert Av to Ak, we have to convert the V-band normalization of Drimmel to
K-band normalization. Chiar and Tielens, however, derived a law only in the IR
hence no V-band normalization can be defined. We need a different interstellar
reddening law in V-band to be compared with the infrared law of the
former. The most recent V-band reddening law is given by Fitzpatrick 2004.
We therefore created our own interstellar reddening law from the combination of
Fitzpatrick 2004 up to the wavelength where Chiar and Tielens 2006 begins. They
match almost identically in the overlapping region, following a power law of the
form lambda**-1.8. From there, the combined law follows Chiar and Tielens, and
is extrapolated to further wavelengths with the same power law with power -1.8
as mentioned by Chiar & Tielens 2006 between 2 and 5 micron. At long
wavelengths, the extinction becomes negligible, so the extrapolation is barely
noticeable, but maintains consistency.
To convert Fitzpatrick 2004 to Ak, we do need to assume a Av to Ak conversion
that does not take into account Chiar and Tielens. The latter suggest their law
can be converted back to Av with a factor of ak/av = 0.09, which is in very
good agreement with the factor derived from fitzpatrick 2004 itself: 0.088.
Using this self-constructed reddening law, we can now convert Av to Ak from
Drimmel, and then apply that Ak together with the self-constructed reddening law
to redden our models. We use the IvS repository for the reddening.
"""
import os
from scipy import hstack, array
import numpy as np
import cc.path
from cc.tools.io import DataIO
import cc.ivs.sed.reddening as red
import cc.ivs.sed.extinctionmodels as em
def getAk(ll,bb,distance=None,map='marshall',law='fitz2004chiar2006',\
          lawtype='ism'):

    '''
    Find the Johnson K-band interstellar extinction at given galactic
    coordinates for a given galactic extinction model.

    The default model is marshall. Because the marshall map does not cover
    the whole sky, schlegel is used as a fallback whenever the marshall
    lookup comes back empty for the requested (ll,bb).

    Alternatives are arenou, drimmel and schlegel, see the IvS repo.

    @param ll: The galactic longitude of the star
    @type ll: float
    @param bb: The galactic latitude of the star
    @type bb: float

    @keyword distance: Distance to the star. Default is None, in which case
                       the full extinction to infinity in a given direction
                       is returned

                       (default: None)
    @type distance: float
    @keyword map: The galactic 3d extinction model.

                  (default: 'marshall')
    @type map: str
    @keyword law: The reddening law

                  (default: 'fitz2004chiar2006')
    @type law: str
    @keyword lawtype: The type of Chiar & Tielens reddening law (either ism
                      or gc). Only when relevant.

                      (default: 'ism')
    @type lawtype: str

    @return: The interstellar extinction magnitude in K-band
    @rtype: float

    '''

    #-- Query the requested 3d extinction model.
    ak = em.findext(lng=ll, lat=bb, distance=distance, model=map,
                    redlaw=law, norm='Ak', curve=lawtype)

    #-- Marshall does not cover the full sky: fall back on schlegel when the
    #   marshall lookup came back empty.
    if map == 'marshall' and not ak:
        map = 'schlegel'
        ak = em.findext(lng=ll, lat=bb, distance=distance, model=map,
                        redlaw=law, norm='Ak', curve=lawtype)

    #-- Drimmel returns a sequence; only the first element is the extinction.
    return ak[0] if map == 'drimmel' else ak
def redden(wave,flux,ak,law='Fitz2004Chiar2006',lawtype='ism'):

    '''
    Redden model fluxes, correcting for interstellar extinction.

    Flux is assumed to be flux, and not magnitudes! For dereddening, pass
    -ak instead.

    The reddening law can be chosen, but should probably be Fitz2004Chiar2006
    as it was tailored to infrared reddening of AGB sources in the Solar
    neighbourhood.

    @param wave: The wavelength grid
    @type wave: array
    @param flux: The flux from the models
    @type flux: array
    @param ak: The interstellar reddening magnitude in Johnson K-band
    @type ak: float

    @keyword law: The reddening law

                  (default: 'Fitz2004Chiar2006')
    @type law: str
    @keyword lawtype: The type of Chiar & Tielens reddening law (either ism
                      or gc)

                      (default: 'ism')
    @type lawtype: str

    @return: The reddened fluxes
    @rtype: array

    '''

    #-- Sample the requested law on the model wavelength grid. The wavelength
    #   grid returned by get_law itself is not needed any further.
    law_wave, a_ak = red.get_law(name=law, wave=wave, curve=lawtype,
                                 norm='Ak', wave_units='micron')

    #-- A_lambda = (A_lambda/A_K)*A_K; convert the magnitude of extinction
    #   into a flux attenuation factor.
    attenuation = 10**(a_ak*ak/2.5)
    return flux/attenuation
def combineRedLaw(ofn,chiar_curve='ism',power=-1.8):

    '''
    A method to combine the Fitzpatrick 2004 and Chiar & Tielens 2006 reddening
    laws as well as to extrapolate Chiar and Tielens 2006 to longer wavelengths.

    The result is saved in a file and used by the IvS repository as a valid
    reddening law.

    @param ofn: The output filename with path
    @type ofn: str

    @keyword chiar_curve: The curve type for Chiar & Tielens 2006. Either 'gc'
                          or 'ism'.

                          (default: 'ism')
    @type chiar_curve: str
    @keyword power: The power for the power law extrapolation. Default is taken
                    from Chiar and Tielens 2006, as a typical value for local
                    ISM between 2 and 5 micron. gc may require different value
                    but not very important.

                    (default: -1.8)
    @type power: float

    '''

    chiar_curve = chiar_curve.lower()

    #-- Extract the two relevant extinction laws.
    xchiar, a_ak_chiar = red.get_law('chiar2006',norm='Ak',wave_units='micron',\
                                     curve=chiar_curve)
    xfitz, a_ak_fitz = red.get_law('fitzpatrick2004',norm='Ak',\
                                   wave_units='micron')

    #-- Define a power law for the extrapolation
    def power_law(x,scale,power): return scale*(x)**power

    #-- Determine the scaling factor from the last point of the Chiar &
    #   Tielens law, so the extrapolated tail connects continuously to it.
    scale = a_ak_chiar[-1]/(xchiar[-1]**power)

    #-- Create an x grid for longer wavelengths.
    xlong = np.linspace(xchiar[-1]+0.1,1000,1000)
    a_ak_long = power_law(xlong,scale,power)

    #-- Combine the three sections: Fitzpatrick below the Chiar & Tielens
    #   range, Chiar & Tielens itself, then the power-law extrapolation.
    #   Use np.hstack: the top-level scipy re-exports of NumPy functions
    #   (scipy.hstack) are deprecated and removed in recent SciPy releases.
    xcom = np.hstack([xfitz[xfitz<xchiar[0]],xchiar,xlong])
    a_ak_com = np.hstack([a_ak_fitz[xfitz<xchiar[0]],a_ak_chiar,a_ak_long])

    #-- Write the result to a file: a header comment first, then the columns.
    comments = '#-- wavelength (micron) A_lambda/A_k\n'
    DataIO.writeCols(filename=ofn,cols=[[comments]])
    DataIO.writeCols(filename=ofn,cols=[xcom,a_ak_com],mode='a')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# aiorest documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 14 19:59:09 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.intersphinx']

# Cross-references to standard-library objects resolve against the Python 3
# documentation via intersphinx.
intersphinx_mapping = {'python': ('http://docs.python.org/3', None)}

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'aiorest'
copyright = '2014, Andrew Svetlov'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
release = '0.4.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Read the Docs exports READTHEDOCS=True in its build environment; only pick a
# theme locally, falling back to 'pyramid' when sphinx_rtd_theme is missing.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    try:
        import sphinx_rtd_theme
    except ImportError:
        html_theme = 'pyramid'
    else:
        html_theme = 'sphinx_rtd_theme'
        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'aiorestdoc'

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'aiorest.tex', 'aiorest Documentation',
   'Andrew Svetlov', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'aiorest', 'aiorest Documentation',
     ['Andrew Svetlov'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): the description field below is still the sphinx-quickstart
# placeholder text.
texinfo_documents = [
  ('index', 'aiorest', 'aiorest Documentation',
   'Andrew Svetlov', 'aiorest', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
from django.contrib import admin
from models import Perfil
# Register your models here.
# Expose Perfil in the admin index with the default ModelAdmin options.
admin.site.register(Perfil)
from time import sleep
from gui_version import App
from tests.gui.app_testing_bed import ComboApEnSampEnTestingBed
from tests.gui.common import check_report
def test_combo_apen_sampen_wo_windows(qtbot):
    """End-to-end ApEn/SampEn run with the windowed-calculation option
    toggled (run without windows, per the test name)."""
    window = App()
    qtbot.addWidget(window)
    test_app = ComboApEnSampEnTestingBed(qtbot=qtbot, window=window)
    qtbot.waitForWindowShown(window)
    # Toggle the windows checkbox -- presumably enabled by default, so this
    # run is without windowing; TODO confirm the default state in App.
    test_app.press_windows_cb()
    # Deselect the other analyses so only ApEn/SampEn are computed.
    test_app.press_pertropy_cb()
    test_app.press_cordim_cb()
    test_app.press_fracdim_cb()
    # Calculate must stay disabled until an input file is chosen.
    assert not test_app.calculate_btn_state()
    test_app.choose_any_file()
    assert test_app.calculate_btn_state()
    test_app.press_calculate_btn()
    test_app.wait_until_calculation_is_done()
    # The completion dialog must not be an error; it yields the report path.
    path = test_app.check_modal_not_error()
    check_report(path)
    # qtbot.stopForInteraction()
def test_combo_apen_sampen_w_windows(qtbot):
    """End-to-end ApEn/SampEn run leaving the windows checkbox in its default
    state (run with windows, per the test name)."""
    window = App()
    qtbot.addWidget(window)
    test_app = ComboApEnSampEnTestingBed(qtbot=qtbot, window=window)
    qtbot.waitForWindowShown(window)
    # Unlike the wo_windows variant, the windows checkbox is not pressed here.
    # Deselect the other analyses so only ApEn/SampEn are computed.
    test_app.press_pertropy_cb()
    test_app.press_cordim_cb()
    test_app.press_fracdim_cb()
    # Calculate must stay disabled until an input file is chosen.
    assert not test_app.calculate_btn_state()
    test_app.choose_any_file()
    assert test_app.calculate_btn_state()
    test_app.press_calculate_btn()
    test_app.wait_until_calculation_is_done()
    # The completion dialog must not be an error; it yields the report path.
    path = test_app.check_modal_not_error()
    check_report(path)
| demid5111/approximate-enthropy | tests/gui/combo_apen_sampen_test_e2e.py | Python | mit | 1,363 |
#!/usr/bin/python
import sys, os, commands, time, re, copy
# Prefer PyQt4, falling back on PySide. Small shims let the rest of the code
# use one naming scheme (Signal/Slot, QString) regardless of the binding.
try:
    from PyQt4 import QtCore, QtGui
    QtCore.Signal = QtCore.pyqtSignal
    QtCore.Slot = QtCore.pyqtSlot
except ImportError:
    try:
        from PySide import QtCore, QtGui
        QtCore.QString = str
    except ImportError:
        raise ImportError("Cannot load either PyQt or PySide")

from GenSyntax import *

# QString.fromUtf8 is not available in every binding/API combination;
# fall back to the identity function when missing.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class YamlData():
    """
    Holds the application's YAML syntax tree (a list of nested dicts with
    'name' and 'subblocks' keys) and provides lookup of a node by its name.
    """

    def __init__(self, qt_app, app_path, recache, use_cached_syntax):
        self.qt_app = qt_app
        self.app_path = app_path
        self.use_cached_syntax = use_cached_syntax
        self.gen_syntax = GenSyntax(qt_app, app_path, use_cached_syntax)
        self.yaml_data = self.gen_syntax.GetSyntax(recache)

    def recache(self, recache):
        """Re-fetch the syntax tree, optionally forcing a cache rebuild."""
        self.yaml_data = self.gen_syntax.GetSyntax(recache)

    def recursiveYamlDataSearch(self, path, current_yaml):
        """Depth-first search for the node whose 'name' equals path.

        Returns the matching dict, or None when the subtree has no match.
        """
        if current_yaml['name'] == path:
            return current_yaml

        children = current_yaml['subblocks']
        if not children:
            # Leaf node: nothing further to search.
            return None

        for child in children:
            match = self.recursiveYamlDataSearch(path, child)
            if match:
                # Found in this child's subtree.
                return match
        return None

    def findYamlEntry(self, path):
        """Search every top-level tree for a node named path; None if absent."""
        for root in self.yaml_data:
            found = self.recursiveYamlDataSearch(path, root)
            if found:
                return found

        # This means it wasn't found
        return None
| Chuban/moose | gui/utils/YamlData.py | Python | lgpl-2.1 | 1,666 |
#!/usr/bin/env python
#
#
# Copyright (C) Canonical, Inc 2012
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class SMB347:
    """Decoder for SMB347 battery-charger I2C register traffic.

    process_transaction() is fed one I2C data byte at a time: the first byte
    of a pair selects a register, the second is the value read or written.
    Decoded accesses are printed to stdout (Python 2 print statements).
    """

    # constants for smb347: register address -> register name
    smb347_regs = {
        0x00:'CHARGE',
        0x01:'CHRG_CRNTS',
        0x02:'VRS_FUNC',
        0x03:'FLOAT_VLTG',
        0x04:'CHRG_CTRL',
        0x05:'STAT_TIME_CTRL',
        0x06:'PIN_CTRL',
        0x07:'THERM_CTRL',
        0x08:'SYSOK_USB3',
        0x09:'CTRL_REG',
        0x0A:'OTG_TLIM_REG',
        0x0B:'HRD_SFT_TEMP',
        0x0C:'FAULT_INTR',
        0x0D:'STS_INTR_1',
        0x0E:'I2C_ADDR',
        0x10:'IN_CLTG_DET',
        0x11:'STS_INTR_2',

        # Command registers
        0x30:'CMD_REG',
        0x31:'CMD_REG_B',
        0x33:'CMD_REG_C',

        # Interrupt Status registers
        0x35:'INTR_STS_A',
        0x36:'INTR_STS_B',
        0x37:'INTR_STS_C',
        0x38:'INTR_STS_D',
        0x39:'INTR_STS_E',
        0x3A:'INTR_STS_F',

        # Status registers
        0x3B:'STS_REG_A',
        0x3C:'STS_REG_B',
        0x3D:'STS_REG_C',
        0x3E:'STS_REG_D',
        0x3F:'STS_REG_E',
    }
    #'ENABLE_WRITE	1
    #'DISABLE_WRITE	0
    #ENABLE_WRT_ACCESS	0x80
    #'ENABLE_APSD	0x04
    #'HC_MODE	0x01
    #'USB_5_9_CUR	0x02
    #'PIN_CTRL	0x10
    #'THERM_CTRL	0x10
    #'BATTERY_MISSING	0x10
    #'CHARGING	0x06
    #'DEDICATED_CHARGER	0x02
    #'CHRG_DOWNSTRM_PORT	0x04
    #'ENABLE_CHARGE	0x02
    #'ENABLE_CHARGER		1
    #'DISABLE_CHARGER	0
    #'USBIN	0x80
    #'APSD_OK	0x08
    #'APSD_RESULT	0x07
    #'APSD_CDP	0x01
    #'APSD_DCP	0x02
    #'APSD_OTHER	0x03
    #'APSD_SDP	0x04
    #'USB_30	0x20
    # ================================================

    # Parser state for the two-byte register/value protocol. These are
    # class-level defaults; they are rebound per instance on first use.
    in_reg_transaction = False   # True after a register address byte is seen
    temp_register = None         # address of the register being accessed
    regname = None               # decoded name of that register

    def dump(self, data, labels):
        """ Print list of bit masks and their values in data byte.
            If bitmask in dictionary labels, substitute labels[bitmask] for bitmask,
            and display enable status.
            Label INOK uses a special value display."""
        mask = 0x80
        # Walk the bits from MSB to LSB, printing one field per bit.
        while mask > 0:
            bitval = bool(data & mask)
            label = labels.get(mask)
            if not label:
                print '[%#4.2x = %d]' % (mask, bitval),
            else:
                if label == 'INOK':
                    values = ['not Active High', 'Active High    ']
                else:
                    values = ['Disabled', 'Enabled ']
                print '[%s %s]' % (label, values[bitval]),
            mask >>= 1
        print
        return

    def dump_generic(self, data):
        # No labels: every bit is shown as [mask = value].
        self.dump(data, {})

    def dump_usb3(self, data):
        # SYSOK_USB3 register: bit 0 is the INOK polarity flag.
        self.dump(data, {0x01: 'INOK'})

    def dump_command(self, data):
        # CMD_REG: write-access, OTG and charge-enable control bits.
        self.dump(data, {0x80: 'WRT Access', 0x10: 'OTG', 0x02: 'Charge'})

    def finish_register_access(self, deltatime, rw, data):
        """Second byte of a pair: print the decoded register access."""
        if rw == "Read":
            rwtext = "(R)"
        else:
            rwtext = "(W)"
        print "smb347 : %s %s, data = 0x%0.2x" % (self.regname.ljust(12), rwtext,data),
        if self.temp_register == 0x30:  # CMD_REG
            self.dump_command(data)
        elif self.temp_register == 0x08:  # SYSOK_USB3
            self.dump_usb3(data)
        else:
            print
        self.in_reg_transaction = False
        return

    def process_transaction(self, deltatime, rw, data):
        """Feed one byte of I2C traffic into the two-byte state machine."""
        if self.in_reg_transaction:
            self.finish_register_access(deltatime, rw, data)
        elif data in self.smb347_regs:
            # This is the first half of a register access
            self.regname = self.smb347_regs[data]
            self.in_reg_transaction = True
            #print "%s:" % self.regname,
            self.temp_register = data
        else:
            print "smb347: Unknown register address %d, skipping" % data
| sconklin/nexus-tools | smb347.py | Python | gpl-3.0 | 4,154 |
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from cloudify_agent import VIRTUALENV
from cloudify_agent.api import utils, exceptions
from cloudify_agent.api.pm.base import GenericLinuxDaemonMixin
class SystemDDaemon(GenericLinuxDaemonMixin):

    """
    Daemon implementation on top of the systemd process manager.

    Beyond the key-word arguments available on the base daemon, this
    implementation takes no additional custom arguments.
    """

    # Where systemd unit files are installed.
    SCRIPT_DIR = '/usr/lib/systemd/system/'
    # Where the environment/configuration file is installed.
    CONFIG_DIR = '/etc/sysconfig'
    PROCESS_MANAGEMENT = 'systemd'

    def configure(self):
        # Let the base class render the unit/config files, then register the
        # unit with systemd and reload the daemon configuration.
        super(SystemDDaemon, self).configure()
        self._runner.run(self._systemctl_command('enable'))
        self._runner.run('sudo systemctl daemon-reload')

    def _delete(self):
        # Unregister the unit so it no longer starts at boot.
        self._runner.run(self._systemctl_command('disable'))

    def _systemctl_command(self, command):
        # Build a 'sudo systemctl <command> <service>' invocation string.
        return 'sudo systemctl %s %s' % (command, self.service_name)

    def stop_command(self):
        return self._systemctl_command('stop')

    def start_command(self):
        # The unit file must have been written by configure() first.
        if not os.path.isfile(self.script_path):
            raise exceptions.DaemonNotConfiguredError(self.name)
        return self._systemctl_command('start')

    def status_command(self):
        return self._systemctl_command('status')

    def _get_script_path(self):
        # Unit files live under SCRIPT_DIR and are named <service>.service.
        return os.path.join(self.SCRIPT_DIR, self.service_name + '.service')

    def _get_rendered_script(self):
        self._logger.debug('Rendering SystemD script from template')
        return utils.render_template_to_file(
            template_path='pm/systemd/systemd.template',
            virtualenv_path=VIRTUALENV,
            user=self.user,
            queue=self.queue,
            config_path=self.config_path,
            max_workers=self.max_workers,
            name=self.name,
            extra_env_path=self.extra_env_path,
        )

    def _get_rendered_config(self):
        self._logger.debug('Rendering configuration script "{0}" from template'
                           .format(self.config_path))
        return utils.render_template_to_file(
            template_path='pm/systemd/systemd.conf.template',
            workdir=self.workdir,
            rest_host=self.rest_host,
            rest_port=self.rest_port,
            local_rest_cert_file=self.local_rest_cert_file,
            log_level=self.log_level.upper(),
            log_dir=self.log_dir,
            log_max_bytes=self.log_max_bytes,
            log_max_history=self.log_max_history,
            name=self.name,
            executable_temp_path=self.executable_temp_path,
            user=self.user,
            storage_dir=utils.internal.get_storage_directory(self.user),
        )
| cloudify-cosmo/cloudify-agent | cloudify_agent/api/pm/systemd.py | Python | apache-2.0 | 3,410 |
from django.contrib import admin
from django.db import models
from mock import Mock
from django_browserid.admin import BrowserIDAdminSite
from django_browserid.tests import TestCase
class BrowserIDAdminSiteTests(TestCase):
    def test_copy_registry(self):
        """
        copy_registry should register the ModelAdmins from the given
        site on the BrowserIDAdminSite.
        """
        source_site = admin.AdminSite()
        browserid_site = BrowserIDAdminSite()

        class TestModel(models.Model):
            pass

        class TestModelAdmin(admin.ModelAdmin):
            pass

        # Stub out register so we can assert on how it is called.
        browserid_site.register = Mock()
        source_site.register(TestModel, TestModelAdmin)

        browserid_site.copy_registry(source_site)
        browserid_site.register.assert_any_call(TestModel, TestModelAdmin)

    def test_copy_registry_multiple(self):
        """
        copy_registry should carry over every registration, including
        ModelAdmin classes shared between several models.
        """
        source_site = admin.AdminSite()
        browserid_site = BrowserIDAdminSite()

        class TestModel(models.Model):
            pass

        class TestModel2(models.Model):
            pass

        class TestModel3(models.Model):
            pass

        class TestModelAdmin(admin.ModelAdmin):
            pass

        class TestModel2Admin(admin.ModelAdmin):
            pass

        registrations = [
            (TestModel, TestModelAdmin),
            (TestModel2, TestModel2Admin),
            (TestModel3, TestModelAdmin),
        ]

        # Stub out register so we can assert on how it is called.
        browserid_site.register = Mock()
        for model, model_admin in registrations:
            source_site.register(model, model_admin)

        browserid_site.copy_registry(source_site)
        for model, model_admin in registrations:
            browserid_site.register.assert_any_call(model, model_admin)
| jicksy/oneanddone_test | vendor-local/lib/python/django_browserid/tests/test_admin.py | Python | mpl-2.0 | 1,750 |
##
# Copyright (c) 2005 Apple Computer, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# DRI: Wilfredo Sanchez, wsanchez@apple.com
##
"""
RFC 3253 (Versioning Extensions to WebDAV) XML Elements
This module provides XML element definitions for use with WebDAV.
See RFC 3253: http://www.ietf.org/rfc/rfc3253.txt
"""
from higgins.http.dav.element.base import *
##
# Section 1
##
class Error (WebDAVElement):
    """
    Specifies an error condition. (RFC 3253, section 1.6)
    """
    # FIXME: RFC 3253 doesn't quite seem to define this element...
    # FIXME: Move when we update to RFC 2518bis
    name = "error"

    # Any WebDAV element may appear, any number of times, as the error detail.
    allowed_children = { WebDAVElement: (0, None) }
##
# Section 3
##
class Comment (WebDAVTextElement):
    """
    Property used to track a brief comment about a resource that is suitable for
    presentation to a user. On a version, can be used to indicate why that
    version was created. (RFC 3253, section 3.1.1)
    """
    name = "comment"
    # hidden: presumably excluded from allprop listings -- see element.base.
    hidden = True
class CreatorDisplayName (WebDAVTextElement):
    """
    Property which contains a description of the creator of the resource that is
    suitable for presentation to a user. (RFC 3253, section 3.1.2)
    """
    name = "creator-displayname"
    hidden = True
class SupportedMethod (WebDAVElement):
    """
    Property which identifies a method that is supported by a resource. A method
    is supported by a resource if there is some state of that resource for which
    an application of that method will successfully satisfy all postconditions
    of that method, including any additional postconditions added by the
    features supported by that resource. (RFC 3253, section 3.1.3)
    """
    name = "supported-method"
    hidden = True

    # The method itself is carried in the "name" attribute; arbitrary child
    # elements are permitted.
    allowed_children = { WebDAVElement: (0, None) }
    allowed_attributes = { "name": True }
class SupportedMethodSet (WebDAVElement):
    """
    Property which identifies the methods that are supported by a resource. (RFC
    3253, section 3.1.3)
    """
    name = "supported-method-set"
    # Protected properties may not be modified directly by clients (RFC 3253).
    protected = True
    hidden = True

    # Zero or more DAV:supported-method children.
    allowed_children = { (dav_namespace, "supported-method"): (0, None) }
class SupportedLiveProperty (WebDAVElement):
    """
    Property which identifies a live property that is supported by a resource. A
    live property is supported by a resource if that property has the semantics
    defined for that property. The value of this property must identify all
    live properties defined by this document that are supported by the resource
    and should identify all live properties that are supported by the resource.
    (RFC 3253, section 3.1.4)
    """
    name = "supported-live-property"

    # FIXME: Where is the name element defined?
    # Exactly one DAV:name child naming the live property.
    allowed_children = { (dav_namespace, "name"): (1, 1) }
class SupportedLivePropertySet (WebDAVElement):
    """
    Property which identifies the live properties that are supported by a
    resource. (RFC 3253, section 3.1.4)
    """
    name = "supported-live-property-set"
    hidden = True
    # Protected properties may not be modified directly by clients (RFC 3253).
    protected = True

    allowed_children = { (dav_namespace, "supported-live-property"): (0, None) }
class Report (WebDAVElement):
    """
    A report. (RFC 3253, section 3.1.5)
    """
    # FIXME: Section 3.1.5 is pretty low on information.  Where else do we look?
    name = "report"
    # The report body is free-form: any WebDAV elements, any number of times.
    allowed_children = { WebDAVElement: (0, None) }
class SupportedReport (WebDAVElement):
    """
    Identifies a report that is supported by the resource. (RFC 3253, section
    3.1.5)
    """
    name = "supported-report"

    #
    # FIXME:
    #
    #   RFC 3253, section 3.1.5 defines supported-report as:
    #
    #     <!ELEMENT supported-report report>
    #
    #   Which means that a report child element is required.  However, section
    #   3.6 defined a precondition with the same name (DAV:supported-report),
    #   which means that, according to section 1.6.1, this XML must be issued if
    #   the precondition fails:
    #
    #     <?xml version="1.0"?>
    #     <D:error xmlns:D="DAV:">
    #       <D:supported-report/>
    #     </D:error>
    #
    #   Which is a problem because here we use supported-report with no
    #   children.
    #
    #   Absent any better guidance, we'll allow no children for this element for
    #   the time being.
    #
    # The optional (0, 1) bound below implements that compromise.
    allowed_children = { (dav_namespace, "report"): (0, 1) }
class SupportedReportSet (WebDAVElement):
    """
    Property which identifies the reports that are supported by the resource.
    (RFC 3253, section 3.1.5)
    """
    name = "supported-report-set"
    hidden = True
    # Protected properties may not be modified directly by clients (RFC 3253).
    protected = True

    allowed_children = { (dav_namespace, "supported-report"): (0, None) }
class ExpandProperty (WebDAVElement):
    """
    Report which provides a mechanism for retrieving in one request the
    properties from resources identified by DAV:href property values.
    (RFC 3253, section 3.8)
    """
    name = "expand-property"

    # Zero or more DAV:property children naming the properties to expand.
    allowed_children = { (dav_namespace, "property"): (0, None) }
class Property (WebDAVElement):
    """
    Identifies a property by name. (RFC 3253, section 3.8)

    Principal which matches a user if the value of the identified property of a
    resource contains at most one DAV:href element, the value of that element
    identifies a principal, and the user matches that principal. (RFC 3744,
    section 5.5.1)
    """
    name = "property"

    # Properties may be nested to expand properties of expanded hrefs.
    allowed_children = { (dav_namespace, "property"): (0, None) }
    # "name" is required; "namespace" is optional (presumably the boolean
    # marks requiredness -- see element.base).
    allowed_attributes = {
        "name"      : True,
        "namespace" : False,
    }
| msfrank/Higgins | higgins/http/dav/element/rfc3253.py | Python | lgpl-2.1 | 6,558 |
"""
Abstract class to regroup all common options from python payloads
"""
class PythonPayload:
    """Base collecting the options common to all Python payload modules."""

    def __init__(self):
        self.language = "python"
        self.extension = "py"
        # Option name -> [default value, description shown to the user].
        self.required_python_options = {
            "COMPILE_TO_EXE" : ["Y", "Compile to an executable"],
            "USE_PYHERION" : ["N", "Use the pyherion encrypter"],
            "ARCHITECTURE" : ["32", "Select the final binary architecture (32, 64)"]
        }

    def _validateArchitecture(self):
        # NOTE(review): this reads self.required_options, which is never set in
        # this class (__init__ only sets required_python_options) -- presumably
        # the concrete payload subclass provides it; confirm.
        # NOTE(review): `helpers` is not imported anywhere in this module, so
        # the error branch below raises NameError at runtime; confirm the
        # intended import (e.g. modules.common.helpers in the Veil tree).
        if not self.required_options["ARCHITECTURE"][0] in ("32", "64"):
            print helpers.color("\n [!] ARCHITECTURE must either be set to 32 or 64.\n", warning=True)
            # Empty string signals validation failure to the caller.
            return ""

        self.architecture = self.required_options["ARCHITECTURE"][0]
| Veil-Framework/Veil-Evasion | modules/common/pythonpayload.py | Python | gpl-3.0 | 858 |
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.utils import six
from djblets.testing.decorators import add_fixtures
from djblets.webapi.errors import PERMISSION_DENIED
from djblets.webapi.testing.decorators import webapi_test_template
from reviewboard.webapi.resources import resources
from reviewboard.webapi.errors import INVALID_USER
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (
review_group_user_item_mimetype, review_group_user_list_mimetype)
from reviewboard.webapi.tests.mixins import BasicTestsMetaclass
from reviewboard.webapi.tests.urls import (get_review_group_user_item_url,
get_review_group_user_list_url,
get_user_item_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(BaseWebAPITestCase):
    """Testing the ReviewGroupUserResource list API tests."""
    fixtures = ['test_users']
    sample_api_url = 'groups/<name>/users/'
    resource = resources.review_group_user
    # Metaclass-generated POST tests must authenticate as an administrator.
    basic_post_use_admin = True
    def compare_item(self, item_rsp, user):
        """Compare a serialized user payload against the User model instance."""
        self.assertEqual(item_rsp['id'], user.pk)
        self.assertEqual(item_rsp['username'], user.username)
        self.assertEqual(item_rsp['first_name'], user.first_name)
        self.assertEqual(item_rsp['last_name'], user.last_name)
    #
    # HTTP GET tests
    #
    def setup_basic_get_test(self, user, with_local_site, local_site_name,
                             populate_items):
        """Build the group (optionally populated) for the generated GET tests.

        Returns the list URL, the expected mimetype and the expected items.
        """
        group = self.create_review_group(with_local_site=with_local_site)
        if populate_items:
            items = [
                User.objects.get(username='doc'),
                User.objects.get(username='grumpy'),
            ]
            # Direct m2m assignment replaces the group's membership.
            group.users = items
        else:
            items = []
        return (get_review_group_user_list_url(group.name, local_site_name),
                review_group_user_list_mimetype,
                items)
    @webapi_test_template
    def test_get_with_no_access(self):
        """Testing the GET <URL> API without access to invite-only group"""
        group = self.create_review_group(name='priv-group', invite_only=True)
        rsp = self.api_get(get_review_group_user_list_url(group.name),
                           expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], PERMISSION_DENIED.code)
    def test_get_multiple_groups(self):
        """Testing GET <URL> API with a user in multiple groups"""
        doc = User.objects.get(username='doc')
        groups = [
            self.create_review_group('group1'),
            self.create_review_group('group2'),
        ]
        for group in groups:
            group.users.add(doc)
        # Only membership of the requested group must be returned.
        rsp = self.api_get(
            get_review_group_user_list_url(groups[0].name),
            expected_mimetype=review_group_user_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['total_results'], 1)
        self.compare_item(rsp['users'][0], doc)
    #
    # HTTP POST tests
    #
    def setup_basic_post_test(self, user, with_local_site, local_site_name,
                              post_valid_data):
        """Build the group and POST payload for the generated POST tests."""
        group = self.create_review_group(with_local_site=with_local_site)
        if post_valid_data:
            post_data = {
                'username': 'doc',
            }
        else:
            post_data = {}
        return (get_review_group_user_list_url(group.name, local_site_name),
                review_group_user_item_mimetype,
                post_data,
                [group])
    def check_post_result(self, user, rsp, group):
        """Verify that POST added exactly the user 'doc' to the group."""
        users = list(group.users.all())
        self.assertEqual(len(users), 1)
        self.assertEqual(users[0].username, 'doc')
        self.compare_item(rsp['user'], users[0])
    @webapi_test_template
    def test_post_with_no_access(self, local_site=None):
        """Testing the POST <URL> API with Permission Denied"""
        group = self.create_review_group()
        user = User.objects.get(pk=1)
        rsp = self.api_post(
            get_review_group_user_list_url(group.name, local_site),
            {'username': user.username},
            expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
    @webapi_test_template
    def test_post_with_invalid_user(self):
        """Testing the POST <URL> API with invalid user"""
        self._login_user(admin=True)
        group = self.create_review_group()
        rsp = self.api_post(
            get_review_group_user_list_url(group.name),
            {'username': 'grabl'},
            expected_status=400)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], INVALID_USER.code)
        self.assertEqual(group.users.count(), 0)
    @webapi_test_template
    def test_post_with_self(self):
        """Testing the POST <URL> API with the requesting user"""
        group = self.create_review_group()
        # A non-admin user may add themselves to a public group.
        self.assertFalse(self.user.is_superuser)
        rsp = self.api_post(
            get_review_group_user_list_url(group.name),
            {'username': self.user.username},
            expected_mimetype=review_group_user_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(group.users.count(), 1)
    @webapi_test_template
    def test_post_with_self_and_private_group(self):
        """Testing the POST <URL> API with the requesting user and private
        group
        """
        group = self.create_review_group(invite_only=True)
        self.assertFalse(group.is_accessible_by(self.user))
        rsp = self.api_post(
            get_review_group_user_list_url(group.name),
            {'username': self.user.username},
            expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(group.users.count(), 0)
    @add_fixtures(['test_site'])
    @webapi_test_template
    def test_post_with_self_and_site(self):
        """Testing the POST <URL> API with the requesting user on a local site
        """
        self.assertFalse(self.user.is_superuser)
        local_site = self.get_local_site(name=self.local_site_name)
        local_site.users.add(self.user)
        group = self.create_review_group(with_local_site=True)
        self.assertEqual(group.users.count(), 0)
        rsp = self.api_post(
            get_review_group_user_list_url(group.name, self.local_site_name),
            {'username': self.user.username},
            expected_mimetype=review_group_user_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(group.users.count(), 1)
    @add_fixtures(['test_site'])
    @webapi_test_template
    def test_post_with_self_and_unjoined_site(self):
        """Testing the POST <URL> API with the requesting user on an unjoined
        local site
        """
        self.assertFalse(self.user.is_superuser)
        group = self.create_review_group(with_local_site=True)
        self.assertEqual(group.users.count(), 0)
        rsp = self.api_post(
            get_review_group_user_list_url(group.name, self.local_site_name),
            {'username': self.user.username},
            expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(group.users.count(), 0)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(BaseWebAPITestCase):
    """Testing the ReviewGroupUserResource item API tests."""
    fixtures = ['test_users']
    sample_api_url = 'groups/<name>/users/<username>/'
    resource = resources.review_group_user
    # Metaclass-generated DELETE/PUT tests must authenticate as an admin.
    basic_delete_use_admin = True
    basic_put_use_admin = True
    def setup_http_not_allowed_item_test(self, user):
        """Return a URL used to verify disallowed HTTP methods."""
        return get_review_group_user_list_url('my-group')
    def compare_item(self, item_rsp, user):
        """Compare a serialized user payload against the User model instance."""
        self.assertEqual(item_rsp['id'], user.pk)
        self.assertEqual(item_rsp['username'], user.username)
        self.assertEqual(item_rsp['first_name'], user.first_name)
        self.assertEqual(item_rsp['last_name'], user.last_name)
    #
    # HTTP DELETE tests
    #
    def setup_basic_delete_test(self, user, with_local_site, local_site_name):
        """Build a group containing 'doc' for the generated DELETE tests."""
        group = self.create_review_group(with_local_site=with_local_site)
        doc = User.objects.get(username='doc')
        group.users.add(doc)
        return (get_review_group_user_item_url(group.name, doc.username,
                                               local_site_name),
                [group, doc])
    def check_delete_result(self, user, group, doc):
        """Verify that DELETE removed the user from the group."""
        self.assertNotIn(doc, group.users.all())
    @webapi_test_template
    def test_delete_with_self(self):
        """Testing the DELETE <URL> API with the requesting user
        """
        group = self.create_review_group()
        group.users.add(self.user)
        # Users can remove themselves without admin rights.
        self.assertFalse(self.user.is_superuser)
        self.api_delete(
            get_review_group_user_item_url(group.name, self.user.username))
        self.assertEqual(group.users.count(), 0)
    @add_fixtures(['test_site'])
    @webapi_test_template
    def test_delete_with_self_with_site(self):
        """Testing the DELETE <URL> API with the requesting user on local site
        """
        self.assertFalse(self.user.is_superuser)
        local_site = self.get_local_site(name=self.local_site_name)
        local_site.users.add(self.user)
        group = self.create_review_group(with_local_site=True)
        group.users.add(self.user)
        self.assertEqual(group.users.count(), 1)
        self.api_delete(
            get_review_group_user_item_url(group.name, self.user.username,
                                           self.local_site_name))
        self.assertEqual(group.users.count(), 0)
    #
    # HTTP GET tests
    #
    def setup_basic_get_test(self, user, with_local_site, local_site_name):
        """Build a group containing 'doc' for the generated GET tests."""
        group = self.create_review_group(with_local_site=with_local_site)
        doc = User.objects.get(username='doc')
        group.users.add(doc)
        return (get_review_group_user_item_url(group.name, doc.username,
                                               local_site_name),
                review_group_user_item_mimetype,
                doc)
    @webapi_test_template
    def test_get_delete_link(self):
        """Testing GET <URL> API contains the correct DELETE link"""
        doc = User.objects.get(username='doc')
        group = self.create_review_group()
        group.users.add(doc)
        rsp = self.api_get(
            get_review_group_user_item_url(group.name, doc.username),
            expected_mimetype=review_group_user_item_mimetype)
        # The DELETE link must target the group-membership resource, not
        # the global user resource.
        delete_href = \
            rsp['user']['links']['delete']['href'][len(self.base_url):]
        self.assertEqual(
            delete_href,
            get_review_group_user_item_url(group.name, doc.username))
        self.assertNotEqual(delete_href, get_user_item_url(doc.username))
    @add_fixtures(['test_site'])
    @webapi_test_template
    def test_get_delete_link_local_site(self):
        """Testing GET <URL> API contains the correct DELETE link with a local
        site
        """
        doc = User.objects.get(username='doc')
        local_site = self.get_local_site(name=self.local_site_name)
        local_site.users.add(self.user)
        local_site.users.add(doc)
        group = self.create_review_group(local_site=local_site)
        group.users.add(doc)
        rsp = self.api_get(
            get_review_group_user_item_url(group.name, doc.username,
                                           local_site.name),
            expected_mimetype=review_group_user_item_mimetype)
        delete_href = \
            rsp['user']['links']['delete']['href'][len(self.base_url):]
        self.assertEqual(
            delete_href,
            get_review_group_user_item_url(group.name, doc.username,
                                           local_site.name))
        self.assertNotEqual(delete_href, get_user_item_url(doc.username,
                                                           local_site.name))
| davidt/reviewboard | reviewboard/webapi/tests/test_review_group_user.py | Python | mit | 12,165 |
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for AG News dataset."""
from tensorflow_datasets import testing
from tensorflow_datasets.text import ag_news_subset
class AgNewsSubsetTest(testing.DatasetBuilderTestCase):
  """DatasetBuilderTestCase smoke test for the AG News subset dataset.

  Verifies the builder yields the expected number of examples from the
  fake data shipped with the test.
  """
  DATASET_CLASS = ag_news_subset.AGNewsSubset
  SPLITS = {
      "train": 3,  # Number of fake train examples
      "test": 2,  # Number of fake test examples
  }
  # Maps the download/extract key to "" — presumably the fake-data root;
  # see DatasetBuilderTestCase for the exact resolution rules.
  DL_EXTRACT_RESULT = {"ag_news_csv": ""}
if __name__ == "__main__":
  testing.test_main()
| tensorflow/datasets | tensorflow_datasets/text/ag_news_subset_test.py | Python | apache-2.0 | 1,051 |
import numpy as np
from dashboard.bokeh.helper import get_palette
from bokeh.models import LinearColorMapper, ColorBar
from bokeh.models import HoverTool, PrintfTickFormatter, ColumnDataSource
from bokeh.plotting import Figure
class Patch:
    """Builders for per-amplifier (AMP) 2x2 patch plots rendered with Bokeh.

    Each CCD readout is shown as four unit squares (one per amplifier),
    colored either by a diverging colormap of value-minus-reference, or by
    a NORMAL/WARNING/ALARM status color when ``status_plot`` is requested.
    """

    def set_amp(self, z_value):
        """Format amplifier values for display.

        Parameters
        ----------
        z_value : sequence of float
            One value per amplifier; entries <= -999 are sentinels for
            missing data and are treated as NaN.

        Returns
        -------
        tuple(list of str, str)
            The formatted per-amp labels and a printf-style colorbar format.
        """
        dz = z_value
        # Sentinel entries (<= -999) are masked as NaN so nanmax/nanmin
        # ignore them when choosing the display format.
        dz_valid = [x if x > -999 else np.nan for x in dz]
        dzmax, dzmin = np.nanmax(dz_valid), np.nanmin(dz_valid)
        # Wide dynamic range -> scientific notation; otherwise fixed point.
        # NOTE(review): np.log10 assumes positive values here; zero or
        # negative inputs would produce warnings/NaN — confirm upstream.
        if np.log10(dzmax) > 4 or np.log10(dzmin) < -3:
            ztext = ['{:4.2e}'.format(i) for i in dz_valid]
            cbarformat = "%2.1e"
        elif np.log10(dzmin) > 0:
            ztext = ['{:5.2f}'.format(i) for i in dz_valid]
            cbarformat = "%4.2f"
        else:
            ztext = ['{:6.2f}'.format(i) for i in dz_valid]
            cbarformat = "%5.2f"
        return ztext, cbarformat

    def val_status(self, val, nrg=(), wrg=()):
        """Classify ``val`` against the normal (``nrg``) and warning
        (``wrg``) [low, high] ranges.

        Returns one of 'NaN', 'NORMAL', 'WARNING' or 'ALARM'.
        """
        if val is None:
            return 'NaN'
        # Bug fix: the original compared ``val == np.nan``, which is always
        # False (NaN never compares equal to anything), so NaN values fell
        # through and were misreported as 'WARNING'.
        if val <= -999 or np.isnan(val):
            return 'NaN'
        elif val > nrg[0] and val < nrg[1]:
            return 'NORMAL'
        elif val < wrg[0] or val > wrg[1]:
            return 'ALARM'
        else:
            return 'WARNING'

    def plot_amp(self, dz, refexp, name="", font_size="1.2vw", description="",
                 nrg=(), wrg=(), status_plot=False):
        """Build the 2x2 AMP Bokeh figure.

        Parameters
        ----------
        dz : sequence of float
            Measured value per amplifier (sentinel <= -999 means missing).
        refexp : sequence of float
            Reference value per amplifier; coloring uses ``dz - refexp``.
        name : str
            Plot title, also substituted into the hover tooltip label.
        font_size : str
            CSS size for the title and axis label.
        description : str
            Text placed on the x-axis label.
        nrg, wrg : sequence of two floats
            Normal/warning ranges; ``wrg`` also bounds the colormap when
            ``status_plot`` is False.
        status_plot : bool
            If True, color squares by status instead of a continuous
            colormap (and omit the colorbar).

        Returns
        -------
        bokeh.plotting.Figure
        """
        ztext, cbarformat = self.set_amp(dz)
        # Centers of the four unit squares (2x2 amplifier layout).
        dx = [0, 1, 0, 1]
        dy = [1, 1, 0, 0]
        zvalid = np.array([x if x > -999 else np.nan for x in dz])
        data_source = dict(
            x=dx,
            y=dy,
            z=dz,
            zvalid=zvalid,
            ref=["{:.2f}".format(x) for x in refexp],
            zdiff=zvalid - np.array(refexp),
            y_offset1=[i + 0.15 for i in dy],
            y_offset2=[i - 0.10 for i in dy],
            amp=['AMP %s' % i for i in range(1, 5)],
            amp_number=['%s' % i for i in range(1, 5)],
            ztext=ztext,
        )
        if status_plot:
            text_val = 'status'
            color = {'NaN': 'darkgrey',
                     'NORMAL': 'green',
                     'WARNING': 'yellow',
                     'ALARM': 'red'}
            # Classify each (value - reference) once, then map to colors
            # (the original recomputed val_status a second time).
            status = [self.val_status(x, nrg=nrg, wrg=wrg)
                      for x in zvalid - np.array(refexp)]
            color_status = [color[s] for s in status]
            fill_color = "color_status"
            data_source.update({'status': status,
                                'color_status': color_status})
            fill_alpha = 0.8
        if not status_plot:
            text_val = 'ztext'
            cmap = get_palette("RdBu_r")
            mapper = LinearColorMapper(palette=cmap,
                                       low=wrg[0],
                                       high=wrg[1],
                                       nan_color="darkgrey")
            formatter = PrintfTickFormatter(format=cbarformat)
            color_bar = ColorBar(color_mapper=mapper,
                                 major_label_text_align='left',
                                 major_label_text_font_size='10pt',
                                 label_standoff=2, location=(0, 0),
                                 formatter=formatter,
                                 title="(Val-Ref)", title_standoff=15,
                                 title_text_baseline="alphabetic")
            fill_color = {'field': 'zdiff', 'transform': mapper}
            fill_alpha = 0.9
        cmap_tooltip = """
                <div>
                    <div>
                        <span style="font-size: 1vw; font-weight: bold; color: #303030;">AMP: </span>
                        <span style="font-size: 1vw; color: #515151;">@amp_number</span>
                    </div>
                    <div>
                        <span style="font-size: 1vw; font-weight: bold; color: #303030;">counts: </span>
                        <span style="font-size: 1vw; color: #515151">@text_val</span>
                    </div>
                    <div>
                        <span style="font-size: 1vw; font-weight: bold; color: #303030;">Reference: </span>
                        <span style="font-size: 1vw; color: #515151;">@ref</span>
                    </div>
                </div>
        """.replace("counts:", name.replace("_AMP", "")+":").replace("text_val", text_val)
        hover = HoverTool(tooltips=cmap_tooltip)
        p = Figure(title=name, tools=[hover],
                   x_range=list([-0.5, 1.5]),
                   y_range=list([-0.5, 1.5]),
                   plot_width=450,
                   plot_height=400)
        source = ColumnDataSource(data=data_source)
        text_props = {
            "source": source,
            "angle": 0,
            "color": "black",
            "text_color": "black",
            "text_align": "center",
            "text_baseline": "middle"}
        p.rect("x", "y", .98, .98, 0, source=source,
               fill_color=fill_color, fill_alpha=fill_alpha)
        p.text(x="x", y="y_offset1", text="amp",
               text_font_size="2vw", **text_props)
        p.text(x="x", y="y_offset2", text="ztext",
               text_font_style="bold", text_font_size="2.5vw", **text_props)
        if not status_plot:
            p.add_layout(color_bar, 'right')
        # Format:
        p.xaxis.axis_label_text_font_size = font_size
        p.legend.label_text_font_size = font_size
        p.title.text_font_size = font_size
        p.xaxis.axis_label = description
        # Hide axis decorations so only the patches remain visible.
        # (Dropped the original no-op `p.axis.clear` attribute access.)
        p.grid.grid_line_color = None
        p.outline_line_color = None
        p.axis.minor_tick_line_color = None
        p.axis.major_label_text_font_size = '0pt'
        p.yaxis.major_label_text_font_size = '0pt'
        p.xaxis.major_tick_line_color = None
        p.xaxis.minor_tick_line_color = None
        p.yaxis.major_tick_line_color = None
        p.yaxis.minor_tick_line_color = None
        p.yaxis.visible = False
        p.xaxis.visible = True
        return p
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2011 Aldo Cortesi
# Copyright (c) 2010 Philip Kranz
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2011 Paul Colomiets
# Copyright (c) 2011-2012 roger
# Copyright (c) 2011-2012, 2014 Tycho Andersen
# Copyright (c) 2012 Dustin Lacewell
# Copyright (c) 2012 Laurie Clark-Michalek
# Copyright (c) 2012-2014 Craig Barnes
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (C) 2015, Juan Riquelme González
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import abc
import glob
import os
import pickle
import string
from collections import deque
from typing import List, Optional, Tuple
from libqtile import bar, hook, pangocffi, utils
from libqtile.command.base import CommandObject, SelectError
from libqtile.command.client import InteractiveCommandClient
from libqtile.command.interface import CommandError, QtileCommandInterface
from libqtile.log_utils import logger
from libqtile.widget import base
class AbstractCompleter(metaclass=abc.ABCMeta):
    """Interface implemented by the Prompt widget's tab-completers."""
    @abc.abstractmethod
    def __init__(self, qtile: CommandObject) -> None:
        pass
    @abc.abstractmethod
    def actual(self) -> Optional[str]:
        # Concrete value behind the last completion (e.g. a full path).
        pass
    @abc.abstractmethod
    def reset(self) -> None:
        # Discard any cached candidate list.
        pass
    @abc.abstractmethod
    def complete(self, txt: str) -> str:
        """Perform the requested completion on the given text"""
        pass  # pragma: no cover
class NullCompleter(AbstractCompleter):
    """No-op completer used when a prompt has no completion configured."""
    def __init__(self, qtile) -> None:
        self.qtile = qtile
    def actual(self) -> str:
        # There is never a concrete completed value.
        return ""
    def reset(self) -> None:
        pass
    def complete(self, txt: str) -> str:
        # Completion leaves the input untouched.
        return txt
class FileCompleter(AbstractCompleter):
    """Cycles through filesystem paths matching the typed prefix."""
    def __init__(self, qtile, _testing=False) -> None:
        self._testing = _testing
        self.qtile = qtile
        self.thisfinal = None  # type: Optional[str]
        self.lookup = None  # type: Optional[List[Tuple[str, str]]]
        self.reset()
    def actual(self) -> Optional[str]:
        # Expanded filesystem path of the most recent completion.
        return self.thisfinal
    def reset(self) -> None:
        self.lookup = None
    def complete(self, txt: str) -> str:
        """Returns the next completion for txt, or None if there is no completion"""
        if self.lookup is None:
            # First Tab press: build the (display, path) candidate list.
            # Subsequent presses only cycle through it (see below).
            self.lookup = []
            if txt == "" or txt[0] not in "~/":
                # Bare input is completed relative to the home directory.
                txt = "~/" + txt
            path = os.path.expanduser(txt)
            if os.path.isdir(path):
                files = glob.glob(os.path.join(path, "*"))
                prefix = txt
            else:
                files = glob.glob(path + "*")
                prefix = os.path.dirname(txt)
                prefix = prefix.rstrip("/") or "/"
            for f in files:
                display = os.path.join(prefix, os.path.basename(f))
                if os.path.isdir(f):
                    # Trailing slash marks directories in the display text.
                    display += "/"
                self.lookup.append((display, f))
            self.lookup.sort()
            self.offset = -1
            # The raw input goes last so cycling wraps back to it.
            self.lookup.append((txt, txt))
        self.offset += 1
        if self.offset >= len(self.lookup):
            self.offset = 0
        ret = self.lookup[self.offset]
        self.thisfinal = ret[1]
        return ret[0]
class QshCompleter(AbstractCompleter):
    """Completes dotted qsh command paths via the interactive command client."""
    def __init__(self, qtile: CommandObject) -> None:
        q = QtileCommandInterface(qtile)
        self.client = InteractiveCommandClient(q)
        self.thisfinal = None  # type: Optional[str]
        self.reset()
    def actual(self) -> Optional[str]:
        return self.thisfinal
    def reset(self) -> None:
        self.lookup = None  # type: Optional[List[Tuple[str, str]]]
        self.path = ''
        self.offset = -1
    def complete(self, txt: str) -> str:
        """Return the next completion for the dotted path in ``txt``."""
        txt = txt.lower()
        if self.lookup is None:
            self.lookup = []
            # Split "a.b.term" into the container path ("a.b.") and the
            # partial final segment ("term") being completed.
            path = txt.split('.')[:-1]
            self.path = '.'.join(path)
            term = txt.split('.')[-1]
            if len(self.path) > 0:
                self.path += '.'
            # NOTE(review): eval() on a string built from prompt input;
            # safe only insofar as the input comes from the local user.
            contains_cmd = 'self.client.%s_contains' % self.path
            try:
                contains = eval(contains_cmd)
            except AttributeError:
                contains = []
            for obj in contains:
                if obj.lower().startswith(term):
                    self.lookup.append((obj, obj))
            commands_cmd = 'self.client.%scommands()' % self.path
            try:
                commands = eval(commands_cmd)
            except (CommandError, AttributeError):
                commands = []
            for cmd in commands:
                if cmd.lower().startswith(term):
                    self.lookup.append((cmd + '()', cmd + '()'))
            self.offset = -1
            # The raw term goes last so cycling wraps back to it.
            self.lookup.append((term, term))
        self.offset += 1
        if self.offset >= len(self.lookup):
            self.offset = 0
        ret = self.lookup[self.offset]
        self.thisfinal = self.path + ret[0]
        return self.path + ret[0]
class GroupCompleter(AbstractCompleter):
    """Completes group names, matched case-insensitively."""
    def __init__(self, qtile: CommandObject) -> None:
        self.qtile = qtile
        self.thisfinal = None  # type: Optional[str]
        self.lookup = None  # type: Optional[List[Tuple[str, str]]]
        self.offset = -1
    def actual(self) -> Optional[str]:
        """Returns the current actual value"""
        return self.thisfinal
    def reset(self) -> None:
        self.lookup = None
        self.offset = -1
    def complete(self, txt: str) -> str:
        """Returns the next completion for txt, or None if there is no completion"""
        txt = txt.lower()
        if not self.lookup:
            self.lookup = []
            for group in self.qtile.groups_map.keys():  # type: ignore
                if group.lower().startswith(txt):
                    self.lookup.append((group, group))
            self.lookup.sort()
            self.offset = -1
            # The raw input goes last so cycling wraps back to it.
            self.lookup.append((txt, txt))
        self.offset += 1
        if self.offset >= len(self.lookup):
            self.offset = 0
        ret = self.lookup[self.offset]
        self.thisfinal = ret[1]
        return ret[0]
class WindowCompleter(AbstractCompleter):
    """Completes names of currently managed windows.

    The display value is the window name; the actual value is the window id.
    """
    def __init__(self, qtile: CommandObject) -> None:
        self.qtile = qtile
        self.thisfinal = None  # type: Optional[str]
        self.lookup = None  # type: Optional[List[Tuple[str, str]]]
        self.offset = -1
    def actual(self) -> Optional[str]:
        """Returns the current actual value"""
        return self.thisfinal
    def reset(self) -> None:
        self.lookup = None
        self.offset = -1
    def complete(self, txt: str) -> str:
        """Returns the next completion for txt, or None if there is no completion"""
        # Bug fix: match case-insensitively, consistent with GroupCompleter.
        # The original compared lower-cased window names against the raw
        # input, so input containing an upper-case letter could never match.
        txt = txt.lower()
        if self.lookup is None:
            self.lookup = []
            for wid, window in self.qtile.windows_map.items():  # type: ignore
                if window.group and window.name.lower().startswith(txt):
                    self.lookup.append((window.name, wid))
            self.lookup.sort()
            self.offset = -1
            # The raw input goes last so cycling wraps back to it.
            self.lookup.append((txt, txt))
        self.offset += 1
        if self.offset >= len(self.lookup):
            self.offset = 0
        ret = self.lookup[self.offset]
        self.thisfinal = ret[1]
        return ret[0]
class CommandCompleter:
    """
    Completes executable names from $PATH, or executable paths for ~/ and /
    prefixed input.

    Parameters
    ==========
    _testing :
        disables reloading of the lookup table to make testing possible.
    """
    # Fallback search path when $PATH is unset.
    DEFAULTPATH = "/bin:/usr/bin:/usr/local/bin"
    def __init__(self, qtile, _testing=False):
        self.lookup = None  # type: Optional[List[Tuple[str, str]]]
        self.offset = -1
        self.thisfinal = None  # type: Optional[str]
        self._testing = _testing
    def actual(self) -> Optional[str]:
        """Returns the current actual value"""
        return self.thisfinal
    def executable(self, fpath: str):
        # True if the current user may execute the file at fpath.
        return os.access(fpath, os.X_OK)
    def reset(self) -> None:
        self.lookup = None
        self.offset = -1
    def complete(self, txt: str) -> str:
        """Returns the next completion for txt, or None if there is no completion"""
        if self.lookup is None:
            # Lookup is a set of (display value, actual value) tuples.
            self.lookup = []
            if txt and txt[0] in "~/":
                # Explicit path input: complete executables under that path.
                path = os.path.expanduser(txt)
                if os.path.isdir(path):
                    files = glob.glob(os.path.join(path, "*"))
                    prefix = txt
                else:
                    files = glob.glob(path + "*")
                    prefix = os.path.dirname(txt)
                    prefix = prefix.rstrip("/") or "/"
                for f in files:
                    if self.executable(f):
                        display = os.path.join(prefix, os.path.basename(f))
                        if os.path.isdir(f):
                            display += "/"
                        self.lookup.append((display, f))
            else:
                # Bare command: scan every $PATH entry for matches.
                dirs = os.environ.get("PATH", self.DEFAULTPATH).split(":")
                for d in dirs:
                    try:
                        d = os.path.expanduser(d)
                        for cmd in glob.iglob(os.path.join(d, "%s*" % txt)):
                            if self.executable(cmd):
                                self.lookup.append(
                                    (
                                        os.path.basename(cmd),
                                        cmd
                                    ),
                                )
                    except OSError:
                        pass
            self.lookup.sort()
            self.offset = -1
            # The raw input goes last so cycling wraps back to it.
            self.lookup.append((txt, txt))
        self.offset += 1
        if self.offset >= len(self.lookup):
            self.offset = 0
        ret = self.lookup[self.offset]
        self.thisfinal = ret[1]
        return ret[0]
class Prompt(base._TextBox):
"""A widget that prompts for user input
Input should be started using the ``.start_input()`` method on this class.
"""
completers = {
"file": FileCompleter,
"qshell": QshCompleter,
"cmd": CommandCompleter,
"group": GroupCompleter,
"window": WindowCompleter,
None: NullCompleter
}
orientations = base.ORIENTATION_HORIZONTAL
defaults = [("cursor", True, "Show a cursor"),
("cursorblink", 0.5, "Cursor blink rate. 0 to disable."),
("cursor_color", "bef098",
"Color for the cursor and text over it."),
("prompt", "{prompt}: ", "Text displayed at the prompt"),
("record_history", True, "Keep a record of executed commands"),
("max_history", 100,
"Commands to keep in history. 0 for no limit."),
("ignore_dups_history", False,
"Don't store duplicates in history"),
("bell_style", "audible",
"Alert at the begin/end of the command history. " +
"Possible values: 'audible' (X11 only), 'visual' and None."),
("visual_bell_color", "ff0000",
"Color for the visual bell (changes prompt background)."),
("visual_bell_time", 0.2,
"Visual bell duration (in seconds).")]
def __init__(self, name="prompt", **config) -> None:
base._TextBox.__init__(self, "", bar.CALCULATED, **config)
self.add_defaults(Prompt.defaults)
self.name = name
self.active = False
self.completer = None # type: Optional[AbstractCompleter]
# If history record is on, get saved history or create history record
if self.record_history:
self.history_path = os.path.join(utils.get_cache_dir(),
'prompt_history')
if os.path.exists(self.history_path):
with open(self.history_path, 'rb') as f:
try:
self.history = pickle.load(f)
if self.ignore_dups_history:
self._dedup_history()
except: # noqa: E722
# unfortunately, pickle doesn't wrap its errors, so we
# can't detect what's a pickle error and what's not.
logger.exception("failed to load prompt history")
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
# self.history of size does not match.
if len(self.history) != len(self.completers):
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
if self.max_history != \
self.history[list(self.history)[0]].maxlen:
self.history = {x: deque(self.history[x],
self.max_history)
for x in self.completers}
else:
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
def _configure(self, qtile, bar) -> None:
self.markup = True
base._TextBox._configure(self, qtile, bar)
def f(win):
if self.active and not win == self.bar.window:
self._unfocus()
hook.subscribe.client_focus(f)
# Define key handlers (action to do when a specific key is hit)
keyhandlers = {
'Tab': self._trigger_complete,
'BackSpace': self._delete_char(),
'Delete': self._delete_char(False),
'KP_Delete': self._delete_char(False),
'Escape': self._unfocus,
'Return': self._send_cmd,
'KP_Enter': self._send_cmd,
'Up': self._get_prev_cmd,
'KP_Up': self._get_prev_cmd,
'Down': self._get_next_cmd,
'KP_Down': self._get_next_cmd,
'Left': self._move_cursor(),
'KP_Left': self._move_cursor(),
'Right': self._move_cursor("right"),
'KP_Right': self._move_cursor("right"),
}
self.keyhandlers = {
qtile.core.keysym_from_name(k): v for k, v in keyhandlers.items()
}
printables = {x: self._write_char for x in range(127) if
chr(x) in string.printable}
self.keyhandlers.update(printables)
self.tab = qtile.core.keysym_from_name("Tab")
self.bell_style: str
if self.bell_style == "audible" and qtile.core.name != "x11":
self.bell_style = "visual"
logger.warning("Prompt widget only supports audible bell under X11")
if self.bell_style == "visual":
self.original_background = self.background
def start_input(self, prompt, callback, complete=None,
strict_completer=False, allow_empty_input=False) -> None:
"""Run the prompt
Displays a prompt and starts to take one line of keyboard input from
the user. When done, calls the callback with the input string as
argument. If history record is enabled, also allows to browse between
previous commands with ↑ and ↓, and execute them (untouched or
modified). When history is exhausted, fires an alert. It tries to
mimic, in some way, the shell behavior.
Parameters
==========
complete :
Tab-completion. Can be None, "cmd", "file", "group", "qshell" or
"window".
prompt :
text displayed at the prompt, e.g. "spawn: "
callback :
function to call with returned value.
complete :
completer to use.
strict_completer :
When True the return value wil be the exact completer result where
available.
allow_empty_input :
When True, an empty value will still call the callback function
"""
if self.cursor and self.cursorblink and not self.active:
self.timeout_add(self.cursorblink, self._blink)
self.display = self.prompt.format(prompt=prompt)
self.display = pangocffi.markup_escape_text(self.display)
self.active = True
self.user_input = ""
self.archived_input = ""
self.show_cursor = self.cursor
self.cursor_position = 0
self.callback = callback
self.completer = self.completers[complete](self.qtile)
self.strict_completer = strict_completer
self.allow_empty_input = allow_empty_input
self._update()
self.bar.widget_grab_keyboard(self)
if self.record_history:
self.completer_history = self.history[complete]
self.position = len(self.completer_history)
def calculate_length(self) -> int:
if self.text:
width = min(
self.layout.width,
self.bar.width
) + self.actual_padding * 2
return width
else:
return 0
def _blink(self) -> None:
self.show_cursor = not self.show_cursor
self._update()
if self.active:
self.timeout_add(self.cursorblink, self._blink)
def _highlight_text(self, text) -> str:
color = utils.hex(self.cursor_color)
text = '<span foreground="{0}">{1}</span>'.format(color, text)
if self.show_cursor:
text = '<u>{}</u>'.format(text)
return text
def _update(self) -> None:
if self.active:
self.text = self.archived_input or self.user_input
cursor = pangocffi.markup_escape_text(" ")
if self.cursor_position < len(self.text):
txt1 = self.text[:self.cursor_position]
txt2 = self.text[self.cursor_position]
txt3 = self.text[self.cursor_position + 1:]
for text in (txt1, txt2, txt3):
text = pangocffi.markup_escape_text(text)
txt2 = self._highlight_text(txt2)
self.text = "{0}{1}{2}{3}".format(txt1, txt2, txt3, cursor)
else:
self.text = pangocffi.markup_escape_text(self.text)
self.text += self._highlight_text(cursor)
self.text = self.display + self.text
else:
self.text = ""
self.bar.draw()
def _trigger_complete(self) -> None:
# Trigger the auto completion in user input
assert self.completer is not None
self.user_input = self.completer.complete(self.user_input)
self.cursor_position = len(self.user_input)
def _history_to_input(self) -> None:
# Move actual command (when exploring history) to user input and update
# history position (right after the end)
if self.archived_input:
self.user_input = self.archived_input
self.archived_input = ""
self.position = len(self.completer_history)
def _insert_before_cursor(self, charcode) -> None:
# Insert a character (given their charcode) in input, before the cursor
txt1 = self.user_input[:self.cursor_position]
txt2 = self.user_input[self.cursor_position:]
self.user_input = txt1 + chr(charcode) + txt2
self.cursor_position += 1
def _delete_char(self, backspace=True):
# Return a function that deletes character from the input text.
# If backspace is True, function will emulate backspace, else Delete.
def f():
self._history_to_input()
step = -1 if backspace else 0
if not backspace and self.cursor_position == len(self.user_input):
self._alert()
elif len(self.user_input) > 0 and self.cursor_position + step > -1:
txt1 = self.user_input[:self.cursor_position + step]
txt2 = self.user_input[self.cursor_position + step + 1:]
self.user_input = txt1 + txt2
if step:
self.cursor_position += step
else:
self._alert()
return f
def _write_char(self):
# Add pressed (legal) char key to user input.
# No LookupString in XCB... oh, the shame! Unicode users beware!
self._history_to_input()
self._insert_before_cursor(self.key)
def _unfocus(self):
# Remove focus from the widget
self.active = False
self._update()
self.bar.widget_ungrab_keyboard()
def _send_cmd(self):
# Send the prompted text for execution
self._unfocus()
if self.strict_completer:
self.user_input = self.actual_value or self.user_input
del self.actual_value
self._history_to_input()
if self.user_input or self.allow_empty_input:
# If history record is activated, also save command in history
if self.record_history:
# ensure no dups in history
if self.ignore_dups_history and (self.user_input in self.completer_history):
self.completer_history.remove(self.user_input)
self.position -= 1
self.completer_history.append(self.user_input)
if self.position < self.max_history:
self.position += 1
os.makedirs(os.path.dirname(self.history_path), exist_ok=True)
with open(self.history_path, mode='wb') as f:
pickle.dump(self.history, f, protocol=2)
self.callback(self.user_input)
def _alert(self):
# Fire an alert (audible or visual), if bell style is not None.
if self.bell_style == "audible":
self.qtile.core.conn.conn.core.Bell(0)
elif self.bell_style == "visual":
self.background = self.visual_bell_color
self.timeout_add(self.visual_bell_time, self._stop_visual_alert)
def _stop_visual_alert(self):
self.background = self.original_background
self._update()
def _get_prev_cmd(self):
# Get the previous command in history.
# If there isn't more previous commands, ring system bell
if self.record_history:
if not self.position:
self._alert()
else:
self.position -= 1
self.archived_input = self.completer_history[self.position]
self.cursor_position = len(self.archived_input)
def _get_next_cmd(self):
# Get the next command in history.
# If the last command was already reached, ring system bell.
if self.record_history:
if self.position == len(self.completer_history):
self._alert()
elif self.position < len(self.completer_history):
self.position += 1
if self.position == len(self.completer_history):
self.archived_input = ""
else:
self.archived_input = self.completer_history[self.position]
self.cursor_position = len(self.archived_input)
def _cursor_to_left(self):
# Move cursor to left, if possible
if self.cursor_position:
self.cursor_position -= 1
else:
self._alert()
def _cursor_to_right(self):
    """Advance the cursor one position to the right, alerting at the end."""
    # The text being edited is either a recalled history entry or the
    # live user input.
    current_text = self.archived_input or self.user_input
    if self.cursor_position >= len(current_text):
        self._alert()
    else:
        self.cursor_position += 1
def _move_cursor(self, direction="left"):
    """Return the cursor-movement handler for *direction*.

    Directions other than ``"left"`` and ``"right"`` yield ``None``.
    """
    dispatch = {
        "left": self._cursor_to_left,
        "right": self._cursor_to_right,
    }
    return dispatch.get(direction)
def _get_keyhandler(self, k):
    # Return the action (a function) to do according the pressed key (k).
    self.key = k
    if k in self.keyhandlers:
        if k != self.tab:
            # Any key other than Tab ends the completion cycle: remember
            # the completer's current suggestion and reset its state.
            self.actual_value = self.completer.actual()
            self.completer.reset()
        return self.keyhandlers[k]
def process_key_press(self, keysym: int):
    """Key press handler for the minibuffer.

    Currently only supports ASCII characters.
    """
    handle_key = self._get_keyhandler(keysym)
    if handle_key:
        handle_key()
        # ``self.key`` was stashed by _get_keyhandler for the handler's
        # benefit; discard it once the bound action has run.
        del self.key
    self._update()
def cmd_fake_keypress(self, key: str) -> None:
    """Simulate a key press: resolve *key* (a key name such as ``"a"``)
    to a keysym via the core and feed it to process_key_press."""
    self.process_key_press(self.qtile.core.keysym_from_name(key))
def cmd_info(self):
    """Returns a dictionary of info for this object"""
    return {
        "name": self.name,
        "width": self.width,
        "text": self.text,
        "active": self.active,
    }
def cmd_exec_general(
        self, prompt, object_name, cmd_name, selector=None, completer=None):
    """
    Execute a cmd of any object. For example layout, group, window, widget
    , etc with a string that is obtained from start_input.

    Parameters
    ==========
    prompt :
        Text displayed at the prompt.
    object_name :
        Name of a object in Qtile. This string has to be 'layout', 'widget',
        'bar', 'window' or 'screen'.
    cmd_name :
        Execution command of selected object using object_name and selector.
    selector :
        This value select a specific object within a object list that is
        obtained by object_name.
        If this value is None, current object is selected. e.g. current layout,
        current window and current screen.
    completer:
        Completer to use.

    config example:
        Key([alt, 'shift'], 'a',
            lazy.widget['prompt'].exec_general(
                'section(add)',
                'layout',
                'add_section'))
    """
    # Resolve the target object; bail out (with a warning) if the
    # (object_name, selector) pair does not match anything.
    try:
        obj = self.qtile.select([(object_name, selector)])
    except SelectError:
        logger.warning("cannot select a object")
        return
    # Look up the named command on the selected object.
    cmd = obj.command(cmd_name)
    if not cmd:
        logger.warning("command not found")
        return

    # Callback invoked with the text typed at the prompt; empty input
    # means "do nothing".
    def f(args):
        if args:
            cmd(args)

    self.start_input(prompt, f, completer)
def _dedup_history(self):
    """Filter the history deque, clearing all duplicate values."""
    # Rebuild every completer's history deque, keeping a single
    # occurrence of each entry (see _dedup_deque).
    self.history = {x: self._dedup_deque(self.history[x])
                    for x in self.completers}
def _dedup_deque(self, dq):
    # fromkeys on _LastUpdatedOrderedDict stores keys in the order they
    # were *last* added, so duplicates collapse onto their most recent
    # position before the deque is rebuilt.
    return deque(_LastUpdatedOrderedDict.fromkeys(dq))
class _LastUpdatedOrderedDict(dict):
    """Store items in the order the keys were last added."""

    def __setitem__(self, key, value):
        # Evict any previous entry first so that re-inserting an existing
        # key moves it to the end of the iteration order.
        self.pop(key, None)
        super().__setitem__(key, value)
| ramnes/qtile | libqtile/widget/prompt.py | Python | mit | 28,153 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-12 10:55
from __future__ import unicode_literals
import django.db.models.deletion
import modelcluster.contrib.taggit
import modelcluster.fields
import taggit.managers
from django.conf import settings
from django.db import migrations, models
import wagtail.wagtailadmin.taggable
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailimages.blocks
import wagtail.wagtailimages.models
class Migration(migrations.Migration):
    """Initial schema for the Wagtail test application.

    Auto-generated by Django 1.9.1 ``makemigrations``; do not edit the
    model definitions by hand -- add a follow-up migration instead.
    """

    initial = True

    # Migrations from other apps that must be applied before this one.
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('wagtailimages', '0010_change_on_delete_behaviour'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('wagtailcore', '0024_alter_page_content_type_on_delete_behaviour'),
        ('taggit', '0002_auto_20150616_2121'),
        ('wagtaildocs', '0005_alter_uploaded_by_user_on_delete_action'),
    ]

    # CreateModel operations first; AddField operations at the end break
    # circular references between the models.
    operations = [
        migrations.CreateModel(
            name='Advert',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(blank=True, null=True)),
                ('text', models.CharField(max_length=255)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AdvertPlacement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('colour', models.CharField(max_length=255)),
                ('advert', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='tests.Advert')),
            ],
        ),
        migrations.CreateModel(
            name='AdvertTag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_object', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='tagged_items', to='tests.Advert')),
                ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tests_adverttag_items', to='taggit.Tag')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AdvertWithTabbedInterface',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(blank=True, null=True)),
                ('text', models.CharField(max_length=255)),
                ('something_else', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='BlogCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='BlogCategoryBlogPage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='tests.BlogCategory')),
            ],
        ),
        migrations.CreateModel(
            name='BusinessChild',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='BusinessIndex',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='BusinessNowherePage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='BusinessSubIndex',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='CustomImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255, verbose_name='title')),
                ('file', models.ImageField(height_field='height', upload_to=wagtail.wagtailimages.models.get_upload_to, verbose_name='file', width_field='width')),
                ('width', models.IntegerField(editable=False, verbose_name='width')),
                ('height', models.IntegerField(editable=False, verbose_name='height')),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='created at')),
                ('focal_point_x', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_y', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_width', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_height', models.PositiveIntegerField(blank=True, null=True)),
                ('file_size', models.PositiveIntegerField(editable=False, null=True)),
                ('caption', models.CharField(max_length=255)),
                ('not_editable_field', models.CharField(max_length=255)),
                ('tags', taggit.managers.TaggableManager(blank=True, help_text=None, through='taggit.TaggedItem', to='taggit.Tag', verbose_name='tags')),
                ('uploaded_by_user', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='uploaded by user')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtail.wagtailadmin.taggable.TagSearchable),
        ),
        migrations.CreateModel(
            name='CustomImageFilePath',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255, verbose_name='title')),
                ('file', models.ImageField(height_field='height', upload_to=wagtail.wagtailimages.models.get_upload_to, verbose_name='file', width_field='width')),
                ('width', models.IntegerField(editable=False, verbose_name='width')),
                ('height', models.IntegerField(editable=False, verbose_name='height')),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='created at')),
                ('focal_point_x', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_y', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_width', models.PositiveIntegerField(blank=True, null=True)),
                ('focal_point_height', models.PositiveIntegerField(blank=True, null=True)),
                ('file_size', models.PositiveIntegerField(editable=False, null=True)),
                ('tags', taggit.managers.TaggableManager(blank=True, help_text=None, through='taggit.TaggedItem', to='taggit.Tag', verbose_name='tags')),
                ('uploaded_by_user', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='uploaded by user')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtail.wagtailadmin.taggable.TagSearchable),
        ),
        migrations.CreateModel(
            name='CustomManagerPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='EventIndex',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('intro', wagtail.wagtailcore.fields.RichTextField(blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='EventPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('date_from', models.DateField(null=True, verbose_name='Start date')),
                ('date_to', models.DateField(blank=True, help_text='Not required if event is on a single day', null=True, verbose_name='End date')),
                ('time_from', models.TimeField(blank=True, null=True, verbose_name='Start time')),
                ('time_to', models.TimeField(blank=True, null=True, verbose_name='End time')),
                ('audience', models.CharField(choices=[('public', 'Public'), ('private', 'Private')], max_length=255)),
                ('location', models.CharField(max_length=255)),
                ('body', wagtail.wagtailcore.fields.RichTextField(blank=True)),
                ('cost', models.CharField(max_length=255)),
                ('signup_link', models.URLField(blank=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='EventPageCarouselItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_external', models.URLField(blank=True, verbose_name='External link')),
                ('embed_url', models.URLField(blank=True, verbose_name='Embed URL')),
                ('caption', models.CharField(blank=True, max_length=255)),
                ('image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='EventPageChooserModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='EventPageRelatedLink',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_external', models.URLField(blank=True, verbose_name='External link')),
                ('title', models.CharField(help_text='Link title', max_length=255)),
                ('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='EventPageSpeaker',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_external', models.URLField(blank=True, verbose_name='External link')),
                ('first_name', models.CharField(blank=True, max_length=255, verbose_name='Name')),
                ('last_name', models.CharField(blank=True, max_length=255, verbose_name='Surname')),
                ('image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='FilePage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('file_field', models.FileField(upload_to='')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='FormField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
                ('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time')], max_length=16, verbose_name='field type')),
                ('required', models.BooleanField(default=True, verbose_name='required')),
                ('choices', models.CharField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', max_length=512, verbose_name='choices')),
                ('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
                ('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='FormPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('to_address', models.CharField(blank=True, help_text='Optional - form submissions will be emailed to this address', max_length=255, verbose_name='to address')),
                ('from_address', models.CharField(blank=True, max_length=255, verbose_name='from address')),
                ('subject', models.CharField(blank=True, max_length=255, verbose_name='subject')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='GenericSnippetPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('snippet_object_id', models.PositiveIntegerField(null=True)),
                ('snippet_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.ContentType')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='IconSetting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ManyToManyBlogPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('body', wagtail.wagtailcore.fields.RichTextField(blank=True)),
                ('adverts', models.ManyToManyField(blank=True, to='tests.Advert')),
                ('blog_categories', models.ManyToManyField(blank=True, through='tests.BlogCategoryBlogPage', to='tests.BlogCategory')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='MTIBasePage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'verbose_name': 'MTI Base page',
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='MyCustomPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='NotYetRegisteredSetting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PageChooserModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='PageWithOldStyleRouteMethod',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('content', models.TextField()),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='SimplePage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('content', models.TextField()),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='SingletonPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='SnippetChooserModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('advert', models.ForeignKey(help_text='help text', on_delete=django.db.models.deletion.CASCADE, to='tests.Advert')),
            ],
        ),
        migrations.CreateModel(
            name='StandardChild',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='StandardIndex',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='StreamModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())))),
            ],
        ),
        migrations.CreateModel(
            name='StreamPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())))),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='TaggedPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='TaggedPageTag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_object', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='tagged_items', to='tests.TaggedPage')),
                ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tests_taggedpagetag_items', to='taggit.Tag')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='TestSetting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('email', models.EmailField(max_length=50)),
                ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ValidatedPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('foo', models.CharField(max_length=255)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='MTIChildPage',
            fields=[
                ('mtibasepage_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.MTIBasePage')),
            ],
            options={
                'abstract': False,
            },
            bases=('tests.mtibasepage',),
        ),
        migrations.CreateModel(
            name='SingleEventPage',
            fields=[
                ('eventpage_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.EventPage')),
                ('excerpt', models.TextField(blank=True, help_text='Short text to describe what is this action about', max_length=255, null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('tests.eventpage',),
        ),
        migrations.AddField(
            model_name='taggedpage',
            name='tags',
            field=modelcluster.contrib.taggit.ClusterTaggableManager(blank=True, help_text='A comma-separated list of tags.', through='tests.TaggedPageTag', to='taggit.Tag', verbose_name='Tags'),
        ),
        migrations.AddField(
            model_name='pagechoosermodel',
            name='page',
            field=models.ForeignKey(help_text='help text', on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page'),
        ),
        migrations.AddField(
            model_name='formfield',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='tests.FormPage'),
        ),
        migrations.AddField(
            model_name='eventpagespeaker',
            name='link_page',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page'),
        ),
        migrations.AddField(
            model_name='eventpagespeaker',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='speakers', to='tests.EventPage'),
        ),
        migrations.AddField(
            model_name='eventpagerelatedlink',
            name='link_page',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page'),
        ),
        migrations.AddField(
            model_name='eventpagerelatedlink',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='related_links', to='tests.EventPage'),
        ),
        migrations.AddField(
            model_name='eventpagechoosermodel',
            name='page',
            field=models.ForeignKey(help_text='more help text', on_delete=django.db.models.deletion.CASCADE, to='tests.EventPage'),
        ),
        migrations.AddField(
            model_name='eventpagecarouselitem',
            name='link_page',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page'),
        ),
        migrations.AddField(
            model_name='eventpagecarouselitem',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='carousel_items', to='tests.EventPage'),
        ),
        migrations.AddField(
            model_name='eventpage',
            name='feed_image',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
        ),
        migrations.AddField(
            model_name='blogcategoryblogpage',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='categories', to='tests.ManyToManyBlogPage'),
        ),
        migrations.AddField(
            model_name='advertplacement',
            name='page',
            field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='advert_placements', to='wagtailcore.Page'),
        ),
        migrations.AddField(
            model_name='advert',
            name='tags',
            field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='tests.AdvertTag', to='taggit.Tag', verbose_name='Tags'),
        ),
    ]
| hamsterbacke23/wagtail | wagtail/tests/testapp/migrations/0001_initial.py | Python | bsd-3-clause | 30,539 |
from glob import glob
import pandas
import numpy
import pickle
import os
import re
# Root locations: extension-count files live under $SCRATCH/DATA/PUBMED/counts,
# one tab-separated file per (pmid, user, repo) triple.
home = os.environ["HOME"]
base = "%s/DATA/PUBMED" %os.environ["SCRATCH"]
counts_folder = "%s/counts" %(base)
files = glob("%s/*.tsv" %counts_folder)
# The bare len(...) calls below are leftovers from an interactive session;
# they are no-ops when this is run as a script.
len(files)
#4660
# save a lookup to associate repos with pmids, if there are repeats
repos = dict()
# First let's get unique columns
colnames = []
for f in files:
    # File names look like "<pmid>_<user>_<repo...>extcounts.tsv".
    entry = os.path.basename(f).replace("extcounts.tsv","").split("_")
    pmid = entry[0]
    user_name = entry[1]
    repo_name = "".join(entry[2:])
    uid = "%s_%s" %(user_name,repo_name)
    result = pandas.read_csv(f,index_col=0,sep="\t")
    # Union of all extensions seen so far; numpy.unique de-duplicates
    # (and sorts) the concatenated lists.
    colnames = numpy.unique([colnames + result.index.tolist()]).tolist()
# Note that we will want to combine counts for job files (.o[number] and .e[number])
colnames = [x for x in colnames if not re.search("[.]o[0-9]+",x)]
colnames = [x for x in colnames if not re.search("[.]e[0-9]+",x)]
colnames = colnames + ["JOBFILE_OUTPUT","JOBFILE_ERROR"]
len(colnames)
#4611
# Save a data frame of counts
counts = pandas.DataFrame(columns=colnames)
# Uncomment to resume from a previous run instead of starting fresh:
#counts = pandas.read_csv("%s/extension_counts.tsv" %base,sep="\t",index_col=0)
#repos = pickle.load(open("%s/extension_repos.pkl" %base,"rb"))
# Collapse cluster job files into two aggregate rows of the counts frame.
def find_job_files(result):
    """Merge job output/error files (``*.o<num>`` / ``*.e<num>``) into the
    aggregate rows ``JOBFILE_OUTPUT`` and ``JOBFILE_ERROR``.

    *result* is a one-column ("count") DataFrame indexed by file name; the
    matching rows are summed, dropped, and replaced by a single row each.
    Returns the updated DataFrame.
    """
    err_names = [name for name in result.index if re.search("[.]e[0-9]+", name)]
    out_names = [name for name in result.index if re.search("[.]o[0-9]+", name)]
    if err_names:
        total_err = result.loc[err_names].sum()["count"]
        result = result.drop(err_names)
        result.loc["JOBFILE_ERROR"] = total_err
    if out_names:
        total_out = result.loc[out_names].sum()["count"]
        result = result.drop(out_names)
        result.loc["JOBFILE_OUTPUT"] = total_out
    return result
# Parse every counts file once per (user, repo) pair, merging job
# output/error columns, then persist the counts table and the
# repo -> pmid lookup.
for f in range(len(files)):
    # print() call form is valid under both Python 2 and Python 3,
    # unlike the original Python-2-only print statement.
    print("Parsing %s of %s" % (f, len(files)))
    filey = files[f]
    # File names look like "<pmid>_<user>_<repo...>extcounts.tsv".
    entry = os.path.basename(filey).replace("extcounts.tsv", "").split("_")
    pmid = entry[0]
    user_name = entry[1]
    repo_name = "".join(entry[2:])
    uid = "%s_%s" % (user_name, repo_name)
    if uid not in counts.index:
        result = pandas.read_csv(filey, index_col=0, sep="\t")
        # Collapse job .o/.e files into aggregate rows first.
        result = find_job_files(result)
        counts.loc[uid, result.index] = result["count"]
    # Record every pmid that references this repo, without duplicates.
    if uid in repos:
        if pmid not in repos[uid]:
            repos[uid].append(pmid)
    else:
        repos[uid] = [pmid]
counts = counts.fillna(0)
counts.to_csv("%s/extension_counts.tsv" % base, sep="\t")
pickle.dump(repos, open("%s/extension_repos.pkl" % base, "wb"))
| vsoch/repofish | analysis/methods/6.compile_count_extensions.py | Python | mit | 2,657 |
import turtle
# Set up the turtle: standard orientation, home position, slow speed,
# red pen of width 2, pen down ready to draw.
turtle.mode("standard")
turtle.home()
turtle.showturtle()
turtle.speed(1)
turtle.pencolor("red")
turtle.pensize(2)
turtle.pendown()
# Draw a square: four sides of equal length with 90-degree right turns.
side_length = 100
turn_angle = 90
for _ in range(4):
    turtle.forward(side_length)
    turtle.right(turn_angle)
# Clean up and keep the window open until it is closed.
turtle.hideturtle()
turtle.done()
| TGITS/programming-workouts | erri/python/lesson_48/one_square.py | Python | mit | 326 |
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import paddle.dataset.flowers
import unittest
class TestFlowers(unittest.TestCase):
    """Sanity checks for the flowers dataset readers.

    Each reader yields ``(image, label)`` pairs; every flattened image is
    expected to contain 224 * 224 * 3 values.
    """

    def check_reader(self, reader):
        """Consume *reader* once, asserting each image has the full size.

        ``reader`` is a callable returning an iterable of
        ``(image, label)`` pairs.  Returns ``(num_instances, max_label)``.
        """
        # Renamed from ``sum``/``label`` to avoid shadowing the builtin
        # and to make the return value self-describing.
        num_instances = 0
        max_label = 0
        size = 224 * 224 * 3
        for sample in reader():
            self.assertEqual(sample[0].size, size)
            if sample[1] > max_label:
                max_label = sample[1]
            num_instances += 1
        return num_instances, max_label

    def test_train(self):
        # 6149 training images with labels up to 102.
        instances, max_label_value = self.check_reader(
            paddle.dataset.flowers.train())
        self.assertEqual(instances, 6149)
        self.assertEqual(max_label_value, 102)

    def test_test(self):
        instances, max_label_value = self.check_reader(
            paddle.dataset.flowers.test())
        self.assertEqual(instances, 1020)
        self.assertEqual(max_label_value, 102)

    def test_valid(self):
        instances, max_label_value = self.check_reader(
            paddle.dataset.flowers.valid())
        self.assertEqual(instances, 1020)
        self.assertEqual(max_label_value, 102)


if __name__ == '__main__':
    unittest.main()
| QiJune/Paddle | python/paddle/dataset/tests/flowers_test.py | Python | apache-2.0 | 1,707 |
from pyduino import *
import time
# Serial connection to the Arduino board on port COM10.
ard = Arduino('COM10')
# PWM-capable pin driving the LED.
pin = 11
#declare output pins as a list/tuple
ard.pinMode(pin, OUTPUT)
brightness = 0    # current PWM duty value (0-255)
fadeAmount = 5    # per-iteration step; its sign flips at the limits
# Ramp the LED brightness up and down forever.
# NOTE(review): there is no delay in this loop (``time`` is imported at the
# top of the file but unused) -- presumably a ``time.sleep`` was intended to
# pace the fade, as in the classic Arduino "Fade" example; confirm.
while (True):
    ard.analogWrite(pin, brightness)
    brightness = brightness + fadeAmount
    if (brightness == 0 or brightness == 255):
        fadeAmount = -fadeAmount
| tushutripathi/ardupy | Examples/fade.py | Python | mit | 334 |
"""
File: logging.py
Author: Ulf Krumnack
Email: krumnack@uni-osnabrueck.de
Github: https://github.com/krumnack
"""
# standard imports
from base import Runner
from toolbox import Toolbox
# Qt imports
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QPushButton, QSpinBox,
QVBoxLayout, QHBoxLayout)
# toolbox imports
from toolbox import Toolbox
from dltb.base.data import Data
from dltb.base.image import Image, Imagelike
# GUI imports
from .panel import Panel
from ..utils import QObserver
from ..widgets.matplotlib import QMatplotlib
from ..widgets.training import QTrainingBox
from ..widgets.data import QDataSelector
class AdversarialExamplePanel(Panel, QObserver, qobservables={
        Toolbox: {'input_changed'}}):
    """A panel displaying adversarial examples.

    The panel observes the Toolbox for input changes; the current input
    image is shown on the left, the (to-be-generated) adversarial example
    and perturbation plots on the right.

    Attributes
    ----------
    _network: NetworkView
        A network trained as autoencoder.
    """
    def __init__(self, toolbox: Toolbox = None, **kwargs):
        """Initialization of the AdversarialExamplePanel.

        Parameters
        ----------
        toolbox: Toolbox
            The toolbox whose input data this panel observes (may be None).
        parent: QWidget
            The parent argument is sent to the QWidget constructor.
        """
        super().__init__(**kwargs)
        self._controller = None  # FIXME[old]
        self._initUI()
        self._layoutUI()
        self.setToolbox(toolbox)
        # FIXME[old]
        # self.setController(AdversarialExampleController())
    def _initUI(self):
        """Initialize the user interface.

        The user interface contains the following elements:
        * the data selector: depicting the current input image
          and allowing to select new inputs from a datasource
        * output: adversarial example
        * output: adversarial perturbation
        * output: statistics
        """
        #
        # Input data
        #
        self._dataSelector = QDataSelector()
        self._dataView = self._dataSelector.dataView()
        # Show these data attributes next to the image.
        self._dataView.addAttribute('filename')
        self._dataView.addAttribute('basename')
        self._dataView.addAttribute('directory')
        self._dataView.addAttribute('path')
        self._dataView.addAttribute('regions')
        self._dataView.addAttribute('image')
        #
        # Controls
        #
        self._buttonCreateModel = QPushButton("Create")
        self._buttonTrainModel = QPushButton("Train")
        self._buttonLoadModel = QPushButton("Load")
        self._buttonSaveModel = QPushButton("Save")
        self._buttonResetModel = QPushButton("Reset")
        self._buttonPlotModel = QPushButton("Plot Model")
        self._buttonShowExample = QPushButton("Show")
        # Only the "Show" button is wired here; the model buttons are
        # connected later in setController().
        self._buttonShowExample.clicked.connect(self._onShowExample)
        #
        # Plots
        #
        self._trainingBox = QTrainingBox()
        self._pltOriginal = QMatplotlib()
        self._pltAdversarial = QMatplotlib()
    def _layoutUI(self):
        """Layout the UI elements.
        """
        # The big picture:
        #
        #  +--------------------+----------------------------------------+
        #  |+------------------+|+------------------------------------+  |
        #  ||dataSelector      ||| Result                             |  |
        #  ||[view]            ||| (Adversarial Example)              |  |
        #  ||                  |||                                    |  |
        #  ||                  |||                                    |  |
        #  ||                  ||| Diffs                              |  |
        #  ||                  ||| (Adversarial Perturbation)         |  |
        #  ||[navigator]       ||| Statistics                         |  |
        #  ||                  |||                                    |  |
        #  ||                  ||| Selector                           |  |
        #  |+------------------+|+------------------------------------+  |
        #  +--------------------+----------------------------------------+
        plotBar = QHBoxLayout()
        plotBar.addWidget(self._dataSelector)
        plotBar.addWidget(self._trainingBox)
        plotBar.addWidget(self._pltOriginal)
        plotBar.addWidget(self._pltAdversarial)
        buttonBar = QHBoxLayout()
        buttonBar.addWidget(self._buttonCreateModel)
        buttonBar.addWidget(self._buttonTrainModel)
        buttonBar.addWidget(self._buttonLoadModel)
        buttonBar.addWidget(self._buttonSaveModel)
        buttonBar.addWidget(self._buttonResetModel)
        buttonBar.addWidget(self._buttonPlotModel)
        buttonBar.addWidget(self._buttonShowExample)
        layout = QVBoxLayout()
        layout.addLayout(plotBar)
        layout.addLayout(buttonBar)
        self.setLayout(layout)
    def setImage(self, image: Imagelike) -> None:
        """Set the image for this :py:class:`FacePanel`. This
        will initiate the processing of this image using the
        current tools.
        """
        self.setData(Image.as_data(image))
    def setData(self, data: Data) -> None:
        """Set the data to be processed by this :py:class:`FacePanel`.
        """
        # set data for the dataView - this is redundant if data is set
        # from the toolbox (as the dataView also observes the toolbox),
        # but it is necessary, if setData is called independently.
        self._dataView.setData(data)
        # FIXME[todo]: generate adversarial example.
    def setToolbox(self, toolbox: Toolbox) -> None:
        """Set a new Toolbox.

        We are only interested in changes of the input data.
        """
        self._dataSelector.setToolbox(toolbox)
        # self._dataView.setToolbox(toolbox)
        self.setData(toolbox.input_data if toolbox is not None else None)
    def toolbox_changed(self, toolbox: Toolbox,
                        change: Toolbox.Change) -> None:
        # pylint: disable=invalid-name
        """The FacePanel is a Toolbox.Observer. It is interested
        in input changes and will react with applying face recognition
        to a new input image.
        """
        if change.input_changed:
            self.setData(toolbox.input_data)
    # FIXME[old]
    # FIXME[hack]: no quotes!
    def setController(self, controller: 'AdversarialExampleController') -> None:
        """Attach the controller and wire the model buttons to it."""
        self._controller = controller
        self._buttonCreateModel.clicked.connect(controller.create_model)
        self._buttonTrainModel.clicked.connect(controller.train_model)
        self._buttonLoadModel.clicked.connect(controller.load_model)
        self._buttonSaveModel.clicked.connect(controller.save_model)
        self._buttonResetModel.clicked.connect(controller.reset_model)
        self.observe(controller)
    def _enableComponents(self, running=False):
        """Enable/disable buttons depending on whether a computation runs.

        "Create" only requires that nothing is running; the other buttons
        additionally require a controller to be attached.
        """
        print(f"enable components: {running}")
        available = self._controller is not None and not running
        self._buttonCreateModel.setEnabled(not running)
        for w in (self._buttonTrainModel,
                  self._buttonLoadModel, self._buttonSaveModel,
                  self._buttonPlotModel,
                  self._buttonShowExample):
            w.setEnabled(available)
    def _onShowExample(self):
        """Plot the current example and its adversarial counterpart.

        Without a controller both plots show a "no data" placeholder.
        """
        if self._controller is None:
            self._pltOriginal.noData()
            self._pltAdversarial.noData()
        else:
            example_data, example_label, example_prediction = \
                self._controller.get_example()
            # NOTE(review): indexing [:,:,0] assumes single-channel
            # (grayscale) image data in HWC layout — confirm upstream.
            with self._pltOriginal as ax:
                ax.imshow(example_data[:,:,0], cmap='Greys_r')
                ax.set_title(f"Label = {example_label.argmax()}, "
                             f"Prediction = {example_prediction.argmax()}")
            adversarial_data, adversarial_prediction = \
                self._controller.get_adversarial_example()
            with self._pltAdversarial as ax:
                ax.imshow(adversarial_data[:,:,0], cmap='Greys_r')
                ax.set_title(f"Prediction = {adversarial_prediction.argmax()}")
    def adversarialControllerChanged(self, controller, change):
        """Observer callback: re-enable/disable buttons on busy changes."""
        if 'busy_changed' in change:
            self._enableComponents(controller.busy)
| Petr-By/qtpyvis | qtgui/panels/advexample.py | Python | mit | 8,103 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2013 J.P. Krauss <jkrauss@asymworks.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import time
import duplicity.backend
from duplicity import globals
from duplicity import log
from duplicity.errors import * # @UnusedWildImport
from duplicity.util import exception_traceback
from duplicity.backend import retry
class PyraxBackend(duplicity.backend.Backend):
    """
    Backend for Rackspace's CloudFiles using Pyrax

    Credentials are read from the CLOUDFILES_USERNAME / CLOUDFILES_APIKEY
    (and optionally CLOUDFILES_REGION) environment variables; the target
    container name comes from the URL path.  This module is Python 2 code
    (``except E, e`` syntax, ``dict.has_key``).
    """
    def __init__(self, parsed_url):
        # Import lazily so duplicity still loads when pyrax is absent.
        try:
            import pyrax
        except ImportError:
            raise BackendException("This backend requires the pyrax "
                                   "library available from Rackspace.")
        # Inform Pyrax that we're talking to Rackspace
        # per Jesus Monzon (gsusmonzon)
        pyrax.set_setting("identity_type", "rackspace")
        conn_kwargs = {}
        if not os.environ.has_key('CLOUDFILES_USERNAME'):
            raise BackendException('CLOUDFILES_USERNAME environment variable'
                                   'not set.')
        if not os.environ.has_key('CLOUDFILES_APIKEY'):
            raise BackendException('CLOUDFILES_APIKEY environment variable not set.')
        conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
        conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']
        if os.environ.has_key('CLOUDFILES_REGION'):
            conn_kwargs['region'] = os.environ['CLOUDFILES_REGION']
        # URL path names the target container, e.g. cf+http://my-container
        container = parsed_url.path.lstrip('/')
        try:
            pyrax.set_credentials(**conn_kwargs)
        except Exception, e:
            log.FatalError("Connection failed, please check your credentials: %s %s"
                           % (e.__class__.__name__, str(e)),
                           log.ErrorCode.connection_failed)
        # Cache pyrax exception types so later methods need no import.
        self.client_exc = pyrax.exceptions.ClientException
        self.nso_exc = pyrax.exceptions.NoSuchObject
        self.cloudfiles = pyrax.cloudfiles
        # presumably returns the existing container if it already exists
        # — verify against the pyrax documentation.
        self.container = pyrax.cloudfiles.create_container(container)
    def put(self, source_path, remote_filename = None):
        """Upload source_path, retrying globals.num_retries times with a
        30 second pause between attempts; raise BackendException on
        final failure."""
        if not remote_filename:
            remote_filename = source_path.get_filename()
        for n in range(1, globals.num_retries + 1):
            log.Info("Uploading '%s/%s' " % (self.container, remote_filename))
            try:
                self.container.upload_file(source_path.name, remote_filename)
                return
            except self.client_exc, error:
                log.Warn("Upload of '%s' failed (attempt %d): pyrax returned: %s %s"
                         % (remote_filename, n, error.__class__.__name__, error.message))
            except Exception, e:
                log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                log.Debug("Backtrace of previous error: %s"
                          % exception_traceback())
            time.sleep(30)
        log.Warn("Giving up uploading '%s' after %s attempts"
                 % (remote_filename, globals.num_retries))
        raise BackendException("Error uploading '%s'" % remote_filename)
    def get(self, remote_filename, local_path):
        """Download remote_filename into local_path (with retries).

        A missing remote object (NoSuchObject) is treated as success:
        the method returns without writing anything.
        """
        for n in range(1, globals.num_retries + 1):
            log.Info("Downloading '%s/%s'" % (self.container, remote_filename))
            try:
                sobject = self.container.get_object(remote_filename)
                f = open(local_path.name, 'w')
                f.write(sobject.get())
                local_path.setdata()
                return
            except self.nso_exc:
                return
            except self.client_exc, resperr:
                log.Warn("Download of '%s' failed (attempt %s): pyrax returned: %s %s"
                         % (remote_filename, n, resperr.__class__.__name__, resperr.message))
            except Exception, e:
                log.Warn("Download of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                log.Debug("Backtrace of previous error: %s"
                          % exception_traceback())
            time.sleep(30)
        log.Warn("Giving up downloading '%s' after %s attempts"
                 % (remote_filename, globals.num_retries))
        raise BackendException("Error downloading '%s/%s'"
                               % (self.container, remote_filename))
    def _list(self):
        """Return the names of all objects in the container, paging past
        the 10,000-objects-per-request CloudFiles limit."""
        for n in range(1, globals.num_retries + 1):
            log.Info("Listing '%s'" % (self.container))
            try:
                # Cloud Files will return a max of 10,000 objects. We have
                # to make multiple requests to get them all.
                objs = self.container.get_object_names()
                keys = objs
                while len(objs) == 10000:
                    objs = self.container.get_object_names(marker = keys[-1])
                    keys += objs
                return keys
            except self.client_exc, resperr:
                log.Warn("Listing of '%s' failed (attempt %s): pyrax returned: %s %s"
                         % (self.container, n, resperr.__class__.__name__, resperr.message))
            except Exception, e:
                log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
                         % (self.container, n, e.__class__.__name__, str(e)))
                log.Debug("Backtrace of previous error: %s"
                          % exception_traceback())
            time.sleep(30)
        log.Warn("Giving up listing of '%s' after %s attempts"
                 % (self.container, globals.num_retries))
        raise BackendException("Error listing '%s'"
                               % (self.container))
    def delete_one(self, remote_filename):
        """Delete a single object, with retries.

        A 404 on a retry means a previous (timed-out) attempt actually
        succeeded on the server, so it is treated as success.
        """
        for n in range(1, globals.num_retries + 1):
            log.Info("Deleting '%s/%s'" % (self.container, remote_filename))
            try:
                self.container.delete_object(remote_filename)
                return
            except self.client_exc, resperr:
                if n > 1 and resperr.status == 404:
                    # We failed on a timeout, but delete succeeded on the server
                    log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename)
                    return
                log.Warn("Delete of '%s' failed (attempt %s): pyrax returned: %s %s"
                         % (remote_filename, n, resperr.__class__.__name__, resperr.message))
            except Exception, e:
                log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                log.Debug("Backtrace of previous error: %s"
                          % exception_traceback())
            time.sleep(30)
        log.Warn("Giving up deleting '%s' after %s attempts"
                 % (remote_filename, globals.num_retries))
        raise BackendException("Error deleting '%s/%s'"
                               % (self.container, remote_filename))
    def delete(self, filename_list):
        """Delete each file in filename_list, sequentially."""
        for file_ in filename_list:
            self.delete_one(file_)
            log.Debug("Deleted '%s/%s'" % (self.container, file_))
    @retry
    def _query_file_info(self, filename, raise_errors = False):
        """Return {'size': n}; size is -1 for a missing object and None
        on other errors (unless raise_errors is set)."""
        try:
            sobject = self.container.get_object(filename)
            return {'size': sobject.total_bytes}
        except self.nso_exc:
            return {'size': -1}
        except Exception, e:
            log.Warn("Error querying '%s/%s': %s"
                     "" % (self.container,
                           filename,
                           str(e)))
            if raise_errors:
                raise e
            else:
                return {'size': None}
# Register this backend for cf+http:// URLs.
duplicity.backend.register_backend("cf+http", PyraxBackend)
| alanfranz/duplicity | duplicity/backends/_cf_pyrax.py | Python | gpl-2.0 | 8,617 |
# -*- coding: utf-8 -*-
import buy_order
import buy_receipt
import buy_adjust
import money
import vendor_goods
import partner
| luoguizhou/gooderp_addons | buy/models/__init__.py | Python | agpl-3.0 | 126 |
#!/usr/bin/env python2
# vim: expandtab:tabstop=4:shiftwidth=4
'''
docker container DNS tester
'''
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
import time
import os
from docker import APIClient as DockerClient
from docker.errors import APIError
# Jenkins doesn't have our tools which results in import errors
# pylint: disable=import-error
from openshift_tools.monitoring.metric_sender import MetricSender
# Zabbix item keys this check reports under.
ZBX_KEY = "docker.container.dns.resolution"
ZBX_KEY_TIMEOUT = "docker.container.dns.resolution.timeout"
if __name__ == "__main__":
    cli = DockerClient(version='auto', base_url='unix://var/run/docker.sock', timeout=120)
    # Re-use the image of the container this script itself runs in.
    container_id = os.environ['container_uuid']
    image = cli.inspect_container(container_id)['Image']
    # First probe: plain DNS lookup; exit code 0 means resolution worked.
    container = cli.create_container(image, command='getent hosts redhat.com')
    cli.start(container=container.get('Id'))
    exit_code = cli.wait(container)
    # Cleanup is retried a few times since remove_container can fail
    # transiently (APIError).
    for i in range(0, 3):
        try:
            cli.remove_container(container.get('Id'))
            break
        except APIError:
            print "Error while cleaning up container."
            time.sleep(5)
    # Second probe: same lookup but killed after 0.2s, so a non-zero exit
    # code flags *slow* (not just broken) DNS resolution.
    container = cli.create_container(image, command='timeout 0.2s getent hosts redhat.com')
    cli.start(container=container.get('Id'))
    timeout_exit_code = cli.wait(container)
    for i in range(0, 3):
        try:
            cli.remove_container(container.get('Id'))
            break
        except APIError:
            print "Error while cleaning up container."
            time.sleep(5)
    # Report both exit codes to the monitoring system.
    ms = MetricSender()
    ms.add_metric({ZBX_KEY: exit_code})
    ms.add_metric({ZBX_KEY_TIMEOUT: timeout_exit_code})
    print "Sending these metrics:"
    print ZBX_KEY + ": " + str(exit_code)
    print ZBX_KEY_TIMEOUT + ": " + str(timeout_exit_code)
    ms.send_metrics()
    print "\nDone.\n"
| blrm/openshift-tools | scripts/monitoring/cron-send-docker-dns-resolution.py | Python | apache-2.0 | 1,920 |
import sys
def setup(core, object):
object.setAttachment('radial_filename', 'deeds/harvesterDeed')
object.setAttachment('ConstructorTemplate', 'object/installation/mining_ore/construction/shared_construction_mining_ore_harvester_style_1.iff')
object.setAttachment('StructureTemplate', 'object/installation/mining_gas/shared_mining_gas_harvester_style_1.iff')
object.setAttachment('LotRequirement', 1)
object.setIntAttribute('examine_maintenance_rate', 16)
return | agry/NGECore2 | scripts/object/tangible/deed/harvester_deed/harvester_gas_deed.py | Python | lgpl-3.0 | 469 |
# Author: Konrad Lindenbach <klindenb@ualberta.ca>,
# Emmanuel Odeke <odeke@ualberta.ca>
# Copyright (c) 2014
# Table name strings
MESSAGE_TABLE_KEY = "Message"
RECEIPIENT_TABLE_KEY = "Receipient"
MESSAGE_MARKER_TABLE_KEY = "MessageMarker"
MAX_NAME_LENGTH = 60 # Arbitrary value
MAX_BODY_LENGTH = 200 # Arbitrary value
MAX_ALIAS_LENGTH = 60 # Arbitrary value
MAX_TOKEN_LENGTH = 512 # Arbitrary value
MAX_SUBJECT_LENGTH = 80 # Arbitrary value
MAX_PROFILE_URI_LENGTH = 400 # Arbitrary value
| odeke-em/restAssured | chatServer/chatServerConstants.py | Python | mit | 500 |
import kivy
kivy.require('1.1.3')
from kivy.properties import NumericProperty
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.anchorlayout import AnchorLayout
from kivy.uix.button import Button
from kivy.uix.widget import Widget
from kivy.uix.scatter import Scatter
from kivy.uix.treeview import TreeView, TreeViewLabel
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.properties import StringProperty
from kivy.clock import Clock
import random
class Showcase(FloatLayout):
    # Root widget; its children (e.g. `content`) are presumably defined
    # in the app's .kv rules — verify against showcase.kv.
    pass
class KivyImageScatter(Scatter):
    # Draggable/zoomable image scatter; appearance defined in kv.
    pass
class ButtonsScatter(Scatter):
    # Scatter containing buttons; appearance defined in kv.
    pass
class AnchorLayoutShowcase(FloatLayout):
    """Demo that steps anchor_x/anchor_y through all combinations,
    one step per second (the kv rules bind these properties to an
    AnchorLayout)."""
    anchor_x = StringProperty('left')
    anchor_y = StringProperty('top')
    def __init__(self, **kwargs):
        super(AnchorLayoutShowcase, self).__init__(**kwargs)
        Clock.schedule_once(self.change_anchor, 1)
    def change_anchor(self, *l):
        # Cycle left -> center -> right and top -> center -> bottom,
        # then re-schedule ourselves for the next step.
        if self.anchor_x == 'left':
            self.anchor_x = 'center'
        elif self.anchor_x == 'center':
            self.anchor_x = 'right'
        else:
            self.anchor_x = 'left'
        if self.anchor_y == 'top':
            self.anchor_y = 'center'
        elif self.anchor_y == 'center':
            self.anchor_y = 'bottom'
        else:
            self.anchor_y = 'top'
        Clock.schedule_once(self.change_anchor, 1)
class BoxLayoutShowcase(FloatLayout):
    """Demo that adds a button per second to a BoxLayout, flipping its
    orientation after every 5 buttons.  `self.blayout` is presumably
    defined in the kv rules — verify against showcase.kv."""
    def __init__(self, **kwargs):
        super(BoxLayoutShowcase, self).__init__(**kwargs)
        self.buttons = 0
        self.txt = 'horizontal'
        self.text_size = self.size
        Clock.schedule_once(self.add_button, 1)
    def add_button(self, *l):
        self.buttons += 1
        if self.buttons > 5:
            # Reset: clear the layout and toggle orientation.
            self.buttons = 0
            self.blayout.clear_widgets()
            if self.txt == "vertical":
                self.txt = self.blayout.orientation = 'horizontal'
            else:
                self.txt = self.blayout.orientation = 'vertical'
        btn = Button(text=self.txt, halign='center', valign='middle')
        # Keep the label's wrapping box in sync with the button size.
        btn.bind(size=btn.setter('text_size'))
        self.blayout.add_widget(btn)
        Clock.schedule_once(self.add_button, 1)
class FloatLayoutShowcase(FloatLayout):
    """Demo that drops a button at a random position every second,
    clearing after 5.  `self.flayout` comes from the kv rules."""
    def __init__(self, **kwargs):
        super(FloatLayoutShowcase, self).__init__(**kwargs)
        self.buttons = 0
        Clock.schedule_once(self.add_button, 1)
    def add_button(self, *l):
        self.buttons += 1
        if self.buttons > 5:
            self.buttons = 0
            self.flayout.clear_widgets()
        # Fixed-size button, randomly placed via pos_hint.
        self.flayout.add_widget(Button(text='no restrictions\n what so ever',
            size_hint=(None, None), size=(150, 40),
            pos_hint={'x': random.random(), 'y': random.random()}))
        Clock.schedule_once(self.add_button, 1)
class GridLayoutShowcase(FloatLayout):
    """Demo that fills a GridLayout with buttons, alternating between a
    3-row and a 3-column constraint every 10 buttons.  `self.glayout`
    comes from the kv rules."""
    def __init__(self, **kwargs):
        super(GridLayoutShowcase, self).__init__(**kwargs)
        self.buttons = 0
        # Remember the kv defaults (currently unused afterwards).
        self.cols_default = self.glayout.cols
        self.rows_default = self.glayout.rows
        self.glayout.rows = 3
        self.txt = "rows = 3"
        Clock.schedule_once(self.add_button, 1)
    def add_button(self, *l):
        self.buttons += 1
        if self.buttons > 10:
            self.buttons = 0
            self.glayout.clear_widgets()
            if self.txt == "rows = 3":
                self.glayout.cols = 3
                self.glayout.rows = 7
                self.txt = "cols = 3"
            else:
                self.glayout.rows = 3
                self.glayout.cols = 7
                self.txt = "rows = 3"
        self.glayout.add_widget(Button(text=self.txt))
        Clock.schedule_once(self.add_button, 1)
class StackLayoutShowcase(FloatLayout):
    """Demo that stacks growing buttons twice a second, cycling through
    all eight StackLayout orientations.  `self.slayout` comes from the
    kv rules."""
    def __init__(self, **kwargs):
        super(StackLayoutShowcase, self).__init__(**kwargs)
        self.buttons = 0
        self.orientationit = 0
        self.txt = 'lr-tb'
        Clock.schedule_once(self.add_button, 1)
    def add_button(self, *l):
        # All valid StackLayout orientations (fill direction pairs).
        orientations = ('lr-tb', 'tb-lr',
                        'rl-tb', 'tb-rl',
                        'lr-bt', 'bt-lr',
                        'rl-bt', 'bt-rl')
        self.buttons += 1
        if self.buttons > 11:
            self.buttons = 0
            self.slayout.clear_widgets()
            self.orientationit = (self.orientationit + 1) % len(orientations)
            self.slayout.orientation = orientations[self.orientationit]
            self.txt = self.slayout.orientation
        # Each button is slightly larger than the previous one.
        self.slayout.add_widget(Button(
            text=("%s %d" % (self.txt, self.buttons)),
            size_hint=(.1 + self.buttons * 0.02, .1 + self.buttons * 0.01)))
        Clock.schedule_once(self.add_button, .5)
class StandardWidgets(FloatLayout):
    """Demo page for stock widgets.  `value` grows in (roughly) real
    time seconds; presumably bound by kv rules to progress-style
    widgets — verify against showcase.kv."""
    value = NumericProperty(0)
    def __init__(self, **kwargs):
        super(StandardWidgets, self).__init__(**kwargs)
        # ~30 updates per second.
        Clock.schedule_interval(self.increment_value, 1 / 30.)
    def increment_value(self, dt):
        # dt is the elapsed time since the previous tick.
        self.value += dt
class ComplexWidgets(FloatLayout):
    # Layout defined entirely in kv.
    pass
class TreeViewWidgets(FloatLayout):
    # Holds `treeview1`/`treeview2`, populated by ShowcaseApp.
    pass
class ShowcaseApp(App):
    """Kivy showcase application: a tree of demo names on the left;
    selecting a node swaps the demo widget shown on the right.

    NOTE: this file is Python 2 (`except Exception, e`, `xrange`).
    """
    def on_select_node(self, instance, value):
        """Tree selection handler: build the matching show_* widget."""
        # ensure that any keyboard is released
        self.content.get_parent_window().release_keyboard()
        self.content.clear_widgets()
        # Map e.g. "Box Layout" -> self.show_box_layout().
        try:
            w = getattr(self, 'show_%s' %
                        value.text.lower().replace(' ', '_'))()
            self.content.add_widget(w)
        except Exception, e:
            print e
    def on_pause(self):
        # Allow pausing on mobile platforms instead of being killed.
        return True
    def build(self):
        """Create the root layout: navigation tree + content area."""
        root = BoxLayout(orientation='horizontal', padding=20, spacing=20)
        tree = TreeView(
            size_hint=(None, 1), width=200, hide_root=True, indent_level=0)
        # Helpers for building the two-level navigation tree.
        def create_tree(text):
            return tree.add_node(TreeViewLabel(
                text=text, is_open=True, no_selection=True))
        def attach_node(text, n):
            tree.add_node(TreeViewLabel(text=text), n)
        tree.bind(selected_node=self.on_select_node)
        n = create_tree('Widgets')
        attach_node('Standard widgets', n)
        attach_node('Complex widgets', n)
        attach_node('Scatters', n)
        attach_node('Treeviews', n)
        attach_node('Popup', n)
        n = create_tree('Layouts')
        attach_node('Anchor Layout', n)
        attach_node('Box Layout', n)
        attach_node('Float Layout', n)
        attach_node('Grid Layout', n)
        attach_node('Stack Layout', n)
        root.add_widget(tree)
        self.content = content = BoxLayout()
        root.add_widget(content)
        sc = Showcase()
        sc.content.add_widget(root)
        # Show the first demo page by default.
        self.content.add_widget(StandardWidgets())
        return sc
    def show_standard_widgets(self):
        return StandardWidgets()
    def show_complex_widgets(self):
        return ComplexWidgets()
    def show_anchor_layout(self):
        return AnchorLayoutShowcase()
    def show_box_layout(self):
        return BoxLayoutShowcase()
    def show_float_layout(self):
        return FloatLayoutShowcase()
    def show_grid_layout(self):
        return GridLayoutShowcase()
    def show_stack_layout(self):
        return StackLayoutShowcase()
    def show_scatters(self):
        """Two scatters side by side, offset from the content center."""
        col = Widget()
        center = self.content.center_x - 150, self.content.center_y
        s = KivyImageScatter(center=center)
        col.add_widget(s)
        center = self.content.center_x + 150, self.content.center_y
        s = ButtonsScatter(size=(300, 200))
        s.center = center
        col.add_widget(s)
        return col
    def show_popup(self):
        """Open a modal popup immediately and return a reopen button."""
        btnclose = Button(text='Close this popup', size_hint_y=None, height='50sp')
        content = BoxLayout(orientation='vertical')
        content.add_widget(Label(text='Hello world'))
        content.add_widget(btnclose)
        popup = Popup(content=content, title='Modal popup example',
                      size_hint=(None, None), size=('300dp', '300dp'))
        btnclose.bind(on_release=popup.dismiss)
        button = Button(text='Open popup', size_hint=(None, None),
                        size=('150sp', '70dp'),
                        on_release=popup.open)
        popup.open()
        col = AnchorLayout()
        col.add_widget(button)
        return col
    def show_treeviews(self):
        tv = TreeViewWidgets()
        self.populate_treeview(tv.treeview1)
        self.populate_treeview(tv.treeview2)
        return tv
    def populate_treeview(self, tv):
        """Fill *tv* with three items of three subitems each."""
        n = tv.add_node(TreeViewLabel(text='Item 1'))
        for x in xrange(3):
            tv.add_node(TreeViewLabel(text='Subitem %d' % x), n)
        n = tv.add_node(TreeViewLabel(text='Item 2', is_open=True))
        for x in xrange(3):
            tv.add_node(TreeViewLabel(text='Subitem %d' % x), n)
        n = tv.add_node(TreeViewLabel(text='Item 3'))
        for x in xrange(3):
            tv.add_node(TreeViewLabel(text='Subitem %d' % x), n)
        return tv
if __name__ == '__main__':
    ShowcaseApp().run()
| nuigroup/kivy | examples/demo/showcase/main.py | Python | lgpl-3.0 | 9,118 |
'''
Created on 04.10.2012
@author: michi
'''
from PyQt4.QtCore import pyqtSignal
from ems.qt4.applicationservice import ApplicationService #@UnresolvedImport
class ModelUpdateService(ApplicationService):
    """Application-wide hub for broadcasting model-change notifications.

    Writers call :meth:`triggerUpdate`; views interested in refreshes
    connect to the signals below.
    """
    # Emitted when specific objects (identified by keys) of one model changed.
    objectIdsUpdated = pyqtSignal(str, list)
    # Emitted when an unspecified set of objects of one model changed.
    objectsUpdated = pyqtSignal(str)
    # Always emitted after either of the above.
    modelUpdated = pyqtSignal()
    def triggerUpdate(self, modelObjectName, keys=None):
        """Announce a change to *modelObjectName*.

        With *keys* given, only those objects are reported as changed;
        otherwise the whole model is reported.  In both cases the
        catch-all modelUpdated signal fires afterwards.
        """
        if keys is not None:
            self.objectIdsUpdated.emit(modelObjectName, keys)
        else:
            self.objectsUpdated.emit(modelObjectName)
        self.modelUpdated.emit()
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import find_packages
from setuptools import setup
# Directory containing this setup.py (and README.rst).
PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
# The README becomes the long description shown on PyPI.
with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj:
    README = file_obj.read()
# NOTE: This is duplicated throughout and we should try to
# consolidate.
SETUP_BASE = {
    'author': 'Google Cloud Platform',
    'author_email': 'googleapis-publisher@google.com',
    'scripts': [],
    'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python',
    'license': 'Apache 2.0',
    'platforms': 'Posix; MacOS X; Windows',
    'include_package_data': True,
    'zip_safe': False,
    'classifiers': [
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet',
    ],
}
REQUIREMENTS = [
    'google-cloud-core >= 0.26.0, < 0.27dev',
]
setup(
    name='google-cloud-runtimeconfig',
    version='0.26.0',
    description='Python Client for Google Cloud RuntimeConfig',
    long_description=README,
    # Namespace packages let google.cloud.* ship as separate dists.
    namespace_packages=[
        'google',
        'google.cloud',
    ],
    packages=find_packages(exclude=('tests*',)),
    install_requires=REQUIREMENTS,
    **SETUP_BASE
)
| calpeyser/google-cloud-python | runtimeconfig/setup.py | Python | apache-2.0 | 2,169 |
from file_transfer_helper import SendFileTest, ReceiveFileTest, \
exec_file_transfer_test
from config import JINGLE_FILE_TRANSFER_ENABLED
# Skip the whole test when the build lacks file-transfer support;
# 77 is the conventional "test skipped" exit status.
if not JINGLE_FILE_TRANSFER_ENABLED:
    print "NOTE: built with --disable-file-transfer or --disable-voip"
    raise SystemExit(77)
class ReceiveFileAndDisconnectTest(ReceiveFileTest):
    """Receiver that connects to the sender and then drops the transfer
    immediately, exercising the sender's disconnect handling."""
    def receive_file(self):
        s = self.create_socket()
        s.connect(self.address)
        # return True so the test will be ended and the connection
        # disconnected
        return True
if __name__ == '__main__':
    exec_file_transfer_test(SendFileTest, ReceiveFileAndDisconnectTest)
| Ziemin/telepathy-gabble | tests/twisted/jingle-share/test-receive-file-and-disconnect.py | Python | lgpl-2.1 | 638 |
def ExOh(str):
    """Print "true" if *str* contains the same number of 'x' and 'o'
    characters (case-sensitive), otherwise print "false".

    ``print(...)`` is used so the code runs under both Python 2 (where
    it is a statement printing a parenthesised expression) and Python 3.
    """
    # str.count scans in C; no need to copy the string into a list and
    # count by hand as the original did.
    xcount = str.count("x")
    ocount = str.count("o")
    if xcount == ocount:
        print("true")
    else:
        # Counts differ, so a plain `else` replaces the original's
        # redundant `elif xcount != ocount`.
        print("false")
ExOh(raw_input()) | ohgodscience/Python | Exercises/ExOh.py | Python | gpl-2.0 | 230 |
# -*- coding: utf-8 -*-
"""Handlers for application signals."""
import six
from .models.terminal import clean_order
from .storage.strategies import SoftDeleteStrategy
# pylint: disable=unused-argument
def store_ssh_key(sender, command, instance):
    """Write the instance's private key to its key file.

    Does nothing when the instance carries no private key.  The parent
    directory is created on demand and the file mode is tightened to
    ``instance.file_mode`` afterwards.  *sender* is ignored.
    """
    if not instance.private_key:
        return
    key_path = instance.file_path(command)
    parent_dir = key_path.parent
    if not parent_dir.is_dir():
        parent_dir.mkdir(parents=True)
    key_path.write_text(six.text_type(instance.private_key))
    key_path.chmod(instance.file_mode)
# pylint: disable=unused-argument
def delete_ssh_key(sender, command, instance):
    """Remove the instance's private-key file, if it exists.

    A missing file is silently ignored; *sender* is ignored.
    """
    key_file = instance.file_path(command)
    if not key_file.is_file():
        return
    key_file.unlink()
def clean_data(sender, command, email):
    """Wipe all stored data for the account identified by *email*.

    The heavy lifting happens in :func:`_clean_data`; this wrapper
    merely opens the storage context around it.  *sender* and *email*
    are not used directly here.
    """
    storage = command.storage
    with storage:
        _clean_data(storage)
def _clean_data(storage):
    """Delete every stored instance model by model, then confirm.

    Models are visited in ``clean_order`` — presumably arranged so that
    dependent records are removed before their parents (verify against
    models.terminal).  The soft-delete bookkeeping is flushed at the end.
    """
    for model in clean_order:
        for record in storage.get_all(model):
            storage.delete(record)
    strategy = SoftDeleteStrategy(storage)
    storage.confirm_delete(strategy.get_delete_sets())
| Crystalnix/serverauditor-sshconfig | termius/core/subscribers.py | Python | bsd-3-clause | 1,154 |
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
float_or_none,
int_or_none,
)
class DotsubIE(InfoExtractor):
    # Extractor for videos hosted on (or embedded via) dotsub.com.
    _VALID_URL = r'https?://(?:www\.)?dotsub\.com/view/(?P<id>[^/]+)'
    _TESTS = [{
        'url': 'https://dotsub.com/view/9c63db2a-fa95-4838-8e6e-13deafe47f09',
        'md5': '21c7ff600f545358134fea762a6d42b6',
        'info_dict': {
            'id': '9c63db2a-fa95-4838-8e6e-13deafe47f09',
            'ext': 'flv',
            'title': 'MOTIVATION - "It\'s Possible" Best Inspirational Video Ever',
            'description': 'md5:41af1e273edbbdfe4e216a78b9d34ac6',
            'thumbnail': 're:^https?://dotsub.com/media/9c63db2a-fa95-4838-8e6e-13deafe47f09/p',
            'duration': 198,
            'uploader': 'liuxt',
            'timestamp': 1385778501.104,
            'upload_date': '20131130',
            'view_count': int,
        }
    }, {
        'url': 'https://dotsub.com/view/747bcf58-bd59-45b7-8c8c-ac312d084ee6',
        'md5': '2bb4a83896434d5c26be868c609429a3',
        'info_dict': {
            'id': '168006778',
            'ext': 'mp4',
            'title': 'Apartments and flats in Raipur the white symphony',
            'description': 'md5:784d0639e6b7d1bc29530878508e38fe',
            'thumbnail': 're:^https?://dotsub.com/media/747bcf58-bd59-45b7-8c8c-ac312d084ee6/p',
            'duration': 290,
            'timestamp': 1476767794.2809999,
            'upload_date': '20161018',
            'uploader': 'parthivi001',
            'uploader_id': 'user52596202',
            'view_count': int,
        },
        'add_ie': ['Vimeo'],
    }]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        # The metadata endpoint may or may not carry a direct media URI.
        info = self._download_json(
            'https://dotsub.com/api/media/%s/metadata' % video_id, video_id)
        video_url = info.get('mediaURI')
        if not video_url:
            # Fall back to scraping the player page for a source URL.
            webpage = self._download_webpage(url, video_id)
            video_url = self._search_regex(
                [r'<source[^>]+src="([^"]+)"', r'"file"\s*:\s*\'([^\']+)'],
                webpage, 'video url', default=None)
        info_dict = {
            'id': video_id,
            'url': video_url,
            'ext': 'flv',
        }
        if not video_url:
            # Still nothing: the page embeds a third-party player (e.g.
            # Vimeo) configured via data-setup; delegate extraction to it.
            setup_data = self._parse_json(self._html_search_regex(
                r'(?s)data-setup=([\'"])(?P<content>(?!\1).+?)\1',
                webpage, 'setup data', group='content'), video_id)
            info_dict = {
                '_type': 'url_transparent',
                'url': setup_data['src'],
            }
        # Metadata (title, timestamps, ...) comes from the JSON either way.
        info_dict.update({
            'title': info['title'],
            'description': info.get('description'),
            'thumbnail': info.get('screenshotURI'),
            'duration': int_or_none(info.get('duration'), 1000),
            'uploader': info.get('user'),
            'timestamp': float_or_none(info.get('dateCreated'), 1000),
            'view_count': int_or_none(info.get('numberOfViews')),
        })
        return info_dict
| vinegret/youtube-dl | youtube_dl/extractor/dotsub.py | Python | unlicense | 3,079 |
import os
import tempfile
import unittest
import shutil
from unittest.mock import MagicMock
# Sample URLs, one per downloader backend; "fail" is deliberately a page
# no downloader handles.
test_links = {
    "direct": "https://i.imgur.com/AaLX1Wn.jpg",
    "gfycat": "https://gfycat.com/QualifiedDefensiveAddax",
    "imgur_link": "https://imgur.com/AaLX1Wn",
    "imgur_album": "https://imgur.com/a/IEKXq",
    "imgur_gifv": "https://i.imgur.com/bJ0h81c.gifv",
    "redditbooru_gallery": "http://awwnime.redditbooru.com/gallery/nchg/chiyo-chan/",
    "deviantart": "http://magusverus.deviantart.com/art/Oblivion-of-Mehrunes-153808629",
    "twitter": "https://twitter.com/BugInTheShell/status/670658715813601281",
    "fail": "https://github.com/SeriousBug/redditcurl"
}
# The same backends, pointing at nonexistent resources (expected 404s).
test_links_404 = {
    "direct": "https://i.imgur.com/000000.jpg",
    "gfycat": "https://gfycat.com/00000000000000",
    "imgur_link": "https://imgur.com/000000000",
    "imgur_album": "https://imgur.com/a/00000000000",
    "imgur_gifv": "https://i.imgur.com/00000000.gifv",
    "redditbooru_gallery": "http://awwnime.redditbooru.com/gallery/000000/000000000/",
    "deviantart": "http://magusverus.deviantart.com/art/Oblivion-of-Mehrunes-000000000",
    "twitter": "https://twitter.com/BugInTheShell/status/0",
}
# Simulated command-line arguments.
test_args = [ "--savedir", "testdir"
            , "--processes", "10"
            , "--subfolders"
            , "--subreddits", "test,testing"]
# Minimal config-file contents for the main and oauth sections.
test_config = """
[redditcurl]
savedir = testdir
processes = 0
notitles = true
"""
test_config_auth = """
[oauth]
clientid = testid
redirect = http://testredirect
refresh_token = refreshtoken
access_token = accesstoken
"""
def create_submission(url="", title="", subreddit=""):
    """Return a MagicMock shaped like a praw submission object.

    Only the attributes the downloader touches are set: `url`, `title`
    and `subreddit.display_name`.
    """
    fake_post = MagicMock()
    fake_post.url = url
    fake_post.title = title
    fake_post.subreddit.display_name = subreddit
    return fake_post
# One mock submission per sample link, all attributed to "testsubreddit".
test_submissions = [create_submission(url, title, "testsubreddit")
                    for title, url in test_links.items()]
# Creates a downloaded items list, with all test links as successfully
# downloaded except "fail" link.  (The original wrapped this comparison in
# an immediately-invoked lambda, `(lambda x: x != "fail")(title)`, which
# is just `title != "fail"`.)
test_downloaded = [(url, title != "fail") for title, url in test_links.items()]
class EnterTemp(unittest.TestCase):
    """Run every test from inside a fresh temporary directory.

    ``setUp`` switches the process working directory to a brand-new temp
    folder and creates an empty subfolder named "sub" in it; ``tearDown``
    removes the whole tree afterwards.
    """

    def setUp(self):
        workdir = tempfile.mkdtemp()
        os.chdir(workdir)
        os.mkdir("sub")

    def tearDown(self):
        shutil.rmtree(os.getcwd(), ignore_errors=True)
| SeriousBug/redditcurl | tests/test_base.py | Python | gpl-3.0 | 2,526 |
from .matrix import MatrixDistance
from .matrix_cosine import MatrixCosineDistance

# __all__ must contain *names* (strings).  Listing the class objects
# themselves makes `from annfab.distances import *` raise a TypeError
# ("Item in __all__ must be str").
__all__ = ["MatrixCosineDistance", "MatrixDistance"]
| elezar/ann-fab | annfab/distances/__init__.py | Python | mit | 134 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, io
import sys, getopt
import random
from openpyxl import load_workbook
# input: words list
# output
# 1. random matrix view with colume and row parameter
#
#
def MatrixFromXLS(filepath, sel_sheets, river):
    """Read words from the selected sheets of an .xlsx workbook.

    Returns a dict mapping sheet name -> list of words, each padded
    (left-justified) to `river` characters.  Column B is preferred over
    column A when it is non-empty.
    NOTE(review): uses the legacy openpyxl get_sheet_names() API and
    Python 2 print statements -- confirm runtime environment.
    """
    print filepath
    wb = load_workbook(filename=filepath, read_only=True, data_only=True)
    worksheets = wb.get_sheet_names()
    print worksheets
    words = {}
    formatstr = ''
    print sorted(worksheets)
    for sheetname in sorted(worksheets):
        print sheetname
        # Skip any sheet not explicitly listed in sel_sheets.
        iSskip = True
        for sheetsel in sel_sheets:
            if sheetsel is not None and sheetname == sheetsel:
                iSskip = False
                break
        if iSskip:
            continue
        ws = wb[sheetname]
        words[sheetname] = []
        # e.g. river=16 -> '%-16s' (left-justified, padded to 16 chars)
        formatstr = '%%-%ds' % river
        for row in ws.rows:
            if len(row) > 1:
                if row[1].value == None:
                    if row[0].value != None:
                        words[sheetname].append(formatstr % row[0].value)
                else:
                    if isinstance(row[1].value,float):
                        # Floats in column B: fall back to the stringified
                        # column A value -- presumably B holds numeric noise
                        # in that case; TODO confirm intent.
                        words[sheetname].append(formatstr % str(row[0].value))
                    else:
                        words[sheetname].append(formatstr % row[1].value)
            else:
                if row[0].value != None:
                    words[sheetname].append(formatstr % row[0].value)
    #'%-16s'
    return words
def MatrixFromRawTxt(inputfile):
content = []
with open(inputfile) as f:
for line in f.readlines():
word = line.strip().rstrip('\n')
if word != '':
content.append(word)
random.shuffle(content)
print {'allinone':content}
def CreateMatrixFile(content, outputfile, columns):
    """Write each word group as a matrix with `columns` words per row.

    `content` maps a group name to a list of pre-padded words (see
    MatrixFromXLS); each group is written as its name followed by rows of
    `columns` words.
    """
    with io.open(outputfile, 'w', encoding='utf8') as of:
        for key in content:
            k = 1
            wordline = ''
            of.writelines(u'\n')
            of.writelines(key.decode('ascii'))
            of.writelines(u'\n')
            for word in content[key]:
                if k % columns == 0:
                    # Row is full: strip the padding off the last word and
                    # flush the row to the file.
                    wordline += word.strip()
                    wordline += u'\n'
                    of.writelines(wordline)
                    wordline = ''
                else:
                    wordline += word
                k += 1
            # Flush the final partial row.  Previously it was only printed to
            # stdout, silently dropping trailing words from the output file.
            if wordline != '':
                of.writelines(wordline.rstrip() + u'\n')
if __name__ == '__main__':
    # Defaults: 5 words per row, 16-character padded column width.
    inputfile = ''
    outputfile = ''
    columns = 5
    river = 16
    sheetnames = []
    try:
        opts, args = getopt.getopt(sys.argv[1:],"hi:o:c:s:r:",["ifile=","ofile=","columns=","sheet=","river="])
    except getopt.GetoptError:
        print 'test.py -i <inputfile> -o <outputfile>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'test.py -i <inputfile> -s <xls sheet name> -o <outputfile> -c <numbers>'
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-o", "--ofile"):
            outputfile = arg
        elif opt in ("-c", "--columns"):
            columns = int(arg)
        elif opt in ("-s","--sheet"):
            # Comma-separated list of worksheet names to include.
            sheetnames = arg.split(",")
        elif opt in ("-r","--river"):
            river = int(arg)
    print 'Input file is "', inputfile
    print 'Output file is "', outputfile
    print sheetnames
    # Pipeline: xlsx -> {sheet: [padded words]} -> matrix-formatted text file.
    dat = MatrixFromXLS(inputfile, sheetnames,river)
    #MatrixFromRawTxt(inputfile,outputfile,columns)
    CreateMatrixFile(dat,outputfile,columns)
| howmind/tools | WordsMatrix.py | Python | apache-2.0 | 3,621 |
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
from pytraits import extendable
# Let's start by creating a simple class with some values. It contains
# class variables and instance variables. Composed functions will have
# access to all these variables.
@extendable
class ExampleClass(object):
    # Class-level constants at the three Python visibility conventions;
    # composed trait functions are expected to read all of them.
    PUBLIC = 24
    _HIDDEN = 25
    __PRIVATE = 26
    def __init__(self):
        # Instance attributes mirroring the class-level visibility levels.
        self.public = 42
        self._hidden = 43
        self.__private = 44
def new_method(self):
    # Trait function meant to be composed into an instance.  Access to
    # self.__private works after composition because pytraits appears to
    # re-apply name mangling for the target class -- TODO confirm.
    return self.public, self._hidden, self.__private
def new_class_function(cls):
    # Trait composed as a classmethod; cls.__PRIVATE presumably gets
    # re-mangled by pytraits at composition time -- TODO confirm.
    return cls.PUBLIC, cls._HIDDEN, cls.__PRIVATE
def new_static_function():
    """Trait composed as a staticmethod; always yields the fixed triple."""
    return (1, 2, 3)
# Create instance of ExampleClass and compose cherry-picked functions into it.
# (add_traits is provided by the @extendable decorator from pytraits.)
example_instance = ExampleClass()
example_instance.add_traits(new_method, new_class_function, new_static_function)
# Here are the proofs that composed functions work as part of new instance. Also
# we demonstrate that original class is still untouched.
assert example_instance.new_static_function() == (1, 2, 3),\
    "Instance composition fails with static method in instance!"
assert example_instance.new_class_function() == (24, 25, 26),\
    "Instance composition fails with class method in instance!"
assert example_instance.new_method() == (42, 43, 44),\
    "Instance composition fails with instance method!"
# Composition targeted the instance only: the class gained no attributes.
assert not hasattr(ExampleClass, "new_static_function"),\
    "Instance composition fails due to class has changed!"
assert not hasattr(ExampleClass, "new_class_function"),\
    "Instance composition fails due to class has changed!"
assert not hasattr(ExampleClass, "new_method"),\
    "Instance composition fails due to class has changed!"
| justanr/py3traits | examples/function_is_composed_as_a_part_of_instance.py | Python | apache-2.0 | 1,697 |
from unexistent_import import * | siddhika1889/Pydev-Editor | tests/pysrc/extendable/recursion_on_non_existent/__init__.py | Python | epl-1.0 | 31 |
# -*- coding -*-
"""
Provides step definitions for beehive based on beehive4cmd.
REQUIRES:
* beehive4cmd.steplib.output steps (command output from beehive).
"""
from beehive import then
from beehive.runner_util import make_undefined_step_snippet
# -----------------------------------------------------------------------------
# UTILITY FUNCTIONS:
# -----------------------------------------------------------------------------
def text_indent(text, indent_size=0):
    """Join the lines of *text* using an ``indent_size``-space prefix.

    Note: the prefix acts as the *separator*, so the first line is left
    untouched and only continuation lines are indented.
    """
    return (" " * indent_size).join(text.splitlines(True))
# -----------------------------------------------------------------------------
# STEPS FOR: Undefined step definitions
# -----------------------------------------------------------------------------
@then(u'an undefined-step snippets section exists')
def step_undefined_step_snippets_section_exists(context):
    """
    Checks if an undefined-step snippet section is in beehive command output.
    """
    # Delegates to the beehive4cmd output steps; the quoted text must match
    # the runner's summary line verbatim.
    context.execute_steps(u'''
        Then the command output should contain:
            """
            You can implement step definitions for undefined steps with these snippets:
            """
    ''')
@then(u'an undefined-step snippet should exist for "{step}"')
def step_undefined_step_snippet_should_exist_for(context, step):
    """
    Checks if an undefined-step snippet is provided for a step
    in beehive command output (last command).
    EXAMPLE:
        Then an undefined-step snippet should exist for "Given an undefined step"
    """
    # Render the snippet exactly as the runner would, then indent it to line
    # up under the docstring marker of the generated step text.
    undefined_step_snippet = make_undefined_step_snippet(step)
    context.execute_steps(u'''\
        Then the command output should contain:
            """
            {undefined_step_snippet}
            """
        '''.format(undefined_step_snippet=text_indent(undefined_step_snippet, 4)))
@then(u'an undefined-step snippet should not exist for "{step}"')
def step_undefined_step_snippet_should_not_exist_for(context, step):
    """
    Checks if an undefined-step snippet is provided for a step
    in beehive command output (last command).
    """
    # Negative counterpart of step_undefined_step_snippet_should_exist_for:
    # the rendered snippet must be absent from the command output.
    undefined_step_snippet = make_undefined_step_snippet(step)
    context.execute_steps(u'''\
        Then the command output should not contain:
            """
            {undefined_step_snippet}
            """
        '''.format(undefined_step_snippet=text_indent(undefined_step_snippet, 4)))
@then(u'undefined-step snippets should exist for')
def step_undefined_step_snippets_should_exist_for_table(context):
    """
    Checks that an undefined-step snippet is offered for every step listed
    in the table's "Step" column.

    EXAMPLE:
        Then undefined-step snippets should exist for:
            | Step |
            | When an undefined step is used |
            | Then another undefined step is used |
    """
    assert context.table, "REQUIRES: table"
    for table_row in context.table.rows:
        step_undefined_step_snippet_should_exist_for(context, table_row["Step"])
@then(u'undefined-step snippets should not exist for')
def step_undefined_step_snippets_should_not_exist_for_table(context):
    """
    Checks that no undefined-step snippet is offered for any step listed
    in the table's "Step" column.

    EXAMPLE:
        Then undefined-step snippets should not exist for:
            | Step |
            | When an known step is used |
            | Then another known step is used |
    """
    assert context.table, "REQUIRES: table"
    for table_row in context.table.rows:
        step_undefined_step_snippet_should_not_exist_for(context, table_row["Step"])
| vrutkovs/beehive | features/steps/beehive_undefined_steps.py | Python | bsd-2-clause | 3,408 |
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Common API for all public keys.
"""
import base64
from binascii import hexlify, unhexlify
import os
from hashlib import md5
from Crypto.Cipher import DES3, AES
from paramiko import util
from paramiko.common import o600, zero_byte
from paramiko.py3compat import u, encodebytes, decodebytes, b
from paramiko.ssh_exception import SSHException, PasswordRequiredException
class PKey (object):
    """
    Base class for public keys.
    """
    # known encryption types for private key files:
    _CIPHER_TABLE = {
        'AES-128-CBC': {'cipher': AES, 'keysize': 16, 'blocksize': 16, 'mode': AES.MODE_CBC},
        'DES-EDE3-CBC': {'cipher': DES3, 'keysize': 24, 'blocksize': 8, 'mode': DES3.MODE_CBC},
    }
    def __init__(self, msg=None, data=None):
        """
        Create a new instance of this public key type. If ``msg`` is given,
        the key's public part(s) will be filled in from the message. If
        ``data`` is given, the key's public part(s) will be filled in from
        the string.
        :param .Message msg:
            an optional SSH `.Message` containing a public key of this type.
        :param str data: an optional string containing a public key of this type
        :raises SSHException:
            if a key cannot be created from the ``data`` or ``msg`` given, or
            no key was passed in.
        """
        pass
    def asbytes(self):
        """
        Return a string of an SSH `.Message` made up of the public part(s) of
        this key. This string is suitable for passing to `__init__` to
        re-create the key object later.
        """
        return bytes()
    def __str__(self):
        # NOTE(review): returns bytes; under Python 3 str(key) would raise
        # TypeError -- presumably a Python 2-era API. Confirm support scope.
        return self.asbytes()
    # noinspection PyUnresolvedReferences
    def __cmp__(self, other):
        """
        Compare this key to another. Returns 0 if this key is equivalent to
        the given key, or non-0 if they are different. Only the public parts
        of the key are compared, so a public key will compare equal to its
        corresponding private key.
        :param .Pkey other: key to compare to.
        """
        # Cheap hash comparison first; fall back to the full public blob.
        # NOTE(review): cmp() is the Python 2 builtin (absent in Python 3,
        # where __cmp__ is never consulted) -- confirm this path is py2-only.
        hs = hash(self)
        ho = hash(other)
        if hs != ho:
            return cmp(hs, ho)
        return cmp(self.asbytes(), other.asbytes())
    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ leaves the class
        # unhashable on Python 3 unless subclasses define __hash__; the
        # hash(self) calls here assume subclasses do -- confirm.
        return hash(self) == hash(other)
    def get_name(self):
        """
        Return the name of this private key implementation.
        :return:
            name of this private key type, in SSH terminology, as a `str` (for
            example, ``"ssh-rsa"``).
        """
        return ''
    def get_bits(self):
        """
        Return the number of significant bits in this key. This is useful
        for judging the relative security of a key.
        :return: bits in the key (as an `int`)
        """
        return 0
    def can_sign(self):
        """
        Return ``True`` if this key has the private part necessary for signing
        data.
        """
        return False
    def get_fingerprint(self):
        """
        Return an MD5 fingerprint of the public part of this key. Nothing
        secret is revealed.
        :return:
            a 16-byte `string <str>` (binary) of the MD5 fingerprint, in SSH
            format.
        """
        return md5(self.asbytes()).digest()
    def get_base64(self):
        """
        Return a base64 string containing the public part of this key. Nothing
        secret is revealed. This format is compatible with that used to store
        public key files or recognized host keys.
        :return: a base64 `string <str>` containing the public part of the key.
        """
        return u(encodebytes(self.asbytes())).replace('\n', '')
    def sign_ssh_data(self, data):
        """
        Sign a blob of data with this private key, and return a `.Message`
        representing an SSH signature message.
        :param str data: the data to sign.
        :return: an SSH signature `message <.Message>`.
        """
        return bytes()
    def verify_ssh_sig(self, data, msg):
        """
        Given a blob of data, and an SSH message representing a signature of
        that data, verify that it was signed with this key.
        :param str data: the data that was signed.
        :param .Message msg: an SSH signature message
        :return:
            ``True`` if the signature verifies correctly; ``False`` otherwise.
        """
        return False
    @classmethod
    def from_private_key_file(cls, filename, password=None):
        """
        Create a key object by reading a private key file. If the private
        key is encrypted and ``password`` is not ``None``, the given password
        will be used to decrypt the key (otherwise `.PasswordRequiredException`
        is thrown). Through the magic of Python, this factory method will
        exist in all subclasses of PKey (such as `.RSAKey` or `.DSSKey`), but
        is useless on the abstract PKey class.
        :param str filename: name of the file to read
        :param str password:
            an optional password to use to decrypt the key file, if it's
            encrypted
        :return: a new `.PKey` based on the given private key
        :raises IOError: if there was an error reading the file
        :raises PasswordRequiredException: if the private key file is
            encrypted, and ``password`` is ``None``
        :raises SSHException: if the key file is invalid
        """
        key = cls(filename=filename, password=password)
        return key
    @classmethod
    def from_private_key(cls, file_obj, password=None):
        """
        Create a key object by reading a private key from a file (or file-like)
        object. If the private key is encrypted and ``password`` is not
        ``None``, the given password will be used to decrypt the key (otherwise
        `.PasswordRequiredException` is thrown).
        :param file_obj: the file-like object to read from
        :param str password:
            an optional password to use to decrypt the key, if it's encrypted
        :return: a new `.PKey` based on the given private key
        :raises IOError: if there was an error reading the key
        :raises PasswordRequiredException:
            if the private key file is encrypted, and ``password`` is ``None``
        :raises SSHException: if the key file is invalid
        """
        key = cls(file_obj=file_obj, password=password)
        return key
    def write_private_key_file(self, filename, password=None):
        """
        Write private key contents into a file. If the password is not
        ``None``, the key is encrypted before writing.
        :param str filename: name of the file to write
        :param str password:
            an optional password to use to encrypt the key file
        :raises IOError: if there was an error writing the file
        :raises SSHException: if the key is invalid
        """
        raise Exception('Not implemented in PKey')
    def write_private_key(self, file_obj, password=None):
        """
        Write private key contents into a file (or file-like) object. If the
        password is not ``None``, the key is encrypted before writing.
        :param file_obj: the file-like object to write into
        :param str password: an optional password to use to encrypt the key
        :raises IOError: if there was an error writing to the file
        :raises SSHException: if the key is invalid
        """
        raise Exception('Not implemented in PKey')
    def _read_private_key_file(self, tag, filename, password=None):
        """
        Read an SSH2-format private key file, looking for a string of the type
        ``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we
        find, and return it as a string. If the private key is encrypted and
        ``password`` is not ``None``, the given password will be used to decrypt
        the key (otherwise `.PasswordRequiredException` is thrown).
        :param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the data block.
        :param str filename: name of the file to read.
        :param str password:
            an optional password to use to decrypt the key file, if it's
            encrypted.
        :return: data blob (`str`) that makes up the private key.
        :raises IOError: if there was an error reading the file.
        :raises PasswordRequiredException: if the private key file is
            encrypted, and ``password`` is ``None``.
        :raises SSHException: if the key file is invalid.
        """
        with open(filename, 'r') as f:
            data = self._read_private_key(tag, f, password)
        return data
    def _read_private_key(self, tag, f, password=None):
        # Locate the PEM "-----BEGIN <tag> PRIVATE KEY-----" marker.
        lines = f.readlines()
        start = 0
        while (start < len(lines)) and (lines[start].strip() != '-----BEGIN ' + tag + ' PRIVATE KEY-----'):
            start += 1
        if start >= len(lines):
            raise SSHException('not a valid ' + tag + ' private key file')
        # parse any headers first
        # (PEM headers look like "Proc-Type: 4,ENCRYPTED"; a line without
        # ": " ends the header section)
        headers = {}
        start += 1
        while start < len(lines):
            l = lines[start].split(': ')
            if len(l) == 1:
                break
            headers[l[0].lower()] = l[1].strip()
            start += 1
        # find end
        end = start
        while end < len(lines) and lines[end].strip() != '-----END ' + tag + ' PRIVATE KEY-----':
            end += 1
        # if we trudged to the end of the file, just try to cope.
        try:
            data = decodebytes(b(''.join(lines[start:end])))
        except base64.binascii.Error as e:
            raise SSHException('base64 decoding error: ' + str(e))
        if 'proc-type' not in headers:
            # unencryped: done
            return data
        # encrypted keyfile: will need a password
        if headers['proc-type'] != '4,ENCRYPTED':
            raise SSHException('Unknown private key structure "%s"' % headers['proc-type'])
        try:
            encryption_type, saltstr = headers['dek-info'].split(',')
        except:
            raise SSHException("Can't parse DEK-info in private key file")
        if encryption_type not in self._CIPHER_TABLE:
            raise SSHException('Unknown private key cipher "%s"' % encryption_type)
        # if no password was passed in, raise an exception pointing out that we need one
        if password is None:
            raise PasswordRequiredException('Private key file is encrypted')
        cipher = self._CIPHER_TABLE[encryption_type]['cipher']
        keysize = self._CIPHER_TABLE[encryption_type]['keysize']
        mode = self._CIPHER_TABLE[encryption_type]['mode']
        # DEK-Info's hex salt doubles as the CBC initialization vector.
        salt = unhexlify(b(saltstr))
        key = util.generate_key_bytes(md5, salt, password, keysize)
        return cipher.new(key, mode, salt).decrypt(data)
    def _write_private_key_file(self, tag, filename, data, password=None):
        """
        Write an SSH2-format private key file in a form that can be read by
        paramiko or openssh. If no password is given, the key is written in
        a trivially-encoded format (base64) which is completely insecure. If
        a password is given, DES-EDE3-CBC is used.
        :param str tag:
            ``"RSA"`` or ``"DSA"``, the tag used to mark the data block.
        :param filename: name of the file to write.
        :param str data: data blob that makes up the private key.
        :param str password: an optional password to use to encrypt the file.
        :raises IOError: if there was an error writing the file.
        """
        # NOTE(review): open()'s third positional argument is *buffering*, not
        # a permission mode -- the os.chmod below is what actually applies
        # 0600; confirm whether passing o600 here was intended.
        with open(filename, 'w', o600) as f:
            # grrr... the mode doesn't always take hold
            os.chmod(filename, o600)
            self._write_private_key(tag, f, data, password)
    def _write_private_key(self, tag, f, data, password=None):
        f.write('-----BEGIN %s PRIVATE KEY-----\n' % tag)
        if password is not None:
            # NOTE(review): cipher choice depends on dict key ordering of
            # _CIPHER_TABLE -- confirm which cipher is the intended default.
            cipher_name = list(self._CIPHER_TABLE.keys())[0]
            cipher = self._CIPHER_TABLE[cipher_name]['cipher']
            keysize = self._CIPHER_TABLE[cipher_name]['keysize']
            blocksize = self._CIPHER_TABLE[cipher_name]['blocksize']
            mode = self._CIPHER_TABLE[cipher_name]['mode']
            salt = os.urandom(blocksize)
            key = util.generate_key_bytes(md5, salt, password, keysize)
            if len(data) % blocksize != 0:
                n = blocksize - len(data) % blocksize
                #data += os.urandom(n)
                # that would make more sense ^, but it confuses openssh.
                data += zero_byte * n
            data = cipher.new(key, mode, salt).encrypt(data)
            f.write('Proc-Type: 4,ENCRYPTED\n')
            f.write('DEK-Info: %s,%s\n' % (cipher_name, u(hexlify(salt)).upper()))
            f.write('\n')
        s = u(encodebytes(data))
        # re-wrap to 64-char lines
        s = ''.join(s.split('\n'))
        s = '\n'.join([s[i: i + 64] for i in range(0, len(s), 64)])
        f.write(s)
        f.write('\n')
        f.write('-----END %s PRIVATE KEY-----\n' % tag)
| lowitty/selenium | libs/windows/paramiko/pkey.py | Python | mit | 13,953 |
#!/usr/bin/env python3
# Filename: paste
# Purpose: XmlRpc interface client to paste.debian.net
# Original code by: Copyright 2007-2011 Michael Gebetsroither <michael@mgeb.org>
# Author of this fork (AoF): Copyright 2016 Github user bvanrijn, <b.vanrijn@me.com>
# License: This file is licensed under the GPL v2+. Full license text in LICENSE
# Modified original: Yes, I forked the repository at commit 8adff71 and modified it
# AoF started working on modified version: Thu Jun 16 21:08:56 2016 +0200
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import sys
import xmlrpc.client
import optparse
import inspect
import getpass
# program defaults
# XML-RPC endpoint of paste.debian.net; override with -s/--server.
DEFAULT_SERVER = 'http://paste.debian.net/server.pl'
class ActionFailedException(Exception):
    """Raised when the paste server answers with a non-zero return code."""

    def __init__(self, errormsg, ret):
        super().__init__(errormsg, ret)

    def what(self):
        """Short, human-readable error message from the server."""
        return self.args[0]

    def dwhat(self):
        """Full result structure returned by the server."""
        return self.args[1]
class Action(object):
    """Maps command-line action names onto XML-RPC calls to the paste server.

    ``args_`` holds the remaining positional arguments for the action,
    ``opts_`` the parsed option values (server, name, expire, lang, private).
    """

    def __init__(self, args, opts):
        self.args_ = args
        self.opts_ = opts

    def _createProxy(self):
        return xmlrpc.client.ServerProxy(self.opts_.server, verbose=False)

    def _callProxy(self, functor, server=None):
        '''Wrapper for xml-rpc calls to server which throws an
        ActionFailedException on error'''
        if server is None:
            server = self._createProxy()
        ret = functor(server)
        if ret['rc'] != 0:
            raise ActionFailedException(ret['statusmessage'], ret)
        return ret

    def _requireArg(self, errormsg):
        # Pop the next positional argument; bail out with a usage error if it
        # is missing.  optparse's parser.error() exits the process, so the
        # previously-present unreachable exit(1) calls have been dropped.
        # Fixes the old bare `except:` which also swallowed server-side
        # ActionFailedException and misreported it as a bad argument.
        try:
            return self.args_.pop(0)
        except IndexError:
            parser.error(errormsg)

    def call(self, method_name):
        # External Interface to call the appropriate action
        return self.__getattribute__(method_name)()

    def actionAddPaste(self):
        '''
        Add paste to the server: <1.line> <2.line> ...

        default        Read paste from stdin.
        [text]         Every argument on the commandline will be interpreted as
                       a seperate line of paste.
        '''
        server = self._createProxy()
        o = self.opts_
        code = self.args_
        if len(self.args_) == 0:
            code = [i.rstrip() for i in sys.stdin.readlines()]
        code = '\n'.join(code)
        result = self._callProxy(
            lambda s: s.paste.addPaste(code, o.name, o.expire * 3600,
                                       o.lang, o.private),
            server)
        return (result['statusmessage'], result)

    def actionDelPaste(self):
        '''
        Delete paste from server: <digest>

        <digest>    Digest of paste you want to remove.
        '''
        digest = self._requireArg("Please provide me with a valid digest")
        # Server errors now propagate as ActionFailedException to __main__.
        result = self._callProxy(lambda s: s.paste.deletePaste(digest))
        return (result['statusmessage'], result)

    def actionGetPaste(self):
        '''
        Get paste from server: <id>

        <id>    Id of paste you want to receive.
        '''
        paste_id = self._requireArg("Please provide me with a paste ID")
        result = self._callProxy(lambda s: s.paste.getPaste(paste_id))
        return (result['code'], result)

    def actionGetLangs(self):
        '''
        Get supported language highlighting types from server
        '''
        result = self._callProxy(lambda s: s.paste.getLanguages())
        return ('\n'.join(result['langs']), result)

    def actionAddShortUrl(self):
        '''
        Add short-URL: <url>

        <url>    Short-URL to add
        '''
        url = self._requireArg("Please provide me with a valid short URL")
        result = self._callProxy(lambda s: s.paste.addShortURL(url))
        return (result['url'], result)

    def actionGetShortUrl(self):
        '''
        Resolve short-URL: <url>

        <url>    Short-URL to get clicks of
        '''
        url = self._requireArg("Please provide me with a valid short URL")
        result = self._callProxy(lambda s: s.paste.resolveShortURL(url))
        return (result['url'], result)

    def actionGetShortUrlClicks(self):
        '''
        Get clicks of short-URL: <url>

        <url>    Short-URL to get clicks of
        '''
        url = self._requireArg("Please provide me with a valid short URL")
        result = self._callProxy(lambda s: s.paste.ShortURLClicks(url))
        return (result['count'], result)

    def actionHelp(self):
        '''
        Print more verbose help about specific action: <action>

        <action>    Topic on which you need more verbose help.
        '''
        if len(self.args_) < 1:
            alias = "help"
        else:
            alias = self.args_.pop(0)
        if alias in actions:
            fun = actions[alias]
            print(inspect.getdoc(self.__getattribute__(fun)))
            print("\nalias: " + " ".join([i for i in actions_r[fun] if i != alias]))
        else:
            print("Error: No such command - %s" % (alias))
            OPT_PARSER.print_usage()
        exit(0)
# actionAddPaste -> [add, a]
actions_r = {}
# add -> actionAddPaste
# a -> actionAddPaste
actions = {}
# option parser
OPT_PARSER = None
##
# MAIN
##
if __name__ == "__main__":
    # Each entry: "<Action method name> <alias1> <alias2> ...".
    action_spec = ['actionAddPaste add a',
                   'actionDelPaste del d rm',
                   'actionGetPaste get g',
                   'actionGetLangs getlangs gl langs l',
                   'actionAddShortUrl addurl au',
                   'actionGetShortUrl geturl gu',
                   'actionGetShortUrlClicks getclicks gc',
                   'actionHelp help h']
    for i in action_spec:
        aliases = i.split()
        cmd = aliases.pop(0)
        actions_r[cmd] = aliases
    # Invert actions_r: every alias maps back to its Action method name.
    for (k,v) in list(actions_r.items()):
        for i in v:
            actions[i] = k
    # Usage text lists each primary alias with the first docstring line of
    # the corresponding Action method.
    usage = "usage: %prog [options] ACTION <args>\n\n" +\
            "actions:\n" +\
            "\n".join(["%12s\t%s" % (v[0], str(inspect.getdoc(getattr(Action, k))).split('\n')[0]) \
                for (k,v) in list(actions_r.items())])
    # Default poster name is the local username.
    running_user = getpass.getuser()
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-n', '--name', default=running_user, help="Name of poster")
    parser.add_option('-e', '--expire', type=int, default=72, metavar='HOURS',
                      help='Time at wich paste should expire')
    parser.add_option('-l', '--lang', default='Plain', help='Type of language to highlight')
    parser.add_option("-p", "--private", action="count", dest="private", default=0,
                      help='Create hidden paste'),
    # NOTE(review): the trailing comma above makes that statement a 1-tuple
    # expression; harmless at runtime but likely unintended.
    parser.add_option('-s', '--server', default=DEFAULT_SERVER,
                      help='Paste server')
    parser.add_option('-v', '--verbose', action='count', default=0, help='More output')
    (opts, args) = parser.parse_args()
    OPT_PARSER = parser
    if len(args) == 0:
        parser.error('Please provide me with an action')
    elif args[0] in actions:
        cmd = args.pop(0)
        action = Action(args, opts)
        try:
            (msg, ret) = action.call(actions[cmd])
            if opts.verbose == 0:
                print(msg)
            else:
                print(ret)
        except ActionFailedException as e:
            # Server rejected the request: show its message (and, verbosely,
            # the full result structure) and exit non-zero.
            sys.stderr.write('Server Error: %s\n' % e.what())
            if opts.verbose >0:
                print(e.dwhat())
            exit(1)
    else:
        parser.error('Unknown action: %s' % args[0])
| bvanrijn/debianpaste-clients | paste.py | Python | gpl-2.0 | 8,665 |
#!/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
gluon.rewrite parses incoming URLs and formats outgoing URLs for gluon.html.URL.
In addition, it rewrites both incoming and outgoing URLs based on the (optional) user-supplied routes.py,
which also allows for rewriting of certain error messages.
routes.py supports two styles of URL rewriting, depending on whether 'routers' is defined.
Refer to router.example.py and routes.example.py for additional documentation.
"""
import os
import re
import logging
import traceback
import threading
import urllib
from gluon.storage import Storage, List
from gluon.http import HTTP
from gluon.fileutils import abspath, read_file
from gluon.settings import global_settings
# short aliases for frequently-used os.path helpers
isdir = os.path.isdir
isfile = os.path.isfile
exists = os.path.exists
pjoin = os.path.join
logger = logging.getLogger('web2py.rewrite')
THREAD_LOCAL = threading.local() # thread-local storage for routing params
# $name / $anything placeholders used in routes.py patterns; a preceding
# backslash escapes the dollar sign.
regex_at = re.compile(r'(?<!\\)\$[a-zA-Z]\w*')
regex_anything = re.compile(r'(?<!\\)\$anything')
# "NNN->URL" syntax -- presumably the redirect form used by routes_onerror
# entries; confirm in try_rewrite_on_error.
regex_redirect = re.compile(r'(\d+)->(.*)')
regex_full_url = re.compile(
    r'^(?P<scheme>http|https|HTTP|HTTPS)\://(?P<host>[^/]*)(?P<uri>.*)')
# static-file version prefix, e.g. "_1.2.3"
regex_version = re.compile(r'^(_[\d]+\.[\d]+\.[\d]+)$')
# pattern to find valid paths in url /application/controller/...
# this could be:
#   for static pages:
#     /<b:application>/static/<x:file>
#   for dynamic pages:
#     /<a:application>[/<c:controller>[/<f:function>[.<e:ext>][/<s:args>]]]
# application, controller, function and ext may only contain [a-zA-Z0-9_]
# file and args may also contain '-', '=', '.' and '/'
# apps in routes_apps_raw must parse raw_args into args
regex_url = re.compile('^/((?P<a>\w+)(/(?P<c>\w+)(/(?P<z>(?P<f>\w+)(\.(?P<e>[\w.]+))?(?P<s>.*)))?)?)?$')
# characters not allowed inside an arg segment
regex_args = re.compile('[^\w/.@=-]')
def _router_default():
    "return new copy of default base router"
    # 'ALL' / 'DEFAULT' are sentinel strings -- presumably expanded when the
    # routers configuration is loaded; confirm in the loader.
    router = Storage(
        default_application='init',
        applications='ALL',
        default_controller='default',
        controllers='DEFAULT',
        default_function='index',
        functions=dict(),
        default_language=None,
        languages=None,
        root_static=['favicon.ico', 'robots.txt'],
        map_static=None,
        domains=None,
        exclusive_domain=False,
        map_hyphen=False,
        acfe_match=r'\w+$',              # legal app/ctlr/fcn/ext
        #
        # Implementation note:
        # The file_match & args_match patterns use look-behind to avoid
        # pathological backtracking from nested patterns.
        #
        file_match = r'([-+=@$%\w]|(?<=[-+=@$%\w])[./])*$',  # legal static subpath
        args_match=r'([\w@ -]|(?<=[\w@ -])[.=])*$',  # legal arg in args
    )
    return router
def _params_default(app=None):
    "return new copy of default parameters"
    # Parameters for the regex-based rewriting scheme (routes_in/routes_out);
    # one Storage per application plus a BASE set.
    p = Storage()
    p.name = app or "BASE"
    p.default_application = app or "init"
    p.default_controller = "default"
    p.default_function = "index"
    # rewrite rule lists populated from routes.py
    p.routes_app = []
    p.routes_in = []
    p.routes_out = []
    p.routes_onerror = []
    p.routes_apps_raw = []
    p.error_handler = None
    p.error_message = '<html><body><h1>%s</h1></body></html>'
    # The trailing junk comment in the ticket page works around an IE quirk
    # with very short error documents (see inline HTML comment).
    p.error_message_ticket = \
        '<html><body><h1>Internal error</h1>Ticket issued: <a href="/admin/default/ticket/%(ticket)s" target="_blank">%(ticket)s</a></body><!-- this is junk text else IE does not display the page: ' + ('x' * 512) + ' //--></html>'
    p.routers = None
    p.logging = 'off'
    return p
# per-application parameter sets -- presumably keyed by app name; confirm
# where load() populates it.
params_apps = dict()
params = _params_default(app=None)  # regex rewrite parameters
THREAD_LOCAL.routes = params  # default to base regex rewrite parameters
# router-based configuration; stays None unless routes.py defines 'routers'
routers = None
def log_rewrite(string):
    "Log rewrite activity under control of routes.py"
    # params.logging is a routes.py setting; 'debug' and 'off' are tested
    # first as the common fast paths.  NOTE: `print string` is Python 2
    # statement syntax -- this module predates Python 3 support.
    if params.logging == 'debug':   # catch common cases first
        logger.debug(string)
    elif params.logging == 'off' or not params.logging:
        pass
    elif params.logging == 'print':
        print string
    elif params.logging == 'info':
        logger.info(string)
    elif params.logging == 'warning':
        logger.warning(string)
    elif params.logging == 'error':
        logger.error(string)
    elif params.logging == 'critical':
        logger.critical(string)
    else:
        # unrecognized level: fall back to debug
        logger.debug(string)
# complete set of keys a router entry in routes.py may define
ROUTER_KEYS = set(
    ('default_application', 'applications',
     'default_controller', 'controllers',
     'default_function', 'functions',
     'default_language', 'languages',
     'domain', 'domains', 'root_static', 'path_prefix',
     'exclusive_domain', 'map_hyphen', 'map_static',
     'acfe_match', 'file_match', 'args_match'))
# keys that apply to the BASE router only -- presumably used for validation
# when routers are loaded; confirm in the loader.
ROUTER_BASE_KEYS = set(
    ('applications', 'default_application',
     'domains', 'path_prefix'))
# The external interface to rewrite consists of:
#
# load: load routing configuration file(s)
# url_in: parse and rewrite incoming URL
# url_out: assemble and rewrite outgoing URL
#
# THREAD_LOCAL.routes.default_application
# THREAD_LOCAL.routes.error_message
# THREAD_LOCAL.routes.error_message_ticket
# THREAD_LOCAL.routes.try_redirect_on_error
# THREAD_LOCAL.routes.error_handler
#
# filter_url: helper for doctest & unittest
# filter_err: helper for doctest & unittest
# regex_filter_out: doctest
def fixup_missing_path_info(environ):
    """
    Fill in PATH_INFO / QUERY_STRING / REQUEST_URI / HTTP_HOST when the
    server (e.g. fcgi) did not supply them all, mutating environ in place.
    """
    path_info = environ.get('PATH_INFO')
    request_uri = environ.get('REQUEST_URI')
    if request_uri and not path_info:
        # fcgi case: derive path and query string from the request URI
        parts = request_uri.split('?')
        path_info = parts[0]
        environ['PATH_INFO'] = path_info
        environ['QUERY_STRING'] = parts[1] if len(parts) > 1 else ''
    elif not request_uri:
        # rebuild the request URI from path and (optional) query string
        query_string = environ.get('QUERY_STRING')
        if query_string:
            environ['REQUEST_URI'] = '%s?%s' % (path_info, query_string)
        else:
            environ['REQUEST_URI'] = path_info
    if not environ.get('HTTP_HOST'):
        # synthesize Host header from server name and port
        environ['HTTP_HOST'] = '%s:%s' % (environ.get('SERVER_NAME'),
                                          environ.get('SERVER_PORT'))
def url_in(request, environ):
    "parse and rewrite incoming URL"
    # router-based rewriting takes precedence when configured
    handler = map_url_in if routers else regex_url_in
    return handler(request, environ)
def url_out(request, environ, application, controller, function,
            args, other, scheme, host, port):
    "assemble and rewrite outgoing URL"
    if routers:
        prefix = map_url_out(request, environ, application, controller,
                             function, args, other, scheme, host, port)
        url = prefix + other
    else:
        url = regex_filter_out(
            '/%s/%s/%s%s' % (application, controller, function, other),
            environ)
    #
    # fill in scheme and host if absolute URL is requested
    # scheme can be a string, eg 'http', 'https', 'ws', 'wss'
    #
    want_host = host is True or (host is None and
                                 (scheme or port is not None))
    if want_host:
        host = request.env.http_host
    if scheme is True or not scheme:
        if request:
            scheme = request.env.get('wsgi_url_scheme', 'http').lower()
        else:
            scheme = 'http'
    if host:
        if port:
            authority = '%s:%s' % (host.split(':', 1)[0], port)
        else:
            authority = host
        url = '%s://%s%s' % (scheme, authority, url)
    return url
def try_rewrite_on_error(http_response, request, environ, ticket=None):
    """
    called from main.wsgibase to rewrite the http response.
    Returns (http_response, environ); http_response is None when wsgibase
    should re-dispatch internally to the configured error path.
    """
    status = int(str(http_response.status).split()[0])
    if status >= 399 and THREAD_LOCAL.routes.routes_onerror:
        # candidate keys, most-specific first; first routes_onerror entry
        # whose key is in this set wins
        keys = set(('%s/%s' % (request.application, status),
                    '%s/*' % (request.application),
                    '*/%s' % (status),
                    '*/*'))
        for (key, uri) in THREAD_LOCAL.routes.routes_onerror:
            if key in keys:
                if uri == '!':
                    # do nothing!
                    return http_response, environ
                elif '?' in uri:
                    path_info, query_string = uri.split('?', 1)
                    query_string += '&'
                else:
                    path_info, query_string = uri, ''
                # pass error context to the handler via the query string
                query_string += \
                    'code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
                    (status, ticket, urllib.quote_plus(
                        request.env.request_uri), request.url)
                if uri.startswith('http://') or uri.startswith('https://'):
                    # make up a response
                    url = path_info + '?' + query_string
                    message = 'You are being redirected <a href="%s">here</a>'
                    return HTTP(303, message % url, Location=url), environ
                elif not environ.get('__ROUTES_ONERROR__', False):
                    # wsgibase will be called recursively with
                    # the routes_onerror path.
                    environ['__ROUTES_ONERROR__'] = True  # limit recursion
                    path_info = '/' + path_info.lstrip('/')
                    environ['PATH_INFO'] = path_info
                    environ['QUERY_STRING'] = query_string
                    environ['WEB2PY_STATUS_CODE'] = status
                    return None, environ
    # do nothing!
    return http_response, environ
def try_redirect_on_error(http_object, request, ticket=None):
    """
    called from main.wsgibase to rewrite the http response;
    returns an HTTP(303) redirect when an error route matches,
    else the original http_object unchanged
    """
    status = int(str(http_object.status).split()[0])
    if status > 399 and THREAD_LOCAL.routes.routes_onerror:
        # candidate keys, most-specific first
        keys = set(('%s/%s' % (request.application, status),
                    '%s/*' % (request.application),
                    '*/%s' % (status),
                    '*/*'))
        for (key, redir) in THREAD_LOCAL.routes.routes_onerror:
            if key in keys:
                if redir == '!':
                    # '!' means: leave the response alone
                    break
                elif '?' in redir:
                    url = '%s&code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
                        (redir, status, ticket,
                         urllib.quote_plus(request.env.request_uri),
                         request.url)
                else:
                    url = '%s?code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
                        (redir, status, ticket,
                         urllib.quote_plus(request.env.request_uri),
                         request.url)
                return HTTP(303, 'You are being redirected <a href="%s">here</a>' % url, Location=url)
    return http_object
def load(routes='routes.py', app=None, data=None, rdict=None):
    """
    load: read (if file) and parse routes
    store results in params
    (called from main.py at web2py initialization time)
    If data is present, it's used instead of the routes.py contents.
    If rdict is present, it must be a dict to be used for routers (unit test)
    """
    global params
    global routers
    if app is None:
        # reinitialize the module-level rewrite state
        global params_apps
        params_apps = dict()
        params = _params_default(app=None)  # regex rewrite parameters
        THREAD_LOCAL.routes = params  # default to base regex rewrite parameters
        routers = None
    if isinstance(rdict, dict):
        # unit-test path: router dict supplied directly
        symbols = dict(routers=rdict)
        path = 'rdict'
    else:
        if data is not None:
            path = 'routes'
        else:
            if app is None:
                path = abspath(routes)
            else:
                path = abspath('applications', app, routes)
            if not exists(path):
                return
            data = read_file(path).replace('\r\n', '\n')
        symbols = dict(app=app)
        try:
            # routes.py is executable configuration; its assignments
            # land in `symbols`
            exec (data + '\n') in symbols
        except SyntaxError, e:
            logger.error(
                '%s has a syntax error and will not be loaded\n' % path
                + traceback.format_exc())
            raise e
    p = _params_default(app)
    # pre-compile the regex-based routing tables
    for sym in ('routes_app', 'routes_in', 'routes_out'):
        if sym in symbols:
            for items in symbols[sym]:
                p[sym].append(compile_regex(*items))
    # copy simple settings verbatim
    for sym in ('routes_onerror', 'routes_apps_raw',
                'error_handler', 'error_message', 'error_message_ticket',
                'default_application', 'default_controller', 'default_function',
                'logging'):
        if sym in symbols:
            p[sym] = symbols[sym]
    if 'routers' in symbols:
        p.routers = Storage(symbols['routers'])
        for key in p.routers:
            if isinstance(p.routers[key], dict):
                p.routers[key] = Storage(p.routers[key])
    if app is None:
        params = p  # install base rewrite parameters
        THREAD_LOCAL.routes = params  # install default as current routes
        #
        # create the BASE router if routers in use
        #
        routers = params.routers  # establish routers if present
        if isinstance(routers, dict):
            routers = Storage(routers)
        if routers is not None:
            router = _router_default()
            if routers.BASE:
                router.update(routers.BASE)
            routers.BASE = router
        # scan each app in applications/
        #     create a router, if routers are in use
        #     parse the app-specific routes.py if present
        #
        all_apps = []
        apppath = abspath('applications')
        for appname in os.listdir(apppath):
            if not appname.startswith('.') and \
                    isdir(abspath(apppath, appname)) and \
                    isdir(abspath(apppath, appname, 'controllers')):
                all_apps.append(appname)
                if routers:
                    router = Storage(routers.BASE)  # new copy
                    if appname in routers:
                        for key in routers[appname].keys():
                            # BASE-only keys are illegal in app routers
                            if key in ROUTER_BASE_KEYS:
                                raise SyntaxError("BASE-only key '%s' in router '%s'" % (key, appname))
                        router.update(routers[appname])
                    routers[appname] = router
                if exists(abspath('applications', appname, routes)):
                    # recurse to load the app-specific routes.py
                    load(routes, appname)
        if routers:
            load_routers(all_apps)
    else:  # app
        params_apps[app] = p
        if routers and p.routers:
            if app in p.routers:
                routers[app].update(p.routers[app])
    log_rewrite('URL rewrite is on. configuration in %s' % path)
def compile_regex(k, v, env=None):
    """
    Preprocess and compile the regular expressions in routes_app/in/out
    The resulting regex will match a pattern of the form:
        [remote address]:[protocol]://[host]:[method] [path]
    We allow abbreviated regexes on input; here we try to complete them.
    Returns (compiled_pattern, replacement, env_dict).
    """
    k0 = k  # original k for error reporting
    # bracket regex in ^...$ if not already done
    if not k[0] == '^':
        k = '^%s' % k
    if not k[-1] == '$':
        k = '%s$' % k
    # if there are no :-separated parts, prepend a catch-all for the IP address
    if k.find(':') < 0:
        # k = '^.*?:%s' % k[1:]
        k = '^.*?:https?://[^:/]+:[a-z]+ %s' % k[1:]
    # if there's no ://, provide a catch-all for the protocol, host & method
    if k.find('://') < 0:
        i = k.find(':/')
        if i < 0:
            raise SyntaxError("routes pattern syntax error: path needs leading '/' [%s]" % k0)
        k = r'%s:https?://[^:/]+:[a-z]+ %s' % (k[:i], k[i + 1:])
    # $anything -> ?P<anything>.*
    for item in regex_anything.findall(k):
        k = k.replace(item, '(?P<anything>.*)')
    # $a (etc) -> ?P<a>\w+
    for item in regex_at.findall(k):
        k = k.replace(item, r'(?P<%s>\w+)' % item[1:])
    # same for replacement pattern, but with \g
    for item in regex_at.findall(v):
        v = v.replace(item, r'\g<%s>' % item[1:])
    return (re.compile(k, re.DOTALL), v, env or {})
def load_routers(all_apps):
    "load-time post-processing of routers"
    for app in routers:
        # initialize apps with routers that aren't present,
        # on behalf of unit tests
        if app not in all_apps:
            all_apps.append(app)
            router = Storage(routers.BASE)  # new copy
            if app != 'BASE':
                # BASE-only keys are illegal in app routers
                keys = set(routers[app]).intersection(ROUTER_BASE_KEYS)
                if keys:
                    raise SyntaxError("BASE-only key(s) %s in router '%s'" % (
                        tuple(keys), app))
            router.update(routers[app])
            routers[app] = router
        router = routers[app]
        # reject unknown router keys outright
        keys = set(router).difference(ROUTER_KEYS)
        if keys:
            raise SyntaxError("unknown key(s) %s in router '%s'" % (
                tuple(keys), app))
        # normalize controllers/languages/functions to sets/dicts
        if not router.controllers:
            router.controllers = set()
        elif not isinstance(router.controllers, str):
            router.controllers = set(router.controllers)
        if router.languages:
            router.languages = set(router.languages)
        else:
            router.languages = set()
        if router.functions:
            if isinstance(router.functions, (set, tuple, list)):
                # flat collection: treat as functions of the default controller
                functions = set(router.functions)
                if isinstance(router.default_function, str):
                    functions.add(
                        router.default_function)  # legacy compatibility
                router.functions = {router.default_controller: functions}
            for controller in router.functions:
                router.functions[controller] = set(
                    router.functions[controller])
        else:
            router.functions = dict()
        if app != 'BASE':
            # app routers must not carry BASE-only keys; drop any inherited
            for base_only in ROUTER_BASE_KEYS:
                router.pop(base_only, None)
            if 'domain' in router:
                routers.BASE.domains[router.domain] = app
            if isinstance(router.controllers, str) and router.controllers == 'DEFAULT':
                # 'DEFAULT' means: discover controllers from the filesystem
                router.controllers = set()
                if isdir(abspath('applications', app)):
                    cpath = abspath('applications', app, 'controllers')
                    for cname in os.listdir(cpath):
                        if isfile(abspath(cpath, cname)) and cname.endswith('.py'):
                            router.controllers.add(cname[:-3])
            if router.controllers:
                router.controllers.add('static')
                router.controllers.add(router.default_controller)
    if isinstance(routers.BASE.applications, str) and routers.BASE.applications == 'ALL':
        routers.BASE.applications = list(all_apps)
    if routers.BASE.applications:
        routers.BASE.applications = set(routers.BASE.applications)
    else:
        routers.BASE.applications = set()
    for app in routers.keys():
        # set router name
        router = routers[app]
        router.name = app
        # compile URL validation patterns
        router._acfe_match = re.compile(router.acfe_match)
        router._file_match = re.compile(router.file_match)
        if router.args_match:
            router._args_match = re.compile(router.args_match)
        # convert path_prefix to a list of path elements
        if router.path_prefix:
            if isinstance(router.path_prefix, str):
                router.path_prefix = router.path_prefix.strip('/').split('/')
    # rewrite BASE.domains as tuples
    #
    #      key:    'domain[:port]' -> (domain, port)
    #      value:  'application[/controller] -> (application, controller)
    #      (port and controller may be None)
    #
    domains = dict()
    if routers.BASE.domains:
        for (d, a) in routers.BASE.domains.iteritems():
            (domain, app) = (d.strip(':'), a.strip('/'))
            if ':' in domain:
                (domain, port) = domain.split(':')
            else:
                port = None
            if '/' in app:
                (app, ctlr) = app.split('/', 1)
            else:
                ctlr = None
            if ctlr and '/' in ctlr:
                (ctlr, fcn) = ctlr.split('/')
            else:
                fcn = None
            if app not in all_apps and app not in routers:
                raise SyntaxError("unknown app '%s' in domains" % app)
            domains[(domain, port)] = (app, ctlr, fcn)
    routers.BASE.domains = domains
def regex_uri(e, regexes, tag, default=None):
    """
    Filter an incoming URI against a list of (regex, replacement, env)
    tuples; return the first rewrite that matches, else `default`.
    """
    host = e.get('HTTP_HOST', e.get('SERVER_NAME', 'localhost')).lower()
    colon = host.find(':')
    if colon > 0:
        host = host[:colon]  # drop the port from the host
    # the match key has the canonical form:
    #   remote_addr:scheme://host:method path
    key = '%s:%s://%s:%s %s' % (
        e.get('REMOTE_ADDR', 'localhost'),
        e.get('wsgi.url_scheme', 'http').lower(),
        host,
        e.get('REQUEST_METHOD', 'get').lower(),
        e['PATH_INFO'])
    for regex, value, custom_env in regexes:
        if regex.match(key):
            e.update(custom_env)
            rewritten = regex.sub(value, key)
            log_rewrite('%s: [%s] [%s] -> %s' % (tag, key, value, rewritten))
            return rewritten
    log_rewrite('%s: [%s] -> %s (not rewritten)' % (tag, key, default))
    return default
def regex_select(env=None, app=None, request=None):
    """
    select a set of regex rewrite params for the current request
    and install it as THREAD_LOCAL.routes; returns the app name
    (for doctest use) or None
    """
    if app:
        THREAD_LOCAL.routes = params_apps.get(app, params)
    elif env and params.routes_app:
        if routers:
            map_url_in(request, env, app=True)
        else:
            # determine the app from the routes_app regex table
            app = regex_uri(env, params.routes_app, "routes_app")
            THREAD_LOCAL.routes = params_apps.get(app, params)
    else:
        THREAD_LOCAL.routes = params  # default to base rewrite parameters
    log_rewrite("select routing parameters: %s" % THREAD_LOCAL.routes.name)
    return app  # for doctest
def regex_filter_in(e):
    """
    regex rewrite incoming URL; mutates and returns the environ dict,
    preserving the original URI in WEB2PY_ORIGINAL_URI
    """
    routes = THREAD_LOCAL.routes
    query = e.get('QUERY_STRING', None)
    e['WEB2PY_ORIGINAL_URI'] = e['PATH_INFO'] + (query and ('?' + query) or '')
    if routes.routes_in:
        path = regex_uri(e, routes.routes_in,
                         "routes_in", e['PATH_INFO'])
        rmatch = regex_redirect.match(path)
        if rmatch:
            # rewrite target is an explicit redirect: '<code> <location>'
            raise HTTP(int(rmatch.group(1)), location=rmatch.group(2))
        items = path.split('?', 1)
        e['PATH_INFO'] = items[0]
        if len(items) > 1:
            if query:
                # merge the rewrite's query string with the request's
                query = items[1] + '&' + query
            else:
                query = items[1]
            e['QUERY_STRING'] = query
    e['REQUEST_URI'] = e['PATH_INFO'] + (query and ('?' + query) or '')
    return e
def sluggify(key):
    "normalize an environ key: lower-case it and map dots to underscores"
    return key.replace('.', '_').lower()
def invalid_url(routes):
    "raise HTTP 400 for a request path that cannot be parsed"
    message = routes.error_message % 'invalid request'
    raise HTTP(400, message, web2py_error='invalid path')
def regex_url_in(request, environ):
    """
    Rewrite and parse an incoming URL.
    Returns (static_file, version, environ) when the request targets a
    static file, else (None, None, environ) after filling in
    request.application/controller/function/extension/args.
    """
    # ##################################################
    # select application
    # rewrite URL if routes_in is defined
    # update request.env
    # ##################################################
    regex_select(env=environ, request=request)
    routes = THREAD_LOCAL.routes
    if routes.routes_in:
        environ = regex_filter_in(environ)
    request.env.update(
        (k.lower().replace('.', '_'), v) for k, v in environ.iteritems())
    # ##################################################
    # serve if a static file
    # ##################################################
    path = urllib.unquote(request.env.path_info) or '/'
    path = path.replace('\\', '/')
    if path.endswith('/') and len(path) > 1:
        path = path[:-1]
    match = regex_url.match(path)
    if not match:
        invalid_url(routes)
    request.raw_args = (match.group('s') or '')
    if request.raw_args.startswith('/'):
        request.raw_args = request.raw_args[1:]
    if match.group('c') == 'static':
        application = match.group('a')
        version, filename = None, match.group('z').replace(' ', '_')
        if not filename:
            raise HTTP(404)
        # an optional leading version segment (for cache busting)
        items = filename.split('/', 1)
        if regex_version.match(items[0]):
            version, filename = items
        static_folder = pjoin(request.env.applications_parent,
                              'applications', application, 'static')
        static_file = os.path.abspath(pjoin(static_folder, filename))
        # guard against directory traversal: the resolved path must lie
        # strictly inside the static folder.  A bare startswith(static_folder)
        # would also accept sibling folders such as .../static2, so the
        # separator is required after the folder prefix.
        if not static_file.startswith(static_folder + os.sep):
            invalid_url(routes)
        return (static_file, version, environ)
    else:
        # ##################################################
        # parse application, controller and function
        # ##################################################
        request.application = match.group('a') or routes.default_application
        request.controller = match.group('c') or routes.default_controller
        request.function = match.group('f') or routes.default_function
        request.raw_extension = match.group('e')
        request.extension = request.raw_extension or 'html'
        if request.application in routes.routes_apps_raw:
            # application is responsible for parsing args
            request.args = None
        elif request.raw_args:
            # replace characters rejected by regex_args with '_'
            args = regex_args.sub('_', request.raw_args)
            request.args = List(args.split('/'))
        else:
            request.args = List([])
    return (None, None, environ)
def regex_filter_out(url, e=None):
    """
    regex rewrite outgoing URL; returns the rewritten URL, or the
    original url when no routes_out pattern matches
    """
    if not hasattr(THREAD_LOCAL, 'routes'):
        regex_select()  # ensure routes is set (for application threads)
    routes = THREAD_LOCAL.routes
    if routers:
        return url  # already filtered
    if routes.routes_out:
        # separate the query string; only the path part is matched
        items = url.split('?', 1)
        if e:
            host = e.get('http_host', 'localhost').lower()
            i = host.find(':')
            if i > 0:
                host = host[:i]
            # canonical match key: remote_addr:scheme://host:method path
            items[0] = '%s:%s://%s:%s %s' % \
                (e.get('remote_addr', ''),
                 e.get('wsgi_url_scheme', 'http').lower(), host,
                 e.get('request_method', 'get').lower(), items[0])
        else:
            items[0] = ':http://localhost:get %s' % items[0]
        for (regex, value, tmp) in routes.routes_out:
            if regex.match(items[0]):
                # reattach the query string after substitution
                rewritten = '?'.join([regex.sub(value, items[0])] + items[1:])
                log_rewrite('routes_out: [%s] -> %s' % (url, rewritten))
                return rewritten
    log_rewrite('routes_out: [%s] not rewritten' % url)
    return url
def filter_url(url, method='get', remote='0.0.0.0',
               out=False, app=False, lang=None,
               domain=(None, None), env=False, scheme=None,
               host=None, port=None):
    """
    doctest/unittest interface to regex_filter_in() and regex_filter_out()
    Builds a synthetic WSGI environ from `url` and exercises the inbound
    (default), outbound (out=True), or app-selection (app=True) path.
    """
    match = regex_full_url.match(url)
    urlscheme = match.group('scheme').lower()
    urlhost = match.group('host').lower()
    uri = match.group('uri')
    k = uri.find('?')
    if k < 0:
        k = len(uri)
    if isinstance(domain, str):
        domain = (domain, None)
    (path_info, query_string) = (uri[:k], uri[k + 1:])
    path_info = urllib.unquote(path_info)  # simulate server
    e = {
        'REMOTE_ADDR': remote,
        'REQUEST_METHOD': method,
        'wsgi.url_scheme': urlscheme,
        'HTTP_HOST': urlhost,
        'REQUEST_URI': uri,
        'PATH_INFO': path_info,
        'QUERY_STRING': query_string,
        #for filter_out request.env use lowercase
        'remote_addr': remote,
        'request_method': method,
        'wsgi_url_scheme': urlscheme,
        'http_host': urlhost
    }
    request = Storage()
    e["applications_parent"] = global_settings.applications_parent
    request.env = Storage(e)
    request.uri_language = lang
    # determine application only
    #
    if app:
        if routers:
            return map_url_in(request, e, app=True)
        return regex_select(e)
    # rewrite outbound URL
    #
    if out:
        (request.env.domain_application,
         request.env.domain_controller) = domain
        items = path_info.lstrip('/').split('/')
        if items[-1] == '':
            items.pop()  # adjust trailing empty args
        assert len(items) >= 3, "at least /a/c/f is required"
        a = items.pop(0)
        c = items.pop(0)
        f = items.pop(0)
        if not routers:
            return regex_filter_out(uri, e)
        acf = map_url_out(
            request, None, a, c, f, items, None, scheme, host, port)
        if items:
            url = '%s/%s' % (acf, '/'.join(items))
            if items[-1] == '':
                url += '/'  # preserve a trailing empty arg
        else:
            url = acf
        if query_string:
            url += '?' + query_string
        return url
    # rewrite inbound URL
    #
    (static, version, e) = url_in(request, e)
    if static:
        return static
    # assemble a doctest-friendly description of the parsed request
    result = "/%s/%s/%s" % (
        request.application, request.controller, request.function)
    if request.extension and request.extension != 'html':
        result += ".%s" % request.extension
    if request.args:
        result += " %s" % request.args
    if e['QUERY_STRING']:
        result += " ?%s" % e['QUERY_STRING']
    if request.uri_language:
        result += " (%s)" % request.uri_language
    if env:
        return request.env
    return result
def filter_err(status, application='app', ticket='tkt'):
    "doctest/unittest interface to routes_onerror"
    routes = THREAD_LOCAL.routes
    if status <= 399 or not routes.routes_onerror:
        return status  # no action
    # candidate keys, most-specific first
    keys = set(('%s/%s' % (application, status),
                '%s/*' % application,
                '*/%s' % status,
                '*/*'))
    for key, redir in routes.routes_onerror:
        if key not in keys:
            continue
        if redir == '!':
            break  # '!' means: no redirection
        sep = '&' if '?' in redir else '?'
        return '%s%scode=%s&ticket=%s' % (redir, sep, status, ticket)
    return status  # no action
# router support
#
class MapUrlIn(object):
    """
    logic for mapping incoming URLs under the router-based rewrite system;
    one instance is built per request and its map_* methods are applied in
    sequence (prefix, app, root-static, language, controller, static,
    function, args) to populate self and then the request object
    """
    def __init__(self, request=None, env=None):
        "initialize a map-in object"
        self.request = request
        self.env = env
        self.router = None
        self.application = None
        self.language = None
        self.controller = None
        self.function = None
        self.extension = 'html'
        self.controllers = set()
        self.functions = dict()
        self.languages = set()
        self.default_language = None
        self.map_hyphen = False
        self.exclusive_domain = False
        path = self.env['PATH_INFO']
        self.query = self.env.get('QUERY_STRING', None)
        path = path.lstrip('/')
        self.env['PATH_INFO'] = '/' + path
        self.env['WEB2PY_ORIGINAL_URI'] = self.env['PATH_INFO'] + (
            self.query and ('?' + self.query) or '')
        # to handle empty args, strip exactly one trailing slash, if present
        # .../arg1// represents one trailing empty arg
        #
        if path.endswith('/'):
            path = path[:-1]
        self.args = List(path and path.split('/') or [])
        # see http://www.python.org/dev/peps/pep-3333/#url-reconstruction for URL composition
        self.remote_addr = self.env.get('REMOTE_ADDR', 'localhost')
        self.scheme = self.env.get('wsgi.url_scheme', 'http').lower()
        self.method = self.env.get('REQUEST_METHOD', 'get').lower()
        (self.host, self.port) = (self.env.get('HTTP_HOST'), None)
        if not self.host:
            (self.host, self.port) = (
                self.env.get('SERVER_NAME'), self.env.get('SERVER_PORT'))
        if not self.host:
            (self.host, self.port) = ('localhost', '80')
        if ':' in self.host:
            (self.host, self.port) = self.host.rsplit(':',1) # for ipv6 support
        if not self.port:
            self.port = '443' if self.scheme == 'https' else '80'
    def map_prefix(self):
        "strip path prefix, if present in its entirety"
        prefix = routers.BASE.path_prefix
        if prefix:
            prefixlen = len(prefix)
            if prefixlen > len(self.args):
                return
            for i in xrange(prefixlen):
                if prefix[i] != self.args[i]:
                    return  # prefix didn't match
            self.args = List(self.args[prefixlen:])  # strip the prefix
    def map_app(self):
        """
        determine application name from the first arg or the domain table,
        then install the matching application router and its settings
        """
        base = routers.BASE  # base router
        self.domain_application = None
        self.domain_controller = None
        self.domain_function = None
        arg0 = self.harg0
        if not base.exclusive_domain and base.applications and arg0 in base.applications:
            self.application = arg0
        elif not base.exclusive_domain and arg0 and not base.applications:
            self.application = arg0
        elif (self.host, self.port) in base.domains:
            (self.application, self.domain_controller,
             self.domain_function) = base.domains[(self.host, self.port)]
            self.env['domain_application'] = self.application
            self.env['domain_controller'] = self.domain_controller
            self.env['domain_function'] = self.domain_function
        elif (self.host, None) in base.domains:
            # domain entry without an explicit port
            (self.application, self.domain_controller,
             self.domain_function) = base.domains[(self.host, None)]
            self.env['domain_application'] = self.application
            self.env['domain_controller'] = self.domain_controller
            self.env['domain_function'] = self.domain_function
        elif base.applications and arg0 in base.applications:
            self.application = arg0
        elif arg0 and not base.applications:
            self.application = arg0
        else:
            self.application = base.default_application or ''
        self.pop_arg_if(self.application == arg0)
        if not base._acfe_match.match(self.application):
            raise HTTP(
                400, THREAD_LOCAL.routes.error_message % 'invalid request',
                web2py_error="invalid application: '%s'" % self.application)
        if self.application not in routers and \
                (self.application != THREAD_LOCAL.routes.default_application or self.application == 'welcome'):
            raise HTTP(
                400, THREAD_LOCAL.routes.error_message % 'invalid request',
                web2py_error="unknown application: '%s'" % self.application)
        # set the application router
        #
        log_rewrite("select application=%s" % self.application)
        self.request.application = self.application
        if self.application not in routers:
            self.router = routers.BASE  # support gluon.main.wsgibase init->welcome
        else:
            self.router = routers[self.application]  # application router
        # cache the router's settings on self for the later map_* steps
        self.controllers = self.router.controllers
        self.default_controller = self.domain_controller or self.router.default_controller
        self.functions = self.router.functions
        self.languages = self.router.languages
        self.default_language = self.router.default_language
        self.map_hyphen = self.router.map_hyphen
        self.exclusive_domain = self.router.exclusive_domain
        self._acfe_match = self.router._acfe_match
        self.file_match = self.router.file_match
        self._file_match = self.router._file_match
        self._args_match = self.router._args_match
    def map_root_static(self):
        '''
        handle root-static files (no hyphen mapping)
        a root-static file is one whose incoming URL expects it to be at the root,
        typically robots.txt & favicon.ico
        '''
        if len(self.args) == 1 and self.arg0 in self.router.root_static:
            self.controller = self.request.controller = 'static'
            root_static_file = pjoin(self.request.env.applications_parent,
                                     'applications', self.application,
                                     self.controller, self.arg0)
            log_rewrite("route: root static=%s" % root_static_file)
            return root_static_file, None
        return None, None
    def map_language(self):
        "handle language (no hyphen mapping)"
        arg0 = self.arg0  # no hyphen mapping
        if arg0 and self.languages and arg0 in self.languages:
            self.language = arg0
        else:
            self.language = self.default_language
        if self.language:
            log_rewrite("route: language=%s" % self.language)
            self.pop_arg_if(self.language == arg0)
            arg0 = self.arg0
    def map_controller(self):
        "identify controller"
        # handle controller
        #
        arg0 = self.harg0  # map hyphens
        if not arg0 or (self.controllers and arg0 not in self.controllers):
            self.controller = self.default_controller or ''
        else:
            self.controller = arg0
        self.pop_arg_if(arg0 == self.controller)
        log_rewrite("route: controller=%s" % self.controller)
        if not self.router._acfe_match.match(self.controller):
            raise HTTP(
                400, THREAD_LOCAL.routes.error_message % 'invalid request',
                web2py_error='invalid controller')
    def map_static(self):
        '''
        handle static files
        file_match but no hyphen mapping
        '''
        if self.controller != 'static':
            return None, None
        # optional leading version segment (cache busting)
        version = regex_version.match(self.args(0))
        if self.args and version:
            file = '/'.join(self.args[1:])
        else:
            file = '/'.join(self.args)
        if len(self.args) == 0:
            bad_static = True  # require a file name
        elif '/' in self.file_match:
            # match the path
            bad_static = not self.router._file_match.match(file)
        else:
            # match path elements
            bad_static = False
            for name in self.args:
                bad_static = bad_static or name in (
                    '', '.', '..') or not self.router._file_match.match(name)
        if bad_static:
            log_rewrite('bad static path=%s' % file)
            raise HTTP(400,
                       THREAD_LOCAL.routes.error_message % 'invalid request',
                       web2py_error='invalid static file')
        #
        # support language-specific static subdirectories,
        # eg /appname/en/static/filename => applications/appname/static/en/filename
        # if language-specific file doesn't exist, try same file in static
        #
        if self.language:
            static_file = pjoin(self.request.env.applications_parent,
                                'applications', self.application,
                                'static', self.language, file)
        if not self.language or not isfile(static_file):
            static_file = pjoin(self.request.env.applications_parent,
                                'applications', self.application,
                                'static', file)
        self.extension = None
        log_rewrite("route: static=%s" % static_file)
        return static_file, version
    def map_function(self):
        "handle function.extension"
        arg0 = self.harg0  # map hyphens
        functions = self.functions.get(self.controller, set())
        if isinstance(self.router.default_function, dict):
            default_function = self.router.default_function.get(
                self.controller, None)
        else:
            default_function = self.router.default_function  # str or None
        default_function = self.domain_function or default_function
        if not arg0 or functions and arg0 not in functions:
            self.function = default_function or ""
            self.pop_arg_if(arg0 and self.function == arg0)
        else:
            # split an optional .extension off the function name
            func_ext = arg0.split('.')
            if len(func_ext) > 1:
                self.function = func_ext[0]
                self.extension = func_ext[-1]
            else:
                self.function = arg0
            self.pop_arg_if(True)
        log_rewrite(
            "route: function.ext=%s.%s" % (self.function, self.extension))
        if not self.router._acfe_match.match(self.function):
            raise HTTP(
                400, THREAD_LOCAL.routes.error_message % 'invalid request',
                web2py_error='invalid function')
        if self.extension and not self.router._acfe_match.match(self.extension):
            raise HTTP(
                400, THREAD_LOCAL.routes.error_message % 'invalid request',
                web2py_error='invalid extension')
    def validate_args(self):
        '''
        check args against validation pattern
        '''
        for arg in self.args:
            if not self.router._args_match.match(arg):
                raise HTTP(
                    400, THREAD_LOCAL.routes.error_message % 'invalid request',
                    web2py_error='invalid arg <%s>' % arg)
    def sluggify(self):
        "copy env into request.env with lower-cased, underscore-joined keys"
        self.request.env.update(
            (k.lower().replace('.', '_'), v) for k, v in self.env.iteritems())
    def update_request(self):
        '''
        update request from self
        build env.request_uri
        make lower-case versions of http headers in env
        '''
        self.request.application = self.application
        self.request.controller = self.controller
        self.request.function = self.function
        self.request.extension = self.extension
        self.request.args = self.args
        if self.language:
            self.request.uri_language = self.language
        uri = '/%s/%s' % (self.controller, self.function)
        app = self.application
        if self.map_hyphen:
            # outward-facing URLs show hyphens where names use underscores
            uri = uri.replace('_', '-')
            app = app.replace('_', '-')
        if self.extension and self.extension != 'html':
            uri += '.' + self.extension
        if self.language:
            uri = '/%s%s' % (self.language, uri)
        uri = '/%s%s%s%s' % (
            app,
            uri,
            urllib.quote('/' + '/'.join(
                str(x) for x in self.args)) if self.args else '',
            ('?' + self.query) if self.query else '')
        self.env['REQUEST_URI'] = uri
        self.sluggify()
    @property
    def arg0(self):
        "return first arg"
        return self.args(0)
    @property
    def harg0(self):
        "return first arg with optional hyphen mapping"
        if self.map_hyphen and self.args(0):
            return self.args(0).replace('-', '_')
        return self.args(0)
    def pop_arg_if(self, dopop):
        "conditionally remove first arg and return new first arg"
        if dopop:
            self.args.pop(0)
class MapUrlOut(object):
    """Logic for mapping outgoing URLs.

    Gathers the target route components (application/controller/function,
    args, language, host) together with the per-application router
    configuration, then decides which components can safely be omitted
    from the generated URL such that map_url_in can unambiguously
    reverse the transformation.
    """
    def __init__(self, request, env, application, controller,
                 function, args, other, scheme, host, port):
        "initialize a map-out object"
        self.default_application = routers.BASE.default_application
        # fall back to the BASE router when the app has no router of its own
        if application in routers:
            self.router = routers[application]
        else:
            self.router = routers.BASE
        self.request = request
        self.env = env
        self.application = application
        self.controller = controller
        self.is_static = (
            controller == 'static' or controller.startswith('static/'))
        self.function = function
        self.args = args
        self.other = other
        self.scheme = scheme
        self.host = host
        self.port = port
        self.applications = routers.BASE.applications
        self.controllers = self.router.controllers
        self.functions = self.router.functions.get(self.controller, set())
        self.languages = self.router.languages
        self.default_language = self.router.default_language
        self.exclusive_domain = self.router.exclusive_domain
        self.map_hyphen = self.router.map_hyphen
        self.map_static = self.router.map_static
        self.path_prefix = routers.BASE.path_prefix
        # request may be None when URLs are generated outside a request cycle
        self.domain_application = request and self.request.env.domain_application
        self.domain_controller = request and self.request.env.domain_controller
        # default_function may be per-controller (dict) or global (string)
        if isinstance(self.router.default_function, dict):
            self.default_function = self.router.default_function.get(
                self.controller, None)
        else:
            self.default_function = self.router.default_function
        if (self.router.exclusive_domain and self.domain_application and self.domain_application != self.application and not self.host):
            raise SyntaxError('cross-domain conflict: must specify host')
        lang = request and request.uri_language
        if lang and self.languages and lang in self.languages:
            self.language = lang
        else:
            self.language = None
        # omission flags are decided later by omit_lang()/omit_acf()
        self.omit_application = False
        self.omit_language = False
        self.omit_controller = False
        self.omit_function = False
    def omit_lang(self):
        "omit language if possible"
        if not self.language or self.language == self.default_language:
            self.omit_language = True
    def omit_acf(self):
        "omit what we can of a/c/f"
        router = self.router
        # Handle the easy no-args case of tail-defaults: /a/c /a /
        #
        if not self.args and self.function == self.default_function:
            self.omit_function = True
            if self.controller == router.default_controller:
                self.omit_controller = True
                if self.application == self.default_application:
                    self.omit_application = True
        # omit default application
        # (which might be the domain default application)
        #
        default_application = self.domain_application or self.default_application
        if self.application == default_application:
            self.omit_application = True
        # omit controller if default controller
        #
        default_controller = ((self.application == self.domain_application) and self.domain_controller) or router.default_controller or ''
        if self.controller == default_controller:
            self.omit_controller = True
        # omit function if possible
        #
        if self.functions and self.function in self.functions and self.function == self.default_function:
            self.omit_function = True
        # prohibit ambiguous cases
        #
        # because we presume the lang string to be unambiguous, its presence protects application omission
        #
        if self.exclusive_domain:
            applications = [self.domain_application]
        else:
            applications = self.applications
        # each omission is only kept if the next-left component could not
        # be mistaken for the omitted one by map_url_in
        if self.omit_language:
            if not applications or self.controller in applications:
                self.omit_application = False
        if self.omit_application:
            if not applications or self.function in applications:
                self.omit_controller = False
        if not self.controllers or self.function in self.controllers:
            self.omit_controller = False
        if self.args:
            if self.args[0] in self.functions or self.args[0] in self.controllers or self.args[0] in applications:
                self.omit_function = False
        if self.omit_controller:
            if self.function in self.controllers or self.function in applications:
                self.omit_controller = False
        if self.omit_application:
            if self.controller in applications:
                self.omit_application = False
        # handle static as a special case
        # (easier for external static handling)
        #
        if self.is_static:
            if not self.map_static:
                self.omit_application = False
                if self.language:
                    self.omit_language = False
            self.omit_controller = False
            self.omit_function = False
    def build_acf(self):
        "build acf from components"
        acf = ''
        if self.map_hyphen:
            self.application = self.application.replace('_', '-')
            self.controller = self.controller.replace('_', '-')
            # static file paths must keep their literal underscores
            if self.controller != 'static' and not self.controller.startswith('static/'):
                self.function = self.function.replace('_', '-')
        if not self.omit_application:
            acf += '/' + self.application
        # handle case of flipping lang/static/file to static/lang/file for external rewrite
        if self.is_static and self.map_static is False and not self.omit_language:
            acf += '/' + self.controller + '/' + self.language
        else:
            if not self.omit_language:
                acf += '/' + self.language
            if not self.omit_controller:
                acf += '/' + self.controller
        if not self.omit_function:
            acf += '/' + self.function
        if self.path_prefix:
            acf = '/' + '/'.join(self.path_prefix) + acf
        if self.args:
            return acf
        return acf or '/'
    def acf(self):
        "convert components to /app/lang/controller/function"
        if not routers:
            return None  # use regex filter
        self.omit_lang()  # try to omit language
        self.omit_acf()  # try to omit a/c/f
        return self.build_acf()  # build and return the /a/lang/c/f string
def map_url_in(request, env, app=False):
    """route incoming URL

    Parses the incoming request path into (application, language,
    controller, function, args).  Returns a 3-tuple
    (static_file_or_None, version_or_None, env); when app=True it
    short-circuits and returns just the application name.
    """
    # initialize router-url object
    #
    THREAD_LOCAL.routes = params # default to base routes
    map = MapUrlIn(request=request, env=env)
    map.sluggify()
    map.map_prefix() # strip prefix if present
    map.map_app() # determine application
    # configure THREAD_LOCAL.routes for error rewrite
    #
    if params.routes_app:
        THREAD_LOCAL.routes = params_apps.get(app, params)
    if app:
        return map.application
    root_static_file, version = map.map_root_static(
    ) # handle root-static files
    if root_static_file:
        map.update_request()
        return (root_static_file, version, map.env)
    # handle mapping of lang/static to static/lang in externally-rewritten URLs
    # in case we have to handle them ourselves
    if map.languages and map.map_static is False and map.arg0 == 'static' and map.args(1) in map.languages:
        map.map_controller()
        map.map_language()
    else:
        map.map_language()
        map.map_controller()
    static_file, version = map.map_static()
    if static_file:
        map.update_request()
        return (static_file, version, map.env)
    map.map_function()
    map.validate_args()
    map.update_request()
    return (None, None, map.env)
def map_url_out(request, env, application, controller,
                function, args, other, scheme, host, port):
    '''
    Supply the /a/c/f (or /a/lang/c/f) portion of an outgoing URL.

    Only transformations that map_url_in can reverse are permitted.
    Given incoming components a, c, f, args, lang and router defaults
    da, dc, df, dl, the trivial omissions (args == [] and lang is None
    or dl) are:
        /da/dc/df => /
        /a/dc/df  => /a
        /a/c/df   => /a/c
    With function/args present we may also strip the default
    application (or application/controller):
        /da/c/f/args  => /c/f/args
        /da/dc/f/args => /f/args
    The [applications] and [controllers] lists and {functions} set are
    used to suppress ambiguous omissions.  Language names are assumed
    not to collide with a/c/f names.
    '''
    out_map = MapUrlOut(request, env, application, controller,
                        function, args, other, scheme, host, port)
    return out_map.acf()
def get_effective_router(appname):
    "return a private copy of the effective router for the specified application"
    if routers and appname in routers:
        # hand back a copy so callers cannot mutate the shared router
        return Storage(routers[appname])
    return None
| pouyana/teireader | webui/gluon/rewrite.py | Python | mit | 52,398 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2012 Tuukka Turto
#
# This file is part of satin-python.
#
# pyherc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyherc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with satin-python. If not, see <http://www.gnu.org/licenses/>.
"""
Module for testing labels
"""
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
from .enumerators import all_widgets
class LabelMatcher(BaseMatcher):
    """
    Matcher that succeeds when a widget tree contains a label with
    the given text
    """
    def __init__(self, text):
        """
        Default constructor

        :param text: expected label text, either a plain value or a
                     hamcrest matcher
        """
        super(LabelMatcher, self).__init__()
        # wrap plain values so self.text is always a matcher
        self.text = text if hasattr(text, 'matches') else wrap_matcher(text)

    def _matches(self, item):
        """
        Check if matcher matches item

        :param item: object to match against
        :returns: True if matching, otherwise False
        :rtype: Boolean
        """
        return any(hasattr(widget, 'text') and self.text.matches(widget.text())
                   for widget in all_widgets(item))

    def describe_to(self, description):
        """
        Describe this matcher
        """
        description.append('Control with label {0}'.format(self.text))

    def describe_mismatch(self, item, mismatch_description):
        """
        Describe this mismatch
        """
        mismatch_description.append(
            'QLabel with text {0} was not found'.format(self.text))
def has_label(text):
    """
    Create a matcher checking that a widget has a label with the
    given text
    """
    matcher = LabelMatcher(text)
    return matcher
| tuturto/satin-python | satin/label.py | Python | gpl-3.0 | 2,211 |
from filebeat import BaseTest
import gzip
import os
import time
import unittest
"""
Tests that YapdnsBeat shuts down cleanly.
"""
class Test(BaseTest):
    def test_shutdown(self):
        """
        Test starting and stopping YapdnsBeat under load.
        """
        # Inflate the gzipped NASA access-log fixture once, if needed.
        # NOTE(review): gzip yields bytes; writing them to a mode-'w'
        # text file assumes Python 2 -- confirm before running on py3.
        nasa_log = '../files/logs/nasa-50k.log'
        if not os.path.isfile(nasa_log):
            with gzip.open('../files/logs/nasa-50k.log.gz', 'rb') as compressed, \
                    open(nasa_log, 'w') as plain:
                for record in compressed:
                    plain.write(record)
        log_glob = os.path.abspath(self.working_dir) + "/log/*"
        self.render_config_template(
            path=log_glob,
            ignoreOlder="1h"
        )
        os.mkdir(self.working_dir + "/log/")
        self.copy_files(["logs/nasa-50k.log"],
                        source_dir="../files",
                        target_dir="log")
        # Repeatedly start the beat under load and check it dies cleanly.
        for _ in range(4):
            beat = self.start_beat(logging_args=["-e", "-v"])
            time.sleep(.5)
            beat.check_kill_and_wait()
| yapdns/yapdnsbeat | tests/system/test_shutdown.py | Python | mit | 1,098 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the 'short_description' field to Course.

    Generated code -- keep the structure intact so South can parse it.
    """
    def forwards(self, orm):
        # Adding field 'Course.short_description'
        db.add_column(u'courses_course', 'short_description',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Course.short_description'
        db.delete_column(u'courses_course', 'short_description')
    # Frozen ORM snapshot used by South while running this migration.
    models = {
        u'courses.course': {
            'Meta': {'object_name': 'Course'},
            'applications_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'enable_applications': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'end_time': ('django.db.models.fields.DateField', [], {}),
            'git_repository': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'show_on_index': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'start_time': ('django.db.models.fields.DateField', [], {})
        }
    }
    complete_apps = ['courses']
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
from optparse import OptionParser, Option
from spacewalk.common.rhnConfig import PRODUCT_NAME
# Short lowercase aliases for the optparse classes.
# Not strictly necessary, but makes them easier to type below.
option_parser = OptionParser
option = Option
# pylint: disable=R0903
class UI:
    """Command-line interface for rhn-satellite-exporter.

    Builds the optparse option table, parses sys.argv, and copies each
    parsed option onto the instance so callers can read them as plain
    attributes (e.g. ``ui.no_errata``).
    """
    def __init__(self):
        # Full catalogue of supported command-line options.
        # NOTE: help strings are user-visible; keep them verbatim.
        self.optiontable = [
            option("-d", "--dir", action="store",
                   help="This is the directory that the information that you want to sync gets dumped in."),
            option("--hard-links", action="store_true", default=0,
                   help="Exported RPM and kickstart are hard linked to original files."),
            option("--list-channels", action="store_true", default=0,
                   help="List all of the channels that can be exported."),
            option("--list-steps", action="store_true", default=0,
                   help="List all of the steps that rhn-satellite-exporter takes while exporting data."
                   + " These can be used as values for --step"),
            option("-c", "--channel", action="append",
                   help="Include this channel in the export."),
            option("-a", "--all-channels", action="store_true", default=0,
                   help="Export all channels."),
            option("--start-date", action="store",
                   help="The start date limit that the last modified dates are compared against. "
                   + "Should be in the format 'YYYYMMDDHH24MISS'."),
            option("--end-date", action="store",
                   help="The end date limit that the last modified dates are compared against. "
                   + "Should be in the format 'YYYYMMDDHH24MISS'."),
            option("--use-rhn-date", action="store_true",
                   help="Limit exported packages according to the date when they appeared at Red Hat Network."),
            option("--use-sync-date", action="store_true",
                   help="Limit exported packages according to the date they where pulled into %s." % PRODUCT_NAME),
            option("--whole-errata", action="store_true",
                   help="Always include package if it belongs to errata which is withing start/end-date range."),
            option("--make-isos", action="store",
                   help="Create channel dump isos a directory called satellite-isos. Usage: --make-isos=cd or dvd"),
            option("-p", "--print-configuration", action="store_true", default=0,
                   help="Print the configuration and exit."),
            option("--print-report", action="store_true", default=0,
                   help="Print the report to the terminal when the export is complete."),
            option("--step", action="store",
                   help="Export only up to this step."),
            option("--no-rpms", action="store_true",
                   help="Do not export RPMs."),
            option("--no-packages", action="store_true",
                   help="Do not export package metadata."),
            option("--no-errata", action="store_true",
                   help="Do not export errata data."),
            option("--no-kickstarts", action="store_true",
                   help="Do not export kickstart data."),
            option("--debug-level", action="store",
                   help="Set the debug level to this value. Overrides the value in rhn.conf."),
            option("-v", "--verbose", action="store_true",
                   help="Set debug level to 3. Overrides the value in rhn.conf.."),
            option("--email", action="store_true",
                   help="Email a report of what was exported."),
            option("--traceback-mail", action="store",
                   help="Alternative email address for --email."),
            option("--all-orgs", action="store_true",
                   help="Export all orgs."),
            option("-o", "--org", action="append",
                   help="Include the org with this id in the export."),
            option("--list-orgs", action="store_true",
                   help="List all orgs that can be exported"),
        ]
        self.optionparser = option_parser(option_list=self.optiontable)
        self.options, self.args = self.optionparser.parse_args()
        # -v is shorthand for --debug-level 3 (unless one was given)
        if self.options.verbose and not self.options.debug_level:
            self.options.debug_level = 3
        # mirror each parsed option as a direct instance attribute,
        # without clobbering anything already set on self
        for i in self.options.__dict__.keys():
            if i not in self.__dict__:
                self.__dict__[i] = self.options.__dict__[i]
if __name__ == "__main__":
# pylint: disable=E1101
a = UI()
print(str(a.no_errata))
| shastah/spacewalk | backend/satellite_tools/disk_dumper/iss_ui.py | Python | gpl-2.0 | 5,523 |
from django.conf import settings
from django.utils.importlib import import_module
import mimetypes
# Dotted import paths of the backend callables.  Each can be overridden
# via the corresponding Django settings entry; the defaults below point
# at the bundled no-op backends.
PREPARE_UPLOAD_BACKEND = getattr(settings,
    'PREPARE_UPLOAD_BACKEND',
    'filetransfers.backends.default.prepare_upload')
SERVE_FILE_BACKEND = getattr(settings,
    'SERVE_FILE_BACKEND',
    'filetransfers.backends.default.serve_file')
PUBLIC_DOWNLOAD_URL_BACKEND = getattr(settings,
    'PUBLIC_DOWNLOAD_URL_BACKEND',
    'filetransfers.backends.default.public_download_url')
# Cache of already-imported backend callables, keyed by dotted path.
_backends_cache = {}
# Public API
def prepare_upload(request, url, private=False, backend=None):
    """Delegate upload preparation to the configured backend."""
    upload_handler = _load_backend(backend, PREPARE_UPLOAD_BACKEND)
    return upload_handler(request, url, private=private)
def serve_file(request, file, backend=None, save_as=False, content_type=None):
    """Serve *file* through the configured backend.

    Backends are responsible for handling range requests.
    """
    handler = _load_backend(backend, SERVE_FILE_BACKEND)
    # last path component of the stored file name
    filename = file.name.rsplit('/')[-1]
    if save_as is True:
        # save_as=True means "use the file's own name"
        save_as = filename
    if not content_type:
        content_type, _encoding = mimetypes.guess_type(filename)
    return handler(request, file, save_as=save_as, content_type=content_type)
def public_download_url(file, backend=None):
    """Return a public download URL for *file* from the configured backend."""
    url_handler = _load_backend(backend, PUBLIC_DOWNLOAD_URL_BACKEND)
    return url_handler(file)
# Internal utilities
def _load_backend(backend, default_backend):
    """Resolve a dotted-path backend string to a callable, caching the result."""
    if backend is None:
        backend = default_backend
    try:
        return _backends_cache[backend]
    except KeyError:
        module_name, func_name = backend.rsplit('.', 1)
        handler = getattr(import_module(module_name), func_name)
        _backends_cache[backend] = handler
        return handler
| gquirozbogner/contentbox-master | third_party/filetransfers/api.py | Python | apache-2.0 | 1,615 |
#!/usr/bin/env python3
import os
from config import Dirs, Disks, Files, XAS99_CONFIG
from utils import (xas, xdm, sinc, error, clear_env, delfile, check_obj_code_eq, check_image_set_eq,
check_image_files_eq, read_stderr, get_source_markers, check_errors)
# Main test
def runtest():
    """check cross-generated output against native reference files

    Assembles each source with xas99 and compares the result with the
    reference object/image files stored on the emulator disks.
    """
    clear_env(XAS99_CONFIG)
    # object code
    # (source file, extra xas99 options, reference object, compressed ref or None)
    for inp_file, opts, ref_file, compr_file in [
            ('asdirs.asm', [], 'ASDIRS-O', 'ASDIRS-C'),
            ('asorgs.asm', [], 'ASORGS-O', 'ASORGS-C'),
            ('asopcs.asm', [], 'ASOPCS-O', 'ASOPCS-C'),
            ('asexprs.asm', [], 'ASEXPRS-O', None),
            ('asbss.asm', [], 'ASBSS-O', 'ASBSS-C'),
            ('asregs.asm', ['-R'], 'ASREGS-O', 'ASREGS-C'),
            ('ashellon.asm', ['-R'], 'ASHELLO-O', 'ASHELLO-C'),
            ('ascopy.asm', [], 'ASCOPY-O', None),
            ('ascopyn.asm', [], 'ASCOPYN-O', None),
            ('assize1.asm', [], 'ASSIZE1-O', 'ASSIZE1-C'),
            ('assize2.asm', [], 'ASSIZE2-O', None),
            ('assize3.asm', [], 'ASSIZE3-O', None),
            ('assize4.asm', [], 'ASSIZE4-O', None),
            ('asextsym.asm', [], 'ASEXTSYM-O', None),
            ('asdorg.asm', [], 'ASDORG-O', None),
            ('asrorg.asm', [], 'ASRORG-O', None),
            ('asimg1.asm', [], 'ASIMG1-O', 'ASIMG1-C'),
            ('asimg2.asm', [], 'ASIMG2-O', None),
            ('asimg3.asm', [], 'ASIMG3-OX', None),
            ('asreloc.asm', [], 'ASRELOC-O', None),
            ('asxorg.asm', [], 'ASXORG-O', None),
            ('ascart.asm', ['-R'], 'ASCART-O', 'ASCART-C')
            ]:
        source = os.path.join(Dirs.sources, inp_file)
        xdm(Disks.asmsrcs, '-e', ref_file, '-o', Files.reference)
        xas(*[source] + opts + ['-q', '-o', Files.output])
        check_obj_code_eq(Files.output, Files.reference)
        # --strict mode must produce identical object code
        xas(*[source] + opts + ['--strict', '-q', '-o', Files.output])
        check_obj_code_eq(Files.output, Files.reference)
        if compr_file:
            # compressed object code
            xas(*[source] + opts + ['-C', '-q', '-o', Files.output])
            xdm(Disks.asmsrcs, '-e', compr_file, '-o', Files.reference)
            check_obj_code_eq(Files.output, Files.reference, compressed=True)
    # image files
    for inp_file, ref_file in [
            ('asimg1.asm', 'ASIMG1-I'),
            ('asimg2.asm', 'ASIMG2-I'),
            ('asimg3.asm', 'ASIMG3-I')
            ]:
        source = os.path.join(Dirs.sources, inp_file)
        xas(source, '-i', '-o', Files.output)
        xdm(Disks.asmsrcs, '-e', ref_file, '-o', Files.reference)
        check_image_files_eq(Files.output, Files.reference)
    # multi-file image sets (xas99 emits one file per chunk)
    for inp_file, reffiles in [
            ('aslimg.asm', ['ASLIMG-I', 'ASLIMG-J', 'ASLIMG-K']),
            ('assimg.asm', ['ASSIMG-I', 'ASSIMG-J', 'ASSIMG-K', 'ASSIMG-L']),
            ('asreloc.asm', ['ASRELOC-I'])
            ]:
        source = os.path.join(Dirs.sources, inp_file)
        xas(source, '-R', '-i', '-q', '-o', Files.output)
        gendata = []
        refdata = []
        for i, ref_file in enumerate(reffiles):
            xdm(Disks.asmimgs, '-e', ref_file, '-o', Files.reference)
            with open(Files.outputff[i], 'rb') as fgen, open(Files.reference, 'rb') as fref:
                gendata.append(fgen.read())
                refdata.append(fref.read())
        check_image_set_eq(gendata, refdata)
    # JMP instruction
    # error positions in the source are marked with ';ERROR' comments
    source = os.path.join(Dirs.sources, 'asjmp.asm')
    with open(Files.error, 'w') as ferr:
        xas(source, '-o', Files.output, stderr=ferr, rc=1)
    xaserrors = read_stderr(Files.error)
    referrors = get_source_markers(source, r';ERROR(:....)?')
    check_errors(referrors, xaserrors)
    # xas99-defined symbols
    source = os.path.join(Dirs.sources, 'asxassym.asm')
    xas(source, '-b', '-o', Files.output)
    with open(Files.output, 'rb') as f:
        data = f.read()
    # a zero word would mean a predefined symbol resolved to nothing
    for i in range(0, len(data), 2):
        if data[i:i + 2] == b'\x00\x00':
            error('symbols', 'Undefined xas99 symbol')
    # DORG special cases
    source = os.path.join(Dirs.sources, 'asdorg.asm')
    xas(source, '-a', '>2000', '-o', Files.output)
    ref = os.path.join(Dirs.sources, 'asdorg-ti.asm')
    xas(ref, '-a', '>2000', '-o', Files.reference)
    check_obj_code_eq(Files.output, Files.reference)
    # cleanup
    delfile(Dirs.tmp)
if __name__ == '__main__':
    # run the test suite directly; any failure raises/exits before 'OK'
    runtest()
    print('OK')
| endlos99/xdt99 | test/as-checkobj.py | Python | gpl-3.0 | 4,385 |
"""
Sponge Knowledge Base
gRPC demo - Counter - sender.
"""
from java.util.concurrent.atomic import AtomicLong
def onInit():
    # Sponge engine callback: runs once when this knowledge base loads.
    # Initialize a counter as a Sponge variable.
    sponge.setVariable("counter", AtomicLong(1))
class CounterSender(Trigger):
    def onConfigure(self):
        """Subscribe this trigger to the counterSender event."""
        self.withEvent("counterSender")

    def onRun(self, event):
        """Bump the shared counter and broadcast a notification event."""
        counter_value = sponge.getVariable("counter").incrementAndGet()
        # Emit a counterNotification event that a GUI can subscribe to.
        notification = sponge.event("counterNotification")
        notification.set({"counter":counter_value}).label("The counter is " + str(counter_value)).send()
def onStartup():
    # Sponge engine callback: runs once after the engine starts.
    # Send counterSender event every 5 seconds.
    sponge.event("counterSender").sendEvery(Duration.ofSeconds(5))
| softelnet/sponge | sponge-app/sponge-app-demo-service/sponge/sponge_demo_events_counter_sender.py | Python | apache-2.0 | 769 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import atexit
import errno
import os
import shutil
import subprocess
import sys
import tempfile
import time
from thrift.TSerialization import deserialize as thrift_deserialize
from twitter.common.contextutil import environment_as, temporary_file
from apache.thermos.common.ckpt import CheckpointDispatcher
from apache.thermos.common.path import TaskPath
from apache.thermos.config.loader import ThermosTaskWrapper
from gen.apache.thermos.ttypes import RunnerState
class Runner(object):
  """Drives a Thermos TaskRunner in a subprocess for tests.

  Writes the task config and a generated driver script to temp files,
  runs the script with the current interpreter, then deserializes the
  resulting RunnerState (and a checkpoint-replayed copy) for assertions.
  A success_rate < 100 makes checkpoint writes randomly abort the
  runner, simulating crashes.
  """
  RUN_JOB_SCRIPT = """
# this is a hack to process wheel nspkg declarations
import os, sys, site
for path in sys.path:
  if path.endswith('.whl') and os.path.isdir(path):
    site.addsitedir(path)
import os
import random
import sys
from twitter.common import log
from twitter.common.log.options import LogOptions
from apache.thermos.config.loader import ThermosConfigLoader
from apache.thermos.core.helper import TaskRunnerHelper
from apache.thermos.core.runner import TaskRunner, TaskRunnerUniversalHandler
from thrift.TSerialization import serialize as thrift_serialize
random.seed(%(random_seed)d)
log.init('runner_base')
LogOptions.set_disk_log_level('DEBUG')
task = ThermosConfigLoader.load_json('%(filename)s')
task = task.tasks()[0].task
success_rate=%(success_rate)d
class AngryHandler(TaskRunnerUniversalHandler):
  def checkpoint(self, record):
    if not self._runner._recovery:
      if random.randint(0, 100) <= success_rate:
        super(AngryHandler, self).checkpoint(record)
      else:
        sys.exit(1)
sandbox = os.path.join('%(sandbox)s', '%(task_id)s')
args = {}
args['task_id'] = '%(task_id)s'
if %(portmap)s:
  args['portmap'] = %(portmap)s
args['universal_handler'] = AngryHandler
runner = TaskRunner(task, '%(root)s', sandbox, **args)
runner.run()
with open('%(state_filename)s', 'w') as fp:
  fp.write(thrift_serialize(runner.state))
"""
  def __init__(self, task, portmap={}, success_rate=100, random_seed=31337):
    """
      task = Thermos task
      portmap = port map
      success_rate = success rate of writing checkpoint to disk
    """
    self.task = task
    # serialized task config read back by RUN_JOB_SCRIPT
    with temporary_file(cleanup=False) as fp:
      self.job_filename = fp.name
      fp.write(ThermosTaskWrapper(task).to_json())
    self.state_filename = tempfile.mktemp()
    self.tempdir = tempfile.mkdtemp()
    self.task_id = '%s-runner-base' % int(time.time() * 1000000)
    self.sandbox = os.path.join(self.tempdir, 'sandbox')
    self.portmap = portmap
    self.cleaned = False
    self.pathspec = TaskPath(root=self.tempdir, task_id=self.task_id)
    self.script_filename = None
    self.success_rate = success_rate
    self.random_seed = random_seed
    self._run_count = 0
  @property
  def pid(self):
    # pid of the most recent runner subprocess
    return self.po.pid
  @property
  def root(self):
    return self.tempdir
  def run(self):
    """Run the task once in a subprocess; returns its exit code."""
    self._run_count += 1
    atexit.register(self.cleanup)
    if self.script_filename:
      os.unlink(self.script_filename)
    # render a fresh driver script; vary the seed per run so retries
    # do not replay identical "random" crashes
    with temporary_file(cleanup=False) as fp:
      self.script_filename = fp.name
      fp.write(self.RUN_JOB_SCRIPT % {
        'filename': self.job_filename,
        'sandbox': self.sandbox,
        'root': self.tempdir,
        'task_id': self.task_id,
        'state_filename': self.state_filename,
        'portmap': repr(self.portmap),
        'success_rate': self.success_rate,
        'random_seed': self.random_seed + self._run_count,
      })
    with environment_as(PYTHONPATH=os.pathsep.join(sys.path)):
      self.po = subprocess.Popen([sys.executable, self.script_filename],
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      try:
        so, se = self.po.communicate()
      except OSError as e:
        if e.errno == errno.ECHILD:
          so = se = 'Killed'
        else:
          raise
    rc = self.po.returncode
    if rc != 0:
      if os.path.exists(self.job_filename):
        with open(self.job_filename) as fp:
          config = fp.read()
      else:
        config = 'Nonexistent!'
      if 'THERMOS_DEBUG' in os.environ:
        print("Runner failed!\n\n\nconfig:%s\n\n\nstdout:%s\n\n\nstderr:%s\n\n\n" % (
            config, so, se))
    # deserialize the state the runner wrote; fall back to an empty
    # RunnerState if the runner died before writing it
    try:
      with open(self.state_filename, 'r') as fp:
        self.state = thrift_deserialize(RunnerState(), fp.read())
    except Exception as e:
      if 'THERMOS_DEBUG' in os.environ:
        print('Failed to load Runner state: %s' % e, file=sys.stderr)
      self.state = RunnerState()
    # also replay the checkpoint stream for comparison in tests
    try:
      self.reconstructed_state = CheckpointDispatcher.from_file(
          self.pathspec.getpath('runner_checkpoint'))
    except Exception as e:
      print('Failed to replay checkpoint: %s' % e, file=sys.stderr)
      self.reconstructed_state = None
    self.initialized = True
    return rc
  def cleanup(self):
    """Kill any live runner and remove temp files (idempotent)."""
    if not self.cleaned:
      if hasattr(self, 'po'):
        try:
          self.po.kill()
        except Exception as e:
          print('Failed to kill runner: %s' % e, file=sys.stderr)
          pass
      os.unlink(self.job_filename)
      os.unlink(self.script_filename)
      if 'THERMOS_DEBUG' not in os.environ:
        shutil.rmtree(self.tempdir, ignore_errors=True)
      else:
        print('Logs saved in %s' % self.tempdir)
      self.cleaned = True
class RunnerTestBase(object):
  """Reusable scaffold: runs the task once per test class and exposes
  the resulting runner state to the tests."""

  @classmethod
  def task(cls):
    raise NotImplementedError

  @classmethod
  def setup_class(cls):
    ports = getattr(cls, 'portmap', {})
    cls.runner = Runner(cls.task(), portmap=ports)
    cls.runner.run()
    cls.state = cls.runner.state

  @classmethod
  def teardown_class(cls):
    cls.runner.cleanup()

  def test_runner_state_reconstruction(self):
    # the replayed checkpoint must match the serialized state
    assert self.state == self.runner.reconstructed_state
| shahankhatch/aurora | src/main/python/apache/thermos/testing/runner.py | Python | apache-2.0 | 6,225 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
# Generic return type produced by the optional ``cls`` callback.
T = TypeVar('T')
# Signature of the ``cls`` response-transformer callback accepted by operations.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRoutePortsLocationsOperations:
    """ExpressRoutePortsLocationsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_08_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: generated by AutoRest -- manual edits will be lost on regeneration.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.ExpressRoutePortsLocationListResult"]:
        """Retrieves all ExpressRoutePort peering locations. Does not return available bandwidths for each
        location. Available bandwidths can only be obtained when retrieving a specific peering
        location.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRoutePortsLocationListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.ExpressRoutePortsLocationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePortsLocationListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # first page: build the URL from the operation template
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # subsequent pages: the service returns a fully-formed next_link
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ExpressRoutePortsLocationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations'}  # type: ignore
    async def get(
        self,
        location_name: str,
        **kwargs: Any
    ) -> "_models.ExpressRoutePortsLocation":
        """Retrieves a single ExpressRoutePort peering location, including the list of available
        bandwidths available at said peering location.
        :param location_name: Name of the requested ExpressRoutePort peering location.
        :type location_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRoutePortsLocation, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_08_01.models.ExpressRoutePortsLocation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRoutePortsLocation"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'locationName': self._serialize.url("location_name", location_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ExpressRoutePortsLocation', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations/{locationName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/aio/operations/_express_route_ports_locations_operations.py | Python | mit | 7,901 |
#!/usr/bin/python
class Graph:
    """
    Directed graph stored as an adjacency mapping
    ``{vertex: {neighbour: weight}}``.

    Edges are ordered ``(u, v)`` pairs meaning u -> v.  The previous
    implementation unpacked edges through ``set(edge)``, which discards
    the direction (set iteration order is arbitrary), and ``add_edge``
    wrote ``graph[v][u]`` while every reader used ``graph[u][v]``.  Edges
    are now unpacked directly and stored consistently as ``graph[u][v]``.
    """
    DEFAULT_WEIGHT = 1
    DIRECTED = True

    def __init__(self, graph_dict=None):
        """initializes a graph object"""
        if graph_dict is None:
            graph_dict = {}
        self.__graph_dist = graph_dict

    def __str__(self):
        return "Directed Graph \nNodes: %s \nEdges: %s" % (
            self.list_vertices(), self.list_edges())

    def graph(self):
        """Return the underlying adjacency dict (not a copy)."""
        return self.__graph_dist

    def list_vertices(self):
        return list(self.__graph_dist.keys())

    def list_edges(self):
        return self.__generate_edges()

    def __generate_edges(self):
        # Ordered (u, v) tuples preserve direction; the old {u, v} sets
        # collapsed u->v and v->u into a single undirected entry.
        return [(u, v) for u in self.__graph_dist
                for v in self.__graph_dist[u]]

    def add_edge(self, edge, weight=DEFAULT_WEIGHT):
        """Add directed edge (u, v) with ``weight`` unless already present.

        Unknown endpoints are added as vertices first (previously a
        missing endpoint raised KeyError)."""
        u, v = edge
        self.add_vertex(u)
        self.add_vertex(v)
        if v not in self.__graph_dist[u]:
            self.__graph_dist[u][v] = weight

    def add_vertex(self, vertex):
        if vertex not in self.__graph_dist:
            self.__graph_dist[vertex] = {}

    def has_edge(self, edge):
        """Return True when the directed edge (u, v) exists."""
        u, v = edge
        return v in self.__graph_dist.get(u, {})

    def delete_edge(self, edge):
        u, v = edge
        if not self.has_edge(edge):
            raise Exception("Edge (%s, %s) not an existing edge" % (u, v))
        del self.__graph_dist[u][v]

    def delete_vertex(self, vertex):
        """Remove a vertex plus all of its outgoing and incoming edges."""
        if vertex in self.__graph_dist:
            for neighbour in list(self.__graph_dist[vertex]):
                self.delete_edge((vertex, neighbour))
            # Also drop edges pointing *at* the vertex so no adjacency
            # entry is left dangling.
            for u in self.__graph_dist:
                self.__graph_dist[u].pop(vertex, None)
            del self.__graph_dist[vertex]

    def set_edge_weight(self, edge, weight):
        u, v = edge
        if not self.has_edge(edge):
            raise Exception("Edge (%s, %s) not an existing edge" % (u, v))
        self.__graph_dist[u][v] = weight

    def get_edge_weight(self, edge):
        u, v = edge
        if not self.has_edge(edge):
            raise Exception("%s not an existing edge" % edge)
        return self.__graph_dist[u].get(v, self.DEFAULT_WEIGHT)

    def find_path(self, start_vertex, end_vertex, path=None):
        """Depth-first search; returns a vertex list or None.

        Note: ``path`` is shared across the recursion on purpose -- it
        accumulates every vertex visited so far."""
        if path is None:
            path = []
        path.append(start_vertex)
        if start_vertex == end_vertex:
            return path
        if start_vertex not in self.__graph_dist:
            return None
        for neighbour in self.__graph_dist[start_vertex]:
            if neighbour not in path:
                extended_path = self.find_path(neighbour, end_vertex, path)
                if extended_path:
                    return extended_path
        return None
# Manual smoke test: exercises vertex/edge creation, deletion, weights and
# path finding on a small sample graph and prints each intermediate state.
if __name__ == "__main__":
    g = {
        "a": {"d": 4},
        "b": {"c": 2},
        "c": {"b": 2, "c": 5, "d": 1, "e": 7},
        "d": {"a": 4, "c": 1},
        "e": {"c": 7}
    }
    graph = Graph(g)
    print("Vertices of graph:")
    print(graph.list_vertices())
    print("\nEdges of graph:")
    print(graph.list_edges())
    print("\nAdding a vertice")
    graph.add_vertex("g")
    print (graph.list_vertices())
    graph.add_edge(("g", "a"))
    graph.add_edge(("a", "c"))
    graph.add_edge(("g", "c"))
    print("\nEdges of graph:")
    print(graph.list_edges())
    print (graph.list_vertices())
    print(graph.graph())
    print(graph.has_edge(("a", "c")))
    print(graph.graph())
    print("\nDeleting edge (a, d):")
    graph.delete_edge(("a", "d"))
    print(graph.list_edges())
    print (graph.list_vertices())
    print(graph.graph())
    print("\nDeleting vertex a:")
    graph.delete_vertex("a")
    print (graph.list_vertices())
    print(graph.list_edges())
    print(graph.graph())
    print("\nPath between b to e:")
    print(graph.find_path("b", "e"))
    print("\nSetting edge weight for (c, e):")
    graph.set_edge_weight(("c", "e"), 2)
    print(graph.graph())
| codervikash/algorithms | Python/Graphs/directional_graph.py | Python | mit | 4,051 |
from datetime import datetime
from itertools import imap
from bson import ObjectId
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from django.views.generic import TemplateView, FormView, ListView, RedirectView, View
from django.conf import settings
from django import http
from tastypie.http import HttpNoContent
from blog.models import Post
from profiles.mixins import LoginRequiredMixin
from newsfeed.models import Entry
from newsfeed.constants import *
from documents.constants import *
from documents.forms import DocumentForm, ForkDocumentForm, SearchForm
from documents.mixins import DocumentMixin
from documents.models import Document
from documents.resources import DocumentResource
from documents.utils import extract_keywords
from documents.signals import (document_done, fork_done, star_done,
document_delete, fork_delete)
from documents.exporters.sql import (MysqlExporter, PostgresExporter,
SQLiteExporter, OracleExporter)
# Maps exporter identifiers (documents.constants) to SQL-dialect exporter
# classes; ExportDocumentView looks the requested dialect up here.
DOCUMENT_EXPORTERS = {
    EXPORTER_MYSQL: MysqlExporter,
    EXPORTER_POSTGRES: PostgresExporter,
    EXPORTER_SQLITE: SQLiteExporter,
    EXPORTER_ORACLE: OracleExporter,
}
class HomeView(TemplateView):
    """Landing page: paginated newsfeed plus featured/starred documents."""
    template_name = "index.html"
    def get_context_data(self, **kwargs):
        """Build the index context; anonymous visitors always get the
        public feed, logged-in users opt in via ``?public=true``."""
        if self.request.user.is_anonymous():
            is_public = True
        else:
            is_public = self.request.GET.get("public") == "true"
        try:
            page_number = int(self.request.GET.get("page"))
        except (ValueError, TypeError):
            # Missing or malformed ?page falls back to the first page.
            page_number = 1
        newsfeed = self.get_newsfeed(
            public=is_public,
            offset=NEWSFEED_LIMIT * (page_number - 1))
        # Only offer a "next" link when more entries remain.
        if NEWSFEED_LIMIT * page_number < newsfeed.count():
            next_page_url = self.get_next_page_url(self.request, page_number)
        else:
            next_page_url = None
        return {
            "is_public": is_public,
            "newsfeed": imap(Entry, newsfeed),
            "next_page_url": next_page_url,
            "featured_documents": self.get_featured_documents(),
            "starred_documents": self.get_starred_documents(),
            "latest_posts": self.get_latest_posts(),
            "search_form": SearchForm()
        }
    def get_featured_documents(self):
        return Document.objects.featured()
    def get_starred_documents(self):
        # Anonymous users have no stars.
        if self.request.user.is_anonymous():
            return []
        return Document.objects.starred(user_id=self.request.user.id)
    def get_newsfeed(self, public=True, offset=0, limit=NEWSFEED_LIMIT):
        """
        Fetches news items from the newsfeed database
        """
        parameters = {
            "news_type": {
                "$in": [NEWS_TYPE_REGISTRATION,
                        NEWS_TYPE_COMMENT,
                        NEWS_TYPE_DOCUMENT,
                        NEWS_TYPE_FORK,
                        NEWS_TYPE_STAR,
                        NEWS_TYPE_FOLLOWING]
            }}
        if not public:
            # Private feed: only entries addressed to the current user.
            parameters["recipients"] = {
                "$in": [self.request.user.pk]
            }
        newsfeed = Entry.objects.collection.find(
            parameters).sort([("date_created", -1)])
        return newsfeed[offset:offset + limit]
    def get_next_page_url(self, request, page_number):
        """
        Builds the next page link from GET parameters.
        """
        return "%(newsfeed_url)s?%(parameters)s" % {
            "newsfeed_url": reverse("home"),
            "parameters": urlencode({
                "public": request.GET.get("public") or "false",
                "page": page_number + 1
            })}
    def get_latest_posts(self):
        # Ten most recent blog posts for the sidebar.
        return Post.objects.all()[:10]
class DocumentDetailView(DocumentMixin, TemplateView):
    """Render a single document together with the available exporters."""
    template_name = "documents/show.html"

    def get_context_data(self, **kwargs):
        context = {
            "document": self.get_document(),
            "exporters": EXPORTERS,
        }
        return context
class ExportDocumentView(DocumentMixin, View):
    """Stream the current document rendered by one of the SQL exporters."""

    def get(self, *args, **kwargs):
        exporter_class = DOCUMENT_EXPORTERS.get(kwargs.get("exporter"))
        # Unknown exporter name -> 400.
        if exporter_class is None:
            return http.HttpResponseBadRequest()
        exporter = exporter_class(self.get_document())
        return http.HttpResponse(exporter.as_text(), content_type="text/plain")
class DocumentForksView(DocumentDetailView):
    """Document detail variant listing the forks of the document."""
    template_name = "documents/forks.html"

    def get_context_data(self, **kwargs):
        context = super(DocumentForksView, self).get_context_data(**kwargs)
        context["forks"] = context["document"].forks()
        return context
class DocumentStarsView(DocumentDetailView):
    # Detail-page variant showing who starred the document.
    template_name = "documents/stars.html"
class StarDocumentView(LoginRequiredMixin, RedirectView, DocumentMixin):
    """Toggle the current user's star on a document, then redirect back."""
    def post(self, request, *args, **kwargs):
        document = self.get_document()
        stars = document.get_stars()
        # Toggle membership; the star_done signal fires only when a star
        # is added, not when it is removed.
        if request.user.pk in stars:
            stars.remove(request.user.pk)
        else:
            stars.append(request.user.pk)
            star_done.send(sender=self, instance=document,
                           user=request.user)
        # NOTE(review): read-modify-write without any locking -- two
        # concurrent toggles can overwrite each other; confirm acceptable.
        Document.objects.collection.update(
            {"_id": document.pk},
            {"$set": {"stars": stars, "star_count": len(stars)}})
        return super(StarDocumentView, self).post(request, *args, **kwargs)
    def get_redirect_url(self, **kwargs):
        return reverse("show_document", args=[self.kwargs.get("slug")])
class DocumentEditView(LoginRequiredMixin, DocumentDetailView):
    """Edit page; GET requires edit rights, DELETE removes the document
    through the tastypie resource and notifies the matching signal."""
    template_name = "documents/edit.html"
    def get(self, request, *args, **kwargs):
        if not self.is_authorized():
            return self.redirect()
        return super(DocumentEditView, self).get(request, *args, **kwargs)
    def delete(self, *args, **kwargs):
        if not self.is_authorized():
            return self.redirect()
        document = self.get_document()
        # Forked documents and originals announce deletion differently.
        if document.fork_of is not None:
            signal = fork_delete
        else:
            signal = document_delete
        signal.send(sender=self, instance=self.get_document())
        resource = DocumentResource()
        resource.obj_delete(pk=self.kwargs.get("slug"))
        return HttpNoContent()
    def is_authorized(self):
        # The document itself decides who may edit it.
        return self.get_document().is_editable(user_id=self.request.user.id)
    def redirect(self):
        # Unauthorized users are bounced to the read-only detail page.
        return http.HttpResponseRedirect(
            reverse("show_document", kwargs=self.kwargs))
    def get_context_data(self, **kwargs):
        context = super(DocumentEditView, self).get_context_data(**kwargs)
        context["edit"] = True
        context["FIELD_TYPES"] = FIELD_TYPES
        context["SOCKETIO_HOST"] = settings.SOCKETIO_HOST
        return context
class NewDocumentView(LoginRequiredMixin, FormView):
    """Create a new document from DocumentForm, then go to its edit page."""
    form_class = DocumentForm
    template_name = "documents/new.html"
    def form_valid(self, form, **kwargs):
        # Insert straight into the mongo collection; ``_keywords`` feeds
        # the search view.
        self.object_id = Document.objects.collection.insert({
            "title": form.cleaned_data.get("title"),
            "user_id": self.request.user.pk,
            "date_created": datetime.now(),
            "entities": form.cleaned_data.get("entities"),
            "is_public": form.cleaned_data.get("is_public"),
            "_keywords": extract_keywords(form.cleaned_data.get("title"))
        })
        document = Document.objects.get(_id=ObjectId(self.object_id))
        document_done.send(sender=self, instance=document)
        return super(NewDocumentView, self).form_valid(form)
    def get_success_url(self):
        return reverse("edit_document", args=[self.object_id])
class MyDocumentsView(LoginRequiredMixin, TemplateView):
    """List the current user's own documents and the ones shared with them."""
    template_name = "documents/list.html"

    def get_context_data(self, **kwargs):
        return {
            "documents": self.get_documents(),
            "shared": self.get_shared_documents(),
        }

    def get_documents(self):
        user_id = self.request.user.id
        return [Document(record)
                for record in Document.objects.for_user(user_id)]

    def get_shared_documents(self):
        user_id = self.request.user.id
        return [Document(record)
                for record in Document.objects.assigned(user_id)]
class SearchDocumentView(ListView):
    """Keyword search over documents via the ``_keywords`` mongo field."""
    template_name = "documents/search.html"
    context_object_name = "documents"
    def get_queryset(self):
        form = self.get_form()
        if not form.is_valid():
            return []
        keyword = form.cleaned_data.get("keyword")
        # Every whitespace-separated term must match ($all).
        collection = Document.objects.collection.find({
            "_keywords": {"$all": keyword.split()}})
        return map(Document, collection)
    def get_context_data(self, **kwargs):
        # ``self.form`` was set by get_form() inside get_queryset(), which
        # ListView runs before building the context.
        return super(SearchDocumentView, self).get_context_data(
            search_form=self.form,
            keyword=self.request.GET.get("keyword"),
            **kwargs)
    def get_form(self):
        self.form = SearchForm(self.request.GET)
        return self.form
class ForkDocumentView(DocumentMixin, NewDocumentView):
    """Copy an existing document into the current user's account."""
    form_class = ForkDocumentForm
    template_name = "documents/fork.html"
    def get_initial(self):
        # Pre-fill the form with the source document's title.
        return {
            "title": self.get_document().title
        }
    def form_valid(self, form, **kwargs):
        document = self.get_document()
        # Copy entities/visibility from the source and record the origin.
        self.object_id = Document.objects.collection.insert({
            "title": form.cleaned_data.get("title"),
            "user_id": self.request.user.pk,
            "entities": document.entities,
            "fork_of": document.pk,
            "date_created": datetime.now(),
            "is_public": document.is_public,
            "_keywords": extract_keywords(form.cleaned_data.get("title"))
        })
        Document.objects.collection.update(
            {'_id': ObjectId(document.pk)},
            {"$inc": {'fork_count': 1}})
        document = Document.objects.get(_id=ObjectId(self.object_id))
        fork_done.send(sender=self, instance=document)
        # Deliberately skips NewDocumentView.form_valid (which would insert
        # a second document) and calls FormView.form_valid directly.
        return super(NewDocumentView, self).form_valid(form)
    def get_context_data(self, **kwargs):
        data = super(ForkDocumentView, self).get_context_data(**kwargs)
        data["document_id"] = self.get_document()._id
        return data
| fatiherikli/dbpatterns | web/dbpatterns/documents/views.py | Python | mit | 10,239 |
#!/usr/bin/env python
from outlawg import Outlawg
from fftool import PATH_PREFS_ROOT, __version__
from arg_parser import arg_parser
from firefox_download import download
from firefox_profile import create_mozprofile, clean_profiles
from firefox_run import launch_firefox
Log = Outlawg()
def main():
    """Entry point: parse CLI options, download/install Firefox, build a
    profile and launch the browser."""
    Log.header('FF-TOOL: download, install & launch Firefox!', 'XL', '=')
    options = arg_parser()

    if options.version:
        print('FF-TOOL VERSION: {0}'.format(__version__))
        return

    # Pref dirs need the PATH_PREFS_ROOT env var to resolve against.
    if options.prefs_dirs and not PATH_PREFS_ROOT:
        Log.header("ERROR")
        print("Missing path to $PATH_PREFS_ROOT directory.")
        print("Please set the `PATH_PREFS_ROOT` environment variable and " +
              "try again.")
        exit()

    if options.clean_profiles:
        clean_profiles()
        return

    # DOWNLOAD/INSTALL (unless explicitly skipped).
    if not options.no_download:
        download(options.channel)

    # `--install-only`: stop after download/install.
    if options.install_only:
        return

    # PROFILE
    profile_path = create_mozprofile(
        options.profile,
        options.addon,
        prefs_dirs=options.prefs_dirs,
    )

    # LAUNCH
    launch_firefox(profile_path,
                   channel=options.channel,
                   logging=options.logging,
                   nspr_log_modules=options.nspr_log_modules)
if __name__ == '__main__':
main()
| rpappalax/ff-tool | fftool/main.py | Python | mpl-2.0 | 1,468 |
import shelve
import time
from metrics import *
# little data set for testing
# testdata_users: user-major ratings {user: {item: rating}};
# testdata: the same data item-major, i.e. transform(testdata_users).
testdata_users = {'max':{'odin doma':3,'labirint straha':5,'detektiv':2,'komnata':4},
            'dima':{'odin doma':5,'labirint straha':1,'detektiv':5},
            'alex':{'odin doma':5,'pila':2,'komnata':3,'grabim bank':3,'labirint straha':1,'detektiv':4,'dom s privideniamy':3},
            'den':{'odin doma':2,'grabim bank':3,'labirint straha':5,'dom s privideniamy':5},
            'kirill':{'grabim bank':3,'labirint straha':4,'detektiv':1,'dom s privideniamy':5},
            'olga':{'odin doma':3,'pila':4,'detektiv':4,'komnata':1,'dom s privideniamy':3},
            'lera':{'odin doma':4,'pila':3,'grabim bank':4,'labirint straha':1},
            'anna':{'pila':4,'grabim bank':2,'labirint straha':5,'komnata':4,'detektiv':4,'dom s privideniamy':4}}
testdata = {'grabim bank': {'den': 3, 'lera': 4, 'alex': 3, 'kirill': 3, 'anna': 2},
            'labirint straha': {'den': 5, 'lera': 1, 'max': 5, 'dima': 1, 'alex': 1, 'kirill': 4, 'anna': 5},
            'komnata': {'alex': 3, 'olga': 1, 'anna': 4, 'max': 4},
            'odin doma': {'den': 2, 'lera': 4, 'max': 3, 'dima': 5, 'alex': 5, 'olga': 3},
            'dom s privideniamy': {'den': 5, 'alex': 3, 'kirill': 5, 'anna': 4, 'olga': 3},
            'detektiv': {'max': 2, 'dima': 5, 'alex': 4, 'kirill': 1, 'anna': 4, 'olga': 4},
            'pila': {'lera': 3, 'alex': 2, 'anna': 4, 'olga': 4}}
# generating shelve DB based on movielens data (u.data / u.item format)
def genData(basename = 'prefsbase',datab = 'u.data',itemb = 'u.item'):
    """Build a shelve DB mapping user id -> {movie title: rating}.

    :param basename: shelve file base name; existing entries are cleared
    :param datab: ratings file, whitespace-separated ``user item rating ts``
    :param itemb: item file, pipe-separated ``id|title|...``
    :return: the string 'success'
    """
    movies = {}
    prefs = {}
    db = shelve.open(basename)
    # Clear stale entries.  The original ``for key in db: del key`` only
    # deleted the loop variable and left the DB untouched.
    for key in list(db.keys()):
        del db[key]
    for line in open(itemb):
        info = line.split('|')
        movies[info[0]] = info[1]
    for line in open(datab):
        info = line.split()
        prefs.setdefault(info[0],{})
        prefs[info[0]][movies[info[1]]] = float(info[2])
    for user in prefs:
        db[user] = prefs[user]
    db.close()
    return ('success')
# generating shelve DB based on movielens data ('::'-separated r*.train files)
def genDataNew(basename = 'prefsbase',datab = 'r1.train'):
    """Build a shelve DB mapping user id -> {item id: rating}.

    :param basename: shelve file base name; existing entries are cleared
    :param datab: ratings file with ``user::item::rating::ts`` lines
    :return: the string 'success'
    """
    prefs = {}
    db = shelve.open(basename)
    # Clear stale entries (the original loop deleted only the loop variable).
    for key in list(db.keys()):
        del db[key]
    for line in open(datab):
        info = line.split('::')
        prefs.setdefault(info[0],{})
        prefs[info[0]][info[1]] = float(info[2])
    for user in prefs:
        db[user] = prefs[user]
    db.close()
    return ('success')
def genLitData(basename = 'prefsbase',datab = 'u.data',itemb = 'u.item'):
    """Like genData(), but keeps only users and items with id < 100.

    Fixes two defects of the original: the clearing loop deleted only the
    loop variable, and ``setdefault`` ran before the id filter, storing
    empty rating dicts for every filtered-out user.
    """
    movies = {}
    prefs = {}
    db = shelve.open(basename)
    for key in list(db.keys()):
        del db[key]
    for line in open(itemb):
        info = line.split('|')
        if (int(info[0]) < 100):
            movies[info[0]] = info[1]
    for line in open(datab):
        info = line.split()
        # Only create an entry when both the user and item survive the
        # id < 100 filter.
        if (int(info[0]) < 100 and int(info[1]) < 100):
            prefs.setdefault(info[0],{})
            prefs[info[0]][movies[info[1]]] = float(info[2])
    for user in prefs:
        db[user] = prefs[user]
    db.close()
    return ('success')
def singleSupportMatrix(pref_dict,similarity=PCC):
    """Binary "support" votes for one similarity metric.

    Computes the full pairwise similarity matrix, takes the median of all
    scores, and marks each neighbour with 1 (above median) or 0.
    Returns {user: [(0|1, neighbour), ...]}.  Progress is printed because
    the full matrix is expensive on real data.
    """
    # Full (unsorted, unlimited) similarity matrix: n=0 and best=False.
    Matrix = itemMatrix(pref_dict,0,similarity,False)
    print('itemMatrixReady')
    ratings = []
    for i in Matrix:
        for j in Matrix[i]:
            ratings.append(Matrix[i][j])
    print('ratings summed')
    ratings.sort()
    print('sorted')
    # Median similarity over every pair; the vote threshold.
    median = ratings[len(ratings)//2]
    print('median: %f'%median)
    SupportMatrix = {}
    c = 0
    for i in Matrix:
        SupportMatrix[i] = []
        for j in Matrix[i]:
            if (Matrix[i][j] > median):
                SupportMatrix[i].append((1,j))
            else:
                SupportMatrix[i].append((0,j))
        c += 1
        if (c%100 == 0):
            print('%d/%d'%(c,len(Matrix)))
    return SupportMatrix
def multipleSupportMatrix(pref_dict,similarities,DB):
    """Sum the binary support votes of several metrics per user pair.

    Each metric in ``similarities`` contributes 0 or 1 per pair (see
    singleSupportMatrix); the totals are persisted to the shelve file
    named ``DB`` and also returned as {user: {other: votes}}.
    """
    SupportMatrix = {}
    for i in pref_dict:
        SupportMatrix[i] = {}
        for j in pref_dict:
            if (j != i):
                SupportMatrix[i][j] = 0
    for sim in similarities:
        print('%s calculating'%sim.__name__)
        mat = singleSupportMatrix(pref_dict,sim)
        print('Sup value expanding')
        for name1 in mat:
            for (j,name2) in mat[name1]:
                SupportMatrix[name1][name2] += j
    ShvM = shelve.open(DB)
    for i in SupportMatrix:
        ShvM[i] = SupportMatrix[i]
    ShvM.close()
    print('Sup Matrix is ready')
    return SupportMatrix
# MAE - mean absolute error between two rating dicts {user: {item: rating}}
def MAE(a,b):
    """Return the mean absolute error between prediction dicts a and b.

    Returns the sentinel 100 when the inputs are unusable: different
    sizes, empty dicts, or no ratings at all.  (Removed the dead
    ``S = 0; n = 0`` initializers -- both were immediately overwritten
    or never used.)
    """
    if (len(a) != len(b) or len(a) <= 0):
        return 100
    S = sum(abs(float(a[i][j]-b[i][j])) for i in a for j in a[i])
    M = sum(len(a[i]) for i in a)
    if (M == 0):
        return 100
    return S/M
def RMSE(a,b):
    """Root-mean-square error between rating dicts {user: {item: rating}}.

    Returns the sentinel 0 for unusable input (size mismatch, empty, or
    no ratings), mirroring the original interface -- note MAE() uses 100.
    Fixes the original formula ``sqrt(S)/M``: RMSE is sqrt(S/M), so the
    old code understated the error by a factor of sqrt(M).
    """
    if (len(a) != len(b) or len(a) <= 0):
        return 0
    S = sum((a[i][j]-b[i][j])**2 for i in a for j in a[i])
    M = sum(len(a[i]) for i in a)
    # Guard against empty inner dicts (the original raised ZeroDivisionError).
    if (M == 0):
        return 0
    return sqrt(S/M)
# Support-based similarity: weight ``sim`` when the pair's support votes in
# SM reach threshold v, else weight ``disim``; either way scaled by the
# share of co-rated items (bothcalc) among b's ratings.
def SUPSIM(pref,a,b,SM,v,sim,disim):
    """Similarity of users a and b derived from support matrix SM."""
    if (SM[a][b] >= v):
        return sim * len(bothcalc(pref[a],pref[b]))/len(pref[b])
    else:
        return disim * len(bothcalc(pref[a],pref[b]))/len(pref[b])
def topSup(pref_dict,a,n,SupMatrix,v,sim,disim,best = True):
    """Nearest neighbours of ``a`` ranked by SUPSIM.

    Returns {neighbour: score}; the best n in descending order when
    ``best`` is true, otherwise all other users unsorted.
    """
    top = []
    for i in pref_dict:
        if (i != a):
            top.append((SUPSIM(pref_dict,a,i,SupMatrix,v,sim,disim),i))
    if best:
        top.sort()
        top.reverse()
        return dict([(j,i) for (i,j) in top[0:n]])
    else:
        return dict([(j,i) for (i,j) in top])
def itemMatrixSup(pref_dict,n,SupMatrix,v,sim,disim,best = True):
    """Build {user: topSup(...)} neighbour maps for every user."""
    itM = {}
    for i in pref_dict:
        itM[i] = topSup(pref_dict,i,n,SupMatrix,v,sim,disim,best)
    return itM
def SUPSIM3(pref,a,b,SM,v,v1,sim,avr,disim):
    """Three-band variant of SUPSIM.

    Support >= v1: strong similarity (weight ``sim``); between v and v1:
    fall back to the ``avr`` similarity metric; below v: weight ``disim``.
    """
    if (SM[a][b] >= v):
        if (SM[a][b] >= v1):
            return sim * len(bothcalc(pref[a],pref[b]))/len(pref[b])
        return avr(pref[a],pref[b])
    else:
        return disim * len(bothcalc(pref[a],pref[b]))/len(pref[b])
def topSup3(pref_dict,a,n,SupMatrix,v,v1,sim,avr,disim,best = True):
    """Nearest neighbours of ``a`` ranked by SUPSIM3 (see topSup)."""
    top = []
    for i in pref_dict:
        if (i != a):
            top.append((SUPSIM3(pref_dict,a,i,SupMatrix,v,v1,sim,avr,disim),i))
    if best:
        top.sort()
        top.reverse()
        return dict([(j,i) for (i,j) in top[0:n]])
    else:
        return dict([(j,i) for (i,j) in top])
def itemMatrixSup3(pref_dict,n,SupMatrix,v,v1,sim,avr,disim,best = True):
    """Build {user: topSup3(...)} neighbour maps for every user."""
    itM = {}
    for i in pref_dict:
        itM[i] = topSup3(pref_dict,i,n,SupMatrix,v,v1,sim,avr,disim,best)
    return itM
# find the nearest neighbours of ``a`` under a single similarity metric
def top(pref_dict,a,n,similarity = PCC,best = True):
    """Return {neighbour: score}: the best n (by descending score) when
    ``best`` is true, otherwise every other user unsorted."""
    scored = [(similarity(pref_dict[a], pref_dict[other]), other)
              for other in pref_dict if other != a]
    if best:
        scored.sort()
        scored.reverse()
        scored = scored[0:n]
    return dict((name, score) for (score, name) in scored)
# invert the ratings dict (user-major -> item-major)
def transform(prefs):
    """Turn {person: {item: rating}} into {item: {person: rating}}."""
    inverted = {}
    for person, ratings in prefs.items():
        for item, rating in ratings.items():
            inverted.setdefault(item, {})[person] = rating
    return inverted
# compute the best-neighbour map for every key of pref_dict
def itemMatrix(pref_dict,n,similarity,best = True):
    """Return {key: top(pref_dict, key, n, similarity, best)} with a
    progress line printed every 50 keys."""
    itM = {}
    print('calculating itemMatrix')
    c = 0
    lc = len(pref_dict)
    for i in pref_dict:
        itM[i] = top(pref_dict,i,n,similarity,best)
        c += 1
        if (c%50 == 0):
            print('%d/%d - %s'%(c,lc,similarity.__name__))
    return itM
#-------------------
# compute the averaged-metric neighbour map for every key of pref_dict
def itemMatrixAll(pref_dict,n,sims,best = True):
    """Return {key: topAll(pref_dict, key, n, sims, best)} with a
    progress line printed every 50 keys."""
    itM = {}
    print('calculating itemMatrixAll')
    c = 0
    lc = len(pref_dict)
    for i in pref_dict:
        itM[i] = topAll(pref_dict,i,n,sims,best)
        c += 1
        if (c%50 == 0):
            print('%d/%d'%(c,lc))
    return itM
# nearest neighbours of ``a`` using the mean score of several metrics
def topAll(pref_dict,a,n,sims,best = True):
    """Like top(), but each candidate's score is the average of every
    metric in ``sims``; returns {neighbour: score}."""
    metric_count = len(sims)
    scored = []
    for other in pref_dict:
        if other == a:
            continue
        score = sum(sim(pref_dict[a], pref_dict[other])
                    for sim in sims) / metric_count
        scored.append((score, other))
    if best:
        scored.sort()
        scored.reverse()
        scored = scored[0:n]
    return dict((name, score) for (score, name) in scored)
# Main method for recommending!
#--------------------
# prefs: {user: {item: rating}}          (user-major ratings)
# prefsTr: {item: {user: rating}}        (item-major, i.e. transform(prefs))
# itemMatch: {item: {other: similarity}}
def recommendOne(prefs,prefsTr,itemMatch,item,user):
    """Predict ``user``'s rating for ``item`` via item-based CF.

    Prediction = mean rating of ``item`` plus the similarity-weighted
    mean-centred ratings the user gave to similar items.  Returns the
    sentinel 200 when no prediction is possible (unknown user/item,
    already rated, or no overlapping similar items).  Removed the unused
    ``s`` variable and the loop variable that shadowed ``user``.
    """
    if (user not in prefs):
        return 200
    if (item in prefs[user]):
        return 200
    if (item not in itemMatch):
        return 200
    itemRatings = prefs[user]
    userRatings = prefsTr[item]
    # Mean rating of the target item across all users who rated it.
    averageRat = sum(userRatings.values())/len(userRatings)
    # Weighted sum of the user's ratings of similar items, each centred
    # on that item's own mean rating.
    up = sum((itemRatings[item2] -
              (sum(prefsTr[item2].values())/len(prefsTr[item2]))) *
             itemMatch[item][item2]
             for item2 in itemRatings if (item2 in itemMatch[item]))
    down = sum(abs(itemMatch[item][item2])
               for item2 in itemRatings if (item2 in itemMatch[item]))
    if (down == 0):
        return 200
    return (averageRat+up/down)
def recommendOne_two(prefs,prefsTr,itemMatch,item,user):
    """Variant of recommendOne() centred on the *user's* mean rating.

    Returns the sentinel 0 (not 200) when no prediction can be made.
    NOTE(review): unlike recommendOne(), a user missing from ``prefs``
    raises KeyError here -- confirm that is intended.
    """
    if (item in prefs[user]):
        return 0
    if (item not in itemMatch):
        return 0
    itemRatings = prefs[user]
    # NOTE(review): ``userRatings`` is never used below; its only effect
    # is raising KeyError when ``item`` is missing from ``prefsTr``.
    userRatings = prefsTr[item]
    # NOTE(review): ``s`` is unused.
    s = 0
    # The user's own mean rating (recommendOne uses the item's mean).
    averageRat = sum(rating for (user,rating) in itemRatings.items())/len(itemRatings)
    up = sum((itemRatings[item2] - averageRat)*itemMatch[item][item2] for item2 in itemRatings if (item2 in itemMatch[item]))
    down = sum(abs(itemMatch[item][item2]) for item2 in itemRatings if (item2 in itemMatch[item]))
    if (down == 0):
        return 0
    return (averageRat+up/down)
# nearest neighbours of ``a`` as a descending list of (score, name) pairs
def topAld(pref_dict,a,n,sim):
    """Return the n best (similarity, neighbour) tuples for ``a``,
    highest score first."""
    scored = [(sim(pref_dict[other], pref_dict[a]), other)
              for other in pref_dict if other != a]
    scored.sort()
    scored.reverse()
    return scored[0:n]
# compute the topAld() neighbour list for every key of pref_dict
def itemMatrixAld(pref_dict,n,sim):
    """Return {key: topAld(pref_dict, key, n, sim)} for every key.

    Removed the unused ``start = time.time()`` -- the timing was never
    reported.
    """
    itM = {}
    for i in pref_dict:
        itM[i] = topAld(pref_dict,i,n,sim)
    return itM
# produce predicted ratings for unseen items, best first
def recommend(prefs,itemMatch,user):
    """Rank unrated items for ``user`` using the itemMatrixAld-style
    neighbour lists (lists of (similarity, item) tuples) in itemMatch.
    Returns [(predicted_rating, item), ...] sorted descending."""
    userRatings=prefs[user]
    scores={}
    totalSim={}
    rs = 0
    # the user's mean rating
    for (item,rating) in userRatings.items( ):
        rs += rating
    rs = rs/len(userRatings)
    # loop over the items most similar to the ones this user rated
    for (item,rating) in userRatings.items( ):
        for (similarity,item2) in itemMatch[item]:
            # skip items the user has already rated
            if (item2 in userRatings):
                continue
            # weighted sum of mean-centred ratings times similarity
            scores.setdefault(item2,0)
            scores[item2]+=similarity*(rating-rs)
            # sum of the absolute similarity coefficients
            totalSim.setdefault(item2,0)
            totalSim[item2]+=abs(similarity)
    # compute the predicted ratings and sort them in descending order
    rankings=[(rs + score/(totalSim[item]),item) for item,score in scores.items( )]
    rankings.sort( )
    rankings.reverse( )
    return rankings
| maximdanilchenko/fusionBasedRecSys | itembased_recommender_system.py | Python | mit | 12,406 |
#!/usr/bin/env python
# coding: utf-8
from unittest import TestLoader as BaseTestLoader, TestSuite
import sys
class TestLoader(BaseTestLoader):
    """unittest loader that treats each name as a *directory* and loads
    every ``*_test.py`` module found directly inside it."""
    def loadTestsFromName(self, name, module=None):
        suite = TestSuite()
        for test in findTests(name):
            sys.path.insert(0, name)  # python3 compatibility
            suite.addTests(super(TestLoader, self).loadTestsFromName(test))
            del sys.path[0]  # python3 compatibility
        return suite
    def loadTestsFromNames(self, names, module=None):
        # Aggregate the per-directory suites into one.
        suite = TestSuite()
        for name in names:
            suite.addTests(self.loadTestsFromName(name))
        return suite
def findTests(dir):
    """Yield dotted module names for every ``*_test.py`` file directly in
    ``dir`` (one or more ``lowercase_`` groups followed by ``test.py``,
    e.g. ``my_module_test.py``)."""
    import os, re
    # Raw string: a plain '\.' literal triggers invalid-escape warnings
    # on modern Pythons.
    pattern = re.compile(r'([a-z]+_)+test\.py$')
    for fileName in os.listdir(dir):
        if pattern.match(fileName):
            yield os.path.join(dir, fileName).replace('.py', '').replace(os.sep, '.')
# __all__ must list *names as strings*; the original listed the class
# object itself, which makes ``from mockito_util.test import *`` raise
# TypeError ("attribute name must be string").
__all__ = ['TestLoader']
| edgeware/mockito-python | mockito_util/test.py | Python | mit | 864 |
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import datetime as dt
from decimal import Decimal
from itertools import product, cycle
from flask import json
from evesrp import db
from evesrp.models import Request, ActionType
from evesrp.auth import PermissionType
from evesrp.auth.models import Pilot, Division, Permission
from evesrp.util.datetime import utc
from evesrp.util.decimal import PrettyDecimal
from ...util_tests import TestLogin
class TestFilterBase(TestLogin):
    """Shared fixture for request-filter tests: three divisions, five
    pilots and eleven killmail requests spread across two users and every
    request status."""
    DIV_1 = 'Division One'
    DIV_2 = 'Division Two'
    DIV_3 = 'Division Three'
    # Static request fixtures.  Subclasses compute expected filter results
    # from this list and compare them with what the app returns.
    killmails = [
        {
            'id': 42513498,
            'ship_type': 'Scythe',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/42513498/',
            'base_payout': 22000000,
            'kill_timestamp': dt.datetime(2014, 11, 20, 4, 2,
                tzinfo=utc),
            'system': 'B-3QPD',
            'constellation': 'UX3-N2',
            'region': 'Catch',
            'pilot': 'Paxswill',
            'division': DIV_2,
            'details': 'lol Stratop',
            'status': ActionType.paid,
        },
        {
            'id': 39697412,
            'ship_type': 'Tristan',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/39697412/',
            'base_payout': 9100000,
            'kill_timestamp': dt.datetime(2014, 6, 23, 20, 6,
                tzinfo=utc),
            'system': 'Hikkoken',
            'constellation': 'Ishaga',
            'region': 'Black Rise',
            'pilot': 'Paxswill',
            'division': DIV_3,
            'details': 'Elite Solo PVP',
            'status': ActionType.evaluating,
        },
        {
            'id': 39988492,
            'ship_type': 'Crow',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/39988492/',
            'base_payout': 22000000,
            'kill_timestamp': dt.datetime(2014, 7, 9, 18, 22,
                tzinfo=utc),
            'system': 'Sadana',
            'constellation': 'Mareerieh',
            'region': 'Aridia',
            'pilot': 'Paxswill',
            'division': DIV_2,
            'details': 'Not so travel interceptor',
            'status': ActionType.approved,
        },
        {
            'id': 43292478,
            'ship_type': 'Guardian',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/43292478/',
            'base_payout': 289700000,
            'kill_timestamp': dt.datetime(2014, 12, 22, 4, 6,
                tzinfo=utc),
            'system': 'RNF-YH',
            'constellation': 'JZV-O6',
            'region': 'Catch',
            'pilot': 'Paxswill',
            'division': DIV_2,
            'details': 'lol Stratop',
            'status': ActionType.incomplete,
        },
        {
            'id': 43500358,
            'ship_type': 'Talwar',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/43500358/',
            'base_payout': 13700000,
            'kill_timestamp': dt.datetime(2014, 12, 31, 1, 48,
                tzinfo=utc),
            'system': 'Todifrauan',
            'constellation': 'Aldodan',
            'region': 'Metropolis',
            'pilot': 'DurrHurrDurr',
            'division': DIV_2,
            'details': 'Bar',
            'status': ActionType.evaluating,
        },
        {
            'id': 43162254,
            'ship_type': 'Cormorant',
            'corporation': 'Dreddit',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/43162254/',
            'base_payout': 11400000,
            'kill_timestamp': dt.datetime(2014, 12, 17, 3, 31,
                tzinfo=utc),
            'system': 'GE-8JV',
            'constellation': '9HXQ-G',
            'region': 'Catch',
            'pilot': 'DurrHurrDurr',
            'division': DIV_2,
            'details': 'lol Stratop',
            'status': ActionType.approved,
        },
        {
            'id': 31952048,
            'ship_type': 'Amarr Shuttle',
            'corporation': 'Science and Trade Institute',
            'killmail_url': 'https://zkillboard.com/kill/31952048/',
            'base_payout': 14000,
            'kill_timestamp': dt.datetime(2013, 7, 16, 4, 39,
                tzinfo=utc),
            'system': 'Karan',
            'constellation': 'Selonat',
            'region': 'Aridia',
            'pilot': 'Gevlon Goblin',
            'division': DIV_1,
            'details': 'grr goons',
            'status': ActionType.approved,
        },
        {
            'id': 41094133,
            'ship_type': 'Crucifier',
            'corporation': 'Unholy Knights of Cthulu',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/41094133/',
            'base_payout': 8300000,
            'kill_timestamp': dt.datetime(2014, 9, 6, 1, 32,
                tzinfo=utc),
            'system': 'Nisuwa',
            'constellation': 'Okakuola',
            'region': 'Black Rise',
            'pilot': 'Sapporo Jones',
            'division': DIV_2,
            'details': 'Elite Solo PVP',
            'status': ActionType.rejected,
        },
        {
            'id': 43341679,
            'ship_type': 'Vexor',
            'corporation': 'Unholy Knights of Cthulu',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/43341679/',
            'base_payout': 39900000,
            'kill_timestamp': dt.datetime(2014, 12, 24, 7, 9,
                tzinfo=utc),
            'system': '4-CM8I',
            'constellation': 'DITJ-X',
            'region': 'Scalding Pass',
            'pilot': 'Sapporo Jones',
            'division': DIV_1,
            'details': 'Scouting',
            'status': ActionType.evaluating,
        },
        {
            'id': 43372860,
            'ship_type': 'Imperial Navy Slicer',
            'corporation': 'Unholy Knights of Cthulu',
            'alliance': 'Test Alliance Please Ignore',
            'killmail_url': 'https://zkillboard.com/kill/43372860/',
            'base_payout': 15660000,
            'kill_timestamp': dt.datetime(2014, 12, 26, 0, 0,
                tzinfo=utc),
            'system': '8QT-H4',
            'constellation': 'MPJW-6',
            'region': 'Querious',
            'pilot': 'Paxswill',
            'division': DIV_1,
            'details': 'Elite Solo PVP',
            'status': ActionType.incomplete,
        },
        {
            'id': 43975437,
            'ship_type': 'Tristan',
            'corporation': 'Brave Operations - Lollipop Division',
            'alliance': 'Brave Collective',
            'killmail_url': 'https://zkillboard.com/kill/43975437/',
            'base_payout': 4800000,
            'kill_timestamp': dt.datetime(2015, 1, 18, 18, 25,
                tzinfo=utc),
            'system': 'YHN-3K',
            'constellation': 'UX3-N2',
            'region': 'Catch',
            'pilot': 'Zora Aran',
            'division': DIV_3,
            'details': 'Awox?',
            'status': ActionType.rejected,
        },
    ]
    def setUp(self):
        """Create divisions, permissions, pilots and one Request per
        killmail fixture, split between the admin and normal users."""
        super(TestFilterBase, self).setUp()
        with self.app.test_request_context():
            # Divisions
            divisions = {
                self.DIV_1: Division(self.DIV_1),
                self.DIV_2: Division(self.DIV_2),
                self.DIV_3: Division(self.DIV_3),
            }
            # Give all permissions in all divisions to admin_user
            for division in divisions.values():
                for permission in PermissionType.all:
                    Permission(division, permission, self.admin_user)
            # Pilots
            pilots = {
                'Paxswill': 570140137,
                'Sapporo Jones': 772506501,
                'DurrHurrDurr': 1456384556,
                'Gevlon Goblin': 91662677,
                'Zora Aran': 534674271,
            }
            for name, id in pilots.items():
                # Even ids belong to the normal user, odd to the admin.
                if id % 2 == 0:
                    user = self.normal_user
                else:
                    user = self.admin_user
                db.session.add(Pilot(user, name, id))
            # Lossmails/requests
            for request_data in self.killmails:
                # Copy dict before we pop stuff out of it
                data_copy = dict(request_data)
                # Distribute requests between users
                if request_data['id'] % 2 == 0:
                    user = self.admin_user
                else:
                    user = self.normal_user
                details = data_copy.pop('details')
                division = divisions[data_copy.pop('division')]
                status = data_copy.pop('status')
                data_copy['pilot_id'] = pilots[data_copy.pop('pilot')]
                request = Request(user, details, division, data_copy.items())
                # Set status after the base payout has been set
                request.status = status
            db.session.commit()
    def check_filter_url(self, url, expected_ids, expected_total):
        """GET ``url`` as JSON and assert the returned request ids and the
        formatted payout total match the expectations."""
        client = self.login(self.admin_name)
        resp = client.get(url, headers={'Accept':'application/json'},
                follow_redirects=False)
        if resp.status_code == 301:
            # Manually follow redirects to keep the Accept header around.
            resp = client.get(resp.location,
                    headers={'Accept':'application/json'},
                    follow_redirects=False)
        resp_obj = json.loads(resp.data)
        # Check that the returned requests match
        self.assertEqual(expected_ids,
                {request['id'] for request in resp_obj['requests']})
        # Check that the totals add up properly (in a roundabout way)
        self.assertEqual(PrettyDecimal(expected_total).currency(),
                resp_obj['total_payouts'])
class TestExactFilter(TestFilterBase):
    """Checks ``/request/all/<attribute>/<values>`` exact-match filtering
    for every unordered pair of ``choices`` ({None} means no filter).
    Subclasses set ``attribute`` and ``choices``."""
    choices = [None]
    def test_exact_filter_combos(self):
        # Explanation for the below: product(seq, repeat=n) computes a
        # cartesian product of sequence seq against itself n times. By using
        # this as a constructor to frozenset, we can combinations with repeated
        # choices (ex: ['Foo', 'Foo'] as opposed to ['Bar', 'Foo']). frozenset
        # is used as set() is mutable, and thus unhashable. This is all wrapped
        # in a set comprehension to deduplicate combinations that differ only
        # in ordering (ex: ['Foo', 'Bar'] and ['Bar', 'Foo']).
        choice_combos = {frozenset(combo) for combo in product(self.choices,
            repeat=2)}
        for combo in choice_combos:
            # Find the set of matching killmail IDs first
            matching_ids = set()
            total_payout = Decimal(0)
            for request in self.killmails:
                if combo == {None} or request.get(self.attribute) in combo:
                    matching_ids.add(request['id'])
                    # Rejected requests never pay out.
                    if request['status'] != ActionType.rejected:
                        total_payout += Decimal(request['base_payout'])
            # Ask the app what it thinks the matching requests are
            if combo != {None}:
                # The URL segment for ship_type is just "ship".
                if self.attribute == 'ship_type':
                    filter_attribute = 'ship'
                else:
                    filter_attribute = self.attribute
                if self.attribute == 'status':
                    values = ','.join(map(lambda x: x.value, combo))
                else:
                    values = ','.join(combo)
                url = '/request/all/{}/{}'.format(filter_attribute, values)
            else:
                url = '/request/all/'
            self.check_filter_url(url, matching_ids, total_payout)
class TestDivisionFilter(TestExactFilter):
    # Filter requests by the division they were submitted to.
    attribute = 'division'
    choices = [TestFilterBase.DIV_1, TestFilterBase.DIV_2, TestFilterBase.DIV_3]
class TestAllianceFilter(TestExactFilter):
    # Filter requests by the victim pilot's alliance name.
    attribute = 'alliance'
    choices = [
        'Test Alliance Please Ignore',
        'Brave Collective',
        'Goonswarm Federation',
    ]
class TestCorporationFilter(TestExactFilter):
    # Filter requests by the victim pilot's corporation name.
    attribute = 'corporation'
    choices = [
        'Dreddit',
        'Unholy Knights of Cthulu',
        'Goonwaffe',
        'Science and Trade Institute',
        'Brave Collective - Lollipop Division',
    ]
class TestPilotFilter(TestExactFilter):
    # Filter requests by the victim pilot's character name.
    attribute = 'pilot'
    choices = [
        'Paxswill',
        'DurrHurrDurr',
        'Gevlon Goblin',
        'Sapporo Jones',
        'Zora Aran',
    ]
class TestShipFilter(TestExactFilter):
    # Filter requests by the lost ship's type name.  Note the URL
    # component for this attribute is 'ship' (handled in the base class).
    attribute = 'ship_type'
    choices = ['Tristan', 'Crow', 'Vexor', 'Guardian']
class TestRegionFilter(TestExactFilter):
    # Filter requests by the region the kill happened in.
    attribute = 'region'
    choices = ['Black Rise', 'Catch', 'Aridia', 'Scalding Pass']
class TestConstellationFilter(TestExactFilter):
    # Filter requests by the constellation the kill happened in.
    attribute = 'constellation'
    choices = ['UX3-N2', 'Ishaga', 'Mareerieh', '9HXQ-G', 'Selonat']
class TestSystemFilter(TestExactFilter):
    # Filter requests by the solar system the kill happened in.
    attribute = 'system'
    choices = ['GE-8JV', 'Todifrauan', 'RNF-YH', '4-CM8I', 'Karan']
class TestStatusFilter(TestExactFilter):
    # Filter requests by their workflow status.  choices holds
    # ActionType members (not strings); the base class joins their
    # .value attributes when building the URL.
    attribute = 'status'
    choices = ActionType.statuses
class TestMultipleFilter(TestFilterBase):
    """Check request filtering when several attributes are filtered at once.

    Subclasses set ``choices`` to a mapping of attribute name -> list of
    accepted values; a request matches only when every attribute matches.
    """

    choices = {}

    def test_exact_multiple_filters(self):
        # Compute the expected matches: the for/else runs `else` only
        # when no attribute filter rejected the request.
        matching_ids = set()
        total_payout = Decimal(0)
        for request in self.killmails:
            for attribute, valid_values in self.choices.items():
                if request.get(attribute) not in valid_values:
                    break
            else:
                matching_ids.add(request['id'])
                if request['status'] != ActionType.rejected:
                    # BUGFIX: wrap in Decimal for consistency with
                    # TestExactFilter -- base_payout is not guaranteed
                    # to already be a Decimal.
                    total_payout += Decimal(request['base_payout'])
        # Ask the app what it thinks is the answer
        url = '/request/all/'
        for attribute, values in self.choices.items():
            url += '{}/{}/'.format(attribute, ','.join(values))
        self.check_filter_url(url, matching_ids, total_payout)
class TestDredditCatchFilter(TestMultipleFilter):
    # Combined filter: requests from the Dreddit corporation for kills
    # in the Catch region.
    choices = {
        'corporation': ['Dreddit'],
        'region': ['Catch'],
    }
| paxswill/evesrp | tests_python/views/requests/test_filters.py | Python | bsd-2-clause | 14,714 |
# -*- coding: utf-8 -*-
import json
from django.http import HttpResponse # , HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response
from common import utils, page
from www.misc.decorators import member_required
from www.tasks import async_clear_count_info_by_code
from www.account import interface as interface_account
from www.journey import interface as interface_journey
from www.answer import interface as interface_answer
from www.message import interface
# Module-level interface singletons shared by every view below.
urb = interface.UnreadCountBase()          # unread message counters
lb = interface_journey.LikeBase()          # "like" records
ab = interface_answer.AnswerBase()         # answers / replies
ub = interface_account.UserBase()          # user accounts
@member_required
def system_message(request, template_name='message/system_message.html'):
    """List the current user's system messages, 10 per page."""
    system_messages = urb.get_system_message(request.user.id)
    # Pagination
    page_num = int(request.REQUEST.get('page', 1))
    page_objs = page.Cpt(system_messages, count=10, page=page_num).info
    system_messages = page_objs[0]
    page_params = (page_objs[1], page_objs[4])
    # Asynchronously reset the unread counter for this message type
    async_clear_count_info_by_code(request.user.id, code='system_message')
    unread_count_info = urb.get_unread_count_info(request.user)
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@member_required
def received_like(request, template_name='message/received_like.html'):
    """List the "likes" the current user has received, 10 per page."""
    likes = lb.get_to_user_likes(request.user.id)
    # Pagination
    page_num = int(request.REQUEST.get('page', 1))
    page_objs = page.Cpt(likes, count=10, page=page_num).info
    likes = page_objs[0]
    page_params = (page_objs[1], page_objs[4])
    likes = lb.format_likes(likes)
    # page_objs[5] carries the total number of likes across all pages.
    likes_count = page_objs[5]
    # Asynchronously reset the unread counter for this message type
    async_clear_count_info_by_code(request.user.id, code='received_like')
    unread_count_info = urb.get_unread_count_info(request.user)
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@member_required
def received_answer(request, template_name='message/received_answer.html'):
    """List answers the current user has received, 10 per page."""
    answers = ab.get_user_received_answer(request.user.id)
    # Pagination
    page_num = int(request.REQUEST.get('page', 1))
    page_objs = page.Cpt(answers, count=10, page=page_num).info
    answers = page_objs[0]
    page_params = (page_objs[1], page_objs[4])
    answers = ab.format_answers(answers, need_obj=True)
    # Asynchronously reset the unread counter for this message type
    async_clear_count_info_by_code(request.user.id, code='received_answer')
    unread_count_info = urb.get_unread_count_info(request.user)
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@member_required
def at_answer(request, template_name='message/at_answer.html'):
    """List answers that @-mention the current user, 10 per page."""
    answers = ab.get_at_answers(request.user.id)
    # Pagination
    page_num = int(request.REQUEST.get('page', 1))
    page_objs = page.Cpt(answers, count=10, page=page_num).info
    answers = page_objs[0]
    page_params = (page_objs[1], page_objs[4])
    answers = ab.format_answers(answers, need_obj=True)
    # Asynchronously reset the unread counter for this message type
    async_clear_count_info_by_code(request.user.id, code='at_answer')
    unread_count_info = urb.get_unread_count_info(request.user)
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
# ===================================================ajax部分=================================================================#
@member_required
def get_unread_count_total(request):
    """Ajax: return the user's unread-message counters as JSON."""
    count_info = urb.get_unread_count_total(request.user)
    # Side effect: refresh the user's last-active timestamp
    ub.update_user_last_active_time(request.user.id, ip=utils.get_clientip(request))
    return HttpResponse(json.dumps(count_info), mimetype='application/json')
@member_required
def get_all_valid_global_notice(request):
    """Ajax: return all currently valid global notices as JSON."""
    gnb = interface.GlobalNoticeBase()
    global_notice = gnb.format_global_notice(gnb.get_all_valid_global_notice())
    return HttpResponse(json.dumps(global_notice), mimetype='application/json')
| lantianlz/qiexing | www/message/views.py | Python | gpl-2.0 | 4,024 |
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
class TestCourseTopic(unittest.TestCase):
    # Placeholder suite: the Course Topic doctype has no custom
    # behavior to test yet.
    pass
| frappe/erpnext | erpnext/education/doctype/course_topic/test_course_topic.py | Python | gpl-3.0 | 154 |
from drawable import Drawable
import pyglet
from pyglet.gl import *
class Sprite(Drawable):
    """A flat textured quad of size 2*sx by 2*sy, centered on the
    origin, drawn with alpha blending and depth writes disabled."""

    def __init__(self, sx, sy, tint=(1, 1, 1, 1)):
        """Build the indexed vertex list for the quad.

        tint is an RGBA sequence applied via glColor4f when drawing.
        """
        # BUGFIX: the default tint used to be a mutable list shared by
        # every instance created without an explicit tint; an immutable
        # tuple default is safe.
        self.tint = tint
        verts = [-sx,-sy, sx,-sy, sx,sy, -sx,sy]
        texcoords0 = [0,0, 1,0, 1,1, 0,1]
        indices = [0,1,2, 0,2,3]
        # BUGFIX: use floor division so the vertex count stays an int;
        # plain "/" yields a float under Python 3, which
        # vertex_list_indexed rejects.
        self.vlist = pyglet.graphics.vertex_list_indexed(
            len(verts)//2, indices, ('v2f', verts), ('t2f', texcoords0))

    def draw(self):
        """Render the quad with blending on and depth writes off."""
        glDepthMask(GL_FALSE)
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glColor4f(*self.tint)
        self.vlist.draw(pyglet.gl.GL_TRIANGLES)
        glDisable(GL_BLEND)
        glDepthMask(GL_TRUE)
| swiftcoder/ashima-iv | src/sprite.py | Python | bsd-3-clause | 622 |
import argparse
import sys
import time
from typing import IO, Iterator
from conway.grid import BaseGrid
DEFAULT_TURNS = -1  # any negative value means "run forever"
DEFAULT_DELAY = 0.35  # seconds slept between rendered frames
DEFAULT_SEP = "%"  # separator line printed before each frame
DEFAULT_OUTFILE = sys.stdout  # frames go to stdout unless overridden
def run(
    grid: BaseGrid,
    turns: int = DEFAULT_TURNS,
    delay: float = DEFAULT_DELAY,
    sep: str = DEFAULT_SEP,
    out: IO = DEFAULT_OUTFILE,
):
    """Run the Game of Life to completion.

    Renders the initial state, then ticks the grid, rendering each new
    generation and sleeping `delay` seconds between frames.  A negative
    `turns` never reaches zero, so the game runs forever.

    See the ``--help`` output for details.
    """
    render(grid, sep, out)
    time.sleep(delay)
    remaining = turns
    while remaining:
        grid.tick()
        render(grid, sep, out)
        time.sleep(delay)
        remaining -= 1
def run_iter(
    grid: BaseGrid, sep: str = DEFAULT_SEP, turns: int = DEFAULT_TURNS
) -> Iterator[str]:
    """Iterate over each tick of the Game.

    Yields a string representation of the state of the Game after each
    tick; the first frame yielded is the initial state of the game.
    A negative `turns` yields frames forever.

    See the `run` method for argument details.
    """
    yield draw(grid, sep)
    countdown = turns
    while countdown:
        grid.tick()
        yield draw(grid, sep)
        countdown -= 1
def render(grid: BaseGrid, sep: str = DEFAULT_SEP, out: IO = DEFAULT_OUTFILE):
    """Write one frame of the `grid`, prefixed by `sep`, to `out`."""
    frame = draw(grid, sep)
    print(frame, file=out)
def draw(grid: BaseGrid, sep: str = DEFAULT_SEP) -> str:
    """Return the `grid` rendered as text, preceded by a `sep` line."""
    return "{}\n{}".format(sep, grid)
| dustinrohde/python-conway | conway/__init__.py | Python | mit | 1,394 |
# encoding: utf-8
import numpy as np
import _remapping
import pyroms
def flood(varz, Bgrd, Bpos='t', irange=None, jrange=None, \
          spval=-9.99e+33, dmax=0, cdepth=0, kk=0):
    """
    var = flood(var, Bgrd)

    Flood varz on Bgrd: fill masked / missing points horizontally from
    their nearest valid neighbours, then propagate the deepest valid
    value down to the bottom.

    optional switch:
      - Bpos='t', 'u'            specify the grid position where
                                 the variable rely
      - irange                   specify grid sub-sample for i direction
      - jrange                   specify grid sub-sample for j direction
      - spval=-9.99e+33          define spval value
      - dmax=0                   if dmax>0, maximum horizontal
                                 flooding distance
      - cdepth=0                 critical depth for flooding
                                 if depth<cdepth => no flooding
      - kk=0                     if kk>0, only flood level k where the
                                 reference level (see NOTE below) is
                                 already valid
    """
    # Work on a private copy so the caller's array is left untouched.
    varz = varz.copy()
    varz = np.array(varz)
    assert len(varz.shape) == 3, 'var must be 3D'

    # Replace spval (within relative tolerance) by NaN so missing data
    # can be located with isnan below.
    idx = np.where(abs((varz-spval)/spval)<=1e-5)
    varz[idx] = np.nan

    # BUGFIX: the original used "Bpos is 't'" -- identity comparison
    # against a string literal, which only works by accident of CPython
    # string interning (and warns on Python >= 3.8).  Compare by value,
    # and reject unknown positions explicitly instead of failing later
    # with an UnboundLocalError.
    if Bpos == 't':
        x = Bgrd.lon_t
        y = Bgrd.lat_t
        h = Bgrd.h_t
        mask = Bgrd.mask_t[0,:,:]
    elif Bpos == 'u':
        x = Bgrd.lon_u
        y = Bgrd.lat_u
        h = Bgrd.h_u
        mask = Bgrd.mask_u[0,:,:]
    else:
        raise ValueError("Bpos must be 't' or 'u', got %r" % (Bpos,))

    nlev, Mm, Lm = varz.shape

    if irange is None:
        irange = (0,Lm)
    else:
        assert varz.shape[2] == irange[1]-irange[0], \
               'var shape and irange must agreed'

    if jrange is None:
        jrange = (0,Mm)
    else:
        assert varz.shape[1] == jrange[1]-jrange[0], \
               'var shape and jrange must agreed'

    x = x[jrange[0]:jrange[1], irange[0]:irange[1]]
    y = y[jrange[0]:jrange[1], irange[0]:irange[1]]
    h = h[jrange[0]:jrange[1], irange[0]:irange[1]]
    mask = mask[jrange[0]:jrange[1], irange[0]:irange[1]]

    # Finding nearest values in horizontal
    # critical depth => no change if depth is less than specified value
    cdepth = abs(cdepth)
    if cdepth != 0:
        # Only points at least cdepth deep may be flooded.
        idx = np.where(h >= cdepth)
        msk = np.zeros(mask.shape)
        msk[idx] = 1
    else:
        msk = mask.copy()

    # Flood level by level, from just above the bottom up to (but not
    # including) the surface level k == 0, matching the original code.
    for k in range(nlev-1,0,-1):
        c1 = np.array(msk, dtype=bool)          # floodable points
        c2 = np.isnan(varz[k,:,:]) == 1         # missing at this level
        if kk == 0:
            c3 = np.ones(mask.shape).astype(bool)
        else:
            # NOTE(review): min(k-kk, 0) is always <= 0, so a negative
            # index wraps to the deepest levels; max(k-kk, 0) may have
            # been intended -- confirm against upstream pyroms before
            # changing.  Behavior kept as-is.
            c3 = np.isnan(varz[min(k-kk,0),:,:]) == 0
        c = c1 & c2 & c3
        idxnan = np.where(c == True)
        idx = np.where(c2 == False)
        if list(idx[0]):
            # _remapping.flood expects 1-based (j, i) index pairs.
            wet = np.zeros((len(idx[0]),2))
            dry = np.zeros((len(idxnan[0]),2))
            wet[:,0] = idx[0]+1
            wet[:,1] = idx[1]+1
            dry[:,0] = idxnan[0]+1
            dry[:,1] = idxnan[1]+1
            varz[k,:] = _remapping.flood(varz[k,:], wet, dry, x, y, dmax)

    # drop the deepest values down
    idx = np.where(np.isnan(varz) == 1)
    varz[idx] = spval
    bottom = pyroms.utility.get_bottom(varz[::-1,:,:], mask, spval=spval)
    bottom = (nlev-1) - bottom
    for i in range(Lm):
        for j in range(Mm):
            if mask[j,i] == 1:
                varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i]

    return varz
| dcherian/pyroms | pyroms_toolbox/pyroms_toolbox/BGrid_POP/flood.py | Python | bsd-3-clause | 3,221 |
from unittest import mock
from unittest.mock import MagicMock, Mock
import pytest
from PyQt5 import QtCore
from PyQt5.QtWidgets import QMessageBox
from .main import Pesel2PBNWindow
WYGENEROWANY_PESEL = '00891702980' # http://www.bogus.ovh.org/generatory/all.html
@pytest.fixture
def p2p_window(qtbot):
    """Create and show a Pesel2PBNWindow with a mocked network layer."""
    window = Pesel2PBNWindow(networkAccessManager=mock.Mock())
    window.show()
    # Register with qtbot so the widget is cleaned up after the test.
    qtbot.addWidget(window)
    return window
def test_pesel2pbn_pastePESEL(qtbot, p2p_window):
    """Clicking "paste PESEL" must copy clipboard text into the editor."""
    with mock.patch('PyQt5.QtWidgets.QApplication.clipboard') as cb:
        # The app calls QApplication.clipboard().text(); stub its value.
        clipboard = cb.return_value
        clipboard.text.return_value = "123"
        qtbot.mouseClick(p2p_window.ui.pastePESEL, QtCore.Qt.LeftButton)
        assert p2p_window.ui.numeryPESEL.toPlainText() == '123'
def test_pesel2pbn_copyPBN(qtbot, p2p_window):
    """Clicking "copy PBN" must push the PBN text onto the clipboard."""
    with mock.patch('PyQt5.QtWidgets.QApplication.clipboard') as cb:
        clipboard = cb.return_value
        clipboard.setText = MagicMock()
        p2p_window.ui.numeryPBN.setPlainText("123")
        qtbot.mouseClick(p2p_window.ui.copyPBN, QtCore.Qt.LeftButton)
        # BUGFIX: the old check was `assert ...called_with(...)`, which
        # is not a Mock assertion -- it returned a fresh (truthy) child
        # mock, so the test could never fail.  Assert the call really
        # happened; TODO(review): tighten to assert_called_with("123",
        # mode=clipboard.Clipboard) once the exact arguments are
        # confirmed against the app code.
        assert clipboard.setText.called
def test_about_box(qtbot, p2p_window):
    """Clicking the about button must open the About dialog."""
    # BUGFIX: the old `new=Mock` patched in the Mock *class*, so the
    # later `about.called` resolved to an always-truthy class attribute
    # and the assertion was vacuous.  The default MagicMock instance
    # records calls properly.
    with mock.patch.object(QMessageBox, 'about') as about:
        qtbot.mouseClick(p2p_window.ui.oProgramie, QtCore.Qt.LeftButton)
        assert about.called
def test_wykonaj_brak_PESELi(qtbot, p2p_window):
    """Running with no PESEL numbers entered must show an error box."""
    # BUGFIX: `new=Mock` patched in the Mock *class*, making
    # `critical.called` an always-truthy class attribute; the default
    # MagicMock records the call properly.
    with mock.patch.object(QMessageBox, 'critical') as critical:
        qtbot.mouseClick(p2p_window.ui.wykonajButton, QtCore.Qt.LeftButton)
        assert critical.called
def test_wykonaj_zle_PESELe(qtbot, p2p_window):
    """Running with an invalid PESEL must show an error box."""
    p2p_window.ui.numeryPESEL.setPlainText('123')
    # BUGFIX: `new=Mock` patched in the Mock *class*, making
    # `critical.called` an always-truthy class attribute; the default
    # MagicMock records the call properly.
    with mock.patch.object(QMessageBox, 'critical') as critical:
        qtbot.mouseClick(p2p_window.ui.wykonajButton, QtCore.Qt.LeftButton)
        assert critical.called
def test_wykonaj_dobre_PESELe_bez_tokena(qtbot, p2p_window):
    """Valid PESELs but no API token must show an error box."""
    p2p_window.ui.numeryPESEL.setPlainText(WYGENEROWANY_PESEL)
    # BUGFIX: `new=Mock` patched in the Mock *class*, making
    # `critical.called` an always-truthy class attribute; the default
    # MagicMock records the call properly.
    with mock.patch.object(QMessageBox, 'critical') as critical:
        qtbot.mouseClick(p2p_window.ui.wykonajButton, QtCore.Qt.LeftButton)
        assert critical.called
def test_wykonaj_dobre_PESELe_z_tokenem(qtbot, p2p_window):
    """With valid PESELs and a token, two network calls are issued."""
    # BUGFIX: this function previously reused the name
    # test_wykonaj_dobre_PESELe_bez_tokena, silently shadowing the
    # token-less test defined just above it, which therefore never ran
    # under pytest.  Renamed to reflect what it actually tests (a token
    # IS set here).
    p2p_window.ui.numeryPESEL.setPlainText(WYGENEROWANY_PESEL)
    p2p_window.ui.token.setText('token-123')
    qtbot.mouseClick(p2p_window.ui.wykonajButton, QtCore.Qt.LeftButton)
    assert len(p2p_window.networkAccessManager.mock_calls) == 2
| mpasternak/pesel2pbn | src/pesel2pbn/tests.py | Python | mit | 2,513 |
# Square each sample value in place; prints [16, 36, 64, 100, 25].
l = [4, 6, 8, 10, 5]
l = [i*i for i in l]
# BUGFIX: use the function-call form of print, which works on both
# Python 2 (parenthesized expression) and Python 3; the bare
# "print l" statement is a SyntaxError under Python 3.
print(l)
| pybursa/homeworks | v_glushkova/hw2/1.py | Python | gpl-2.0 | 50 |
import unittest
import os
from maskgen import plugins, image_wrap
import numpy
import tempfile
class CV2ResizeByPercentTestCase(unittest.TestCase):
    """Exercise the CV2ResizeByPercent plugin on a blank 500x540 image."""
    # Temp files created during the test, removed in tearDown.
    filesToKill = []
    def setUp(self):
        plugins.loadPlugins()
    def test_snap(self):
        # All-black 8-bit source image, 500 rows x 540 columns.
        img = numpy.zeros((500,540),dtype='uint8')
        wrapper = image_wrap.ImageWrapper(img)
        filename = tempfile.mktemp(prefix='mstc',suffix='.png',dir='.')
        filename_output = tempfile.mktemp(prefix='mstcr', suffix='.png', dir='.')
        self.filesToKill.append(filename)
        wrapper.save(filename)
        self.filesToKill.append(filename_output)
        # Pre-populate the output path; the plugin overwrites it.
        image_wrap.ImageWrapper(img).save(filename_output)
        args,error = plugins.callPlugin('CV2ResizeByPercent',
                                        wrapper,
                                        filename,
                                        filename_output,
                                        percentage_width = 0.9,
                                        percentage_height=0.9,
                                        interpolation='other' )
        wrapper = image_wrap.openImageFile(filename_output)
        output = wrapper.to_array()
        # NOTE(review): a plain 90% resize of (500, 540) would be
        # (450, 486); the plugin apparently snaps dimensions, yielding
        # (448, 480) -- confirm against the plugin implementation.
        self.assertEqual((448,480),output.shape)
    def tearDown(self):
        for f in self.filesToKill:
            if os.path.exists(f):
                os.remove(f)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| rwgdrummer/maskgen | tests/plugins/ResizeByPercentTest.py | Python | bsd-3-clause | 1,335 |
"""
homeassistant.components.zone
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Allows defintion of zones in Home Assistant.
zone:
name: School
latitude: 32.8773367
longitude: -117.2494053
# Optional radius in meters (default: 100)
radius: 250
# Optional icon to show instead of name
# See https://www.google.com/design/icons/
# Example: home, work, group-work, shopping-cart, social:people
icon: group-work
zone 2:
name: Work
latitude: 32.8753367
longitude: -117.2474053
"""
import logging
from homeassistant.const import (
ATTR_HIDDEN, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_NAME)
from homeassistant.helpers import extract_domain_configs, generate_entity_id
from homeassistant.helpers.entity import Entity
from homeassistant.util.location import distance
DOMAIN = "zone"
DEPENDENCIES = []
ENTITY_ID_FORMAT = 'zone.{}'
ENTITY_ID_HOME = ENTITY_ID_FORMAT.format('home')
STATE = 'zoning'  # every zone entity reports this constant state
DEFAULT_NAME = 'Unnamed zone'
ATTR_RADIUS = 'radius'
DEFAULT_RADIUS = 100  # meters
ATTR_ICON = 'icon'
ICON_HOME = 'home'  # icon used for the auto-created home zone
def active_zone(hass, latitude, longitude, radius=0):
    """ Find the active zone for given latitude, longitude. """
    best = None
    best_dist = None
    # Sort entity IDs so the result is deterministic when the point is
    # equally distant from two zones.
    for entity_id in sorted(hass.states.entity_ids(DOMAIN)):
        zone = hass.states.get(entity_id)
        zone_dist = distance(
            latitude, longitude,
            zone.attributes[ATTR_LATITUDE], zone.attributes[ATTR_LONGITUDE])
        if zone_dist - radius >= zone.attributes[ATTR_RADIUS]:
            # The point (padded by radius) lies outside this zone.
            continue
        closer = best is None or zone_dist < best_dist
        tighter = (zone_dist == best_dist and
                   zone.attributes[ATTR_RADIUS] <
                   best.attributes[ATTR_RADIUS])
        if closer or tighter:
            best = zone
            best_dist = zone_dist
    return best
def in_zone(zone, latitude, longitude, radius=0):
    """ Test if given latitude, longitude is in given zone. """
    attrs = zone.attributes
    dist_to_center = distance(
        latitude, longitude, attrs[ATTR_LATITUDE], attrs[ATTR_LONGITUDE])
    # The point counts as inside when, padded by `radius`, it falls
    # within the zone's own radius.
    return dist_to_center - radius < attrs[ATTR_RADIUS]
def setup(hass, config):
    """ Setup zone. """
    entities = set()
    for key in extract_domain_configs(config, DOMAIN):
        entries = config[key]
        # Accept both a single zone mapping and a list of them.
        if not isinstance(entries, list):
            entries = entries,
        for entry in entries:
            name = entry.get(CONF_NAME, DEFAULT_NAME)
            latitude = entry.get(ATTR_LATITUDE)
            longitude = entry.get(ATTR_LONGITUDE)
            radius = entry.get(ATTR_RADIUS, DEFAULT_RADIUS)
            icon = entry.get(ATTR_ICON)
            # Skip (but log) zones missing required coordinates.
            if None in (latitude, longitude):
                logging.getLogger(__name__).error(
                    'Each zone needs a latitude and longitude.')
                continue
            zone = Zone(hass, name, latitude, longitude, radius, icon)
            # generate_entity_id deduplicates against already-used ids.
            zone.entity_id = generate_entity_id(ENTITY_ID_FORMAT, name,
                                                entities)
            zone.update_ha_state()
            entities.add(zone.entity_id)
    # Always provide a "home" zone based on the core configuration,
    # unless the user defined one explicitly.
    if ENTITY_ID_HOME not in entities:
        zone = Zone(hass, hass.config.location_name, hass.config.latitude,
                    hass.config.longitude, DEFAULT_RADIUS, ICON_HOME)
        zone.entity_id = ENTITY_ID_HOME
        zone.update_ha_state()
    return True
class Zone(Entity):
    """ Represents a Zone in Home Assistant. """
    # pylint: disable=too-many-arguments
    def __init__(self, hass, name, latitude, longitude, radius, icon):
        self.hass = hass
        self._name = name
        self.latitude = latitude
        self.longitude = longitude
        self.radius = radius
        self.icon = icon

    @property
    def should_poll(self):
        """ Zones never change on their own; polling is pointless. """
        # BUGFIX: this was a plain method while every other Entity
        # override in this class is a property; code reading
        # `entity.should_poll` as an attribute would have received the
        # bound method (always truthy) instead of False.
        return False

    @property
    def name(self):
        """ Name of the zone. """
        return self._name

    @property
    def state(self):
        """ The state property really does nothing for a zone. """
        return STATE

    @property
    def state_attributes(self):
        """ Attributes describing the zone (hidden from UIs). """
        attr = {
            ATTR_HIDDEN: True,
            ATTR_LATITUDE: self.latitude,
            ATTR_LONGITUDE: self.longitude,
            ATTR_RADIUS: self.radius,
        }
        if self.icon:
            attr[ATTR_ICON] = self.icon
        return attr
| tomduijf/home-assistant | homeassistant/components/zone.py | Python | mit | 4,426 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.