| repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 | prefix stringlengths 0-8.16k | middle stringlengths 3-512 | suffix stringlengths 0-8.17k |
|---|---|---|---|---|---|---|---|---|
| fagusMcFagel/ticketsystem | ticketsystem/tickets/views.py | Python | mit | 49,979 | 0.008066 |
#standard library
from _functools import reduce
import imghdr
### IMPORT THE APPLICABLE SETTINGS SET IN manage.py ###
from manage import USED_SETTINGS
import importlib
used_settings = importlib.import_module(USED_SETTINGS)
settings_media_root = used_settings.MEDIA_ROOT
settings_media_url = used_settings.MEDIA_URL
### REGULAR IMPORTS ###
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.db.models import Q
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from django.contrib.auth import authenticate, login, logout
from django.forms.models import model_to_dict
from django.utils import timezone
from django.core.mail import send_mail, get_connection
from django.http.response import HttpResponseNotAllowed
from tickets.models import Ticket, Measures
from tickets.forms import (
EnterTicketForm, LoginForm, DetailForm,
EditableDataForm, ClosingDataForm, SearchForm,
ClosedDataForm, CompactMeasureForm, MeasureForm
)
from tickets.field_constants import FieldConstants
# local constants
LOGIN_URL = '/tickets/login/'
STDRT_REDIRECT_URL = '/tickets/overview/'
# view function for user login
'''
#parameter: HttpRequest request
#URL:'tickets/login'
'''
def login_user(request):
# renewal of session expiration
# request.session.set_expiry(settings.COOKIE_EXP_AGE)
# initialize variables error and login_user
error = False
logged_in_user = None
infomsg = ''
# if form is submitted in a post request
if request.method == 'POST':
form = LoginForm(request.POST)
# if POST data is valid in LoginForm
if form.is_valid():
# logout currently logged in user
if request.user.is_authenticated:
logout(request)
# get user name and password from POST data and try to authenticate user
username = request.POST['username']
password = request.POST['password']
user = authenticate(request, username=username, password=password)
# if user is authenticated: login user
if user is not None:
login(request, user)
# if the login was redirected with parameter 'next' (e.g. via @login_required decorator)
if request.GET.get('next'):
return HttpResponseRedirect(request.GET.get('next'))
# default redirect to /tickets/overview/
else:
return HttpResponseRedirect(STDRT_REDIRECT_URL)
# reset the form and set error to true
else:
error = True
form = LoginForm()
# if called normally (with GET-Request)
else:
# display currently logged in user, if existent
if request.user.is_authenticated:
logged_in_user = request.user
# set empty login form
form = LoginForm()
infomsg = 'Login erforderlich!'  # i.e. "Login required!"
return render(
request, 'ticket_login.djhtml',
{'form':form,
'error':error,
'login_user':logged_in_user,
'infomsg':infomsg}
)
# view function for logging a user out and redirecting to the login page
'''
#parameter: HttpRequest request
#URL:'tickets/logout'
'''
def logout_user(request):
if request.user.is_authenticated:
logout(request)
return HttpResponseRedirect('/tickets/login/')
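# Illustrative sketch (editor's addition) of how the 'next' redirect above
# round-trips; assumes this project's URLconf and a hypothetical test user
# 'alice' with password 'secret':
def _demo_login_next_redirect():
    from django.test import Client
    client = Client()
    response = client.get('/tickets/overview/')   # anonymous request
    assert response.status_code == 302            # sent to LOGIN_URL + '?next=...'
    response = client.post('/tickets/login/?next=/tickets/overview/',
                           {'username': 'alice', 'password': 'secret'})
    assert response.url == '/tickets/overview/'   # login_user() honors 'next'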
# view function for creating a new ticket
'''
#lets the user choose sector and category
#and requires input for subject and description
#parameter: HttpRequest request
#URL:'tickets/enter'
'''
@login_required(login_url=LOGIN_URL)
def enter_ticket(request):
# init infomsg as empty string
infomsg = ''
if request.method == 'POST':
# set form as EnterTicketForm-Object with the POST-data
form = EnterTicketForm(request.POST, request.FILES)
# create an entry in the database with the entered data
if form.is_valid():
# get cleaned data and current system time
cd = form.cleaned_data
now = timezone.now()
# initialize img as empty string, fileErr as False
img = ''
fileErr = False
# check if an image file was uploaded and if so set img to the file
if request.FILES:
if imghdr.what(request.FILES['image']):
img = request.FILES['image']
# if a file was uploaded but is not recognized as an image file
else:
# form: form to be displayed for ticket entering; infomsg: displayed infomsg
infomsg = 'Dateifehler'  # i.e. "File error"
fileErr = True
return render(
request, 'ticket_enter.djhtml',
{'form':form,
'infomsg':infomsg,
'fileErr':fileErr}
)
cd['sector'] = Group.objects.get(name=cd['sector'])
# initialize ticket object t with form data
# ticket id increments automatically
# fields (apart from closingdatetime) mustn't be NULL -> initialized with '' (empty string)
t = Ticket(sector=cd['sector'], category=cd['category'],
subject=cd['subject'], description=cd['description'],
creationdatetime=now, status='open',
# TODO:get username from form/request-data?
creator=request.user.username,
responsible_person=None,
comment='', keywords='',
image=img
)
# save data set to database
t.save()
# reset form and display thank-you-message
infomsg = 'Ticket erfolgreich erstellt!'  # i.e. "Ticket created successfully!"
form = EnterTicketForm()
#if requested with GET-Method
else:
# initialize empty form
form = EnterTicketForm()
# form: form to be displayed for ticket entering; infomsg: displayed infomsg
return render(
request, 'ticket_enter.djhtml',
{'form':form,
'infomsg':infomsg}
)
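# Illustrative sketch of the imghdr check used above: imghdr.what() sniffs the
# file's magic bytes and returns a type string such as 'png', or None otherwise:
def _demo_imghdr_sniff():
    png_header = b'\x89PNG\r\n\x1a\n' + b'\x00' * 16
    assert imghdr.what(None, h=png_header) == 'png'     # raw-bytes mode
    assert imghdr.what(None, h=b'plain text') is None   # not an image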
# view function for displaying a user's tickets
'''
#displays a list of open tickets for all groups/sectors the user is in on the left (no responsible_person specified)
#and a list of open tickets for which he is entered as responsible_person
#parameter: HttpRequest request
#URL:'tickets/overview'
'''
@login_required(login_url=LOGIN_URL)
def show_ticket_list(request):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# search for open tickets to be displayed according to the requesting user
query_user = Q(status='open') & Q(responsible_person=request.user)
tickets_user = Ticket.objects.filter(query_user)
# get column headings/names from Ticket model
labels_dict = FieldConstants.TICKET_FIELD_LABELS
# the groups the user is part of
query_group = Q(status='open') & Q(responsible_person=None) & Q(sector__in=groups)
tickets_group = Ticket.objects.filter(query_group)
# initialize infomsg and set it according to GET['status']
infomsg = ''
if request.GET.get('status'):
if request.GET['status'] == 'closed':
infomsg = 'Ticket abgeschlossen!'  # i.e. "Ticket closed!"
# return the template with the fetched data on display
return render(
request, 'ticket_overview.djhtml',
{'tickets_group':tickets_group,
'tickets_user':tickets_user,
'labels_dict':labels_dict,
'infomsg':infomsg}
)
| klahnakoski/TestFailures | pyLibrary/queries/containers/tests/test_container.py | Python | mpl-2.0 | 5,642 | 0.00319 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import division
from __future__ import unicode_literals
from pyLibrary.debugs.logs import Log
from pyLibrary.dot import wrap
from pyLibrary.queries.containers.list_usingSQLite import Table_usingSQLite
from pyLibrary.queries.expressions import NullOp
from pyLibrary.testing.fuzzytestcase import FuzzyTestCase
class TestContainer(FuzzyTestCase):
def test_assumption(self):
table = Table_usingSQLite("test_table")
collection = {}
uid = table.next_uid()
ok, required_changes = table.flatten({"a": 1, "b": "v"}, uid, collection)
table.change_schema(required_changes)
uid = table.next_uid()
ok, required_changes = table.flatten({"a": None, "b": "v"}, uid, collection)
uid = table.next_uid()
ok, required_changes = table.flatten({"a": 1, "b": None}, uid, collection)
table._insert(collection)
result = table.db.query('SELECT coalesce("a.$number", "b.$string"), length(coalesce("a.$number", "b.$string")) FROM '+table.name)
self.assertEqual(result, {"data": [(1.0, 3), ('v', 1), (1.0, 3)]})
def test_flatten_inner(self):
table = Table_usingSQLite("test_table")
collection = {}
uid = table.next_uid() # 1
ok, required_changes = table.flatten({"a": 0}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{"add": {"name": "a", "type": "number", "nested_path": NullOp()}}])
self.assertEqual(collection, {
".": {
"rows": [{"__id__": 1, "a.$number": 0}],
"active_columns": [{"es_column": "a.$number"}]
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 2
ok, required_changes = table.flatten({"a": {"b": 0}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [
{"add": {"name": "a", "type": "object", "nested_path": NullOp()}},
{"add": {"name": "a.b", "type": "number", "nested_path": NullOp()}}
])
self.assertEqual(collection, {
".": {
"rows": [{"__id__": 2, "a.$object": ".", "a.b.$number": 0}],
"active_columns": {wrap({"es_column": "a.b.$number"}), wrap({"es_column": "a.$object"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 3
ok, required_changes = table.flatten({"a": {"b": [0, 1]}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{
"add": {"name": "a.b", "type": "nested", "nested_path": NullOp()}
}])
self.assertEqual(collection, {
".": {
"rows": [
{"__id__": 3, "a.$object": "."}
],
"active_columns": {wrap({"es_column": "a.$object"}), wrap({"es_column": "a.b.$object"})}
},
"a.b": {
"rows":[
{"__id__": 4, "__parent__": 3, "__order__": 0, "a.b.$number": 0},
{"__id__": 5, "__parent__": 3, "__order__": 1, "a.b.$number": 1}
],
"active_columns": {wrap({"es_column": "a.b.$number"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 6
ok, required_changes = table.flatten({"a": {"b": "value"}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{
"add": {"name": "a.b", "type": "string", "nested_path": "a.b"}
}])
self.assertEqual(collection, {
".": {
"rows": [
{"__id__": 6, "a.b.$object": ".", "a.$object": "."}
],
"active_columns": {wrap({"es_column": "a.b.$object"}), wrap({"es_column": "a.$object"})}
},
"a.b": {
"rows": [
{"__id__": 7, "__parent__": 6, "__order__": 0, "a.b.$string": "value"}
],
"active_columns": {wrap({"es_column": "a.b.$string"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
# VERIFY CONTENT OF TABLE
result = table.db.query('SELECT * FROM "test_table.a.b" ORDER BY __id__')
self.assertEqual(result, {"data": [
(2, 2, 0, 0, None),
(4, 3, 0, 0, None),
(5, 3, 1, 1, None),
(7, 6, 0, None, 'value')
]})
# VIEW METADATA
command = 'PRAGMA table_info("test_table")'
Log.note("Metadata\n{{meta|json|indent}}", meta=table.db.query(command))
# VIEW METADATA
command = 'PRAGMA table_info("test_table.a.b")'
Log.note("Metadata\n{{meta|json|indent}}", meta=table.db.query(command))
# VERIFY PULLING DATA
result = table.query({"from": table.name})
self.assertEqual(result, {"data": [
{"a": 0},
{"a": {"b": 0}},
{"a": {"b": [0, 1]}},
{"a": {"b": "value"}}
]})
Log.note("{{result}}", result=result)
| SerialShadow/SickRage | sickbeard/__init__.py | Python | gpl-3.0 | 114,709 | 0.005048 |
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import webbrowser
import datetime
import socket
import os
import re
import os.path
import shutil
import shutil_custom
shutil.copyfile = shutil_custom.copyfile_custom
from threading import Lock
import sys
from github import Github
from sickbeard import metadata
from sickbeard import providers
from sickbeard.providers.generic import GenericProvider
from sickbeard.providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
frenchtorrentdb, freshontv, titansoftv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
subtitles, traktChecker
from sickbeard import db
from sickbeard import helpers
from sickbeard import scheduler
from sickbeard import search_queue
from sickbeard import show_queue
from sickbeard import logger
from sickbeard import naming
from sickbeard import dailysearcher
from sickbeard.indexers.indexer_api import indexerApi
from sickbeard.indexers.indexer_exceptions import indexer_shownotfound, indexer_showincomplete, indexer_exception, indexer_error, \
indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
from sickbeard.common import SD
from sickbeard.common import SKIPPED
from sickbeard.common import WANTED
from sickbeard.databases import mainDB, cache_db, failed_db
from sickrage.helper.exceptions import ex
from sickrage.system.Shutdown import Shutdown
from configobj import ConfigObj
import requests
requests.packages.urllib3.disable_warnings()
PID = None
CFG = None
CONFIG_FILE = None
# This is the version of the config we EXPECT to find
CONFIG_VERSION = 7
# Default encryption version (0 for None)
ENCRYPTION_VERSION = 0
ENCRYPTION_SECRET = None
PROG_DIR = '.'
MY_FULLNAME = None
MY_NAME = None
MY_ARGS = []
SYS_ENCODING = ''
DATA_DIR = ''
CREATEPID = False
PIDFILE = ''
DAEMON = None
NO_RESIZE = False
# system events
events = None
# github
gh = None
# schedulers
dailySearchScheduler = None
backlogSearchScheduler = None
showUpdateScheduler = None
versionCheckScheduler = None
showQueueScheduler = None
searchQueueScheduler = None
properFinderScheduler = None
autoPostProcesserScheduler = None
subtitlesFinderScheduler = None
traktCheckerScheduler = None
showList = None
loadingShowList = None
providerList = []
newznabProviderList = []
torrentRssProviderList = []
metadata_provider_dict = {}
NEWEST_VERSION = None
NEWEST_VERSION_STRING = None
VERSION_NOTIFY = False
AUTO_UPDATE = False
NOTIFY_ON_UPDATE = False
CUR_COMMIT_HASH = None
BRANCH = ''
GIT_RESET = True
GIT_REMOTE = ''
GIT_REMOTE_URL = ''
CUR_COMMIT_BRANCH = ''
GIT_ORG = 'SiCKRAGETV'
GIT_REPO = 'SickRage'
GIT_USERNAME = None
GIT_PASSWORD = None
GIT_PATH = None
GIT_AUTOISSUES = False
GIT_NEWVER = False
DEVELOPER = False
NEWS_URL = 'http://sickragetv.github.io/sickrage-news/news.md'
NEWS_LAST_READ = None
NEWS_LATEST = None
NEWS_UNREAD = 0
INIT_LOCK = Lock()
started = False
ACTUAL_LOG_DIR = None
LOG_DIR = None
LOG_NR = 5
LOG_SIZE = 1048576
SOCKET_TIMEOUT = None
WEB_PORT = None
WEB_LOG = None
WEB_ROOT = None
WEB_USERNAME = None
WEB_PASSWORD = None
WEB_HOST = None
WEB_IPV6 = None
WEB_COOKIE_SECRET = None
WEB_USE_GZIP = True
DOWNLOAD_URL = None
HANDLE_REVERSE_PROXY = False
PROXY_SETTING = None
PROXY_INDEXERS = True
SSL_VERIFY = True
LOCALHOST_IP = None
CPU_PRESET = None
ANON_REDIRECT = None
API_KEY = None
API_ROOT = None
ENABLE_HTTPS = False
HTTPS_CERT = None
HTTPS_KEY = None
INDEXER_DEFAULT_LANGUAGE = None
EP_DEFAULT_DELETED_STATUS = None
LAUNCH_BROWSER = False
CACHE_DIR = None
ACTUAL_CACHE_DIR = None
ROOT_DIRS = None
TRASH_REMOVE_SHOW = False
TRASH_ROTATE_LOGS = False
SORT_ARTICLE = False
DEBUG = False
DISPLAY_ALL_SEASONS = True
DEFAULT_PAGE = 'home'
USE_LISTVIEW = False
METADATA_KODI = None
METADATA_KODI_12PLUS = None
METADATA_MEDIABROWSER = None
METADATA_PS3 = None
METADATA_WDTV = None
METADATA_TIVO = None
METADATA_MEDE8ER = None
QUALITY_DEFAULT = None
STATUS_DEFAULT = None
STATUS_DEFAULT_AFTER = None
FLATTEN_FOLDERS_DEFAULT = False
SUBTITLES_DEFAULT = False
INDEXER_DEFAULT = None
INDEXER_TIMEOUT = None
SCENE_DEFAULT = False
ANIME_DEFAULT = False
ARCHIVE_DEFAULT = False
PROVIDER_ORDER = []
NAMING_MULTI_EP = False
NAMING_ANIME_MULTI_EP = False
NAMING_PATTERN = None
NAMING_ABD_PATTERN = None
NAMING_CUSTOM_ABD = False
NAMING_SPORTS_PATTERN = None
NAMING_CUSTOM_SPORTS = False
NAMING_ANIME_PATTERN = None
NAMING_CUSTOM_ANIME = False
NAMING_FORCE_FOLDERS = False
NAMING_STRIP_YEAR = False
NAMING_ANIME = None
USE_NZBS = False
USE_TORRENTS = False
NZB_METHOD = None
NZB_DIR = None
USENET_RETENTION = None
TORRENT_METHOD = None
TORRENT_DIR = None
DOWNLOAD_PROPERS = False
CHECK_PROPERS_INTERVAL = None
ALLOW_HIGH_PRIORITY = False
SAB_FORCED = False
RANDOMIZE_PROVIDERS = False
AUTOPOSTPROCESSER_FREQUENCY = None
DAILYSEARCH_FREQUENCY = None
UPDATE_FREQUENCY = None
BACKLOG_FREQUENCY = None
SHOWUPDATE_HOUR = None
DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
DEFAULT_DAILYSEARCH_FREQUENCY = 40
DEFAULT_BACKLOG_FREQUENCY = 21
DEFAULT_UPDATE_FREQUENCY = 1
DEFAULT_SHOWUPDATE_HOUR = 3
MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
MIN_DAILYSEARCH_FREQUENCY = 10
MIN_BACKLOG_FREQUENCY = 10
MIN_UPDATE_FREQUENCY = 1
BACKLOG_DAYS = 7
ADD_SHOWS_WO_DIR = False
CREATE_MISSING_SHOW_DIRS = False
RENAME_EPISODES = False
AIRDATE_EPISODES = False
FILE_TIMESTAMP_TIMEZONE = None
PROCESS_AUTOMATICALLY = False
NO_DELETE = False
KEEP_PROCESSED_DIR = False
PROCESS_METHOD = None
DELRARCONTENTS = False
MOVE_ASSOCIATED_FILES = False
POSTPONE_IF_SYNC_FILES = True
NFO_RENAME = True
TV_DOWNLOAD_DIR = None
UNPACK = False
SKIP_REMOVED_FILES = False
NZBS = False
NZBS_UID = None
NZBS_HASH = None
WOMBLE = False
BINSEARCH = False
OMGWTFNZBS = False
OMGWTFNZBS_USERNAME = None
OMGWTFNZBS_APIKEY = None
NEWZBIN = False
NEWZBIN_USERNAME = None
NEWZBIN_PASSWORD = None
SAB_USERNAME = None
SAB_PASSWORD = None
SAB_APIKEY = None
SAB_CATEGORY = None
SAB_CATEGORY_ANIME = None
SAB_HOST = ''
NZBGET_USERNAME = None
NZBGET_PASSWORD = None
NZBGET_CATEGORY = None
NZBGET_CATEGORY_ANIME = None
NZBGET_HOST = None
NZBGET_USE_HTTPS = False
NZBGET_PRIORITY = 100
TORRENT_USERNAME = None
TORRENT_PASSWORD = None
TORRENT_HOST = ''
TORRENT_PATH = ''
TORRENT_SEED_TIME = None
TORRENT_PAUSED = False
TORRENT_HIGH_BANDWIDTH = False
TORRENT_LABEL = ''
TORRENT_LABEL_ANIME = ''
TORRENT_VERIFY_CERT = False
TORRENT_RPCURL = 'transmission'
TORRENT_AUTH_TYPE = 'none'
USE_KODI = False
KODI_ALWAYS_ON = True
KODI_NOTIFY_ONSNATCH = False
KODI_NOTIFY_ONDOWNLOAD = False
KODI_NOTIFY_ONSUBTITLEDOWNLOAD = False
KODI_UPDATE_LIBRARY = False
KODI_UPDATE_FULL = False
KODI_UPDATE_ONLYFIRST = False
KODI_HOST = ''
KODI_USERNAME = None
KODI_PASSWORD = None
USE_PLEX = False
PLEX_NOTIFY_ONSNATCH = False
PLEX_NOTIFY_ONDOWNLOAD = False
PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = False
PLEX_UPDATE_LIBRARY = False
PLEX_SERVER_HOST = None
PLEX_SERVER_TOKEN = None
PLEX_
| milinbhakta/flaskmaterialdesign | venv/Lib/importlib/util.py | Python | gpl-2.0 | 7,227 | 0.000692 |
"""Utility code for constructing importers, etc."""
from ._bootstrap import MAGIC_NUMBER
from ._bootstrap import cache_from_source
from ._bootstrap import decode_source
from ._bootstrap import source_from_cache
from ._bootstrap import spec_from_loader
from ._bootstrap import spec_from_file_location
from ._bootstrap import _resolve_name
from ._bootstrap import _find_spec
from contextlib import contextmanager
import functools
import sys
import warnings
def resolve_name(name, package):
"""Resolve a relative module name to an absolute one."""
if not name.startswith('.'):
return name
elif not package:
raise ValueError('{!r} is not a relative name '
'(no leading dot)'.format(name))
level = 0
for character in name:
if character != '.':
break
level += 1
return _resolve_name(name[level:], package, level)
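# Illustrative usage (editor's addition): resolve_name() is pure string
# manipulation, so it can be exercised without importing any real package
# (package names below are hypothetical):
def _demo_resolve_name():
    assert resolve_name('.runner', 'pkg.tests') == 'pkg.tests.runner'
    assert resolve_name('..helpers', 'pkg.tests') == 'pkg.helpers'
    assert resolve_name('json', '') == 'json'   # absolute names pass through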
def _find_spec_from_path(name, path=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
Dotted names do not have their parent packages implicitly imported. You will
most likely need to explicitly import all parent packages in the proper
order for a submodule to get the correct spec.
"""
if name not in sys.modules:
return _find_spec(name, path)
else:
module = sys.modules[name]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name))
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
def find_spec(name, package=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
If the name is for submodule (contains a dot), the parent module is
automatically imported.
The name and package arguments work the same as importlib.import_module().
In other words, relative module names (with leading dots) work.
"""
fullname = resolve_name(name, package) if name.startswith('.') else name
if fullname not in sys.modules:
parent_name = fullname.rpartition('.')[0]
if parent_name:
# Use builtins.__import__() in case someone replaced it.
parent = __import__(parent_name, fromlist=['__path__'])
return _find_spec(fullname, parent.__path__)
else:
return _find_spec(fullname, None)
else:
module = sys.modules[fullname]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name))
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
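# Illustrative usage (editor's addition) of find_spec() as an
# import-availability probe:
def _demo_find_spec():
    assert find_spec('no_such_module_xyz') is None   # absent -> None
    assert find_spec('json').name == 'json'          # present -> ModuleSpec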
@contextmanager
def _module_to_load(name):
is_reload = name in sys.modules
module = sys.modules.get(name)
if not is_reload:
# This must be done before open() is called as the 'io' module
# implicitly imports 'locale' and would otherwise trigger an
# infinite loop.
module = type(sys)(name)
# This must be done before putting the module in sys.modules
# (otherwise an optimization shortcut in import.c becomes wrong)
module.__initializing__ = True
sys.modules[name] = module
try:
yield module
except Exception:
if not is_reload:
try:
del sys.modules[name]
except KeyError:
pass
finally:
module.__initializing__ = False
def set_package(fxn):
"""Set __package__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_package_wrapper(*args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(*args, **kwargs)
if getattr(module, '__package__', None) is None:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
return module
return set_package_wrapper
def set_loader(fxn):
"""Set __loader__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_loader_wrapper(self, *args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(self, *args, **kwargs)
if getattr(module, '__loader__', None) is None:
module.__loader__ = self
return module
return set_loader_wrapper
def module_for_loader(fxn):
"""Decorator to handle selecting the proper module for loaders.
The decorated function is passed the module to use instead of the module
name. The module passed in to the function is either from sys.modules if
it already exists or is a new module. If the module is new, then __name__
is set to the first argument to the method, __loader__ is set to self, and
__package__ is set accordingly (if self.is_package() is defined) before it
is passed to the decorated function (if self.is_package() does not work for
the module it will be set post-load).
If an exception is raised and the decorator created the module it is
subsequently removed from sys.modules.
The decorator assumes that the decorated function takes the module name as
the second argument.
"""
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
@functools.wraps(fxn)
def module_for_loader_wrapper(self, fullname, *args, **kwargs):
with _module_to_load(fullname) as module:
module.__loader__ = self
try:
is_package = self.is_package(fullname)
except (ImportError, AttributeError):
pass
else:
if is_package:
module.__package__ = fullname
else:
module.__package__ = fullname.rpartition('.')[0]
# If __package__ was not set above, __import__() will do it later.
return fxn(self, module, *args, **kwargs)
return module_for_loader_wrapper
| dominikl/openmicroscopy | components/tools/OmeroPy/src/omero/plugins/cecog.py | Python | gpl-2.0 | 6,684 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Methods for working with cecog
Copyright 2010 University of Dundee, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import os
import re
import sys
from omero.cli import BaseControl, CLI
import omero
import omero.constants
from omero.rtypes import rstring
class CecogControl(BaseControl):
"""CeCog integration plugin.
Provides actions for preparing data and otherwise integrating with Cecog. See
the Run_Cecog_4.1.py script.
"""
# [MetaMorph_PlateScanPackage]
# regex_subdirectories = re.compile('(?=[^_]).*?(?P<D>\d+).*?')
# regex_position = re.compile('P(?P<P>.+?)_')
# continuous_frames = 1
regex_token = re.compile(r'(?P<Token>.+)\.')
regex_time = re.compile(r'T(?P<T>\d+)')
regex_channel = re.compile(r'_C(?P<C>.+?)(_|$)')
regex_zslice = re.compile(r'_Z(?P<Z>\d+)')
def _configure(self, parser):
sub = parser.sub()
merge = parser.add(sub, self.merge, self.merge.__doc__)
merge.add_argument("path", help="Path to image files")
rois = parser.add(sub, self.rois, self.rois.__doc__)
rois.add_argument(
"-f", "--file", required=True, help="Details file to be parsed")
rois.add_argument(
"-i", "--image", required=True,
help="Image id which should have ids attached")
for x in (merge, rois):
x.add_login_arguments()
#
# Public methods
#
def merge(self, args):
"""Uses PIL to read multiple planes from a local folder.
Planes are combined and uploaded to OMERO as new images with additional T, C,
Z dimensions.
It should be run as a local script (not via scripting service) in order that
it has access to the local user's file system. Therefore EMAN2 or PIL must be
installed locally.
Example usage:
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/0037/
Since this dir does not contain folders, this will upload images in '0037'
into a Dataset called Demo_data in a Project called 'Data'.
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/
Since this dir does contain folders, this will look for images in all
subdirectories of 'Demo_data' and upload images into a Dataset called
Demo_data in a Project called 'Data'.
Images will be combined in Z, C and T according to the \
MetaMorph_PlateScanPackage naming convention.
E.g. tubulin_P0037_T00005_Cgfp_Z1_S1.tiff is Point 37, Timepoint 5, Channel \
gfp, Z 1. S?
see \
/Applications/CecogPackage/CecogAnalyzer.app/Contents/Resources/resources/\
naming_schemes.conf
"""
"""
Processes the command args, makes project and dataset then calls
uploadDirAsImages() to process and
upload the images to OMERO.
"""
from omero.rtypes import unwrap
from omero.util.script_utils import uploadDirAsImages
path = args.path
client = self.ctx.conn(args)
queryService = client.sf.getQueryService()
updateService = client.sf.getUpdateService()
pixelsService = client.sf.getPixelsService()
# if we don't have any folders in the 'dir' E.g.
# CecogPackage/Data/Demo_data/0037/
# then 'Demo_data' becomes a dataset
subDirs = []
for f in os.listdir(path):
fullpath = path + f
# process folders in root dir:
if os.path.isdir(fullpath):
subDirs.append(fullpath)
# get the dataset name and project name from path
if len(subDirs) == 0:
p = path[:-1] # will remove the last folder
p = os.path.dirname(p)
else:
if os.path.basename(path) == "":
p = path[:-1] # remove slash
datasetName = os.path.basename(p) # e.g. Demo_data
p = p[:-1]
p = os.path.dirname(p)
projectName = os.path.basename(p) # e.g. Data
self.ctx.err("Putting images in Project: %s Dataset: %s"
% (projectName, datasetName))
# create dataset
dataset = omero.model.DatasetI()
dataset.name = rstring(datasetName)
dataset = updateService.saveAndReturnObject(dataset)
# create project
project = omero.model.ProjectI()
project.name = rstring(projectName)
project = updateService.saveAndReturnObject(project)
# put dataset in project
link = omero.model.ProjectDatasetLinkI()
link.parent = omero.model.ProjectI(project.id.val, False)
link.child = omero.model.DatasetI(dataset.id.val, False)
updateService.saveAndReturnObject(link)
if len(subDirs) > 0:
for subDir in subDirs:
self.ctx.err("Processing images in %s" % subDir)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, subDir, dataset)
self.ctx.out("%s" % unwrap(rv))
# if there are no sub-directories, just put all the images in the dir
else:
self.ctx.err("Processing images in %s" % path)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, path, dataset)
self.ctx.out("%s" % unwrap(rv))
def rois(self, args):
"""Parses an object_details text file, as generated by CeCog Analyzer
and saves the data as ROIs on an Image in OMERO.
Text file is of the form:
frame objID classLabel className centerX centerY mean sd
1 10 6 lateana 1119 41 76.8253796095 \
54.9305640673
Example usage:
bin/omero cecog rois -f \
Data/Demo_output/analyzed/0037/statistics/P0037__object_details.txt -i 502
"""
"""
Processes the command args, parses the object_details.txt file and
creates ROIs on the image specified in OMERO
"""
from omero.util.script_utils import uploadCecogObjectDetails
filePath = args.file
imageId = args.image
if not os.path.exists(filePath):
self.ctx.die(654, "Could find the object_details file at %s"
% filePath)
client = self.ctx.conn(args)
updateService = client.sf.getUpdateService()
ids = uploadCecogObjectDetails(updateService, imageId, filePath)
self.ctx.out("Rois created: %s" % len(ids))
try:
register("cecog", CecogControl, CecogControl.__doc__)
except NameError:
if __name__ == "__main__":
cli = CLI()
cli.register("cecog", CecogControl, CecogControl.__doc__)
cli.invoke(sys.argv[1:])
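# Illustrative check (editor's addition) of the class-level MetaMorph regexes,
# using the example file name from the merge() docstring:
def _demo_naming_regexes():
    fname = 'tubulin_P0037_T00005_Cgfp_Z1_S1.tiff'
    assert CecogControl.regex_time.search(fname).group('T') == '00005'
    assert CecogControl.regex_channel.search(fname).group('C') == 'gfp'
    assert CecogControl.regex_zslice.search(fname).group('Z') == '1'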
| idalin/calibre-web | cps/fb2.py | Python | gpl-3.0 | 2,739 | 0.00146 |
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018 lemmsh, cervinko, OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
from lxml import etree
from .constants import BookMeta
def get_fb2_info(tmp_file_path, original_file_extension):
ns = {
'fb': 'http://www.gribuser.ru/xml/fictionbook/2.0',
'l': 'http://www.w3.org/1999/xlink',
}
fb2_file = open(tmp_file_path)
tree = etree.fromstring(fb2_file.read().encode())
authors = tree.xpath('/fb:FictionBook/fb:description/fb:title-info/fb:author', namespaces=ns)
def get_author(element):
last_name = element.xpath('fb:last-name/text()', namespaces=ns)
if len(last_name):
last_name = last_name[0]
else:
last_name = u''
middle_name = element.xpath('fb:middle-name/text()', namespaces=ns)
if len(middle_name):
middle_name = middle_name[0]
else:
middle_name = u''
first_name = element.xpath('fb:first-name/text()', namespaces=ns)
if len(first_name):
first_name = first_name[0]
else:
first_name = u''
return (first_name + u' '
+ middle_name + u' '
+ last_name)
author = str(", ".join(map(get_author, authors)))
title = tree.xpath('/fb:FictionBook/fb:description/fb:title-info/fb:book-title/text()', namespaces=ns)
if len(title):
title = str(title[0])
else:
title = u''
description = tree.xpath('/fb:FictionBook/fb:description/fb:publish-info/fb:book-name/text()', namespaces=ns)
if len(description):
description = str(description[0])
else:
description = u''
return BookMeta(
file_path=tmp_file_path,
extension=original_file_extension,
title=title,
author=author,
cover=None,
description=description,
tags="",
series="",
series_id="",
languages="",
publisher="")
| chrisspen/django-pjm | django_pjm/management/commands/import_pjm_loads.py | Python | mit | 1,807 | 0.006641 |
from datetime import date
from monthdelta import MonthDelta as monthdelta
from optparse import make_option
from django.core.management.base import NoArgsCommand, BaseCommand
import dateutil.parser
from django_pjm import models
class Command(BaseCommand):
help = "Imports PJM load values."
args = ''
option_list = BaseCommand.option_list + (
make_option('--start-date', default=None),
make_option('--end-date', default=None),
make_option('--zone', default=None),
make_option('--segments', default=None),
#make_option('--only-type', default=None),
#make_option('--auto-reprocess-days', default=0),
)
def handle(self, **options):
start_date = (options['start_date'] or '').strip()
if start_date:
start_date = dateutil.parser.parse(start_date)
start_date = date(start_date.year, start_date.month, start_date.day)
else:
start_date = date.today() - monthdelta(1)
end_date = (options['end_date'] or '').strip()
if end_date:
end_date = dateutil.parser.parse(end_date)
end_date = date(end_date.year, end_date.month, end_date.day)
else:
end_date = date.today()
segments = [_ for _ in options['segments'].split(',') if _.strip()]
while start_date <= end_date:
for segment in segments:
print 'Calculating for segment %s on start date %s.' % (segment, start_date)
models.Load.calculate(
year=start_date.year,
month=start_date.month,
zone=options['zone'],
segment=segment,
)
start_date += monthdelta(1)
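# Illustrative sketch (editor's addition) of the MonthDelta stepping used by
# the while-loop above (monthdelta is the third-party package imported at top):
def _demo_month_stepping():
    d = date(2015, 11, 15)
    assert d + monthdelta(1) == date(2015, 12, 15)   # clamps the day if needed
    assert d + monthdelta(2) == date(2016, 1, 15)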
| FabriceSalvaire/grouped-purchase-order | GroupedPurchaseOrder/bootstrap/components.py | Python | agpl-3.0 | 6,955 | 0.006039 |
####################################################################################################
#
# GroupedPurchaseOrder - A Django Application.
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
from django.core.urlresolvers import reverse, NoReverseMatch
from django.forms.utils import flatatt
from django.utils.html import escape, format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
####################################################################################################
from .html import join_text, merge_new_words, render_tag
####################################################################################################
#
# Notes:
# - How to concate in {% %} ? #deleteModal{{ supplier.pk }}
# - url 'suppliers.update' supplier.pk
#
####################################################################################################
####################################################################################################
def render_icon(icon, title=''):
"""Render a glyphicon.
"""
#? escape ?
# attrs = {'class': 'glyphicon glyphicon-{}'.format(icon)}
attrs = {'class': 'glyphicon glyphicon-' + icon}
if title:
attrs['title'] = _(title)
return format_html('<span{0}></span>', flatatt(attrs))
####################################################################################################
def render_button(content, icon=None, style='default', size='', href='', title='', button_class='', attrs=None):
"""Render a button with content
"""
# <button type="button" class="btn btn-default">Default</button>
# <button type="button" class="btn btn-primary">Primary</button>
# <button type="button" class="btn btn-success">Success</button>
# <button type="button" class="btn btn-info">Info</button>
# <button type="button" class="btn btn-warning">Warning</button>
# <button type="button" class="btn btn-danger">Danger</button>
# <button type="button" class="btn btn-link">Link</button>
#
# size : btn-lg, btn-sm, btn-xs
# <button type="button" class="btn btn-primary btn-lg">Large button</button>
#
# btn-block
# <button type="button" class="btn btn-primary btn-lg btn-block">Block level button</button>
# <button type="button" class="btn btn-default btn-lg btn-block">Block level button</button>
#
# active
# <button type="button" class="btn btn-primary btn-lg active">Primary button</button>
# <a href="#" class="btn btn-default btn-lg active" role="button">Link</a>
#
# disabled="disabled"
# <button type="button" class="btn btn-lg btn-primary" disabled="disabled">Primary button</button>
# <a href="#" class="btn btn-default btn-lg disabled" role="button">Link</a>
#
# <a class="btn btn-default" href="#" role="button">Link</a>
# <button class="btn btn-default" type="submit">Button</button>
# <input class="btn btn-default" type="button" value="Input">
# <input class="btn btn-default" type="submit" value="Submit">
if attrs is None:
attrs = {}
classes = ['btn']
button_styles = ('default', 'primary', 'success', 'info', 'warning', 'danger', 'link')
if style in button_styles:
classes.append('btn-' + style)
else:
raise ValueError('Parameter style must be {} ("{}" given)',
', '.join(button_styles), style)
# size = text_value(size).lower().strip()
if size:
if size == 'xs':
classes.append('btn-xs')
elif size == 'sm' or size == 'small':
classes.append('btn-sm')
elif size == 'lg' or size == 'large':
classes.append('btn-lg')
else:
raise ValueError('Parameter "size" should be "xs", "sm", "lg" or empty ("{}" given)',
format(size))
attrs['class'] = merge_new_words(button_class, classes)
if href:
try:
# current_app = context['request'].resolver_match.namespace
# viewname=viewname, args=view_args, kwargs=view_kwargs, current_app=current_app
url = reverse(href)
except NoReverseMatch:
url = href
attrs['href'] = url
tag = 'a'
else:
tag = 'button'
if title:
attrs['title'] = escape(_(title))
icon_content = render_icon(icon) if icon else ''
if content:
content = join_text((icon_content, escape(_(content))), separator=' ')
else:
content = icon_content
return render_tag(tag, mark_safe(content), attrs=attrs)
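# Illustrative expected markup (editor's addition; hypothetical call, and the
# exact escaping/attribute order comes from flatatt() and the project's
# render_tag() helper):
# render_button('Save', icon='ok', style='primary', size='sm')
#   -> <button class="btn btn-primary btn-sm">
#        <span class="glyphicon glyphicon-ok"></span> Save</button>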
####################################################################################################
def render_icon_button(icon, **kwargs):
return render_button(None, icon=icon, **kwargs)
####################################################################################################
def render_modal_icon_button(icon, *args, **kwargs):
attrs = {'data-toggle':'modal', 'data-target':join_text(args)}
return render_button(None, icon=icon, attrs=attrs, **kwargs)
####################################################################################################
def render_dismiss_button(title, **kwargs):
attrs = {'type':'button', 'data-dismiss':'modal'}
return render_button(title, attrs=attrs, **kwargs)
####################################################################################################
def render_close_button(*args, **kwargs):
# '<button type="button" class="close" data-dismiss="modal">'
# '</button>'
attrs = {'type':'button', 'class':'close', 'data-dismiss':'modal'}
title = escape(_('Close'))
content = ('<span aria-hidden="true">×</span>'
'<span class="sr-only">{0}</span>'.format(title))
return render_tag('button', mark_safe(content), attrs=attrs)
####################################################################################################
#
# End
#
####################################################################################################
| brianwc/courtlistener | cl/citations/tasks.py | Python | agpl-3.0 | 5,964 | 0.000503 |
import re
from django.core import urlresolvers
from django.db import IntegrityError
from cl.citations import find_citations, match_citations
from cl.custom_filters.templatetags.text_filters import best_case_name
from cl.search.models import Opinion, OpinionsCited
from celery import task
def get_document_citations(opinion):
"""Identify and return citations from the html or plain text of the
opinion.
"""
if opinion.html_columbia:
citations = find_citations.get_citations(opinion.html_columbia)
elif opinion.html_lawbox:
citations = find_citations.get_citations(opinion.html_lawbox)
elif opinion.html:
citations = find_citations.get_citations(opinion.html)
elif opinion.plain_text:
citations = find_citations.get_citations(opinion.plain_text,
html=False)
else:
citations = []
return citations
def create_cited_html(opinion, citations):
if any([opinion.html_columbia, opinion.html_lawbox, opinion.html]):
new_html = opinion.html_columbia or opinion.html_lawbox or opinion.html
for citation in citations:
new_html = re.sub(citation.as_regex(), citation.as_html(),
new_html)
elif opinion.plain_text:
inner_html = opinion.plain_text
for citation in citations:
repl = u'</pre>%s<pre class="inline">' % citation.as_html()
inner_html = re.sub(citation.as_regex(), repl, inner_html)
new_html = u'<pre class="inline">%s</pre>' % inner_html
return new_html.encode('utf-8')
@task
def update_document(opinion, index=True):
"""Get the citations for an item and save it an
|
d add it to the index if
requested."""
DEBUG = 0
if DEBUG >= 1:
print "%s at %s" % (
best_case_name(opinion.cluster),
urlresolvers.reverse(
'admin:search_opinioncluster_change',
args=(opinion.cluster.pk,),
)
)
citations = get_document_citations(opinion)
# List for tracking number of citation vs. name matches
matched_citations = []
# List used so we can do one simple update to the citing opinion.
opinions_cited = []
for citation in citations:
# Resource.org docs contain their own citation in the html text, which
# we don't want to include
if citation.base_citation() in opinion.cluster.citation_string:
continue
matches, is_citation_match = match_citations.match_citation(
citation,
citing_doc=opinion
)
# TODO: Figure out what to do if there's more than one
if len(matches) == 1:
matched_citations.append(is_citation_match)
match_id = matches[0]['id']
try:
matched_opinion = Opinion.objects.get(pk=match_id)
# Increase citation count for matched cluster if it hasn't
# already been cited by this opinion.
if matched_opinion not in opinion.opinions_cited.all():
matched_opinion.cluster.citation_count += 1
matched_opinion.cluster.save(index=index)
# Add citation match to the citing opinion's list of cases it
# cites. opinions_cited is a set so duplicates aren't an issue
opinions_cited.append(matched_opinion.pk)
# URL field will be used for generating inline citation html
citation.match_url = matched_opinion.cluster.get_absolute_url()
citation.match_id = matched_opinion.pk
except Opinion.DoesNotExist:
if DEBUG >= 2:
print "No Opinions returned for id %s" % match_id
continue
except Opinion.MultipleObjectsReturned:
if DEBUG >= 2:
print "Multiple Opinions returned for id %s" % match_id
continue
else:
#create_stub([citation])
if DEBUG >= 2:
# TODO: Don't print 1 line per citation. Save them in a list
# and print in a single line at the end.
print "No match found for citation %s" % citation.base_citation()
# Only update things if we found citations
if citations:
opinion.html_with_citations = create_cited_html(opinion, citations)
try:
OpinionsCited.objects.bulk_create([
OpinionsCited(citing_opinion_id=pk,
cited_opinion_id=opinion.pk) for
pk in opinions_cited
])
except IntegrityError as e:
# If bulk_create would create an item that already exists, it fails.
# In that case, do them one by one, skipping failing cases.
for pk in opinions_cited:
try:
cited = OpinionsCited(
citing_opinion_id=pk,
cited_opinion_id=opinion.pk,
)
cited.save()
except IntegrityError:
# We'll just skip the ones that already exist, but still do
# the others.
pass
if DEBUG >= 3:
print opinion.html_with_citations
# Update Solr if requested. In some cases we do it at the end for
# performance reasons.
opinion.save(index=index)
if DEBUG >= 1:
citation_matches = sum(matched_citations)
name_matches = len(matched_citations) - citation_matches
print " %d citations" % len(citations)
print " %d exact matches" % citation_matches
print " %d name matches" % name_matches
@task
def update_document_by_id(opinion_id):
"""This is not an OK way to do id-based tasks. Needs to be refactored."""
op = Opinion.objects.get(pk=opinion_id)
update_document(op)
| lgp171188/fjord | fjord/base/middleware.py | Python | bsd-3-clause | 5,915 | 0 |
"""
The middlewares in this file do mobile detection, provide a user override,
and provide a cookie override. They must be used in the correct order.
MobileMiddleware must always come after any of the other middlewares in this
file in `settings.MIDDLEWARE_CLASSES`.
"""
import urllib
from warnings import warn
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
from django.utils import translation
from django.utils.encoding import smart_str
from fjord.base import urlresolvers
from fjord.base.browsers import parse_ua
from fjord.base.templatetags.jinja_helpers import urlparams
MOBILE_COOKIE = getattr(settings, 'MOBILE_COOKIE', 'mobile')
class UserAgentMiddleware(object):
"""Add ``request.BROWSER`` which has information from the User-Agent
``request.BROWSER`` has the following attributes:
- browser: The user's browser, eg: "Firefox".
- browser_version: The browser's version, eg: "14.0.1"
- platform: The general platform the user is using, eg "Windows".
- platform_version: The version of the platform, eg. "XP" or "10.6.2".
- mobile: If the client is using a mobile device. `True` or `False`.
Any of the above may be `None` if detection fails.
"""
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
request.BROWSER = parse_ua(ua)
class MobileQueryStringMiddleware(object):
"""
Add querystring override for mobile.
This allows the user to override mobile detection by setting
'mobile=1' in the querystring. This will persist in a cookie
that the other middlewares in this file will respect.
"""
def process_request(self, request):
# The 'mobile' querystring overrides any prior MOBILE
# figuring and we put it in two places.
mobile_qs = request.GET.get('mobile', None)
if mobile_qs == '1':
request.MOBILE = True
request.MOBILE_SET_COOKIE = 'yes'
elif mobile_qs == '0':
request.MOBILE = False
request.MOBILE_SET_COOKIE = 'no'
class MobileMiddleware(object):
"""
Set request.MOBILE based on cookies and UA detection.
The set of rules to decide `request.MOBILE` is given below. If any rule
matches, the process stops.
1. If there is a variable `mobile` in the query string, `request.MOBILE`
is set accordingly.
2. If a cookie is set indicating a mobile preference, follow it.
3. If user agent parsing has already happened, trust its judgment about
mobile-ness. (i.e. `request.BROWSER.mobile`)
4. Otherwise, set `request.MOBILE` to True if the string "mobile" is in the
user agent (case insensitive), and False otherwise.
If there is a variable `request.MOBILE_SET_COOKIE`, its value will be
stored in the mobile cookie.
"""
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
mc = request.COOKIES.get(MOBILE_COOKIE)
if hasattr(request, 'MOBILE'):
# Our work here is done
return
if mc:
request.MOBILE = (mc == 'yes')
return
if hasattr(request, 'BROWSER'):
# UA Detection already figured this out.
request.MOBILE = request.BROWSER.mobile
return
# Make a guess based on UA if nothing else has figured it out.
request.MOBILE = ('mobile' in ua)
def process_response(self, request, response):
if hasattr(request, 'MOBILE_SET_COOKIE'):
cookie_value = request.MOBILE_SET_COOKIE
response.set_cookie(MOBILE_COOKIE, cookie_value)
return response
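# Illustrative sketch (editor's addition) of rule 2 above (the cookie wins);
# assumes Django settings are already configured, as in a running fjord
# instance:
# from django.test import RequestFactory
# request = RequestFactory().get('/')
# request.COOKIES[MOBILE_COOKIE] = 'yes'
# MobileMiddleware().process_request(request)
# request.MOBILE  # -> True; rules 3 and 4 never run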
class LocaleURLMiddleware(object):
"""
1. Search for the locale.
2. Save it in the request.
3. Strip them from the URL.
"""
def __init__(self):
if not settings.USE_I18N or not settings.USE_L10N:
warn('USE_I18N or USE_L10N is False but LocaleURLMiddleware is '
'loaded. Consider removing fjord.base.middleware.'
'LocaleURLMiddleware from your MIDDLEWARE_CLASSES setting.')
self.exempt_urls = getattr(settings, 'FF_EXEMPT_LANG_PARAM_URLS', ())
def _is_lang_change(self, request):
"""Return True if the lang param is present and URL isn't exempt."""
if 'lang' not in request.GET:
return False
return not any(request.path.endswith(url) for url in self.exempt_urls)
def process_request(self, request):
prefixer = urlresolvers.Prefixer(request)
urlresolvers.set_url_prefix(prefixer)
full_path = prefixer.fix(prefixer.shortened_path)
if self._is_lang_change(request):
# Blank out the locale so that we can set a new one. Remove lang
# from the query params so we don't have an infinite loop.
prefixer.locale = ''
new_path = prefixer.fix(prefixer.shortened_path)
query = dict((smart_str(k), request.GET[k]) for k in request.GET)
query.pop('lang')
return HttpResponsePermanentRedirect(urlparams(new_path, **query))
if full_path != request.path:
query_string = request.META.get('QUERY_STRING', '')
full_path = urllib.quote(full_path.encode('utf-8'))
if query_string:
full_path = '%s?%s' % (full_path, query_string)
response = HttpResponsePermanentRedirect(full_path)
# Vary on Accept-Language if we changed the locale
old_locale = prefixer.locale
new_locale, _ = urlresolvers.split_path(full_path)
if old_locale != new_locale:
response['Vary'] = 'Accept-Language'
return response
request.path_info = '/' + prefixer.shortened_path
request.locale = prefixer.locale
translation.activate(prefixer.locale)
| facebook/fbthrift | thrift/compiler/test/fixtures/types/gen-py/include/__init__.py | Python | apache-2.0 | 147 | 0 |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
__all__ = ['ttypes', 'constants']
| dnanexus/rseqc | rseqc/lib/qcmodule/twoList.py | Python | gpl-3.0 | 2,098 | 0.039561 |
'''manipulate ndarray list'''
from itertools import imap,starmap,izip
from operator import mul,add,sub
def check_list(v1,v2):
'''check if the length of two list is same'''
if v1.size != v2.size:
raise ValueError,"the lenght of both arrays must be the same"
pass
def Add(v1,v2):
'''add two list'''
check_list(v1,v2)
return v1.__add__(v2)
def Subtract(v1,v2):
'''subtract v2 from v1'''
check_list(v1,v2)
return v1.__sub__(v2)
def Product(v1,v2):
'''return product of two list'''
check_list(v1,v2)
return v1.__mul__(v2)
def Division(v1,v2):
'''divide v1 by v2, adding 1 to both to avoid division by zero'''
check_list(v1,v2)
return (v1+1).__div__(v2+1)
def Average(v1,v2):
'''return arithmetic mean of two list'''
check_list(v1,v2)
return v1.__add__(v2)/2
def geometricMean(v1,v2):
'''return geometric mean of two list'''
check_list(v1,v2)
return (v1.__mul__(v2))**0.5
def Max(v1,v2):
'''pairwise comparison of two lists; return the max of each paired number'''
check_list(v1,v2)
return imap(max,izip(v1,v2))
def Min(v1,v2):
'''pairwise comparison of two lists; return the min of each paired number'''
check_list(v1,v2)
return imap(min,izip(v1,v2))
def euclidean_distance(v1,v2):
'''return length-normalized euclidean distance'''
check_list(v1,v2)
return (sum((v1.__sub__(v2))**2) / v1.size)**0.5
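# Illustrative usage (editor's addition) with numpy arrays, per the module
# docstring's "ndarray":
def _demo_twolist():
    import numpy as np
    v1, v2 = np.array([1.0, 2.0, 3.0]), np.array([3.0, 2.0, 1.0])
    assert list(Add(v1, v2)) == [4.0, 4.0, 4.0]
    assert abs(euclidean_distance(v1, v2) - (8.0 / 3) ** 0.5) < 1e-12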
| devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/moto/sqs/urls.py | Python | agpl-3.0 | 267 | 0.011236 |
from .responses import QueueResponse, QueuesResponse
url_bases = [
"https
|
?://(.*?)(queue|sqs)(.*?).amazonaws.com"
]
url_paths = {
'{0}/$': QueuesResponse().dispatch,
'{0}/(?P<account_id>\d+)/(?P<queue_name>[a-zA-Z0-9\-_]+)': QueueResponse().dispatch,
}
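# Illustrative check (editor's addition) of what the reassembled base pattern
# matches:
def _demo_url_base():
    import re
    assert re.match(url_bases[0], 'https://queue.amazonaws.com')
    assert re.match(url_bases[0], 'http://sqs.us-east-1.amazonaws.com')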
| nextgis-extra/tests | lib_gdal/ogr/ogr_sde.py | Python | gpl-2.0 | 10,401 | 0.011826 |
#!/usr/bin/env python
###############################################################################
# $Id: ogr_sde.py 33793 2016-03-26 13:02:07Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test OGR ArcSDE driver.
# Author: Howard Butler <hobu.inc@gmail.com>
#
###############################################################################
# Copyright (c) 2008, Howard Butler <hobu.inc@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import osr
from osgeo import gdal
###############################################################################
# Open ArcSDE datasource.
sde_server = '172.16.1.193'
sde_port = '5151'
sde_db = 'sde'
sde_user = 'sde'
sde_password = 'sde'
gdaltest.sde_dr = None
try:
gdaltest.sde_dr = ogr.GetDriverByName( 'SDE' )
except:
pass
def ogr_sde_1():
"Test basic opening of a database"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base)
if ds is None:
print("Could not open %s" % base)
gdaltest.sde_dr = None
return 'skip'
ds.Destroy()
ds = ogr.Open(base, update=1)
ds.Destroy()
return 'success'
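# Illustrative note (editor's addition): every test below builds its
# datasource string as 'SDE:server,port,database,user,password[,table[,version]]':
def _demo_sde_connection_string():
    base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user,
                                   sde_password)
    assert base == 'SDE:172.16.1.193,5151,sde,sde,sde'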
def ogr_sde_2():
"Test creation of a layer"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
shp_ds = ogr.Open( 'data/poly.shp' )
gdaltest.shp_ds = shp_ds
shp_lyr = shp_ds.GetLayer(0)
ds = ogr.Open(base, update=1)
lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon, srs=shp_lyr.GetSpatialRef(),options = [ 'OVERWRITE=YES' ] )
# lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon)
ogrtest.quick_create_layer_def( lyr,
[ ('AREA', ogr.OFTReal),
('EAS_ID', ogr.OFTInteger),
('PRFEDEA', ogr.OFTString),
('WHEN', ogr.OFTDateTime) ] )
#######################################################
# Copy in poly.shp
dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
feat = shp_lyr.GetNextFeature()
gdaltest.poly_feat = []
while feat is not None:
gdaltest.poly_feat.append( feat )
dst_feat.SetFrom( feat )
lyr.CreateFeature( dst_feat )
feat = shp_lyr.GetNextFeature()
dst_feat.Destroy()
return 'success'
def ogr_sde_3():
"Test basic version locking"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
ds2 = ogr.Open(base, update=1)
if ds2 is not None:
gdaltest.post_reason('A locked version was able to be opened')
return 'fail'
ds.Destroy()
return 'success'
def ogr_sde_4():
"Test basic version creation"
if gdaltest.sde_dr is None:
return 'skip'
version_name = 'TESTING'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'TRUE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
ds.Destroy()
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
ds.Destroy()
return 'success'
def ogr_sde_5():
"Test versioned editing"
if gdaltest.sde_dr is None:
return 'skip'
version_name = 'TESTING'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'TRUE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY')
f1 = l1.GetFeature(1)
f1.SetField("PRFEDEA",'SDE.TESTING')
l1.SetFeature(f1)
ds.Destroy()
del ds
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
default = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, default)
# print default
ds2 = ogr.Open(default, update=1)
l2 = ds2.GetLayerByName('SDE.TPOLY')
f2 = l2.GetFeature(1)
f2.SetField("PRFEDEA",'SDE.DEFAULT')
f2.SetField("WHEN", 2008, 3, 19, 16, 15, 00, 0)
l2.SetFeature(f2)
ds2.Destroy()
del ds2
ds3 = ogr.Open(base)
l3 = ds3.GetLayerByName('SDE.TPOLY')
f3 = l3.GetFeature(1)
if f3.GetField("PRFEDEA") != "SDE.TESTING":
gdaltest.post_reason('versioned editing failed for child version SDE.TESTING')
return 'fail'
ds3.Destroy()
del ds3
ds4 = ogr.Open(default)
l4 = ds4.GetLayerByName('SDE.TPOLY')
f4 = l4.GetFeature(1)
if f4.GetField("PRFEDEA") != "SDE.DEFAULT":
gdaltest.post_reason('versioned editing failed for parent version SDE.DEFAULT')
return 'fail'
idx = f4.GetFieldIndex('WHEN')
df = f4.GetField(idx)
if df != '2008/03/19 16:15:00':
gdaltest.post_reason("datetime handling did not work -- expected '2008/03/19 16:15:00' got '%s' "% df)
ds4.Destroy()
del ds4
return 'success'
def ogr_sde_6():
"Extent fetching"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (
sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY')
extent = l1.GetExtent(force=0)
if extent != (0.0, 2147483645.0, 0.0, 2147483645.0):
gdaltest.post_reason("unforced extent did not equal expected value")
extent = l1.GetExtent(force=1)
if extent != (478316.0, 481645.0, 4762881.0, 4765611.0):
gdaltest.post_reason("forced extent did not equal expected value")
return 'success'
def ogr_sde_7():
"Bad layer test"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (
sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY2')
if l1:
gdaltest.post_reason("we got a layer when we should not have")
ds.Destroy()
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
default = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (
sde_server, sde_port, sde_db, sde_user, sde_password, default)
ds = ogr.Open(default, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY2')
if l1:
gdaltest.post_reason("we got a layer when we should not have")
ds.Destroy()
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
    default = 'SDE:%s,%s,%s,%s,%s' % (
        sde_server, sde_port, sde_db, sde_user, sde_password)
|
waterxt/tensorflowkaldi
|
neuralNetworks/nnet.py
|
Python
|
mit
| 9,220
| 0.005748
|
'''@file nnet.py
contains the functionality for a Kaldi style neural network'''
import shutil
import os
import itertools
import numpy as np
import tensorflow as tf
import classifiers.activation
from classifiers.dnn import DNN
from trainer import CrossEnthropyTrainer
from decoder import Decoder
class Nnet(object):
'''a class for a neural network that can be used together with Kaldi'''
def __init__(self, conf, input_dim, num_labels):
'''
Nnet constructor
Args:
conf: nnet configuration
input_dim: network input dimension
num_labels: number of target labels
'''
#get nnet structure configs
self.conf = dict(conf.items('nnet'))
#define location to save neural nets
self.conf['savedir'] = (conf.get('directories', 'expdir')
+ '/' + self.conf['name'])
if not os.path.isdir(self.conf['savedir']):
os.mkdir(self.conf['savedir'])
if not os.path.isdir(self.conf['savedir'] + '/training'):
os.mkdir(self.conf['savedir'] + '/training')
#compute the input_dimension of the spliced features
self.input_dim = input_dim * (2*int(self.conf['context_width']) + 1)
if self.conf['batch_norm'] == 'True':
activation = classifiers.activation.Batchnorm(None)
else:
activation = None
#create the activation function
if self.conf['nonlin'] == 'relu':
activation = classifiers.activation.TfActivation(activation,
tf.nn.relu)
elif self.conf['nonlin'] == 'sigmoid':
activation = classifiers.activation.TfActivation(activation,
tf.nn.sigmoid)
elif self.conf['nonlin'] == 'tanh':
activation = classifiers.activation.TfActivation(activation,
tf.nn.tanh)
elif self.conf['nonlin'] == 'linear':
activation = classifiers.activation.TfActivation(activation,
                                                             lambda x: x)
else:
            raise Exception('unknown nonlinearity')
if self.conf['l2_norm'] == 'True':
activation = classifiers.activation.L2Norm(activation)
if float(self.conf['dropout']) < 1:
activation = classifiers.activation.Dropout(
activation, float(self.conf['dropout']))
self.weight_init = self.conf['weight_init']
#create a DNN
self.dnn = DNN(
num_labels, int(self.conf['num_hidden_layers']),
int(self.conf['num_hidden_units']), activation,
self.weight_init, int(self.conf['add_layer_period']) > 0)
def train(self, dispenser, dispenser_dev):
'''
        Train the neural network
Args:
dispenser: a batchdispenser for training data
dispenser_dev: a batchdispenser for dev data
'''
#put the DNN in a training environment
epoch = int(self.conf['epoch'])
max_epoch = int(self.conf['max_epoch'])
halve_learning_rate = int(self.conf['halve_learning_rate'])
start_halving_impr = float(self.conf['start_halving_impr'])
end_halving_impr = float(self.conf['end_halving_impr'])
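        # NOTE (descriptive comment, not in the original file): the while loop
        # below implements a "newbob"-style schedule: once the dev-loss
        # improvement drops below start_halving_impr the learning rate is
        # halved, after which it is halved again on every epoch whose
        # improvement still exceeds end_halving_impr; training stops once it
        # does not.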
trainer = CrossEnthropyTrainer(
self.dnn, self.input_dim, dispenser.max_input_length,
dispenser.max_target_length,
float(self.conf['initial_learning_rate']),
float(self.conf['l1_penalty']),
float(self.conf['l2_penalty']),
float(self.conf['momentum']),
int(self.conf['minibatch_size']),
float(self.conf['clip_grad']))
#start the visualization if it is requested
if self.conf['visualise'] == 'True':
if os.path.isdir(self.conf['savedir'] + '/logdir'):
shutil.rmtree(self.conf['savedir'] + '/logdir')
trainer.start_visualization(self.conf['savedir'] + '/logdir')
#start a tensorflow session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True #pylint: disable=E1101
with tf.Session(graph=trainer.graph, config=config):
#initialise the trainer
trainer.initialize()
#load the neural net if the starting epoch is not 0
if (epoch > 0):
trainer.restore_trainer(self.conf['savedir'] + '/training/')
#do a validation step
validation_loss = trainer.evaluate(dispenser_dev)
print '======================================= validation loss at epoch %d is: %f =============================' % (epoch, validation_loss)
#start the training iteration
while (epoch < max_epoch):
#update the model
loss = trainer.update(dispenser)
#print the progress
print '======================================= training loss at epoch %d is : %f ==============================' %(epoch, loss)
#validate the model if required
current_loss = trainer.evaluate(dispenser_dev)
print '======================================= validation loss at epoch %d is: %f ==========================' % (epoch, current_loss)
epoch += 1
if halve_learning_rate == 0:
if current_loss < validation_loss:
if current_loss > (validation_loss - start_halving_impr):
halve_learning_rate = 1
trainer.halve_learning_rate()
print "================ begining to halve learning rate ================"
validation_loss = current_loss
pre_loss = loss
trainer.save_trainer(self.conf['savedir']
+ '/training/', 'iter_' + str(epoch) + '_tr'+str(loss)+'_cv'+str(validation_loss))
else:
print ('the validation loss is worse, returning to '
'the previously validated model with halved '
'learning rate')
trainer.restore_trainer(self.conf['savedir']+ '/training/')
trainer.halve_learning_rate()
halve_learning_rate = 1
print "================ begining to halve learning rate ================"
continue
else:
if current_loss < (validation_loss - end_halving_impr):
trainer.halve_learning_rate()
pre_loss = loss
validation_loss = current_loss
trainer.save_trainer(self.conf['savedir']
+ '/training/', 'iter_' + str(epoch) + '_tr'+str(loss)+'_cv'+str(validation_loss))
else:
trainer.restore_trainer(self.conf['savedir'] + '/training/')
print ('the validation loss is worse, '
'terminating training')
break
#save the final model
trainer.save_model(self.conf['savedir'] + '/final')
#compute the state prior and write it to the savedir
prior = dispenser.compute_target_count().astype(np.float32)
prior = prior + 1
prior = prior/prior.sum()
np.save(self.conf['savedir'] + '/prior.npy', prior)
def decode(self, reader, writer):
'''
        compute pseudo likelihoods for the testing set
Args:
reader: a feature reader object to read features to decode
writer: a writer object to write likelihoods
'''
#create a decoder
decoder = Decoder(self.dnn, self.input_dim, reader.max_input_length)
#read the prior
        prior = np.load(self.conf['savedir'] + '/prior.npy')
|
saurabhbajaj207/CarpeDiem
|
venv/Lib/site-packages/pyasn1_modules/rfc2251.py
|
Python
|
mit
| 26,833
| 0.004398
|
#
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
# LDAP message syntax
#
# ASN.1 source from:
# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/ldap.asn
#
# Sample captures from:
# http://wiki.wireshark.org/SampleCaptures/
#
from pyasn1.type import tag, namedtype, namedval, univ, constraint
maxInt = univ.Integer(2147483647)
class LDAPString(univ.OctetString):
pass
class LDAPOID(univ.OctetString):
pass
class LDAPDN(LDAPString):
pass
class RelativeLDAPDN(LDAPString):
pass
class AttributeType(LDAPString):
pass
class AttributeDescription(LDAPString):
pass
class AttributeDescriptionList(univ.SequenceOf):
componentType = AttributeDescription()
class AttributeValue(univ.OctetString):
pass
class AssertionValue(univ.OctetString):
pass
class AttributeValueAssertion(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('attributeDesc', AttributeDescription()),
namedtype.NamedType('assertionValue', AssertionValue())
)
class Attribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
class MatchingRuleId(LDAPString):
pass
class Control(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('controlType', LDAPOID()),
namedtype.DefaultedNamedType('criticality', univ.Boolean('False')),
namedtype.OptionalNamedType('controlValue', univ.OctetString())
)
class Controls(univ.SequenceOf):
componentType = Control()
class LDAPURL(LDAPString):
pass
class Referral(univ.SequenceOf):
componentType = LDAPURL()
class SaslCredentials(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('mechanism', LDAPString()),
namedtype.OptionalNamedType('credentials', univ.OctetString())
)
class AuthenticationChoice(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('simple', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('reserved-1', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.NamedType('reserved-2', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.NamedType('sasl',
SaslCredentials().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
class BindRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 0)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('version', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 127))),
namedtype.NamedType('name', LDAPDN()),
namedtype.NamedType('authentication', AuthenticationChoice())
)
class PartialAttributeList(univ.SequenceOf):
componentType = univ.Sequence(
componentType=namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
)
class SearchResultEntry(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 4)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('objectName', LDAPDN()),
namedtype.NamedType('attributes', PartialAttributeList())
)
class MatchingRuleAssertion(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('matchingRule', MatchingRuleId().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('type', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.NamedType('matchValue',
AssertionValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
namedtype.DefaultedNamedType('dnAttributes', univ.Boolean('False').subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
)
class SubstringFilter(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('substrings',
univ.SequenceOf(
componentType=univ.Choice(
componentType=namedtype.NamedTypes(
namedtype.NamedType(
'initial', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
namedtype.NamedType(
'any', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
),
namedtype.NamedType(
'final', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))
)
)
)
)
)
)
# Ugly hack to handle recursive Filter reference (up to 3-levels deep).
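# NOTE (descriptive comment, not in the original file): Filter3 below is the
# innermost, non-recursive level; Filter2 wraps Filter3 inside 'and'/'or'/'not',
# and a top-level Filter presumably wraps Filter2 the same way, giving a fixed
# nesting depth instead of true recursion.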
class Filter3(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.NamedType('substrings', SubstringFilter().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
namedtype.NamedType('present', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
)
class Filter2(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('and', univ.SetOf(componentType=Filter3()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.NamedType('or', univ.SetOf(componentType=Filter3()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
namedtype.NamedType('not',
            Filter3().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.NamedType('substrings', SubstringFilter().subtype(
            implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
namedtype.NamedType('present', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
|
smallyear/linuxLearn
|
salt/salt/utils/yamlloader.py
|
Python
|
apache-2.0
| 3,478
| 0.000575
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
import warnings
# Import third party libs
import yaml
from yaml.nodes import MappingNode
from yaml.constructor import ConstructorError
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except Exception:
pass
# This function is safe and needs to stay as yaml.load. The load function
# accepts a custom loader, and every time this function is used in Salt
# the custom loader defined below is used. This should be altered though to
# not require the custom loader to be explicitly added.
load = yaml.load # pylint: disable=C0103
class DuplicateKeyWarning(RuntimeWarning):
'''
Warned when duplicate keys exist
'''
warnings.simplefilter('always', category=DuplicateKeyWarning)
# with code integrated from https://gist.github.com/844388
class SaltYamlSafeLoader(yaml.SafeLoader, object):
'''
Create a custom YAML loader that uses the custom constructor. This allows
for the YAML loading defaults to be manipulated based on needs within salt
    to make things like sls files more intuitive.
'''
def __init__(self, stream, dictclass=dict):
yaml.SafeLoader.__init__(self, stream)
if dictclass is not dict:
# then assume ordered dict and use it for both !map and !omap
self.add_constructor(
u'tag:yaml.org,2002:map',
type(self).construct_yaml_map)
self.add_constructor(
u'tag:yaml.org,2002:omap',
type(self).construct_yaml_map)
self.dictclass = dictclass
def construct_yaml_map(self, node):
data = self.dictclass()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_mapping(self, node, deep=False):
'''
Build the mapping for YAML
'''
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
'expected a mapping node, but found {0}'.format(node.id),
node.start_mark)
self.flatten_mapping(node)
mapping = self.dictclass()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError:
err = ('While constructing a mapping {0} found unacceptable '
'key {1}').format(node.start_mark, key_node.start_mark)
raise ConstructorError(err)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError('Conflicting ID {0!r}'.format(key))
mapping[key] = value
return mapping
def construct_scalar(self, node):
'''
        Verify integers and pass them in correctly if they are declared
as octal
'''
if node.tag == 'tag:yaml.org,2002:int':
if node.value == '0':
pass
elif node.value.startswith('0') and not node.value.startswith(('0b', '0x')):
node.value = node.value.lstrip('0')
# If value was all zeros, node.value would have been reduced to
# an empty string. Change it to '0'.
if node.value == '':
node.value = '0'
return super(SaltYamlSafeLoader, self).construct_scalar(node)
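# --- Hedged usage sketch (not part of the original file) ---
# With this loader, duplicate keys raise a ConstructorError instead of the
# last value silently winning:
#
#     load('a: 1\nb: 2', Loader=SaltYamlSafeLoader)   # -> {'a': 1, 'b': 2}
#     load('a: 1\na: 2', Loader=SaltYamlSafeLoader)   # raises ConstructorError
#
# An ordered mapping can be obtained by instantiating the loader directly,
# e.g. SaltYamlSafeLoader(stream, dictclass=OrderedDict).get_single_data().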
|
ic-labs/django-icekit
|
icekit/plugins/iiif/apps.py
|
Python
|
mit
| 1,144
| 0
|
from django.apps import AppConfig
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class AppConfig(AppConfig):
name = '.'.join(__name__.split('.')[:-1])
label = 'icekit_plugins_iiif'
verbose_name = "IIIF Basics"
def ready(self):
# Create custom permission pointing to User, because we have no other
        # model to hang it off for now...
# TODO This is a hack, find a better way
User = get_user_model()
try:
# this doesn't work if migrations haven't been updated, resulting
# in "RuntimeError: Error creating new content types. Please make
# sure contenttypes is migrated before trying to migrate apps
# individually."
            content_type = ContentType.objects.get_for_model(User)
Permission.objects.get_or_create(
codename='can_use_iiif_image_api',
name='Can Use IIIF Image API',
content_type=content_type,
)
except RuntimeError:
pass
|
wojtekwalczak/kaggle_titanic
|
titanic/transformers/FamilyCounter.py
|
Python
|
apache-2.0
| 946
| 0.005285
|
from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True):
self.use = use
    def transform(self, features_raw, **transform_params):
if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def set_params(self, **params):
if 'use' in params:
self.use = params.get('use')
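# --- Hedged usage sketch (not part of the original file) ---
# The transformer collapses SibSp + Parch into a single Family column:
#
#     df = pd.DataFrame({'SibSp': [1, 0], 'Parch': [2, 0], 'Age': [30, 40]})
#     FamilyCounter().fit(df).transform(df)
#     # -> columns Age, Family, with Family == [3, 0]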
|
cr0hn/OMSTD
|
examples/develop/lp/002/lp-002-s1.py
|
Python
|
bsd-2-clause
| 2,001
| 0.002999
|
# -*- coding: utf-8 -*-
"""
Project name: Open Methodology for Security Tool Developers
Project URL: https://github.com/cr0hn/OMSTD
Copyright (c) 2014, cr0hn<-AT->cr0hn.com
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__author__ = 'cr0hn - cr0hn<-at->cr0hn.com (@ggdaniel)'
from multiprocessing.pool import Pool
# ----------------------------------------------------------------------
def hello(i):
print(i)
# ----------------------------------------------------------------------
def main():
p = Pool(10)
p.map(hello, range(50))
if __name__ == '__main__':
main()
|
Micronaet/micronaet-xmlrpc
|
xmlrpc_operation_product/__openerp__.py
|
Python
|
agpl-3.0
| 1,769
| 0.004522
|
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'XMLRPC Operation Product',
'version': '0.1',
'category': 'ETL',
'description': '''
XMLRPC Import product
''',
'author': 'Micronaet S.r.l. - Nicola Riolini',
'website': 'http://www.micronaet.it',
'license': 'AGPL-3',
'depends': [
'base',
'xmlrpc_base',
'product',
'sql_product', # for statistic category
'base_accounting_program', # q x pack
#'sql_partner', # for fields to update
#'l10n_it_private', # private info
#'mx_partner_zone', # zone
# 'l10n_it_iban_cin'
],
'init_xml': [],
'demo': [],
'data': [
#'operation.xml',
'product_view.xml',
'data/operation.xml',
],
'active': False,
'installable': True,
'auto_install': False,
}
|
agry/NGECore2
|
scripts/mobiles/tatooine/variegated_womprat.py
|
Python
|
lgpl-3.0
| 1,684
| 0.026128
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('variegated_womprat')
mobileTemplate.setLevel(12)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Wild Meat")
mobileTemplate.setMeatAmount(3)
    mobileTemplate.setHideType("Leathery Hide")
    mobileTemplate.setBoneAmount(3)
mobileTemplate.setBoneType("Animal Bone")
mobileTemplate.setHideAmount(2)
mobileTemplate.setSocialGroup("variegated womprat")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_variegated_womp_rat.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_1')
attacks.add('bm_bolster_armor_1')
attacks.add('bm_enfeeble_1')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('variegated_womprat', mobileTemplate)
return
|
davish/Twitter-World-Mood
|
twython/twython.py
|
Python
|
mit
| 22,902
| 0.00262
|
#!/usr/bin/env python
"""
Twython is a library for Python that wraps the Twitter API.
It aims to abstract away all the API endpoints, so that additions to the library
and/or the Twitter API won't cause any overall problems.
Questions, comments? ryan@venodesigns.net
"""
__author__ = "Ryan McGrath <ryan@venodesigns.net>"
__version__ = "2.3.4"
import urllib
import re
import warnings
import requests
from requests.auth import OAuth1
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Twython maps keyword based arguments to Twitter API endpoints. The endpoints
# table is a file with a dictionary of every API endpoint that Twython supports.
from twitter_endpoints import base_url, api_table, twitter_http_status_codes
try:
import simplejson
except ImportError:
try:
# Python 2.6 and up
import json as simplejson
except ImportError:
try:
from django.utils import simplejson
except:
# Seriously wtf is wrong with you if you get this Exception.
raise Exception("Twython requires the simplejson library (or Python 2.6) to work. http://www.undefined.org/python/")
class TwythonError(Exception):
"""
Generic error class, catch-all for most Twython issues.
Special cases are handled by TwythonAPILimit and TwythonAuthError.
Note: To use these, the syntax has changed as of Twython 1.3. To catch these,
you need to explicitly import them into your code, e.g:
from twython import TwythonError, TwythonAPILimit, TwythonAuthError
"""
def __init__(self, msg, error_code=None, retry_after=None):
self.msg = msg
self.error_code = error_code
if error_code is not None and error_code in twitter_http_status_codes:
self.msg = '%s: %s -- %s' % \
(twitter_http_status_codes[error_code][0],
twitter_http_status_codes[error_code][1],
self.msg)
def __str__(self):
return repr(self.msg)
class TwythonAuthError(TwythonError):
""" Raised when you try to access a protected resource and it fails due to
some issue with your authentication.
"""
pass
class TwythonRateLimitError(TwythonError):
""" Raised when you've hit a rate limit.
retry_wait_seconds is the number of seconds to wait before trying again.
"""
def __init__(self, msg, error_code, retry_after=None):
TwythonError.__init__(self, msg, error_code=error_code)
if isinstance(retry_after, int):
self.msg = '%s (Retry after %d seconds)' % (msg, retry_after)
class Twython(object):
def __init__(self, app_key=None, app_secret=None, oauth_token=None, oauth_token_secret=None, \
headers=None, callback_url=None, twitter_token=None, twitter_secret=None, proxies=None):
"""Instantiates an instance of Twython. Takes optional parameters for authentication and such (see below).
:param app_key: (optional) Your applications key
:param app_secret: (optional) Your applications secret key
:param oauth_token: (optional) Used with oauth_token_secret to make authenticated calls
:param oauth_token_secret: (optional) Used with oauth_token to make authenticated calls
:param headers: (optional) Custom headers to send along with the request
:param callback_url: (optional) If set, will overwrite the callback url set in your application
:param proxies: (optional) A dictionary of proxies, for example {"http":"proxy.example.org:8080", "https":"proxy.example.org:8081"}.
"""
# Needed for hitting that there API.
self.api_url = 'https://api.twitter.com/%s'
self.request_token_url = self.api_url % 'oauth/request_token'
self.access_token_url = self.api_url % 'oauth/access_token'
self.authorize_url = self.api_url % 'oauth/authorize'
self.authenticate_url = self.api_url % 'oauth/authenticate'
# Enforce unicode on keys and secrets
self.app_key = app_key and unicode(app_key) or twitter_token and unicode(twitter_token)
self.app_secret = app_key and unicode(app_secret) or twitter_secret and unicode(twitter_secret)
self.oauth_token = oauth_token and u'%s' % oauth_token
self.oauth_token_secret = oauth_token_secret and u'%s' % oauth_token_secret
self.callback_url = callback_url
        # If there's headers, set them, otherwise be an embarrassing parent for their own good.
self.headers = headers or {'User-Agent': 'Twython v' + __version__}
# Allow for unauthenticated requests
self.client = requests.session(proxies=proxies)
self.auth = None
if self.app_key is not None and self.app_secret is not None and \
self.oauth_token is None and self.oauth_token_secret is None:
self.auth = OAuth1(self.app_key, self.app_secret,
signature_type='auth_header')
if self.app_key is not None and self.app_secret is not None and \
self.oauth_token is not None and self.oauth_token_secret is not None:
self.auth = OAuth1(self.app_key, self.app_secret,
self.oauth_token, self.oauth_token_secret,
signature_type='auth_header')
if self.auth is not None:
self.client = requests.session(headers=self.headers, auth=self.auth, proxies=proxies)
# register available funcs to allow listing name when debugging.
def setFunc(key):
return lambda **kwargs: self._constructFunc(key, **kwargs)
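        # NOTE (descriptive comment, not in the original file): setFunc binds
        # `key` by value; a bare lambda defined inside the loop would close
        # over the loop variable and every generated method would hit the
        # last endpoint in api_table.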
for key in api_table.keys():
self.__dict__[key] = setFunc(key)
# create stash for last call intel
self._last_call = None
def _constructFunc(self, api_call, **kwargs):
# Go through and replace any mustaches that are in our API url.
fn = api_table[api_call]
url = re.sub(
'\{\{(?P<m>[a-zA-Z_]+)\}\}',
# The '1' here catches the API version. Slightly hilarious.
lambda m: "%s" % kwargs.get(m.group(1), '1'),
base_url + fn['url']
)
content = self._request(url, method=fn['method'], params=kwargs)
return content
def _request(self, url, method='GET', params=None, files=None, api_call=None):
'''Internal response generator, no sense in repeating the same
code twice, right? ;)
'''
method = method.lower()
if not method in ('get', 'post'):
raise TwythonError('Method must be of GET or POST')
params = params or {}
func = getattr(self.client, method)
if method == 'get':
response = func(url, params=params)
else:
response = func(url, data=params, files=files)
content = response.content.decode('utf-8')
# create stash for last function intel
self._last_call = {
'api_call': api_call,
'api_error': None,
'cookies': response.cookies,
'error': response.error,
'headers': response.headers,
'status_code': response.status_code,
'url': response.url,
'content': content,
}
try:
content = simplejson.loads(content)
except ValueError:
raise TwythonError('Response was not valid JSON, unable to decode.')
if response.status_code > 304:
            # If there is no error message, use a default.
error_msg = content.get(
'error', 'An error occurred processing your request.')
self._last_call['api_error'] = error_msg
if response.status_code == 420:
exceptionType = TwythonRateLimitError
else:
exceptionType = TwythonError
raise exceptionType(error_msg,
error_code=response.status_code,
retry_after=response.headers.get('retry-after'))
        return content
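# --- Hedged usage sketch (not part of the original file) ---
# Typical authenticated construction, assuming credentials from dev.twitter.com:
#
#     twitter = Twython(app_key=APP_KEY, app_secret=APP_SECRET,
#                       oauth_token=OAUTH_TOKEN,
#                       oauth_token_secret=OAUTH_TOKEN_SECRET)
#
# after which every key in api_table is exposed as a method (e.g.
# twitter.getHomeTimeline(count=5), if that endpoint is in the table).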
|
nhynes/neon
|
tests/test_optimizer.py
|
Python
|
apache-2.0
| 6,823
| 0.001319
|
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Test of the optimizers
'''
import numpy as np
import copy
from neon import NervanaObject
from neon.backends import gen_backend
from neon.optimizers import GradientDescentMomentum, RMSProp, Adadelta, Adam, Adagrad
from neon.optimizers import MultiOptimizer
from neon.layers import Conv, Affine, LSTM, GRU
from neon.initializers import Gaussian, Constant
from neon.transforms import Rectlin, Logistic, Tanh
class DummyLayer(object):
def __init__(self, p):
self.p = p[0]
def get_params(self):
return self.p
def compare_tensors(func, param_list, param2, tol=0., epoch=1):
func.optimize([DummyLayer(param_list)], epoch=epoch)
(param, grad), states = param_list[0]
cond = np.sum(np.abs(param.get() - param2) <= tol)
assert cond == np.prod(param2.shape)
def wrap(x):
be = NervanaObject.be
dtypeu = np.float32
return be.array(dtypeu(x))
def test_gdm(backend_default):
lrate, mom, wdecay = 0.1, 0.9, 0.005
gdm = GradientDescentMomentum(
learning_rate=lrate, momentum_coef=mom, wdecay=wdecay)
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
velocity = states[0]
param2[:] = param2 + velocity * mom - grad2 * lrate - wdecay * lrate * param
param_list = [((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(gdm, param_list, param2, tol=1e-7)
def test_rmsprop(backend_default):
rms = RMSProp()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
state = states[0]
decay = rms.decay_rate
denom = np.sqrt(decay * state + np.square(grad2) * (1.0 - decay) + rms.epsilon) + rms.epsilon
param2[:] -= grad2 * rms.learning_rate / denom
param_list = [((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(rms, param_list, param2, tol=1e-7)
def test_adadelta(backend_default):
ada = Adadelta()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0]),
copy.deepcopy(states[1]),
copy.deepcopy(states[2])]
decay = ada.decay
states2[0][:] = states2[0] * decay + (1. - decay) * grad2 * grad2
states2[2][:] = np.sqrt(
(states2[1] + ada.epsilon) / (states2[0] + ada.epsilon)) * grad2
states2[1][:] = states2[1] * decay + (1. - decay) * states2[2] * states2[2]
param2[:] -= states2[2]
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0]), wrap(states[1]), wrap(states[2])])]
compare_tensors(ada, param_list, param2, tol=1e-7)
def test_adagrad(backend_default):
ada = Adagrad()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0])]
states2[0][:] = states2[0] + np.square(grad2)
denom = np.sqrt(states2[0] + ada.epsilon)
param2[:] -= grad2 * ada.learning_rate / denom
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(ada, param_list, param2, tol=1e-7)
def test_adam(backend_default):
adam = Adam()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0]),
copy.deepcopy(states[1])]
epoch = 1
t = epoch + 1
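    # NOTE (descriptive comment, not in the original file): the lines below
    # reproduce the bias-corrected Adam update the optimizer is expected to
    # compute:
    #   lr_t = lr * sqrt(1 - beta2**t) / (1 - beta1**t)
    #   m <- beta1*m + (1 - beta1)*g;  v <- beta2*v + (1 - beta2)*g**2
    #   param <- param - lr_t * m / (sqrt(v) + eps)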
l = adam.learning_rate * np.sqrt(1 - adam.beta_2 ** t) / (1 - adam.beta_1 ** t)
m, v = states2
m[:] = m * adam.beta_1 + (1. - adam.beta_1) * grad2
v[:] = v * adam.beta_2 + (1. - adam.beta_2) * grad2 * grad2
param2[:] -= l * m / (np.sqrt(v) + adam.epsilon)
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0]), wrap(states[1])])]
compare_tensors(adam, param_list, param2, tol=1e-7, epoch=epoch)
def test_multi_optimizer(backend_default):
opt_gdm = GradientDescentMomentum(
learning_rate=0.001, momentum_coef=0.9, wdecay=0.005)
opt_ada = Adadelta()
opt_adam = Adam()
opt_rms = RMSProp()
opt_rms_1 = RMSProp(gradient_clip_value=5)
init_one = Gaussian(scale=0.01)
l1 = Conv((11, 11, 64), strides=4, padding=3,
init=init_one, bias=Constant(0), activation=Rectlin())
l2 = Affine(nout=4096, init=init_one,
bias=Constant(1), activation=Rectlin())
l3 = LSTM(output_size=1000, init=init_one, activation=Logistic(), gate_activation=Tanh())
l4 = GRU(output_size=100, init=init_one, activation=Logistic(), gate_activation=Tanh())
layers = [l1, l2, l3, l4]
layer_list = []
for layer in layers:
if isinstance(layer, list):
layer_list.extend(layer)
else:
layer_list.append(layer)
opt = MultiOptimizer({'default': opt_gdm,
'Bias': opt_ada,
'Convolution': opt_adam,
'Linear': opt_rms,
'LSTM': opt_rms_1,
'GRU': opt_rms_1})
map_list = opt.map_optimizers(layer_list)
assert map_list[opt_adam][0].__class__.__name__ == 'Convolution'
assert map_list[opt_ada][0].__class__.__name__ == 'Bias'
assert map_list[opt_rms][0].__class__.__name__ == 'Linear'
assert map_list[opt_gdm][0].__class__.__name__ == 'Activation'
assert map_list[opt_rms_1][0].__class__.__name__ == 'LSTM'
assert map_list[opt_rms_1][1].__class__.__name__ == 'GRU'
if __name__ == '__main__':
be = gen_backend(backend='gpu', batch_size=50)
test_multi_optimizer(be)
|
bloyl/mne-python
|
mne/tests/test_transforms.py
|
Python
|
bsd-3-clause
| 21,423
| 0
|
import os
import os.path as op
import pytest
import numpy as np
from numpy.testing import (assert_array_equal, assert_equal, assert_allclose,
assert_array_less, assert_almost_equal)
import itertools
import mne
from mne.datasets import testing
from mne.fixes import _get_img_fdata
from mne import read_trans, write_trans
from mne.io import read_info
from mne.transforms import (invert_transform, _get_trans,
rotation, rotation3d, rotation_angles, _find_trans,
combine_transforms, apply_trans, translation,
get_ras_to_neuromag_trans, _pol_to_cart,
quat_to_rot, rot_to_quat, _angle_between_quats,
_find_vector_rotation, _sph_to_cart, _cart_to_sph,
_topo_to_sph, _average_quats,
_SphericalSurfaceWarp as SphericalSurfaceWarp,
rotation3d_align_z_axis, _read_fs_xfm,
_write_fs_xfm, _quat_real, _fit_matched_points,
_quat_to_euler, _euler_to_quat,
_quat_to_affine, _compute_r2, _validate_pipeline)
from mne.utils import requires_nibabel, requires_dipy
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif')
fname_eve = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw-eve.fif')
subjects_dir = op.join(data_path, 'subjects')
fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz')
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt')
test_fif_fname = op.join(base_dir, 'test_raw.fif')
ctf_fname = op.join(base_dir, 'test_ctf_raw.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
def test_tps():
"""Test TPS warping."""
az = np.linspace(0., 2 * np.pi, 20, endpoint=False)
pol = np.linspace(0, np.pi, 12)[1:-1]
sph = np.array(np.meshgrid(1, az, pol, indexing='ij'))
sph.shape = (3, -1)
assert_equal(sph.shape[1], 200)
    source = _sph_to_cart(sph.T)
destination = source.copy()
destination *= 2
destination[:, 0] += 1
# fit with 100 points
warp = SphericalSurfaceWarp()
assert 'no ' in repr(warp)
warp.fit(source[::3], destination[::2])
assert 'oct5' in repr(warp)
destination_est = warp.transform(source)
assert_allclose(destination_est, destination, atol=1e-3)
@testing.requires_testing_data
def test_get_trans():
"""Test converting '-trans.txt' to '-trans.fif'."""
trans = read_trans(fname)
trans = invert_transform(trans) # starts out as head->MRI, so invert
trans_2 = _get_trans(fname_trans)[0]
assert trans.__eq__(trans_2, atol=1e-5)
@testing.requires_testing_data
def test_io_trans(tmpdir):
"""Test reading and writing of trans files."""
tempdir = str(tmpdir)
os.mkdir(op.join(tempdir, 'sample'))
pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir)
trans0 = read_trans(fname)
fname1 = op.join(tempdir, 'sample', 'test-trans.fif')
trans0.save(fname1)
assert fname1 == _find_trans('sample', subjects_dir=tempdir)
trans1 = read_trans(fname1)
# check all properties
assert trans0 == trans1
# check reading non -trans.fif files
pytest.raises(IOError, read_trans, fname_eve)
# check warning on bad filenames
fname2 = op.join(tempdir, 'trans-test-bad-name.fif')
with pytest.warns(RuntimeWarning, match='-trans.fif'):
write_trans(fname2, trans0)
def test_get_ras_to_neuromag_trans():
"""Test the coordinate transformation from ras to neuromag."""
# create model points in neuromag-like space
rng = np.random.RandomState(0)
anterior = [0, 1, 0]
left = [-1, 0, 0]
right = [.8, 0, 0]
up = [0, 0, 1]
rand_pts = rng.uniform(-1, 1, (3, 3))
pts = np.vstack((anterior, left, right, up, rand_pts))
# change coord system
rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6)
trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
pts_changed = apply_trans(trans, pts)
# transform back into original space
nas, lpa, rpa = pts_changed[:3]
hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa)
pts_restored = apply_trans(hsp_trans, pts_changed)
err = "Neuromag transformation failed"
assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err)
def _cartesian_to_sphere(x, y, z):
"""Convert using old function."""
hypotxy = np.hypot(x, y)
r = np.hypot(hypotxy, z)
elev = np.arctan2(z, hypotxy)
az = np.arctan2(y, x)
return az, elev, r
def _sphere_to_cartesian(theta, phi, r):
"""Convert using old function."""
z = r * np.sin(phi)
rcos_phi = r * np.cos(phi)
x = rcos_phi * np.cos(theta)
y = rcos_phi * np.sin(theta)
return x, y, z
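# NOTE (descriptive comment, not in the original file): the old helpers above
# measure elevation from the xy-plane, while the current _cart_to_sph uses the
# polar angle from +z; hence the `np.pi / 2. - sph_old[1]` adjustment in the
# equivalence tests below.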
def test_sph_to_cart():
"""Test conversion between sphere and cartesian."""
# Simple test, expected value (11, 0, 0)
r, theta, phi = 11., 0., np.pi / 2.
z = r * np.cos(phi)
rsin_phi = r * np.sin(phi)
x = rsin_phi * np.cos(theta)
y = rsin_phi * np.sin(theta)
coord = _sph_to_cart(np.array([[r, theta, phi]]))[0]
assert_allclose(coord, (x, y, z), atol=1e-7)
assert_allclose(coord, (r, 0, 0), atol=1e-7)
rng = np.random.RandomState(0)
# round-trip test
coords = rng.randn(10, 3)
assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5)
# equivalence tests to old versions
for coord in coords:
sph = _cart_to_sph(coord[np.newaxis])
cart = _sph_to_cart(sph)
sph_old = np.array(_cartesian_to_sphere(*coord))
cart_old = _sphere_to_cartesian(*sph_old)
sph_old[1] = np.pi / 2. - sph_old[1] # new convention
assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7)
assert_allclose(cart[0], cart_old, atol=1e-7)
assert_allclose(cart[0], coord, atol=1e-7)
def _polar_to_cartesian(theta, r):
"""Transform polar coordinates to cartesian."""
x = r * np.cos(theta)
y = r * np.sin(theta)
return x, y
def test_polar_to_cartesian():
"""Test helper transform function from polar to cartesian."""
r = 1
theta = np.pi
# expected values are (-1, 0)
x = r * np.cos(theta)
y = r * np.sin(theta)
coord = _pol_to_cart(np.array([[r, theta]]))[0]
# np.pi is an approx since pi is irrational
assert_allclose(coord, (x, y), atol=1e-7)
assert_allclose(coord, (-1, 0), atol=1e-7)
assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7)
rng = np.random.RandomState(0)
r = rng.randn(10)
theta = rng.rand(10) * (2 * np.pi)
polar = np.array((r, theta)).T
assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar],
_pol_to_cart(polar), atol=1e-7)
def _topo_to_phi_theta(theta, radius):
"""Convert using old function."""
sph_phi = (0.5 - radius) * 180
sph_theta = -theta
return sph_phi, sph_theta
def test_topo_to_sph():
"""Test topo to sphere conversion."""
rng = np.random.RandomState(0)
angles = rng.rand(10) * 360
radii = rng.rand(10)
angles[0] = 30
radii[0] = 0.25
# new way
sph = _topo_to_sph(np.array([angles, radii]).T)
new = _sph_to_cart(sph)
new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1]
# old way
for ii, (angle, radius) in enumerate(zip(angles, radii)):
sph_phi, sph_theta = _topo_to_phi_theta(angle, radius)
if ii == 0:
assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30])
azimuth = sph_theta / 180.0 * np.pi
elevation = sph_phi / 180.0 * np.pi
assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation],
atol=1e-7)
r = np.ones_like(radius)
x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
pos = [-y, x, z]
if ii == 0:
expected = np.array([1. / 2., np.sqrt(3) / 2., 1.])
expected /= np.sqrt(2)
assert_allclose(pos, expected, atol=1e-7)
|
BD2KGenomics/brca-website
|
django/users/migrations/0005_myuser_email_me.py
|
Python
|
apache-2.0
| 450
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-08 11:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0004_myuser_is_approved'),
]
operations = [
migrations.AddField(
model_name='myuser',
name='email_me',
field=models.BooleanField(default=True),
),
]
|
mcdaniel67/sympy
|
sympy/solvers/tests/test_inequalities.py
|
Python
|
bsd-3-clause
| 13,455
| 0.001189
|
"""Tests for tools for solving inequalities and systems of inequalities. """
from sympy import (And, Eq, FiniteSet, Ge, Gt, Interval, Le, Lt, Ne, oo,
Or, S, sin, sqrt, Symbol, Union, Integral, Sum,
Function, Poly, PurePoly, pi, root)
from sympy.solvers.inequalities import (reduce_inequalities,
solve_poly_inequality as psolve,
reduce_rational_inequalities,
solve_univariate_inequality as isolve,
reduce_abs_inequality)
from sympy.polys.rootoftools import RootOf
from sympy.solvers.solvers import solve
from sympy.abc import x, y
from sympy.utilities.pytest import raises, slow
inf = oo.evalf()
def test_solve_poly_inequality():
assert psolve(Poly(0, x), '==') == [S.Reals]
assert psolve(Poly(1, x), '==') == [S.EmptySet]
assert psolve(PurePoly(x + 1, x), ">") == [Interval(-1, oo, True, False)]
def test_reduce_poly_inequalities_real_interval():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=False) == \
S.Reals if x.is_real else Interval(-oo, oo)
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Eq(x**2, 1)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
[[Le(x**2, 1)]], x, relational=False) == Interval(-1, 1)
assert reduce_rational_inequalities(
[[Lt(x**2, 1)]], x, relational=False) == Interval(-1, 1, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1)]], x, relational=False) == \
Union(Interval(-oo, -1), Interval(1, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 1)]], x, relational=False) == \
Interval(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 1)]], x, relational=False) == \
FiniteSet(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities([[Eq(
x**2, 1.0)]], x, relational=False) == FiniteSet(-1.0, 1.0).evalf()
assert reduce_rational_inequalities(
[[Le(x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0)
assert reduce_rational_inequalities([[Lt(
x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0), Interval(1.0, inf))
assert reduce_rational_inequalities(
[[Gt(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0, right_open=True),
Interval(1.0, inf, left_open=True))
assert reduce_rational_inequalities([[Ne(
x**2, 1.0)]], x, relational=False) == \
FiniteSet(-1.0, 1.0).complement(S.Reals)
s = sqrt(2)
assert reduce_rational_inequalities([[Lt(
x**2 - 1, 0), Gt(x**2 - 1, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities([[Le(x**2 - 1, 0), Ge(
x**2 - 1, 0)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
        [[Le(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, False), Interval(1, s, False, False))
assert reduce_rational_inequalities(
        [[Le(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, True), Interval(1, s, True, False))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, False), Interval(1, s, False, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(1, s, True, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ne(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(-1, 1, True, True),
Interval(1, s, True, True))
def test_reduce_poly_inequalities_complex_relational():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=True) == False
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=True) == And(Lt(-oo, x), Lt(x, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
for one in (S(1), S(1.0)):
inf = one*oo
assert reduce_rational_inequalities(
[[Eq(x**2, one)]], x, relational=True) == \
Or(Eq(x, -one), Eq(x, one))
assert reduce_rational_inequalities(
[[Le(x**2, one)]], x, relational=True) == \
And(And(Le(-one, x), Le(x, one)))
assert reduce_rational_inequalities(
[[Lt(x**2, one)]], x, relational=True) == \
And(And(Lt(-one, x), Lt(x, one)))
assert reduce_rational_inequalities(
[[Ge(x**2, one)]], x, relational=True) == \
And(Or(And(Le(one, x), Lt(x, inf)), And(Le(x, -one), Lt(-inf, x))))
assert reduce_rational_inequalities(
[[Gt(x**2, one)]], x, relational=True) == \
And(Or(And(Lt(-inf, x), Lt(x, -one)), And(Lt(one, x), Lt(x, inf))))
assert reduce_rational_inequalities(
[[Ne(x**2, one)]], x, relational=True) == \
Or(And(Lt(-inf, x), Lt(x, -one)),
And(Lt(-one, x), Lt(x, one)),
And(Lt(one, x), Lt(x, inf)))
def test_reduce_rational_inequalities_real_relational():
assert reduce_rational_inequalities([], x) == False
assert reduce_rational_inequalities(
[[(x**2 + 3*x + 2)/(x**2 - 16) >= 0]], x, relational=False) == \
Union(Interval.open(-oo, -4), Interval(-2, -1), Interval.open(4, oo))
assert reduce_rational_inequalities(
[[((-2*x - 10)*(3 - x))/((x**2 + 5)*(x - 2)**2) < 0]], x,
relational=False) == \
Union(Interval.open(-5, 2), Interval.open(2, 3))
assert reduce_rational_inequalities([[(x + 1)/(x - 5) <= 0]], x,
relational=False) == \
Interval.Ropen(-1, 5)
assert reduce_rational_inequalities([[(x**2 + 4*x + 3)/(x - 1) > 0]], x,
relational=False) == \
Union(Interval.open(-3, -1), Interval.open(1, oo))
assert reduce_rational_inequalities([[(x**2 - 16)/(x - 1)**2 < 0]], x,
relational=False) == \
Union(Interval.open(-4, 1), Interval.open(1, 4))
assert reduce_rational_inequalities([[(3*x + 1)/(x + 4) >= 1]], x,
relational=False) == \
Union(Interval.open(-oo, -4), Interval.Ropen(S(3)/2, oo))
assert reduce_rational_inequalities([[(x - 8)/x <= 3 - x]], x,
relational=False) == \
Union(Interval.Lopen(-oo, -2), Interval.Lopen(0, 4))
def test_reduce_abs_inequalities():
e = abs(x - 5) < 3
ans = And(Lt(2, x), Lt(x, 8))
assert reduce_inequalities(e) == ans
assert reduce_inequalities(e, x) == ans
assert reduce_inequalities(abs(x - 5)) == Eq(x, 5)
assert reduce_inequalities(
abs(2*x + 3) >= 8) == Or(And(Le(S(5)/2, x), Lt(x, oo)),
And(Le(x, -S(11)/2), Lt(-oo, x)))
assert reduce_inequalities(abs(x -
|
ajitabhpandey/learn-programming
|
python/characterPictureGrid.py
|
Python
|
gpl-2.0
| 597
| 0.0067
|
#!/usr/bin/python
def characterPictureGrid(grid):
for dim1 in range(0, len(grid)):
for dim2 in range(0, len(grid[dim1])):
print grid[dim1][dim2],
print "\n"
grid = [['.', '.', '.', '.', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['O', 'O', 'O', 'O', '.', '.'],
        ['O', 'O', 'O', 'O', 'O', '.'],
['.', 'O', 'O', 'O', 'O', 'O'],
['O', 'O', 'O', 'O', 'O', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['.', '.', '.', '.', '.', '.']]
characterPictureGrid(grid)
|
whitepages/nova
|
nova/tests/functional/api_sample_tests/test_quota_sets.py
|
Python
|
apache-2.0
| 4,046
| 0.001236
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class QuotaSetsSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = "os-quota-sets"
def _get_flags(self):
f = super(QuotaSetsSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.server_group_quotas.'
'Server_group_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.quotas.Quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.extended_quotas.Extended_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.user_quotas.User_quotas')
return f
def test_show_quotas(self):
# Get api sample to show quotas.
response = self._do_get('os-quota-sets/fake_tenant')
self._verify_response('quotas-show-get-resp', {}, response, 200)
def test_show_quotas_defaults(self):
# Get api sample to show quotas def
|
aults.
response = self._do_get
|
('os-quota-sets/fake_tenant/defaults')
self._verify_response('quotas-show-defaults-get-resp',
{}, response, 200)
def test_update_quotas(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-post-req',
{})
self._verify_response('quotas-update-post-resp', {}, response, 200)
def test_delete_quotas(self):
# Get api sample to delete quota.
response = self._do_delete('os-quota-sets/fake_tenant')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_force(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-force-post-req',
{})
return self._verify_response('quotas-update-force-post-resp', {},
response, 200)
def test_show_quotas_for_user(self):
# Get api sample to show quotas for user.
response = self._do_get('os-quota-sets/fake_tenant?user_id=1')
self._verify_response('user-quotas-show-get-resp', {}, response, 200)
def test_delete_quotas_for_user(self):
response = self._do_delete('os-quota-sets/fake_tenant?user_id=1')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_for_user(self):
# Get api sample to update quotas for user.
response = self._do_put('os-quota-sets/fake_tenant?user_id=1',
'user-quotas-update-post-req',
{})
return self._verify_response('user-quotas-update-post-resp', {},
response, 200)
|
bear/parsedatetime
|
tests/TestStartTimeFromSourceTime.py
|
Python
|
apache-2.0
| 2,109
| 0
|
# -*- coding: utf-8 -*-
"""
Test parsing of strings that are phrases with the
ptc.StartTimeFromSourceTime flag set to True
"""
import sys
import time
import datetime
import unittest
import parsedatetime as pdt
from parsedatetime.context import pdtContext
from . import utils
class test(unittest.TestCase):
@utils.assertEqualWithComparator
def assertExpectedResult(self, result, check, **kwargs):
return utils.compareResultByTimeTuplesAndFlags(result, check, **kwargs)
def setUp(self):
self.cal = pdt.Calendar()
self.cal.ptc.StartTimeFromSourceTime = True
(self.yr, self.mth, self.dy, self.hr,
self.mn, self.sec, self.wd, self.yd, self.isdst) = time.localtime()
def testEndOfPhrases(self):
s = datetime.datetime.now()
# find out what month we are currently on
# set the day to 1 and then go back a day
# to get the end of the current month
(yr, mth, dy, hr, mn, sec, _, _, _) = s.timetuple()
s = datetime.datetime(yr, mth, dy, 13, 14, 15)
mth += 1
if mth > 12:
mth = 1
yr += 1
t = datetime.datetime(
yr, mth, 1, 13, 14, 15) + datetime.timedelta(days=-1)
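        # Worked example: if today is 2013-06-17, s = 2013-06-17 13:14:15,
        # mth rolls to 7, and t = 2013-07-01 13:14:15 minus one day,
        # i.e. 2013-06-30 13:14:15 -- the end of the current month.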
start = s.timetuple()
target = t.timetuple()
self.assertExpectedResult(
self.cal.parse('eom', start),
(target, pdtContext(pdtContext.ACU_DAY)))
self.assertExpectedResult(
self.cal.parse('meeting eom', start),
(target, pdtContext(pdtContext.ACU_DAY)))
s = datetime.datetime.now()
(yr, mth, dy, hr, mn, sec, wd, yd, isdst) = s.timetuple()
        s = datetime.datetime(yr, mth, 1, 13, 14, 15)
        t = datetime.datetime(yr, 12, 31, 13, 14, 15)
        start = s.timetuple()
target = t.timetuple()
self.assertExpectedResult(
self.cal.parse('eoy', start),
(target, pdtContext(pdtContext.ACU_MONTH)))
self.assertExpectedResult(
self.cal.parse('meeting eoy', start),
(target, pdtContext(pdtContext.ACU_MONTH)))
|
outlierbio/ob-pipelines
|
ob_pipelines/apps/fastqc/fastqc.py
|
Python
|
apache-2.0
| 1,841
| 0.002173
|
import os.path as op
import logging
import shutil
from subprocess import check_output
from tempfile import mkdtemp
import click
from ob_pipelines.s3 import (
s3, download_file_or_folder, remove_file_or_folder, SCRATCH_DIR, path_to_bucket_and_key
)
logger = logging.getLogger('ob-pipelines')
@click.command()
@click.argument('fq1')
@click.argument('fq2')
@click.argument('out_dir')
@click.argument('name')
def fastqc(fq1, fq2, out_dir, name):
"""Run FastQC"""
    out_dir = out_dir if out_dir.endswith('/') else out_dir + '/'
temp_dir = mkdtemp(dir=SCRATCH_DIR)
fq1_local = op.join(temp_dir, name + '_1.fastq.gz')
fq2_local = op.join(temp_dir, name + '_2.fastq.gz')
if fq1.startswith('s3://'):
# Assume that if fq1 is in S3, so is fq2
download_file_or_folder(fq1, fq1_local)
download_file_or_folder(fq2, fq2_local)
else:
shutil.copy(fq1, fq1_local)
shutil.copy(fq2, fq2_local)
cmd = ['fastqc', '-o', temp_dir, fq1_local, fq2_local]
# Run command and save output
logging.info('Running:\n{}'.format(' '.join(cmd)))
out = check_output(cmd)
logging.info(out.decode())
out_files = [
name + '_1_fastqc.html',
name + '_2_fastqc.html',
name + '_1_fastqc.zip',
name + '_2_fastqc.zip'
]
for fname in out_files:
# Upload temp out directory to S3 with prefix
if out_dir.startswith('s3://'):
bucket, key = path_to_bucket_and_key(out_dir)
local_fpath = op.join(temp_dir, fname)
print('uploading {} to s3://{}/{}{}'.format(local_fpath, bucket, key, fname))
s3.upload_file(local_fpath, bucket, key + fname)
remove_file_or_folder(local_fpath)
else:
shutil.move(temp_dir, out_dir)
if __name__ == '__main__':
fastqc()
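# CLI sketch (arguments are positional: FQ1 FQ2 OUT_DIR NAME; paths hypothetical):
#     python fastqc.py sample_1.fastq.gz sample_2.fastq.gz s3://bucket/qc/ sample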
|
Valeureux/wezer-exchange
|
__TODO__/project_crowdfunding/project_crowdfunding.py
|
Python
|
agpl-3.0
| 1,117
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron and Valeureux
# Copyright 2013 Yannick Buron and Valeureux
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
class ProjectProject(orm.Model):
_name = 'project.project'
_inherit = ['project.project', 'crowdfunding.campaign']
|
LPgenerator/django-db-mailer
|
dbmail/providers/google/android.py
|
Python
|
gpl-2.0
| 1,265
| 0
|
# -*- encoding: utf-8 -*-
try:
from httplib import HTTPSConnection
from urlparse import urlparse
except ImportError:
from http.client import HTTPSConnection
from urllib.parse import urlparse
from json import dumps, loads
from django.conf import settings
class GCMError(Exception):
pass
def send(user, message, **kwargs):
"""
Site: https://developers.google.com
API: https://developers.google.com/cloud-messaging/
Desc: Android notifications
"""
headers = {
"Content-type": "application/json",
"Authorization": "key=" + kwargs.pop("gcm_key", settings.GCM_KEY
|
)
}
hook_url = 'https://android.googleapis.com/gcm/send'
data = {
"registration_ids": [user],
"data": {
"title": kwargs.pop("event"),
'message': message,
}
}
    data['data'].update(kwargs)
up = urlparse(hook_url)
http = HTTPSConnection(up.netloc)
http.request(
"POST", up.path,
headers=headers,
body=dumps(data))
response = http.getresponse()
if response.status != 200:
raise GCMError(response.reason)
body = response.read()
if loads(body).get("failure") > 0:
raise GCMError(repr(body))
return True
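# Minimal usage sketch (assumes settings.GCM_KEY is configured; the device
# registration id and event name below are hypothetical):
#     send('device-registration-id', 'Hello!', event='greeting')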
|
openstack/bifrost
|
bifrost/inventory.py
|
Python
|
apache-2.0
| 10,872
| 0
|
#!/usr/bin/env python3
#
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from oslo_config import cfg
from oslo_log import log
import yaml
try:
import openstack
SDK_LOADED = True
except ImportError:
SDK_LOADED = False
DOCUMENTATION = '''
Bifrost Inventory Module
========================
This is a dynamic inventory module intended to provide a platform for
consistent inventory information for Bifrost.
The inventory supplies two distinct groups by default:
- localhost
- baremetal
The localhost group is required for Bifrost to perform local actions,
such as installing Ironic.
The baremetal group contains the hosts defined by the data source along with
variables extracted from the data source. The variables are defined on a
per-host level which allows explicit actions to be taken based upon the
variables.
It is also possible for users to specify additional per-host groups by
simply setting the host_groups variable in the inventory file. See below for
an example JSON file.
The default group can also be changed by setting the DEFAULT_HOST_GROUPS
variable to contain the desired groups separated by whitespace as follows:
DEFAULT_HOST_GROUPS="foo bar zoo"
In case of provisioning virtual machines, additional per-VM groups can
be set by simply setting the test_vm_groups[$host] variable to a list
of desired groups. Moreover, users can override the default 'baremetal'
group by assigning a list of default groups to the test_vm_default_group
variable.
Presently, the base mode of operation reads a JSON/YAML file in the format
originally utilized by bifrost and returns structured JSON that is
interpreted by Ansible.
Conceivably, this inventory module can be extended to allow for direct
processing of inventory data from other data sources such as a configuration
management database or other inventory data source to provide a consistent
user experience.
How to use?
-----------
export BIFROST_INVENTORY_SOURCE=/tmp/baremetal.[json|yaml]
ansible-playbook playbook.yaml -i inventory/bifrost_inventory.py
One can also just directly invoke bifrost_inventory.py in order to see the
resulting JSON output. This module also has a feature to support the
pass-through of a pre-existing JSON document, which receives updates and
formatting to be supplied to Ansible. Ultimately the use of JSON will be
far more flexible and should be the preferred path forward.
Example JSON Element:
{
"node1": {
"uuid": "a8cb6624-0d9f-c882-affc-046ebb96ec01",
"host_groups": [
"nova",
"neutron"
],
"driver_info": {
"ipmi_target_channel": "0",
"ipmi_username": "ADMIN",
"ipmi_address": "192.168.122.1",
"ipmi_target_address": "0",
"ipmi_password": "undefined",
"ipmi_bridging": "single"
},
"nics": [
{
"mac": "00:01:02:03:04:05"
            },
{
"mac": "00:01:02:03:04:06"
}
],
"driver": "ipmi",
"ipv4_address": "192.168.122.2",
"properties": {
"cpu_arch": "x86_64",
"ram": "3072",
"disk_size": "10",
"cpus": "1"
},
"name": "node1"
}
}
Utilizing ironic as the data source
-----------------------------------
The functionality exists to allow a user to query an existing ironic
installation for the inventory data. This is an advanced feature,
as the node may not have sufficient information to allow for node
deployment or automated testing, unless DHCP reservations are used.
This setting can be invoked by setting the source to "ironic"::
export BIFROST_INVENTORY_SOURCE=ironic
Known Issues
------------
At present, this module only supports inventory list mode and is not
intended to support specific host queries.
'''
LOG = log.getLogger(__name__)
opts = [
cfg.BoolOpt('list',
default=True,
help='List active hosts'),
]
def _parse_config():
config = cfg.ConfigOpts()
log.register_options(config)
config.register_cli_opts(opts)
config(prog='bifrost_inventory.py')
config.set_override('use_stderr', True)
log.set_defaults()
log.setup(config, "bifrost_inventory.py")
return config
def _prepare_inventory():
hostvars = {"127.0.0.1": {"ansible_connection": "local"}}
groups = {}
groups.update({'baremetal': {'hosts': []}})
groups.update({'localhost': {'hosts': ["127.0.0.1"]}})
return (groups, hostvars)
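# For reference, _prepare_inventory() above returns:
#     ({'baremetal': {'hosts': []}, 'localhost': {'hosts': ['127.0.0.1']}},
#      {'127.0.0.1': {'ansible_connection': 'local'}})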
def _process_baremetal_data(data_source, groups, hostvars):
"""Process data through as pre-formatted data"""
with open(data_source, 'rb') as file_object:
try:
file_data = yaml.safe_load(file_object)
except Exception as e:
LOG.error("Failed to parse JSON or YAML: %s", e)
raise Exception("Failed to parse JSON or YAML")
node_names = os.environ.get('BIFROST_NODE_NAMES', None)
if node_names:
node_names = node_names.split(',')
for name in file_data:
if node_names and name not in node_names:
continue
host = file_data[name]
# Perform basic validation
node_net_data = host.get('node_network_data')
ipv4_addr = host.get('ipv4_address')
default_groups = os.environ.get('DEFAULT_HOST_GROUPS',
'baremetal').split()
host['host_groups'] = sorted(list(set(host.get('host_groups', []) +
default_groups)))
if not node_net_data and not ipv4_addr:
host['addressing_mode'] = "dhcp"
else:
host['ansible_ssh_host'] = host['ipv4_address']
if ('provisioning_ipv4_address' not in host and
'addressing_mode' not in host):
host['provisioning_ipv4_address'] = host['ipv4_address']
# Add each host to the values to be returned.
for group in host['host_groups']:
if group not in groups:
groups.update({group: {'hosts': []}})
groups[group]['hosts'].append(host['name'])
hostvars.update({host['name']: host})
return (groups, hostvars)
def _process_sdk(groups, hostvars):
"""Retrieve inventory utilizing OpenStackSDK."""
# NOTE(dtantsur): backward compatibility
if os.environ.get('IRONIC_URL'):
print("WARNING: IRONIC_URL is deprecated, use OS_ENDPOINT")
os.environ['OS_ENDPOINT'] = os.environ['IRONIC_URL']
if os.environ.get('OS_ENDPOINT') and not os.environ.get('OS_AUTH_URL'):
os.environ['OS_AUTH_TYPE'] = None
cloud = openstack.connect()
machines = cloud.list_machines()
node_names = os.environ.get('BIFROST_NODE_NAMES', None)
if node_names:
node_names = node_names.split(',')
for machine in machines:
machine = cloud.get_machine(machine['uuid'])
if machine['name'] is None:
name = machine['uuid']
else:
name = machine['name']
if node_names and name not in node_names:
continue
new_machine = {}
for key, value in machine.items():
            # NOTE(TheJulia): We don't want to pass informational links
# nor do we want to pass links about the ports since they
# are API endpoint URLs.
if key not in ['links', 'ports']:
new_machine[key] = value
# NOTE(TheJulia): Collect network information, enumerate through
# and extract important values, presently MAC address. Once done,
# return the network information to the inventory.
nics = cloud.list_nics_for_machi
|
mivade/cfbrank
|
dataparse.py
|
Python
|
gpl-3.0
| 3,352
| 0.004177
|
"""
cfbrank -- A college football ranking algorithm
dataparse.py: A module for parsing datafiles containing the relevant
statistics for the cfbrank algorithm. See the readme for full details
on the data formats and sources supported.
Written by Michael V. DePalatis <depalatis@gmail.com>
cfbrank is distributed under the terms of the GNU GPL.
"""
import csv
from team import Team
from conference import Conference
ncaa_names = [x.strip() for x in open('data/NCAANames2012.txt', 'r').readlines()]
sun_names = [x.strip() for x in open('data/SunNames2013.txt', 'r').readlines()]
def parseNCAACSV(filename, teamd={}):
"""Parse CSV schedule data file downloadable from the NCAA web
site. Unfortunately, as of week 4 of the 2013 season, the NCAA
schedules do not include scores, so this won't work."""
if not isinstance(teamd, dict):
raise RuntimeError("teamd must be a dictionary!")
datafile = csv.reader(open(filename, 'r'))
for i, row in enumerate(datafile):
if i == 0 or row[5] == '':
continue
school = row[1]
if not teamd.has_key(school):
teamd[school] = Team(school, "", True)
won = int(row[5]) > int(row[6])
opp_name = row[4]
if not teamd.has_key(opp_name):
FBS = opp_name in ncaa_names
teamd[opp_name] = Team(opp_name, "", FBS)
opponent = teamd[opp_name]
#print opp_name
teamd[school].addOpponent(opponent, won)
return teamd
def parseSunCSV(filename, teamd={}):
"""Prase Sunshine Forecast data file."""
if not isinstance(teamd, dict):
raise RuntimeError("teamd must be a dictionary!")
datafile = csv.reader(open(filename, 'r'))
for i, row in enumerate(datafile):
if i == 0 or len(row[2].split()) == 0:
continue
home, away = row[3], row[1]
home_score, away_score = int(row[4]), int(row[2])
## if home == 'Texas' or away == 'Texas':
## print home_score, home, "--", away, away_score
## if home == 'Texas':
## print home_score > away_score
## else:
## print away_score > home_score
for school in [home, away]:
if not teamd.has_key(school):
FBS = school in sun_names
teamd[school] = Team(school, "", FBS)
home_won = home_score > away_score
teamd[home].addOpponent(teamd[away], home_won)
teamd[home].points_for += home_score
teamd[home].points_against += away_score
teamd[away].addOpponent(teamd[home], not home_won)
teamd[away].points_for += away_score
teamd[away].points_against += home_score
return teamd
if __name__ == "__main__":
teamd = {}
parseSunCSV('data/sun4cast_FBS_2013.csv', teamd)
Texas = teamd['Texas']
Bama = teamd['Alabama']
print 'Alabama: %i-%i' % (Bama.wins, Bama.losses)
print 'Texas: %i-%i' % (Texas.wins, Texas.losses)
if True:
print "opponents:"
for opp in Texas.opponents:
print opp.school
rankings = []
for school in teamd.keys():
team = teamd[school]
if team.FBS:
rankings.append([team.getScore(), team.school])
rankings = sorted(rankings)[::-1]
for i in range(25):
print i+1, rankings[i][1], rankings[i][0]
|
cloudControl/django-celery-migration-app
|
minestrone/urls.py
|
Python
|
mit
| 781
| 0.010243
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import minestrone.soup.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'minestrone.views.home', name='home'),
# url(r'^minestrone/', include('minestrone.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^$', minestrone.soup.views.JobsView.as_view()),
url(r'^jobs/$', minestrone.soup.views.JobsView.as_view()),
url(r'^editor/$', minestrone.soup.views.EditorView.as_view()),
)
|
StackStorm/st2
|
st2common/st2common/content/loader.py
|
Python
|
apache-2.0
| 14,094
| 0.001064
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from yaml.parser import ParserError
import six
from oslo_config import cfg
from st2common import log as logging
from st2common.constants.meta import ALLOWED_EXTS
from st2common.constants.meta import PARSER_FUNCS
from st2common.constants.pack import MANIFEST_FILE_NAME
if six.PY2:
from io import open
__all__ = ["ContentPackLoader", "MetaLoader", "OverrideLoader"]
LOG = logging.getLogger(__name__)
class ContentPackLoader(object):
"""
Class for loading pack and pack content information from directories on disk.
"""
# TODO: Rename "get_content" methods since they don't actually return
# content - they just return a path
ALLOWED_CONTENT_TYPES = [
"triggers",
"sensors",
"actions",
"rules",
"aliases",
"policies",
]
def get_packs(self, base_dirs):
"""
Retrieve a list of packs in the provided directories.
:return: Dictionary where the key is pack name and the value is full path to the pack
directory.
:rtype: ``dict``
"""
if not isinstance(base_dirs, list):
raise TypeError(
"The base dirs has a value that is not a list"
f" (was {type(base_dirs)})."
)
result = {}
for base_dir in base_dirs:
if not os.path.isdir(base_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (base_dir))
packs_in_dir = self._get_packs_from_dir(base_dir=base_dir)
result.update(packs_in_dir)
return result
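    # Illustrative call (paths hypothetical): get_packs(['/opt/stackstorm/packs'])
    # returns something like {'linux': '/opt/stackstorm/packs/linux', ...},
    # with one entry per subdirectory that contains MANIFEST_FILE_NAME.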
def get_content(self, base_dirs, content_type):
"""
Retrieve content from the provided directories.
Provided directories are searched from left to right. If a pack with the same name exists
in multiple directories, first pack which is found wins.
:param base_dirs: Directories to look into.
:type base_dirs: ``list``
:param content_type: Content type to look for (sensors, actions, rules).
:type content_type: ``str``
:rtype: ``dict``
"""
if not isinstance(base_dirs, list):
raise TypeError(
"The base dirs has a value that is not a list"
f" (was {type(base_dirs)})."
)
if content_type not in self.ALLOWED_CONTENT_TYPES:
raise ValueError("Unsupported content_type: %s" % (content_type))
content = {}
pack_to_dir_map = {}
for base_dir in base_dirs:
if not os.path.isdir(base_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (base_dir))
dir_content = self._get_content_from_dir(
base_dir=base_dir, content_type=content_type
)
# Check for duplicate packs
for pack_name, pack_content in six.iteritems(dir_content):
if pack_name in content:
pack_dir = pack_to_dir_map[pack_name]
LOG.warning(
'Pack "%s" already found in "%s", ignoring content from "%s"'
% (pack_name, pack_dir, base_dir)
)
else:
content[pack_name] = pack_content
pack_to_dir_map[pack_name] = base_dir
return content
def get_content_from_pack(self, pack_dir, content_type):
"""
Retrieve content from the provided pack directory.
:param pack_dir: Path to the pack directory.
:type pack_dir: ``str``
:param content_type: Content type to look for (sensors, actions, rules).
:type content_type: ``str``
:rtype: ``str``
"""
if content_type not in self.ALLOWED_CONTENT_TYPES:
raise ValueError("Unsupported content_type: %s" % (content_type))
if not os.path.isdir(pack_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir))
content = self._get_content_from_pack_dir(
pack_dir=pack_dir, content_type=content_type
)
return content
def _get_packs_from_dir(self, base_dir):
result = {}
for pack_name in os.listdir(base_dir):
pack_dir = os.path.join(base_dir, pack_name)
pack_manifest_file = os.path.join(pack_dir, MANIFEST_FILE_NAME)
if os.path.isdir(pack_dir) and os.path.isfile(pack_manifest_file):
result[pack_name] = pack_dir
return result
def _get_content_from_dir(self, base_dir, content_type):
content = {}
for pack in os.listdir(base_dir):
# TODO: Use function from util which escapes the name
pack_dir = os.path.join(base_dir, pack)
# Ignore missing or non directories
try:
pack_content = self._get_content_from_pack_dir(
pack_dir=pack_dir, content_type=content_type
)
except ValueError:
continue
else:
content[pack] = pack_content
return content
def _get_content_from_pack_dir(self, pack_dir, content_type):
content_types = dict(
triggers=self._get_triggers,
sensors=self._get_sensors,
actions=self._get_actions,
rules=self._get_rules,
aliases=self._get_aliases,
policies=self._get_policies,
)
get_func = content_types.get(content_type)
if get_func is None:
raise ValueError("Invalid content_type: %s" % (content_type))
if not os.path.isdir(pack_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir))
pack_content = get_func(pack_dir=pack_dir)
return pack_content
def _get_triggers(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="triggers")
def _get_sensors(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="sensors")
def _get_actions(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="actions")
def _get_rules(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="rules")
def _get_aliases(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="aliases")
def _get_policies(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="policies")
def _get_folder(self, pack_dir, content_type):
path = os.path.join(pack_dir, content_type)
if not os.path.isdir(path):
return None
return path
class MetaLoader(object):
"""
    Class for loading and parsing pack and resource metadata files.
"""
def load(self, file_path, expected_type=None):
"""
Loads content from file_path if file_path's extension
is one of allowed ones (See ALLOWED_EXTS).
Throws UnsupportedMetaException on disallowed filetypes.
Throws ValueError on malformed meta.
:param file_path: Absolute path to the file to load content from.
:type file_path: ``str``
        :param expected_type: Expected type for the loaded and parsed content (optional).
:type expected_type: ``object``
:rtype: ``dict``
"""
file_name, file_ext = os.path.splitext(file_path)
if file_ext not in ALLOWED_EXTS:
raise Exception(
"Un
|
nathanshammah/pim
|
setup.py
|
Python
|
mit
| 3,679
| 0.015222
|
#!/usr/bin/env python
"""PIQS: Permutational Invariant Quantum Solver
PIQS is an open-source Python solver to study the exact Lindbladian
dynamics of open quantum systems consisting of identical qubits.
"""
DOCLINES = __doc__.split('\n')
CLASSIFIERS = """\
Development Status :: 3 - Alpha
Intended Audience :: Science/Research
License :: OSI Approved :: BSD License
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering
Operating System :: MacOS
Operating System :: POSIX
Operating System :: Unix
Operating System :: Microsoft :: Windows
"""
import os
import sys
# The following is required to get unit tests up and running.
# If the user doesn't have, then that's OK, we'll just skip unit tests.
try:
from setuptools import setup, Extension
TEST_SUITE = 'nose.collector'
TESTS_REQUIRE = ['nose']
EXTRA_KWARGS = {
'test_suite': TEST_SUITE,
'tests_require': TESTS_REQUIRE
}
except:
from distutils.core import setup
from distutils.extension import Extension
EXTRA_KWARGS = {}
try:
import numpy as np
except:
np = None
from Cython.Build import cythonize
from Cython.Distutils import build_ext
MAJOR = 1
MINOR = 2
MICRO = 0
ISRELEASED = True
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
REQUIRES = ['numpy (>=1.8)', 'scipy (>=0.15)', 'cython (>=0.21)', 'qutip (>=4.2)']
INSTALL_REQUIRES = ['numpy>=1.8', 'scipy>=0.15', 'cython>=0.21', 'qutip>=4.2']
PACKAGES = ['piqs', 'piqs/cy', 'piqs/tests']
PACKAGE_DATA = {
'piqs': ['configspec.ini'],
'piqs/tests': ['*.ini'],
'piqs/cy': ['*.pxi', '*.pxd', '*.pyx'],
}
INCLUDE_DIRS = [np.get_include()] if np is not None else []
NAME = "piqs"
AUTHOR = ("Nathan Shammah, Shahnawaz Ahmed")
AUTHOR_EMAIL = ("nathan.shammah@gmail.com, shahnawaz.ahmed95@gmail.com")
LICENSE = "BSD"
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = "\n".join(DOCLINES[2:])
KEYWORDS = "quantum physics dynamics permutational symmetry invariance"
URL = ""
CLASSIFIERS = [_f for _f in CLASSIFIERS.split('\n') if _f]
PLATFORMS = ["Linux", "Mac OSX", "Unix", "Windows"]
# Add Cython extensions here
cy_exts = ['dicke']
# If on Win and Python version >= 3.5 and not in MSYS2 (i.e. Visual studio compile)
if sys.platform == 'win32' and int(str(sys.version_info[0])+str(sys.version_info[1])) >= 35 and os.environ.get('MSYSTEM') is None:
_compiler_flags = ['/w', '/Ox']
# Everything else
else:
_compiler_flags = ['-w', '-O3', '-march=native', '-funroll-loops']
EXT_MODULES =[]
# Add Cython files from piqs/cy
for ext in cy_exts:
_mod = Extension('piqs.cy.'+ext,
sources = ['piqs/cy/'+ext+'.pyx'],
include_dirs = [np.get_include()],
extra_compile_args=_compiler_flags,
extra_link_args=[],
language='c++')
EXT_MODULES.append(_mod)
# Remove -Wstrict-prototypes from cflags
import distutils.sysconfig
cfg_vars = distutils.sysconfig.get_config_vars()
if "CFLAGS" in cfg_vars:
cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace("-Wstrict-prototypes", "")
# Setup commands go here
setup(
name = NAME,
version = VERSION,
packages = PACKAGES,
include_package_data=True,
include_dirs = INCLUDE_DIRS,
ext_modules = cythonize(EXT_MODULES),
cmdclass = {'build_ext': build_ext},
author = AUTHOR,
author_email = AUTHOR_EMAIL,
license = LICENSE,
description = DESCRIPTION,
long_description = LONG_DESCRIPTION,
keywords = KEYWORDS,
url = URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
requires = REQUIRES,
package_data = PACKAGE_DATA,
zip_safe = False,
install_requires=INSTALL_REQUIRES,
**EXTRA_KWARGS
)
|
nitrotc/tc-bout-board
|
app/tc_bout_board.py
|
Python
|
gpl-3.0
| 27,003
| 0.003444
|
#!/usr/bin/env python
#
# "TC BOUT BOARD" a wrestling Match Bout Board application for youth matches or tournaments
# Copyright (C) 2016 Anthony Cetera
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os import system
from platform import system as platform
from Tkinter import *
import tkMessageBox
# Global Variables
meet = set([])
# Images
team_image = "bb_team_image.gif"
# Initialize bout board row text variables. Global so they can be updated from both classes.
nw_text = []
od_text = []
dd_text = []
ith_text = []
class WrMat:
"""
The WrMat class maintains the bout board data set.
Each object is a mat number and a list of match numbers on that mat
mat_bouts should always be added to the class as a list.
"""
def __init__(self, mat_num, mat_bouts):
self.mat_num = mat_num
self.mat_bouts = mat_bouts
def __str__(self):
return str(self.mat_num) + "\n" + str(self.mat_bouts)
def add_bout(self, bout_num):
self.mat_bouts.append(bout_num)
def finish_bout(self, bout_pos):
self.mat_bouts.pop(bout_pos)
def insert_bout(self, bout_pos, bout_num):
self.mat_bouts.insert(bout_pos, bout_num)
def get_mat_num(self):
return self.mat_num
def get_mat_bouts(self):
return self.mat_bouts
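# Minimal WrMat usage sketch (mat and bout numbers are hypothetical):
#     mat1 = WrMat(1, [101, 102, 103])
#     mat1.finish_bout(0)   # bout 101 comes off the board
#     mat1.add_bout(104)    # bout 104 joins the end of the queue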
def help_about():
"""
Posts version and license information.
"""
tkMessageBox.showinfo("About TC BOUT BOARD", "TC BOUT BOARD v1.0 Copyright (C) 2016 Anthony Cetera\n"
"This program comes with ABSOLUTELY NO WARRANTY;"
" for details click Help --> About\n\n"
"This is free software, and you are welcome to redistribute it"
"under certain conditions; "
"please check the beginning of the source code for license details.")
def get_mat(matnum):
"""
Send in a mat number and get back the WrMat object containing that mat number.
"""
global meet
for eachmat in meet:
if eachmat.get_mat_num() == matnum:
return eachmat
def validate_match_spinbox(value):
"""
Function checks that spinboxes contain integers between 1 and 99.
First I tried making this a method in the Adminwin class but Pycharm complained.
Made it static to avoid Pycharm error - I'm neurotic like that.
"""
try:
intval = int(value)
if 0 < intval < 100:
return True
else:
return False
except ValueError:
return False
def validate_insert_match(value):
"""
Function checks limits the insert to 5 characters.
"""
try:
if len(value) < 6:
return True
else:
return False
except ValueError:
return False
def update_grid(matnum):
"""
StringVars for board grid labels are defined in class Boardwin.
Function sets each of these stringvars based on the contents of the current WrMat match list.
Function must be passed a mat number to update from.
"""
curmat = get_mat(matnum)
matboutlist = curmat.get_mat_bouts()
try:
nw_text[matnum].set(matboutlist[0])
except IndexError:
nw_text[matnum].set("*")
try:
od_text[matnum].set(matboutlist[1])
except IndexError:
od_text[matnum].set("*")
try:
dd_text[matnum].set(matboutlist[2])
except IndexError:
dd_text[matnum].set("*")
try:
ith_text[matnum].set(matboutlist[3])
except IndexError:
ith_text[matnum].set("*")
class Adminwin:
"""
All administrative window functions are defined here.
"""
def __init__(self, master):
# Define the maximum number of mats the application will support
# Update this if you want to try running more than 6 mats. Not tested with integers > 6.
self.maxmats = 6
# Define lists needed to hold each listbox object
# One needed for the mats, mat labels, and mat scrollbars
self.mat = []
self.matlabel = []
self.sbmat = []
# Define variables needed to start the additional board window
self.board_window = None
self.start_board_window = None
# Establish that the bout board isn't running
self.board_running = False
# Define meet setup variables before initializing
self.init_mat_num = None
self.init_mat_optmenu = None
self.init_mat_label = None
self.init_button = None
# Define list to hold spinboxes for match numbers
self.match_num_spin = []
self.match_spinner_label = []
# Init cleanup flag
# This is used to decide if we should cleanup the information frame after initializing the meet set.
self.wipe_mat_optmenu = False
# Set starting rows for mat grid
mat_start_row = 0
mat_button_row = 0
# Deal with initial focus problem on OSX
if platform() == 'Darwin': # How Mac OS X is identified by Python
system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "Python" to true' ''')
# Set up root of parent window as master
self.master = master
self.master.title("BOUT BOARD ADMIN")
# Draw a frame in the grid for a border
self.adminframe = Frame(self.master, bd=6, bg='gray90', relief=SUNKEN)
self.adminframe.grid(column=0, row=0)
self.adminframe.grid_rowconfigure(0, weight=1)
self.adminframe.grid_columnconfigure(0, weight=1)
# Menu block
# Build menu for one time options
self.menubar = Menu(self.master)
self.init_menu = Menu(self.menubar, tearoff=0)
self.init_menu.add_command(label="Setup Meet", command=self.draw_init_dialogs)
self.init_menu.add_command(label="Show Board", command=self.start_board)
        self.menubar.add_cascade(label="Get Started", menu=self.init_menu)
# Quit Menu
self.quit_menu = Menu(self.menubar, tearoff=0)
self.quit_menu.add_command(label="Close Board", command=self.stop_board)
self.quit_menu.add_command(label="Quit!", command=self.adminframe.quit)
self.menubar.add_cascade(label="Quit Menu", menu=self.quit_menu)
# Help Menu
self.help_menu = Menu(self.menubar, tearoff=0)
        self.help_menu.add_command(label="About", command=help_about)
self.menubar.add_cascade(label="Help", menu=self.help_menu)
# Populate the menu bar with options above
self.master.config(menu=self.menubar)
# Build grid of up to 6 potential mats
for i in range(self.maxmats):
matnum = i + 1
matcol = (i % 3) * 2
matlabelrow = ((i // 3) * 2) + mat_start_row
matrow = matlabelrow + 1
scrollcol = matcol + 1
self.matlabel.append(Label(self.adminframe, text="MAT " + str(matnum)))
self.sbmat.append(Scrollbar(self.adminframe, orient=VERTICAL))
self.mat.append(Listbox(self.adminframe, selectmode=SINGLE, yscrollcommand=self.sbmat[i].set))
self.sbmat[i].config(command=self.mat[i].yview)
self.matlabel[i].grid(sticky=(N, W), column=matcol, row=matlabelrow)
self.mat[i].grid(sticky=(N, W), column=matcol, row=matrow)
self.sbmat[i].grid(sticky=(N, S), column=scrollcol,
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_network_watchers_operations.py
|
Python
|
mit
| 105,992
| 0.00551
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkWatchersOperations(object):
"""NetworkWatchersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def create_or_update(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkWatcher"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Creates or updates a network watcher in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the network watcher resource.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkWatcher')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
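    # Minimal call sketch (assumes an authenticated NetworkManagementClient
    # bound to `client`; resource group and watcher names are hypothetical):
    #     watcher = client.network_watchers.create_or_update(
    #         'my-rg', 'my-watcher', {'location': 'westus'})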
def get(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Gets the specified network watcher by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_cod
|
fllamber/show_do_milhao
|
Show_do_milhao/Teste.perguntas.py
|
Python
|
unlicense
| 5,657
| 0.004908
|
# -*- coding: utf-8 -*-
import anydbm
import random
arquivo = anydbm.open('dados' , 'c')
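# Key scheme (inferred from the commented reader at the bottom of this file):
# 'P<q>N<n>' is question q's text at difficulty level n, 'P<q>C<i>' the
# correct answer in option slot i, and 'P<q>R<i>' a wrong answer in slot i.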
arquivo['P1N1'] = 'Longbottom era o sobrenome de quem, nas séries de livros de Harry Potter?'
arquivo['P1C1'] = 'Neville'
arquivo['P1R2'] = 'Hermione'
arquivo['P1R3'] = 'Snape'
arquivo['P1R4'] = 'Dumbledore'
arquivo['P2N1'] = 'Qual Local que hoje se supõe onde foi o Nascimente de Jesus Cristo?'
arquivo['P2R1'] = 'Igreja da Penha'
arquivo['P2C2'] = 'Basílica da Natividade'
arquivo['P2R3'] = 'Natal'
arquivo['P2R4'] = 'Congo'
arquivo['P3N1'] = 'Göpfritz an der Wild é localizado aonde?'
arquivo['P3R1'] = 'Inglaterra'
arquivo['P3R2'] = 'Emirados Árabes'
arquivo['P3C3'] = 'Áustria'
arquivo['P3R4'] = 'Brasil'
arquivo['P4N1'] = 'Complete: Eu me remexo muito, Eu me remexo muito, Eu me remexo...'
arquivo['P4C1'] = 'Muito!'
arquivo['P4R2'] = 'Pouco!'
arquivo['P4R3'] = 'Nem sempre!'
arquivo['P4R4'] = 'Constantemente!'
arquivo['P5N2'] = 'Nofollow É:'
arquivo['P5R1'] = 'Ato de Não seguir no Twitter'
arquivo['P5R2'] = 'Programa usado para não ter seguidores no Twitter'
arquivo['P5R3'] = 'Uma expressão para não ser seguido no Twitter'
arquivo['P5C4'] = 'Um atributo HTML'
arquivo['P6N2'] = 'No Campeonato Sul-Americano de futebol Sub-19 de 1964, foi consagrado campeão:'
arquivo['P6R1'] = 'Paraguai'
arquivo['P6C2'] = 'Uruguai'
arquivo['P6R3'] = 'Argélia'
arquivo['P6R4'] = 'Argentina'
arquivo['P7N2'] = 'No Filme “Indiana Jones No templo da perdição”, as Pedras de Sankara são:'
arquivo['P7R1'] = 'Artefatos Para Abrir um vórtice temporal'
arquivo['P7R2'] = '500KG de cocaína pasteurizada'
arquivo['P7C3'] = 'Pedras místicas dadas pelo deus hindu Shiva'
arquivo['P7R4'] = 'O nome da pistola usada pelo Han Solo'
arquivo['P8N2'] = 'Em Lajes do Pico, nos Açores, encontra-se o povoado de:'
arquivo['P8R1'] = 'Ilha do Manuel'
arquivo['P8R2'] = 'Ilha do Medo'
arquivo['P8C3'] = 'Ribeira do meio'
arquivo['P8R4'] = 'Lajes de Embaixo'
arquivo['P9N2'] = 'No Concurso Miss Mundo 1975, a ganhadora foi:'
arquivo['P9R1'] = 'Um Travesti Maquiado'
arquivo['P9C2'] = 'Wilnelia Merced Cruz'
arquivo['P9R3'] = 'Kaiane Aldorino'
arquivo['P9R4'] = 'Todas ficavam feias em preto-e-branco'
arquivo['P10N3'] = 'Na ciência da computação, o caractere nulo é um caractere da tabela ASCII que:'
arquivo['P10R1'] = 'Representa o forever alone'
arquivo['P10R2'] = 'Foi o primeiro a ser escrito por Charles Baggage'
arquivo['P10C3'] = 'Representa um espaço vazio'
arquivo['P10R4'] = 'Faz o programa ficar corrompido'
arquivo['P11N3'] = 'Kingdom City:'
arquivo['P11C1'] = 'Uma vila no estado americano de missouri'
arquivo['P11R2'] = 'Uma fase do Sonic'
arquivo['P11R3'] = 'Uma fase do Mário'
arquivo['P11R4'] = 'Um local bonito de se ver'
arquivo['P12N3'] = 'Uma tecnologia de proteção digital para CDs e DVDs É:'
arquivo['P12R1'] = 'K.O.N.F.I.A.N.Ç.A'
arquivo['P12C2'] = 'SecuROM'
arquivo['P12R3'] = 'Fita Crepe'
arquivo['P12R4'] = 'SecuTroll'
arquivo['P13N3'] = 'Um Site que é um MEME:'
arquivo['P13R1'] = 'http://www.zosima.com/'
arquivo['P13R2'] = 'http://www.ufrj.com.org'
arquivo['P13R3'] = 'http://www.trolface.com'
arquivo['P13C4'] = 'http://nyan.cat/'
arquivo['P14N3'] = 'Qual desses animais é vertebrado?'
arquivo['P14R1'] = 'Borboleta'
arquivo['P14R2'] = 'Barata'
arquivo['P14C3'] = 'Jacaré'
arquivo['P14R4'] = 'Minhoca'
arquivo['P15N4'] = 'linha 11 do metro de Moscovo também é referida como:'
arquivo['P15R1'] = 'Трусость и образования'
arquivo['P15R2'] = 'Не инвестировать в возобновляемые'
arquivo['P15R3'] = 'В один прекрасный день мы будем вторглись китайские'
arquivo['P15C4'] = 'Linha Kakhovskaia'
arquivo['P16N4'] = 'O Qutb Minar é o minarete de tijolo mais alto do mundo, exemplo de arquitetura:'
arquivo['P16C1'] = 'Indo-islâmica'
arquivo['P16R2'] = 'De alguém que gostava de empilhar tijolos'
arquivo['P16R3'] = 'Dos primos da áfrica'
arquivo['P16R4'] = 'Cimento Mauá, Melhor não há'
arquivo['P17N4'] = 'Jugular é algo pertecente...'
arquivo['P17C1'] = 'À garganta'
arquivo['P17R2'] = 'Aos pés'
arquivo['P17R3'] = 'Ao peito'
arquivo['P17R4'] = 'Ao vampiro'
arquivo['P18N4'] = 'Que outro nome também pode ser chamado uma farmácia:'
arquivo['P18R1'] = 'Farmacomania'
arquivo['P18R2'] = 'Perfumaria'
arquivo['P18R3'] = 'Remedista'
arquivo['P18C4'] = 'Drogaria'
arquivo['P19N4'] = 'Nos quadrinhos, Rorschach é:'
arquivo['P19R1'] = 'Quem vigia os watchman?'
arquivo['P19R2'] = 'Shang Tsung'
arquivo['P19C3'] = 'Walter Kovacs'
arquivo['P19R4'] = 'Doutor Manhattan'
arquivo['P20N5'] = 'Qual o nome da esposa de kaká, que é pastora da igreja renascer?'
arquivo['P20R1'] = 'Bruxa do 71'
arquivo['P20C2'] = 'Caroline Celico'
arquivo['P20R3'] = 'Gata Boralheira'
arquivo['P20R4'] = 'Gaviã Arqueira'
arquivo['P21N5'] = 'O que significa a expresão “Fogo de palha”?'
arquivo['P21R1'] = 'Fogo Forte'
arquivo['P21C2'] = 'Entusiasmo Passageiro'
arquivo['P21R3'] = 'Fúria repentina'
arquivo['P21R4'] = 'Tristeza Profunda'
arquivo['P22N5'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo.close()
#LEITOR DE ENTRADAS (printa no Shell)
##entrada = anydbm.open('dados', 'r')
##for q in range(1 , 21):
## Q = 'P%i' %q
## for j in range (1, 5):
## J = Q + 'N%i' %j
## if entrada.has_key(J):
## print entrada[J]
## S = Q +'R'+'%i' %j
## L = Q +'C'+'%i' %j
## if entrada.has_key(L):
## print entrada[L]
##
## if entrada.has_key(S):
## print entrada[S]
##entrada.close()
|
kwinczek/tvseries
|
tvs/cache.py
|
Python
|
gpl-2.0
| 5,742
| 0.003657
|
#!/usr/bin/env python
#-*- coding: utf8 -*-
# Copyright 2009-2012 Kamil Winczek <kwinczek@gmail.com>
#
# This file is part of series.py.
#
# series.py is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# series.py is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with series.py. If not, see http://www.gnu.org/licenses/.
import contextlib
import sys
import lxml.etree as etree
import shelve
import subprocess
try:
    import urllib2
except ImportError:
    # Python 3: urllib2 was merged into urllib.request
    import urllib.request as urllib2
import time
import tvs.show
# Spinner implementation.
@contextlib.contextmanager
def spinning_distraction(spin):
if spin:
global p
p = subprocess.Popen(['tvs_spin.py'])
yield
p.terminate()
sys.stdout.write("\r")
sys.stdout.flush()
else:
yield
# --------------------------------------------------------------------- #
# #
# Class Cache #
# #
# --------------------------------------------------------------------- #
class Cache(object):
"""
Cache implementation.
    Cache is a wrapper class for the Show class.
It is capable of retrieving and storing data from tvrage.com.
ttl contains date upto when object is valid.
"""
def __init__(self, keyword, options):
self.keyword = keyword
self.show = None
self.options = options
self.now = time.time()
if self.options.cache:
self.c = shelve.open(self.options.cachefile)
self.i = shelve.open(self.options.cacheids)
self.url_search = "http://services.tvrage.com/feeds/search.php?show=%s" % self.keyword
self.showid = self.__get_show_id()
self.url_full_show = "http://services.tvrage.com/feeds/full_show_info.php?sid=%s" % self.showid
self.show = self.__get_show()
if self.options.debug:
print("Search URL: %s" % self.url_search)
print("Shows full URL: %s" % self.url_full_show)
def __del__(self):
"""If cache was used all files need to be closed."""
if self.options.cache:
self.c.close()
self.i.close()
def __save_id_to_cache(self, showid):
"""Saves retrieved show's id to cache"""
self.i[self.keyword] = showid
def __save_show_to_cache(self, show):
if not show:
return False
# Set TTL, add 12h (43200secs) to current time (12h TTL)
self.c[str(self.showid)] = (self.now+43200, show)
return True
def __get_id_from_cache(self):
try:
return self.i[self.keyword]
except:
return None
def __get_id_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return etree.fromstring(urllib2.urlopen(self.url_search).read()).xpath('//Results/show/showid')[0].text
except KeyboardInterrupt:
raise
except:
return None
def __get_show_from_cache(self):
try:
return self.c[str(self.showid)]
except:
return (None, None)
def __get_show_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return tvs.show.Show(etree.fromstring(urllib2.urlopen(self.url_full_show).read()), self.options)
except KeyboardInterrupt:
raise
except:
return None
def __get_show_id(self):
"""Returns first found id from search list. """
# Try to get id from ids cache file
if self.options.cache and not self.options.refresh:
showid = self.__get_id_from_cache()
if not showid:
showid = self.__get_id_from_tvrage()
if showid:
self.__save_id_to_cache(showid)
return showid
return showid
else:
return showid
elif self.options.refresh:
showid = self.__get_id_from_tvrage()
if showid:
self.__save_id_to_cache(showid)
return showid
elif not self.options.cache:
return self.__get_id_from_tvrage()
else:
            showid = self.__get_id_from_tvrage()
            if showid:
                self.__save_id_to_cache(showid)
                return showid
return None
def __get_show(self):
"""Returns show instance with data from tvrage."""
if self.showid == None: # Previously not found show id
return None
if self.options.cache and not self.options.refresh:
ttl, show = self.__get_show_from_cache()
if not ttl and not self.show or ttl < self.now:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
elif self.options.refresh:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
# If no cache to be used.
else:
show = self.__get_show_from_tvrage()
return show
def get_show(self):
return self.show
|
ChinaMassClouds/copenstack-server
|
openstack/src/ceilometer-2014.2.2/ceilometer/hardware/pollsters/cpu.py
|
Python
|
gpl-2.0
| 1,650
| 0
|
#
# Copyright 2013 ZHAW SoE
# Copyright 2014 Intel Corp.
#
# Authors: Lucas Graf <graflu0@students.zhaw.ch>
# Toni Zehnder <zehndton@students.zhaw.ch>
# Lianhao Lu <lianhao.lu@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.hardware import plugin
from ceilometer.hardware.pollsters import util
from ceilometer import sample
class _Base(plugin.HardwarePollster):
CACHE_KEY = 'cpu'
def generate_one_sample(self, host, c_data):
value, metadata, extra = c_data
return util.make_sample_from_host(host,
name=self.IDENTIFIER,
sample_type=sample.TYPE_GAUGE,
unit='process',
volume=value,
res_metadata=metadata,
extra=extra)
class CPULoad1MinPollster(_Base):
IDENTIFIER = 'cpu.load.1min'
class CPULoad5MinPollster(_Base):
IDENTIFIER = 'cpu.load.5min'
class CPULoad15MinPollster(_Base):
IDENTIFIER = 'cpu.load.15min'
|
crhaithcock/RushHour
|
Analytics/shared_code/RHconstants.py
|
Python
|
cc0-1.0
| 1,791
| 0.007259
|
'''
Created on Jun 8, 2015
@author: cliff
'''
CAR_SYMBOLS = ['Q', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L']
TRUCK_SYMBOLS = ['T', 'R', 'W', 'Z']
CAR_COLORS_NO_HASH = ['7FFF00', '7FFFD4', 'D2691E', '8B008B', 'BDB76B',\
'8B0000', 'FF1493', '1E90FF', 'FFD700', 'ADFF2F', \
'CD5C5C', 'F0E68C']
CAR_COLORS_WITH_HASH = ['#'+x for x in CAR_COLORS_NO_HASH]
TRUCK_COLORS_NO_HASH = ['F08080', 'FFA07A', 'FF00FF', '00FA9A']
TRUCK_COLORS_WITH_HASH = ['#'+x for x in TRUCK_COLORS_NO_HASH]
RED_COLOR_WITH_HASH = '#FF0000'
RED_COLOR_NO_HASH = 'FF0000'
RED_SYMBOL = 'X'
BLANK_COLOR_WITH_HASH = "#E6E6E6"
BLANK_COLOR_NO_HASH = "E6E6E6"
# Topology Values
EMPTY = '000'
ONE_CAR = '001'
TWO_CAR = '010'
THREE_CAR = '011'
ONE_TRUCK = '100'
TWO_TRUCK = '110'
ONE_CAR_ONE_TRUCK = '101'
ONE_TRUCK_ONE_CAR = '111'
# relabeling: 2018-08-01
# for numpy implementation, want to use matrix math. Need to contrive values such that
# for z in values, x + y = z if and only if x or y = 0.
BLANK_SPACE = '000'
HORIZONTAL_CAR = '010'
HORIZONTAL_TRUCK = '100'
VERTICAL_CAR = '011'
VERTICAL_TRUCK = '101'
blank = 0
vcar = 4
vtruck = 5
hcar = 6
htruck = 7
# Relabeling These 2017-08-28
# Coding Scheme:
# 3-bits: x y z
# x - orientation (0 = horizontal, 1 = vertical)
# y - Truck Bit (0 = Not Truck, 1 = Truck )
# z - Car Bit (0 = Not Car, 1 = car)
# 000 - Horizontal, Not Car, Not Truck (i.e. Empty Space)
# BLANK_SPACE = '000'
# HORIZONTAL_CAR = '001'
# HORIZONTAL_TRUCK = '010'
# VERTICAL_CAR = '101'
# VERTICAL_TRUCK = '110'
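# Worked decode under that scheme: '110' -> x=1 (vertical), y=1 (truck bit set),
# z=0 (car bit clear), i.e. VERTICAL_TRUCK.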
# Given dependencies throughout the code base. Keeping a copy of pre-2018-08-28 values
#BLANK_SPACE = '000'
#VERTICAL_CAR = '001'
#VERTICAL_TRUCK = '010'
#HORIZONTAL_CAR = '011'
#HORIZONTAL_TRUCK = '100'
|
naturali/tensorflow
|
tensorflow/contrib/framework/python/framework/tensor_util.py
|
Python
|
apache-2.0
| 11,844
| 0.006079
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
__all__ = [
'assert_same_float_dtype',
'assert_scalar_int',
'convert_to_tensor_or_sparse_tensor',
'is_tensor',
'reduce_sum_n',
'with_shape',
'with_same_shape']
def _assert_same_base_type(items, expected_type=None):
r"""Asserts all items are of the same base type.
Args:
items: List of graph items (e.g., `Variable`, `Tensor`, `SparseTensor`,
`Operation`, or `IndexedSlices`). Can include `None` elements, which
will be ignored.
expected_type: Expected type. If not specified, assert all items are
of the same base type.
Returns:
Validated type, or none if neither expected_type nor items provided.
Raises:
ValueError: If any types do not match.
"""
original_item_str = None
for item in items:
if item is not None:
item_type = item.dtype.base_dtype
if not expected_type:
expected_type = item_type
original_item_str = item.name if hasattr(item, 'name') else str(item)
elif expected_type != item_type:
raise ValueError('%s, type=%s, must be of the same type (%s)%s.' % (
item.name if hasattr(item, 'name') else str(item),
item_type, expected_type,
(' as %s' % original_item_str) if original_item_str else ''))
return expected_type
def assert_same_float_dtype(tensors=None, dtype=None):
"""Validate and return float type based on `tensors` and `dtype`.
For ops such as matrix multiplication, inputs and weights must be of the
same float type. This function validates that all `tensors` are the same type,
validates that type is `dtype` (if supplied), and returns the type. Type must
be `dtypes.float32` or `dtypes.float64`. If neither `tensors` nor
`dtype` is supplied, default to `dtypes.float32`.
Args:
tensors: Tensors of input values. Can include `None` elements, which will be
ignored.
dtype: Expected type.
Returns:
Validated type.
Raises:
ValueError: if neither `tensors` nor `dtype` is supplied, or result is not
float.
"""
if tensors:
dtype = _assert_same_base_type(tensors, dtype)
if not dtype:
dtype = dtypes.float32
elif not dtype.is_floating:
raise ValueError('Expected float, got %s.' % dtype)
return dtype
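# Illustrative usage sketch, not part of the original file:
#   t1 = array_ops.ones([2, 2], dtype=dtypes.float32)
#   t2 = array_ops.zeros([2, 2], dtype=dtypes.float32)
#   assert_same_float_dtype([t1, t2])            # -> dtypes.float32
#   assert_same_float_dtype(None, None)          # -> dtypes.float32 (default)
#   assert_same_float_dtype([t1], dtypes.int32)  # raises ValueError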
def assert_scalar_int(tensor):
"""Assert `tensor` is 0-D, of type `tf.int32` or `tf.int64`.
Args:
tensor: Tensor to test.
Returns:
`tensor`, for chaining.
Raises:
ValueError: if `tensor` is not 0-D, of type `tf.int32` or `tf.int64`.
"""
data_type = tensor.dtype
if data_type.base_dtype not in [dtypes.int32, dtypes.int64]:
raise ValueError('Unexpected type %s for %s.' % (data_type, tensor.name))
shape = tensor.get_shape()
if shape.ndims != 0:
raise ValueError('Unexpected shape %s for %s.' % (shape, tensor.name))
return tensor
def reduce_sum_n(tensors, name=None):
"""Reduce tensors to a scalar sum.
This reduces each tensor in `tensors` to a scalar via `tf.reduce_sum`, then
adds them via `tf.add_n`.
Args:
tensors: List of tensors, all of the same numeric type.
name: Tensor name, and scope for all other ops.
Returns:
Total loss tensor, or None if no losses have been configured.
Raises:
    ValueError: if `tensors` is missing or empty.
"""
if not tensors:
raise ValueError('No tensors provided.')
tensors = [math_ops.reduce_sum(t, name='%s/sum' % t.op.name) for t in tensors]
if len(tensors) == 1:
return tensors[0]
with ops.name_scope(name, 'reduce_sum_n', tensors) as scope:
return math_ops.add_n(tensors, name=scope)
def _all_equal(tensor0, tensor1):
with ops.name_scope('all_equal', values=[tensor0, tensor1]) as scope:
return math_ops.reduce_all(
math_ops.equal(tensor0, tensor1, name='equal'), name=scope)
def _is_rank(expected_rank, actual_tensor):
"""Returns whether actual_tensor's rank is expected_rank.
Args:
expected_rank: Integer defining the expected rank, or tensor of same.
actual_tensor: Tensor to test.
Returns:
New tensor.
"""
with ops.name_scope('is_rank', values=[actual_tensor]) as scope:
expected = ops.convert_to_tensor(expected_rank, name='expected')
actual = array_ops.rank(actual_tensor, name='actual')
return math_ops.equal(expected, actual, name=scope)
def _is_shape(expected_shape, actual_tensor, actual_shape=None):
"""Returns whether actual_tensor's shape is expected_shape.
Args:
expected_shape: Integer list defining the expected shape, or tensor of same.
actual_tensor: Tensor to test.
actual_shape: Shape of actual_tensor, if we already have it.
Returns:
New tensor.
"""
with ops.name_scope('is_shape', values=[actual_tensor]) as scope:
is_rank = _is_rank(array_ops.size(expected_shape), actual_tensor)
if actual_shape is None:
actual_shape = array_ops.shape(actual_tensor, name='actual')
shape_equal = _all_equal(
ops.convert_to_tensor(expected_shape, name='expected'),
actual_shape)
return math_ops.logical_and(is_rank, shape_equal, name=scope)
def _assert_shape_op(expected_shape, actual_tensor):
"""Asserts actual_tensor's shape is expected_shape.
Args:
    expected_shape: List of integers defining the expected shape, or tensor of
same.
actual_tensor: Tensor to test.
Returns:
New assert tensor.
"""
with ops.name_scope('assert_shape', values=[actual_tensor]) as scope:
actual_shape = array_ops.shape(actual_tensor, name='actual')
is_shape = _is_shape(expected_shape, actual_tensor, actual_shape)
return control_flow_ops.Assert(
is_shape, [
            'Wrong shape for %s [expected] [actual].' % actual_tensor.name,
expected_shape,
actual_shape
], name=scope)
def with_same_shape(expected_tensor, tensor):
"""Assert tensors are the same shape, from the same graph.
Args:
expected_tensor: Tensor with expected shape.
tensor: Tensor of actual values.
Returns:
Tuple of (actual_tensor, label_tensor), possibly with assert ops added.
"""
with ops.name_scope('%s/' % tensor.op.name, values=[expected_tensor, tensor]):
tensor_shape = expected_tensor.get_shape()
expected_shape = (
tensor_shape.as_list() if tensor_shape.is_fully_defined()
else array_ops.shape(expected_tensor, name='expected_shape'))
return with_shape(expected_shape, tensor)
def is_tensor(x):
"""Check for tensor types.
Check whether an object is a tensor. Equivalent to
`isinstance(x, [tf.Tensor, tf.SparseTensor, tf.Variable])`.
Args:
x: An python object to check.
Returns:
`True` if `x` is a tensor, `False` if not.
"""
tensor_types = (ops.Tensor, ops.SparseTensor, variables.Variable)
return isinstance(x, tensor_types)
def with_shape(expected_shape, tensor):
"""Asserts tensor has expected shape.
If tensor shape and expected_shape, are fully defined, assert they match.
Otherwise, add assert op that will validate the shape when tensor is
evaluated, and set shape on tensor.
Args:
|
cristina0botez/m3u8
|
m3u8/__init__.py
|
Python
|
mit
| 2,171
| 0.006909
|
# coding: utf-8
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.
import sys
PYTHON_MAJOR_VERSION = sys.version_info
import os
import posixpath
try:
import urlparse as url_parser
import urllib2
urlopen = urllib2.urlopen
except ImportError:
import urllib.parse as url_parser
from urllib.request import urlopen as url_opener
urlopen = url_opener
from m3u8.model import M3U8, Playlist, IFramePlaylist, Media, Segment
from m3u8.parser import parse, is_url, ParseError
__all__ = ('M3U8', 'Playlist', 'IFramePlaylist', 'Media',
'Segment', 'loads', 'load', 'parse', 'ParseError')
def loads(content):
'''
    Given a string with M3U8 content, returns an M3U8 object.
Raises ValueError if invalid content
'''
return M3U8(content)
def load(uri):
'''
Retrieves the content from a given URI and returns a M3U8 object.
Raises ValueError if invalid content or IOError if request fails.
'''
if is_url(uri):
return _load_from_uri(uri)
else:
return _load_from_file(uri)
# Support for python3 inspired by https://github.com/szemtiv/m3u8/
def _load_from_uri(uri):
resource = urlopen(uri)
base_uri = _parsed_url(_url_for(uri))
if PYTHON_MAJOR_VERSION < (3,):
content = _read_python2x(resource)
else:
content = _read_python3x(resource)
return M3U8(content, base_uri=base_uri)
def _url_for(uri):
return urlopen(uri).geturl()
def _parsed_url(url):
parsed_url = url_parser.urlparse(url)
prefix = parsed_url.scheme + '://' + parsed_url.netloc
base_path = posixpath.normpath(parsed_url.path + '/..')
return url_parser.urljoin(prefix, base_path)
def _read_python2x(resource):
return resource.read().strip()
def _read_python3x(resource):
return resource.read().decode(resource.headers.get_content_charset(failobj="utf-8"))
def _load_from_file(uri):
with open(uri) as fileobj:
raw_content = fileobj.read().strip()
base_uri = os.path.dirname(uri)
return M3U8(raw_content, base_uri=base_uri)
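# Illustrative usage sketch, not part of the original file; the playlist
# content is made up.
#   playlist = loads('#EXTM3U\n#EXTINF:10,\nsegment0.ts\n#EXT-X-ENDLIST')
#   print([segment.uri for segment in playlist.segments])  # ['segment0.ts']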
|
tofu-rocketry/apel
|
test/test_record.py
|
Python
|
apache-2.0
| 1,067
| 0.004686
|
import unittest
from apel.db.records import Record, InvalidRecordException
class RecordTest(unittest.TestCase):
'''
Test case for Record
'''
# test for public interface
def test_set_field(self):
record = Record()
self.assertRaises(InvalidRecordException,
                          record.set_field, 'Test', 'value')
record._db_fields = ['Test']
record.set_field('Test', 'value')
self.assertEqual(record._record_content['Test'], 'value')
def test_set_all(self):
record = Record()
self.assertRaises(InvalidRecordException,
record.set_all, {'Test':'value'})
record._db_fields = ['Test']
record.set_all({'Test':'value'})
        self.assertEqual(record._record_content['Test'], 'value')
def test_get_field(self):
record = Record()
record._db_fields = ['Test']
record._record_content['Test'] = 'value'
self.assertEqual(record.get_field('Test'), 'value')
if __name__ == '__main__':
unittest.main()
|
tshrinivasan/open-tamil
|
examples/solpattiyal.py
|
Python
|
mit
| 3,639
| 0.016213
|
# -*- coding: utf-8 -*-
#
# This file is distributed under MIT License or default open-tamil license.
# (C) 2013-2015 Muthiah Annamalai
#
# This file is part of 'open-tamil' examples
# It can be used to identify patterns in a Tamil text files;
# e.g. it has been used to identify patterns in Tamil Wikipedia
# articles.
#
from __future__ import print_function
import tamil
import sys
import codecs
from transliterate import *
import re
from functools import cmp_to_key
import operator
PYTHON3 = sys.version[0] > '2'
if not PYTHON3:
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# use generators for better memory footprint -- 04/04/15
class WordFrequency(object):
# get words
@staticmethod
def get_tamil_words_iterable( letters ):
""" given a list of UTF-8 letters section them into words, grouping them at spaces """
#punctuations = u'-,+,/,*,>,<,_,],[,{,},(,)'.split(',')+[',']
#isspace_or_tamil = lambda x: not x in punctuations and tamil.utf8.istamil(x)
# correct algorithm for get-tamil-words
buf = []
for idx,let in enumerate(letters):
if tamil.utf8.istamil( let ):
buf.append( let )
            else:
                if len(buf) > 0:
yield u"".join( buf )
buf = []
if len(buf) > 0:
yield u"".join(buf)
# sentinel
def __init__(self,tatext=u''):
object.__init__(self)
self.frequency = {}
# process data
    def process(self,new_text):
for taline in new_text.split(u"\n"):
self.tamil_words_process( taline )
return
# finalize
def display(self):
self.print_tamil_words( )
return
# processor / core
def tamil_words_process( self, taline ):
taletters = tamil.utf8.get_letters_iterable(taline)
# raw words
#for word in re.split(u"\s+",tatext):
# print(u"-> ",word)
# tamil words only
for pos,word in enumerate(WordFrequency.get_tamil_words_iterable(taletters)):
if len(word) < 1:
continue
self.frequency[word] = 1 + self.frequency.get(word,0)
return
# closer/results
def print_tamil_words(self):
        # sort words by descending order of occurrence
print(u"# unique words = %d"%(len(self.frequency)))
for l in sorted(self.frequency.items(), key=operator.itemgetter(1)):
print( l[0],':',l[1])
print(u"#"*80)
print(u"# sorted in Tamil order")
for l in sorted(self.frequency.keys(), key=cmp_to_key(tamil.utf8.compare_words_lexicographic)):
print( l,':',self.frequency[l])
return
# driver
def demo_tamil_text_filter( file_urls ):
#url = u"../tawiki-20150316-all-titles"
if not type(file_urls) is list:
file_urls = [file_urls]
obj = WordFrequency( )
for filepath in file_urls:
try:
tatext = codecs.open(filepath,'r','utf-8').read()
obj.process(tatext)
except Exception as e:
sys.stderr.write("Skipping the file :"+filepath+" due to exception\n\t\t " + str(e)+"\n")
obj.display()
return obj
if __name__ == u"__main__":
if len(sys.argv) < 2:
print("usage: python solpattiyal.py <filename>")
print(" this command shows list of unique words in Tamil and their frequencies in document(s);")
print(" it also relists the words in the sorted order")
sys.exit(-1)
demo_tamil_text_filter(sys.argv[1:])
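# Illustrative sketch, not part of the original file: WordFrequency can also
# be fed an in-memory string instead of files.
#   wf = WordFrequency()
#   wf.process(u"தமிழ் தமிழ் மொழி")
#   wf.display()   # should report 2 unique words, with counts 2 and 1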
|
ceos-seo/data_cube_utilities
|
dea_tools/dea_tools/spatial.py
|
Python
|
apache-2.0
| 34,337
| 0.009145
|
## dea_spatialtools.py
'''
Description: This file contains a set of python functions for conducting
spatial analyses on Digital Earth Australia data.
License: The code in this notebook is licensed under the Apache License,
Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0). Digital Earth
Australia data is licensed under the Creative Commons by Attribution 4.0
license (https://creativecommons.org/licenses/by/4.0/).
Contact: If you need assistance, please post a question on the Open Data
Cube Slack channel (http://slack.opendatacube.org/) or on the GIS Stack
Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube)
using the `open-data-cube` tag (you can view previously asked questions
here: https://gis.stackexchange.com/questions/tagged/open-data-cube).
If you would like to report an issue with this script, file one on
Github: https://github.com/GeoscienceAustralia/dea-notebooks/issues/new
Functions included:
xr_vectorize
xr_rasterize
subpixel_contours
interpolate_2d
contours_to_array
largest_region
transform_geojson_wgs_to_epsg
zonal_stats_parallel
Last modified: November 2020
'''
# Import required packages
import collections
import numpy as np
import xarray as xr
import geopandas as gpd
import rasterio.features
import scipy.interpolate
from scipy import ndimage as nd
from skimage.measure import label
from rasterstats import zonal_stats
from skimage.measure import find_contours
from datacube.utils.cog import write_cog
from datacube.helpers import write_geotiff
from datacube.utils.geometry import assign_crs
from datacube.utils.geometry import CRS, Geometry
from shapely.geometry import LineString, MultiLineString, shape
def xr_vectorize(da,
attribute_col='attribute',
transform=None,
crs=None,
dtype='float32',
export_shp=False,
verbose=False,
**rasterio_kwargs):
"""
Vectorises a xarray.DataArray into a geopandas.GeoDataFrame.
Parameters
----------
da : xarray dataarray or a numpy ndarray
attribute_col : str, optional
Name of the attribute column in the resulting geodataframe.
Values of the raster object converted to polygons will be
assigned to this column. Defaults to 'attribute'.
transform : affine.Affine object, optional
An affine.Affine object (e.g. `from affine import Affine;
        Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)` giving the
affine transformation used to convert raster coordinates
(e.g. [0, 0]) to geographic coordinates. If none is provided,
the function will attempt to obtain an affine transformation
from the xarray object (e.g. either at `da.transform` or
`da.geobox.transform`).
crs : str or CRS object, optional
An EPSG string giving the coordinate system of the array
(e.g. 'EPSG:3577'). If none is provided, the function will
attempt to extract a CRS from the xarray object's `crs`
attribute.
dtype : str, optional
Data type must be one of int16, int32, uint8, uint16,
or float32
export_shp : Boolean or string path, optional
To export the output vectorised features to a shapefile, supply
        an output path (e.g. 'output_dir/output.shp'). The default is
False, which will not write out a shapefile.
verbose : bool, optional
Print debugging messages. Default False.
**rasterio_kwargs :
A set of keyword arguments to rasterio.features.shapes
Can include `mask` and `connectivity`.
Returns
-------
gdf : Geopandas GeoDataFrame
"""
# Check for a crs object
try:
crs = da.crs
except:
if crs is None:
raise Exception("Please add a `crs` attribute to the "
"xarray.DataArray, or provide a CRS using the "
"function's `crs` parameter (e.g. 'EPSG:3577')")
# Check if transform is provided as a xarray.DataArray method.
# If not, require supplied Affine
if transform is None:
try:
# First, try to take transform info from geobox
transform = da.geobox.transform
# If no geobox
except:
try:
# Try getting transform from 'transform' attribute
transform = da.transform
except:
# If neither of those options work, raise an exception telling the
# user to provide a transform
raise TypeError("Please provide an Affine transform object using the "
"`transform` parameter (e.g. `from affine import "
"Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "
"6886890.0)`")
# Check to see if the input is a numpy array
if type(da) is np.ndarray:
vectors = rasterio.features.shapes(source=da.astype(dtype),
transform=transform,
**rasterio_kwargs)
else:
# Run the vectorizing function
vectors = rasterio.features.shapes(source=da.data.astype(dtype),
transform=transform,
**rasterio_kwargs)
# Convert the generator into a list
vectors = list(vectors)
# Extract the polygon coordinates and values from the list
polygons = [polygon for polygon, value in vectors]
values = [value for polygon, value in vectors]
# Convert polygon coordinates into polygon shapes
polygons = [shape(polygon) for polygon in polygons]
# Create a geopandas dataframe populated with the polygon shapes
gdf = gpd.GeoDataFrame(data={attribute_col: values},
geometry=polygons,
crs={'init': str(crs)})
# If a file path is supplied, export a shapefile
if export_shp:
gdf.to_file(export_shp)
return gdf
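# Illustrative usage sketch, not part of the original file: vectorising a
# boolean water mask, where `water_da` is a hypothetical xarray.DataArray
# carrying geobox/crs information.
#   gdf = xr_vectorize(water_da, attribute_col='water', dtype='int16',
#                      mask=water_da.values == 1)
#   gdf.to_file('water_polygons.shp')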
def xr_rasterize(gdf,
da,
attribute_col=False,
crs=None,
transform=None,
name=None,
x_dim='x',
y_dim='y',
export_tiff=None,
verbose=False,
**rasterio_kwargs):
"""
Rasterizes a geopandas.GeoDataFrame into an xarray.DataArray.
Parameters
----------
gdf : geopandas.GeoDataFrame
A geopandas.GeoDataFrame object containing the vector/shapefile
data you want to rasterise.
da : xarray.DataArray or xarray.Dataset
The shape, coordinates, dimensions, and transform of this object
are used to build the rasterized shapefile. It effectively
provides a template. The attributes of this object are also
appended to the output xarray.DataArray.
attribute_col : string, optional
Name of the attribute column in the geodataframe that the pixels
in the raster will contain. If set to False, output will be a
boolean array of 1's and 0's.
crs : str, optional
CRS metadata to add to the output xarray. e.g. 'epsg:3577'.
The function will attempt get this info from the input
GeoDataFrame first.
transform : affine.Affine object, optional
An affine.Affine object (e.g. `from affine import Affine;
        Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)` giving the
affine transformation used to convert raster coordinates
(e.g. [0, 0]) to geographic coordinates. If none is provided,
the function will attempt to obtain an affine transformation
from the xarray object (e.g. either at `da.transform` or
`da.geobox.transform`).
x_dim : str, optional
An optional string allowing you to override the xarray dimension
used for x coordinates. Defaults to 'x'. Useful, for example,
i
|
nijel/weblate
|
weblate/vcs/tests/test_gpg.py
|
Python
|
gpl-3.0
| 3,459
| 0.000869
|
#
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import subprocess
from distutils.version import LooseVersion
from unittest import SkipTest
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
import weblate.vcs.gpg
from weblate.utils.checks import check_data_writable
from weblate.utils.unittest import tempdir_setting
from weblate.vcs.gpg import (
generate_gpg_key,
get_gpg_key,
get_gpg_public_key,
get_gpg_sign_key,
)
class GPGTest(TestCase):
gpg_error = None
@classmethod
def setUpClass(cls):
"""Check whether we can use gpg."""
super().setUpClass()
try:
result = subprocess.run(
["gpg", "--version"],
check=True,
text=True,
capture_output=True,
)
version = result.stdout.splitlines()[0].strip().rsplit(None, 1)[-1]
if LooseVersion(version) < LooseVersion("2.1"):
cls.gpg_error = "gpg too old"
except (subprocess.CalledProcessError, OSError):
cls.gpg_error = "gpg not found"
def setUp(self):
if self.gpg_error:
raise SkipTest(self.gpg_error)
def check_errors(self):
self.assertEqual(weblate.vcs.gpg.GPG_ERRORS, {})
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_generate(self):
self.assertEqual(check_data_writable(), [])
self.assertIsNone(get_gpg_key(silent=True))
key = generate_gpg_key()
self.check_errors()
self.assertIsNotNone(key)
self.assertEqual(key, get_gpg_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_get(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_sign_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_sign_key())
# Check empty cache
cache.delete("gpg-key-id")
self.assertEqual(key, get_gpg_sign_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_public(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_public_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_public_key())
|
pbfy0/visvis
|
examples/surfaceFromRandomPoints.py
|
Python
|
bsd-3-clause
| 6,186
| 0.023278
|
#!/usr/bin/env python
""" Examples of using qhull via scipy to generate 3D plots in visvis.
Requires numpy ver 1.5, scipy ver 0.9 and qhull from
http://www.qhull.org/ (on Windows this comes with Scipy).
plot3D meshes and plots random convex transformable data in both cartesian
and spherical coordinates
Play around with the many input parameters to plot3D to make interesting plots.
Keith Smith, 4 March 2011
"""
import numpy as np
import scipy as sp
from scipy.spatial import Delaunay
import visvis as vv
def plot3D( vuvi,
coordSys='Cartesian',
raised = True,
depRange=[-40,0],
ambient = 0.9,
diffuse = 0.4,
colormap = vv.CM_JET,
faceShading='smooth',
edgeColor = (0.5,0.5,0.5,1),
edgeShading = 'smooth',
faceColor = (1,1,1,1),
shininess = 50,
specular = 0.35,
emission = 0.45 ):
""" plot3D(vxyz,
coordSys=['Cartesian', 'Spherical'],
raised = True,
depRange=[-40,0], #Note: second range limit not currently used
rangeR=[-40,0],
ambient = 0.9,
diffuse = 0.4,
colormap = vv.CM_JET,
faceShading='smooth',
edgeColor = (0.5,0.5,0.5,1),
edgeShading = 'smooth',
faceColor = (1,1,1,1),
shininess = 50,
specular = 0.35,
emission = 0.45 ))
"""
if coordSys == 'Spherical':
thetaPhiR = vuvi # data cols are theta, phi, radius
vxyz = np.zeros(vuvi.shape)
# Now find xyz data points on unit sphere (for meshing)
vxyz[:,0] = np.sin(thetaPhiR[:,0])*np.cos(thetaPhiR[:,1])
vxyz[:,1] = np.sin(thetaPhiR[:,0])*np.sin(thetaPhiR[:,1])
vxyz[:,2] = np.cos(thetaPhiR[:,0])
#normalize and scale dependent values
thetaPhiR[thetaPhiR[:,2] < depRange[0], 2] = depRange[0]
depVal = thetaPhiR[:,2] - np.min(thetaPhiR[:,2])
else:
vxyz = vuvi
vxyz[vxyz[:,2] < depRange[0], 2] = depRange[0]
numOfPts = np.shape(vxyz)[0]
depVal = vxyz[:,2]
# set to convex surface for meshing
# find center of data
center = np.average(vxyz, 0)
#center data
vxyz = vxyz - center
# find x-y plane distance to each point
radials = np.sqrt(vxyz[:,0]**2 + vxyz[:,1]**2)
# get max and adjust so that arctan ranges between +-45 deg
maxRadial = np.max(radials)/0.7
#get angle on sphere
xi = np.arctan2(radials / maxRadial, 1)
#force z axis data to sphere
vxyz[:,2] = maxRadial * np.cos(xi)
vxyz = np.append(vxyz, [[0.7, 0.7, -0.7],[-0.7, 0.7, -0.7],[0.7, -0.7, -0.7],[-0.7, -0.7, -0.7]], axis=0)
# Send data to convex_hull program qhull
dly = Delaunay(vxyz)
meshIndx = dly.convex_hull
# Check each triangle facet and flip if
# vertex order puts back side out
for index, (I1, I2, I3) in enumerate(meshIndx):
a = vxyz[I1,:] - vxyz[I2,:]
b = vxyz[I2,:] - vxyz[I3,:]
c = np.cross(a, b)
if np.dot(c, vxyz[I2,:]) > 0:
meshIndx[index] = (I1, I3, I2)
# if 3D surface adjust dependent coordinates
if raised:
if coordSys == 'Spherical':
vxyz[:,0] = depVal*np.sin(thetaPhiR[:,0])*np.cos(thetaPhiR[:,1])
vxyz[:,1] = depVal*np.sin(thetaPhiR[:,0])*np.sin(thetaPhiR[:,1])
vxyz[:,2] = depVal*np.cos(thetaPhiR[:,0])
else:
vxyz = vxyz + center
vxyz[:numOfPts,2] = depVal
else:
if coordSys == 'Spherical':
depRange[0] = 1.0
else:
# Since qhull encloses the data with Delaunay triangles there will be
# a set of facets which cover the bottom of the data. For flat
# contours, the bottom facets need to be separated a fraction from
# the top facets else you don't see colormap colors
depValRange = np.max(vxyz[:numOfPts,2]) - np.min(vxyz[:numOfPts,2])
vxyz[:numOfPts,2] = vxyz[:numOfPts,2] / (10 * depValRange )
#normalize depVal for color mapping
dataRange = np.max(depVal) - np.min(depVal)
depVal = (depVal- np.min(depVal)) / dataRange
# Get axes
ax = vv.gca()
ms = vv.Mesh(ax, vxyz, faces=meshIndx, normals=vxyz)
ms.SetValues(np.reshape(depVal,np.size(depVal)))
ms.ambient = ambient
ms.diffuse = diffuse
ms.colormap = colormap
ms.faceShading = faceShading
ms.edgeColor = edgeColor
ms.edgeShading = edgeShading
ms.faceColor = faceColor
ms.shininess = shininess
ms.specular = specular
ms.emission = emission
ax.SetLimits(rangeX=[-depRange[0],depRange[0]],
rangeY=[-depRange[0],depRange[0]],
rangeZ=[-depRange[0], depRange[0]])
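# Illustrative sketch, not part of the original file: a standalone version of
# the facet test used in the flip loop inside plot3D, mirroring its convention
# that a facet with dot(normal, vertex) > 0 is wound inside-out for a hull
# centred near the origin.
def faces_outward(p1, p2, p3):
    """True if triangle (p1, p2, p3) passes the orientation test used above."""
    normal = np.cross(p1 - p2, p2 - p3)
    return np.dot(normal, p2) <= 0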
# Start of test code.
if __name__ == '__main__':
# Create figure
fig = vv.figure()
fig.position.w = 600
# Cartesian plot
numOfPts = 2000
scale = 1
# Create random points
xyz = 2 * scale * (np.random.rand(numOfPts,3) - 0.5)
# 2D sync function
xyz[:,2] = np.sinc(5*(np.sqrt(xyz[:,0]**2 + xyz[:,1]**2)))
#xyz[:,2] = scale - ( xyz[:,0]**2 + xyz[:,1]**2)
# Plot
vv.subplot(121)
vv.title('Cartesian coordinates')
plot3D(xyz, depRange=[-1,0])
    #plot3D(xyz, depRange=[-1,0], raised=False)
    # Spherical plot
numOfPts = 1000
# Create random points
ThetaPhiR = np.zeros((numOfPts,3))
ThetaPhiR[:,0] = np.pi * np.random.rand(numOfPts) # theta is 0 to 180 deg
ThetaPhiR[:,1] = 2 * np.pi * np.random.rand(numOfPts) # phi is 0 to 360 deg
    ThetaPhiR[:,2] = 10 * np.log10((np.sin(ThetaPhiR[:,0])**4) * (np.cos(ThetaPhiR[:,1])**2))
# Plot
vv.subplot(122)
    vv.title('Spherical coordinates')
plot3D(ThetaPhiR, coordSys='Spherical')
#plot3D(ThetaPhiR, coordSys='Spherical', raised=False)
# Run main loop
app = vv.use()
app.Run()
|
Petraea/jsonbot
|
jsb/plugs/socket/dns.py
|
Python
|
mit
| 1,189
| 0.015139
|
# jsb/plugs/socket/dns.py
#
#
""" do a fqdn loopup. """
## jsb imports
from jsb.lib.commands import cmnds
from jsb.lib.examples import examples
## basic imports
from socket import gethostbyname
from socket import getfqdn
import re
## dns command
def handle_dns(bot, event):
""" arguments: <ip>|<hostname> - do a dns lookup. """
    if not event.rest: event.missing("<ip>|<hostname>") ; return
query = event.rest.strip()
ippattern = re.match(r"^([0-9]{1,3}\.){3}[0-9]{1,3}$", query)
hostpattern = re.match(r"(\w+://)?(?P<hostname>\S+\.\w+)", query)
if ippattern:
try:
answer = getfqdn(ippattern.group(0))
event.reply("%(hostname)s is %(answer)s" % {"hostname": query, "answer": answer})
except: event.reply("
|
Couldn't lookup ip")
elif hostpattern:
try:
answer = gethostbyname(hostpattern.group('hostname'))
event.reply("%(ip)s is %(answer)s" % {"ip": query, "answer": answer})
except: event.reply("Couldn't look up the hostname")
else: return
cmnds.add("dns", handle_dns, ["OPER", "USER", "GUEST"])
examples.add("dns", "resolve the ip or the hostname", "dns google.com")
|
TwolDE2/enigma2
|
lib/python/Components/Renderer/CiModuleControl.py
|
Python
|
gpl-2.0
| 2,404
| 0.027454
|
from Components.Renderer.Renderer import Renderer
from enigma import eDVBCI_UI, eLabel, iPlayableService
from skin import parameters
from Components.SystemInfo import SystemInfo
from Components.VariableText import VariableText
from Tools.Hex2strColor import Hex2strColor
from os import popen
class CiModuleControl(Renderer, VariableText):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
self.eDVBCIUIInstance = eDVBCI_UI.getInstance()
self.eDVBCIUIInstance and self.eDVBCIUIInstance.ciStateChanged.get().append(self.ciModuleStateChanged)
self.text = ""
        self.allVisible = False
self.no_visible_state1 = "ciplushelper" in popen("top -n 1").read()
        self.colors = parameters.get("CiModuleControlColors", (0x007F7F7F, 0x00FFFF00, 0x00FFFF00, 0x00FF2525))  # "state 0 (no module) gray", "state 1 (init module) yellow", "state 2 (module ready) green", "state -1 (error) red"
GUI_WIDGET = eLabel
def applySkin(self, desktop, parent):
attribs = self.skinAttributes[:]
for (attrib, value) in self.skinAttributes:
if attrib == "allVisible":
self.allVisible = value == "1"
attribs.remove((attrib, value))
break
self.skinAttributes = attribs
return Renderer.applySkin(self, desktop, parent)
def ciModuleStateChanged(self, slot):
self.changed(True)
def changed(self, what):
if what == True or what[0] == self.CHANGED_SPECIFIC and what[1] == iPlayableService.evStart:
string = ""
NUM_CI = SystemInfo["CommonInterface"]
if NUM_CI and NUM_CI > 0:
if self.eDVBCIUIInstance:
for slot in range(NUM_CI):
state = self.eDVBCIUIInstance.getState(slot)
if state == 1 and self.no_visible_state1:
continue
add_num = True
if string:
string += " "
if state != -1:
if state == 0:
if not self.allVisible:
string += ""
add_num = False
else:
string += Hex2strColor(self.colors[0]) # no module
elif state == 1:
string += Hex2strColor(self.colors[1]) # init module
elif state == 2:
string += Hex2strColor(self.colors[2]) # module ready
else:
if not self.allVisible:
string += ""
add_num = False
else:
string += Hex2strColor(self.colors[3]) # error
if add_num:
string += "%d" % (slot + 1)
if string:
string = _("CI slot: ") + string
self.text = string
|
Rouslan/NTracer
|
lib/ntracer/tests/test.py
|
Python
|
mit
| 16,831
| 0.03149
|
import unittest
import random
import pickle
from ..wrapper import NTracer,CUBE,SPHERE
from ..render import Material,Color
def pydot(a,b):
return sum(ia*ib for ia,ib in zip(a,b))
def and_generic(f):
def inner(self):
with self.subTest(generic=False):
f(self,False)
with self.subTest(generic=True):
f(self,True)
return inner
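# Illustrative sketch, not part of the original file: and_generic runs a test
# once per engine variant, e.g.
#   @and_generic
#   def test_example(self, generic):
#       nt = self.get_ntracer(3, generic)
#       ...
# so failures are reported per subTest(generic=False) / subTest(generic=True).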
def object_equal_method(*attrs):
def inner(self,a,b,msg=None):
if a is not b:
for attr in attrs:
self.assertEqual(getattr(a,attr),getattr(b,attr),msg)
return inner
def rand_vector(nt,lo=-1000,hi=1000):
return nt.Vector([random.uniform(lo,hi) for x in range(nt.dimension)])
def rand_triangle_verts(nt):
points = []
d = nt.dimension
for i in range(d):
points.append(nt.Vector(
[random.uniform(-10,10) for j in range(0,i)] +
[random.uniform(1,10)] +
[0 for j in range(i+1,d)]))
return points
def walk_bounds(n,aabb,nt,f):
f(aabb,n)
if isinstance(n,nt.KDBranch):
walk_bounds(n.left,aabb.left(n.axis,n.split),nt,f)
walk_bounds(n.right,aabb.right(n.axis,n.split),nt,f)
def aabb_intersects(a,b):
return all(a_min <= b_max and a_max >= b_min
for a_min,a_max,b_min,b_max in zip(a.start,a.end,b.start,b.end))
def to_prototype(nt,x):
if isinstance(x,nt.Triangle): return nt.TrianglePrototype(x)
if isinstance(x,nt.TriangleBatch): return nt.TriangleBatchPrototype(x)
# this constructor isn't implemented yet
if isinstance(x,nt.Solid): return nt.SolidPrototype(x)
raise TypeError('x is not a primitive')
class Tests(unittest.TestCase):
def __init__(self,*args,**kwds):
super().__init__(*args,**kwds)
self._nt_cache = set()
self.addTypeEqualityFunc(Material,'_material_equal')
def get_ntracer(self,dimension,generic=False):
r = NTracer(dimension,generic)
if r not in self._nt_cache:
self._nt_cache.add(r)
#self.addTypeEqualityFunc(r.Vector,'_vector_equal')
self.addTypeEqualityFunc(r.base.AABB,'_aabb_equal')
self.addTypeEqualityFunc(r.base.KDBranch,'_kdbranch_equal')
self.addTypeEqualityFunc(r.base.KDLeaf,'listlike_equal')
self.addTypeEqualityFunc(r.base.Triangle,'_triangle_equal')
self.addTypeEqualityFunc(r.base.TriangleBatch,'listlike_equal')
return r
_aabb_equal = object_equal_method('start','end')
_material_equal = object_equal_method('color','opacity','reflectivity','specular_intensity','specular_exp','specular')
_kdbranch_equal = object_equal_method('axis','split','left','right')
def listlike_equal(self,a,b,msg=None):
self.assertEqual(list(a),list(b),msg)
def _triangle_equal(self,a,b,msg=None):
self.assertEqual(a.p1,b.p1,msg)
self.assertEqual(a.face_normal,b.face_normal,msg)
self.assertEqual(list(a.edge_normals),list(b.edge_normals),msg)
self.assertEqual(a.material,b.material,msg)
def vector_almost_equal(self,va,vb):
self.assertEqual(len(va),len(vb))
for a,b in zip(va,vb):
self.assertAlmostEqual(a,b,4)
#def check_kdtree(self,nt,scene):
# prims = set()
# leaf_boundaries = []
# def handler(aabb,node):
# if node is None:
# leaf_boundaries.append((aabb,frozenset()))
# elif isinstance(node,nt.KDLeaf):
# prims.update(to_prototype(nt,p) for p in node)
# leaf_boundaries.append((aabb,frozenset(node)))
# walk_bounds(scene.root,scene.boundary,nt,handler)
# for p in prims:
# for bound,contained in leaf_boundaries:
# self.assertEqual(bound.intersects(p),p.primitive in contained)
def test_simd(self):
d = 64
while d > 4:
nt = self.get_ntracer(d)
            a = nt.Vector(range(d))
            b = nt.Vector(x+12 for x in range(d-1,-1,-1))
self.assertAlmostEqual(nt.dot(a,b),pydot(a,b),4)
d = d >> 1
@and_generic
def test_math(self,generic):
nt = self.get_ntracer(4,generic)
        ma = nt.Matrix([[10,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]])
mb = nt.Matrix([13,6,9,6,7,3,3,13,1,11,12,7,12,15,17,15])
mx = ma * mb
my = nt.Matrix([195,159,200,167,210,245,283,277,342,385,447,441,474,525,611,605])
self.listlike_equal(mx.values,my.values)
self.vector_almost_equal((mb * mb.inverse()).values,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1])
self.vector_almost_equal(nt.Vector(13,2,16,14).unit(),[0.52,0.08,0.64,0.56])
@and_generic
def test_aabb(self,generic):
nt = self.get_ntracer(5,generic)
a = nt.AABB((1,7,-5,5,4),(5,13,-1,6,12))
self.assertEqual(a.dimension,5)
self.listlike_equal(a.end,[5,13,-1,6,12])
self.listlike_equal(a.start,[1,7,-5,5,4])
self.listlike_equal(a.right(2,-3).start,[1,7,-3,5,4])
self.listlike_equal(a.left(0,2).end,[2,13,-1,6,12])
@and_generic
def test_triangle(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
box = nt.AABB((-1,-1,-1),(1,1,1))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(-2.092357,0.1627209,0.9231308),
(0.274588,0.8528936,2.309217),
(-1.212236,1.855952,0.3137006)],mat)))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(2.048058,-3.022543,1.447644),
(1.961913,-0.5438575,-0.1552723),
(0.3618142,-1.684767,0.2162201)],mat)))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(-4.335572,-1.690142,-1.302721),
(0.8976227,0.5090631,4.6815),
(-0.8176082,4.334341,-1.763081)],mat)))
self.assertTrue(box.intersects(nt.TrianglePrototype([
(0,0,0),
(5,5,5),
(1,2,3)],mat)))
self.assertTrue(nt.AABB(
(-0.894424974918,-1.0,-0.850639998913),
(0.0,-0.447214990854,0.850639998913)).intersects(
nt.TrianglePrototype([
(0.0,-1.0,0.0),
(0.723599970341,-0.447214990854,0.525720000267),
(-0.276385009289,-0.447214990854,0.850639998913)],mat)))
points = [rand_triangle_verts(nt) for i in range(nt.BATCH_SIZE)]
max_v = min_v = points[0][0]
for tri in points:
for p in tri:
max_v = [max(a,b) for a,b in zip(max_v,p)]
min_v = [min(a,b) for a,b in zip(min_v,p)]
tbp = nt.TriangleBatchPrototype(nt.TrianglePrototype(tri,mat) for tri in points)
self.vector_almost_equal(tbp.boundary.start,min_v)
self.vector_almost_equal(tbp.boundary.end,max_v)
if nt.BATCH_SIZE == 4:
self.assertTrue(box.intersects(nt.TriangleBatchPrototype([
nt.TrianglePrototype([
(5.8737568855285645,0.0,0.0),
(2.362654209136963,1.4457907676696777,0.0),
(-7.4159417152404785,-2.368093252182007,5.305923938751221)],mat),
nt.TrianglePrototype([
(6.069871425628662,0.0,0.0),
(8.298105239868164,1.4387503862380981,0.0),
(-7.501928806304932,4.3413987159729,5.4995622634887695)],mat),
nt.TrianglePrototype([
(5.153589248657227,0.0,0.0),
(-0.8880055546760559,3.595335006713867,0.0),
(-0.14510761201381683,6.0621466636657715,1.7603594064712524)],mat),
nt.TrianglePrototype([
(1.9743329286575317,0.0,0.0),
(-0.6579152345657349,8.780682563781738,0.0),
(1.0433781147003174,0.5538825988769531,4.187061309814453)],mat)])))
@and_generic
def test_cube(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
box = nt.AABB((-1,-1,-1),(1,1,1))
self.assertFalse(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(1.356136,1.717844,1.57
|
phretor/django-academic
|
academic/apps/people/context_processors.py
|
Python
|
bsd-3-clause
| 168
| 0.005952
|
from academic import settings
def default_picture_url(context):
return {
        'ACADEMIC_PEOPLE_DEFAULT_PICTURE':
        settings.PEOPLE_DEFAULT_PICTURE, }
|
pirata-cat/agora-ciudadana
|
userena/models.py
|
Python
|
agpl-3.0
| 15,318
| 0.002742
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.core.exceptions import ImproperlyConfigured
from django.utils.http import urlquote
from userena.utils import get_gravatar, generate_sha1, get_protocol
from userena.managers import UserenaManager, UserenaBaseProfileManager
from userena import settings as userena_settings
from guardian.shortcuts import get_perms
from guardian.shortcuts import assign
from easy_thumbnails.fields import ThumbnailerImageField
import datetime
import random
import hashlib
PROFILE_PERMISSIONS = (
('view_profile', 'Can view profile'),
)
def upload_to_mugshot(instance, filename):
"""
    Uploads a mugshot for a user to the ``USERENA_MUGSHOT_PATH``, saving it
    under a unique hash for the image. This is for privacy reasons so others
can't just browse through the mugshot directory.
"""
extension = filename.split('.')[-1].lower()
salt, hash = generate_sha1(instance.id)
return '%(path)s%(hash)s.%(extension)s' % {'path': userena_settings.USERENA_MUGSHOT_PATH,
                                               'hash': hash[:10],
                                               'extension': extension}
class UserenaSignup(models.Model):
"""
    Userena model which stores all the necessary information to have a fully
    functional user implementation on your Django website.
"""
user = models.OneToOneField(User,
verbose_name=_('user'),
related_name='userena_signup')
last_active = models.DateTimeField(_('last active'),
blank=True,
null=True,
help_text=_('The last date that the user was active.'))
activation_key = models.CharField(_('activation key'),
max_length=40,
blank=True)
activation_notification_send = models.BooleanField(_('notification send'),
default=False,
help_text=_('Designates whether this user has already got a notification about activating their account.'))
email_unconfirmed = models.EmailField(_('unconfirmed email address'),
blank=True,
help_text=_('Temporary email address when the user requests an email change.'))
email_confirmation_key = models.CharField(_('unconfirmed email verification key'),
max_length=40,
blank=True)
email_confirmation_key_created = models.DateTimeField(_('creation date of email confirmation key'),
blank=True,
null=True)
objects = UserenaManager()
class Meta:
verbose_name = _('userena registration')
verbose_name_plural = _('userena registrations')
def __unicode__(self):
return '%s' % self.user.username
def change_email(self, email):
"""
Changes the email address for a user.
A user needs to verify this new email address before it becomes
active. By storing the new email address in a temporary field --
``temporary_email`` -- we are able to set this email address after the
user has verified it by clicking on the verification URI in the email.
This email gets send out by ``send_verification_email``.
:param email:
The new email address that the user wants to use.
"""
self.email_unconfirmed = email
salt, hash = generate_sha1(self.user.username)
self.email_confirmation_key = hash
self.email_confirmation_key_created = timezone.now()
self.save()
# Send email for activation
self.send_confirmation_email()
def send_confirmation_email(self):
"""
Sends an email to confirm the new email address.
This method sends out two emails. One to the new email address that
        contains the ``email_confirmation_key`` which is used to verify
        this email address with :func:`UserenaUser.objects.confirm_email`.
The other email is to the old email address to let the user know that
a request is made to change this email address.
"""
context= {'user': self.user,
'new_email': self.email_unconfirmed,
'protocol': get_protocol(),
'confirmation_key': self.email_confirmation_key,
'site': Site.objects.get_current()}
# Email to the old address
subject_old = render_to_string('accounts/emails/confirmation_email_subject_old.txt',
context)
subject_old = ''.join(subject_old.splitlines())
message_old = render_to_string('accounts/emails/confirmation_email_message_old.txt',
context)
send_mail(subject_old,
message_old,
settings.DEFAULT_FROM_EMAIL,
[self.user.email])
# Email to the new address
subject_new = render_to_string('accounts/emails/confirmation_email_subject_new.txt',
context)
subject_new = ''.join(subject_new.splitlines())
message_new = render_to_string('accounts/emails/confirmation_email_message_new.txt',
context)
send_mail(subject_new,
message_new,
settings.DEFAULT_FROM_EMAIL,
[self.email_unconfirmed,])
def activation_key_expired(self):
"""
Checks if activation key is expired.
Returns ``True`` when the ``activation_key`` of the user is expired and
``False`` if the key is still valid.
The key is expired when it's set to the value defined in
``USERENA_ACTIVATED`` or ``activation_key_created`` is beyond the
amount of days defined in ``USERENA_ACTIVATION_DAYS``.
"""
expiration_days = datetime.timedelta(days=userena_settings.USERENA_ACTIVATION_DAYS)
expiration_date = self.user.date_joined + expiration_days
if self.activation_key == userena_settings.USERENA_ACTIVATED:
return True
if timezone.now() >= expiration_date:
return True
return False
def send_activation_email(self, auto_join_secret = False):
"""
        Sends an activation email to the user.
        This email is sent when the user wants to activate their newly created
        user.
"""
if not auto_join_secret:
activation_url = reverse('userena_activate', args=(self.user.username, self.activation_key))
else:
if isinstance(auto_join_secret, basestring):
auto_join_key = auto_join_secret
else:
auto_join_key = hashlib.md5(self.activation_key +
settings.AGORA_API_AUTO_ACTIVATION_SECRET).hexdigest()
activation_url = reverse('auto_join_activate', args=(self.user.username, auto_join_key))
context= {'user': self.user,
'protocol': get_protocol(),
'activation_days': userena_settings.USERENA_ACTIVATION_DAYS,
'activation_url': activation_url,
'site': Site.objects.get_current()}
subject = render_to_string('accounts/emails/activation_email_subject.txt',
context)
subject = ''.join(subject.splitlines())
message = render_to_string
|
atrick/swift
|
utils/gyb_syntax_support/NodeSerializationCodes.py
|
Python
|
apache-2.0
| 7,910
| 0
|
from .Node import error
SYNTAX_NODE_SERIALIZATION_CODES = {
# 0 is 'Token'. Needs to be defined manually
# 1 is 'Unknown'. Needs to be defined manually
'UnknownDecl': 2,
'TypealiasDecl': 3,
'AssociatedtypeDecl': 4,
'IfConfigDecl': 5,
'PoundErrorDecl': 6,
'PoundWarningDecl': 7,
'PoundSourceLocation': 8,
'ClassDecl': 9,
'StructDecl': 10,
'ProtocolDecl': 11,
'ExtensionDecl': 12,
'FunctionDecl': 13,
'InitializerDecl': 14,
'DeinitializerDecl': 15,
'SubscriptDecl': 16,
'ImportDecl': 17,
'AccessorDecl': 18,
'VariableDecl': 19,
'EnumCaseDecl': 20,
'EnumDecl': 21,
'OperatorDecl': 22,
'PrecedenceGroupDecl': 23,
'UnknownExpr': 24,
'InOutExpr': 25,
'PoundColumnExpr': 26,
'TryExpr': 27,
'AwaitExpr': 249,
'IdentifierExpr': 28,
'SuperRefExpr': 29,
'NilLiteralExpr': 30,
'DiscardAssignmentExpr': 31,
'AssignmentExpr': 32,
'SequenceExpr': 33,
'PoundLineExpr': 34,
'PoundFileExpr': 35,
'PoundFunctionExpr': 36,
'PoundDsohandleExpr': 37,
    'SymbolicReferenceExpr': 38,
    'PrefixOperatorExpr': 39,
'BinaryOperatorExpr': 40,
'ArrowExpr': 41,
'FloatLiteralExpr': 42,
'TupleExpr': 43,
'ArrayExpr': 44,
'DictionaryExpr': 45,
'ImplicitMemberExpr': 46,
'IntegerLiteralExpr': 47,
'StringLiteralExpr': 48,
'BooleanLiteralExpr': 49,
'TernaryExpr': 50,
'MemberAccessExpr': 51,
'DotSelfExpr': 52,
'IsExpr': 53,
'AsExpr': 54,
'TypeExpr': 55,
    'ClosureExpr': 56,
'UnresolvedPatternExpr': 57,
'FunctionCallExpr': 58,
'SubscriptExpr': 59,
'OptionalChainingExpr': 60,
'ForcedValueExpr': 61,
'PostfixUnaryExpr': 62,
'SpecializeExpr': 63,
'KeyPathExpr': 65,
'KeyPathBaseExpr': 66,
'ObjcKeyPathExpr': 67,
'ObjcSelectorExpr': 68,
'EditorPlaceholderExpr': 69,
'ObjectLiteralExpr': 70,
'UnknownStmt': 71,
'ContinueStmt': 72,
'WhileStmt': 73,
'DeferStmt': 74,
'ExpressionStmt': 75,
'RepeatWhileStmt': 76,
'GuardStmt': 77,
'ForInStmt': 78,
'SwitchStmt': 79,
'DoStmt': 80,
'ReturnStmt': 81,
'FallthroughStmt': 82,
'BreakStmt': 83,
'DeclarationStmt': 84,
'ThrowStmt': 85,
'IfStmt': 86,
'Decl': 87,
'Expr': 88,
'Stmt': 89,
'Type': 90,
'Pattern': 91,
'CodeBlockItem': 92,
'CodeBlock': 93,
'DeclNameArgument': 94,
'DeclNameArguments': 95,
# removed: 'FunctionCallArgument': 96,
'TupleExprElement': 97,
'ArrayElement': 98,
'DictionaryElement': 99,
'ClosureCaptureItem': 100,
'ClosureCaptureSignature': 101,
'ClosureParam': 102,
'ClosureSignature': 103,
'StringSegment': 104,
'ExpressionSegment': 105,
'ObjcNamePiece': 106,
'TypeInitializerClause': 107,
'ParameterClause': 108,
'ReturnClause': 109,
'FunctionSignature': 110,
'IfConfigClause': 111,
'PoundSourceLocationArgs': 112,
'DeclModifier': 113,
'InheritedType': 114,
'TypeInheritanceClause': 115,
'MemberDeclBlock': 116,
'MemberDeclListItem': 117,
'SourceFile': 118,
'InitializerClause': 119,
'FunctionParameter': 120,
'AccessLevelModifier': 121,
'AccessPathComponent': 122,
'AccessorParameter': 123,
'AccessorBlock': 124,
'PatternBinding': 125,
'EnumCaseElement': 126,
'OperatorPrecedenceAndTypes': 127,
'PrecedenceGroupRelation': 128,
'PrecedenceGroupNameElement': 129,
'PrecedenceGroupAssignment': 130,
'PrecedenceGroupAssociativity': 131,
'Attribute': 132,
'LabeledSpecializeEntry': 133,
'ImplementsAttributeArguments': 134,
'ObjCSelectorPiece': 135,
'WhereClause': 136,
'ConditionElement': 137,
'AvailabilityCondition': 138,
'MatchingPatternCondition': 139,
'OptionalBindingCondition': 140,
'ElseIfContinuation': 141,
'ElseBlock': 142,
'SwitchCase': 143,
'SwitchDefaultLabel': 144,
'CaseItem': 145,
'SwitchCaseLabel': 146,
'CatchClause': 147,
'GenericWhereClause': 148,
'SameTypeRequirement': 149,
'GenericParameter': 150,
'GenericParameterClause': 151,
'ConformanceRequirement': 152,
'CompositionTypeElement': 153,
'TupleTypeElement': 154,
'GenericArgument': 155,
'GenericArgumentClause': 156,
'TypeAnnotation': 157,
'TuplePatternElement': 158,
'AvailabilityArgument': 159,
'AvailabilityLabeledArgument': 160,
'AvailabilityVersionRestriction': 161,
'VersionTuple': 162,
'CodeBlockItemList': 163,
# removed: 'FunctionCallArgumentList': 164,
'TupleExprElementList': 165,
'ArrayElementList': 166,
'DictionaryElementList': 167,
'StringLiteralSegments': 168,
'DeclNameArgumentList': 169,
'ExprList': 170,
'ClosureCaptureItemList': 171,
'ClosureParamList': 172,
'ObjcName': 173,
'FunctionParameterList': 174,
'IfConfigClauseList': 175,
'InheritedTypeList': 176,
'MemberDeclList': 177,
'ModifierList': 178,
'AccessPath': 179,
'AccessorList': 180,
'PatternBindingList': 181,
'EnumCaseElementList': 182,
'PrecedenceGroupAttributeList': 183,
'PrecedenceGroupNameList': 184,
'TokenList': 185,
'NonEmptyTokenList': 186,
'AttributeList': 187,
'SpecializeAttributeSpecList': 188,
'ObjCSelector': 189,
'SwitchCaseList': 190,
'CatchClauseList': 191,
'CaseItemList': 192,
'ConditionElementList': 193,
'GenericRequirementList': 194,
'GenericParameterList': 195,
'CompositionTypeElementList': 196,
'TupleTypeElementList': 197,
'GenericArgumentList': 198,
'TuplePatternElementList': 199,
'AvailabilitySpecList': 200,
'UnknownPattern': 201,
'EnumCasePattern': 202,
'IsTypePattern': 203,
'OptionalPattern': 204,
'IdentifierPattern': 205,
'AsTypePattern': 206,
'TuplePattern': 207,
'WildcardPattern': 208,
'ExpressionPattern': 209,
'ValueBindingPattern': 210,
'UnknownType': 211,
'SimpleTypeIdentifier': 212,
'MemberTypeIdentifier': 213,
'ClassRestrictionType': 214,
'ArrayType': 215,
'DictionaryType': 216,
'MetatypeType': 217,
'OptionalType': 218,
'ImplicitlyUnwrappedOptionalType': 219,
'CompositionType': 220,
'TupleType': 221,
'FunctionType': 222,
'AttributedType': 223,
'YieldStmt': 224,
'YieldList': 225,
'IdentifierList': 226,
'NamedAttributeStringArgument': 227,
'DeclName': 228,
'PoundAssertStmt': 229,
'SomeType': 230,
'CustomAttribute': 231,
'GenericRequirement': 232,
'DifferentiableAttributeArguments': 233,
'DifferentiabilityParamsClause': 234,
'DifferentiabilityParams': 235,
'DifferentiabilityParamList': 236,
'DifferentiabilityParam': 237,
# removed: 'DifferentiableAttributeFuncSpecifier': 238,
'FunctionDeclName': 239,
'PoundFilePathExpr': 240,
'DerivativeRegistrationAttributeArguments': 241,
'QualifiedDeclName': 242,
'CatchItem': 243,
'CatchItemList': 244,
'MultipleTrailingClosureElementList': 245,
'MultipleTrailingClosureElement': 246,
'PoundFileIDExpr': 247,
'TargetFunctionEntry': 248,
}
def verify_syntax_node_serialization_codes(nodes, serialization_codes):
# Verify that all nodes have serialization codes
for node in nodes:
if not node.is_base() and node.syntax_kind not in serialization_codes:
error('Node %s has no serialization code' % node.syntax_kind)
# Verify that no serialization code is used twice
used_codes = set()
for serialization_code in serialization_codes.values():
if serialization_code in used_codes:
error("Serialization code %d used twice" % serialization_code)
used_codes.add(serialization_code)
def get_serialization_code(syntax_kind):
return SYNTAX_NODE_SERIALIZATION_CODES[syntax_kind]
|
OCA/margin-analysis
|
account_invoice_margin_sale_delivered_sync/__manifest__.py
|
Python
|
agpl-3.0
| 676
| 0
|
# Copyright 2021 Tecnativa - Sergio Teruel
# License AGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
"name
|
": "Account Invoice Margin Sale Delivered Sync",
"summary": "Sync invoice margin between invoices and sale orders",
"version": "12.0.1.0.1",
"development_status": "Beta",
"maintainers": ["sergio-teruel"],
"category": "Account",
"website": "https://github.com/OCA/margin-analysis",
"author": "Tecnativa, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"sale_margin_delivere
|
d",
"account_invoice_margin_sale",
],
}
|
eccles/lnxproc
|
lnxproc/vmstat.py
|
Python
|
mit
| 3,041
| 0
|
'''
Contains Vmstat() class
Typical contents of vmstat file::
nr_free_pages 1757414
nr_inactive_anon 2604
nr_active_anon 528697
nr_inactive_file 841209
nr_active_file 382447
nr_unevictable 7836
nr_mlock 7837
nr_anon_pages 534070
nr_mapped 76013
nr_file_pages 1228693
nr_dirty 21
nr_writeback 0
nr_slab_reclaimable 511040
nr_slab_unreclaimable 13487
nr_page_table_pages 13920
nr_kernel_stack 809
nr_unstable 0
nr_bounce 0
nr_vmscan_write 0
nr_vmscan_immediate_reclaim 0
nr_writeback_temp 0
nr_isolated_anon 0
nr_isolated_file 0
nr_shmem 3583
nr_dirtied 1034714
nr_written 972154
numa_hit 29109076
numa_miss 0
numa_foreign 0
numa_interleave 11066
numa_local 29109076
numa_other 0
nr_anon_transparent_hugepages 0
nr_dirty_threshold 347004
nr_dirty_background_threshold 173502
pgpgin 6038832
pgpgout 6412006
pswpin 0
pswpout 0
pgalloc_dma 0
pgalloc_dma32 51
pgalloc_normal 30639735
pgalloc_movable 0
pgfree 32398292
pgactivate 2344853
pgdeactivate 1
pgfault 37440670
pgmajfault 3319
pgrefill_dma 0
pgrefill_dma32 0
pgrefill_normal 0
pgrefill_movable 0
    pgsteal_kswapd_dma 0
pgsteal_kswapd_dma32 0
pgsteal_kswapd_normal 0
pgsteal_kswapd_movable 0
pgsteal_direct_dma 0
pgsteal_direct_dma32 0
pgsteal_direct_normal 0
pgsteal_direct_movable 0
pgscan_kswapd_dma 0
pgscan_kswapd_dma32 0
pgscan_kswapd_normal 0
pgscan_kswapd_movable 0
pgscan_direct_dma 0
pgscan_direct_dma32 0
pgscan_direct_normal 0
    pgscan_direct_movable 0
    zone_reclaim_failed 0
pginodesteal 0
slabs_scanned 0
kswapd_inodesteal 0
kswapd_low_wmark_hit_quickly 0
kswapd_high_wmark_hit_quickly 0
kswapd_skip_congestion_wait 0
pageoutrun 1
allocstall 0
pgrotated 23
compact_blocks_moved 0
compact_pages_moved 0
compact_pagemigrate_failed 0
compact_stall 0
compact_fail 0
compact_success 0
htlb_buddy_alloc_success 0
htlb_buddy_alloc_fail 0
unevictable_pgs_culled 8305
unevictable_pgs_scanned 0
unevictable_pgs_rescued 6377
unevictable_pgs_mlocked 15565
unevictable_pgs_munlocked 7197
unevictable_pgs_cleared 0
unevictable_pgs_stranded 0
unevictable_pgs_mlockfreed 0
thp_fault_alloc 0
thp_fault_fallback 0
thp_collapse_alloc 0
thp_collapse_alloc_failed 0
thp_split 0
'''
from logging import getLogger
from os import path as ospath
from .readfile import ReadFile
LOGGER = getLogger(__name__)
class VMstat(ReadFile):
'''
VMstat handling
'''
FILENAME = ospath.join('proc', 'vmstat')
KEY = 'vmstat'
def normalize(self):
'''
Translates data into dictionary
The vmstat file is a number of records keyed on ' ' separator
'''
LOGGER.debug("Normalize")
lines = self.lines
ret = {}
for line in lines:
top, tail = line.split()
ret[top.strip()] = int(tail.strip())
return ret
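# Illustrative sketch, not part of the original file: normalize() maps each
# whitespace-separated "key value" line to a dict entry, e.g.
#   ['nr_free_pages 1757414', 'pgpgin 6038832']
#   -> {'nr_free_pages': 1757414, 'pgpgin': 6038832}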
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/meta.py
|
Python
|
gpl-3.0
| 2,155
| 0.027855
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .pladform import PladformIE
from ..utils import (
unescapeHTML,
int_or_none,
ExtractorError,
)
class METAIE(InfoExtractor):
_VALID_URL = r'https?://video\.meta\.ua/(?:iframe/)?(?P<id>[0-9]+)'
_TESTS = [{
'url': 'http://video.meta.ua/5502115.video',
'md5': '71b6f3ee274bef16f1ab410f7f56b476',
'info_dict': {
'id': '5502115',
'ext': 'mp4',
'title': 'Sony Xperia Z camera test [HQ]',
'description': 'Xperia Z shoots video in FullHD HDR.',
'uploader_id': 'nomobile',
'uploader': 'CHЁZA.TV',
'upload_date': '20130211',
},
'add_ie': ['Youtube'],
}, {
'url': 'http://video.meta.ua/iframe/5502115',
'only_matching': True,
}, {
# pladform embed
'url': 'http://video.meta.ua/7121015.video',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
st_html5 = self._search_regex(
r"st_html5\s*=\s*'#([^']+)'", webpage, 'uppod html5 st', default=None)
if st_html5:
# uppod st decryption algorithm is reverse engineered from function un(s) at uppod.js
            json_str = ''
for i in range(0, len(st_html5), 3):
                json_str += '&#%s;' % st_html5[i:i + 3]
uppod_data = self._parse_json(unescapeHTML(json_str), video_id)
error = uppod_data.get('customnotfound')
if error:
                raise ExtractorError('%s said: %s' % (self.IE_NAME, error), expected=True)
video_url = uppod_data['file']
info = {
'id': video_id,
'url': video_url,
'title': uppod_data.get('comment') or self._og_search_title(webpage),
'description': self._og_search_description(webpage, default=None),
'thumbnail': uppod_data.get('poster') or self._og_search_thumbnail(webpage),
'duration': int_or_none(self._og_search_property(
'video:duration', webpage, default=None)),
}
if 'youtube.com/' in video_url:
info.update({
'_type': 'url_transparent',
'ie_key': 'Youtube',
})
return info
pladform_url = PladformIE._extract_url(webpage)
if pladform_url:
return self.url_result(pladform_url)
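# Standalone sketch of the uppod 'st' decoding above: each 3-digit chunk maps
# to an HTML numeric character reference, which unescaping turns back into a
# character. The digit stream below is made up for illustration, not real data.
#   from html import unescape  # stand-in for youtube_dl's unescapeHTML
#   st = '123034097034058049125'
#   json_str = ''.join('&#%s;' % st[i:i + 3] for i in range(0, len(st), 3))
#   unescape(json_str)  # -> '{"a":1}'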
fabioz/mu-repo | mu_repo/umsgpack_s_conn.py | Python | gpl-3.0 | 15,598 | 0.003526
# Fork: https://github.com/fabioz/u-msgpack-python
#
'''
This module provides a way to do full-duplex communication over a socket with umsgpack_s.
Basic usage is:
# Create our server handler (must handle decoded messages)
class ServerHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
# Some message was received from the client in the server.
if decoded == 'echo':
# Actual implementations may want to put that in a queue and have an additional
# thread to check the queue and handle what was received and send the results back.
self.send('echo back')
def send(self, obj):
# Send a message to the client
self.connection.sendall(self.pack_obj(obj))
# Start the server
server = umsgpack_s_conn.Server(ServerHandler)
server.serve_forever('127.0.0.1', 0, block=True)
port = server.get_port() # Port only available after socket is created
...
On the client side:
class ClientHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
print('Client received: %s' % (decoded,))
client = umsgpack_s_conn.Client('127.0.0.1', port, ClientHandler)
# Note, as above, actual implementations may want to put that in a queue and have an additional
# thread do the actual send.
client.send('echo')
@license: MIT
@author: Fabio Zadrozny
'''
from mu_repo import umsgpack_s
import binascii
import select
import socket
import struct
import sys
import threading
import weakref
try:
basestring
except:
basestring = str
_as_bytes = umsgpack_s._as_bytes
DEBUG = 0 # > 3 to see actual messages
BUFFER_SIZE = 1024 * 8
MAX_INT32 = 2147483647  # (2 ** 31) - 1
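# Hedged sketch of the length-prefixed framing that the struct import and
# MAX_INT32 constant suggest; the header layout ('<I', a little-endian 4-byte
# length) is an assumption here, not code confirmed by this module.
#   def frame(payload):
#       assert len(payload) <= MAX_INT32
#       return struct.pack('<I', len(payload)) + payload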
def get_free_port():
'''
Helper to get free port (usually not needed as the server can receive '0' to connect to a new
port).
'''
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', 0))
_, port = s.getsockname()
s.close()
return port
def wait_for_condition(condition, timeout=2.):
'''
Helper to wait for a condition with a timeout.
    :param callable condition:
        A callable that returns True when the condition has been reached.
    :param float timeout:
        Timeout to reach the condition (in seconds).
    :return bool:
        False if the condition wasn't satisfied before the timeout and True if it was.
'''
import time
initial = time.time()
while not condition():
if time.time() - initial > timeout:
return False
time.sleep(.01)
return True
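# Hypothetical usage of wait_for_condition, polling a flag set by another thread:
#   import threading
#   flag = threading.Event()
#   threading.Timer(0.1, flag.set).start()
#   assert wait_for_condition(flag.is_set, timeout=2.)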
def assert_waited_condition(condition, timeout=2.):
'''
Helper to wait for a condition with a timeout.
:param callable condition:
A callable that returns either a True/False boolean (where True indicates the condition was
reached) or a string (where an empty string means the condition was reached or a non-empty
string to show some message to the user regarding the failure).
    :param float timeout:
        Timeout to reach the condition (in seconds).
'''
import time
initial = time.time()
while True:
c = condition()
if isinstance(c, bool):
if c:
return
elif isinstance(c, basestring):
if not c:
return
else:
raise AssertionError('Expecting bool or string as the return.')
if time.time() - initial > timeout:
raise AssertionError(
u'Could not reach condition before timeout: %s (condition return: %s)' %
(timeout, c))
time.sleep(.01)
class Server(object):
def __init__(self, connection_handler_class=None, params=(), thread_name='', thread_class=None):
if thread_class is None:
thread_class = threading.Thread
self._thread_class = thread_class
if connection_handler_class is None:
connection_handler_class = EchoHandler
self.connection_handler_class = connection_handler_class
self._params = params
self._block = None
        self._shutdown_event = threading.Event()
self._thread_name = thread_name
def serve_forever(self, host, port, block=False):
if self._block is not None:
raise AssertionError(
'Server already started. Please create new one instead of trying to reuse.')
if not block:
self.thread = self._thread_class(target=self._serve_forever, args=(host, port))
self.thread.setDaemon(True)
if self._thread_name:
self.thread.setName(self._thread_name)
self.thread.start()
else:
self._serve_forever(host, port)
self._block = block
def is_alive(self):
if self._block is None:
return False
sock = getattr(self, '_sock', None)
return sock is not None
def get_port(self):
'''
Note: only available after socket is already connected. Raises AssertionError if it's not
connected at this point.
'''
wait_for_condition(lambda: hasattr(self, '_sock'), timeout=5.0)
return self._sock.getsockname()[1]
def shutdown(self):
if DEBUG:
sys.stderr.write('Shutting down server.\n')
self._shutdown_event.set()
sock = getattr(self, '_sock', None)
if sock is not None:
self._sock = None
try:
sock.shutdown(socket.SHUT_RDWR)
except:
pass
try:
sock.close()
except:
pass
def after_bind_socket(self, host, port):
'''
Clients may override to do something after the host/port is bound.
'''
def _serve_forever(self, host, port):
if DEBUG:
sys.stderr.write('Listening at: %s (%s)\n' % (host, port))
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# We should cleanly call shutdown, but just in case let's set to reuse the address.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(5) # Request queue size
self._sock = sock
self.after_bind_socket(host, self.get_port())
connections = []
try:
while not self._shutdown_event.is_set():
sock = self._sock
if sock is None:
break
# Will block until available (no timeout). If closed returns properly.
try:
fd_sets = select.select([sock], [], [])
except:
break # error: (9, 'Bad file descriptor')
if DEBUG:
sys.stderr.write('Select returned: %s\n' % fd_sets[0])
if self._shutdown_event.is_set():
break
sock = self._sock
if sock is None:
break
if fd_sets[0]:
connection, _client_address = sock.accept()
if DEBUG:
sys.stderr.write('Accepted socket.\n')
try:
connection_handler = self.connection_handler_class(
connection,
*self._params)
connections.append(weakref.ref(connection))
connection_handler.start()
except:
import traceback
traceback.print_exc()
finally:
if DEBUG:
sys.stderr.write('Exited _serve_forever.\n')
for c in connections:
c = c()
if c is not None:
try:
c.shutdown(socket.SHUT_RDWR)
                    except:
                        pass
repotvsupertuga/repo | plugin.video.pancas/resources/lib/libraries/control.py | Python | gpl-2.0 | 9,795 | 0.00827
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os,xbmc,xbmcaddon,xbmcplugin,xbmcgui,xbmcvfs
import base64, jsunpack
import random, time
tmdb_key = jsunpack.jsunpack_keys()
tvdb_key = base64.urlsafe_b64decode('MUQ2MkYyRjkwMDMwQzQ0NA==')
fanarttv_key = base64.urlsafe_b64decode('YTc4YzhmZWRjN2U3NTE1MjRkMzkyNmNhMmQyOTU3OTg=')
trakt_key = base64.urlsafe_b64decode('NDFjYzI1NjY5Y2Y2OTc0NTg4ZjA0MTMxYjcyZjc4MjEwMzdjY2I1ZTdlMjMzNDVjN2MxZTk3NGI4MGI5ZjI1NQ==')
trakt_secret = base64.urlsafe_b64decode('Y2I4OWExYTViN2ZlYmJiMDM2NmQ3Y2EyNzJjZDc4YTU5MWQ1ODI2Y2UyMTQ1NWVmYzE1ZDliYzQ1ZWNjY2QyZQ==')
scriptID = 'plugin.video.pancas'
##scriptIDMedia = 'script.pancas.media'
ptv = xbmcaddon.Addon(scriptID)
lang = xbmcaddon.Addon().getLocalizedString
setting = xbmcaddon.Addon().getSetting
addon = xbmcaddon.Addon
addItem = xbmcplugin.addDirectoryItem
item = xbmcgui.ListItem
directory = xbmcplugin.endOfDirectory
content = xbmcplugin.setContent
property = xbmcplugin.setProperty
addonInfo = xbmcaddon.Addon().getAddonInfo
##addonInfoMedia = xbmcaddon.Addon(scriptIDMedia).getAddonInfo
infoLabel = xbmc.getInfoLabel
condVisibility = xbmc.getCondVisibility
jsonrpc = xbmc.executeJSONRPC
window = xbmcgui.Window(10000)
dialog = xbmcgui.Dialog()
progressDialog = xbmcgui.DialogProgress()
windowDialog = xbmcgui.WindowDialog()
button = xbmcgui.ControlButton
image = xbmcgui.ControlImage
keyboard = xbmc.Keyboard
sleep = xbmc.sleep
execute = xbmc.executebuiltin
skin = xbmc.getSkinDir()
player = xbmc.Player()
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
resolve = xbmcplugin.setResolvedUrl
openFile = xbmcvfs.File
makeFile = xbmcvfs.mkdir
deleteFile = xbmcvfs.delete
listDir = xbmcvfs.listdir
transPath = xbmc.translatePath
skinPath = xbmc.translatePath('special://skin/')
addonPath = xbmc.translatePath(addonInfo('path'))
##addonPathMedia = xbmc.translatePath(addonInfoMedia('path'))
dataPath = xbmc.translatePath(addonInfo('profile')).decode('utf-8')
settingsFile = os.path.join(dataPath, 'settings.xml')
databaseFile = os.path.join(dataPath, 'settings.db')
favouritesFile = os.path.join(dataPath, 'favourites.db')
sourcescacheFile = os.path.join(dataPath, 'sources.db')
sourcescachedUrl = os.path.join(dataPath, 'sourcesurl.db')
cachemetaFile = os.path.join(dataPath, 'metacache.db')
libcacheFile = os.path.join(dataPath, 'library.db')
metacacheFile = os.path.join(dataPath, 'meta.db')
cacheFile = os.path.join(dataPath, 'cache.db')
cookieDir = os.path.join(dataPath, 'Cookies')
progressDialogBG = xbmcgui.DialogProgressBG()
info_lang = xbmc.getLanguage(xbmc.ISO_639_1)
try:
makeFile(cookieDir)
except:
pass
def addonIcon():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return addonInfo('icon')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'icon.png')
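    # NOTE: addonPathMedia is commented out above (##addonPathMedia), so this
    # else branch, and the same branch in the helpers below, would raise a
    # NameError if a non-default 'appearance' setting is configured.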
def addonPoster():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultVideo.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'poster.png')
def addonBanner():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultVideo.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'banner.png')
def addonThumb():
appearance = setting('appearance').lower()
if appearance == '-': return 'DefaultFolder.png'
elif appearance == '': return addonInfo('icon')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'icon.png')
def addonFanart():
appearance = setting('appearance').lower()
if appearance == '-': return None
elif appearance == '': return addonInfo('fanart')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'fanart.jpg')
def addonNext():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultFolderBack.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'next.jpg')
def artPath():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return None
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance)
def infoDialog(message, heading=addonInfo('name'), icon=addonIcon(), time=3000):
try: dialog.notification(heading, message, icon, time, sound=False)
except: execute("Notification(%s,%s, %s, %s)" % (heading, message, time, icon))
def yesnoDialog(line1, line2, line3, heading=addonInfo('name'), nolabel='', yeslabel=''):
return dialog.yesno(heading, line1, line2, line3, nolabel, yeslabel)
def selectDialog(list, heading=addonInfo('name')):
return dialog.select(heading, list)
def version():
num = ''
try: version = addon('xbmc.addon').getAddonInfo('version')
except: version = '999'
for i in version:
if i.isdigit(): num += i
else: break
return int(num)
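# Worked example: a version string of '16.1' keeps the digits before the first
# non-digit, so num == '16' and version() returns 16; the '999' fallback
# (addon info unavailable) returns 999.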
def refresh():
return execute('Container.Refresh')
def idle():
return execute('Dialog.Close(busydialog)')
def queueItem():
return execute('Action(Queue)')
def openPlaylist():
return execute('ActivateWindow(VideoPlaylist)')
def openSettings(query=None, id=addonInfo('id')):
try:
idle()
execute('Addon.OpenSettings(%s)' % id)
if query == None: raise Exception()
c, f = query.split('.')
execute('SetFocus(%i)' % (int(c) + 100))
execute('SetFocus(%i)' % (int(f) + 200))
except:
return
def set_setting(id, value):
if not isinstance(value, basestring): value = str(value)
ptv.setSetting(id=id, value=value)
def log(msg, level=xbmc.LOGNOTICE):
#return
level = xbmc.LOGNOTICE
print('[SPECTO]: %s' % (msg))
try:
if isinstance(msg, unicode):
msg = msg.encode('utf-8')
xbmc.log('[SPECTO]: %s' % (msg), level)
except Exception as e:
try:
            #xbmc.log('Logging Failure: %s' % (e), level)
a=1
except: pass # just give up
def randomagent():
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80'],
['11.0']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko']
index = random.randrange(len(RAND_UAS))
return RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES), br_ver=random.choice(BR_VERS[index]))
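# Illustrative output (one of many random combinations of the lists above):
#   'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0'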
DEFAULT_TIMEOUT = 30
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.
mhabrnal/abrt | src/python-problem/doc/conf.py | Python | gpl-2.0 | 8,344 | 0.00755
# -*- coding: utf-8 -*-
#
# python-problem documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 4 12:03:58 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../problem/.libs')) # _pyabrt
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'abrt-python'
copyright = u'2012, Richard Marko'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'abrt-pythondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'abrt-python.tex', u'abrt-python Documentation',
u'Richard Marko', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'abrt-python', u'abrt-python Documentation',
[u'Richard Marko'], 5)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'abrt-python', u'abrt-python Documentation',
u'Richard Marko', 'abrt-python', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
def setup(app):
app.connect('autodoc-process-signature', process_signature)
def process_signature(app, what, name, obj, options, signature,
return_annotation):
    if what not in ('function',):  # tuple, not a string, so this is a membership test
return
new_params = list()
for param in (x.strip() for x
bearstech/modoboa-admin | modoboa_admin/views/alias.py | Python | mit | 4,252 | 0.000235
from django.contrib.auth.decorators import (
login_required, permission_required
)
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.shortcuts import render
from django.utils.translation import ugettext as _, ungettext
import reversion
from modoboa.lib import events
from modoboa.lib.exceptions import PermDeniedException, Conflict
from modoboa.lib.web_utils import render_to_json_response
from ..forms import AliasForm
from ..models import Alias
def _validate_alias(request, form, successmsg, callback=None):
"""Alias validation
Common function shared between creation and modification actions.
"""
if form.is_valid():
form.set_recipients()
try:
alias = form.save()
except IntegrityError:
raise Conflict(_("Alias with this name already exists"))
if callback:
callback(request.user, alias)
return render_to_json_response(successmsg)
return render_to_json_response({'form_errors': form.errors}, status=400)
def _new_alias(request, title, action, successmsg,
tplname="modoboa_admin/aliasform.html"):
events.raiseEvent("CanCreate", request.user, "mailbox_aliases")
if request.method == "POST":
def callback(user, alias):
alias.post_create(user)
form = AliasForm(request.user, request.POST)
return _validate_alias(
request, form, successmsg, callback
)
ctx = {
"title": title,
"action": action,
"formid": "aliasform",
"action_label": _("Create"),
"action_classes": "submit",
"form": AliasForm(request.user)
}
return render(request, tplname, ctx)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newdlist(request):
return _new_alias(
request, _("New distribution list"), reverse("modoboa_admin:dlist_add"),
_("Distribution
|
list created")
)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newalias(request):
return _new_alias(
request, _("New alias"), reverse("modoboa_admin:alias_add"),
_("Alias created")
)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newforward(request):
    return _new_alias(
request, _("New forward"), reverse("modoboa_admin:forward_add"),
_("Forward created")
)
@login_required
@permission_required("modoboa_admin.change_alias")
@reversion.create_revision()
def editalias(request, alid, tplname="modoboa_admin/aliasform.html"):
alias = Alias.objects.get(pk=alid)
if not request.user.can_access(alias):
raise PermDeniedException
if request.method == "POST":
if len(alias.get_recipients()) >= 2:
successmsg = _("Distribution list modified")
elif alias.extmboxes != "":
successmsg = _("Forward modified")
else:
successmsg = _("Alias modified")
form = AliasForm(request.user, request.POST, instance=alias)
return _validate_alias(request, form, successmsg)
ctx = {
'action': reverse("modoboa_admin:alias_change", args=[alias.id]),
'formid': 'aliasform',
'title': alias.full_address,
'action_label': _('Update'),
'action_classes': 'submit',
'form': AliasForm(request.user, instance=alias)
}
return render(request, tplname, ctx)
@login_required
@permission_required("modoboa_admin.delete_alias")
def delalias(request):
selection = request.GET["selection"].split(",")
for alid in selection:
alias = Alias.objects.get(pk=alid)
if not request.user.can_access(alias):
raise PermDeniedException
if alias.type == 'dlist':
msg = "Distribution list deleted"
msgs = "Distribution lists deleted"
elif alias.type == 'forward':
msg = "Forward deleted"
msgs = "Forwards deleted"
else:
msg = "Alias deleted"
msgs = "Aliases deleted"
alias.delete()
msg = ungettext(msg, msgs, len(selection))
return render_to_json_response(msg)
canhhs91/greenpointtrees | src/paypal/pro/south_migrations/0001_initial.py | Python | mit | 7,020 | 0.008832
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
else:
    User = get_user_model()
# With the default User model these will be 'auth.User' and 'auth.user'
# so instead of using orm['auth.User'] we can use orm[user_orm_label]
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'PayPalNVP'
db.create_table('paypal_nvp', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('method', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('ack', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('profilestatus', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('profileid', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('profilereference', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('correlationid', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('token', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('payerid', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('firstname', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('lastname', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('street', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('state', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('countrycode', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('zip', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('invnum', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('custom', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_orm_label], null=True, blank=True)),
('flag', self.gf('django.db.models.fields.BooleanField')(default=False)),
('flag_code', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('flag_info', self.gf('django.db.models.fields.TextField')(blank=True)),
('ipaddress', self.gf('django.db.models.fields.IPAddressField')(max_length=15, blank=True)),
('query', self.gf('django.db.models.fields.TextField')(blank=True)),
('response', self.gf('django.db.models.fields.TextField')(blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal(u'pro', ['PayPalNVP'])
def backwards(self, orm):
# Deleting model 'PayPalNVP'
db.delete_table('paypal_nvp')
models = {
user_model_label: {
'Meta': {'object_name': User.__name__,
'db_table': "'%s'" % User._meta.db_table
},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
},
u'pro.paypalnvp': {
'Meta': {'object_name': 'PayPalNVP', 'db_table': "'paypal_nvp'"},
'ack': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'correlationid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'countrycode': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'firstname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flag_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'flag_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invnum': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'ipaddress': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'blank': 'True'}),
'lastname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'payerid': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'profileid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'profilereference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'profilestatus': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'query': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm[user_orm_label]", 'null': 'True', 'blank': 'True'}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'})
}
}
complete_apps = ['pro']
blha303/ytbot | plugins/core_misc.py | Python | gpl-3.0 | 2,257 | 0.000886
import socket
import time
import re
from util import hook
socket.setdefaulttimeout(10)
nick_re = re.compile(":(.+?)!")
# Auto-join on Invite (Configurable, defaults to True)
@hook.event('INVITE')
def invite(paraml, conn=None):
invite_join = conn.conf.get('invite_join', True)
if invite_join:
conn.join(paraml[-1])
# Identify to NickServ (or other service)
@hook.event('004')
def onjoin(paraml, conn=None, bot=None):
nickserv_password = conn.conf.get('nickserv_password', '')
nickserv_name = conn.conf.get('nickserv_name', 'nickserv')
nickserv_account_name = conn.conf.get('nickserv_user', '')
nickserv_command = conn.conf.get('nickserv_command', 'IDENTIFY')
if nickserv_password:
if nickserv_password in bot.config['censored_strings']:
bot.config['censored_strings'].remove(nickserv_password)
if nickserv_account_name:
conn.msg(nickserv_name, "{} {} {}".format(nickserv_command, nickserv_account_name, nickserv_password))
else:
conn.msg(nickserv_name, "{} {}".format(nickserv_command, nickserv_password))
bot.config['censored_strings'].append(nickserv_password)
time.sleep(1)
# Set bot modes
mode = conn.conf.get('mode')
if mode:
conn.cmd('MODE', [conn.nick, mode])
# Join config-defined channels
for channel in conn.channels:
conn.join(channel)
time.sleep(1)
print "Bot ready."
@hook.event("KICK")
def onkick(paraml, conn=None, chan=None):
# if the bot has been kicked, remove from the channel list
if paraml[1] == conn.nick:
conn.channels.remove(chan)
auto_rejoin = conn.conf.get('auto_rejoin', False)
if auto_rejoin:
        conn.join(paraml[0])
@hook.event("NICK")
def onnick(paraml, conn=None, raw=None):
old_nick = nick_re.search(raw).group(1)
new_nick = str(paraml[0])
if old_nick == conn.nick:
conn.nick = new_nick
print "Bot nick changed from '{}' to '{}'.".format(old_nick, new_nick)
@hook.singlethread
@hook.event('004')
def keep_alive(paraml, conn=None):
keepalive = conn.conf.get('keep_alive', False)
if keepalive:
while True:
conn.cmd('PING', [conn.nick])
time.sleep(60)
geberl/droppy-workspace | Tasks/DropPy.Common/file_tools.py | Python | mit | 1,999 | 0.0005
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import distutils.dir_util
import os
import shutil
import sys
def touch_file(file_path):
"""
Create a new empty file at file_path.
"""
parent_dir = os.path.abspath(os.path.join(file_path, os.pardir))
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
with codecs.open(file_path, 'a'):
os.utime(file_path, None)
def copy_file(input_file, output_file, overwrite=False):
"""
Helper function to copy a file that adds an overwrite parameter.
"""
if os.path.isfile(output_file):
if overwrite:
print('File exists, overwriting')
shutil.copyfile(input_file, output_file)
else:
sys.exit('File exists, unable to continue: %s' % output_file)
else:
shutil.copyfile(input_file, output_file)
def copy_tree(input_dir, output_dir, overwrite=False):
"""
    Helper function to copy a directory tree that adds an overwrite parameter.
"""
if os.path.isdir(output_dir):
if overwrite:
print('Directory exists, overwriting')
distutils.dir_util.copy_tree(input_dir, output_dir)
else:
sys.exit('Directory exists, unable to continue: %s' % output_dir)
else:
distutils.dir_util.copy_tree(input_dir, output_dir)
def get_file_paths_from_directory(dir_path):
"""
Walk a directory and create a list of all contained file_paths in all sub-directories.
"""
file_paths = []
for root, dirs, files in os.walk(dir_path):
for f in files:
file_paths.append(os.path.join(root, f))
return file_paths
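# Hypothetical usage; the directory path is illustrative:
#   for file_path in get_file_paths_from_directory('/tmp/example'):
#       print(file_path)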
def clean_dsstore(dir_path):
"""
Walk a directory and get rid of all those useless hidden .DS_Store files.
"""
for root, dirs, files in os.walk(dir_path):
for f in files:
if f == '.DS_Store':
                os.remove(os.path.join(root, f))  # root from os.walk already includes dir_path
activityhistory/selfspy | selfspy/helpers.py | Python | gpl-3.0 | 3,410 | 0.006452
import calendar
from dateutil.parser import parse
import os
from os import listdir
from os.path import isfile, join
from selfspy import config as cfg
from objc import YES, NO
from AppKit import *
from CBGraphView import CBGraphView
TIMELINE_WIDTH = 960
TIMELINE_HEIGHT = 20
WINDOW_PADDING = 18
def unixTimeFromString(self, s=None):
fuzzy_ts = parse(str(s), fuzzy=True)
ts = calendar.timegm(fuzzy_ts.utctimetuple())
return ts
def getScreenshotPath(self, self2=None):
path = os.path.join(cfg.CURRENT_DIR, 'screenshots')
path = os.path.expanduser(path)
return path + '/'
def generateScreenshotList(self, self2=None):
path = getScreenshotPath(self)
list_of_files = [ f for f in listdir(path) if isfile(join(path,f)) ]
return list_of_files
def generateDateQuery(self, s=None):
self.dateQuery = '20' + s[0:2] + '-' + s[2:4] + '-' + s[4:6] + ' ' + s[7:9] + ':' + s[9:11] + ':' + s[11:13] + '.'
def mapFilenameDateToNumber(self, s=None):
return int('20' + s[0:2] + s[2:4] + s[4:6] + s[7:9] + s[9:11] + s[11:13])
def addProcessTimelineSegment(self, process_id, front_bound, back_bound, reviewer):
if front_bound >= reviewer.slider_min and back_bound <= reviewer.slider_max:
# generate unique grayscale color for timeline segment
gray = (30*process_id) % 255
color = NSColor.colorWithCalibratedRed_green_blue_alpha_(gray/255.0, gray/255.0, gray/255.0, 1.0)
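        # e.g. process_id 1 -> gray 30, id 2 -> 60, ...; shades wrap past id 8
        # since (30 * 9) % 255 == 15, so "unique" only holds for small id ranges.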
# get bounds of segment and draw segment
normalized_front_bound = front_bound - reviewer.slider_min
width_scale_factor = TIMELINE_WIDTH / (reviewer.normalized_max_value*1.0)
segment_x = normalized_front_bound * width_scale_factor
segment_y = 1
segment_height = TIMELINE_HEIGHT-2
segment_width = (back_bound - front_bound) * width_scale_factor
frame = NSRect(NSPoint(segment_x, segment_y),
NSSize(segment_width, segment_height))
this_view = CBGraphView.alloc().initWithFrame_(frame)
reviewer.timeline_view.addSubview_(this_view)
this_view.setBorderColor_(color)
this_view.setAssignedColor_(color)
this_view.setBackgroundColor_(color)
this_view.setWantsLayer_(YES)
# add tooltip to segment
self.processNameQuery = process_id
NSNotificationCenter.defaultCenter().postNotificationName_object_('getProcessNameFromID', self)
        this_view.setToolTip_(str(self.processNameResponse[0]))
self.processNameResponse = []
reviewer.nested_timeline_views.append(this_view)
## TIMELINE HELPERS
# def addProcessNameTextLabelToTimeline(self, process_id, reviewer):
# self.processNameQuery = process_id
# NSNotificationCenter.defaultCenter().postNotificationName_object_('getProcessNameFromID', self)
#
# textField_frame = NSRect(NSPoint(0, TIMELINE_HEIGHT / TIMELINE_MAX_ROWS * process_id),
# NSSize(TEXTLABEL_WIDTH, TEXTLABEL_HEIGHT))
# textField = NSTextField.alloc().initWithFrame_(textField_frame)
# textField.setEditable_(NO)
# textField.setDrawsBackground_(NO)
# textField.setSelectable_(NO)
# textField.setBezeled_(NO)
# textField.setStringValue_(str(self.processNameResponse[0]))
#
# self.processNameResponse = []
#
# reviewer.timeline_view.addSubview_(textField)
# reviewer.nested_timeline_labels.append(textField)
eric-stanley/robotframework | src/robot/model/stats.py | Python | apache-2.0 | 6,017 | 0.000166
# Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import elapsed_time_to_string, html_escape, normalize
from .tags import TagPatterns
class Stat(object):
"""Generic statistic object used for storing all the statistic values."""
def __init__(self, name):
#: Human readable identifier of the object these statistics
#: belong to. Either `All Tests` or `Critical Tests` for
#: :class:`~robot.model.totalstatistics.TotalStatistics`,
#: long name of the suite for
#: :class:`~robot.model.suitestatistics.SuiteStatistics`
#: or name of the tag for
#: :class:`~robot.model.tagstatistics.TagStatistics`
self.name = name
#: Number of passed tests.
self.passed = 0
#: Number of failed tests.
self.failed = 0
#: Number of milliseconds it took to execute.
self.elapsed = 0
self._norm_name = normalize(name, ignore='_')
def get_attributes(self, include_label=False, include_elapsed=False,
exclude_empty=False, values_as_strings=False,
html_escape=False):
attrs = {'pass': self.passed, 'fail': self.failed}
attrs.update(self._get_custom_attrs())
if include_label:
attrs['label'] = self.name
if include_elapsed:
attrs['elapsed'] = elapsed_time_to_string(self.elapsed,
include_millis=False)
if exclude_empty:
attrs = dict((k, v) for k, v in attrs.items() if v != '')
if values_as_strings:
attrs = dict((k, unicode(v)) for k, v in attrs.items())
if html_escape:
attrs = dict((k, self._html_escape(v)) for k, v in attrs.items())
return attrs
def _get_custom_attrs(self):
return {}
def _html_escape(self, item):
return html_escape(item) if isinstance(item, basestring) else item
@property
def total(self):
return self.passed + self.failed
def add_test(self, test):
self._update_stats(test)
self._update_elapsed(test)
def _update_stats(self, test):
if test.passed:
self.passed += 1
else:
self.failed += 1
def _update_elapsed(self, test):
self.elapsed += test.elapsedtime
def __cmp__(self, other):
return cmp(self._norm_name, other._norm_name)
def __nonzero__(self):
return not self.failed
def visit(self, visitor):
visitor.visit_stat(self)
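# Hedged usage sketch of Stat (Python 2, matching the __nonzero__/__cmp__ idioms above):
#   stat = Stat('All Tests')
#   stat.passed, stat.failed = 3, 1
#   stat.total   # -> 4
#   bool(stat)   # -> False, because at least one test failed
#   stat.get_attributes(include_label=True)  # -> {'pass': 3, 'fail': 1, 'label': 'All Tests'}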
class TotalStat(Stat):
"""Stores statistic values for a test run."""
#: Always string `total`
type = 'total'
class SuiteStat(Stat):
"""Stores statistics values for a single suite."""
#: Always string `suite`
type = 'suite'
def __init__(self, suite):
Stat.__init__(self, suite.longname)
#: Identifier of the suite, e.g. `s1-s2`.
self.id = suite.id
#: Number of milliseconds it took to execute this suite,
#: including sub-suites.
self.elapsed = suite.elapsedtime
self._name = suite.name
def _get_custom_attrs(self):
return {'id': self.id, 'name': self._name}
def _update_elapsed(self, test):
pass
def add_stat(self, other):
self.passed += other.passed
self.failed += other.failed
class TagStat(Stat):
"""Stores statistic values for a single tag."""
    #: Always string `tag`.
    type = 'tag'
    def __init__(self, name, doc='', links=None, critical=False,
non_critical=False, combined=''):
Stat.__init__(self, name)
#: Documentation of tag as a string.
self.doc = doc
#: List of tuples in which the first value is the link URL and
#: the second is the link title. An empty list by default.
self.links = links or []
#: ``True`` if tag is considered critical, ``False`` otherwise.
self.critical = critical
#: ``True`` if tag is considered non-critical, ``False`` otherwise.
self.non_critical = non_critical
#: Pattern as a string if the tag is combined,
#: an empty string otherwise.
self.combined = combined
@property
def info(self):
"""Returns additional information of the tag statistics
are about. Either `critical`, `non-critical`, `combined` or an
empty string.
"""
if self.critical:
return 'critical'
if self.non_critical:
return 'non-critical'
if self.combined:
return 'combined'
return ''
def _get_custom_attrs(self):
return {'doc': self.doc, 'links': self._get_links_as_string(),
'info': self.info, 'combined': self.combined}
def _get_links_as_string(self):
return ':::'.join('%s:%s' % (title, url) for url, title in self.links)
def __cmp__(self, other):
return cmp(other.critical, self.critical) \
or cmp(other.non_critical, self.non_critical) \
or cmp(bool(other.combined), bool(self.combined)) \
or Stat.__cmp__(self, other)
class CombinedTagStat(TagStat):
def __init__(self, pattern, name=None, doc='', links=None):
TagStat.__init__(self, name or pattern, doc, links, combined=pattern)
self._matcher = TagPatterns(pattern)
def match(self, tags):
return self._matcher.match(tags)
dnanexus/rseqc | rseqc/lib/bx/intervals/__init__.py | Python | gpl-3.0 | 204 | 0.009804
"""
Tools and data structures for working with genomic intervals (or sets of
regions on a line in general) efficiently.
"""
# For compatibility with existing stuff
from bx.intervals.intersection import *
sechacking/MITMf | core/banners.py | Python | gpl-3.0 | 4,693 | 0.012703
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import random
banner1 = """
__ __ ___ .--. __ __ ___
| |/ `.' `. |__| | |/ `.' `. _.._
| .-. .-. '.--. .| | .-. .-. ' .' .._|
| | | | | || | .' |_ | | | | | | | '
| | | | | || | .' || | | | | | __| |__
| | | | | || |'--. .-'| | | | | ||__ __|
| | | | | || | | | | | | | | | | |
|__| |__| |__||__| | | |__| |__| |__| | |
| '.' | |
| / | |
`'-' |_|
"""
banner2= """
███▄ ▄███▓ ██▓▄▄▄█████▓ ███▄ ▄███▓ █████▒
▓██▒▀█▀ ██▒▓██▒▓ ██▒ ▓▒▓██▒▀█▀ ██▒▓██ ▒
▓██ ▓██░▒██▒▒ ▓██░ ▒░▓██ ▓██░▒████ ░
▒██ ▒██ ░██░░ ▓██▓ ░ ▒██ ▒██ ░▓█▒ ░
▒██▒ ░██▒░██░ ▒██▒ ░ ▒██▒ ░██▒░▒█░
░ ▒░ ░ ░░▓ ▒ ░░ ░ ▒░ ░ ░ ▒ ░
░ ░ ░ ▒ ░ ░ ░ ░ ░ ░
░ ░ ▒ ░ ░ ░ ░ ░ ░
░ ░ ░
"""
banner3 = """
▄▄▄▄███▄▄▄▄ ▄█ ███ ▄▄▄▄███▄▄▄▄ ▄████████
▄██▀▀▀███▀▀▀██▄ ███ ▀█████████▄ ▄██▀▀▀███▀▀▀██▄ ███ ███
███ ███ ███ ███▌ ▀███▀▀██ ███ ███ ███ ███ █▀
███ ███ ███ ███▌ ███ ▀ ███ ███ ███ ▄███▄▄▄
███ ███ ███ ███▌ ███ ███ ███ ███ ▀▀███▀▀▀
███ ███ ███ ███ ███ ███ ███ ███ ███
███ ███ ███ ███ ███ ███ ███ ███ ███
▀█ ███ █▀ █▀ ▄████▀ ▀█ ███ █▀ ███
"""
banner4 = """
███╗ ███╗██╗████████╗███╗ ███╗███████╗
████╗ ████║██║╚══██╔══╝████╗ ████║██╔════╝
██╔████╔██║██║ ██║ ██╔████╔██║█████╗
██║╚██╔╝██║██║ ██║ ██║╚██╔╝██║██╔══╝
██║ ╚═╝ ██║██║ ██║ ██║ ╚═╝ ██║██║
╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝
"""
banner5 = """
@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@ @@@@@@@@
@@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@@ @@@@@@@@
@@! @@! @@! @@! @@! @@! @@! @@! @@!
!@! !@! !@! !@! !@! !@! !@! !@! !@!
@!! !!@ @!@ !!@ @!! @!! !!@ @!@ @!!!:!
!@! ! !@! !!! !!! !@! ! !@! !!!!!:
!!: !!: !!: !!: !!: !!: !!:
:!: :!: :!: :!: :!: :!: :!:
::: :: :: :: ::: :: ::
: : : : : : :
"""
def get_banner():
banners = [banner1, banner2, banner3, banner4, banner5]
return random.choice(banners)
emanuele/jstsp2015 | classif_and_ktst.py | Python | mit | 9,044 | 0.000442
"""Classification-based test and kernel two-sample test.
Author: Sandro Vega-Pons, Emanuele Olivetti.
"""
import os
import numpy as np
from sklearn.metrics import pairwise_distances, confusion_matrix
from sklearn.metrics import pairwise_kernels
from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedKFold, KFold, cross_val_score
from sklearn.grid_search import GridSearchCV
from kernel_two_sample_test import MMD2u, compute_null_distribution
from kernel_two_sample_test import compute_null_distribution_given_permutations
import matplotlib.pylab as plt
from joblib import Parallel, delayed
def compute_rbf_kernel_matrix(X):
"""Compute the RBF kernel matrix with sigma2 as the median pairwise
distance.
"""
sigma2 = np.median(pairwise_distances(X, metric='euclidean'))**2
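    # Median heuristic: gamma = 1 / sigma2, so K[i, j] = exp(-||x_i - x_j||**2 / sigma2).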
K = pairwise_kernels(X, X, metric='rbf', gamma=1.0/sigma2, n_jobs=-1)
return K
def balanced_accuracy_scoring(clf, X, y):
"""Scoring function that computes the balanced accuracy to be used
internally in the cross-validation procedure.
"""
y_pred = clf.predict(X)
conf_mat = confusion_matrix(y, y_pred)
bal_acc = 0.
for i in range(len(conf_mat)):
bal_acc += (float(conf_mat[i, i])) / np.sum(conf_mat[i])
bal_acc /= len(conf_mat)
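    # Worked example (hypothetical): conf_mat = [[8, 2], [1, 9]]
    # -> (8/10 + 9/10) / 2 = 0.85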
return bal_acc
def compute_svm_cv(K, y, C=100.0, n_folds=5,
scoring=balanced_accuracy_scoring):
"""Compute cross-validated score of SVM with given precomputed kernel.
"""
cv = StratifiedKFold(y, n_folds=n_folds)
clf = SVC(C=C, kernel='precomputed', class_weight='auto')
scores = cross_val_score(clf, K, y,
scoring=scoring, cv=cv)
return scores.mean()
def compute_svm_subjects(K, y, n_folds=5):
"""
"""
cv = KFold(len(K)/2, n_folds)
scores = np.zeros(n_folds)
for i, (train, test) in enumerate(cv):
train_ids = np.concatenate((train, len(K)/2+train))
test_ids = np.concatenate((test, len(K)/2+test))
clf = SVC(kernel='precomputed')
clf.fit(K[train_ids, :][:, train_ids], y[train_ids])
scores[i] = clf.score(K[test_ids, :][:, train_ids], y[test_ids])
return scores.mean()
def permutation_subjects(y):
"""Permute class labels of Contextual Disorder dataset.
"""
y_perm = np.random.randint(0, 2, len(y)/2)
y_perm = np.concatenate((y_perm, np.logical_not(y_perm).astype(int)))
return y_perm
def permutation_subjects_ktst(y):
"""Permute class labels of Contextual Disorder dataset for KTST.
"""
yp = np.random.randint(0, 2, len(y)/2)
yp = np.concatenate((yp, np.logical_not(yp).astype(int)))
y_perm = np.arange(len(y))
for i in range(len(y)/2):
if yp[i] == 1:
y_perm[i] = len(y)/2+i
y_perm[len(y)/2+i] = i
return y_perm
def compute_svm_score_nestedCV(K, y, n_folds,
scoring=balanced_accuracy_scoring,
random_state=None,
param_grid=[{'C': np.logspace(-5, 5, 25)}]):
"""Compute cross-validated score of SVM using precomputed kernel.
"""
    cv = StratifiedKFold(y, n_folds=n_folds, shuffle=True,
                         random_state=random_state)
random_state=random_state)
scores = np.zeros(n_folds)
for i, (train, test) in enumerate(cv):
cvclf = SVC(kernel='precomputed')
y_train = y[train]
cvcv = StratifiedKFold(y_train, n_folds=n_folds,
shuffle=True,
random_state=random_state)
clf = GridSearchCV(cvclf, param_grid=param_grid, scoring=scoring,
cv=cvcv, n_jobs=1)
clf.fit(K[train, :][:, train], y_train)
# print clf.best_params_
scores[i] = clf.score(K[test, :][:, train], y[test])
return scores.mean()
def apply_svm(K, y, n_folds=5, iterations=10000, subjects=False, verbose=True,
random_state=None):
"""
Compute the balanced accuracy, its null distribution and the p-value.
Parameters:
----------
K: array-like
Kernel matrix
y: array_like
class labels
    n_folds: Number of folds in the stratified cross-validation
verbose: bool
Verbosity
Returns:
-------
acc: float
Average balanced accuracy.
acc_null: array
Null distribution of the balanced accuracy.
p_value: float
p-value
"""
# Computing the accuracy
param_grid = [{'C': np.logspace(-5, 5, 20)}]
if subjects:
acc = compute_svm_subjects(K, y, n_folds)
else:
acc = compute_svm_score_nestedCV(K, y, n_folds, param_grid=param_grid,
random_state=random_state)
if verbose:
print("Mean balanced accuracy = %s" % (acc))
print("Computing the null-distribution.")
# Computing the null-distribution
# acc_null = np.zeros(iterations)
# for i in range(iterations):
# if verbose and (i % 1000) == 0:
# print(i),
# stdout.flush()
# y_perm = np.random.permutation(y)
# acc_null[i] = compute_svm_score_nestedCV(K, y_perm, n_folds,
# param_grid=param_grid)
# if verbose:
# print ''
# Computing the null-distribution
if subjects:
yis = [permutation_subjects(y) for i in range(iterations)]
acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_subjects)(K, yis[i], n_folds) for i in range(iterations))
else:
yis = [np.random.permutation(y) for i in range(iterations)]
acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_score_nestedCV)(K, yis[i], n_folds, scoring=balanced_accuracy_scoring, param_grid=param_grid) for i in range(iterations))
# acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_cv)(K, yis[i], C=100., n_folds=n_folds) for i in range(iterations))
p_value = max(1.0 / iterations, (acc_null > acc).sum()
/ float(iterations))
if verbose:
print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
return acc, acc_null, p_value
def apply_ktst(K, y, iterations=10000, subjects=False, verbose=True):
"""
Compute MMD^2_u, its null distribution and the p-value of the
kernel two-sample test.
Parameters:
----------
K: array-like
Kernel matrix
y: array_like
class labels
verbose: bool
Verbosity
Returns:
-------
mmd2u: float
MMD^2_u value.
acc_null: array
Null distribution of the MMD^2_u
p_value: float
p-value
"""
assert len(np.unique(y)) == 2, 'KTST only works on binary problems'
# Assuming that the first m rows of the kernel matrix are from one
# class and the other n rows from the second class.
m = len(y[y == 0])
n = len(y[y == 1])
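    # MMD^2_u is the unbiased estimator (Gretton et al., JMLR 2012):
    #   1/(m(m-1)) sum_{i!=j} k(x_i, x_j) + 1/(n(n-1)) sum_{i!=j} k(y_i, y_j)
    #   - 2/(m*n) sum_{i,j} k(x_i, y_j)
    # evaluated on the precomputed kernel matrix K.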
mmd2u = MMD2u(K, m, n)
if verbose:
print("MMD^2_u = %s" % mmd2u)
print("Computing the null distribution.")
if subjects:
perms = [permutation_subjects_ktst(y) for i in range(iterations)]
mmd2u_null = compute_null_distribution_given_permutations(K, m, n,
perms,
iterations)
else:
mmd2u_null = compute_null_distribution(K, m, n, iterations,
verbose=verbose)
p_value = max(1.0/iterations, (mmd2u_null > mmd2u).sum()
/ float(iterations))
if verbose:
print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
return mmd2u, mmd2u_null, p_value
def plot_null_distribution(stats, stats_null, p_value, data_name='',
stats_name='$MMD^2_u$', save_figure=True):
"""Plot the observed value for the test statistic, its null
distribution and p-value.
"""
fig = plt.figure()
ax = fig.add_subplot(111)
prob, bins, patches = plt.hist(stats_null, bins=50, normed=True)
ax.plot(stats, prob.max()/30, 'w*', markersize=15,
markeredgecolor='k', marke
gdietz/OpenMEE | common_wizard_pages/ui_effect_size_locations_page.py | Python | gpl-3.0 | 5,558 | 0.001799
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'effect_size_locations_page.ui'
#
# Created: Tue Aug 27 16:49:55 2013
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_wizardPage(object):
def setupUi(self, wizardPage):
wizardPage.setObjectName(_fromUtf8("wizardPage"))
wizardPage.resize(498, 195)
self.verticalLayout_3 = QtGui.QVBoxLayout(wizardPage)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_6 = QtGui.QLabel(wizardPage)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout_3.addWidget(self.label_6)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.trans_grp_box = QtGui.QGroupBox(wizardPage)
self.trans_grp_box.setObjectName(_fromUtf8("trans_grp_box"))
self.gridLayout = QtGui.QGridLayout(self.trans_grp_box)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.trans_var_cbo_box = QtGui.QComboBox(self.trans_grp_box)
self.trans_var_cbo_box.setObjectName(_fromUtf8("trans_var_cbo_box"))
self.gridLayout.addWidget(self.trans_var_cbo_box, 1, 1, 1, 1)
self.trans_effect_cbo_box = QtGui.QComboBox(self.trans_grp_box)
self.trans_effect_cbo_box.setObjectName(_fromUtf8("trans_effect_cbo_box"))
self.gridLayout.addWidget(self.trans_effect_cbo_box, 0, 1, 1, 1)
self.label = QtGui.QLabel(self.trans_grp_box)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_2 = QtGui.QLabel(self.trans_grp_box)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.verticalLayout.addWidget(self.trans_grp_box)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.horizontalLayout.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.raw_grp_box = QtGui.QGroupBox(wizardPage)
self.raw_grp_box.setObjectName(_fromUtf8("raw_grp_box"))
self.gridLayout_3 = QtGui.QGridLayout(self.raw_grp_box)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.label_3 = QtGui.QLabel(self.raw_grp_box)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_3.addWidget(self.label_3, 0, 0, 1, 1)
self.raw_effect_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_effect_cbo_box.setObjectName(_fromUtf8("raw_effect_cbo_box"))
self.gridLayout_3.addWidget(self.raw_effect_cbo_box, 0, 1, 1, 1)
self.label_4 = QtGui.QLabel(self.raw_grp_box)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_3.addWidget(self.label_4, 1, 0, 1, 1)
self.raw_lower_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_lower_cbo_box.setObjectName(_fromUtf8("raw_lower_cbo_box"))
self.gridLayout_3.addWidget(self.raw_lower_cbo_box, 1, 1, 1, 1)
self.label_5 = QtGui.QLabel(self.raw_grp_box)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout_3.addWidget(self.label_5, 2, 0, 1, 1)
self.raw_upper_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_upper_cbo_box.setObjectName(_fromUtf8("raw_upper_cbo_box"))
self.gridLayout_3.addWidget(self.raw_upper_cbo_box, 2, 1, 1, 1)
self.verticalLayout_2.addWidget(self.raw_grp_box)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout_2.addItem(spacerItem1)
self.horizontalLayout.addLayout(self.verticalLayout_2)
self.verticalLayout_3.addLayout(self.horizontalLayout)
self.retranslateUi(wizardPage)
        QtCore.QMetaObject.connectSlotsByName(wizardPage)
def retranslateUi(self, wizardPage):
wizardPage.setWindowTitle(_translate("wizardPage", "WizardPage", None))
wizardPage.setTitle(_translate("wizardPage", "Effect Size Column Locations", None))
self.label_6.setText(_translate("wizardPage", "Where is your data located?", None))
self.trans_grp_box.setTitle(_translate("wizardPage", "Transformed Scale", None))
self.label.setText(_translate("wizardPage", "Effect Size:", None))
self.label_2.setText(_translate("wizardPage", "Variance:", None))
self.raw_grp_box.setTitle(_translate("wizardPage", "Raw Scale", None))
self.label_3.setText(_translate("wizardPage", "Effect Size:", None))
self.label_4.setText(_translate("wizardPage", "CI Lower Bound:", None))
self.label_5.setText(_translate("wizardPage", "CI Upper Bound:", None))
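if __name__ == "__main__":
    # Standalone preview of the generated page (a sketch; in OpenMEE this
    # Ui_ class is normally mixed into a QWizardPage elsewhere).
    import sys
    app = QtGui.QApplication(sys.argv)
    page = QtGui.QWizardPage()
    ui = Ui_wizardPage()
    ui.setupUi(page)
    page.show()
    sys.exit(app.exec_())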
|
django-settings/django-settings
|
myproject/myproject/user_settings.py
|
Python
|
unlicense
| 2,239
| 0.003126
|
# Imports environment-specific settings.
import os
import sys
try:
from colorama import init as colorama_init
except ImportError:
def colorama_init(autoreset=False, convert=None, strip=None, wrap=True):
"""
Fallback function that initializes colorama.
"""
pass
try:
from termcolor import colored
except ImportError:
def colored(text, color=None, on_color=None, attrs=None):
"""
Fallback function to colorize text when termcolor is not installed.
"""
return text
# Use production settings by default as it is the secure setup. To use local
# settings: $ export PRODUCTION=0
production = 'PRODUCTION' not in os.environ or os.environ['PRODUCTION'].lower() in ['y', 'yes', '1']
local = not production
platform = sys.platform
linux = platform == 'linux2'
os_x = platform == 'darwin'
win32 = platform == 'win32'
# Don't initialize colorama when on Windows and running the shell because the
# ipython colors get confused.
if not win32 or 'shell' not in sys.argv:
colorama_init()
current_settings = []
if production:
current_settings.append(colored('Production', 'green', attrs=['bold']))
from production_settings import *
if local:
    current_settings.append(colored('Local', 'yellow', attrs=['bold']))
from local_settings import *
if linux:
current_settings.append(colored('Linux', 'blue', attrs=['bold']))
from linux_settings import *
if os_x:
current_settings.append(colored('OS X', 'blue', attrs=['bold']))
from os_x_settings import *
if win32:
current_settings.append(colored('Windows', 'blue', attrs=['bold']))
from win32_settings import *
if 'runserver' in sys.argv:
print '-' * 80
print ' :: '.join(current_settings)
print '-' * 80
color = '[1;93m' # Bold High Intensity Yellow + Underline
version = 'Development'
if production:
color = '[1;92m' # Bold High Intensity Green + Underline
version = 'Production'
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
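# Example of switching to the local settings at runtime, per the comment
# near the top of this file (shell commands shown for illustration only):
#   $ export PRODUCTION=0
#   $ python manage.py runserver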
|
cupy/cupy
|
examples/gemm/utils.py
|
Python
|
mit
| 551
| 0
|
import cupy as cp
def read_code(code_filename, params):
with open(code_filename, 'r') as f:
        code = f.read()
for k, v in params.items():
code = '#define ' + k + ' ' + str(v) + '\n' + code
return code
def benchmark(func, args, n_run):
times = []
for _ in range(n_run):
start = cp.cuda.Event()
end = cp.cuda.Event()
start.record()
func(*args)
end.record()
end.synchronize()
times.append(cp.cuda.get_elapsed_time(start, end)) # milliseconds
return times
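# Usage sketch for the helpers above (assumptions: a CUDA-capable device is
# available, and the median is just one reasonable summary of the timings).
if __name__ == '__main__':
    a = cp.random.rand(1024, 1024, dtype=cp.float32)
    b = cp.random.rand(1024, 1024, dtype=cp.float32)
    times = benchmark(cp.matmul, (a, b), n_run=10)
    print('median time: %f ms' % sorted(times)[len(times) // 2])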
|
alxgu/ansible
|
test/units/modules/network/f5/test_bigip_device_facts.py
|
Python
|
gpl-3.0
| 4,137
| 0.001209
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
try:
from library.modules.bigip_device_facts import Parameters
from library.modules.bigip_device_facts import VirtualAddressesFactManager
from library.modules.bigip_device_facts import VirtualAddressesParameters
from library.modules.bigip_device_facts import ArgumentSpec
from library.modules.bigip_device_facts import ModuleManager
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_device_facts import Parameters
from ansible.modules.network.f5.bigip_device_facts import VirtualAddressesFactManager
from ansible.modules.network.f5.bigip_device_facts import VirtualAddressesParameters
from ansible.modules.network.f5.bigip_device_facts import ArgumentSpec
from ansible.modules.network.f5.bigip_device_facts import ModuleManager
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
        data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class FakeVirtualAddress:
def __init__(self, *args, **kwargs):
attrs = kwargs.pop('params', {})
        for key, value in iteritems(attrs):
setattr(self, key, value)
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
gather_subset=['virtual-servers'],
)
p = Parameters(params=args)
assert p.gather_subset == ['virtual-servers']
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
try:
self.p1 = patch('library.modules.bigip_device_facts.modules_provisioned')
self.m1 = self.p1.start()
self.m1.return_value = ['ltm', 'gtm', 'asm']
except Exception:
self.p1 = patch('ansible.modules.network.f5.bigip_device_facts.modules_provisioned')
self.m1 = self.p1.start()
self.m1.return_value = ['ltm', 'gtm', 'asm']
def tearDown(self):
self.p1.stop()
def test_get_trunk_facts(self, *args):
set_module_args(dict(
gather_subset=['virtual-addresses'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
fixture1 = load_fixture('load_ltm_virtual_address_collection_1.json')
collection = fixture1['items']
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
tm = VirtualAddressesFactManager(module=module)
tm.read_collection_from_device = Mock(return_value=collection)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.get_manager = Mock(return_value=tm)
results = mm.exec_module()
assert results['changed'] is True
assert 'virtual_addresses' in results
assert len(results['virtual_addresses']) > 0
|
rwl/PyCIM
|
CIM15/IEC61970/Informative/InfAssets/Medium.py
|
Python
|
mit
| 4,176
| 0.002155
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61970.Core.IdentifiedObject import IdentifiedObject
class Medium(IdentifiedObject):
"""A substance that either (1) provides the means of transmission of a force or effect, such as hydraulic fluid, or (2) is used for a surrounding or enveloping substance, such as oil in a transformer or circuit breaker.A substance that either (1) provides the means of transmission of a force or effect, such as hydraulic fluid, or (2) is used for a surrounding or enveloping substance, such as oil in a transformer or circuit breaker.
"""
def __init__(self, kind="gas", volumeSpec=0.0, Specification=None, Assets=None, *args, **kw_args):
"""Initialises a new 'Medium' instance.
@param kind: Kind of this medium. Values are: "gas", "liquid", "solid"
        @param volumeSpec: The volume of the medium specified for this application. Note that the actual volume is a type of measurement associated with the asset.
@param Specification:
@param Assets:
"""
#: Kind of this medium. Values are: "gas", "liquid", "solid"
self.kind = kind
        #: The volume of the medium specified for this application. Note that the actual volume is a type of measurement associated with the asset.
self.volumeSpec = volumeSpec
self._Specification = None
self.Specification = Specification
self._Assets = []
self.Assets = [] if Assets is None else Assets
super(Medium, self).__init__(*args, **kw_args)
_attrs = ["kind", "volumeSpec"]
_attr_types = {"kind": str, "volumeSpec": float}
_defaults = {"kind": "gas", "volumeSpec": 0.0}
_enums = {"kind": "MediumKind"}
_refs = ["Specification", "Assets"]
_many_refs = ["Assets"]
def getSpecification(self):
return self._Specification
def setSpecification(self, value):
if self._Specification is not None:
filtered = [x for x in self.Specification.Mediums if x != self]
self._Specification._Mediums = filtered
self._Specification = value
if self._Specification is not None:
if self not in self._Specification._Mediums:
self._Specification._Mediums.append(self)
Specification = property(getSpecification, setSpecification)
    def getAssets(self):
return self._Assets
def setAssets(self, value):
for p in self._Assets:
filtered = [q for q in p.Mediums if q != self]
            # assign to the asset being detached, not to the list itself
            p._Mediums = filtered
for r in value:
if self not in r._Mediums:
r._Mediums.append(self)
self._Assets = value
Assets = property(getAssets, setAssets)
def addAssets(self, *Assets):
for obj in Assets:
if self not in obj._Mediums:
obj._Mediums.append(self)
self._Assets.append(obj)
def removeAssets(self, *Assets):
for obj in Assets:
if self in obj._Mediums:
obj._Mediums.remove(self)
self._Assets.remove(obj)
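if __name__ == '__main__':
    # Usage sketch of the bidirectional bookkeeping above (assumption: a
    # minimal stand-in for an Asset, which in the generated CIM classes
    # carries a _Mediums list; the real Asset class lives elsewhere).
    class _StubAsset(object):
        def __init__(self):
            self._Mediums = []
            self.Mediums = self._Mediums

    medium = Medium(kind="liquid", volumeSpec=250.0)
    asset = _StubAsset()
    medium.addAssets(asset)
    assert medium in asset.Mediums and asset in medium.Assets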
|
samuelgarcia/python-neo
|
neo/test/iotest/test_pickleio.py
|
Python
|
bsd-3-clause
| 3,272
| 0.000306
|
"""
Tests of the neo.io.pickleio.PickleIO class
"""
import os
import unittest
import numpy as np
import quantities as pq
from neo.core import Block, Segment, AnalogSignal, SpikeTrain, Epoch, Event, \
IrregularlySampledSignal, Group
from neo.io import PickleIO
from numpy.testing import assert_array_equal
from neo.test.tools import assert_arrays_equal, assert_file_contents_equal
from neo.test.iotest.common_io_test import BaseTestIO
NCELLS = 5
class CommonTestPickleIO(BaseTestIO, unittest.TestCase):
ioclass = PickleIO
class TestPickleIO(unittest.TestCase):
def test__issue_285(self):
# Spiketrain
train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
unit = Group()
unit.add(train)
epoch = Epoch(np.array([0, 10, 20]),
np.array([2, 2, 2]),
np.array(["a", "b", "c"]),
units="ms")
blk = Block()
seg = Segment()
seg.spiketrains.append(train)
seg.epochs.append(epoch)
epoch.segment = seg
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.epochs[0], Epoch)
os.remove('blk.pkl')
# Epoch
epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
durations=[10, 5, 7] * pq.ms,
labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
epoch.segment = Segment()
blk = Block()
seg = Segment()
seg.epochs.append(epoch)
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.epochs[0].segment, Segment)
os.remove('blk.pkl')
# Event
event = Event(np.arange(0, 30, 10) * pq.s,
labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
event.segment = Segment()
blk = Block()
seg = Segment()
seg.events.append(event)
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.events[0].segment, Segment)
os.remove('blk.pkl')
# IrregularlySampledSignal
signal = IrregularlySampledSignal(
[0.0, 1.23, 6.78], [1, 2, 3], units='mV', time_units='ms')
signal.segment = Segment()
blk = Block()
seg = Segment()
seg.irregularlysampledsignals.append(signal)
blk.segments.append(seg)
blk.segments[0].block = blk
reader = PickleIO(filename="blk.p
|
kl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment, Segment)
os.remove('blk.pkl')
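def _round_trip_sketch():
    # Minimal PickleIO round-trip outside the unittest harness, mirroring
    # the pattern used above ('example.pkl' is an arbitrary scratch file).
    blk = Block()
    blk.segments.append(Segment())
    PickleIO(filename="example.pkl").write(blk)
    restored = PickleIO(filename="example.pkl").read_block()
    assert len(restored.segments) == 1
    os.remove("example.pkl")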
if __name__ == '__main__':
unittest.main()
|
kamyu104/LeetCode
|
Python/magic-squares-in-grid.py
|
Python
|
mit
| 1,889
| 0
|
# Time: O(m * n)
# Space: O(1)
# A 3 x 3 magic square is a 3 x 3 grid filled with
# distinct numbers from 1 to 9 such that each row, column,
# and both diagonals all have the same sum.
#
# Given a grid of integers, how many 3 x 3 "magic square" subgrids are there?
# (Each subgrid is contiguous).
#
# Example 1:
#
# Input: [[4,3,8,4],
# [9,5,1,9],
# [2,7,6,2]]
# Output: 1
# Explanation:
# The following subgrid is a 3 x 3 magic square:
# 438
# 951
# 276
#
# while this one is not:
# 384
# 519
# 762
#
# In total, there is only one magic square inside the given grid.
# Note:
# - 1 <= grid.length <= 10
# - 1 <= grid[0].length <= 10
# - 0 <= grid[i][j] <= 15
try:
xrange # Python 2
except NameError:
xrange = range # Python 3
class Solution(object):
def numMagicSquaresInside(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
def magic(grid, r, c):
expect = k * (k**2+1) // 2
nums = set()
min_num = float("inf")
sum_diag, sum_anti = 0, 0
for i in xrange(k):
sum_diag += grid[r+i][c+i]
sum_anti += grid[r+i][c+k-1-i]
sum_r, sum_c = 0, 0
for j in xrange(k):
                    min_num = min(min_num, grid[r+i][c+j])
nums.add(grid[r+i][c+j])
sum_r += grid[r+i][c+j]
sum_c += grid[r+j][c+i]
if not (sum_r == sum_c == expect):
return False
return sum_diag == sum_anti == expect and \
len(nums) == k**2 and \
min_num == 1
k = 3
result = 0
for r in xrange(len(grid)-k+1):
for c in xrange(len(grid[r])-k+1):
if magic(grid, r, c):
result += 1
return result
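# Quick check against the worked example in the header comment above.
if __name__ == "__main__":
    grid = [[4, 3, 8, 4],
            [9, 5, 1, 9],
            [2, 7, 6, 2]]
    print(Solution().numMagicSquaresInside(grid))  # expected output: 1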
|
JustinTulloss/harmonize.fm
|
libs.py/facebook/__init__.py
|
Python
|
mit
| 32,593
| 0.002976
|
#! /usr/bin/env python
#
# pyfacebook - Python bindings for the Facebook API
#
# Copyright (c) 2008, Samuel Cormier-Iijima
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY <copyright holder> ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <copyright holder> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Python bindings for the Facebook API (pyfacebook - http://code.google.com/p/pyfacebook)
PyFacebook is a client library that wraps the Facebook API.
For more information, see
Home Page: http://code.google.com/p/pyfacebook
Developer Wiki: http://wiki.developers.facebook.com/index.php/Python
Facebook IRC Channel: #facebook on irc.freenode.net
PyFacebook can use simplejson if it is installed, which
is much faster than XML and also uses less bandwith. Go to
http://undefined.org/python/#simplejson to download it, or do
apt-get install python-simplejson on a Debian-like system.
"""
import md5
import sys
import time
import urllib
import urllib2
import httplib
import mimetypes
# try to use simplejson first, otherwise fallback to XML
try:
import simplejson
RESPONSE_FORMAT = 'JSON'
except ImportError:
try:
from django.utils import simplejson
RESPONSE_FORMAT = 'JSON'
except ImportError:
from xml.dom import minidom
RESPONSE_FORMAT = 'XML'
# support Google App Engine. GAE does not have a working urllib.urlopen.
try:
from google.appengine.api import urlfetch
def urlread(url, data=None):
if data is not None:
headers = {"Content-type": "application/x-www-form-urlencoded"}
method = urlfetch.POST
else:
headers = {}
method = urlfetch.GET
result = urlfetch.fetch(url, method=method,
payload=data, headers=headers)
if result.status_code == 200:
return result.content
else:
raise urllib2.URLError("fetch error url=%s, code=%d" % (url, result.status_code))
except ImportError:
def urlread(url, data=None):
        res = urllib2.urlopen(url, data=data)
return res.read()
__all__ = ['Facebook']
VERSION = '0.1'
# REST URLs
# Change these to /bestserver.php to use the bestserver.
FACEBOOK_URL = 'http://api.facebook.com/restserver.php'
FACEBOOK_SECURE_URL = 'https://api.facebook.com/restserver.php'
class json(object): pass
# simple IDL for the Facebook API
METHODS = {
# feed methods
'feed': {
'publishStoryToUser': [
('title', str, []),
            ('body', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('priority', int, ['optional']),
],
'publishActionOfUser': [
('title', str, []),
('body', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('priority', int, ['optional']),
],
'publishTemplatizedAction': [
# facebook expects title_data and body_data to be JSON
# simplejson.dumps({'place':'Florida'}) would do fine
# actor_id is now deprecated, use page_actor_id instead
('title_template', str, []),
('title_data', str, ['optional']),
('page_actor_id', int, ['optional']),
('body_template', str, ['optional']),
('body_data', str, ['optional']),
('body_general', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('target_ids', list, ['optional']),
],
'registerTemplateBundle': [
('one_line_story_template', str, []),
('short_story_template', json, ['optional']),
('full_story_template', json, ['optional']),
],
'getRegisteredTemplateBundles': [],
'getRegisteredTemplateBundleByID': [
('template_bundle_id', str, []),
],
'publishUserAction': [
('template_bundle_id', str, []),
('template_data', json, ['optional']),
('target_ids', list, ['optional']),
('body_general', str, ['optional']),
],
},
# fql methods
'fql': {
'query': [
('query', str, []),
],
},
# friends methods
'friends': {
'areFriends': [
('uids1', list, []),
('uids2', list, []),
],
'get': [],
'getAppUsers': [],
},
# notifications methods
'notifications': {
'get': [],
'send': [
('to_ids', list, []),
('notification', str, []),
('email', str, ['optional']),
],
'sendRequest': [
('to_ids', list, []),
('type', str, []),
('content', str, []),
('image', str, []),
('invite', bool, []),
],
'sendEmail': [
('recipients', list, []),
('subject', str, []),
('text', str, ['optional']),
('fbml', str, ['optional']),
]
},
# profile methods
'profile': {
'setFBML': [
('markup', str, ['optional']),
('uid', int, ['optional']),
('profile', str, ['optional']),
('profile_action', str, ['optional']),
('mobile_fbml', str, ['optional']),
],
'getFBML': [
('uid', int, ['optional']),
],
'setInfo': [
('title', str, []),
('type', int, []),
('info_fields', json, []),
('uid', int, []),
],
'getInfo': [
('uid', int, []),
],
'setInfoOptions': [
('field', str, []),
('options', json, []),
],
'getInfoOptions': [
('field', str, []),
],
},
# users methods
'users': {
'getInfo': [
('uids', list, []),
|
stevenzhang18/Indeed-Flask
|
lib/pandas/msgpack/_unpacker.py
|
Python
|
apache-2.0
| 297
| 0.016835
|
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, '_unpacker.cp35-win32.pyd')
__loader__ = None; del __bootstrap__, __loader__
    imp.load_dynamic(__name__, __file__)
__bootstrap__()
|
Antiun/carrier-delivery
|
delivery_carrier_label_postlogistics/delivery.py
|
Python
|
agpl-3.0
| 10,691
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Vaucher
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from openerp import models, fields, api
class PostlogisticsLicense(models.Model):
    _name = 'postlogistics.license'
_description = 'PostLogistics Franking License'
_order = 'sequence'
name = fields.Char(string='Description',
translate=True,
required=True)
number = fields.Char(string='Number',
required=True)
company_id = fields.Many2one(comodel_name='res.company',
string='Company',
                                 required=True)
sequence = fields.Integer(
string='Sequence',
help="Gives the sequence on company to define priority on license "
"when multiple licenses are available for the same group of "
"service."
)
class PostlogisticsServiceGroup(models.Model):
_name = 'postlogistics.service.group'
_description = 'PostLogistics Service Group'
name = fields.Char(string='Description', translate=True, required=True)
group_extid = fields.Integer(string='Group ID', required=True)
postlogistics_license_ids = fields.Many2many(
comodel_name='postlogistics.license',
relation='postlogistics_license_service_groups_rel',
column1='license_id',
column2='group_id',
string='PostLogistics Franking License')
_sql_constraints = [
('group_extid_uniq', 'unique(group_extid)',
"A service group ID must be unique.")
]
POSTLOGISTIC_TYPES = [
('label_layout', 'Label Layout'),
('output_format', 'Output Format'),
('resolution', 'Output Resolution'),
('basic', 'Basic Service'),
('additional', 'Additional Service'),
('delivery', 'Delivery Instructions')
]
class DeliveryCarrierTemplateOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.template.option'
name = fields.Char(translate=True)
postlogistics_service_group_id = fields.Many2one(
comodel_name='postlogistics.service.group',
string='PostLogistics Service Group',
)
postlogistics_type = fields.Selection(
selection=POSTLOGISTIC_TYPES,
string="PostLogistics option type",
)
    # relation tables to manage compatibility between basic services
# and other services
postlogistics_basic_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='service_id',
column2='basic_service_id',
string="Basic Services",
domain=[('postlogistics_type', '=', 'basic')],
help="List of basic service for which this service is compatible",
)
postlogistics_additonial_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Additional Services",
domain=[('postlogistics_type', '=', 'additional')],
)
postlogistics_delivery_instruction_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Delivery Instructions",
domain=[('postlogistics_type', '=', 'delivery')],
)
class DeliveryCarrierOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.option'
name = fields.Char(translate=True)
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
context=None, toolbar=False, submenu=False):
_super = super(DeliveryCarrierOption, self)
result = _super.fields_view_get(cr, uid, view_id=view_id,
view_type=view_type, context=context,
toolbar=toolbar, submenu=submenu)
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
ref = self.pool['ir.model.data'].xmlid_to_object
postlogistics_partner = ref(cr, uid, xmlid, context=context)
if context.get('default_carrier_id'):
carrier_obj = self.pool['delivery.carrier']
carrier = carrier_obj.browse(cr, uid,
context['default_carrier_id'],
context=context)
if carrier.partner_id == postlogistics_partner:
arch = result['arch']
doc = etree.fromstring(arch)
for node in doc.xpath("//field[@name='tmpl_option_id']"):
node.set(
'domain',
"[('partner_id', '=', %s), "
" ('id', 'in', parent.allowed_option_ids[0][2])]" %
postlogistics_partner.id
)
result['arch'] = etree.tostring(doc)
return result
class DeliveryCarrier(models.Model):
""" Add service group """
_inherit = 'delivery.carrier'
@api.model
def _get_carrier_type_selection(self):
""" Add postlogistics carrier type """
res = super(DeliveryCarrier, self)._get_carrier_type_selection()
res.append(('postlogistics', 'Postlogistics'))
return res
@api.depends('partner_id',
'available_option_ids',
'available_option_ids.tmpl_option_id',
'available_option_ids.postlogistics_type',
)
def _get_basic_service_ids(self):
""" Search in all options for PostLogistics basic services if set """
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
if carrier.partner_id != postlogistics_partner:
continue
options = carrier.available_option_ids.filtered(
lambda option: option.postlogistics_type == 'basic'
).mapped('tmpl_option_id')
if not options:
continue
self.postlogistics_basic_service_ids = options
@api.depends('partner_id',
'postlogistics_service_group_id',
'postlogistics_basic_service_ids',
'postlogistics_basic_service_ids',
'available_option_ids',
'available_option_ids.postlogistics_type',
)
def _get_allowed_option_ids(self):
""" Return a list of possible options
A domain would be too complicated.
        We do this to ensure the user first selects a basic service and
        then adds additional services.
"""
option_template_obj = self.env['delivery.carrier.template.option']
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
allowed = option_template_obj.browse()
if carrier.partner_id != postlogistics_partner:
continue
service
|
google-research/google-research
|
axial/config_imagenet32.py
|
Python
|
apache-2.0
| 1,760
| 0.000568
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
def get_config():
return tf.contrib.training.HParams(**{
'total_bs': 64,
'eval_total_bs': 16,
'dataset_name': 'imagenet32',
'dataset_config': tf.contrib.training.HParams(),
'model_name': 'SlicedChannelModel',
'model_config': tf.contrib.training.HParams(**{
'optim': tf.contrib.training.HParams(**{
'max_lr': 1e-4,
'warmup': 5000,
'grad_clip_norm': 1.0,
'ema': 0.99995,
'optimizer': 'adam',
'adam_beta1': 0.9,
'adam_beta2': 0.999,
}),
'dropout': 0.04,
'img_size': 32,
'ardec': tf.contrib.training.HParams(**{
'emb_dim': 1536,
'hdim_factor': 1,
'emb_init_scale': 5.0,
'num_heads': 16,
'num_exterior_layers': 8,
'num_outer_layers': 8,
'num_inner_layers': 8,
'res_init_scale': 1e-10,
}),
})
})
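# Usage sketch (assumption: TensorFlow 1.x, where tf.contrib.training
# provides the HParams class used above).
if __name__ == '__main__':
    config = get_config()
    print(config.dataset_name, config.total_bs)
    print(config.model_config.ardec.emb_dim)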
|
OpenDaisy/daisy-api
|
daisy/cmd/cache_cleaner.py
|
Python
|
apache-2.0
| 2,104
| 0.000951
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glance Image Cache Invalid Cache Entry and Stalled Image cleaner
This is meant to be run as a periodic task from cron.
If something goes wrong while we're caching an image (for example the fetch
times out, or an exception is raised), we create an 'invalid' entry. These
entries are left around for debugging purposes. However, after some period of
time, we want to clean these up.
Also, if an incomplete image hangs around past the image_cache_stall_time
period, we automatically sweep it up.
"""
import os
import sys
from oslo_log import log as logging
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
from daisy.common import config
from daisy.image_cache import cleaner
CONF = config.CONF
logging.register_options(CONF)
def main():
try:
config.parse_cache_args()
logging.setup(CONF, 'glance')
app = cleaner.Cleaner()
app.run()
except RuntimeError as e:
sys.exit("ERROR: %s" % e)
|
bluestemscott/librarygadget
|
librarygadget/paypal/pro/forms.py
|
Python
|
mit
| 1,840
| 0.003261
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from paypal.pro.fields import CreditCardField, CreditCardExpiryField, CreditCardCVV2Field, CountryField
class PaymentForm(forms.Form):
"""Form used to process direct payments."""
firstname = forms.CharField(255, label="First Name")
lastname = forms.CharField(255, label="Last Name")
street = forms.CharField(255, label="Street Address")
city = forms.CharField(255, label="City")
state = forms.CharField(255, label="State")
countrycode = CountryField(label="Country", initial="US")
zip = forms.CharField(32, label="Postal / Zip Code")
acct = CreditCardField(label="Credit Card Number")
expdate = CreditCardExpiryField(label="Expiration Date")
cvv2 = CreditCardCVV2Field(label="Card Security Code")
def process(self, request, item):
"""Process a PayPal direct payment."""
from paypal.pro.helpers import PayPalWPP
wpp = PayPalWPP(request)
params = self.cleaned_data
params['creditcardtype'] = self.fields['acct'].card_type
params['expdate'] = self.cleaned_data['expdate'].strftime("%m%Y")
params['ipaddress'] = request.META.get("REMOTE_ADDR", "")
params.update(item)
# Create single payment:
if 'billingperiod' not in params:
response = wpp.doDirectPayment(params)
# Create recurring payment:
else:
response = wpp.createRecurringPaymentsProfile(params, direct=True)
return response
class ConfirmForm(forms.Form):
"""Hidden form used b
|
y ExpressPay flow to keep track of payer information."""
token = forms.CharField(max_length=255, widget=forms.HiddenInput())
PayerID = forms.CharField(max_length=255, widget=forms.HiddenInput())
|
Germanika/plover
|
plover/system/english_stenotype.py
|
Python
|
gpl-2.0
| 6,232
| 0.024069
|
KEYS = (
'#',
'S-', 'T-', 'K-', 'P-', 'W-', 'H-', 'R-',
'A-', 'O-',
'*',
'-E', '-U',
'-F', '-R', '-P', '-B', '-L', '-G', '-T', '-S', '-D', '-Z',
)
IMPLICIT_HYPHEN_KEYS = ('A-', 'O-', '5-', '0-', '-E', '-U', '*')
SUFFIX_KEYS = ('-S', '-G', '-Z', '-D')
NUMBER_KEY = '#'
NUMBERS = {
'S-': '1-',
'T-': '2-',
'P-': '3-',
'H-': '4-',
'A-': '5-',
'O-': '0-',
'-F': '-6',
'-P': '-7',
'-L': '-8',
'-T': '-9',
}
UNDO_STROKE_STENO = '*'
ORTHOGRAPHY_RULES = [
# == +ly ==
# artistic + ly = artistically
(r'^(.*[aeiou]c) \^ ly$', r'\1ally'),
# == +ry ==
# statute + ry = statutory
(r'^(.*t)e \^ ry$', r'\1ory'),
# == t +cy ==
# frequent + cy = frequency (tcy/tecy removal)
(r'^(.*[naeiou])te? \^ cy$', r'\1cy'),
# == +s ==
# establish + s = establishes (sibilant pluralization)
(r'^(.*(?:s|sh|x|z|zh)) \^ s$', r'\1es'),
# speech + s = speeches (soft ch pluralization)
(r'^(.*(?:oa|ea|i|ee|oo|au|ou|l|n|(?<![gin]a)r|t)ch) \^ s$', r'\1es'),
# cherry + s = cherries (consonant + y pluralization)
(r'^(.+[bcdfghjklmnpqrstvwxz])y \^ s$', r'\1ies'),
# == y ==
# die+ing = dying
(r'^(.+)ie \^ ing$', r'\1ying'),
# metallurgy + ist = metallurgist
(r'^(.+[cdfghlmnpr])y \^ ist$', r'\1ist'),
# beauty + ful = beautiful (y -> i)
    (r'^(.+[bcdfghjklmnpqrstvwxz])y \^ ([a-hj-xz].*)$', r'\1i\2'),
# == e ==
# write + en = written
(r'^(.+)te \^ en$', r'\1tten'),
# free + ed = freed
(r'^(.+e)e \^ (e.+)$', r'\1\2'),
# narrate + ing = narrating (silent e)
(r'^(.+[bcdfghjklmnpqrstuvwxz])e \^ ([aeiouy].*)$', r'\1\2'),
# == misc ==
# defer + ed = deferred (consonant doubling) XXX monitor(stress not on last syllable)
(r'^(.*(?:[bcdfghjklmnprstvwxyz]|qu)[aeiou])([bcdfgklmnprtvz]) \^ ([aeiouy].*)$', r'\1\2\2\3'),
]
ORTHOGRAPHY_RULES_ALIASES = {
'able': 'ible',
}
ORTHOGRAPHY_WORDLIST = 'american_english_words.txt'
KEYMAPS = {
'Gemini PR': {
'#' : ('#1', '#2', '#3', '#4', '#5', '#6', '#7', '#8', '#9', '#A', '#B', '#C'),
'S-' : ('S1-', 'S2-'),
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : ('*1', '*2', '*3', '*4'),
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op' : ('Fn', 'pwr', 'res1', 'res2'),
},
'Keyboard': {
'#' : ('1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '-', '='),
'S-' : ('a', 'q'),
'T-' : 'w',
'K-' : 's',
'P-' : 'e',
'W-' : 'd',
'H-' : 'r',
'R-' : 'f',
'A-' : 'c',
'O-' : 'v',
'*' : ('t', 'g', 'y', 'h'),
'-E' : 'n',
'-U' : 'm',
'-F' : 'u',
'-R' : 'j',
'-P' : 'i',
'-B' : 'k',
'-L' : 'o',
'-G' : 'l',
'-T' : 'p',
'-S' : ';',
'-D' : '[',
'-Z' : '\'',
'arpeggiate': 'space',
# Suppress adjacent keys to prevent miss-strokes.
'no-op' : ('z', 'x', 'b', ',', '.', '/', ']', '\\'),
},
'Passport': {
'#' : '#',
'S-' : ('S', 'C'),
'T-' : 'T',
'K-' : 'K',
'P-' : 'P',
'W-' : 'W',
'H-' : 'H',
'R-' : 'R',
'A-' : 'A',
'O-' : 'O',
'*' : ('~', '*'),
'-E' : 'E',
'-U' : 'U',
'-F' : 'F',
'-R' : 'Q',
'-P' : 'N',
'-B' : 'B',
'-L' : 'L',
'-G' : 'G',
'-T' : 'Y',
'-S' : 'X',
'-D' : 'D',
'-Z' : 'Z',
'no-op': ('!', '^', '+'),
},
'Stentura': {
'#' : '#',
'S-' : 'S-',
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : '*',
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op': '^',
},
'TX Bolt': {
'#' : '#',
'S-' : 'S-',
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : '*',
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
},
'Treal': {
'#' : ('#1', '#2', '#3', '#4', '#5', '#6', '#7', '#8', '#9', '#A', '#B'),
'S-' : ('S1-', 'S2-'),
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : ('*1', '*2'),
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op': ('X1-', 'X2-', 'X3'),
},
}
DICTIONARIES_ROOT = 'asset:plover:assets'
DEFAULT_DICTIONARIES = ('main.json', 'commands.json', 'user.json')
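# Illustration of how the first orthography rule above applies (a sketch:
# Plover's real lookup machinery also consults the word list named in
# ORTHOGRAPHY_WORDLIST, which this deliberately ignores).
if __name__ == '__main__':
    import re
    pattern, repl = ORTHOGRAPHY_RULES[0]
    print(re.sub(pattern, repl, 'artistic ^ ly'))  # -> artistically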
|
0xf4/pythonrc
|
pythonrc.py
|
Python
|
mit
| 19,310
| 0.000414
|
#!/usr/bin/python
r"""
PYTHONRC
========
Initialization script for the interactive Python interpreter. Its main purpose
is to enhance the overall user experience when working in such an environment
by adding some niceties to the standard console.
It also works with IPython and BPython, although its utility in that kind of
scenarios can be argued.
Tested in GNU/Linux with Python versions 2.7 and 3.4.
Please read the Installation section below.
Features
--------
- User input completion
+ Introduces a completion mechanism for inputted commands in Python 2.
+ In Python 3, where the standard console is a lot nicer, it just
impersonates the default completion machinery to keep the consistency with
the behavior in Python 2 (and so it's still possible to adapt it to the
user's needs).
- Command History
+ Creates a callable, singleton object called `history`, placing it into
the `__builtins__` object to make it easily available, which enables the
handling of the command history (saving some input lines to a file of your
choice, listing the commands introduced so far, etc.). Try simply
`history()` on the Python prompt to see it in action; inspect its members
(with `dir(history)` or `help(history.write)`) for more information.
- Color prompt
+ Puts a colorful prompt in place, if the terminal supports it.
- Implementation of a bash's "operate-and-get-next" clone
+ Enables a quick re-edition of a code block from the history by
successive keypresses of the `Ctrl-o` hotkey.
Installation
------------
- You must define in your environment (in GNU/Linux and MacOS X that usually
means your `~/.bashrc` file) the variable 'PYTHONSTARTUP' containing the path
to `pythonrc.py`.
- It is also highly recommended to define the variable 'PYTHON_HISTORY_FILE'.
Remember that BPython (unlike the standard interpreter or IPython) ignores that
variable, so you'll have to configure it as well by other means to be able to
use the same history file there (for instance, in Linux, the file
`~/.config/bpython/config` is a good place to start, but please read BPython's
documentation).
### Example configurations
- Extract of `~/.bashrc`
```sh
# python
export PYTHONSTARTUP=~/.python/pythonrc.py
export PYTHON_HISTORY_FILE=~/.python/.python_history
## You may want to also uncomment some of these lines if using an old
## version of virtualenvwrapper
# export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3.4
# export WORKON_HOME=~/.python/virtualenvs
# source $(which virtualenvwrapper.sh)
```
- Extract of `~/.config/bpython/config`
```
[general]
color_scheme = default
hist_file = ~/.python/.python_history
hist_length = 1000
```
Bugs / Caveats / Future enhancements
------------------------------------
- No module/package introspection for the last argument in commands of the form
`from <package> import <not_completing_this>` (this, in fact, could be a not so
bad thing, because it doesn't execute side effects, e.g. modules' init code).
- Depending on the user's system, the compilation of the packages' and modules'
list for completing `import ...` and `from ... import ...` commands can take a
long time, especially the first time it is invoked.
- When completing things like a method's name, the default is to also include
the closing parenthesis along with the opening one, but the cursor is placed
after it no matter what, instead of between them. This is because of the
python module `readline`'s limitations.
You can turn off the inclusion of the closing parenthesis; if you do so, you
might be also interested in modifying the variable called
`dict_keywords_postfix` (especially the strings that act as that dictionary's
indexes).
- IPython has its own `%history` magic. I did my best to not interfere with
it, but I don't know the actual consequences. Also, it's debatable if it
even makes sense to use this file with IPython and/or BPython (though having
a unified history for all the environments is really nice).
You could define some bash aliases like
```sh
alias ipython='PYTHONSTARTUP="" ipython'
alias bpython='PYTHONSTARTUP="" bpython'
```
to be on the safer side.
- Could have used the module `six` for better clarity. Right now it uses my own
made up stubs to work on both Python 2 and 3.
- Needs better comments and documentation, especially the part on history
handling.
- Probably a lot more. Feel free to file bug reports ;-)
"""
def init():
# color prompt
import sys
import os
term_with_colors = ['xterm', 'xterm-color', 'xterm-256color', 'linux',
'screen', 'screen-256color', 'screen-bce']
red = ''
green = ''
reset = ''
if os.environ.get('TERM') in term_with_colors:
escapes_pattern = '\001\033[%sm\002' # \001 and \002 mark non-printing
red = escapes_pattern % '31'
green = escapes_pattern % '32'
reset = escapes_pattern % '0'
sys.ps1 = red + '>>> ' + reset
sys.ps2 = green + '... ' + reset
red = red.strip('\001\002')
green = green.strip('\001\002')
reset = reset.strip('\001\002')
# readline (tab-completion, history)
try:
import readline
except ImportError:
print(red + "Module 'readline' not available. Skipping user customizations." + reset)
return
import rlcompleter
import atexit
from pwd import getpwall
from os.path import isfile, isdir, expanduser, \
join as joinpath, split as splitpath, sep as pathsep
default_history_file = '~/.pythonhist'
majver = sys.version_info[0]
# Both BPython and Django shell change the nature of the __builtins__
# object. This hack workarounds that:
def builtin_setattr(attr, value):
if hasattr(__builtins__, '__dict__'):
setattr(__builtins__, attr, value)
else:
__builtins__[attr] = value
def builtin_getattr(attr):
if hasattr(__builtins__, '__dict__'):
return getattr(__builtins__, attr)
else:
return __builtins__[attr]
# My own "six" library, where I define the following stubs:
# * myrange for xrange() (python2) / range() (python3)
# * exec_stub for exec()
# * iteritems for dict.iteritems() (python2) / list(dict.items()) (python3)
# I could have done "from six import iteritems" and such instead of this
if majver == 2:
myrange = xrange
def exec_stub(textcode, globalz=None, localz=None):
# the parenthesis make it valid python3 syntax, do nothing at all
exec (textcode) in globalz, localz
def iteritems(d):
return d.iteritems()
elif majver == 3:
myrange = range
# def exec_stub(textcode, globalz=None, localz=None):
# # the "in" & "," make it valid python2 syntax, do nothing useful
# exec(textcode, globalz, localz) in globalz #, localz
# the three previous lines work, but this is better
exec_stub = builtin_getattr('exec')
def iteritems(d):
return list(d.items())
# AUXILIARY CLASSES
# History management
class History:
set_length = readline.set_history_length
get_length = readline.get_history_length
get_current_length = readline.get_current_history_length
get_item = readline.get_history_item
write = readline.write_history_file
def __init__(self, path=default_history_file, length=500):
self.path = path
self.reload(path)
self.set_length(length)
def __exit__(self):
print("Saving history (%s)..." % self.path)
self.write(expanduser(self.path))
def __repr__(self):
"""print out current history information"""
# length = self.get_current_length()
# command = self.get_item(length)
# if command == 'history':
# return "\n".join(self.get_item(i)
# for i in myrange(1, length+1))
# else:
# return '<%s instance>' % str(self.__class__)
return '<%s instance>' % str(self.__class__)
def __
|
skunkwerks/netinf
|
python/nilib/nicl.py
|
Python
|
apache-2.0
| 8,160
| 0.003922
|
#!/usr/bin/python
"""
@package nilib
@file nicl.py
@brief Basic command line client for NI names, make 'em and check 'em
@version $Revision: 0.04 $ $Author: elwynd $
@version Copyright (C) 2012 Trinity College Dublin
This is an adjunct to the NI URI library developed as
part of the SAIL project. (http://sail-project.eu)
Specification(s) - note, versions may change
- http://tools.ietf.org/html/draft-farrell-decade-ni-10
- http://tools.ietf.org/html/draft-hallambaker-decade-ni-params-03
- http://tools.ietf.org/html/draft-kutscher-icnrg-netinf-proto-00
Copyright 2012 Trinity College Dublin
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================================================
@code
Revision History
================
Version Date Author Notes
0.4 16/06/2012 Elwyn Davies Completed revision history and ref number.
0.3 12/10/2012 Elwyn Davies Renamed main routine for convenience with
setuputils in nilib package.
0.2    12/10/2012 Elwyn Davies   Updated comments and specification refs.
0.1 01/06/2012 Elwyn Davies Updated to provide -w, -m and -b and
cope with nih: scheme.
0.0 12/02/2012 Elwyn Davies Created for NetInf codsprint.
@endcode
"""
import sys
import base64  # used below when printing binary names (-b)
from optparse import OptionParser
from ni import ni_errs, ni_errs_txt, NIname, NIproc
def py_nicl():
"""
@brief Command line program to generate and validate digests in ni: URLs.
Uses NIproc global instance of NI operations class
Run:
> nicl.py --help
to see usage and options.
"""
# Options parsing and verification stuff
usage = "%prog [-g|-w|-v] -n <name> -f <pathname of content file> [-V]\n"
usage = usage + " %prog -m -n <name> [-V]\n"
usage = usage + " %prog -b -s <suite_number> -f <pathname of content file> [-V]\n"
usage = usage + " The name can be either an ni: or nih: scheme URI\n"
usage = usage + " Return code: success 0, failure non-zero (-V for more info)\n"
usage = usage + " Available hashalg (suite number) options:\n"
usage = usage + " %s" % NIname.list_algs()
parser = OptionParser(usage)
parser.add_option("-g", "--generate", default=False,
action="store_true", dest="generate",
help="Generate hash based on content file, " + \
"and output name with encoded hash after the hashalg string")
parser.add_option("-w", "--well-known", default=False,
action="store_true", dest="well_known",
                      help="Generate hash based on content file, " + \
"and output name with encoded hash in the .well_known URL " + \
"after the hashalg string. Applies to ni: scheme only.")
parser.add_option("-v", "--verify", default=False,
action="store_true", dest="verify",
help="Verify hash in name is correct for content file")
parser.add_option("-m", "--map", default=False,
action="store_true", dest="map_wkn",
help="Maps from an ni: name to a .well-known URL")
parser.add_option("-b", "--binary", default=False,
action="store_true", dest="bin",
help="Outputs the name in binary format for a given suite number")
parser.add_option("-V", "--verbose", default=False,
action="store_true", dest="verbose",
help="Be more long winded.")
parser.add_option("-n", "--ni-name", dest="ni_name",
type="string",
help="The ni name template for (-g) or ni name matching (-v) content file.")
parser.add_option("-f", "--file", dest="file_name",
type="string",
help="File with content data named by ni name.")
parser.add_option("-s", "--suite-no", dest="suite_no",
type="int",
help="Suite number for hash algorithm to use.")
(opts, args) = parser.parse_args()
if not (opts.generate or opts.well_known or opts.verify or
opts.map_wkn or opts.bin ):
parser.error( "Must specify one of -g/--generate, -w/--well-known, -v/--verify, -m/--map or -b/--binary.")
if opts.generate or opts.well_known or opts.verify:
if (opts.ni_name == None) or (opts.file_name == None):
parser.error("Must specify both name and content file name for -g, -w or -v.")
if opts.map_wkn:
if (opts.ni_name == None):
parser.error("Must specify ni name for -m.")
if opts.bin:
if (opts.suite_no == None) or (opts.file_name == None):
parser.error("Must specify both suite number and content file name for -b.")
if len(args) != 0:
parser.error("Too many or unrecognised arguments specified")
# Execute requested action
if opts.generate:
n = NIname(opts.ni_name)
ret = NIproc.makenif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print "%s" % n.get_url()
sys.exit(0)
if opts.verbose:
print "Name could not be successfully generated."
elif opts.well_known:
n = NIname(opts.ni_name)
if n.get_scheme() == "nih":
if opts.verbose:
print "Only applicable to ni: scheme names."
sys.exit(1)
ret = NIproc.makenif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print "%s" % n.get_wku_transform()
sys.exit(0)
if opts.verbose:
print "Name could not be successfully generated"
elif opts.verify:
n = NIname(opts.ni_name)
ret = NIproc.checknif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name matches content file.")
print "%s" % n.get_url()
sys.exit(0)
if opts.verbose:
print "Check of name against content failed."
elif opts.map_wkn:
n = NIname(opts.ni_name)
ret = n.validate_ni_url(has_params = True)
if ret == ni_errs.niSUCCESS:
if n.get_scheme() == "nih":
if opts.verbose:
print "Only applicable to ni: scheme names."
sys.exit(1)
if opts.verbose:
print("Name validated successfully.")
print "%s" % n.get_wku_transform()
sys.exit(0)
else:
if opts.verbose:
print "Name could not be successfully validated."
elif opts.bin:
(ret, bin_name) = NIproc.makebnf(opts.suite_no, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print base64.b16encode(str(bin_name))
sys.exit(0)
else:
if opts.verbose:
print "Name could not be successfully generated."
else:
print"Should not have happened"
sys.exit(2)
# Print appropriate error message
if opts.verbose:
print "Error: %s" % ni_errs_txt[ret]
sys.exit(1)
sys.exit(0)
#-------------------------------------------------------------------------------
if __name__ == "__main__":
py_nicl()
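# Example invocations, following the usage string assembled above
# (<digest> is a placeholder; run with --help for the full option list):
#   nicl.py -g -n 'ni:///sha-256' -f content.bin
#   nicl.py -v -n 'ni:///sha-256;<digest>' -f content.bin
#   nicl.py -m -n 'ni:///sha-256;<digest>'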
|
cogeorg/econlib
|
test_paralleltools.py
|
Python
|
gpl-3.0
| 601
| 0.003328
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
__author__ = """Co-Pierre Georg (co-pierre.georg@uct.ac.za)"""
import sys
from src.paralleltools import Parallel
#-------------------------------------------------------------------------
#
# conftools.py is a simple module to manage .xml configuration files
#
#-------------------------------------------------------------------------
if __name__ == '__main__':
"""
VARIABLES
"""
args = sys.argv
config_file_name = args[1]
"""
CODE
"""
parallel = Parallel()
    parallel.create_config_files(config_file_name)
|
duncan-r/SHIP
|
ship/tuflow/tuflowmodel.py
|
Python
|
mit
| 22,689
| 0.003261
|
"""
Summary:
Container and main interface for accessing the Tuflow model and a class
for containing the main tuflow model files (Tcf, Tgc, etc).
There are several other classes in here that are used to determine the
order of the files in the model and key words for reading in the files.
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
from __future__ import unicode_literals
from itertools import chain
from ship.tuflow.tuflowfilepart import TuflowFile, TuflowKeyValue, TuflowUserVariable, TuflowModelVariable
from ship.tuflow import FILEPART_TYPES as fpt
from ship.utils import utilfunctions as uf
import logging
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
class TuflowModel(object):
"""Container for the entire loaded tuflow model.
"""
def __init__(self, root):
"""Initialise constants and dictionaries.
"""
self.control_files = {}
"""Tuflow Control File objects.
All types of Tuflow Control file are stored here under the type header.
Types are: TCF, TGC, TBC, ECF, TEF.
TCF is slightly different to the others as it contains an additional
member variable 'main_file_hash' to identify the main tcf file that
was called to load the model.
"""
self._root = ''
"""The current directory path used to reach the run files in the model"""
self.missing_model_files = []
"""Contains any tcf, tgs, etc files that could not be loaded."""
self.bc_event = {}
"""Contains the currently acitve BC Event variables."""
self.user_variables = None
"""Class containing the scenario/event/variable keys and values."""
@property
def root(self):
return self._root
@root.setter
def root(self, value):
self._root = value
self.updateRoot(value)
def checkPathsExist(self):
"""Test that all of the filepaths in the TuflowModel exist."""
failed = []
for file_type, file in self.control_files.items():
failed.extend(file.checkPathsExist())
return failed
def updateRoot(self, root):
"""Update the root variable in all TuflowFile's in the model.
The root variable (TuflowModel.root) is the directory that the main
.tcf file is in. This is used to define the location of all other files
which are usually referenced relative to each other.
Note:
This method will be called automatically when setting the
TuflowModel.root variable.
Args:
root(str): the new root to set.
"""
for c in self.control_files.values():
c.updateRoot(root)
def customPartSearch(self, control_callback, tuflow_callback=None,
include_unknown=False):
"""Return TuflowPart's based on the return value of the callbacks.
control_callback will be used as an argument in each of
self.control_files' customPartSearch() methods. The tuflow_callback
will be called on the combined generators returned from that method.
See Also:
ControlFile.customPartSearch
Continuing the example in the ControlFile.customPartSearch method. This
        time the additional tuflow_callback function is defined as well.
callback_func must accept a TuflowPart and return a tuple of:
keep-status and the return value. For example::
# This is the callback_func that we test the TuflowPart. It is
# defined in your script
def callback_func(part):
# In this case we check for GIS parts and return a tuple of:
# - bool(keep-status): True if it is a GIS filepart_type
# - tuple: filename and parent.model_type. This can be
# whatever you want though
if part.filepart_type == fpt.GIS:
return True, (part.filename, part.associates.parent.model_type)
# Any TuflowPart's that you don't want included must return
# a tuple of (False, None)
else:
return False, None
# Here we define a function to run after the generators are returned
            # from callback_func. In the function above the return type is a
# tuple, so we accept that as the arg in this function, but it will
# be whatever you return from callback_func above.
# This function checks to see if there are any duplicate filename's.
# Note that it must return the same tuple as the other callback.
# i.e. keep-status, result
def tuflow_callback(part_tuple):
found = []
if part_tuple[0] in found:
return False, None
else:
return True, part_tuple[0]
# Both callback's given this time
results = tuflow.customPartSearch(callback,
tuflow_callback=tuflowCallback)
            # You can now iterate the results
for r in results:
print (str(r))
Args:
callback_func(func): a function to run for each TuflowPart in
this ControlFile's PartHolder.
include_unknown=False(bool): If False any UnknownPart's will be
                ignored. If set to True it is the responsibility of the
callback_func to check for this and deal with it.
Return:
generator - containing the results of the search.
"""
gens = []
for c in self.control_files.values():
gens.append(
c.customPartSearch(control_callback, include_unknown)
)
        all_gens = chain(gens)
for a in all_gens:
for val in a:
if tuflow_callback:
take, value = tuflow_callback(val)
if take:
                        yield [value]
else:
yield [val]
def removeTcfModelFile(self, model_file):
"""Remove an existing ModelFile from 'TCF' and update ControlFile.
Note:
You can call this function directly if you want to, but it is also
hooked into a callback in the TCF ControlFile. This means that when
you use the standard ControlFile add/remove/replaceControlFile()
methods these will be called automatically.
Args:
model_files(ModelFile): the ModelFile being removed.
"""
|
        if not model_file in self.control_files[model_file.model_type].control_files:
raise AttributeError("model_file doesn't exists in %s control_files" % model_file.model_type)
self.control_files[model_file.model_type].removeControlFile(model_file)
self.control_files['TCF'].parts.remove(model_file)
def replaceTcfModelFile(self, model_file, control_file, replace_file):
"""Replace an existing ModelFile in 'TCF' and update ControlFile.
|
Note:
You can call this function directly if you want to, but it is also
hooked into a callback in the TCF ControlFile. This means that when
you use the standard ControlFile add/remove/replaceControlFile()
methods these will be called automatically.
Args:
model_file(ModelFile): the replacement TuflowPart.
control_file(ControlFile): containing the contents to replace the
existing ControlFile.
replace_file(ModelFile): the TuflowPart to be replaced.
"""
if model_file in self.control_files[model_file.model_type].control_files:
raise AttributeError('model_file already exists in this ControlFile')
self.control_files[replace_file.model_type].replaceControlFile(
model_file, control_file, replace_file)
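# Editor's note: a hedged usage sketch, not part of the original module. It
# assumes `tuflow` is a TuflowModel produced by the package's loader and reuses
# the GIS-filtering callback pattern from the customPartSearch docstring above.
# def gis_only(part):
#     if part.filepart_type == fpt.GIS:
#         return True, part.filename
#     return False, None
#
# tuflow.root = '/new/run/location'   # the setter also calls updateRoot() on all control files
# for result in tuflow.customPartSearch(gis_only):
#     print(result)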
|
fangeugene/the-blue-alliance
|
tests/suggestions/test_media_url_parse.py
|
Python
|
mit
| 10,138
| 0.004636
|
import json
import unittest2
from google.appengine.ext import testbed
from consts.media_type import MediaType
from helpers.media_helper import MediaParser
from helpers.webcast_helper import WebcastParser
class TestMediaUrlParser(unittest2.TestCase):
def setUp(cls):
cls.testbed = testbed.Testbed()
cls.testbed.activate()
cls.testbed.init_urlfetch_stub()
def tearDown(cls):
cls.testbed.deactivate()
def test_youtube_parse(self):
yt_long = MediaParser.partial_media_dict_from_url("http://www.youtube.com/watch?v=I-IrVbsl_K8")
self.assertEqual(yt_long['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_long['foreign_key'], "I-IrVbsl_K8")
yt_short = MediaParser.partial_media_dict_from_url("http://youtu.be/I-IrVbsl_K8")
self.assertEqual(yt_short['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_short['foreign_key'], "I-IrVbsl_K8")
yt_from_playlist = MediaParser.partial_media_dict_from_url("https://www.youtube.com/watch?v=VP992UKFbko&index=1&list=PLZT9pIgNOV6ZE0EgstWeoRWGWT3uoaszm")
self.assertEqual(yt_from_playlist['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_from_playlist['foreign_key'], 'VP992UKFbko')
    # def test_cdphotothread_parse(self):
# cd = MediaParser.partial_media_dict_from_url(
# "https://www.chiefdelphi.com/media/photos/41999")
# self.assertEqual(cd['media_type_enum'], MediaType.CD_PHOTO_THREAD)
# self.assertEqual(cd['foreign_key'], "41999")
# self.assertTrue(cd['details_json'])
# details = json.loads(cd['details_json'])
# self.assertEqual(details['image_partial'], "a88/a880fa0d65c6b49ddb93323bc7d2e901_l.jpg")
def test_imgur_parse(self):
imgur_img = MediaParser.partial_media_dict_from_url("http://imgur.com/zYqWbBh")
self.assertEqual(imgur_img['media_type_enum'], MediaType.IMGUR)
self.assertEqual(imgur_img['foreign_key'], "zYqWbBh")
imgur_img = MediaParser.partial_media_dict_from_url("http://i.imgur.com/zYqWbBh.png")
self.assertEqual(imgur_img['media_type_enum'], MediaType.IMGUR)
self.assertEqual(imgur_img['foreign_key'], "zYqWbBh")
|
self.assertEqual(MediaParser.partial_media_dict_from_url("http://imgur.com/r/aww"), None)
self.assertEqual(MediaParser.partial_media_dict_from_url("http://imgur.com/a/album"), None)
def test_fb_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("http://facebook.com/theuberbots")
|
        self.assertEqual(result['media_type_enum'], MediaType.FACEBOOK_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'theuberbots')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.FACEBOOK_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.facebook.com/theuberbots')
def test_twitter_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://twitter.com/team1124")
self.assertEqual(result['media_type_enum'], MediaType.TWITTER_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'team1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.TWITTER_PROFILE])
self.assertEqual(result['profile_url'], 'https://twitter.com/team1124')
def test_youtube_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/Uberbots1124")
self.assertEqual(result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'uberbots1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(result['profile_url'], 'https://www.youtube.com/uberbots1124')
short_result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/Uberbots1124")
self.assertEqual(short_result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(short_result['is_social'], True)
self.assertEqual(short_result['foreign_key'], 'uberbots1124')
self.assertEqual(short_result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(short_result['profile_url'], 'https://www.youtube.com/uberbots1124')
gapps_result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/c/tnt3102org")
self.assertEqual(gapps_result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(gapps_result['is_social'], True)
self.assertEqual(gapps_result['foreign_key'], 'tnt3102org')
self.assertEqual(gapps_result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(gapps_result['profile_url'], 'https://www.youtube.com/tnt3102org')
def test_github_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://github.com/frc1124")
self.assertEqual(result['media_type_enum'], MediaType.GITHUB_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'frc1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.GITHUB_PROFILE])
self.assertEqual(result['profile_url'], 'https://github.com/frc1124')
def test_instagram_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.instagram.com/4hteamneutrino")
self.assertEqual(result['media_type_enum'], MediaType.INSTAGRAM_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], '4hteamneutrino')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.INSTAGRAM_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.instagram.com/4hteamneutrino')
def test_periscope_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.periscope.tv/evolution2626")
self.assertEqual(result['media_type_enum'], MediaType.PERISCOPE_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'evolution2626')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.PERISCOPE_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.periscope.tv/evolution2626')
def test_grabcad_link(self):
result = MediaParser.partial_media_dict_from_url("https://grabcad.com/library/2016-148-robowranglers-1")
self.assertEqual(result['media_type_enum'], MediaType.GRABCAD)
self.assertEqual(result['is_social'], False)
self.assertEqual(result['foreign_key'], '2016-148-robowranglers-1')
details = json.loads(result['details_json'])
self.assertEqual(details['model_name'], '2016 | 148 - Robowranglers')
self.assertEqual(details['model_description'], 'Renegade')
self.assertEqual(details['model_image'], 'https://d2t1xqejof9utc.cloudfront.net/screenshots/pics/bf832651cc688c27a78c224fbd07d9d7/card.jpg')
self.assertEqual(details['model_created'], '2016-09-19T11:52:23Z')
# 2020-12-31 zach - I'm disabling this test because 1) it's failing and 2) we shouldn't be hitting the network during unit tests
# def test_instagram_image(self):
# result = MediaParser.partial_media_dict_from_url("https://www.instagram.com/p/BUnZiriBYre/")
# self.assertEqual(result['media_type_enum'], MediaType.INSTAGRAM_IMAGE)
# self.assertEqual(result['foreign_key'], "BUnZiriBYre")
# details = json.loads(result['details_json'])
# self.assertEqual(details['title'], "FRC 195 @ 2017 Battlecry @ WPI")
# self.assertEqual(details['author_name'], '1stroboticsrocks')
# self.assertIsNotNone(details.get('thumbnail_url', None))
def test_unsupported_url_parse(self):
self.assertEqual(MediaParser.partial_media_dict_from_url("http://foo.bar"), None)
class
|
Tasignotas/topographica_mirror
|
topo/base/arrayutil.py
|
Python
|
bsd-3-clause
| 4,625
| 0.017514
|
"""
General utility functions and classes for Topographica that require numpy.
"""
import re
|
from numpy import sqrt,dot,arctan2,array2string,fmod,floor,array, \
unravel_index,concatenate,set_printoptions,divide,maximum,minimum
from numpy import ufunc
import param
# Ask numpy to print even relatively large arrays by default
set_printoptions(threshold=200*200)
def ufunc_script_repr(f,imports,prefix=None,settings=None):
"""
Return a runnable representation of the numpy ufunc f, and an
    import statement for its module.
|
"""
# (could probably be generalized if required, because module is
# f.__class__.__module__)
imports.append('import numpy')
return 'numpy.'+f.__name__
from param import parameterized
parameterized.script_repr_reg[ufunc]=ufunc_script_repr
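# Editor's sketch: the registered repr turns a ufunc back into a runnable
# expression and records the import it needs (`maximum` is imported above).
# >>> imports = []
# >>> ufunc_script_repr(maximum, imports)
# 'numpy.maximum'
# >>> imports
# ['import numpy']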
def L2norm(v):
"""
Return the L2 norm of the vector v.
"""
return sqrt(dot(v,v))
def divisive_normalization(weights):
"""Divisively normalize an array to sum to 1.0"""
s = weights.sum()
if s != 0:
factor = 1.0/s
weights *= factor
def add_border(matrix,width=1,value=0.0):
"""
Returns a new matrix consisting of the given matrix with a border
or margin of the given width filled with the given value.
"""
rows,cols = matrix.shape
hborder = array([ [value]*(cols+2*width) ]*width)
vborder = array([ [value]*width ] * rows)
temp = concatenate( (vborder,matrix,vborder), axis=1)
return concatenate( (hborder,temp,hborder) )
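# e.g. (editor's sketch):
# >>> add_border(array([[1., 2.], [3., 4.]]))
# array([[ 0.,  0.,  0.,  0.],
#        [ 0.,  1.,  2.,  0.],
#        [ 0.,  3.,  4.,  0.],
#        [ 0.,  0.,  0.,  0.]])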
def arg(z):
"""
Return the complex argument (phase) of z.
    (The result is in radians.)
"""
z = z + complex(0,0) # so that arg(z) also works for real z
return arctan2(z.imag, z.real)
def octave_str(mat,name="mat",owner=""):
"""
Print the given Numpy matrix in Octave format, listing the given
matrix name and the object that owns it (if any).
"""
# This just prints the string version of the matrix and does search/replace
# to convert it; there may be a faster or easier way.
mstr=array2string(mat)
mstr=re.sub('\n','',mstr)
mstr=re.sub('[[]','',mstr)
mstr=re.sub('[]]','\n',mstr)
return ("# Created from %s %s\n# name: %s\n# type: matrix\n# rows: %s\n# columns: %s\n%s" %
(owner,name,name,mat.shape[0],mat.shape[1],mstr))
def octave_output(filename,mat,name="mat",owner=""):
"""Writes the given matrix to a new file of the given name, in Octave format."""
f = open(filename,'w')
f.write(octave_str(mat,name,owner))
f.close()
def centroid(array_2D):
"""Return the centroid (center of gravity) for a 2D array."""
rows,cols = array_2D.shape
rsum=0
csum=0
rmass_sum=0
cmass_sum=0
for r in xrange(rows):
row_sum = array_2D[r,:].sum()
rsum += r*row_sum
rmass_sum += row_sum
for c in xrange(cols):
col_sum = array_2D[:,c].sum()
csum += c*col_sum
cmass_sum += col_sum
row_centroid= rsum/rmass_sum
col_centroid= csum/cmass_sum
return row_centroid, col_centroid
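# e.g. (editor's sketch): all mass concentrated at row 2, column 3
# >>> centroid(array([[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 1.]]))
# (2.0, 3.0)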
def clip_lower(arr,lower_bound):
"""
In-place, one-sided version of numpy.clip().
i.e. numpy.clip(arr,a_min=lower_bound,out=arr) if it existed.
"""
maximum(arr,lower_bound,arr)
def clip_upper(arr,upper_bound):
"""
In-place, one-sided version of numpy.clip().
i.e. numpy.clip(arr,a_max=upper_bound,out=arr) if it existed.
"""
minimum(arr,upper_bound,arr)
def wrap(lower, upper, x):
"""
Circularly alias the numeric value x into the range [lower,upper).
Valid for cyclic quantities like orientations or hues.
"""
#I have no idea how I came up with this algorithm; it should be simplified.
#
# Note that Python's % operator works on floats and arrays;
# usually one can simply use that instead. E.g. to wrap array or
# scalar x into 0,2*pi, just use "x % (2*pi)".
range_=upper-lower
return lower + fmod(x-lower + 2*range_*(1-floor(x/(2*range_))), range_)
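# e.g. (editor's sketch): aliasing an orientation into [0, 360)
# >>> wrap(0, 360, 370.0)
# 10.0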
def array_argmax(arr):
"Returns the coordinates of the maximum element in the given array."
return unravel_index(arr.argmax(),arr.shape)
# CB: Is this of general interest? Used in gcal.ty.
class DivideWithConstant(param.Parameterized):
"""
Divide two scalars or arrays with a constant (c) offset on the
denominator to allow setting the gain or to avoid divide-by-zero
issues. The non-constant part of the denominator (y) is clipped
to ensure that it has only positive values.
"""
c = param.Number(default=1.0)
def __call__(self, x, y):
return divide(x,maximum(y,0)+self.c)
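# e.g. (editor's sketch): negative denominator entries are clipped to 0 before
# the constant offset is added, so no divide-by-zero occurs.
# >>> DivideWithConstant(c=1.0)(array([2., 4.]), array([-1., 1.]))
# array([ 2.,  2.])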
|
ninjin/simsem
|
experiment/learning.py
|
Python
|
isc
| 17,339
| 0.006863
|
'''
Learning-curve test functionality.
Author: Pontus Stenetorp <pontus stenetorp se>
Version: 2011-08-29
'''
from collections import defaultdict
from itertools import chain, izip
from operator import itemgetter
from os.path import join as path_join
from random import sample, seed
from sys import stderr
from common import compress, simstring_caching
from maths import mean, stddev
from scoring import score_classifier_by_tup, score_classifier_by_tup_ranked
try:
from cPickle import dump as pickle_dump, load as pickle_load
except ImportError:
from pickle import dump as pickle_dump, load as pickle_load
def __train_fold(args):
return _train_fold(*args)
def _train_fold(classifier, train_fold):
train_fold_lbls = [lbl for lbl, _ in train_fold]
train_fold_vecs = [vec for _, vec in train_fold]
assert len(train_fold_lbls) == len(train_fold_vecs)
classifier._train(train_fold_lbls, train_fold_vecs)
return len(train_fold_vecs), classifier
def _score_classifier(classifier, test_lbls, test_vecs):
score = score_classifier_by_tup(classifier,
(test_lbls, test_vecs))
# XXX: Hooking new scores into the old learning
new_score = score_classifier_by_tup_ranked(classifier,
(test_lbls, test_vecs), unseen=True)
return score, new_score
def _train_fold_gen(data_set, min_perc, max_perc, step_perc, it_factor):
set_size = len(data_set)
# Start with the largest folds, they take longer to process
for p in xrange(max_perc, min_perc - 1, -step_perc):
# Sample size for this iteration
sample_size = int((p / 100.0) * set_size)
if it_factor is not None:
folds = int(int(set_size / float(sample_size)) * it_factor)
else:
folds = 1
if p == 100:
# We can't sample when we use the whole set...
folds = 1
        # Heuristic to keep us from drawing too few subsamples
elif folds < 4:
folds = 4
for _ in xrange(folds * 2):
yield sample(data_set, sample_size)
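# Editor's note (illustrative numbers, not from the source): with a 1000-item
# data_set and it_factor=1, the p=10 step samples 100 items and yields
# folds*2 = 20 random subsamples (folds = int(1000/100) * 1 = 10), while
# p=100 yields only 2 passes over the full set because folds is forced to 1
# when sampling the whole set is meaningless.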
def _learning_curve_test_data_set(classifiers, train, test,
|
worker_pool, verbose=False, no_simstring_cache=False,
use_test_set=False, folds=10, min_perc=5, max_perc=100, step_perc=5,
|
it_factor=1):
# XXX: Not necessary any more!
if verbose:
print >> stderr, 'Calculating train set size...',
train_size = 0
for d in train:
for s in d:
for a in s:
train_size += 1
if verbose:
print >> stderr, 'Done!'
# XXX:
if not no_simstring_cache:
simstring_caching(classifiers, (train, test), verbose=verbose)
# Collect the seen type to iterate over later
seen_types = set()
results_by_classifier = {}
for classifier_id, classifier_class in classifiers.iteritems():
if verbose:
print >> stderr, 'Classifier:', classifier_id, '...',
from classifier.liblinear import hashabledict
classifier = classifier_class()
if verbose:
print >> stderr, 'featurising train:', '...',
train_lbls, train_vecs = classifier._gen_lbls_vecs(train)
train_set = [e for e in izip(train_lbls, train_vecs)]
assert len(train_lbls) == train_size, '{} != {}'.format(
len(train_lbls), train_size)
assert len(train_vecs) == train_size, '{} != {}'.format(
len(train_vecs), train_size)
assert len(train_set) == train_size, '{} != {}'.format(
len(train_set), train_size)
del train_lbls
del train_vecs
if verbose:
print >> stderr, 'Done!',
print >> stderr, 'featurising test', '...',
test_lbls, test_vecs = classifier._gen_lbls_vecs(test)
test_vecs = [hashabledict(d) for d in test_vecs]
if verbose:
print >> stderr, 'Done!',
# Fix the seed so that we get comparable folds
seed(0xd5347d33)
args = ((classifier, fold) for fold in _train_fold_gen(train_set,
min_perc, max_perc, step_perc, it_factor))
if worker_pool is None:
res_it = (_train_fold(*arg) for arg in args)
else:
res_it = worker_pool.imap(__train_fold, args)
classifier_results = defaultdict(list)
print >> stderr, 'Training and evaluating models: ...',
i = 0
for sample_size, fold_classifier in res_it:
score, new_score = _score_classifier(fold_classifier, test_lbls,
test_vecs)
classifier_results[sample_size].append((score, new_score))
i += 1
if i % 10 == 0:
print >> stderr, i, '...',
print >> stderr, 'Done!'
# Process the results
for sample_size in sorted(e for e in classifier_results):
results = classifier_results[sample_size]
scores = [score for score, _ in results]
new_scores = [new_score for _, new_score in results]
macro_scores = [ms for ms, _, _, _, _ in scores]
micro_scores = [ms for _, ms, _, _, _ in scores]
tps = [tp for _, _, tp, _, _ in scores]
fns = [fn for _, _, _, fn, _ in scores]
res_dics = [d for _, _, _, _, d in scores]
# New metrics
ranks = [mean(rs) for rs, _, _ in new_scores]
ambiguities = [mean(ambs) for _, ambs, _ in new_scores]
recalls = [r for _, _, r in new_scores]
# These are means of means
ranks_mean = mean(ranks)
ranks_stddev = stddev(ranks)
ambiguities_mean = mean(ambiguities)
ambiguities_stddev = stddev(ambiguities)
recalls_mean = mean(recalls)
recalls_stddev = stddev(recalls)
classifier_result = (
mean(macro_scores), stddev(macro_scores),
mean(micro_scores), stddev(micro_scores),
mean(tps), stddev(tps),
mean(fns), stddev(fns),
res_dics,
# New metrics
ranks_mean, ranks_stddev,
ambiguities_mean, ambiguities_stddev,
recalls_mean, recalls_stddev
)
classifier_results[sample_size] = classifier_result
if verbose:
res_str = ('Results {size}: '
'MACRO: {0:.3f} MACRO_STDDEV: {1:.3f} '
'MICRO: {2:.3f} MICRO_STDDEV: {3:.3f} '
'TP: {4:.3f} FP: {5:.3f} '
'MEAN_RANK: {mean_rank:.3f} MEAN_RANK_STDDEV: {mean_rank_stddev:.3f} '
'AVG_AMB: {avg_amb:.3f} AVG_AMB_STDDEV: {avg_amb_stddev:.3f} '
'RECALL: {recall:.3f} RECALL_STDDEV: {recall_stddev:.3f}'
).format(*classifier_result,
size=sample_size,
mean_rank=ranks_mean,
mean_rank_stddev=ranks_stddev,
avg_amb=ambiguities_mean,
avg_amb_stddev=ambiguities_stddev,
recall=recalls_mean,
recall_stddev=recalls_stddev
)
print res_str
results_by_classifier[classifier_id] = classifier_results
return results_by_classifier
def learning_curve_test(classifiers, datasets, outdir,
verbose=False, no_simstring_cache=False, folds=10, worker_pool=None,
min_perc=5, max_perc=100, step_perc=5, it_factor=1,
pickle_name='learning', use_test_set=False
):
### This part is really generic
# TODO: We could keep old results... But dangerous, mix-up
results_file_path = _get_learning_pickle_path(outdir, pickle_name)
#XXX: RESUME GOES HERE!
results_by_dataset = {}
for dataset_id, dataset_getter in datasets.iteritems():
if verbose:
print >> stderr, 'Data set:', dataset_id
if verb
|
vprusso/youtube_tutorials
|
algorithms/recursion/str_len.py
|
Python
|
gpl-3.0
| 615
| 0.00813
|
# YouTube Video: https://www.youtube.com/watch?v=RRK0gd77Ln0
|
# Given a string, calculate the length of the string.
input_str = "LucidProgramming"
# Standard Pythonic way:
# print(len(input_str))
|
# Iterative length calculation: O(n)
def iterative_str_len(input_str):
input_str_len = 0
for i in range(len(input_str)):
input_str_len += 1
return input_str_len
# Recursive length calculation: O(n)
def recursive_str_len(input_str):
if input_str == '':
return 0
return 1 + recursive_str_len(input_str[1:])
print(iterative_str_len(input_str))
print(recursive_str_len(input_str))
|
mcook42/Py_R_Converters
|
py2r-syntax-converter.py
|
Python
|
mit
| 11,063
| 0.003073
|
"""
Written by Matthew Cook
Created August 4, 2016
mattheworion.cook@gmail.com
Script to do basic syntax conversion between Python 3 and R syntax.
What it does convert:
-assignment operator
-function definitions
-filename
-inline arithmetic (*=, +=, -=, **)
-':' to '{'
-'not' to '!'
-if statments
-add closing brackets on a newline
What it doesn't do:
-Python specific functions to R specific functions
-Add closing brackets with perfect indentation
TODO: Closing brackets indentation issue
"""
from os import path
from shutil import copyfile
# Define changes to make
simple_change = {"**" : "^",
" = " : " <- ",
":\n" : "{\n",
"not" : "!"}
complex_change = {"def " : "",
"+=" : '+',
"-=" : '-',
"*=" : '*'}
# Create flag stacks
flags = {}
flags['comment'] = [] # Multi-line Comment flag
flags['bracket'] = [] # Code block start flag
flags['b_start'] = [] # Indentation at code block start
# Create indent dictionary
indents = {}
# Define special characters to prevent ';' addition to line
s_char = ('}\n', '{\n', ':\n', '"""\n')
def createRfile():
"""Creates a new file named "filename.R" by removing .py extension"""
# Provide path to file
# Note: The new R file will be placed in the same directory
filename = input("Please copy and paste the path to your file here: ")
if not path.isfile(filename):
print("Filename was invalid")
filename = input("Please copy and paste the path to your file here: ")
# Test for valid file path
if path.exists(filename) :
# Strip the directory from the filename
new_name = path.basename(filename)
# Replace python extention with R extension
new_name = filename.replace(".py", ".R")
doesexist = path.exists(new_name)
if doesexist:
print("""The file already exists.
Creating a backup copy in current directory""")
# Split name at . and insert -copy. before extension
cpy_temp = new_name.split(sep=".",)
cpy_temp[0] += "-copy."
cpy = cpy_temp[0] + cpy_temp[1]
copyfile(new_name, cpy)
print("Copy created as: ", cpy)
return filename, new_name
# Create R file
elif not doesexist:
file = open(new_name, "w")
print(new_name, " was successfully created.")
file.close()
return filename, new_name
else:
print(new_name, " could not be created. Check your permissions?")
exit(0)
else:
print("No valid file selected... Quitting script")
def find_all(tofind, string):
"""Returns number of times a certain substring is found"""
found = [i for i in range(len(string)) if string.startswith(tofind, i)]
num_found = len(found)
return num_found
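# e.g. (editor's sketch): overlapping occurrences are counted too
# >>> find_all('aa', 'aaa')
# 2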
def complexchange(line):
"""Completes multi-step line changes """
for key in complex_change:
if key in line:
if key == 'def ' and line.lstrip().startswith(key):
#remove 'def' keyword
change = complex_change[key]
line_r = ignoreStrReplace(line, key, change)
#split function definition at '('
lsplit = line_r.split('(', maxsplit=1)
fname = lsplit[0]
params = '(' + lsplit[1]
# create R style function def "fname <- function(params)"
|
                line = fname + ' <- function' + params
else:
line = opfunc(line, key)
return line
# TESTED-Works
def chglinecontent(line):
"""Changes content contained within a si
|
ngle line"""
# Perform simple changes
for key in simple_change.keys():
# Ignore if string version exists in line
check_str_s, check_str_d = stringify(key)
if not check_str_d in line or not check_str_s in line:
line = ignoreStrReplace(line, key, simple_change[key])
line = complexchange(line)
line = statement(line)
return line
def indentation(s, tabsize=4):
"""Generator to return level of indentation"""
sx = s.expandtabs(tabsize)
# if line is empty yields 0
return 0 if sx.isspace() else len(sx) - len(sx.lstrip())
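# e.g. (editor's sketch):
# >>> indentation('        x = 1')
# 8
# >>> indentation('\tx = 1')  # a tab expands to tabsize columns
# 4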
def opfunc(line, op):
"""
Replaces python operations ('*'=) in line(self) with R style operations.
"""
#Check if the operation is contained in a string, don't modify if true.
check_str_s, check_str_d = stringify(op)
if not check_str_d in line and not check_str_s in line:
# Get R style operation
rop = complex_change[op]
# Split line once at python operand
linesplit = line.split(op)
# Store variable (left side) and right side of operand
ls = linesplit[0] + ' <- '
rs = rop + linesplit[1]
# Prepend variable(ls) to right side and convert assignment variable
rs = linesplit[0].lstrip() + rs
# Strip whitespace from right of ls and create R style equation
line = ls + rs
return line
def stringify(sub):
"""Returns python string versions ('' and "") of original substring"""
check_str_s = "'" + sub + "'"
check_str_d = '"' + sub + '"'
return check_str_s, check_str_d
def setflags(line, indents):
"""Scans line to set/unset flags for further processing"""
# For multi-line comments
if 1 == (find_all('"""', line) % 2):
if not flags['comment']:
flags['comment'].append('"""')
else:
flags['comment'].pop()
# For code blocks
if line.rstrip().endswith(':'):
flags['bracket'].append("}")
flags['b_start'].append(indentation(line))
def standind(line, cur_ind):
"""Standardizes indentation"""
devfromstd = cur_ind % 4
if not devfromstd == 0:
        line = (devfromstd * ' ') + line
return indentation(line), line
#TESTED-WORKS
def statement(line):
"""Converts if statements"""
if "if " in line and not 'elif' in line:
lsplit = line.split('if ', maxsplit=1)
ls = lsplit[0] + 'if '
rs = lsplit[1]
# Replace the ':' at the end of the statement
rs = lsplit[1].replace(':','{')
rs = '(' + rs
rs = rs.replace('{', '){')
line = ls + rs
if 'elif ' in line:
lsplit = line.split('if ', maxsplit=1)
ls = lsplit[0] + 'else if '
rs = lsplit[1]
# Replace the ':' at the end of the statement
rs = lsplit[1].replace(':','{')
rs = '(' + rs
rs = rs.replace('{', '){')
line = ls + rs
return line
def ignoreStrReplace(line, cur, rep):
"""Wrapper for str.replace to ignore strings"""
if '"' in line:
#Split string at quotation marks
lsplit = line.split('"')
#Replace items contained within even partitions
lsplit[::2] = [spl.replace(cur, rep) for spl in lsplit[::2]]
#Rejoin the partitions
line = '"'.join(lsplit)
elif "'" in line:
#Split string at quotation marks
lsplit = line.split("'")
#Replace items contained within even partitions
lsplit[::2] = [spl.replace(cur, rep) for spl in lsplit[::2]]
#Rejoin the partitions
line = '"'.join(lsplit)
else:
line = line.replace(cur, rep)
return line
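# e.g. (editor's sketch): quoted text is left untouched by the replacement
# >>> ignoreStrReplace('x = 1  # "a = b" stays', ' = ', ' <- ')
# 'x <- 1  # "a = b" stays'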
def closeBrackets(file):
"""Attempts to find and close the opened brackets"""
for i in range(len(file)):
# Doing this to be able to randomly access variables
line = file[i]
# Set boolean to check for change in block
sameBlock = True
# Ignore lines with only whitespace
if not line.isspace():
#Look for opening brackets if closing brackets remain
if '{\n' in line and flags['bracket']:
# Store current index for
|
gunan/tensorflow
|
tensorflow/python/kernel_tests/pool_test.py
|
Python
|
apache-2.0
| 14,854
| 0.00579
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for unified pooling functionality in tensorflow.ops.nn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
def pool_direct_single_axis(
input, # pylint: disable=redefined-builtin
axis,
window_size,
pooling_type,
padding,
dilation_rate,
stride):
"""Numpy implementation of pooling along a single axis.
This is intended for testing only, and therefore isn't particularly efficient.
See pool_direct below for the meaning of the arguments.
Args:
input: numpy array.
axis: axis along which to perform pooling.
window_size: int >= 1. Size of pooling window within axis.
pooling_type: either "MAX" or "AVG".
padding: either "SAME" or "VALID".
dilation_rate: int >= 1. Dilation factor for window, i.e. stride at which
to sample input.
stride: int >= 1. Stride at which to generate output.
Returns:
pooling output array of rank N+2.
Raises:
ValueError: if arguments are invalid.
"""
effective_window_size = (window_size - 1) * dilation_rate + 1
input_size = input.shape[axis]
if padding == "SAME":
output_size = int(math.ceil(input_size / stride))
total_padding_amount = max(
0, (output_size - 1) * stride + effective_window_size - input_size)
before_padding = total_padding_amount // 2
elif padding == "VALID":
output_size = int(
math.ceil((input_size - effective_window_size + 1) / stride))
before_padding = 0
else:
raise ValueError("Unsupported padding type: %r" % (padding,))
output_shape = input.shape[:axis] + (output_size,) + input.shape[axis + 1:]
output = np.zeros(output_shape, input.dtype)
initial_dim_selector = tuple(np.s_[:] for _ in range(axis))
if pooling_type == "MAX":
pooling_func = np.max
elif pooling_type == "AVG":
pooling_func = np.mean
else:
raise ValueError("Unsupported pooling type: %r" % (pooling_type,))
for output_pos in range(output_size):
input_start_pos = output_pos * stride - before_padding
input_end_pos = min(input_start_pos + effective_window_size, input_size)
if input_start_pos < 0:
input_start_pos += dilation_rate
input_slice = np.s_[input_start_pos:input_end_pos:dilation_rate]
output[initial_dim_selector + (output_pos,)] = pooling_func(
input[initial_dim_selector + (input_slice,)], axis=axis)
return output
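# Editor's sanity check (illustrative, relies only on the numpy import above):
# max-pool a 1-D ramp with window 2, stride 2, VALID padding.
# >>> x = np.arange(6, dtype=np.float32).reshape(1, 6, 1)
# >>> pool_direct_single_axis(x, axis=1, window_size=2, pooling_type="MAX",
# ...                         padding="VALID", dilation_rate=1, stride=2)[0, :, 0]
# array([ 1.,  3.,  5.], dtype=float32)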
def pool_direct(
input, # pylint: disable=redefined-builtin
window_shape,
pooling_type,
padding, # pylint: disable=redefined-builtin
dilation_rate,
strides,
data_format=None):
"""Numpy implementation of pooling.
This is intended for testing only, and therefore isn't particularly efficient.
See tensorflow.nn.pool.
Args:
input: numpy array of rank N+2.
window_shape: Sequence of N ints >= 1.
pooling_type: either "MAX" or "AVG".
padding: either "SAME" or "VALID".
dilation_rate: Sequence of N ints >= 1.
strides: Sequence of N ints >= 1.
data_format: If specified and starts with "NC", indicates that second
dimension, rather than the last dimension, specifies the channel.
Returns:
pooling output array of rank N+2.
Raises:
ValueError: if arguments are invalid.
"""
if data_format is None or not data_format.startswith("NC"):
spatial_start_dim = 1
else:
spatial_start_dim = 2
output = input
for i in range(len(window_shape)):
output = pool_direct_single_axis(
input=output,
axis=i + spatial_start_dim,
window_size=window_shape[i],
pooling_type=pooling_type,
padding=padding,
dilation_rate=dilation_rate[i],
stride=strides[i])
return output
class PoolingTest(test.TestCase):
def _test(self, input_shape, **kwargs):
# Use negative numbers to make sure there isn't any zero padding getting
# used.
x = -np.arange(
np.prod(input_shape), dtype=np.float32).reshape(input_shape) - 1
y1 = pool_direct(input=x, **kwargs)
y2 = nn_ops.pool(input=x, **kwargs)
self.assertAllClose(y1, self.evaluate(y2), rtol=1e-2, atol=1e-2)
def testPoolSimple(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
self._test(
input_shape=[1, 1, 10, 1],
window_shape=[1, 3],
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1],
strides=[1, 2])
def testPool1D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 2], [2, 10, 2]]:
for window_shape in [[1], [2], [3]]:
if padding != "SAME":
for dilation_rate in [[1], [2], [3]]:
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1])
for strides in [[1], [2], [3]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1],
strides=strides)
def testPool2D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 10, 2], [2, 10, 9, 2]]:
for window_shape in [[1, 1], [2, 1], [2, 3]]:
if padding != "SAME":
for dilation_rate in [[1, 1], [2, 1], [1, 2], [2, 3]]:
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1, 1])
for strides in [[1, 1], [2, 1], [1, 2], [2, 3]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
|
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
|
pooling_type=pooling_type,
dilation_rate=[1, 1],
strides=strides)
def testPool3D(self):
if test.is_built_with_rocm():
self.skipTest("Pooling with 3D tensors is not supported in ROCm")
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 10, 11, 2], [2, 10, 9, 11, 2]]:
|
StamusNetworks/scirius
|
rules/migrations/0058_source_public_source.py
|
Python
|
gpl-3.0
| 440
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-03-05 09:24
from django.db import migrations, models
class Migration(migrations.Migration):
|
    dependencies = [
('rules', '0057_auto_20180302_1312'),
]
operations = [
migrations.AddField(
model_name='source',
name='public_source',
|
            field=models.CharField(blank=True, max_length=100, null=True),
),
]
|
boada/ICD
|
sandbox/legacy_plot_code/ston_test.py
|
Python
|
mit
| 488
| 0.004098
|
import pyfits as pyf
import pylab as pyl
full = pyf.getdata('./data/gs_all_tf_h_130511b_multi.fits')
sample = pyf.getdata('../samples/sample_1.5_3.5_gs_all.fits')
|
f = pyl.figure(1)
f1 = f.add_subplot(111)
for i in range(len(sample)):
ID = sample['ID'][i]
H_flux = full['WFC3_F160W_FLUX'][i-1]
H_flux_err = full['WFC3_F160W_FLUXERR'][i-1]
H_flux_weight = full['WFC3_F160W_WEIGHT'][i-1]
H_mag = sample['Hmag'][i]
|
    f1.scatter(H_mag,H_flux/H_flux_err)
pyl.show()
|
git-artes/GNUWiNetwork
|
gwn/gwnevents/api_events.py
|
Python
|
gpl-3.0
| 2,996
| 0.00534
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of GNUWiNetwork,
# Copyright (C) 2014 by
# Pablo Belzarena, Gabriel Gomez Sena, Victor Gonzalez Barbone,
# Facultad de Ingenieria, Universidad de la Republica, Uruguay.
#
# GNUWiNetwork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GNUWiNetwork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNUWiNetwork. If not, see <http://www.gnu.org/licenses/>.
#
'''Functions to create events of different types.
To create an event object use function C{mkevent()}. This function creates events of different types, according to the event modules imported by this module.
'''
import sys
import types
import evtimer
import utils.framers.ieee80211.evframes80211 as evframes80211
import evrequest
#sys.path = sys.path + ['..']
def mkevent(nickname, **kwargs):
    '''Returns an event of the given event nickname.
|
@param nickname: a valid event nickname, i.e. one that is a key in dictionary of valid nicknames.
    @param kwargs: a dictionary of variables depending on the type of event. Field C{ev_dc} is a dictionary of fields and values for the corresponding event type; field C{frmpkt} is a binary packed frame.
|
@return: an Event object.
'''
from evtimer import dc_nicknames as ev_dc_nicknames
import utils.framers.ieee80211.evframes80211
import evrequest
frmpkt, ev_dc = '', {}
if kwargs.has_key('ev_dc'):
ev_dc = kwargs['ev_dc']
if kwargs.has_key('frmpkt'):
frmpkt = kwargs['frmpkt']
ev_dc['frame_length'] = len(frmpkt)
else:
ev_dc['frame_length'] = 0
frmpkt = ''
if kwargs.has_key('payload'):
payload = kwargs['payload']
else:
payload = ''
if evtimer.dc_nicknames.has_key(nickname):
ptype, psubtype, eventclass = evtimer.dc_nicknames[nickname]
return eventclass(nickname, ptype, psubtype, ev_dc)
elif evframes80211.dc_nicknames.has_key(nickname):
ev_type, ev_subtype, eventclass = evframes80211.dc_nicknames[nickname]
ev = eventclass(nickname, ev_type, ev_subtype, frmpkt, ev_dc)
ev.payload = payload
return ev
elif evrequest.dc_nicknames.has_key(nickname):
ptype, psubtype, eventclass = evrequest.dc_nicknames[nickname]
return eventclass(nickname, ptype, psubtype, ev_dc)
else:
raise EventNameException(nickname + ' is not a valid nickname.')
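# Editor's sketch (hypothetical nickname and ev_dc fields; the valid nicknames
# are the keys of the dc_nicknames dictionaries imported above):
# ev = mkevent('SomeTimerNickname', ev_dc={'duration': 1.0})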
if __name__ == '__main__':
import doctest
doctest.testmod()
|
tamac-io/jenkins-job-builder
|
jenkins_jobs/cli/entry.py
|
Python
|
apache-2.0
| 5,508
| 0.000182
|
#!/usr/bin/env python
# Copyright (C) 2015 Wayne Warren
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io
import os
import logging
import platform
import sys
from stevedore import extension
import yaml
from jenkins_jobs.cli.parser import create_parser
from jenkins_jobs.config import JJBConfig
from jenkins_jobs import utils
from jenkins_jobs import version
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
reload(sys)
sys.setdefaultencoding('utf8')
def __version__():
return "Jenkins Job Builder version: %s" % \
version.version_info.version_string()
class JenkinsJobs(object):
""" This is the entry point class for the `jenkins-jobs` command line tool.
While this class can be used programmatically by external users of the JJB
API, the main goal here is to abstract the `jenkins_jobs` tool in a way
that prevents test suites from caring overly much about various
implementation details--for example, tests of subcommands must not have
access to directly modify configuration objects, instead they must provide
a fixture in the form of an .ini file that provides the configuration
necessary for testing.
External users of the JJB API may be interested in this class as an
alternative to wrapping `jenkins_jobs` with a subprocess that execs it as a
    system command; instead, python scripts may be written that pass
|
`jenkins_jobs` args directly to this class to allow programmatic setting of
various command line parameters.
"""
def __init__(self, args=None, **kwargs):
if args is None:
args = []
self.parser = create_parser()
|
        self.options = self.parser.parse_args(args)
self.jjb_config = JJBConfig(self.options.conf, **kwargs)
if not self.options.command:
self.parser.error("Must specify a 'command' to be performed")
if (self.options.log_level is not None):
self.options.log_level = getattr(logging,
self.options.log_level.upper(),
logger.getEffectiveLevel())
logger.setLevel(self.options.log_level)
self._parse_additional()
self.jjb_config.validate()
def _set_config(self, target, option):
"""
Sets the option in target only if the given option was explicitly set
"""
opt_val = getattr(self.options, option, None)
if opt_val is not None:
target[option] = opt_val
def _parse_additional(self):
self._set_config(self.jjb_config.builder, 'ignore_cache')
self._set_config(self.jjb_config.builder, 'flush_cache')
self._set_config(self.jjb_config.yamlparser, 'allow_empty_variables')
self._set_config(self.jjb_config.jenkins, 'user')
self._set_config(self.jjb_config.jenkins, 'password')
if getattr(self.options, 'plugins_info_path', None) is not None:
with io.open(self.options.plugins_info_path, 'r',
encoding='utf-8') as yaml_file:
plugins_info = yaml.load(yaml_file)
if not isinstance(plugins_info, list):
self.parser.error("{0} must contain a Yaml list!".format(
self.options.plugins_info_path))
self.jjb_config.builder['plugins_info'] = plugins_info
if getattr(self.options, 'path', None):
if hasattr(self.options.path, 'read'):
logger.debug("Input file is stdin")
if self.options.path.isatty():
if platform.system() == 'Windows':
key = 'CTRL+Z'
else:
key = 'CTRL+D'
logger.warn("Reading configuration from STDIN. "
"Press %s to end input.", key)
else:
# take list of paths
self.options.path = self.options.path.split(os.pathsep)
do_recurse = (getattr(self.options, 'recursive', False) or
self.jjb_config.recursive)
excludes = ([e for elist in self.options.exclude
for e in elist.split(os.pathsep)] or
self.jjb_config.excludes)
paths = []
for path in self.options.path:
if do_recurse and os.path.isdir(path):
paths.extend(utils.recurse_path(path, excludes))
else:
paths.append(path)
self.options.path = paths
def execute(self):
extension_manager = extension.ExtensionManager(
namespace='jjb.cli.subcommands',
invoke_on_load=True,)
ext = extension_manager[self.options.command]
ext.obj.execute(self.options, self.jjb_config)
def main():
argv = sys.argv[1:]
jjb = JenkinsJobs(argv)
jjb.execute()
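# Editor's sketch (hypothetical conf path and job dir): driving JJB from a
# script instead of shelling out to the `jenkins-jobs` binary.
# from jenkins_jobs.cli.entry import JenkinsJobs
# JenkinsJobs(['--conf', 'jenkins_jobs.ini', 'test', 'jobs/']).execute()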
|