repo_name
stringlengths 5
92
| path
stringlengths 4
221
| copies
stringclasses 19
values | size
stringlengths 4
6
| content
stringlengths 766
896k
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 32
997
| alpha_frac
float64 0.25
0.96
| autogenerated
bool 1
class | ratio
float64 1.5
13.6
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ameihm0912/MozDef
|
alerts/auditd_sftp.py
|
1
|
1886
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez averez@mozilla.com
# Jeff Bryner jbryner@mozilla.com
# Aaron Meihm ameihm@mozilla.com
# Michal Purzynski <mpurzynski@mozilla.com>
# Alicia Smith <asmith@mozilla.com>
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSFTPEvent(AlertTask):
    """Alert on SFTP sessions observed via auditd execve events.

    Matches audisp-json execve records where an ``ssh`` process was spawned
    by an ``sftp`` parent, and raises a NOTICE alert per matching event.
    """

    def main(self):
        # Look back over the last 5 minutes of events.
        search_query = SearchQuery(minutes=5)

        search_query.add_must([
            TermMatch('_type', 'auditd'),
            TermMatch('category', 'execve'),
            TermMatch('processname', 'audisp-json'),
            TermMatch('details.processname', 'ssh'),
            PhraseMatch('details.parentprocess', 'sftp'),
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()

    # Set alert properties
    def onEvent(self, event):
        """Build the alert dictionary for a single matching event."""
        category = 'execve'
        severity = 'NOTICE'
        # FIX: was ['audisp-json, audit'] — a single string with an embedded
        # comma instead of two separate tags.
        tags = ['audisp-json', 'audit']

        # Default every field so the summary never raises on sparse events.
        srchost = 'unknown'
        username = 'unknown'
        directory = 'unknown'
        x = event['_source']
        if 'details' in x:
            if 'hostname' in x['details']:
                srchost = x['details']['hostname']
            if 'originaluser' in x['details']:
                username = x['details']['originaluser']
            if 'cwd' in x['details']:
                directory = x['details']['cwd']

        summary = 'SFTP Event by {0} from host {1} in directory {2}'.format(username, srchost, directory)

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, [event], severity)
|
mpl-2.0
| -5,164,862,941,264,948,000
| 32.678571
| 105
| 0.620361
| false
| 3.727273
| false
| false
| false
|
krujos/oohimtelling
|
app.py
|
1
|
5031
|
#!/usr/bin/env python
from __future__ import print_function
import json
import requests
import sys
import os
import time
from flask import Flask, request, Response
from flask import jsonify
from functools import wraps
# Allow `@app.get(path)` as shorthand for `@app.route(path, methods=['get'])`.
Flask.get = lambda self, path: self.route(path, methods=['get'])
##################################The Setup###############################################
# Service credentials are injected by Cloud Foundry through VCAP_SERVICES.
vcap_services = json.loads(os.getenv("VCAP_SERVICES"))
client_id = None
client_secret = None
uaa_uri = None
api = None
cache = dict()  # memoized CF API responses, keyed by request URL
port = 8003
expire_time = 0  # epoch seconds at which the cached OAuth token expires
token = None
# TLS verification stays on unless VERIFY_SSL is explicitly "false"/"FALSE".
sslVerify = (os.getenv("VERIFY_SSL") != "false" and os.getenv("VERIFY_SSL") != "FALSE")
print("Calling CF with sslVerify = " + str(sslVerify))
if 'PORT' in os.environ:
    port = int(os.getenv("PORT"))
app = Flask(__name__)
# Pull UAA and Cloud Controller endpoints out of the bound
# user-provided services.
for service in vcap_services['user-provided']:
    if 'uaa' == service['name']:
        client_id = service['credentials']['client_id']
        client_secret = service['credentials']['client_secret']
        uaa_uri = service['credentials']['uri']
    elif 'cloud_controller' == service['name']:
        api = service['credentials']['uri']
###################################The Auth##############################################
def check_auth(user, password):
    """Return True when the basic-auth credentials match the UAA client pair."""
    expected = (client_id, client_secret)
    return (user, password) == expected
def authenticate():
    """Return a 401 response challenging the client for HTTP basic auth."""
    challenge = {"WWW-Authenticate": 'Basic realm="Login Required"'}
    return Response('You must be authenticated to use this application', 401,
                    challenge)
def requires_auth(f):
    """Decorator: run the view only when valid basic-auth credentials are sent."""
    @wraps(f)
    def decorated(*args, **kwargs):
        creds = request.authorization
        if creds and check_auth(creds.username, creds.password):
            return f(*args, **kwargs)
        # Missing or wrong credentials: issue the 401 challenge instead.
        return authenticate()
    return decorated
##############################The bidness logic##########################################
def get_token():
    """Return a cached OAuth token, refreshing it from UAA once it expires."""
    global expire_time, token
    if expire_time < time.time():
        # Token is missing or stale: fetch a fresh one via client credentials.
        basic = requests.auth.HTTPBasicAuth(client_id, client_secret)
        print("Getting token from " + uaa_uri)
        resp = requests.get(url=uaa_uri, headers={'accept': 'application/json'},
                            params={'grant_type': 'client_credentials'},
                            auth=basic, verify=sslVerify)
        print("Response code = " + str(resp.status_code))
        body = resp.json()
        # Renew a minute early so callers never hold an expired token.
        expire_time = time.time() + (int(body['expires_in']) - 60)
        token = body['access_token']
        print("Token expires at " + str(expire_time))
    return token
def cf(path):
    """GET a Cloud Controller API path and return the decoded JSON body."""
    headers = {'Authorization': "bearer " + get_token()}
    print("Calling " + path)
    resp = requests.get(api + path, headers=headers, verify=sslVerify)
    if resp.status_code != 200:
        # Log the failure but still return the body so callers can
        # surface whatever error payload CF sent back.
        print("Failed to call CF API (" + path + ")", file=sys.stderr)
    return resp.json()
def api_cache(url):
    """Memoize cf() lookups for rarely-changing resources (spaces, orgs, domains)."""
    try:
        return cache[url]
    except KeyError:
        # First request for this URL: fetch once and remember the body.
        cache[url] = cf(url)
        return cache[url]
def get_apps():
    """Collect every visible app with its space, org, routes and event history."""
    collected = []
    for entry in cf('/v2/apps')['resources']:
        entity = entry['entity']
        meta = entry['metadata']
        record = {
            'name': entity['name'],
            'created_at': meta['created_at'],
            'updated_at': meta['updated_at'],
            'app_guid': meta['guid'],
            'state': entity['state'],
            'buildpack': determine_buildpack(entry),
        }
        # Space and org bodies are cached — they repeat across apps.
        space = api_cache(entity['space_url'])
        record['space'] = space['entity']['name']
        org = api_cache(space['entity']['organization_url'])
        record['org'] = org['entity']['name']
        record['routes'] = [determine_fqdn(route)
                            for route in cf(entity['routes_url'])['resources']]
        record['events'] = [
            make_event(ev)
            for ev in cf("/v2/events?q=actee:" + record['app_guid'])['resources']
        ]
        # NOTE: each app is wrapped in its own single-element list; this
        # matches the output shape the original produced.
        collected.append([record])
    return collected
def determine_fqdn(route):
    """Build the fully-qualified domain name for a CF route resource."""
    domain_name = api_cache(route['entity']['domain_url'])['entity']['name']
    return route['entity']['host'] + "." + domain_name
def determine_buildpack(app):
    """Return the best available buildpack name for an app resource.

    Prefers the explicitly configured buildpack, falls back to the one CF
    detected at staging time, and flags apps where neither is recorded.
    """
    buildpack = app['entity']['buildpack']
    detected_buildpack = app['entity']['detected_buildpack']
    # BUG FIX: the original tested `detected_buildpack is None` twice, so any
    # app without a *detected* buildpack was flagged even when it had a
    # configured one. Flag only when both are missing.
    if buildpack is None and detected_buildpack is None:
        return "CF HAS NO BUILDPACK INFO FOR THIS APP. INVESTIGATE!"
    if buildpack is None:
        return detected_buildpack
    return buildpack
def make_event(event):
    """Flatten a CF event resource into a small summary dict."""
    entity = event['entity']
    return {
        'event_type': entity['type'],
        'actor_type': entity['actor_type'],
        'actor': entity['actor_name'],
        'time': entity['timestamp'],
        'metadata': entity['metadata'],
    }
###################################Controllers#################################
@app.get('/')
def root():
    """Landing page: point visitors at the /apps endpoint."""
    hint = "you probably want <a href='/apps'>/apps</a>"
    return hint
@app.get('/apps')
@requires_auth
def apps():
    """Authenticated endpoint returning the full app inventory as JSON."""
    inventory = get_apps()
    return jsonify(apps=inventory)
# Run the Flask dev server when executed directly; Cloud Foundry supplies PORT.
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=port, debug=True)
|
apache-2.0
| -5,806,166,095,092,227,000
| 30.44375
| 90
| 0.575234
| false
| 3.757282
| false
| false
| false
|
ttreeagency/PootleTypo3Org
|
pootle/apps/pootle_store/views.py
|
1
|
42408
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
import logging
from itertools import groupby
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.db.models import Q
from django.http import HttpResponse, Http404
from django.shortcuts import get_object_or_404, render_to_response
from django.template import loader, RequestContext
from django.utils.translation import to_locale, ugettext as _
from django.utils.translation.trans_real import parse_accept_lang_header
from django.utils import simplejson, timezone
from django.utils.encoding import iri_to_uri
from django.views.decorators.cache import never_cache
from translate.lang import data
from pootle.core.decorators import (get_translation_project,
set_request_context)
from pootle_app.models import Suggestion as SuggestionStat
from pootle_app.models.permissions import (get_matching_permissions,
check_permission,
check_profile_permission)
from pootle_misc.baseurl import redirect
from pootle_misc.checks import check_names, get_quality_check_failures
from pootle_misc.forms import make_search_form
from pootle_misc.stats import get_raw_stats
from pootle_misc.url_manip import ensure_uri, previous_view_url
from pootle_misc.util import paginate, ajax_required, jsonify
from pootle_profile.models import get_profile
from pootle_statistics.models import (Submission, SubmissionFields,
SubmissionTypes)
from .decorators import get_store_context, get_unit_context
from .models import Store, Unit
from .forms import (unit_comment_form_factory, unit_form_factory,
highlight_whitespace)
from .signals import translation_submitted
from .templatetags.store_tags import (highlight_diffs, pluralize_source,
pluralize_target)
from .util import (UNTRANSLATED, FUZZY, TRANSLATED, STATES_MAP,
absolute_real_path, find_altsrcs, get_sugg_list)
@get_store_context('view')
def export_as_xliff(request, store):
    """Export given file to xliff for offline translation.

    Builds (or reuses) an on-disk XLIFF export under ``POOTLE_EXPORT`` and
    redirects the client to it.
    """
    path = store.real_path
    if not path:
        # bug 2106
        # Store has no backing file: reconstruct a filesystem-style path
        # from the pootle_path components instead.
        project = request.translation_project.project
        if project.get_treestyle() == "gnu":
            path = "/".join(store.pootle_path.split(os.path.sep)[2:])
        else:
            parts = store.pootle_path.split(os.path.sep)[1:]
            path = "%s/%s/%s" % (parts[1], parts[0], "/".join(parts[2:]))

    path, ext = os.path.splitext(path)
    export_path = "/".join(['POOTLE_EXPORT', path + os.path.extsep + 'xlf'])
    abs_export_path = absolute_real_path(export_path)

    # The cache records the store mtime at the last export; regenerate only
    # when the store changed or the exported file disappeared.
    key = iri_to_uri("%s:export_as_xliff" % store.pootle_path)
    last_export = cache.get(key)
    if (not (last_export and last_export == store.get_mtime() and
             os.path.isfile(abs_export_path))):
        from pootle_app.project_tree import ensure_target_dir_exists
        from translate.storage.poxliff import PoXliffFile
        from pootle_misc import ptempfile as tempfile
        import shutil
        ensure_target_dir_exists(abs_export_path)
        outputstore = store.convert(PoXliffFile)
        outputstore.switchfile(store.name, createifmissing=True)
        # Write to a temp file first, then move it into place.
        fd, tempstore = tempfile.mkstemp(prefix=store.name, suffix='.xlf')
        os.close(fd)
        outputstore.savefile(tempstore)
        shutil.move(tempstore, abs_export_path)
        cache.set(key, store.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
    return redirect('/export/' + export_path)
@get_store_context('view')
def export_as_type(request, store, filetype):
    """Export given file to the requested ``filetype`` for offline translation.

    Mirrors :func:`export_as_xliff` but converts to an arbitrary supported
    bilingual format.
    """
    from pootle_store.filetypes import factory_classes, is_monolingual
    klass = factory_classes.get(filetype, None)
    # Refuse unknown formats, monolingual formats, and no-op conversions
    # (exporting a file to its own type).
    if (not klass or is_monolingual(klass) or
            store.pootle_path.endswith(filetype)):
        raise ValueError

    path, ext = os.path.splitext(store.real_path)
    export_path = os.path.join('POOTLE_EXPORT',
                               path + os.path.extsep + filetype)
    abs_export_path = absolute_real_path(export_path)

    # Regenerate only when the store changed since the last export or the
    # previously exported file is gone.
    key = iri_to_uri("%s:export_as_%s" % (store.pootle_path, filetype))
    last_export = cache.get(key)
    if (not (last_export and last_export == store.get_mtime() and
             os.path.isfile(abs_export_path))):
        from pootle_app.project_tree import ensure_target_dir_exists
        from pootle_misc import ptempfile as tempfile
        import shutil
        ensure_target_dir_exists(abs_export_path)
        outputstore = store.convert(klass)
        # Write to a temp file first, then move it into place.
        fd, tempstore = tempfile.mkstemp(prefix=store.name,
                                         suffix=os.path.extsep + filetype)
        os.close(fd)
        outputstore.savefile(tempstore)
        shutil.move(tempstore, abs_export_path)
        cache.set(key, store.get_mtime(), settings.OBJECT_CACHE_TIMEOUT)
    return redirect('/export/' + export_path)
@get_store_context('view')
def download(request, store):
    """Sync the store to its file on disk and redirect to the exported copy."""
    store.sync(update_translation=True)
    target = '/export/' + store.real_path
    return redirect(target)
def get_filter_name(GET):
    """Gets current filter's human-readable name.

    :param GET: A copy of ``request.GET``.
    :return: Two-tuple with the filter name, and a list of extra arguments
        passed to the current filter.
    """
    active = None
    extra = None

    if 'filter' in GET:
        active = GET['filter']

        if active.startswith('user-'):
            extra = [GET.get('user', _('User missing'))]
        elif active == 'checks' and 'checks' in GET:
            extra = [check_names.get(check, check)
                     for check in GET['checks'].split(',')]
    elif 'search' in GET:
        active = 'search'
        extra = [GET['search']]
        if 'sfields' in GET:
            extra.extend(GET['sfields'].split(','))

    display_names = {
        'all': _('All'),
        'translated': _('Translated'),
        'untranslated': _('Untranslated'),
        'fuzzy': _('Needs work'),
        'incomplete': _('Incomplete'),
        # Translators: This is the name of a filter
        'search': _('Search'),
        'checks': _('Checks'),
        'user-submissions': _('Submissions'),
        'user-submissions-overwritten': _('Overwritten submissions'),
    }

    return (display_names.get(active), extra)
@get_translation_project
@set_request_context
def export_view(request, translation_project, dir_path, filename=None):
    """Displays a list of units with filters applied.

    When ``filename`` is given only that store is shown; otherwise every
    store under ``dir_path`` is included. Units are grouped by the path of
    their containing store.
    """
    current_path = translation_project.directory.pootle_path + dir_path
    if filename:
        current_path = current_path + filename
        store = get_object_or_404(Store, pootle_path=current_path)
        units_qs = store.units
    else:
        store = None
        units_qs = translation_project.units.filter(
            store__pootle_path__startswith=current_path,
        )

    filter_name, filter_extra = get_filter_name(request.GET)

    units = get_step_query(request, units_qs)
    # Group consecutive units by their store path for display.
    unit_groups = [(path, list(units)) for path, units in
                   groupby(units, lambda x: x.store.path)]

    ctx = {
        'source_language': translation_project.project.source_language,
        'language': translation_project.language,
        'project': translation_project.project,
        'unit_groups': unit_groups,
        'filter_name': filter_name,
        'filter_extra': filter_extra,
    }
    return render_to_response('store/list.html', ctx,
                              context_instance=RequestContext(request))
####################### Translate Page ##############################
def get_alt_src_langs(request, profile, translation_project):
    """Return alternative source languages to display alongside translations.

    Uses the languages configured in the user's profile; when none are set,
    falls back to the browser's Accept-Language header, skipping English and
    the project's own source/target languages.
    """
    language = translation_project.language
    project = translation_project.project
    source_language = project.source_language

    langs = profile.alt_src_langs.exclude(
        id__in=(language.id, source_language.id)
    ).filter(translationproject__project=project)

    if not profile.alt_src_langs.count():
        from pootle_language.models import Language
        accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
        for accept_lang, unused in parse_accept_lang_header(accept):
            if accept_lang == '*':
                continue

            simplified = data.simplify_to_common(accept_lang)
            normalized = to_locale(data.normalize_code(simplified))
            code = to_locale(accept_lang)
            # Skip English and the project's own languages in both the
            # normalized and raw forms.
            if (normalized in
                    ('en', 'en_US', source_language.code, language.code) or
                code in ('en', 'en_US', source_language.code, language.code)):
                continue

            # First Accept-Language entry with a matching translation
            # project wins.
            langs = Language.objects.filter(
                code__in=(normalized, code),
                translationproject__project=project,
            )
            if langs.count():
                break
    return langs
def get_non_indexed_search_step_query(form, units_queryset):
    """Database fallback search: AND all words within each selected field,
    then OR the per-field result sets together.
    """
    terms = form.cleaned_data['search'].split()
    sfields = form.cleaned_data['sfields']

    def narrowed(lookup):
        # Chain one icontains filter per word so every word must match.
        qs = units_queryset
        for term in terms:
            qs = qs.filter(**{lookup: term})
        return qs

    matches = units_queryset.none()

    if 'source' in sfields:
        matches = matches | narrowed('source_f__icontains')

    if 'target' in sfields:
        matches = matches | narrowed('target_f__icontains')

    if 'notes' in sfields:
        # Notes live in two separate columns; search both.
        matches = (matches | narrowed('translator_comment__icontains')
                           | narrowed('developer_comment__icontains'))

    if 'locations' in sfields:
        matches = matches | narrowed('locations__icontains')

    return matches
def get_search_step_query(translation_project, form, units_queryset):
    """Narrows down units query to units matching search string."""
    if translation_project.indexer is None:
        # No full-text index available: fall back to (slower) DB search.
        logging.debug(u"No indexer for %s, using database search",
                      translation_project)
        return get_non_indexed_search_step_query(form, units_queryset)

    logging.debug(u"Found %s indexer for %s, using indexed search",
                  translation_project.indexer.INDEX_DIRECTORY_NAME,
                  translation_project)

    word_querylist = []
    words = form.cleaned_data['search'].split()
    fields = form.cleaned_data['sfields']
    # Restrict the index query to the files actually present in the queryset.
    paths = units_queryset.order_by() \
                          .values_list('store__pootle_path', flat=True) \
                          .distinct()
    path_querylist = [('pofilename', pootle_path)
                      for pootle_path in paths.iterator()]
    # Cache resulting unit ids, keyed on the full search parameters plus the
    # project's modification time so edits invalidate stale results.
    cache_key = "search:%s" % str(hash((repr(path_querylist),
                                        translation_project.get_mtime(),
                                        repr(words),
                                        repr(fields))))
    dbids = cache.get(cache_key)
    if dbids is None:
        searchparts = []
        # Split the search expression into single words. Otherwise Xapian and
        # Lucene would interpret the whole string as an "OR" combination of
        # words instead of the desired "AND".
        for word in words:
            # Generate a list for the query based on the selected fields
            word_querylist = [(field, word) for field in fields]
            textquery = translation_project.indexer.make_query(word_querylist,
                                                               False)
            searchparts.append(textquery)
        pathquery = translation_project.indexer.make_query(path_querylist,
                                                           False)
        searchparts.append(pathquery)
        limitedquery = translation_project.indexer.make_query(searchparts, True)
        result = translation_project.indexer.search(limitedquery, ['dbid'])
        # Hard cap at 999 hits to keep the cached id list bounded.
        dbids = [int(item['dbid'][0]) for item in result[:999]]
        cache.set(cache_key, dbids, settings.OBJECT_CACHE_TIMEOUT)
    return units_queryset.filter(id__in=dbids)
def get_step_query(request, units_queryset):
    """Narrows down unit query to units matching conditions in GET."""
    if 'filter' in request.GET:
        unit_filter = request.GET['filter']
        username = request.GET.get('user', None)
        profile = request.profile
        # The "user-*" filters can target another user via ?user=; an
        # unknown username silently falls back to the requester's profile.
        if username:
            try:
                user = User.objects.get(username=username)
                profile = user.get_profile()
            except User.DoesNotExist:
                pass

        if unit_filter:
            # Unrecognized filter names leave this empty queryset in place.
            match_queryset = units_queryset.none()

            if unit_filter == 'all':
                match_queryset = units_queryset
            elif unit_filter == 'translated':
                match_queryset = units_queryset.filter(state=TRANSLATED)
            elif unit_filter == 'untranslated':
                match_queryset = units_queryset.filter(state=UNTRANSLATED)
            elif unit_filter == 'fuzzy':
                match_queryset = units_queryset.filter(state=FUZZY)
            elif unit_filter == 'incomplete':
                match_queryset = units_queryset.filter(
                    Q(state=UNTRANSLATED) | Q(state=FUZZY),
                )
            elif unit_filter == 'suggestions':
                #FIXME: is None the most efficient query
                match_queryset = units_queryset.exclude(suggestion=None)
            elif unit_filter == 'user-suggestions':
                match_queryset = units_queryset.filter(
                    suggestion__user=profile,
                ).distinct()
            elif unit_filter == 'user-suggestions-accepted':
                # FIXME: Oh, this is pretty lame, we need a completely
                # different way to model suggestions
                unit_ids = SuggestionStat.objects.filter(
                    suggester=profile,
                    state='accepted',
                ).values_list('unit', flat=True)
                match_queryset = units_queryset.filter(
                    id__in=unit_ids,
                ).distinct()
            elif unit_filter == 'user-suggestions-rejected':
                # FIXME: Oh, this is as lame as above
                unit_ids = SuggestionStat.objects.filter(
                    suggester=profile,
                    state='rejected',
                ).values_list('unit', flat=True)
                match_queryset = units_queryset.filter(
                    id__in=unit_ids,
                ).distinct()
            elif unit_filter == 'user-submissions':
                match_queryset = units_queryset.filter(
                    submission__submitter=profile,
                ).distinct()
            elif unit_filter == 'user-submissions-overwritten':
                match_queryset = units_queryset.filter(
                    submission__submitter=profile,
                ).exclude(submitted_by=profile).distinct()
            elif unit_filter == 'checks' and 'checks' in request.GET:
                checks = request.GET['checks'].split(',')
                if checks:
                    match_queryset = units_queryset.filter(
                        qualitycheck__false_positive=False,
                        qualitycheck__name__in=checks
                    ).distinct()

            units_queryset = match_queryset

    if 'search' in request.GET and 'sfields' in request.GET:
        # use the search form for validation only
        search_form = make_search_form(request.GET)
        if search_form.is_valid():
            units_queryset = get_search_step_query(request.translation_project,
                                                   search_form, units_queryset)
    return units_queryset
def translate_page(request):
    """Assemble the template context and render the translation editor page.

    The editor can be opened either for a single store or for a whole
    directory; ``request.store`` / ``request.directory`` decide which.
    """
    cantranslate = check_permission("translate", request)
    cansuggest = check_permission("suggest", request)
    canreview = check_permission("review", request)

    translation_project = request.translation_project
    language = translation_project.language
    project = translation_project.project
    profile = request.profile

    store = getattr(request, "store", None)
    directory = getattr(request, "directory", None)
    is_single_file = store and True or False
    path = is_single_file and store.path or directory.path
    pootle_path = (is_single_file and store.pootle_path or
                   directory.pootle_path)

    # Terminology projects/stores get a specialized search form.
    is_terminology = (project.is_terminology or store and
                      store.is_terminology)
    search_form = make_search_form(request=request,
                                   terminology=is_terminology)

    previous_overview_url = previous_view_url(request, ['overview'])

    context = {
        'cantranslate': cantranslate,
        'cansuggest': cansuggest,
        'canreview': canreview,
        'search_form': search_form,
        'store': store,
        'store_id': store and store.id,
        'directory': directory,
        'directory_id': directory and directory.id,
        'path': path,
        'pootle_path': pootle_path,
        'is_single_file': is_single_file,
        'language': language,
        'project': project,
        'translation_project': translation_project,
        'profile': profile,
        'source_language': translation_project.project.source_language,
        'previous_overview_url': previous_overview_url,
        'MT_BACKENDS': settings.MT_BACKENDS,
        'LOOKUP_BACKENDS': settings.LOOKUP_BACKENDS,
        'AMAGAMA_URL': settings.AMAGAMA_URL,
    }
    return render_to_response('store/translate.html', context,
                              context_instance=RequestContext(request))
@get_store_context('view')
def translate(request, store):
    """Render the translation editor for a single store."""
    response = translate_page(request)
    return response
#
# Views used with XMLHttpRequest requests.
#
def _filter_ctx_units(units_qs, unit, how_many, gap=0):
    """Return up to ``how_many`` context units on each side of ``unit``,
    skipping the first ``gap`` units next to it.
    """
    window = slice(gap, how_many + gap)
    ctx = {'before': [], 'after': []}

    if how_many and unit.index - gap > 0:
        preceding = (units_qs.filter(store=unit.store_id,
                                     index__lt=unit.index)
                             .order_by('-index')[window])
        ctx['before'] = _build_units_list(preceding, reverse=True)
        # Walked newest-first above; flip back into display order.
        ctx['before'].reverse()

    #FIXME: can we avoid this query if length is known?
    if how_many:
        following = units_qs.filter(store=unit.store_id,
                                    index__gt=unit.index)[window]
        ctx['after'] = _build_units_list(following)

    return ctx
def _build_units_list(units, reverse=False):
    """Given a list/queryset of units, builds a list with the unit data
    contained in a dictionary ready to be returned as JSON.

    :param reverse: The units are being walked in reverse display order;
        this flips how prev/next links between adjacent entries are set.
    :return: A list with unit id, source, and target texts. In case of
        having plural forms, a title for the plural form is also provided.
    """
    return_units = []
    for unit in iter(units):
        source_unit = []
        target_unit = []
        # One {'text': ..., 'title': ...} dict per plural form.
        for i, source, title in pluralize_source(unit):
            unit_dict = {'text': source}
            if title:
                unit_dict["title"] = title
            source_unit.append(unit_dict)
        for i, target, title in pluralize_target(unit):
            unit_dict = {'text': target}
            if title:
                unit_dict["title"] = title
            target_unit.append(unit_dict)
        prev = None
        next = None
        # Thread prev/next ids between consecutive entries as we append.
        if return_units:
            if reverse:
                return_units[-1]['prev'] = unit.id
                next = return_units[-1]['id']
            else:
                return_units[-1]['next'] = unit.id
                prev = return_units[-1]['id']
        return_units.append({'id': unit.id,
                             'isfuzzy': unit.isfuzzy(),
                             'prev': prev,
                             'next': next,
                             'source': source_unit,
                             'target': target_unit})
    return return_units
def _build_pager_dict(pager):
"""Given a pager object ``pager``, retrieves all the information needed
to build a pager.
:return: A dictionary containing necessary pager information to build
a pager.
"""
return {"number": pager.number,
"num_pages": pager.paginator.num_pages,
"per_page": pager.paginator.per_page
}
def _get_index_in_qs(qs, unit, store=False):
    """Given a queryset ``qs``, returns the position (index) of the unit
    ``unit`` within that queryset. ``store`` specifies if the queryset is
    limited to a single store.

    :return: Value representing the position of the unit ``unit``.
    :rtype: int
    """
    if store:
        return qs.filter(index__lt=unit.index).count()

    # Spanning multiple stores: count earlier units in the same store plus
    # every unit belonging to a store whose path sorts before this one.
    owning_store = unit.store
    earlier = (qs.filter(store=owning_store, index__lt=unit.index) |
               qs.filter(store__pootle_path__lt=owning_store.pootle_path))
    return earlier.count()
def get_view_units(request, units_queryset, store, limit=0):
    """Gets source and target texts excluding the editing unit.

    :param limit: Units per page; falls back to the profile's configured
        row count when 0 or unparsable.
    :return: An object in JSON notation that contains the source and target
             texts for units that will be displayed before and after editing
             unit.

             If asked by using the ``meta`` and ``pager`` parameters,
             metadata and pager information will be calculated and returned
             too.
    """
    current_unit = None
    json = {}
    try:
        limit = int(limit)
    except ValueError:
        limit = None
    if not limit:
        limit = request.profile.get_unit_rows()
    step_queryset = get_step_query(request, units_queryset)
    # Return metadata it has been explicitely requested
    if request.GET.get('meta', False):
        tp = request.translation_project
        json["meta"] = {"source_lang": tp.project.source_language.code,
                        "source_dir": tp.project.source_language.get_direction(),
                        "target_lang": tp.language.code,
                        "target_dir": tp.language.get_direction(),
                        "project_style": tp.project.checkstyle}
    # Maybe we are trying to load directly a specific unit, so we have
    # to calculate its page number
    uid = request.GET.get('uid', None)
    if uid:
        current_unit = units_queryset.get(id=uid)
        preceding = _get_index_in_qs(step_queryset, current_unit, store)
        # NOTE(review): relies on Python 2 integer division; would produce
        # a float page number on Python 3 — confirm before porting.
        page = preceding / limit + 1
    else:
        page = None
    pager = paginate(request, step_queryset, items=limit, page=page)
    json["units"] = _build_units_list(pager.object_list)
    # Return paging information if requested to do so
    if request.GET.get('pager', False):
        json["pager"] = _build_pager_dict(pager)
    # Tell the client which unit to open: the requested one, or the first
    # on the page when none was requested.
    if not current_unit:
        try:
            json["uid"] = json["units"][0]["id"]
        except IndexError:
            pass
    else:
        json["uid"] = current_unit.id
    response = jsonify(json)
    return HttpResponse(response, mimetype="application/json")
@ajax_required
@get_store_context('view')
def get_view_units_store(request, store, limit=0):
    """Gets source and target texts excluding the editing widget (store-level).

    :return: An object in JSON notation that contains the source and target
             texts for units that will be displayed before and after
             unit ``uid``.
    """
    units = store.units
    return get_view_units(request, units, store=True, limit=limit)
def _is_filtered(request):
"""Checks if unit list is filtered."""
return ('filter' in request.GET or 'checks' in request.GET or
'user' in request.GET or
('search' in request.GET and 'sfields' in request.GET))
@ajax_required
@get_unit_context('view')
def get_more_context(request, unit):
    """Retrieves more context units.

    :return: An object in JSON notation that contains the source and target
             texts for units that are in the context of unit ``uid``.
    """
    gap = int(request.GET.get('gap', 0))
    qty = int(request.GET.get('qty', 1))

    payload = {
        "ctx": _filter_ctx_units(request.store.units, unit, qty, gap),
    }
    return HttpResponse(jsonify(payload), status=200,
                        mimetype="application/json")
@never_cache
@get_unit_context('view')
def timeline(request, unit):
    """Returns a JSON-encoded string including the changes to the unit
    rendered in HTML.
    """
    # Only target, state and comment submissions show up in the timeline.
    timeline = Submission.objects.filter(unit=unit, field__in=[
        SubmissionFields.TARGET, SubmissionFields.STATE,
        SubmissionFields.COMMENT
    ])
    timeline = timeline.select_related("submitter__user",
                                       "translation_project__language")
    context = {}
    entries_group = []
    import locale
    from pootle_store.fields import to_python
    # Submissions sharing a creation_time are grouped into one entry.
    for key, values in groupby(timeline, key=lambda x: x.creation_time):
        entry_group = {
            'datetime': key,
            'datetime_str': key.strftime(locale.nl_langinfo(locale.D_T_FMT)),
            'entries': [],
        }
        for item in values:
            # Only add submitter information for the whole entry group once
            entry_group.setdefault('submitter', item.submitter)
            context.setdefault('language', item.translation_project.language)
            entry = {
                'field': item.field,
                'field_name': SubmissionFields.NAMES_MAP[item.field],
            }
            if item.field == SubmissionFields.STATE:
                # State submissions store serialized state ints; map them
                # to their display labels.
                entry['old_value'] = STATES_MAP[int(to_python(item.old_value))]
                entry['new_value'] = STATES_MAP[int(to_python(item.new_value))]
            else:
                entry['new_value'] = to_python(item.new_value)
            entry_group['entries'].append(entry)
        entries_group.append(entry_group)
    # Let's reverse the chronological order
    entries_group.reverse()
    # Remove first timeline item if it's solely a change to the target
    if (entries_group and len(entries_group[0]['entries']) == 1 and
            entries_group[0]['entries'][0]['field'] == SubmissionFields.TARGET):
        del entries_group[0]
    context['entries_group'] = entries_group
    if request.is_ajax():
        # The client will want to confirm that the response is relevant for
        # the unit on screen at the time of receiving this, so we add the uid.
        json = {'uid': unit.id}
        t = loader.get_template('unit/xhr-timeline.html')
        c = RequestContext(request, context)
        json['timeline'] = t.render(c).replace('\n', '')
        response = simplejson.dumps(json)
        return HttpResponse(response, mimetype="application/json")
    else:
        return render_to_response('unit/timeline.html', context,
                                  context_instance=RequestContext(request))
@ajax_required
@get_unit_context('translate')
def comment(request, unit):
    """Stores a new comment for the given ``unit``.

    :return: If the form validates, the cleaned comment is returned.
             An error message is returned otherwise.
    """
    # Update current unit instance's attributes
    unit.commented_by = request.profile
    unit.commented_on = timezone.now()
    language = request.translation_project.language
    form = unit_comment_form_factory(language)(request.POST, instance=unit,
                                               request=request)
    if form.is_valid():
        form.save()
        context = {
            'unit': unit,
            'language': language,
        }
        # Return the rendered comment markup so the client can insert it.
        t = loader.get_template('unit/comment.html')
        c = RequestContext(request, context)
        json = {'comment': t.render(c)}
        rcode = 200
    else:
        json = {'msg': _("Comment submission failed.")}
        rcode = 400
    response = simplejson.dumps(json)
    return HttpResponse(response, status=rcode, mimetype="application/json")
@never_cache
@ajax_required
@get_unit_context('view')
def get_edit_unit(request, unit):
    """Given a store path ``pootle_path`` and unit id ``uid``, gathers all the
    necessary information to build the editing widget.

    :return: A templatised editing widget is returned within the ``editor``
             variable and paging information is also returned if the page
             number has changed.
    """
    json = {}
    translation_project = request.translation_project
    language = translation_project.language
    # Units with plural sources need one form row per plural form.
    if unit.hasplural():
        snplurals = len(unit.source.strings)
    else:
        snplurals = None
    form_class = unit_form_factory(language, snplurals, request)
    form = form_class(instance=unit)
    comment_form_class = unit_comment_form_factory(language)
    comment_form = comment_form_class({}, instance=unit)
    store = unit.store
    directory = store.parent
    profile = request.profile
    alt_src_langs = get_alt_src_langs(request, profile, translation_project)
    project = translation_project.project
    report_target = ensure_uri(project.report_target)
    suggestions = get_sugg_list(unit)
    template_vars = {
        'unit': unit,
        'form': form,
        'comment_form': comment_form,
        'store': store,
        'directory': directory,
        'profile': profile,
        'user': request.user,
        'language': language,
        'source_language': translation_project.project.source_language,
        'cantranslate': check_profile_permission(profile, "translate",
                                                 directory),
        'cansuggest': check_profile_permission(profile, "suggest", directory),
        'canreview': check_profile_permission(profile, "review", directory),
        'altsrcs': find_altsrcs(unit, alt_src_langs, store=store,
                                project=project),
        'report_target': report_target,
        'suggestions': suggestions,
    }
    # Terminology units use a dedicated editing template.
    if translation_project.project.is_terminology or store.is_terminology:
        t = loader.get_template('unit/term_edit.html')
    else:
        t = loader.get_template('unit/edit.html')
    c = RequestContext(request, template_vars)
    json['editor'] = t.render(c)
    rcode = 200
    # Return context rows if filtering is applied but
    # don't return any if the user has asked not to have it
    current_filter = request.GET.get('filter', 'all')
    show_ctx = request.COOKIES.get('ctxShow', 'true')
    if ((_is_filtered(request) or current_filter not in ('all',)) and
            show_ctx == 'true'):
        # TODO: review if this first 'if' branch makes sense
        if translation_project.project.is_terminology or store.is_terminology:
            json['ctx'] = _filter_ctx_units(store.units, unit, 0)
        else:
            ctx_qty = int(request.COOKIES.get('ctxQty', 1))
            json['ctx'] = _filter_ctx_units(store.units, unit, ctx_qty)
    response = jsonify(json)
    return HttpResponse(response, status=rcode, mimetype="application/json")
def get_failing_checks(request, pathobj):
    """Gets a list of failing checks for the current object.
    :return: JSON string with a list of failing check categories which
             include the actual checks that are failing.
    """
    raw_stats = get_raw_stats(pathobj)
    check_failures = get_quality_check_failures(pathobj, raw_stats,
                                                include_url=False)
    return HttpResponse(jsonify(check_failures),
                        mimetype="application/json")
@ajax_required
@get_store_context('view')
def get_failing_checks_store(request, store):
    # Thin AJAX wrapper: the @get_store_context decorator resolves the
    # store; delegate the actual work to get_failing_checks().
    return get_failing_checks(request, store)
@ajax_required
@get_unit_context('')
def submit(request, unit):
    """Processes translation submissions and stores them in the database.
    :return: An object in JSON notation that contains the previous and last
        units for the unit next to unit ``uid``.
    """
    json = {}
    cantranslate = check_permission("translate", request)
    if not cantranslate:
        raise PermissionDenied(_("You do not have rights to access "
                                 "translation mode."))
    translation_project = request.translation_project
    language = translation_project.language
    if unit.hasplural():
        snplurals = len(unit.source.strings)
    else:
        snplurals = None
    # Store current time so that it is the same for all submissions
    current_time = timezone.now()
    # Update current unit instance's attributes
    unit.submitted_by = request.profile
    unit.submitted_on = current_time
    form_class = unit_form_factory(language, snplurals, request)
    form = form_class(request.POST, instance=unit)
    if form.is_valid():
        if form.updated_fields:
            # One Submission row per changed field, for history/stats.
            for field, old_value, new_value in form.updated_fields:
                sub = Submission(
                    creation_time=current_time,
                    translation_project=translation_project,
                    submitter=request.profile,
                    unit=unit,
                    field=field,
                    type=SubmissionTypes.NORMAL,
                    old_value=old_value,
                    new_value=new_value,
                )
                sub.save()
            form.save()
            translation_submitted.send(
                sender=translation_project,
                unit=form.instance,
                profile=request.profile,
            )
        rcode = 200
    else:
        # Form failed
        #FIXME: we should display validation errors here
        rcode = 400
        json["msg"] = _("Failed to process submission.")
    response = jsonify(json)
    return HttpResponse(response, status=rcode, mimetype="application/json")
@ajax_required
@get_unit_context('')
def suggest(request, unit):
    """Processes translation suggestions and stores them in the database.
    :return: An object in JSON notation that contains the previous and last
        units for the unit next to unit ``uid``.
    """
    json = {}
    cansuggest = check_permission("suggest", request)
    if not cansuggest:
        raise PermissionDenied(_("You do not have rights to access "
                                 "translation mode."))
    translation_project = request.translation_project
    language = translation_project.language
    if unit.hasplural():
        snplurals = len(unit.source.strings)
    else:
        snplurals = None
    form_class = unit_form_factory(language, snplurals, request)
    form = form_class(request.POST, instance=unit)
    if form.is_valid():
        if form.instance._target_updated:
            # TODO: Review if this hackish method is still necessary
            #HACKISH: django 1.2 stupidly modifies instance on
            # model form validation, reload unit from db
            unit = Unit.objects.get(id=unit.id)
            sugg = unit.add_suggestion(form.cleaned_data['target_f'],
                                       request.profile)
            if sugg:
                SuggestionStat.objects.get_or_create(
                    translation_project=translation_project,
                    suggester=request.profile, state='pending', unit=unit.id
                )
        rcode = 200
    else:
        # Form failed
        #FIXME: we should display validation errors here
        rcode = 400
        json["msg"] = _("Failed to process suggestion.")
    response = jsonify(json)
    return HttpResponse(response, status=rcode, mimetype="application/json")
@ajax_required
@get_unit_context('')
def reject_suggestion(request, unit, suggid):
    """Rejects suggestion ``suggid`` for ``unit`` and updates its stats row."""
    json = {}
    translation_project = request.translation_project
    json["udbid"] = unit.id
    json["sugid"] = suggid
    if request.POST.get('reject'):
        try:
            sugg = unit.suggestion_set.get(id=suggid)
        except ObjectDoesNotExist:
            raise Http404
        # NOTE(review): looks like reviewers may reject anything while other
        # authenticated users may only withdraw their own suggestions --
        # confirm the intended precedence of this boolean expression.
        if (not check_permission('review', request) and
            (not request.user.is_authenticated() or sugg and
             sugg.user != request.profile)):
            raise PermissionDenied(_("You do not have rights to access "
                                     "review mode."))
        success = unit.reject_suggestion(suggid)
        if sugg is not None and success:
            # FIXME: we need a totally different model for tracking stats, this
            # is just lame
            suggstat, created = SuggestionStat.objects.get_or_create(
                translation_project=translation_project,
                suggester=sugg.user,
                state='pending',
                unit=unit.id,
            )
            suggstat.reviewer = request.profile
            suggstat.state = 'rejected'
            suggstat.save()
    response = jsonify(json)
    return HttpResponse(response, mimetype="application/json")
@ajax_required
@get_unit_context('review')
def accept_suggestion(request, unit, suggid):
    """Accepts suggestion ``suggid`` for ``unit``; records a Submission row
    and returns refreshed targets/diffs for the remaining suggestions."""
    json = {
        'udbid': unit.id,
        'sugid': suggid,
    }
    translation_project = request.translation_project
    if request.POST.get('accept'):
        try:
            suggestion = unit.suggestion_set.get(id=suggid)
        except ObjectDoesNotExist:
            raise Http404
        old_target = unit.target
        success = unit.accept_suggestion(suggid)
        json['newtargets'] = [highlight_whitespace(target)
                              for target in unit.target.strings]
        json['newdiffs'] = {}
        # Diff every remaining suggestion against the freshly accepted target.
        for sugg in unit.get_suggestions():
            json['newdiffs'][sugg.id] = \
                [highlight_diffs(unit.target.strings[i], target)
                 for i, target in enumerate(sugg.target.strings)]
        if suggestion is not None and success:
            if suggestion.user:
                translation_submitted.send(sender=translation_project,
                                           unit=unit, profile=suggestion.user)
            # FIXME: we need a totally different model for tracking stats, this
            # is just lame
            suggstat, created = SuggestionStat.objects.get_or_create(
                translation_project=translation_project,
                suggester=suggestion.user,
                state='pending',
                unit=unit.id,
            )
            suggstat.reviewer = request.profile
            suggstat.state = 'accepted'
            suggstat.save()
            # For now assume the target changed
            # TODO: check all fields for changes
            creation_time = timezone.now()
            sub = Submission(
                creation_time=creation_time,
                translation_project=translation_project,
                submitter=suggestion.user,
                from_suggestion=suggstat,
                unit=unit,
                field=SubmissionFields.TARGET,
                type=SubmissionTypes.SUGG_ACCEPT,
                old_value=old_target,
                new_value=unit.target,
            )
            sub.save()
    response = jsonify(json)
    return HttpResponse(response, mimetype="application/json")
@ajax_required
def clear_vote(request, voteid):
    """Deletes the vote ``voteid`` if it belongs to the requesting user."""
    json = {'voteid': voteid}
    if request.POST.get('clear'):
        from voting.models import Vote
        try:
            vote = Vote.objects.get(pk=voteid)
        except ObjectDoesNotExist:
            raise Http404
        if vote.user != request.user:
            # No i18n, will not go to UI
            raise PermissionDenied("Users can only remove their own votes")
        vote.delete()
    return HttpResponse(jsonify(json), mimetype="application/json")
@ajax_required
@get_unit_context('')
def vote_up(request, unit, suggid):
    """Records an up-vote by the current user on suggestion ``suggid``."""
    json = {}
    json["suggid"] = suggid
    if request.POST.get('up'):
        try:
            suggestion = unit.suggestion_set.get(id=suggid)
            from voting.models import Vote
            # Why can't it just return the vote object?
            Vote.objects.record_vote(suggestion, request.user, +1)
            json["voteid"] = Vote.objects.get_for_user(suggestion,
                                                       request.user).id
        except ObjectDoesNotExist:
            raise Http404(_("The suggestion or vote is not valid any more."))
    response = jsonify(json)
    return HttpResponse(response, mimetype="application/json")
@ajax_required
@get_unit_context('review')
def reject_qualitycheck(request, unit, checkid):
    """Flags quality check ``checkid`` on ``unit`` as a false positive."""
    json = {'udbid': unit.id, 'checkid': checkid}
    if request.POST.get('reject'):
        try:
            quality_check = unit.qualitycheck_set.get(id=checkid)
            quality_check.false_positive = True
            quality_check.save()
            # update timestamp
            unit.save()
        except ObjectDoesNotExist:
            raise Http404
    return HttpResponse(jsonify(json), mimetype="application/json")
|
gpl-2.0
| -624,292,103,243,381,500
| 35.844483
| 81
| 0.601113
| false
| 4.211739
| false
| false
| false
|
popravich/cantal_tools
|
cantal_tools/_fork.py
|
1
|
3229
|
import time
import cantal
from cantal import fork
from contextlib import contextmanager
class Branch(fork.Branch):
    """Fork branch that additionally counts exceptions raised while the
    branch is active (reported on the ``errors`` metric)."""
    __slots__ = fork.Branch.__slots__ + ('_errors',)
    def __init__(self, suffix, state, parent, **kwargs):
        super(Branch, self).__init__(suffix, state, parent, **kwargs)
        self._errors = cantal.Counter(state=state + '.' + suffix,
                                      metric='errors', **kwargs)
    def exit(self):
        # Leave this branch; the parent switches back to its idle state.
        self._parent.exit_branch(self)
    def enter(self, end_current=True):
        self._parent.enter_branch(self, end_current=end_current)
    @contextmanager
    def context(self):
        # Temporarily switch into this branch, restoring the previously
        # active branch (if any) when the block finishes.
        cur_branch = self._parent._branch
        self._parent.enter_branch(self, end_current=False)
        try:
            yield
        except Exception:
            self._errors.incr(1)
            raise
        finally:
            self.exit()
            if cur_branch:
                cur_branch.enter()
    def _commit(self, start, fin, increment=True):
        # Account one completed run: bump the counter (unless this is a
        # resume) and add the elapsed time to the duration metric.
        if increment:
            self._counter.incr(1)
        self._duration.incr(fin - start)
class Fork(fork.Fork):
    """Custom Fork class without branches argument, instead
    ensure_branch must be used.
    """
    def __init__(self, state, **kwargs):
        self._state = cantal.State(state=state, **kwargs)
        self._kwargs = kwargs
        self._kwargs['state'] = state
        self._branch = None
        # We do our best not to crash any code which does accounting the
        # wrong way. So to report the problems we use a separate counter
        self._err = cantal.Counter(metric="err", **self._kwargs)
    def enter_branch(self, branch, end_current=True):
        # Timestamps are kept in milliseconds.
        ts = int(time.time()*1000)
        if self._branch is not None:
            self._branch._commit(self._timestamp, ts, increment=end_current)
        self._state.enter(branch.name, _timestamp=ts)
        self._timestamp = ts
        self._branch = branch
    def exit_branch(self, branch):
        ts = int(time.time() * 1000)
        if self._branch is None:
            # Exiting with no active branch is an accounting error.
            # NOTE(review): branch._commit is still invoked below in this
            # case -- confirm that is intended.
            self._err.incr()
        branch._commit(self._timestamp, ts)
        self._state.enter('_')
        self._timestamp = ts
        self._branch = None
    def ensure_branches(self, *branches):
        # Lazily create one Branch per name, memoized as attributes.
        ret = []
        for name in branches:
            branch = getattr(self, name, None)
            assert branch is None or isinstance(branch, Branch), (name, branch)
            if branch is None:
                branch = Branch(name, parent=self, **self._kwargs)
                setattr(self, name, branch)
            ret.append(branch)
        if len(branches) == 1:
            return ret[0]
        return ret
    @contextmanager
    def context(self):
        if self._branch is not None:
            # A branch left active from before is an accounting error.
            self._err.incr()
            self._branch = None
        self._state.enter('_')
        try:
            yield
        except Exception:
            if self._branch is not None:
                self._branch._errors.incr(1)
            raise
        finally:
            ts = int(time.time()*1000)
            if self._branch is not None:
                self._branch._commit(self._timestamp, ts)
            self._state.exit()
            self._branch = None
|
mit
| 1,769,075,623,500,826,000
| 30.349515
| 79
| 0.554351
| false
| 4.134443
| false
| false
| false
|
jackrzhang/zulip
|
zerver/management/commands/query_ldap.py
|
4
|
1271
|
from argparse import ArgumentParser
from typing import Any
from django.conf import settings
from django.contrib.auth import get_backends
from django.core.management.base import BaseCommand
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
# Quick tool to test whether you're correctly authenticating to LDAP
def query_ldap(**options: str) -> None:
    """Looks up ``options['email']`` through every configured LDAP auth
    backend and prints each mapped attribute, or a not-found message."""
    email = options['email']
    for backend in get_backends():
        if isinstance(backend, LDAPBackend):
            ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs
            if ldap_attrs is None:
                print("No such user found")
            else:
                # Print each Django field alongside its LDAP source value.
                for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
                    print("%s: %s" % (django_field, ldap_attrs[ldap_field]))
                if settings.LDAP_EMAIL_ATTR is not None:
                    print("%s: %s" % ('email', ldap_attrs[settings.LDAP_EMAIL_ATTR]))
class Command(BaseCommand):
    """Management command wrapper around :func:`query_ldap`."""
    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('email', metavar='<email>', type=str,
                            help="email of user to query")
    def handle(self, *args: Any, **options: str) -> None:
        query_ldap(**options)
|
apache-2.0
| -2,874,588,886,342,787,600
| 41.366667
| 89
| 0.638867
| false
| 4.113269
| false
| false
| false
|
gpotter2/scapy
|
scapy/autorun.py
|
1
|
6498
|
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <phil@secdev.org>
# This program is published under a GPLv2 license
"""
Run commands when the Scapy interpreter starts.
"""
from __future__ import print_function
import code
import sys
import importlib
import logging
from scapy.config import conf
from scapy.themes import NoTheme, DefaultTheme, HTMLTheme2, LatexTheme2
from scapy.error import log_scapy, Scapy_Exception
from scapy.utils import tex_escape
import scapy.modules.six as six
#########################
# Autorun stuff #
#########################
class StopAutorun(Scapy_Exception):
    """Raised to abort an autorun session; the capture helpers store the
    output produced so far in ``code_run`` before re-raising."""
    code_run = ""
class ScapyAutorunInterpreter(code.InteractiveInterpreter):
    """Interactive interpreter that remembers whether the last command
    failed (``self.error``) and lets StopAutorun escape to the caller."""

    def __init__(self, *args, **kargs):
        super(ScapyAutorunInterpreter, self).__init__(*args, **kargs)
        self.error = 0

    def showsyntaxerror(self, *args, **kargs):
        # Record that the submitted source failed to parse.
        self.error = 1
        return super(ScapyAutorunInterpreter, self).showsyntaxerror(*args, **kargs)  # noqa: E501

    def showtraceback(self, *args, **kargs):
        self.error = 1
        exc_value = sys.exc_info()[1]
        if isinstance(exc_value, StopAutorun):
            # Abort the whole autorun session, not just this command.
            raise exc_value
        return super(ScapyAutorunInterpreter, self).showtraceback(*args, **kargs)  # noqa: E501
def autorun_commands(cmds, my_globals=None, ignore_globals=None, verb=None):
    """Execute ``cmds`` as if typed into an interactive Scapy session.

    :param cmds: multi-line string of commands to run
    :param my_globals: globals dict for the interpreter (defaults to the
        ``scapy.all`` namespace)
    :param ignore_globals: iterable of names removed from the globals
        before execution
    :param verb: temporary verbosity level; ``conf.verb`` is restored on
        exit
    :returns: value of the last evaluated expression (``_``), or 0 on
        interpreter error
    """
    sv = conf.verb
    try:
        try:
            if my_globals is None:
                my_globals = importlib.import_module(".all", "scapy").__dict__
            if ignore_globals:
                for ig in ignore_globals:
                    my_globals.pop(ig, None)
            if verb is not None:
                conf.verb = verb
            interp = ScapyAutorunInterpreter(my_globals)
            cmd = ""
            cmds = cmds.splitlines()
            cmds.append("")  # ensure we finish multi-line commands
            cmds.reverse()
            six.moves.builtins.__dict__["_"] = None
            while True:
                if cmd:
                    sys.stderr.write(sys.__dict__.get("ps2", "... "))
                else:
                    # Use a literal fallback: sys.ps1 only exists in
                    # interactive sessions and dict.get() evaluates its
                    # default eagerly, so referencing sys.ps1 here raised
                    # AttributeError in non-interactive runs.
                    sys.stderr.write(str(sys.__dict__.get("ps1", ">>> ")))
                line = cmds.pop()
                print(line)
                cmd += "\n" + line
                if interp.runsource(cmd):
                    # Source is incomplete; keep accumulating lines.
                    continue
                if interp.error:
                    return 0
                cmd = ""
                if len(cmds) <= 1:
                    break
        except SystemExit:
            pass
    finally:
        conf.verb = sv
    return _  # noqa: F821
class StringWriter(object):
    """File-like object that accumulates everything written to it in its
    ``s`` attribute, optionally mirroring writes to ``debug``."""

    def __init__(self, debug=None):
        self.s = ""
        self.debug = debug

    def write(self, x):
        # getattr() guards: the object may be mid-destruction at
        # interpreter shutdown, so attributes can already be gone.
        dbg = getattr(self, "debug", None)
        if dbg:
            dbg.write(x)
        if getattr(self, "s", None) is not None:
            self.s += x

    def flush(self):
        dbg = getattr(self, "debug", None)
        if dbg:
            dbg.flush()
def autorun_get_interactive_session(cmds, **kargs):
    """Create an interactive session and execute the
    commands passed as "cmds" and return all output
    :param cmds: a list of commands to run
    :returns: (output, returned) contains both sys.stdout and sys.stderr logs
    """
    sstdout, sstderr = sys.stdout, sys.stderr
    sw = StringWriter()
    # Redirect scapy's logger into the capture buffer as well.
    h_old = log_scapy.handlers[0]
    log_scapy.removeHandler(h_old)
    log_scapy.addHandler(logging.StreamHandler(stream=sw))
    try:
        try:
            sys.stdout = sys.stderr = sw
            res = autorun_commands(cmds, **kargs)
        except StopAutorun as e:
            # Hand the captured output so far to the exception.
            e.code_run = sw.s
            raise
    finally:
        sys.stdout, sys.stderr = sstdout, sstderr
        # NOTE(review): assumes our StreamHandler is still handlers[0];
        # confirm nothing else installed a handler meanwhile.
        log_scapy.removeHandler(log_scapy.handlers[0])
        log_scapy.addHandler(h_old)
    return sw.s, res
def autorun_get_interactive_live_session(cmds, **kargs):
    """Run ``cmds`` in an interactive session, echoing output live to the
    real stdout while also capturing it.
    :param cmds: a list of commands to run
    :returns: (output, returned) contains both sys.stdout and sys.stderr logs
    """
    saved_streams = sys.stdout, sys.stderr
    sw = StringWriter(debug=saved_streams[0])
    try:
        sys.stdout = sys.stderr = sw
        res = autorun_commands(cmds, **kargs)
    except StopAutorun as e:
        e.code_run = sw.s
        raise
    finally:
        sys.stdout, sys.stderr = saved_streams
    return sw.s, res
def autorun_get_text_interactive_session(cmds, **kargs):
    """Run ``cmds`` in a captured session with coloring disabled."""
    saved_theme = conf.color_theme
    try:
        conf.color_theme = NoTheme()
        session_output, res = autorun_get_interactive_session(cmds, **kargs)
    finally:
        # Always restore whatever theme was active before.
        conf.color_theme = saved_theme
    return session_output, res
def autorun_get_live_interactive_session(cmds, **kargs):
    """Run ``cmds`` with live output and the default color theme."""
    saved_theme = conf.color_theme
    try:
        conf.color_theme = DefaultTheme()
        session_output, res = autorun_get_interactive_live_session(cmds, **kargs)
    finally:
        # Always restore whatever theme was active before.
        conf.color_theme = saved_theme
    return session_output, res
def autorun_get_ansi_interactive_session(cmds, **kargs):
    """Run ``cmds`` in a captured session keeping ANSI color codes."""
    saved_theme = conf.color_theme
    try:
        conf.color_theme = DefaultTheme()
        session_output, res = autorun_get_interactive_session(cmds, **kargs)
    finally:
        # Always restore whatever theme was active before.
        conf.color_theme = saved_theme
    return session_output, res
def autorun_get_html_interactive_session(cmds, **kargs):
    """Run ``cmds`` in a captured session themed for HTML output.

    The captured text is HTML-escaped, then the theme's ``#[#``/``#]#``
    markers are turned into real tags.
    """
    ct = conf.color_theme
    # BUGFIX: the escaping replaces had been reduced to no-ops
    # (replace("<", "<")) by lost character entities; '<' and '>' must be
    # escaped to "&lt;"/"&gt;" before the marker substitution.
    to_html = lambda s: s.replace("<", "&lt;").replace(">", "&gt;").replace("#[#", "<").replace("#]#", ">")  # noqa: E501
    try:
        try:
            conf.color_theme = HTMLTheme2()
            s, res = autorun_get_interactive_session(cmds, **kargs)
        except StopAutorun as e:
            e.code_run = to_html(e.code_run)
            raise
    finally:
        conf.color_theme = ct
    return to_html(s), res
def autorun_get_latex_interactive_session(cmds, **kargs):
    """Run ``cmds`` in a captured session themed for LaTeX output."""
    saved_theme = conf.color_theme

    def to_latex(s):
        # TeX-escape first, then translate the theme's @[@/@]@/@`@ markers.
        return tex_escape(s).replace("@[@", "{").replace("@]@", "}").replace("@`@", "\\")  # noqa: E501
    try:
        try:
            conf.color_theme = LatexTheme2()
            session_output, res = autorun_get_interactive_session(cmds, **kargs)
        except StopAutorun as e:
            e.code_run = to_latex(e.code_run)
            raise
    finally:
        conf.color_theme = saved_theme
    return to_latex(session_output), res
|
gpl-2.0
| 7,569,240,486,834,548,000
| 29.650943
| 121
| 0.576793
| false
| 3.64442
| false
| false
| false
|
Warboss-rus/wargameengine
|
WargameEngine/freetype2/src/tools/glnames.py
|
2
|
112194
|
#!/usr/bin/env python
#
#
# FreeType 2 glyph name builder
#
# Copyright 1996-2017 by
# David Turner, Robert Wilhelm, and Werner Lemberg.
#
# This file is part of the FreeType project, and may only be used, modified,
# and distributed under the terms of the FreeType project license,
# LICENSE.TXT. By continuing to use, modify, or distribute this file you
# indicate that you have read the license and understand and accept it
# fully.
"""\
usage: %s <output-file>
This python script generates the glyph names tables defined in the
`psnames' module.
Its single argument is the name of the header file to be created.
"""
import sys, string, struct, re, os.path
# This table lists the glyphs according to the Macintosh specification.
# It is used by the TrueType Postscript names table.
#
# See
#
# https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6post.html
#
# for the official list.
#
mac_standard_names = \
[
  # 258 entries; index == Macintosh glyph id in the TrueType `post' table.
  # 0
  ".notdef", ".null", "nonmarkingreturn", "space", "exclam",
  "quotedbl", "numbersign", "dollar", "percent", "ampersand",
  # 10
  "quotesingle", "parenleft", "parenright", "asterisk", "plus",
  "comma", "hyphen", "period", "slash", "zero",
  # 20
  "one", "two", "three", "four", "five",
  "six", "seven", "eight", "nine", "colon",
  # 30
  "semicolon", "less", "equal", "greater", "question",
  "at", "A", "B", "C", "D",
  # 40
  "E", "F", "G", "H", "I",
  "J", "K", "L", "M", "N",
  # 50
  "O", "P", "Q", "R", "S",
  "T", "U", "V", "W", "X",
  # 60
  "Y", "Z", "bracketleft", "backslash", "bracketright",
  "asciicircum", "underscore", "grave", "a", "b",
  # 70
  "c", "d", "e", "f", "g",
  "h", "i", "j", "k", "l",
  # 80
  "m", "n", "o", "p", "q",
  "r", "s", "t", "u", "v",
  # 90
  "w", "x", "y", "z", "braceleft",
  "bar", "braceright", "asciitilde", "Adieresis", "Aring",
  # 100
  "Ccedilla", "Eacute", "Ntilde", "Odieresis", "Udieresis",
  "aacute", "agrave", "acircumflex", "adieresis", "atilde",
  # 110
  "aring", "ccedilla", "eacute", "egrave", "ecircumflex",
  "edieresis", "iacute", "igrave", "icircumflex", "idieresis",
  # 120
  "ntilde", "oacute", "ograve", "ocircumflex", "odieresis",
  "otilde", "uacute", "ugrave", "ucircumflex", "udieresis",
  # 130
  "dagger", "degree", "cent", "sterling", "section",
  "bullet", "paragraph", "germandbls", "registered", "copyright",
  # 140
  "trademark", "acute", "dieresis", "notequal", "AE",
  "Oslash", "infinity", "plusminus", "lessequal", "greaterequal",
  # 150
  "yen", "mu", "partialdiff", "summation", "product",
  "pi", "integral", "ordfeminine", "ordmasculine", "Omega",
  # 160
  "ae", "oslash", "questiondown", "exclamdown", "logicalnot",
  "radical", "florin", "approxequal", "Delta", "guillemotleft",
  # 170
  "guillemotright", "ellipsis", "nonbreakingspace", "Agrave", "Atilde",
  "Otilde", "OE", "oe", "endash", "emdash",
  # 180
  "quotedblleft", "quotedblright", "quoteleft", "quoteright", "divide",
  "lozenge", "ydieresis", "Ydieresis", "fraction", "currency",
  # 190
  "guilsinglleft", "guilsinglright", "fi", "fl", "daggerdbl",
  "periodcentered", "quotesinglbase", "quotedblbase", "perthousand",
  "Acircumflex",
  # 200
  "Ecircumflex", "Aacute", "Edieresis", "Egrave", "Iacute",
  "Icircumflex", "Idieresis", "Igrave", "Oacute", "Ocircumflex",
  # 210
  "apple", "Ograve", "Uacute", "Ucircumflex", "Ugrave",
  "dotlessi", "circumflex", "tilde", "macron", "breve",
  # 220
  "dotaccent", "ring", "cedilla", "hungarumlaut", "ogonek",
  "caron", "Lslash", "lslash", "Scaron", "scaron",
  # 230
  "Zcaron", "zcaron", "brokenbar", "Eth", "eth",
  "Yacute", "yacute", "Thorn", "thorn", "minus",
  # 240
  "multiply", "onesuperior", "twosuperior", "threesuperior", "onehalf",
  "onequarter", "threequarters", "franc", "Gbreve", "gbreve",
  # 250
  "Idotaccent", "Scedilla", "scedilla", "Cacute", "cacute",
  "Ccaron", "ccaron", "dcroat"
]
# The list of standard `SID' glyph names. For the official list,
# see Annex A of document at
#
# http://partners.adobe.com/public/developer/en/font/5176.CFF.pdf .
#
sid_standard_names = \
[
  # 391 entries; index == SID, per Annex A of Adobe TN 5176 (CFF spec).
  # 0
  ".notdef", "space", "exclam", "quotedbl", "numbersign",
  "dollar", "percent", "ampersand", "quoteright", "parenleft",
  # 10
  "parenright", "asterisk", "plus", "comma", "hyphen",
  "period", "slash", "zero", "one", "two",
  # 20
  "three", "four", "five", "six", "seven",
  "eight", "nine", "colon", "semicolon", "less",
  # 30
  "equal", "greater", "question", "at", "A",
  "B", "C", "D", "E", "F",
  # 40
  "G", "H", "I", "J", "K",
  "L", "M", "N", "O", "P",
  # 50
  "Q", "R", "S", "T", "U",
  "V", "W", "X", "Y", "Z",
  # 60
  "bracketleft", "backslash", "bracketright", "asciicircum", "underscore",
  "quoteleft", "a", "b", "c", "d",
  # 70
  "e", "f", "g", "h", "i",
  "j", "k", "l", "m", "n",
  # 80
  "o", "p", "q", "r", "s",
  "t", "u", "v", "w", "x",
  # 90
  "y", "z", "braceleft", "bar", "braceright",
  "asciitilde", "exclamdown", "cent", "sterling", "fraction",
  # 100
  "yen", "florin", "section", "currency", "quotesingle",
  "quotedblleft", "guillemotleft", "guilsinglleft", "guilsinglright", "fi",
  # 110
  "fl", "endash", "dagger", "daggerdbl", "periodcentered",
  "paragraph", "bullet", "quotesinglbase", "quotedblbase", "quotedblright",
  # 120
  "guillemotright", "ellipsis", "perthousand", "questiondown", "grave",
  "acute", "circumflex", "tilde", "macron", "breve",
  # 130
  "dotaccent", "dieresis", "ring", "cedilla", "hungarumlaut",
  "ogonek", "caron", "emdash", "AE", "ordfeminine",
  # 140
  "Lslash", "Oslash", "OE", "ordmasculine", "ae",
  "dotlessi", "lslash", "oslash", "oe", "germandbls",
  # 150
  "onesuperior", "logicalnot", "mu", "trademark", "Eth",
  "onehalf", "plusminus", "Thorn", "onequarter", "divide",
  # 160
  "brokenbar", "degree", "thorn", "threequarters", "twosuperior",
  "registered", "minus", "eth", "multiply", "threesuperior",
  # 170
  "copyright", "Aacute", "Acircumflex", "Adieresis", "Agrave",
  "Aring", "Atilde", "Ccedilla", "Eacute", "Ecircumflex",
  # 180
  "Edieresis", "Egrave", "Iacute", "Icircumflex", "Idieresis",
  "Igrave", "Ntilde", "Oacute", "Ocircumflex", "Odieresis",
  # 190
  "Ograve", "Otilde", "Scaron", "Uacute", "Ucircumflex",
  "Udieresis", "Ugrave", "Yacute", "Ydieresis", "Zcaron",
  # 200
  "aacute", "acircumflex", "adieresis", "agrave", "aring",
  "atilde", "ccedilla", "eacute", "ecircumflex", "edieresis",
  # 210
  "egrave", "iacute", "icircumflex", "idieresis", "igrave",
  "ntilde", "oacute", "ocircumflex", "odieresis", "ograve",
  # 220
  "otilde", "scaron", "uacute", "ucircumflex", "udieresis",
  "ugrave", "yacute", "ydieresis", "zcaron", "exclamsmall",
  # 230
  "Hungarumlautsmall", "dollaroldstyle", "dollarsuperior", "ampersandsmall",
  "Acutesmall",
  "parenleftsuperior", "parenrightsuperior", "twodotenleader",
  "onedotenleader", "zerooldstyle",
  # 240
  "oneoldstyle", "twooldstyle", "threeoldstyle", "fouroldstyle",
  "fiveoldstyle",
  "sixoldstyle", "sevenoldstyle", "eightoldstyle", "nineoldstyle",
  "commasuperior",
  # 250
  "threequartersemdash", "periodsuperior", "questionsmall", "asuperior",
  "bsuperior",
  "centsuperior", "dsuperior", "esuperior", "isuperior", "lsuperior",
  # 260
  "msuperior", "nsuperior", "osuperior", "rsuperior", "ssuperior",
  "tsuperior", "ff", "ffi", "ffl", "parenleftinferior",
  # 270
  "parenrightinferior", "Circumflexsmall", "hyphensuperior", "Gravesmall",
  "Asmall",
  "Bsmall", "Csmall", "Dsmall", "Esmall", "Fsmall",
  # 280
  "Gsmall", "Hsmall", "Ismall", "Jsmall", "Ksmall",
  "Lsmall", "Msmall", "Nsmall", "Osmall", "Psmall",
  # 290
  "Qsmall", "Rsmall", "Ssmall", "Tsmall", "Usmall",
  "Vsmall", "Wsmall", "Xsmall", "Ysmall", "Zsmall",
  # 300
  "colonmonetary", "onefitted", "rupiah", "Tildesmall", "exclamdownsmall",
  "centoldstyle", "Lslashsmall", "Scaronsmall", "Zcaronsmall",
  "Dieresissmall",
  # 310
  "Brevesmall", "Caronsmall", "Dotaccentsmall", "Macronsmall", "figuredash",
  "hypheninferior", "Ogoneksmall", "Ringsmall", "Cedillasmall",
  "questiondownsmall",
  # 320
  "oneeighth", "threeeighths", "fiveeighths", "seveneighths", "onethird",
  "twothirds", "zerosuperior", "foursuperior", "fivesuperior",
  "sixsuperior",
  # 330
  "sevensuperior", "eightsuperior", "ninesuperior", "zeroinferior",
  "oneinferior",
  "twoinferior", "threeinferior", "fourinferior", "fiveinferior",
  "sixinferior",
  # 340
  "seveninferior", "eightinferior", "nineinferior", "centinferior",
  "dollarinferior",
  "periodinferior", "commainferior", "Agravesmall", "Aacutesmall",
  "Acircumflexsmall",
  # 350
  "Atildesmall", "Adieresissmall", "Aringsmall", "AEsmall", "Ccedillasmall",
  "Egravesmall", "Eacutesmall", "Ecircumflexsmall", "Edieresissmall",
  "Igravesmall",
  # 360
  "Iacutesmall", "Icircumflexsmall", "Idieresissmall", "Ethsmall",
  "Ntildesmall",
  "Ogravesmall", "Oacutesmall", "Ocircumflexsmall", "Otildesmall",
  "Odieresissmall",
  # 370
  "OEsmall", "Oslashsmall", "Ugravesmall", "Uacutesmall",
  "Ucircumflexsmall",
  "Udieresissmall", "Yacutesmall", "Thornsmall", "Ydieresissmall",
  "001.000",
  # 380
  "001.001", "001.002", "001.003", "Black", "Bold",
  "Book", "Light", "Medium", "Regular", "Roman",
  # 390
  "Semibold"
]
# This table maps character codes of the Adobe Standard Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_standard_encoding = \
[
  # 256 entries; value 0 means no glyph (.notdef) for that code point.
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 1, 2, 3, 4, 5, 6, 7, 8,
  9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
  19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
  29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
  39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
  49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
  59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
  69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
  79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
  89, 90, 91, 92, 93, 94, 95, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 96, 97, 98, 99, 100, 101, 102, 103, 104,
  105, 106, 107, 108, 109, 110, 0, 111, 112, 113,
  114, 0, 115, 116, 117, 118, 119, 120, 121, 122,
  0, 123, 0, 124, 125, 126, 127, 128, 129, 130,
  131, 0, 132, 133, 0, 134, 135, 136, 137, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 138, 0, 139, 0, 0,
  0, 0, 140, 141, 142, 143, 0, 0, 0, 0,
  0, 144, 0, 0, 0, 145, 0, 0, 146, 147,
  148, 149, 0, 0, 0, 0
]
# This table maps character codes of the Adobe Expert Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_expert_encoding = \
[
  # 256 entries; value 0 means no glyph (.notdef) for that code point.
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 1, 229, 230, 0, 231, 232, 233, 234,
  235, 236, 237, 238, 13, 14, 15, 99, 239, 240,
  241, 242, 243, 244, 245, 246, 247, 248, 27, 28,
  249, 250, 251, 252, 0, 253, 254, 255, 256, 257,
  0, 0, 0, 258, 0, 0, 259, 260, 261, 262,
  0, 0, 263, 264, 265, 0, 266, 109, 110, 267,
  268, 269, 0, 270, 271, 272, 273, 274, 275, 276,
  277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
  287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
  297, 298, 299, 300, 301, 302, 303, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 304, 305, 306, 0, 0, 307, 308, 309, 310,
  311, 0, 312, 0, 0, 313, 0, 0, 314, 315,
  0, 0, 316, 317, 318, 0, 0, 0, 158, 155,
  163, 319, 320, 321, 322, 323, 324, 325, 0, 0,
  326, 150, 164, 169, 327, 328, 329, 330, 331, 332,
  333, 334, 335, 336, 337, 338, 339, 340, 341, 342,
  343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
  353, 354, 355, 356, 357, 358, 359, 360, 361, 362,
  363, 364, 365, 366, 367, 368, 369, 370, 371, 372,
  373, 374, 375, 376, 377, 378
]
# This data has been taken literally from the files `glyphlist.txt'
# and `zapfdingbats.txt' version 2.0, Sept 2002. It is available from
#
# https://github.com/adobe-type-tools/agl-aglfn
#
adobe_glyph_list = """\
A;0041
AE;00C6
AEacute;01FC
AEmacron;01E2
AEsmall;F7E6
Aacute;00C1
Aacutesmall;F7E1
Abreve;0102
Abreveacute;1EAE
Abrevecyrillic;04D0
Abrevedotbelow;1EB6
Abrevegrave;1EB0
Abrevehookabove;1EB2
Abrevetilde;1EB4
Acaron;01CD
Acircle;24B6
Acircumflex;00C2
Acircumflexacute;1EA4
Acircumflexdotbelow;1EAC
Acircumflexgrave;1EA6
Acircumflexhookabove;1EA8
Acircumflexsmall;F7E2
Acircumflextilde;1EAA
Acute;F6C9
Acutesmall;F7B4
Acyrillic;0410
Adblgrave;0200
Adieresis;00C4
Adieresiscyrillic;04D2
Adieresismacron;01DE
Adieresissmall;F7E4
Adotbelow;1EA0
Adotmacron;01E0
Agrave;00C0
Agravesmall;F7E0
Ahookabove;1EA2
Aiecyrillic;04D4
Ainvertedbreve;0202
Alpha;0391
Alphatonos;0386
Amacron;0100
Amonospace;FF21
Aogonek;0104
Aring;00C5
Aringacute;01FA
Aringbelow;1E00
Aringsmall;F7E5
Asmall;F761
Atilde;00C3
Atildesmall;F7E3
Aybarmenian;0531
B;0042
Bcircle;24B7
Bdotaccent;1E02
Bdotbelow;1E04
Becyrillic;0411
Benarmenian;0532
Beta;0392
Bhook;0181
Blinebelow;1E06
Bmonospace;FF22
Brevesmall;F6F4
Bsmall;F762
Btopbar;0182
C;0043
Caarmenian;053E
Cacute;0106
Caron;F6CA
Caronsmall;F6F5
Ccaron;010C
Ccedilla;00C7
Ccedillaacute;1E08
Ccedillasmall;F7E7
Ccircle;24B8
Ccircumflex;0108
Cdot;010A
Cdotaccent;010A
Cedillasmall;F7B8
Chaarmenian;0549
Cheabkhasiancyrillic;04BC
Checyrillic;0427
Chedescenderabkhasiancyrillic;04BE
Chedescendercyrillic;04B6
Chedieresiscyrillic;04F4
Cheharmenian;0543
Chekhakassiancyrillic;04CB
Cheverticalstrokecyrillic;04B8
Chi;03A7
Chook;0187
Circumflexsmall;F6F6
Cmonospace;FF23
Coarmenian;0551
Csmall;F763
D;0044
DZ;01F1
DZcaron;01C4
Daarmenian;0534
Dafrican;0189
Dcaron;010E
Dcedilla;1E10
Dcircle;24B9
Dcircumflexbelow;1E12
Dcroat;0110
Ddotaccent;1E0A
Ddotbelow;1E0C
Decyrillic;0414
Deicoptic;03EE
Delta;2206
Deltagreek;0394
Dhook;018A
Dieresis;F6CB
DieresisAcute;F6CC
DieresisGrave;F6CD
Dieresissmall;F7A8
Digammagreek;03DC
Djecyrillic;0402
Dlinebelow;1E0E
Dmonospace;FF24
Dotaccentsmall;F6F7
Dslash;0110
Dsmall;F764
Dtopbar;018B
Dz;01F2
Dzcaron;01C5
Dzeabkhasiancyrillic;04E0
Dzecyrillic;0405
Dzhecyrillic;040F
E;0045
Eacute;00C9
Eacutesmall;F7E9
Ebreve;0114
Ecaron;011A
Ecedillabreve;1E1C
Echarmenian;0535
Ecircle;24BA
Ecircumflex;00CA
Ecircumflexacute;1EBE
Ecircumflexbelow;1E18
Ecircumflexdotbelow;1EC6
Ecircumflexgrave;1EC0
Ecircumflexhookabove;1EC2
Ecircumflexsmall;F7EA
Ecircumflextilde;1EC4
Ecyrillic;0404
Edblgrave;0204
Edieresis;00CB
Edieresissmall;F7EB
Edot;0116
Edotaccent;0116
Edotbelow;1EB8
Efcyrillic;0424
Egrave;00C8
Egravesmall;F7E8
Eharmenian;0537
Ehookabove;1EBA
Eightroman;2167
Einvertedbreve;0206
Eiotifiedcyrillic;0464
Elcyrillic;041B
Elevenroman;216A
Emacron;0112
Emacronacute;1E16
Emacrongrave;1E14
Emcyrillic;041C
Emonospace;FF25
Encyrillic;041D
Endescendercyrillic;04A2
Eng;014A
Enghecyrillic;04A4
Enhookcyrillic;04C7
Eogonek;0118
Eopen;0190
Epsilon;0395
Epsilontonos;0388
Ercyrillic;0420
Ereversed;018E
Ereversedcyrillic;042D
Escyrillic;0421
Esdescendercyrillic;04AA
Esh;01A9
Esmall;F765
Eta;0397
Etarmenian;0538
Etatonos;0389
Eth;00D0
Ethsmall;F7F0
Etilde;1EBC
Etildebelow;1E1A
Euro;20AC
Ezh;01B7
Ezhcaron;01EE
Ezhreversed;01B8
F;0046
Fcircle;24BB
Fdotaccent;1E1E
Feharmenian;0556
Feicoptic;03E4
Fhook;0191
Fitacyrillic;0472
Fiveroman;2164
Fmonospace;FF26
Fourroman;2163
Fsmall;F766
G;0047
GBsquare;3387
Gacute;01F4
Gamma;0393
Gammaafrican;0194
Gangiacoptic;03EA
Gbreve;011E
Gcaron;01E6
Gcedilla;0122
Gcircle;24BC
Gcircumflex;011C
Gcommaaccent;0122
Gdot;0120
Gdotaccent;0120
Gecyrillic;0413
Ghadarmenian;0542
Ghemiddlehookcyrillic;0494
Ghestrokecyrillic;0492
Gheupturncyrillic;0490
Ghook;0193
Gimarmenian;0533
Gjecyrillic;0403
Gmacron;1E20
Gmonospace;FF27
Grave;F6CE
Gravesmall;F760
Gsmall;F767
Gsmallhook;029B
Gstroke;01E4
H;0048
H18533;25CF
H18543;25AA
H18551;25AB
H22073;25A1
HPsquare;33CB
Haabkhasiancyrillic;04A8
Hadescendercyrillic;04B2
Hardsigncyrillic;042A
Hbar;0126
Hbrevebelow;1E2A
Hcedilla;1E28
Hcircle;24BD
Hcircumflex;0124
Hdieresis;1E26
Hdotaccent;1E22
Hdotbelow;1E24
Hmonospace;FF28
Hoarmenian;0540
Horicoptic;03E8
Hsmall;F768
Hungarumlaut;F6CF
Hungarumlautsmall;F6F8
Hzsquare;3390
I;0049
IAcyrillic;042F
IJ;0132
IUcyrillic;042E
Iacute;00CD
Iacutesmall;F7ED
Ibreve;012C
Icaron;01CF
Icircle;24BE
Icircumflex;00CE
Icircumflexsmall;F7EE
Icyrillic;0406
Idblgrave;0208
Idieresis;00CF
Idieresisacute;1E2E
Idieresiscyrillic;04E4
Idieresissmall;F7EF
Idot;0130
Idotaccent;0130
Idotbelow;1ECA
Iebrevecyrillic;04D6
Iecyrillic;0415
Ifraktur;2111
Igrave;00CC
Igravesmall;F7EC
Ihookabove;1EC8
Iicyrillic;0418
Iinvertedbreve;020A
Iishortcyrillic;0419
Imacron;012A
Imacroncyrillic;04E2
Imonospace;FF29
Iniarmenian;053B
Iocyrillic;0401
Iogonek;012E
Iota;0399
Iotaafrican;0196
Iotadieresis;03AA
Iotatonos;038A
Ismall;F769
Istroke;0197
Itilde;0128
Itildebelow;1E2C
Izhitsacyrillic;0474
Izhitsadblgravecyrillic;0476
J;004A
Jaarmenian;0541
Jcircle;24BF
Jcircumflex;0134
Jecyrillic;0408
Jheharmenian;054B
Jmonospace;FF2A
Jsmall;F76A
K;004B
KBsquare;3385
KKsquare;33CD
Kabashkircyrillic;04A0
Kacute;1E30
Kacyrillic;041A
Kadescendercyrillic;049A
Kahookcyrillic;04C3
Kappa;039A
Kastrokecyrillic;049E
Kaverticalstrokecyrillic;049C
Kcaron;01E8
Kcedilla;0136
Kcircle;24C0
Kcommaaccent;0136
Kdotbelow;1E32
Keharmenian;0554
Kenarmenian;053F
Khacyrillic;0425
Kheicoptic;03E6
Khook;0198
Kjecyrillic;040C
Klinebelow;1E34
Kmonospace;FF2B
Koppacyrillic;0480
Koppagreek;03DE
Ksicyrillic;046E
Ksmall;F76B
L;004C
LJ;01C7
LL;F6BF
Lacute;0139
Lambda;039B
Lcaron;013D
Lcedilla;013B
Lcircle;24C1
Lcircumflexbelow;1E3C
Lcommaaccent;013B
Ldot;013F
Ldotaccent;013F
Ldotbelow;1E36
Ldotbelowmacron;1E38
Liwnarmenian;053C
Lj;01C8
Ljecyrillic;0409
Llinebelow;1E3A
Lmonospace;FF2C
Lslash;0141
Lslashsmall;F6F9
Lsmall;F76C
M;004D
MBsquare;3386
Macron;F6D0
Macronsmall;F7AF
Macute;1E3E
Mcircle;24C2
Mdotaccent;1E40
Mdotbelow;1E42
Menarmenian;0544
Mmonospace;FF2D
Msmall;F76D
Mturned;019C
Mu;039C
N;004E
NJ;01CA
Nacute;0143
Ncaron;0147
Ncedilla;0145
Ncircle;24C3
Ncircumflexbelow;1E4A
Ncommaaccent;0145
Ndotaccent;1E44
Ndotbelow;1E46
Nhookleft;019D
Nineroman;2168
Nj;01CB
Njecyrillic;040A
Nlinebelow;1E48
Nmonospace;FF2E
Nowarmenian;0546
Nsmall;F76E
Ntilde;00D1
Ntildesmall;F7F1
Nu;039D
O;004F
OE;0152
OEsmall;F6FA
Oacute;00D3
Oacutesmall;F7F3
Obarredcyrillic;04E8
Obarreddieresiscyrillic;04EA
Obreve;014E
Ocaron;01D1
Ocenteredtilde;019F
Ocircle;24C4
Ocircumflex;00D4
Ocircumflexacute;1ED0
Ocircumflexdotbelow;1ED8
Ocircumflexgrave;1ED2
Ocircumflexhookabove;1ED4
Ocircumflexsmall;F7F4
Ocircumflextilde;1ED6
Ocyrillic;041E
Odblacute;0150
Odblgrave;020C
Odieresis;00D6
Odieresiscyrillic;04E6
Odieresissmall;F7F6
Odotbelow;1ECC
Ogoneksmall;F6FB
Ograve;00D2
Ogravesmall;F7F2
Oharmenian;0555
Ohm;2126
Ohookabove;1ECE
Ohorn;01A0
Ohornacute;1EDA
Ohorndotbelow;1EE2
Ohorngrave;1EDC
Ohornhookabove;1EDE
Ohorntilde;1EE0
Ohungarumlaut;0150
Oi;01A2
Oinvertedbreve;020E
Omacron;014C
Omacronacute;1E52
Omacrongrave;1E50
Omega;2126
Omegacyrillic;0460
Omegagreek;03A9
Omegaroundcyrillic;047A
Omegatitlocyrillic;047C
Omegatonos;038F
Omicron;039F
Omicrontonos;038C
Omonospace;FF2F
Oneroman;2160
Oogonek;01EA
Oogonekmacron;01EC
Oopen;0186
Oslash;00D8
Oslashacute;01FE
Oslashsmall;F7F8
Osmall;F76F
Ostrokeacute;01FE
Otcyrillic;047E
Otilde;00D5
Otildeacute;1E4C
Otildedieresis;1E4E
Otildesmall;F7F5
P;0050
Pacute;1E54
Pcircle;24C5
Pdotaccent;1E56
Pecyrillic;041F
Peharmenian;054A
Pemiddlehookcyrillic;04A6
Phi;03A6
Phook;01A4
Pi;03A0
Piwrarmenian;0553
Pmonospace;FF30
Psi;03A8
Psicyrillic;0470
Psmall;F770
Q;0051
Qcircle;24C6
Qmonospace;FF31
Qsmall;F771
R;0052
Raarmenian;054C
Racute;0154
Rcaron;0158
Rcedilla;0156
Rcircle;24C7
Rcommaaccent;0156
Rdblgrave;0210
Rdotaccent;1E58
Rdotbelow;1E5A
Rdotbelowmacron;1E5C
Reharmenian;0550
Rfraktur;211C
Rho;03A1
Ringsmall;F6FC
Rinvertedbreve;0212
Rlinebelow;1E5E
Rmonospace;FF32
Rsmall;F772
Rsmallinverted;0281
Rsmallinvertedsuperior;02B6
S;0053
SF010000;250C
SF020000;2514
SF030000;2510
SF040000;2518
SF050000;253C
SF060000;252C
SF070000;2534
SF080000;251C
SF090000;2524
SF100000;2500
SF110000;2502
SF190000;2561
SF200000;2562
SF210000;2556
SF220000;2555
SF230000;2563
SF240000;2551
SF250000;2557
SF260000;255D
SF270000;255C
SF280000;255B
SF360000;255E
SF370000;255F
SF380000;255A
SF390000;2554
SF400000;2569
SF410000;2566
SF420000;2560
SF430000;2550
SF440000;256C
SF450000;2567
SF460000;2568
SF470000;2564
SF480000;2565
SF490000;2559
SF500000;2558
SF510000;2552
SF520000;2553
SF530000;256B
SF540000;256A
Sacute;015A
Sacutedotaccent;1E64
Sampigreek;03E0
Scaron;0160
Scarondotaccent;1E66
Scaronsmall;F6FD
Scedilla;015E
Schwa;018F
Schwacyrillic;04D8
Schwadieresiscyrillic;04DA
Scircle;24C8
Scircumflex;015C
Scommaaccent;0218
Sdotaccent;1E60
Sdotbelow;1E62
Sdotbelowdotaccent;1E68
Seharmenian;054D
Sevenroman;2166
Shaarmenian;0547
Shacyrillic;0428
Shchacyrillic;0429
Sheicoptic;03E2
Shhacyrillic;04BA
Shimacoptic;03EC
Sigma;03A3
Sixroman;2165
Smonospace;FF33
Softsigncyrillic;042C
Ssmall;F773
Stigmagreek;03DA
T;0054
Tau;03A4
Tbar;0166
Tcaron;0164
Tcedilla;0162
Tcircle;24C9
Tcircumflexbelow;1E70
Tcommaaccent;0162
Tdotaccent;1E6A
Tdotbelow;1E6C
Tecyrillic;0422
Tedescendercyrillic;04AC
Tenroman;2169
Tetsecyrillic;04B4
Theta;0398
Thook;01AC
Thorn;00DE
Thornsmall;F7FE
Threeroman;2162
Tildesmall;F6FE
Tiwnarmenian;054F
Tlinebelow;1E6E
Tmonospace;FF34
Toarmenian;0539
Tonefive;01BC
Tonesix;0184
Tonetwo;01A7
Tretroflexhook;01AE
Tsecyrillic;0426
Tshecyrillic;040B
Tsmall;F774
Twelveroman;216B
Tworoman;2161
U;0055
Uacute;00DA
Uacutesmall;F7FA
Ubreve;016C
Ucaron;01D3
Ucircle;24CA
Ucircumflex;00DB
Ucircumflexbelow;1E76
Ucircumflexsmall;F7FB
Ucyrillic;0423
Udblacute;0170
Udblgrave;0214
Udieresis;00DC
Udieresisacute;01D7
Udieresisbelow;1E72
Udieresiscaron;01D9
Udieresiscyrillic;04F0
Udieresisgrave;01DB
Udieresismacron;01D5
Udieresissmall;F7FC
Udotbelow;1EE4
Ugrave;00D9
Ugravesmall;F7F9
Uhookabove;1EE6
Uhorn;01AF
Uhornacute;1EE8
Uhorndotbelow;1EF0
Uhorngrave;1EEA
Uhornhookabove;1EEC
Uhorntilde;1EEE
Uhungarumlaut;0170
Uhungarumlautcyrillic;04F2
Uinvertedbreve;0216
Ukcyrillic;0478
Umacron;016A
Umacroncyrillic;04EE
Umacrondieresis;1E7A
Umonospace;FF35
Uogonek;0172
Upsilon;03A5
Upsilon1;03D2
Upsilonacutehooksymbolgreek;03D3
Upsilonafrican;01B1
Upsilondieresis;03AB
Upsilondieresishooksymbolgreek;03D4
Upsilonhooksymbol;03D2
Upsilontonos;038E
Uring;016E
Ushortcyrillic;040E
Usmall;F775
Ustraightcyrillic;04AE
Ustraightstrokecyrillic;04B0
Utilde;0168
Utildeacute;1E78
Utildebelow;1E74
V;0056
Vcircle;24CB
Vdotbelow;1E7E
Vecyrillic;0412
Vewarmenian;054E
Vhook;01B2
Vmonospace;FF36
Voarmenian;0548
Vsmall;F776
Vtilde;1E7C
W;0057
Wacute;1E82
Wcircle;24CC
Wcircumflex;0174
Wdieresis;1E84
Wdotaccent;1E86
Wdotbelow;1E88
Wgrave;1E80
Wmonospace;FF37
Wsmall;F777
X;0058
Xcircle;24CD
Xdieresis;1E8C
Xdotaccent;1E8A
Xeharmenian;053D
Xi;039E
Xmonospace;FF38
Xsmall;F778
Y;0059
Yacute;00DD
Yacutesmall;F7FD
Yatcyrillic;0462
Ycircle;24CE
Ycircumflex;0176
Ydieresis;0178
Ydieresissmall;F7FF
Ydotaccent;1E8E
Ydotbelow;1EF4
Yericyrillic;042B
Yerudieresiscyrillic;04F8
Ygrave;1EF2
Yhook;01B3
Yhookabove;1EF6
Yiarmenian;0545
Yicyrillic;0407
Yiwnarmenian;0552
Ymonospace;FF39
Ysmall;F779
Ytilde;1EF8
Yusbigcyrillic;046A
Yusbigiotifiedcyrillic;046C
Yuslittlecyrillic;0466
Yuslittleiotifiedcyrillic;0468
Z;005A
Zaarmenian;0536
Zacute;0179
Zcaron;017D
Zcaronsmall;F6FF
Zcircle;24CF
Zcircumflex;1E90
Zdot;017B
Zdotaccent;017B
Zdotbelow;1E92
Zecyrillic;0417
Zedescendercyrillic;0498
Zedieresiscyrillic;04DE
Zeta;0396
Zhearmenian;053A
Zhebrevecyrillic;04C1
Zhecyrillic;0416
Zhedescendercyrillic;0496
Zhedieresiscyrillic;04DC
Zlinebelow;1E94
Zmonospace;FF3A
Zsmall;F77A
Zstroke;01B5
a;0061
aabengali;0986
aacute;00E1
aadeva;0906
aagujarati;0A86
aagurmukhi;0A06
aamatragurmukhi;0A3E
aarusquare;3303
aavowelsignbengali;09BE
aavowelsigndeva;093E
aavowelsigngujarati;0ABE
abbreviationmarkarmenian;055F
abbreviationsigndeva;0970
abengali;0985
abopomofo;311A
abreve;0103
abreveacute;1EAF
abrevecyrillic;04D1
abrevedotbelow;1EB7
abrevegrave;1EB1
abrevehookabove;1EB3
abrevetilde;1EB5
acaron;01CE
acircle;24D0
acircumflex;00E2
acircumflexacute;1EA5
acircumflexdotbelow;1EAD
acircumflexgrave;1EA7
acircumflexhookabove;1EA9
acircumflextilde;1EAB
acute;00B4
acutebelowcmb;0317
acutecmb;0301
acutecomb;0301
acutedeva;0954
acutelowmod;02CF
acutetonecmb;0341
acyrillic;0430
adblgrave;0201
addakgurmukhi;0A71
adeva;0905
adieresis;00E4
adieresiscyrillic;04D3
adieresismacron;01DF
adotbelow;1EA1
adotmacron;01E1
ae;00E6
aeacute;01FD
aekorean;3150
aemacron;01E3
afii00208;2015
afii08941;20A4
afii10017;0410
afii10018;0411
afii10019;0412
afii10020;0413
afii10021;0414
afii10022;0415
afii10023;0401
afii10024;0416
afii10025;0417
afii10026;0418
afii10027;0419
afii10028;041A
afii10029;041B
afii10030;041C
afii10031;041D
afii10032;041E
afii10033;041F
afii10034;0420
afii10035;0421
afii10036;0422
afii10037;0423
afii10038;0424
afii10039;0425
afii10040;0426
afii10041;0427
afii10042;0428
afii10043;0429
afii10044;042A
afii10045;042B
afii10046;042C
afii10047;042D
afii10048;042E
afii10049;042F
afii10050;0490
afii10051;0402
afii10052;0403
afii10053;0404
afii10054;0405
afii10055;0406
afii10056;0407
afii10057;0408
afii10058;0409
afii10059;040A
afii10060;040B
afii10061;040C
afii10062;040E
afii10063;F6C4
afii10064;F6C5
afii10065;0430
afii10066;0431
afii10067;0432
afii10068;0433
afii10069;0434
afii10070;0435
afii10071;0451
afii10072;0436
afii10073;0437
afii10074;0438
afii10075;0439
afii10076;043A
afii10077;043B
afii10078;043C
afii10079;043D
afii10080;043E
afii10081;043F
afii10082;0440
afii10083;0441
afii10084;0442
afii10085;0443
afii10086;0444
afii10087;0445
afii10088;0446
afii10089;0447
afii10090;0448
afii10091;0449
afii10092;044A
afii10093;044B
afii10094;044C
afii10095;044D
afii10096;044E
afii10097;044F
afii10098;0491
afii10099;0452
afii10100;0453
afii10101;0454
afii10102;0455
afii10103;0456
afii10104;0457
afii10105;0458
afii10106;0459
afii10107;045A
afii10108;045B
afii10109;045C
afii10110;045E
afii10145;040F
afii10146;0462
afii10147;0472
afii10148;0474
afii10192;F6C6
afii10193;045F
afii10194;0463
afii10195;0473
afii10196;0475
afii10831;F6C7
afii10832;F6C8
afii10846;04D9
afii299;200E
afii300;200F
afii301;200D
afii57381;066A
afii57388;060C
afii57392;0660
afii57393;0661
afii57394;0662
afii57395;0663
afii57396;0664
afii57397;0665
afii57398;0666
afii57399;0667
afii57400;0668
afii57401;0669
afii57403;061B
afii57407;061F
afii57409;0621
afii57410;0622
afii57411;0623
afii57412;0624
afii57413;0625
afii57414;0626
afii57415;0627
afii57416;0628
afii57417;0629
afii57418;062A
afii57419;062B
afii57420;062C
afii57421;062D
afii57422;062E
afii57423;062F
afii57424;0630
afii57425;0631
afii57426;0632
afii57427;0633
afii57428;0634
afii57429;0635
afii57430;0636
afii57431;0637
afii57432;0638
afii57433;0639
afii57434;063A
afii57440;0640
afii57441;0641
afii57442;0642
afii57443;0643
afii57444;0644
afii57445;0645
afii57446;0646
afii57448;0648
afii57449;0649
afii57450;064A
afii57451;064B
afii57452;064C
afii57453;064D
afii57454;064E
afii57455;064F
afii57456;0650
afii57457;0651
afii57458;0652
afii57470;0647
afii57505;06A4
afii57506;067E
afii57507;0686
afii57508;0698
afii57509;06AF
afii57511;0679
afii57512;0688
afii57513;0691
afii57514;06BA
afii57519;06D2
afii57534;06D5
afii57636;20AA
afii57645;05BE
afii57658;05C3
afii57664;05D0
afii57665;05D1
afii57666;05D2
afii57667;05D3
afii57668;05D4
afii57669;05D5
afii57670;05D6
afii57671;05D7
afii57672;05D8
afii57673;05D9
afii57674;05DA
afii57675;05DB
afii57676;05DC
afii57677;05DD
afii57678;05DE
afii57679;05DF
afii57680;05E0
afii57681;05E1
afii57682;05E2
afii57683;05E3
afii57684;05E4
afii57685;05E5
afii57686;05E6
afii57687;05E7
afii57688;05E8
afii57689;05E9
afii57690;05EA
afii57694;FB2A
afii57695;FB2B
afii57700;FB4B
afii57705;FB1F
afii57716;05F0
afii57717;05F1
afii57718;05F2
afii57723;FB35
afii57793;05B4
afii57794;05B5
afii57795;05B6
afii57796;05BB
afii57797;05B8
afii57798;05B7
afii57799;05B0
afii57800;05B2
afii57801;05B1
afii57802;05B3
afii57803;05C2
afii57804;05C1
afii57806;05B9
afii57807;05BC
afii57839;05BD
afii57841;05BF
afii57842;05C0
afii57929;02BC
afii61248;2105
afii61289;2113
afii61352;2116
afii61573;202C
afii61574;202D
afii61575;202E
afii61664;200C
afii63167;066D
afii64937;02BD
agrave;00E0
agujarati;0A85
agurmukhi;0A05
ahiragana;3042
ahookabove;1EA3
aibengali;0990
aibopomofo;311E
aideva;0910
aiecyrillic;04D5
aigujarati;0A90
aigurmukhi;0A10
aimatragurmukhi;0A48
ainarabic;0639
ainfinalarabic;FECA
aininitialarabic;FECB
ainmedialarabic;FECC
ainvertedbreve;0203
aivowelsignbengali;09C8
aivowelsigndeva;0948
aivowelsigngujarati;0AC8
akatakana;30A2
akatakanahalfwidth;FF71
akorean;314F
alef;05D0
alefarabic;0627
alefdageshhebrew;FB30
aleffinalarabic;FE8E
alefhamzaabovearabic;0623
alefhamzaabovefinalarabic;FE84
alefhamzabelowarabic;0625
alefhamzabelowfinalarabic;FE88
alefhebrew;05D0
aleflamedhebrew;FB4F
alefmaddaabovearabic;0622
alefmaddaabovefinalarabic;FE82
alefmaksuraarabic;0649
alefmaksurafinalarabic;FEF0
alefmaksurainitialarabic;FEF3
alefmaksuramedialarabic;FEF4
alefpatahhebrew;FB2E
alefqamatshebrew;FB2F
aleph;2135
allequal;224C
alpha;03B1
alphatonos;03AC
amacron;0101
amonospace;FF41
ampersand;0026
ampersandmonospace;FF06
ampersandsmall;F726
amsquare;33C2
anbopomofo;3122
angbopomofo;3124
angkhankhuthai;0E5A
angle;2220
anglebracketleft;3008
anglebracketleftvertical;FE3F
anglebracketright;3009
anglebracketrightvertical;FE40
angleleft;2329
angleright;232A
angstrom;212B
anoteleia;0387
anudattadeva;0952
anusvarabengali;0982
anusvaradeva;0902
anusvaragujarati;0A82
aogonek;0105
apaatosquare;3300
aparen;249C
apostrophearmenian;055A
apostrophemod;02BC
apple;F8FF
approaches;2250
approxequal;2248
approxequalorimage;2252
approximatelyequal;2245
araeaekorean;318E
araeakorean;318D
arc;2312
arighthalfring;1E9A
aring;00E5
aringacute;01FB
aringbelow;1E01
arrowboth;2194
arrowdashdown;21E3
arrowdashleft;21E0
arrowdashright;21E2
arrowdashup;21E1
arrowdblboth;21D4
arrowdbldown;21D3
arrowdblleft;21D0
arrowdblright;21D2
arrowdblup;21D1
arrowdown;2193
arrowdownleft;2199
arrowdownright;2198
arrowdownwhite;21E9
arrowheaddownmod;02C5
arrowheadleftmod;02C2
arrowheadrightmod;02C3
arrowheadupmod;02C4
arrowhorizex;F8E7
arrowleft;2190
arrowleftdbl;21D0
arrowleftdblstroke;21CD
arrowleftoverright;21C6
arrowleftwhite;21E6
arrowright;2192
arrowrightdblstroke;21CF
arrowrightheavy;279E
arrowrightoverleft;21C4
arrowrightwhite;21E8
arrowtableft;21E4
arrowtabright;21E5
arrowup;2191
arrowupdn;2195
arrowupdnbse;21A8
arrowupdownbase;21A8
arrowupleft;2196
arrowupleftofdown;21C5
arrowupright;2197
arrowupwhite;21E7
arrowvertex;F8E6
asciicircum;005E
asciicircummonospace;FF3E
asciitilde;007E
asciitildemonospace;FF5E
ascript;0251
ascriptturned;0252
asmallhiragana;3041
asmallkatakana;30A1
asmallkatakanahalfwidth;FF67
asterisk;002A
asteriskaltonearabic;066D
asteriskarabic;066D
asteriskmath;2217
asteriskmonospace;FF0A
asterisksmall;FE61
asterism;2042
asuperior;F6E9
asymptoticallyequal;2243
at;0040
atilde;00E3
atmonospace;FF20
atsmall;FE6B
aturned;0250
aubengali;0994
aubopomofo;3120
audeva;0914
augujarati;0A94
augurmukhi;0A14
aulengthmarkbengali;09D7
aumatragurmukhi;0A4C
auvowelsignbengali;09CC
auvowelsigndeva;094C
auvowelsigngujarati;0ACC
avagrahadeva;093D
aybarmenian;0561
ayin;05E2
ayinaltonehebrew;FB20
ayinhebrew;05E2
b;0062
babengali;09AC
backslash;005C
backslashmonospace;FF3C
badeva;092C
bagujarati;0AAC
bagurmukhi;0A2C
bahiragana;3070
bahtthai;0E3F
bakatakana;30D0
bar;007C
barmonospace;FF5C
bbopomofo;3105
bcircle;24D1
bdotaccent;1E03
bdotbelow;1E05
beamedsixteenthnotes;266C
because;2235
becyrillic;0431
beharabic;0628
behfinalarabic;FE90
behinitialarabic;FE91
behiragana;3079
behmedialarabic;FE92
behmeeminitialarabic;FC9F
behmeemisolatedarabic;FC08
behnoonfinalarabic;FC6D
bekatakana;30D9
benarmenian;0562
bet;05D1
beta;03B2
betasymbolgreek;03D0
betdagesh;FB31
betdageshhebrew;FB31
bethebrew;05D1
betrafehebrew;FB4C
bhabengali;09AD
bhadeva;092D
bhagujarati;0AAD
bhagurmukhi;0A2D
bhook;0253
bihiragana;3073
bikatakana;30D3
bilabialclick;0298
bindigurmukhi;0A02
birusquare;3331
blackcircle;25CF
blackdiamond;25C6
blackdownpointingtriangle;25BC
blackleftpointingpointer;25C4
blackleftpointingtriangle;25C0
blacklenticularbracketleft;3010
blacklenticularbracketleftvertical;FE3B
blacklenticularbracketright;3011
blacklenticularbracketrightvertical;FE3C
blacklowerlefttriangle;25E3
blacklowerrighttriangle;25E2
blackrectangle;25AC
blackrightpointingpointer;25BA
blackrightpointingtriangle;25B6
blacksmallsquare;25AA
blacksmilingface;263B
blacksquare;25A0
blackstar;2605
blackupperlefttriangle;25E4
blackupperrighttriangle;25E5
blackuppointingsmalltriangle;25B4
blackuppointingtriangle;25B2
blank;2423
blinebelow;1E07
block;2588
bmonospace;FF42
bobaimaithai;0E1A
bohiragana;307C
bokatakana;30DC
bparen;249D
bqsquare;33C3
braceex;F8F4
braceleft;007B
braceleftbt;F8F3
braceleftmid;F8F2
braceleftmonospace;FF5B
braceleftsmall;FE5B
bracelefttp;F8F1
braceleftvertical;FE37
braceright;007D
bracerightbt;F8FE
bracerightmid;F8FD
bracerightmonospace;FF5D
bracerightsmall;FE5C
bracerighttp;F8FC
bracerightvertical;FE38
bracketleft;005B
bracketleftbt;F8F0
bracketleftex;F8EF
bracketleftmonospace;FF3B
bracketlefttp;F8EE
bracketright;005D
bracketrightbt;F8FB
bracketrightex;F8FA
bracketrightmonospace;FF3D
bracketrighttp;F8F9
breve;02D8
brevebelowcmb;032E
brevecmb;0306
breveinvertedbelowcmb;032F
breveinvertedcmb;0311
breveinverteddoublecmb;0361
bridgebelowcmb;032A
bridgeinvertedbelowcmb;033A
brokenbar;00A6
bstroke;0180
bsuperior;F6EA
btopbar;0183
buhiragana;3076
bukatakana;30D6
bullet;2022
bulletinverse;25D8
bulletoperator;2219
bullseye;25CE
c;0063
caarmenian;056E
cabengali;099A
cacute;0107
cadeva;091A
cagujarati;0A9A
cagurmukhi;0A1A
calsquare;3388
candrabindubengali;0981
candrabinducmb;0310
candrabindudeva;0901
candrabindugujarati;0A81
capslock;21EA
careof;2105
caron;02C7
caronbelowcmb;032C
caroncmb;030C
carriagereturn;21B5
cbopomofo;3118
ccaron;010D
ccedilla;00E7
ccedillaacute;1E09
ccircle;24D2
ccircumflex;0109
ccurl;0255
cdot;010B
cdotaccent;010B
cdsquare;33C5
cedilla;00B8
cedillacmb;0327
cent;00A2
centigrade;2103
centinferior;F6DF
centmonospace;FFE0
centoldstyle;F7A2
centsuperior;F6E0
chaarmenian;0579
chabengali;099B
chadeva;091B
chagujarati;0A9B
chagurmukhi;0A1B
chbopomofo;3114
cheabkhasiancyrillic;04BD
checkmark;2713
checyrillic;0447
chedescenderabkhasiancyrillic;04BF
chedescendercyrillic;04B7
chedieresiscyrillic;04F5
cheharmenian;0573
chekhakassiancyrillic;04CC
cheverticalstrokecyrillic;04B9
chi;03C7
chieuchacirclekorean;3277
chieuchaparenkorean;3217
chieuchcirclekorean;3269
chieuchkorean;314A
chieuchparenkorean;3209
chochangthai;0E0A
chochanthai;0E08
chochingthai;0E09
chochoethai;0E0C
chook;0188
cieucacirclekorean;3276
cieucaparenkorean;3216
cieuccirclekorean;3268
cieuckorean;3148
cieucparenkorean;3208
cieucuparenkorean;321C
circle;25CB
circlemultiply;2297
circleot;2299
circleplus;2295
circlepostalmark;3036
circlewithlefthalfblack;25D0
circlewithrighthalfblack;25D1
circumflex;02C6
circumflexbelowcmb;032D
circumflexcmb;0302
clear;2327
clickalveolar;01C2
clickdental;01C0
clicklateral;01C1
clickretroflex;01C3
club;2663
clubsuitblack;2663
clubsuitwhite;2667
cmcubedsquare;33A4
cmonospace;FF43
cmsquaredsquare;33A0
coarmenian;0581
colon;003A
colonmonetary;20A1
colonmonospace;FF1A
colonsign;20A1
colonsmall;FE55
colontriangularhalfmod;02D1
colontriangularmod;02D0
comma;002C
commaabovecmb;0313
commaaboverightcmb;0315
commaaccent;F6C3
commaarabic;060C
commaarmenian;055D
commainferior;F6E1
commamonospace;FF0C
commareversedabovecmb;0314
commareversedmod;02BD
commasmall;FE50
commasuperior;F6E2
commaturnedabovecmb;0312
commaturnedmod;02BB
compass;263C
congruent;2245
contourintegral;222E
control;2303
controlACK;0006
controlBEL;0007
controlBS;0008
controlCAN;0018
controlCR;000D
controlDC1;0011
controlDC2;0012
controlDC3;0013
controlDC4;0014
controlDEL;007F
controlDLE;0010
controlEM;0019
controlENQ;0005
controlEOT;0004
controlESC;001B
controlETB;0017
controlETX;0003
controlFF;000C
controlFS;001C
controlGS;001D
controlHT;0009
controlLF;000A
controlNAK;0015
controlRS;001E
controlSI;000F
controlSO;000E
controlSOT;0002
controlSTX;0001
controlSUB;001A
controlSYN;0016
controlUS;001F
controlVT;000B
copyright;00A9
copyrightsans;F8E9
copyrightserif;F6D9
cornerbracketleft;300C
cornerbracketlefthalfwidth;FF62
cornerbracketleftvertical;FE41
cornerbracketright;300D
cornerbracketrighthalfwidth;FF63
cornerbracketrightvertical;FE42
corporationsquare;337F
cosquare;33C7
coverkgsquare;33C6
cparen;249E
cruzeiro;20A2
cstretched;0297
curlyand;22CF
curlyor;22CE
currency;00A4
cyrBreve;F6D1
cyrFlex;F6D2
cyrbreve;F6D4
cyrflex;F6D5
d;0064
daarmenian;0564
dabengali;09A6
dadarabic;0636
dadeva;0926
dadfinalarabic;FEBE
dadinitialarabic;FEBF
dadmedialarabic;FEC0
dagesh;05BC
dageshhebrew;05BC
dagger;2020
daggerdbl;2021
dagujarati;0AA6
dagurmukhi;0A26
dahiragana;3060
dakatakana;30C0
dalarabic;062F
dalet;05D3
daletdagesh;FB33
daletdageshhebrew;FB33
dalethatafpatah;05D3 05B2
dalethatafpatahhebrew;05D3 05B2
dalethatafsegol;05D3 05B1
dalethatafsegolhebrew;05D3 05B1
dalethebrew;05D3
dalethiriq;05D3 05B4
dalethiriqhebrew;05D3 05B4
daletholam;05D3 05B9
daletholamhebrew;05D3 05B9
daletpatah;05D3 05B7
daletpatahhebrew;05D3 05B7
daletqamats;05D3 05B8
daletqamatshebrew;05D3 05B8
daletqubuts;05D3 05BB
daletqubutshebrew;05D3 05BB
daletsegol;05D3 05B6
daletsegolhebrew;05D3 05B6
daletsheva;05D3 05B0
daletshevahebrew;05D3 05B0
dalettsere;05D3 05B5
dalettserehebrew;05D3 05B5
dalfinalarabic;FEAA
dammaarabic;064F
dammalowarabic;064F
dammatanaltonearabic;064C
dammatanarabic;064C
danda;0964
dargahebrew;05A7
dargalefthebrew;05A7
dasiapneumatacyrilliccmb;0485
dblGrave;F6D3
dblanglebracketleft;300A
dblanglebracketleftvertical;FE3D
dblanglebracketright;300B
dblanglebracketrightvertical;FE3E
dblarchinvertedbelowcmb;032B
dblarrowleft;21D4
dblarrowright;21D2
dbldanda;0965
dblgrave;F6D6
dblgravecmb;030F
dblintegral;222C
dbllowline;2017
dbllowlinecmb;0333
dbloverlinecmb;033F
dblprimemod;02BA
dblverticalbar;2016
dblverticallineabovecmb;030E
dbopomofo;3109
dbsquare;33C8
dcaron;010F
dcedilla;1E11
dcircle;24D3
dcircumflexbelow;1E13
dcroat;0111
ddabengali;09A1
ddadeva;0921
ddagujarati;0AA1
ddagurmukhi;0A21
ddalarabic;0688
ddalfinalarabic;FB89
dddhadeva;095C
ddhabengali;09A2
ddhadeva;0922
ddhagujarati;0AA2
ddhagurmukhi;0A22
ddotaccent;1E0B
ddotbelow;1E0D
decimalseparatorarabic;066B
decimalseparatorpersian;066B
decyrillic;0434
degree;00B0
dehihebrew;05AD
dehiragana;3067
deicoptic;03EF
dekatakana;30C7
deleteleft;232B
deleteright;2326
delta;03B4
deltaturned;018D
denominatorminusonenumeratorbengali;09F8
dezh;02A4
dhabengali;09A7
dhadeva;0927
dhagujarati;0AA7
dhagurmukhi;0A27
dhook;0257
dialytikatonos;0385
dialytikatonoscmb;0344
diamond;2666
diamondsuitwhite;2662
dieresis;00A8
dieresisacute;F6D7
dieresisbelowcmb;0324
dieresiscmb;0308
dieresisgrave;F6D8
dieresistonos;0385
dihiragana;3062
dikatakana;30C2
dittomark;3003
divide;00F7
divides;2223
divisionslash;2215
djecyrillic;0452
dkshade;2593
dlinebelow;1E0F
dlsquare;3397
dmacron;0111
dmonospace;FF44
dnblock;2584
dochadathai;0E0E
dodekthai;0E14
dohiragana;3069
dokatakana;30C9
dollar;0024
dollarinferior;F6E3
dollarmonospace;FF04
dollaroldstyle;F724
dollarsmall;FE69
dollarsuperior;F6E4
dong;20AB
dorusquare;3326
dotaccent;02D9
dotaccentcmb;0307
dotbelowcmb;0323
dotbelowcomb;0323
dotkatakana;30FB
dotlessi;0131
dotlessj;F6BE
dotlessjstrokehook;0284
dotmath;22C5
dottedcircle;25CC
doubleyodpatah;FB1F
doubleyodpatahhebrew;FB1F
downtackbelowcmb;031E
downtackmod;02D5
dparen;249F
dsuperior;F6EB
dtail;0256
dtopbar;018C
duhiragana;3065
dukatakana;30C5
dz;01F3
dzaltone;02A3
dzcaron;01C6
dzcurl;02A5
dzeabkhasiancyrillic;04E1
dzecyrillic;0455
dzhecyrillic;045F
e;0065
eacute;00E9
earth;2641
ebengali;098F
ebopomofo;311C
ebreve;0115
ecandradeva;090D
ecandragujarati;0A8D
ecandravowelsigndeva;0945
ecandravowelsigngujarati;0AC5
ecaron;011B
ecedillabreve;1E1D
echarmenian;0565
echyiwnarmenian;0587
ecircle;24D4
ecircumflex;00EA
ecircumflexacute;1EBF
ecircumflexbelow;1E19
ecircumflexdotbelow;1EC7
ecircumflexgrave;1EC1
ecircumflexhookabove;1EC3
ecircumflextilde;1EC5
ecyrillic;0454
edblgrave;0205
edeva;090F
edieresis;00EB
edot;0117
edotaccent;0117
edotbelow;1EB9
eegurmukhi;0A0F
eematragurmukhi;0A47
efcyrillic;0444
egrave;00E8
egujarati;0A8F
eharmenian;0567
ehbopomofo;311D
ehiragana;3048
ehookabove;1EBB
eibopomofo;311F
eight;0038
eightarabic;0668
eightbengali;09EE
eightcircle;2467
eightcircleinversesansserif;2791
eightdeva;096E
eighteencircle;2471
eighteenparen;2485
eighteenperiod;2499
eightgujarati;0AEE
eightgurmukhi;0A6E
eighthackarabic;0668
eighthangzhou;3028
eighthnotebeamed;266B
eightideographicparen;3227
eightinferior;2088
eightmonospace;FF18
eightoldstyle;F738
eightparen;247B
eightperiod;248F
eightpersian;06F8
eightroman;2177
eightsuperior;2078
eightthai;0E58
einvertedbreve;0207
eiotifiedcyrillic;0465
ekatakana;30A8
ekatakanahalfwidth;FF74
ekonkargurmukhi;0A74
ekorean;3154
elcyrillic;043B
element;2208
elevencircle;246A
elevenparen;247E
elevenperiod;2492
elevenroman;217A
ellipsis;2026
ellipsisvertical;22EE
emacron;0113
emacronacute;1E17
emacrongrave;1E15
emcyrillic;043C
emdash;2014
emdashvertical;FE31
emonospace;FF45
emphasismarkarmenian;055B
emptyset;2205
enbopomofo;3123
encyrillic;043D
endash;2013
endashvertical;FE32
endescendercyrillic;04A3
eng;014B
engbopomofo;3125
enghecyrillic;04A5
enhookcyrillic;04C8
enspace;2002
eogonek;0119
eokorean;3153
eopen;025B
eopenclosed;029A
eopenreversed;025C
eopenreversedclosed;025E
eopenreversedhook;025D
eparen;24A0
epsilon;03B5
epsilontonos;03AD
equal;003D
equalmonospace;FF1D
equalsmall;FE66
equalsuperior;207C
equivalence;2261
erbopomofo;3126
ercyrillic;0440
ereversed;0258
ereversedcyrillic;044D
escyrillic;0441
esdescendercyrillic;04AB
esh;0283
eshcurl;0286
eshortdeva;090E
eshortvowelsigndeva;0946
eshreversedloop;01AA
eshsquatreversed;0285
esmallhiragana;3047
esmallkatakana;30A7
esmallkatakanahalfwidth;FF6A
estimated;212E
esuperior;F6EC
eta;03B7
etarmenian;0568
etatonos;03AE
eth;00F0
etilde;1EBD
etildebelow;1E1B
etnahtafoukhhebrew;0591
etnahtafoukhlefthebrew;0591
etnahtahebrew;0591
etnahtalefthebrew;0591
eturned;01DD
eukorean;3161
euro;20AC
evowelsignbengali;09C7
evowelsigndeva;0947
evowelsigngujarati;0AC7
exclam;0021
exclamarmenian;055C
exclamdbl;203C
exclamdown;00A1
exclamdownsmall;F7A1
exclammonospace;FF01
exclamsmall;F721
existential;2203
ezh;0292
ezhcaron;01EF
ezhcurl;0293
ezhreversed;01B9
ezhtail;01BA
f;0066
fadeva;095E
fagurmukhi;0A5E
fahrenheit;2109
fathaarabic;064E
fathalowarabic;064E
fathatanarabic;064B
fbopomofo;3108
fcircle;24D5
fdotaccent;1E1F
feharabic;0641
feharmenian;0586
fehfinalarabic;FED2
fehinitialarabic;FED3
fehmedialarabic;FED4
feicoptic;03E5
female;2640
ff;FB00
ffi;FB03
ffl;FB04
fi;FB01
fifteencircle;246E
fifteenparen;2482
fifteenperiod;2496
figuredash;2012
filledbox;25A0
filledrect;25AC
finalkaf;05DA
finalkafdagesh;FB3A
finalkafdageshhebrew;FB3A
finalkafhebrew;05DA
finalkafqamats;05DA 05B8
finalkafqamatshebrew;05DA 05B8
finalkafsheva;05DA 05B0
finalkafshevahebrew;05DA 05B0
finalmem;05DD
finalmemhebrew;05DD
finalnun;05DF
finalnunhebrew;05DF
finalpe;05E3
finalpehebrew;05E3
finaltsadi;05E5
finaltsadihebrew;05E5
firsttonechinese;02C9
fisheye;25C9
fitacyrillic;0473
five;0035
fivearabic;0665
fivebengali;09EB
fivecircle;2464
fivecircleinversesansserif;278E
fivedeva;096B
fiveeighths;215D
fivegujarati;0AEB
fivegurmukhi;0A6B
fivehackarabic;0665
fivehangzhou;3025
fiveideographicparen;3224
fiveinferior;2085
fivemonospace;FF15
fiveoldstyle;F735
fiveparen;2478
fiveperiod;248C
fivepersian;06F5
fiveroman;2174
fivesuperior;2075
fivethai;0E55
fl;FB02
florin;0192
fmonospace;FF46
fmsquare;3399
fofanthai;0E1F
fofathai;0E1D
fongmanthai;0E4F
forall;2200
four;0034
fourarabic;0664
fourbengali;09EA
fourcircle;2463
fourcircleinversesansserif;278D
fourdeva;096A
fourgujarati;0AEA
fourgurmukhi;0A6A
fourhackarabic;0664
fourhangzhou;3024
fourideographicparen;3223
fourinferior;2084
fourmonospace;FF14
fournumeratorbengali;09F7
fouroldstyle;F734
fourparen;2477
fourperiod;248B
fourpersian;06F4
fourroman;2173
foursuperior;2074
fourteencircle;246D
fourteenparen;2481
fourteenperiod;2495
fourthai;0E54
fourthtonechinese;02CB
fparen;24A1
fraction;2044
franc;20A3
g;0067
gabengali;0997
gacute;01F5
gadeva;0917
gafarabic;06AF
gaffinalarabic;FB93
gafinitialarabic;FB94
gafmedialarabic;FB95
gagujarati;0A97
gagurmukhi;0A17
gahiragana;304C
gakatakana;30AC
gamma;03B3
gammalatinsmall;0263
gammasuperior;02E0
gangiacoptic;03EB
gbopomofo;310D
gbreve;011F
gcaron;01E7
gcedilla;0123
gcircle;24D6
gcircumflex;011D
gcommaaccent;0123
gdot;0121
gdotaccent;0121
gecyrillic;0433
gehiragana;3052
gekatakana;30B2
geometricallyequal;2251
gereshaccenthebrew;059C
gereshhebrew;05F3
gereshmuqdamhebrew;059D
germandbls;00DF
gershayimaccenthebrew;059E
gershayimhebrew;05F4
getamark;3013
ghabengali;0998
ghadarmenian;0572
ghadeva;0918
ghagujarati;0A98
ghagurmukhi;0A18
ghainarabic;063A
ghainfinalarabic;FECE
ghaininitialarabic;FECF
ghainmedialarabic;FED0
ghemiddlehookcyrillic;0495
ghestrokecyrillic;0493
gheupturncyrillic;0491
ghhadeva;095A
ghhagurmukhi;0A5A
ghook;0260
ghzsquare;3393
gihiragana;304E
gikatakana;30AE
gimarmenian;0563
gimel;05D2
gimeldagesh;FB32
gimeldageshhebrew;FB32
gimelhebrew;05D2
gjecyrillic;0453
glottalinvertedstroke;01BE
glottalstop;0294
glottalstopinverted;0296
glottalstopmod;02C0
glottalstopreversed;0295
glottalstopreversedmod;02C1
glottalstopreversedsuperior;02E4
glottalstopstroke;02A1
glottalstopstrokereversed;02A2
gmacron;1E21
gmonospace;FF47
gohiragana;3054
gokatakana;30B4
gparen;24A2
gpasquare;33AC
gradient;2207
grave;0060
gravebelowcmb;0316
gravecmb;0300
gravecomb;0300
gravedeva;0953
gravelowmod;02CE
gravemonospace;FF40
gravetonecmb;0340
greater;003E
greaterequal;2265
greaterequalorless;22DB
greatermonospace;FF1E
greaterorequivalent;2273
greaterorless;2277
greateroverequal;2267
greatersmall;FE65
gscript;0261
gstroke;01E5
guhiragana;3050
guillemotleft;00AB
guillemotright;00BB
guilsinglleft;2039
guilsinglright;203A
gukatakana;30B0
guramusquare;3318
gysquare;33C9
h;0068
haabkhasiancyrillic;04A9
haaltonearabic;06C1
habengali;09B9
hadescendercyrillic;04B3
hadeva;0939
hagujarati;0AB9
hagurmukhi;0A39
haharabic;062D
hahfinalarabic;FEA2
hahinitialarabic;FEA3
hahiragana;306F
hahmedialarabic;FEA4
haitusquare;332A
hakatakana;30CF
hakatakanahalfwidth;FF8A
halantgurmukhi;0A4D
hamzaarabic;0621
hamzadammaarabic;0621 064F
hamzadammatanarabic;0621 064C
hamzafathaarabic;0621 064E
hamzafathatanarabic;0621 064B
hamzalowarabic;0621
hamzalowkasraarabic;0621 0650
hamzalowkasratanarabic;0621 064D
hamzasukunarabic;0621 0652
hangulfiller;3164
hardsigncyrillic;044A
harpoonleftbarbup;21BC
harpoonrightbarbup;21C0
hasquare;33CA
hatafpatah;05B2
hatafpatah16;05B2
hatafpatah23;05B2
hatafpatah2f;05B2
hatafpatahhebrew;05B2
hatafpatahnarrowhebrew;05B2
hatafpatahquarterhebrew;05B2
hatafpatahwidehebrew;05B2
hatafqamats;05B3
hatafqamats1b;05B3
hatafqamats28;05B3
hatafqamats34;05B3
hatafqamatshebrew;05B3
hatafqamatsnarrowhebrew;05B3
hatafqamatsquarterhebrew;05B3
hatafqamatswidehebrew;05B3
hatafsegol;05B1
hatafsegol17;05B1
hatafsegol24;05B1
hatafsegol30;05B1
hatafsegolhebrew;05B1
hatafsegolnarrowhebrew;05B1
hatafsegolquarterhebrew;05B1
hatafsegolwidehebrew;05B1
hbar;0127
hbopomofo;310F
hbrevebelow;1E2B
hcedilla;1E29
hcircle;24D7
hcircumflex;0125
hdieresis;1E27
hdotaccent;1E23
hdotbelow;1E25
he;05D4
heart;2665
heartsuitblack;2665
heartsuitwhite;2661
hedagesh;FB34
hedageshhebrew;FB34
hehaltonearabic;06C1
heharabic;0647
hehebrew;05D4
hehfinalaltonearabic;FBA7
hehfinalalttwoarabic;FEEA
hehfinalarabic;FEEA
hehhamzaabovefinalarabic;FBA5
hehhamzaaboveisolatedarabic;FBA4
hehinitialaltonearabic;FBA8
hehinitialarabic;FEEB
hehiragana;3078
hehmedialaltonearabic;FBA9
hehmedialarabic;FEEC
heiseierasquare;337B
hekatakana;30D8
hekatakanahalfwidth;FF8D
hekutaarusquare;3336
henghook;0267
herutusquare;3339
het;05D7
hethebrew;05D7
hhook;0266
hhooksuperior;02B1
hieuhacirclekorean;327B
hieuhaparenkorean;321B
hieuhcirclekorean;326D
hieuhkorean;314E
hieuhparenkorean;320D
hihiragana;3072
hikatakana;30D2
hikatakanahalfwidth;FF8B
hiriq;05B4
hiriq14;05B4
hiriq21;05B4
hiriq2d;05B4
hiriqhebrew;05B4
hiriqnarrowhebrew;05B4
hiriqquarterhebrew;05B4
hiriqwidehebrew;05B4
hlinebelow;1E96
hmonospace;FF48
hoarmenian;0570
hohipthai;0E2B
hohiragana;307B
hokatakana;30DB
hokatakanahalfwidth;FF8E
holam;05B9
holam19;05B9
holam26;05B9
holam32;05B9
holamhebrew;05B9
holamnarrowhebrew;05B9
holamquarterhebrew;05B9
holamwidehebrew;05B9
honokhukthai;0E2E
hookabovecomb;0309
hookcmb;0309
hookpalatalizedbelowcmb;0321
hookretroflexbelowcmb;0322
hoonsquare;3342
horicoptic;03E9
horizontalbar;2015
horncmb;031B
hotsprings;2668
house;2302
hparen;24A3
hsuperior;02B0
hturned;0265
huhiragana;3075
huiitosquare;3333
hukatakana;30D5
hukatakanahalfwidth;FF8C
hungarumlaut;02DD
hungarumlautcmb;030B
hv;0195
hyphen;002D
hypheninferior;F6E5
hyphenmonospace;FF0D
hyphensmall;FE63
hyphensuperior;F6E6
hyphentwo;2010
i;0069
iacute;00ED
iacyrillic;044F
ibengali;0987
ibopomofo;3127
ibreve;012D
icaron;01D0
icircle;24D8
icircumflex;00EE
icyrillic;0456
idblgrave;0209
ideographearthcircle;328F
ideographfirecircle;328B
ideographicallianceparen;323F
ideographiccallparen;323A
ideographiccentrecircle;32A5
ideographicclose;3006
ideographiccomma;3001
ideographiccommaleft;FF64
ideographiccongratulationparen;3237
ideographiccorrectcircle;32A3
ideographicearthparen;322F
ideographicenterpriseparen;323D
ideographicexcellentcircle;329D
ideographicfestivalparen;3240
ideographicfinancialcircle;3296
ideographicfinancialparen;3236
ideographicfireparen;322B
ideographichaveparen;3232
ideographichighcircle;32A4
ideographiciterationmark;3005
ideographiclaborcircle;3298
ideographiclaborparen;3238
ideographicleftcircle;32A7
ideographiclowcircle;32A6
ideographicmedicinecircle;32A9
ideographicmetalparen;322E
ideographicmoonparen;322A
ideographicnameparen;3234
ideographicperiod;3002
ideographicprintcircle;329E
ideographicreachparen;3243
ideographicrepresentparen;3239
ideographicresourceparen;323E
ideographicrightcircle;32A8
ideographicsecretcircle;3299
ideographicselfparen;3242
ideographicsocietyparen;3233
ideographicspace;3000
ideographicspecialparen;3235
ideographicstockparen;3231
ideographicstudyparen;323B
ideographicsunparen;3230
ideographicsuperviseparen;323C
ideographicwaterparen;322C
ideographicwoodparen;322D
ideographiczero;3007
ideographmetalcircle;328E
ideographmooncircle;328A
ideographnamecircle;3294
ideographsuncircle;3290
ideographwatercircle;328C
ideographwoodcircle;328D
ideva;0907
idieresis;00EF
idieresisacute;1E2F
idieresiscyrillic;04E5
idotbelow;1ECB
iebrevecyrillic;04D7
iecyrillic;0435
ieungacirclekorean;3275
ieungaparenkorean;3215
ieungcirclekorean;3267
ieungkorean;3147
ieungparenkorean;3207
igrave;00EC
igujarati;0A87
igurmukhi;0A07
ihiragana;3044
ihookabove;1EC9
iibengali;0988
iicyrillic;0438
iideva;0908
iigujarati;0A88
iigurmukhi;0A08
iimatragurmukhi;0A40
iinvertedbreve;020B
iishortcyrillic;0439
iivowelsignbengali;09C0
iivowelsigndeva;0940
iivowelsigngujarati;0AC0
ij;0133
ikatakana;30A4
ikatakanahalfwidth;FF72
ikorean;3163
ilde;02DC
iluyhebrew;05AC
imacron;012B
imacroncyrillic;04E3
imageorapproximatelyequal;2253
imatragurmukhi;0A3F
imonospace;FF49
increment;2206
infinity;221E
iniarmenian;056B
integral;222B
integralbottom;2321
integralbt;2321
integralex;F8F5
integraltop;2320
integraltp;2320
intersection;2229
intisquare;3305
invbullet;25D8
invcircle;25D9
invsmileface;263B
iocyrillic;0451
iogonek;012F
iota;03B9
iotadieresis;03CA
iotadieresistonos;0390
iotalatin;0269
iotatonos;03AF
iparen;24A4
irigurmukhi;0A72
ismallhiragana;3043
ismallkatakana;30A3
ismallkatakanahalfwidth;FF68
issharbengali;09FA
istroke;0268
isuperior;F6ED
iterationhiragana;309D
iterationkatakana;30FD
itilde;0129
itildebelow;1E2D
iubopomofo;3129
iucyrillic;044E
ivowelsignbengali;09BF
ivowelsigndeva;093F
ivowelsigngujarati;0ABF
izhitsacyrillic;0475
izhitsadblgravecyrillic;0477
j;006A
jaarmenian;0571
jabengali;099C
jadeva;091C
jagujarati;0A9C
jagurmukhi;0A1C
jbopomofo;3110
jcaron;01F0
jcircle;24D9
jcircumflex;0135
jcrossedtail;029D
jdotlessstroke;025F
jecyrillic;0458
jeemarabic;062C
jeemfinalarabic;FE9E
jeeminitialarabic;FE9F
jeemmedialarabic;FEA0
jeharabic;0698
jehfinalarabic;FB8B
jhabengali;099D
jhadeva;091D
jhagujarati;0A9D
jhagurmukhi;0A1D
jheharmenian;057B
jis;3004
jmonospace;FF4A
jparen;24A5
jsuperior;02B2
k;006B
kabashkircyrillic;04A1
kabengali;0995
kacute;1E31
kacyrillic;043A
kadescendercyrillic;049B
kadeva;0915
kaf;05DB
kafarabic;0643
kafdagesh;FB3B
kafdageshhebrew;FB3B
kaffinalarabic;FEDA
kafhebrew;05DB
kafinitialarabic;FEDB
kafmedialarabic;FEDC
kafrafehebrew;FB4D
kagujarati;0A95
kagurmukhi;0A15
kahiragana;304B
kahookcyrillic;04C4
kakatakana;30AB
kakatakanahalfwidth;FF76
kappa;03BA
kappasymbolgreek;03F0
kapyeounmieumkorean;3171
kapyeounphieuphkorean;3184
kapyeounpieupkorean;3178
kapyeounssangpieupkorean;3179
karoriisquare;330D
kashidaautoarabic;0640
kashidaautonosidebearingarabic;0640
kasmallkatakana;30F5
kasquare;3384
kasraarabic;0650
kasratanarabic;064D
kastrokecyrillic;049F
katahiraprolongmarkhalfwidth;FF70
kaverticalstrokecyrillic;049D
kbopomofo;310E
kcalsquare;3389
kcaron;01E9
kcedilla;0137
kcircle;24DA
kcommaaccent;0137
kdotbelow;1E33
keharmenian;0584
kehiragana;3051
kekatakana;30B1
kekatakanahalfwidth;FF79
kenarmenian;056F
kesmallkatakana;30F6
kgreenlandic;0138
khabengali;0996
khacyrillic;0445
khadeva;0916
khagujarati;0A96
khagurmukhi;0A16
khaharabic;062E
khahfinalarabic;FEA6
khahinitialarabic;FEA7
khahmedialarabic;FEA8
kheicoptic;03E7
khhadeva;0959
khhagurmukhi;0A59
khieukhacirclekorean;3278
khieukhaparenkorean;3218
khieukhcirclekorean;326A
khieukhkorean;314B
khieukhparenkorean;320A
khokhaithai;0E02
khokhonthai;0E05
khokhuatthai;0E03
khokhwaithai;0E04
khomutthai;0E5B
khook;0199
khorakhangthai;0E06
khzsquare;3391
kihiragana;304D
kikatakana;30AD
kikatakanahalfwidth;FF77
kiroguramusquare;3315
kiromeetorusquare;3316
kirosquare;3314
kiyeokacirclekorean;326E
kiyeokaparenkorean;320E
kiyeokcirclekorean;3260
kiyeokkorean;3131
kiyeokparenkorean;3200
kiyeoksioskorean;3133
kjecyrillic;045C
klinebelow;1E35
klsquare;3398
kmcubedsquare;33A6
kmonospace;FF4B
kmsquaredsquare;33A2
kohiragana;3053
kohmsquare;33C0
kokaithai;0E01
kokatakana;30B3
kokatakanahalfwidth;FF7A
kooposquare;331E
koppacyrillic;0481
koreanstandardsymbol;327F
koroniscmb;0343
kparen;24A6
kpasquare;33AA
ksicyrillic;046F
ktsquare;33CF
kturned;029E
kuhiragana;304F
kukatakana;30AF
kukatakanahalfwidth;FF78
kvsquare;33B8
kwsquare;33BE
l;006C
labengali;09B2
lacute;013A
ladeva;0932
lagujarati;0AB2
lagurmukhi;0A32
lakkhangyaothai;0E45
lamaleffinalarabic;FEFC
lamalefhamzaabovefinalarabic;FEF8
lamalefhamzaaboveisolatedarabic;FEF7
lamalefhamzabelowfinalarabic;FEFA
lamalefhamzabelowisolatedarabic;FEF9
lamalefisolatedarabic;FEFB
lamalefmaddaabovefinalarabic;FEF6
lamalefmaddaaboveisolatedarabic;FEF5
lamarabic;0644
lambda;03BB
lambdastroke;019B
lamed;05DC
lameddagesh;FB3C
lameddageshhebrew;FB3C
lamedhebrew;05DC
lamedholam;05DC 05B9
lamedholamdagesh;05DC 05B9 05BC
lamedholamdageshhebrew;05DC 05B9 05BC
lamedholamhebrew;05DC 05B9
lamfinalarabic;FEDE
lamhahinitialarabic;FCCA
laminitialarabic;FEDF
lamjeeminitialarabic;FCC9
lamkhahinitialarabic;FCCB
lamlamhehisolatedarabic;FDF2
lammedialarabic;FEE0
lammeemhahinitialarabic;FD88
lammeeminitialarabic;FCCC
lammeemjeeminitialarabic;FEDF FEE4 FEA0
lammeemkhahinitialarabic;FEDF FEE4 FEA8
largecircle;25EF
lbar;019A
lbelt;026C
lbopomofo;310C
lcaron;013E
lcedilla;013C
lcircle;24DB
lcircumflexbelow;1E3D
lcommaaccent;013C
ldot;0140
ldotaccent;0140
ldotbelow;1E37
ldotbelowmacron;1E39
leftangleabovecmb;031A
lefttackbelowcmb;0318
less;003C
lessequal;2264
lessequalorgreater;22DA
lessmonospace;FF1C
lessorequivalent;2272
lessorgreater;2276
lessoverequal;2266
lesssmall;FE64
lezh;026E
lfblock;258C
lhookretroflex;026D
lira;20A4
liwnarmenian;056C
lj;01C9
ljecyrillic;0459
ll;F6C0
lladeva;0933
llagujarati;0AB3
llinebelow;1E3B
llladeva;0934
llvocalicbengali;09E1
llvocalicdeva;0961
llvocalicvowelsignbengali;09E3
llvocalicvowelsigndeva;0963
lmiddletilde;026B
lmonospace;FF4C
lmsquare;33D0
lochulathai;0E2C
logicaland;2227
logicalnot;00AC
logicalnotreversed;2310
logicalor;2228
lolingthai;0E25
longs;017F
lowlinecenterline;FE4E
lowlinecmb;0332
lowlinedashed;FE4D
lozenge;25CA
lparen;24A7
lslash;0142
lsquare;2113
lsuperior;F6EE
ltshade;2591
luthai;0E26
lvocalicbengali;098C
lvocalicdeva;090C
lvocalicvowelsignbengali;09E2
lvocalicvowelsigndeva;0962
lxsquare;33D3
m;006D
mabengali;09AE
macron;00AF
macronbelowcmb;0331
macroncmb;0304
macronlowmod;02CD
macronmonospace;FFE3
macute;1E3F
madeva;092E
magujarati;0AAE
magurmukhi;0A2E
mahapakhhebrew;05A4
mahapakhlefthebrew;05A4
mahiragana;307E
maichattawalowleftthai;F895
maichattawalowrightthai;F894
maichattawathai;0E4B
maichattawaupperleftthai;F893
maieklowleftthai;F88C
maieklowrightthai;F88B
maiekthai;0E48
maiekupperleftthai;F88A
maihanakatleftthai;F884
maihanakatthai;0E31
maitaikhuleftthai;F889
maitaikhuthai;0E47
maitholowleftthai;F88F
maitholowrightthai;F88E
maithothai;0E49
maithoupperleftthai;F88D
maitrilowleftthai;F892
maitrilowrightthai;F891
maitrithai;0E4A
maitriupperleftthai;F890
maiyamokthai;0E46
makatakana;30DE
makatakanahalfwidth;FF8F
male;2642
mansyonsquare;3347
maqafhebrew;05BE
mars;2642
masoracirclehebrew;05AF
masquare;3383
mbopomofo;3107
mbsquare;33D4
mcircle;24DC
mcubedsquare;33A5
mdotaccent;1E41
mdotbelow;1E43
meemarabic;0645
meemfinalarabic;FEE2
meeminitialarabic;FEE3
meemmedialarabic;FEE4
meemmeeminitialarabic;FCD1
meemmeemisolatedarabic;FC48
meetorusquare;334D
mehiragana;3081
meizierasquare;337E
mekatakana;30E1
mekatakanahalfwidth;FF92
mem;05DE
memdagesh;FB3E
memdageshhebrew;FB3E
memhebrew;05DE
menarmenian;0574
merkhahebrew;05A5
merkhakefulahebrew;05A6
merkhakefulalefthebrew;05A6
merkhalefthebrew;05A5
mhook;0271
mhzsquare;3392
middledotkatakanahalfwidth;FF65
middot;00B7
mieumacirclekorean;3272
mieumaparenkorean;3212
mieumcirclekorean;3264
mieumkorean;3141
mieumpansioskorean;3170
mieumparenkorean;3204
mieumpieupkorean;316E
mieumsioskorean;316F
mihiragana;307F
mikatakana;30DF
mikatakanahalfwidth;FF90
minus;2212
minusbelowcmb;0320
minuscircle;2296
minusmod;02D7
minusplus;2213
minute;2032
miribaarusquare;334A
mirisquare;3349
mlonglegturned;0270
mlsquare;3396
mmcubedsquare;33A3
mmonospace;FF4D
mmsquaredsquare;339F
mohiragana;3082
mohmsquare;33C1
mokatakana;30E2
mokatakanahalfwidth;FF93
molsquare;33D6
momathai;0E21
moverssquare;33A7
moverssquaredsquare;33A8
mparen;24A8
mpasquare;33AB
mssquare;33B3
msuperior;F6EF
mturned;026F
mu;00B5
mu1;00B5
muasquare;3382
muchgreater;226B
muchless;226A
mufsquare;338C
mugreek;03BC
mugsquare;338D
muhiragana;3080
mukatakana;30E0
mukatakanahalfwidth;FF91
mulsquare;3395
multiply;00D7
mumsquare;339B
munahhebrew;05A3
munahlefthebrew;05A3
musicalnote;266A
musicalnotedbl;266B
musicflatsign;266D
musicsharpsign;266F
mussquare;33B2
muvsquare;33B6
muwsquare;33BC
mvmegasquare;33B9
mvsquare;33B7
mwmegasquare;33BF
mwsquare;33BD
n;006E
nabengali;09A8
nabla;2207
nacute;0144
nadeva;0928
nagujarati;0AA8
nagurmukhi;0A28
nahiragana;306A
nakatakana;30CA
nakatakanahalfwidth;FF85
napostrophe;0149
nasquare;3381
nbopomofo;310B
nbspace;00A0
ncaron;0148
ncedilla;0146
ncircle;24DD
ncircumflexbelow;1E4B
ncommaaccent;0146
ndotaccent;1E45
ndotbelow;1E47
nehiragana;306D
nekatakana;30CD
nekatakanahalfwidth;FF88
newsheqelsign;20AA
nfsquare;338B
ngabengali;0999
ngadeva;0919
ngagujarati;0A99
ngagurmukhi;0A19
ngonguthai;0E07
nhiragana;3093
nhookleft;0272
nhookretroflex;0273
nieunacirclekorean;326F
nieunaparenkorean;320F
nieuncieuckorean;3135
nieuncirclekorean;3261
nieunhieuhkorean;3136
nieunkorean;3134
nieunpansioskorean;3168
nieunparenkorean;3201
nieunsioskorean;3167
nieuntikeutkorean;3166
nihiragana;306B
nikatakana;30CB
nikatakanahalfwidth;FF86
nikhahitleftthai;F899
nikhahitthai;0E4D
nine;0039
ninearabic;0669
ninebengali;09EF
ninecircle;2468
ninecircleinversesansserif;2792
ninedeva;096F
ninegujarati;0AEF
ninegurmukhi;0A6F
ninehackarabic;0669
ninehangzhou;3029
nineideographicparen;3228
nineinferior;2089
ninemonospace;FF19
nineoldstyle;F739
nineparen;247C
nineperiod;2490
ninepersian;06F9
nineroman;2178
ninesuperior;2079
nineteencircle;2472
nineteenparen;2486
nineteenperiod;249A
ninethai;0E59
nj;01CC
njecyrillic;045A
nkatakana;30F3
nkatakanahalfwidth;FF9D
nlegrightlong;019E
nlinebelow;1E49
nmonospace;FF4E
nmsquare;339A
nnabengali;09A3
nnadeva;0923
nnagujarati;0AA3
nnagurmukhi;0A23
nnnadeva;0929
nohiragana;306E
nokatakana;30CE
nokatakanahalfwidth;FF89
nonbreakingspace;00A0
nonenthai;0E13
nonuthai;0E19
noonarabic;0646
noonfinalarabic;FEE6
noonghunnaarabic;06BA
noonghunnafinalarabic;FB9F
noonhehinitialarabic;FEE7 FEEC
nooninitialarabic;FEE7
noonjeeminitialarabic;FCD2
noonjeemisolatedarabic;FC4B
noonmedialarabic;FEE8
noonmeeminitialarabic;FCD5
noonmeemisolatedarabic;FC4E
noonnoonfinalarabic;FC8D
notcontains;220C
notelement;2209
notelementof;2209
notequal;2260
notgreater;226F
notgreaternorequal;2271
notgreaternorless;2279
notidentical;2262
notless;226E
notlessnorequal;2270
notparallel;2226
notprecedes;2280
notsubset;2284
notsucceeds;2281
notsuperset;2285
nowarmenian;0576
nparen;24A9
nssquare;33B1
nsuperior;207F
ntilde;00F1
nu;03BD
nuhiragana;306C
nukatakana;30CC
nukatakanahalfwidth;FF87
nuktabengali;09BC
nuktadeva;093C
nuktagujarati;0ABC
nuktagurmukhi;0A3C
numbersign;0023
numbersignmonospace;FF03
numbersignsmall;FE5F
numeralsigngreek;0374
numeralsignlowergreek;0375
numero;2116
nun;05E0
nundagesh;FB40
nundageshhebrew;FB40
nunhebrew;05E0
nvsquare;33B5
nwsquare;33BB
nyabengali;099E
nyadeva;091E
nyagujarati;0A9E
nyagurmukhi;0A1E
o;006F
oacute;00F3
oangthai;0E2D
obarred;0275
obarredcyrillic;04E9
obarreddieresiscyrillic;04EB
obengali;0993
obopomofo;311B
obreve;014F
ocandradeva;0911
ocandragujarati;0A91
ocandravowelsigndeva;0949
ocandravowelsigngujarati;0AC9
ocaron;01D2
ocircle;24DE
ocircumflex;00F4
ocircumflexacute;1ED1
ocircumflexdotbelow;1ED9
ocircumflexgrave;1ED3
ocircumflexhookabove;1ED5
ocircumflextilde;1ED7
ocyrillic;043E
odblacute;0151
odblgrave;020D
odeva;0913
odieresis;00F6
odieresiscyrillic;04E7
odotbelow;1ECD
oe;0153
oekorean;315A
ogonek;02DB
ogonekcmb;0328
ograve;00F2
ogujarati;0A93
oharmenian;0585
ohiragana;304A
ohookabove;1ECF
ohorn;01A1
ohornacute;1EDB
ohorndotbelow;1EE3
ohorngrave;1EDD
ohornhookabove;1EDF
ohorntilde;1EE1
ohungarumlaut;0151
oi;01A3
oinvertedbreve;020F
okatakana;30AA
okatakanahalfwidth;FF75
okorean;3157
olehebrew;05AB
omacron;014D
omacronacute;1E53
omacrongrave;1E51
omdeva;0950
omega;03C9
omega1;03D6
omegacyrillic;0461
omegalatinclosed;0277
omegaroundcyrillic;047B
omegatitlocyrillic;047D
omegatonos;03CE
omgujarati;0AD0
omicron;03BF
omicrontonos;03CC
omonospace;FF4F
one;0031
onearabic;0661
onebengali;09E7
onecircle;2460
onecircleinversesansserif;278A
onedeva;0967
onedotenleader;2024
oneeighth;215B
onefitted;F6DC
onegujarati;0AE7
onegurmukhi;0A67
onehackarabic;0661
onehalf;00BD
onehangzhou;3021
oneideographicparen;3220
oneinferior;2081
onemonospace;FF11
onenumeratorbengali;09F4
oneoldstyle;F731
oneparen;2474
oneperiod;2488
onepersian;06F1
onequarter;00BC
oneroman;2170
onesuperior;00B9
onethai;0E51
onethird;2153
oogonek;01EB
oogonekmacron;01ED
oogurmukhi;0A13
oomatragurmukhi;0A4B
oopen;0254
oparen;24AA
openbullet;25E6
option;2325
ordfeminine;00AA
ordmasculine;00BA
orthogonal;221F
oshortdeva;0912
oshortvowelsigndeva;094A
oslash;00F8
oslashacute;01FF
osmallhiragana;3049
osmallkatakana;30A9
osmallkatakanahalfwidth;FF6B
ostrokeacute;01FF
osuperior;F6F0
otcyrillic;047F
otilde;00F5
otildeacute;1E4D
otildedieresis;1E4F
oubopomofo;3121
overline;203E
overlinecenterline;FE4A
overlinecmb;0305
overlinedashed;FE49
overlinedblwavy;FE4C
overlinewavy;FE4B
overscore;00AF
ovowelsignbengali;09CB
ovowelsigndeva;094B
ovowelsigngujarati;0ACB
p;0070
paampssquare;3380
paasentosquare;332B
pabengali;09AA
pacute;1E55
padeva;092A
pagedown;21DF
pageup;21DE
pagujarati;0AAA
pagurmukhi;0A2A
pahiragana;3071
paiyannoithai;0E2F
pakatakana;30D1
palatalizationcyrilliccmb;0484
palochkacyrillic;04C0
pansioskorean;317F
paragraph;00B6
parallel;2225
parenleft;0028
parenleftaltonearabic;FD3E
parenleftbt;F8ED
parenleftex;F8EC
parenleftinferior;208D
parenleftmonospace;FF08
parenleftsmall;FE59
parenleftsuperior;207D
parenlefttp;F8EB
parenleftvertical;FE35
parenright;0029
parenrightaltonearabic;FD3F
parenrightbt;F8F8
parenrightex;F8F7
parenrightinferior;208E
parenrightmonospace;FF09
parenrightsmall;FE5A
parenrightsuperior;207E
parenrighttp;F8F6
parenrightvertical;FE36
partialdiff;2202
paseqhebrew;05C0
pashtahebrew;0599
pasquare;33A9
patah;05B7
patah11;05B7
patah1d;05B7
patah2a;05B7
patahhebrew;05B7
patahnarrowhebrew;05B7
patahquarterhebrew;05B7
patahwidehebrew;05B7
pazerhebrew;05A1
pbopomofo;3106
pcircle;24DF
pdotaccent;1E57
pe;05E4
pecyrillic;043F
pedagesh;FB44
pedageshhebrew;FB44
peezisquare;333B
pefinaldageshhebrew;FB43
peharabic;067E
peharmenian;057A
pehebrew;05E4
pehfinalarabic;FB57
pehinitialarabic;FB58
pehiragana;307A
pehmedialarabic;FB59
pekatakana;30DA
pemiddlehookcyrillic;04A7
perafehebrew;FB4E
percent;0025
percentarabic;066A
percentmonospace;FF05
percentsmall;FE6A
period;002E
periodarmenian;0589
periodcentered;00B7
periodhalfwidth;FF61
periodinferior;F6E7
periodmonospace;FF0E
periodsmall;FE52
periodsuperior;F6E8
perispomenigreekcmb;0342
perpendicular;22A5
perthousand;2030
peseta;20A7
pfsquare;338A
phabengali;09AB
phadeva;092B
phagujarati;0AAB
phagurmukhi;0A2B
phi;03C6
phi1;03D5
phieuphacirclekorean;327A
phieuphaparenkorean;321A
phieuphcirclekorean;326C
phieuphkorean;314D
phieuphparenkorean;320C
philatin;0278
phinthuthai;0E3A
phisymbolgreek;03D5
phook;01A5
phophanthai;0E1E
phophungthai;0E1C
phosamphaothai;0E20
pi;03C0
pieupacirclekorean;3273
pieupaparenkorean;3213
pieupcieuckorean;3176
pieupcirclekorean;3265
pieupkiyeokkorean;3172
pieupkorean;3142
pieupparenkorean;3205
pieupsioskiyeokkorean;3174
pieupsioskorean;3144
pieupsiostikeutkorean;3175
pieupthieuthkorean;3177
pieuptikeutkorean;3173
pihiragana;3074
pikatakana;30D4
pisymbolgreek;03D6
piwrarmenian;0583
plus;002B
plusbelowcmb;031F
pluscircle;2295
plusminus;00B1
plusmod;02D6
plusmonospace;FF0B
plussmall;FE62
plussuperior;207A
pmonospace;FF50
pmsquare;33D8
pohiragana;307D
pointingindexdownwhite;261F
pointingindexleftwhite;261C
pointingindexrightwhite;261E
pointingindexupwhite;261D
pokatakana;30DD
poplathai;0E1B
postalmark;3012
postalmarkface;3020
pparen;24AB
precedes;227A
prescription;211E
primemod;02B9
primereversed;2035
product;220F
projective;2305
prolongedkana;30FC
propellor;2318
propersubset;2282
propersuperset;2283
proportion;2237
proportional;221D
psi;03C8
psicyrillic;0471
psilipneumatacyrilliccmb;0486
pssquare;33B0
puhiragana;3077
pukatakana;30D7
pvsquare;33B4
pwsquare;33BA
q;0071
qadeva;0958
qadmahebrew;05A8
qafarabic;0642
qaffinalarabic;FED6
qafinitialarabic;FED7
qafmedialarabic;FED8
qamats;05B8
qamats10;05B8
qamats1a;05B8
qamats1c;05B8
qamats27;05B8
qamats29;05B8
qamats33;05B8
qamatsde;05B8
qamatshebrew;05B8
qamatsnarrowhebrew;05B8
qamatsqatanhebrew;05B8
qamatsqatannarrowhebrew;05B8
qamatsqatanquarterhebrew;05B8
qamatsqatanwidehebrew;05B8
qamatsquarterhebrew;05B8
qamatswidehebrew;05B8
qarneyparahebrew;059F
qbopomofo;3111
qcircle;24E0
qhook;02A0
qmonospace;FF51
qof;05E7
qofdagesh;FB47
qofdageshhebrew;FB47
qofhatafpatah;05E7 05B2
qofhatafpatahhebrew;05E7 05B2
qofhatafsegol;05E7 05B1
qofhatafsegolhebrew;05E7 05B1
qofhebrew;05E7
qofhiriq;05E7 05B4
qofhiriqhebrew;05E7 05B4
qofholam;05E7 05B9
qofholamhebrew;05E7 05B9
qofpatah;05E7 05B7
qofpatahhebrew;05E7 05B7
qofqamats;05E7 05B8
qofqamatshebrew;05E7 05B8
qofqubuts;05E7 05BB
qofqubutshebrew;05E7 05BB
qofsegol;05E7 05B6
qofsegolhebrew;05E7 05B6
qofsheva;05E7 05B0
qofshevahebrew;05E7 05B0
qoftsere;05E7 05B5
qoftserehebrew;05E7 05B5
qparen;24AC
quarternote;2669
qubuts;05BB
qubuts18;05BB
qubuts25;05BB
qubuts31;05BB
qubutshebrew;05BB
qubutsnarrowhebrew;05BB
qubutsquarterhebrew;05BB
qubutswidehebrew;05BB
question;003F
questionarabic;061F
questionarmenian;055E
questiondown;00BF
questiondownsmall;F7BF
questiongreek;037E
questionmonospace;FF1F
questionsmall;F73F
quotedbl;0022
quotedblbase;201E
quotedblleft;201C
quotedblmonospace;FF02
quotedblprime;301E
quotedblprimereversed;301D
quotedblright;201D
quoteleft;2018
quoteleftreversed;201B
quotereversed;201B
quoteright;2019
quoterightn;0149
quotesinglbase;201A
quotesingle;0027
quotesinglemonospace;FF07
r;0072
raarmenian;057C
rabengali;09B0
racute;0155
radeva;0930
radical;221A
radicalex;F8E5
radoverssquare;33AE
radoverssquaredsquare;33AF
radsquare;33AD
rafe;05BF
rafehebrew;05BF
ragujarati;0AB0
ragurmukhi;0A30
rahiragana;3089
rakatakana;30E9
rakatakanahalfwidth;FF97
ralowerdiagonalbengali;09F1
ramiddlediagonalbengali;09F0
ramshorn;0264
ratio;2236
rbopomofo;3116
rcaron;0159
rcedilla;0157
rcircle;24E1
rcommaaccent;0157
rdblgrave;0211
rdotaccent;1E59
rdotbelow;1E5B
rdotbelowmacron;1E5D
referencemark;203B
reflexsubset;2286
reflexsuperset;2287
registered;00AE
registersans;F8E8
registerserif;F6DA
reharabic;0631
reharmenian;0580
rehfinalarabic;FEAE
rehiragana;308C
rehyehaleflamarabic;0631 FEF3 FE8E 0644
rekatakana;30EC
rekatakanahalfwidth;FF9A
resh;05E8
reshdageshhebrew;FB48
reshhatafpatah;05E8 05B2
reshhatafpatahhebrew;05E8 05B2
reshhatafsegol;05E8 05B1
reshhatafsegolhebrew;05E8 05B1
reshhebrew;05E8
reshhiriq;05E8 05B4
reshhiriqhebrew;05E8 05B4
reshholam;05E8 05B9
reshholamhebrew;05E8 05B9
reshpatah;05E8 05B7
reshpatahhebrew;05E8 05B7
reshqamats;05E8 05B8
reshqamatshebrew;05E8 05B8
reshqubuts;05E8 05BB
reshqubutshebrew;05E8 05BB
reshsegol;05E8 05B6
reshsegolhebrew;05E8 05B6
reshsheva;05E8 05B0
reshshevahebrew;05E8 05B0
reshtsere;05E8 05B5
reshtserehebrew;05E8 05B5
reversedtilde;223D
reviahebrew;0597
reviamugrashhebrew;0597
revlogicalnot;2310
rfishhook;027E
rfishhookreversed;027F
rhabengali;09DD
rhadeva;095D
rho;03C1
rhook;027D
rhookturned;027B
rhookturnedsuperior;02B5
rhosymbolgreek;03F1
rhotichookmod;02DE
rieulacirclekorean;3271
rieulaparenkorean;3211
rieulcirclekorean;3263
rieulhieuhkorean;3140
rieulkiyeokkorean;313A
rieulkiyeoksioskorean;3169
rieulkorean;3139
rieulmieumkorean;313B
rieulpansioskorean;316C
rieulparenkorean;3203
rieulphieuphkorean;313F
rieulpieupkorean;313C
rieulpieupsioskorean;316B
rieulsioskorean;313D
rieulthieuthkorean;313E
rieultikeutkorean;316A
rieulyeorinhieuhkorean;316D
rightangle;221F
righttackbelowcmb;0319
righttriangle;22BF
rihiragana;308A
rikatakana;30EA
rikatakanahalfwidth;FF98
ring;02DA
ringbelowcmb;0325
ringcmb;030A
ringhalfleft;02BF
ringhalfleftarmenian;0559
ringhalfleftbelowcmb;031C
ringhalfleftcentered;02D3
ringhalfright;02BE
ringhalfrightbelowcmb;0339
ringhalfrightcentered;02D2
rinvertedbreve;0213
rittorusquare;3351
rlinebelow;1E5F
rlongleg;027C
rlonglegturned;027A
rmonospace;FF52
rohiragana;308D
rokatakana;30ED
rokatakanahalfwidth;FF9B
roruathai;0E23
rparen;24AD
rrabengali;09DC
rradeva;0931
rragurmukhi;0A5C
rreharabic;0691
rrehfinalarabic;FB8D
rrvocalicbengali;09E0
rrvocalicdeva;0960
rrvocalicgujarati;0AE0
rrvocalicvowelsignbengali;09C4
rrvocalicvowelsigndeva;0944
rrvocalicvowelsigngujarati;0AC4
rsuperior;F6F1
rtblock;2590
rturned;0279
rturnedsuperior;02B4
ruhiragana;308B
rukatakana;30EB
rukatakanahalfwidth;FF99
rupeemarkbengali;09F2
rupeesignbengali;09F3
rupiah;F6DD
ruthai;0E24
rvocalicbengali;098B
rvocalicdeva;090B
rvocalicgujarati;0A8B
rvocalicvowelsignbengali;09C3
rvocalicvowelsigndeva;0943
rvocalicvowelsigngujarati;0AC3
s;0073
sabengali;09B8
sacute;015B
sacutedotaccent;1E65
sadarabic;0635
sadeva;0938
sadfinalarabic;FEBA
sadinitialarabic;FEBB
sadmedialarabic;FEBC
sagujarati;0AB8
sagurmukhi;0A38
sahiragana;3055
sakatakana;30B5
sakatakanahalfwidth;FF7B
sallallahoualayhewasallamarabic;FDFA
samekh;05E1
samekhdagesh;FB41
samekhdageshhebrew;FB41
samekhhebrew;05E1
saraaathai;0E32
saraaethai;0E41
saraaimaimalaithai;0E44
saraaimaimuanthai;0E43
saraamthai;0E33
saraathai;0E30
saraethai;0E40
saraiileftthai;F886
saraiithai;0E35
saraileftthai;F885
saraithai;0E34
saraothai;0E42
saraueeleftthai;F888
saraueethai;0E37
saraueleftthai;F887
sarauethai;0E36
sarauthai;0E38
sarauuthai;0E39
sbopomofo;3119
scaron;0161
scarondotaccent;1E67
scedilla;015F
schwa;0259
schwacyrillic;04D9
schwadieresiscyrillic;04DB
schwahook;025A
scircle;24E2
scircumflex;015D
scommaaccent;0219
sdotaccent;1E61
sdotbelow;1E63
sdotbelowdotaccent;1E69
seagullbelowcmb;033C
second;2033
secondtonechinese;02CA
section;00A7
seenarabic;0633
seenfinalarabic;FEB2
seeninitialarabic;FEB3
seenmedialarabic;FEB4
segol;05B6
segol13;05B6
segol1f;05B6
segol2c;05B6
segolhebrew;05B6
segolnarrowhebrew;05B6
segolquarterhebrew;05B6
segoltahebrew;0592
segolwidehebrew;05B6
seharmenian;057D
sehiragana;305B
sekatakana;30BB
sekatakanahalfwidth;FF7E
semicolon;003B
semicolonarabic;061B
semicolonmonospace;FF1B
semicolonsmall;FE54
semivoicedmarkkana;309C
semivoicedmarkkanahalfwidth;FF9F
sentisquare;3322
sentosquare;3323
seven;0037
sevenarabic;0667
sevenbengali;09ED
sevencircle;2466
sevencircleinversesansserif;2790
sevendeva;096D
seveneighths;215E
sevengujarati;0AED
sevengurmukhi;0A6D
sevenhackarabic;0667
sevenhangzhou;3027
sevenideographicparen;3226
seveninferior;2087
sevenmonospace;FF17
sevenoldstyle;F737
sevenparen;247A
sevenperiod;248E
sevenpersian;06F7
sevenroman;2176
sevensuperior;2077
seventeencircle;2470
seventeenparen;2484
seventeenperiod;2498
seventhai;0E57
sfthyphen;00AD
shaarmenian;0577
shabengali;09B6
shacyrillic;0448
shaddaarabic;0651
shaddadammaarabic;FC61
shaddadammatanarabic;FC5E
shaddafathaarabic;FC60
shaddafathatanarabic;0651 064B
shaddakasraarabic;FC62
shaddakasratanarabic;FC5F
shade;2592
shadedark;2593
shadelight;2591
shademedium;2592
shadeva;0936
shagujarati;0AB6
shagurmukhi;0A36
shalshelethebrew;0593
shbopomofo;3115
shchacyrillic;0449
sheenarabic;0634
sheenfinalarabic;FEB6
sheeninitialarabic;FEB7
sheenmedialarabic;FEB8
sheicoptic;03E3
sheqel;20AA
sheqelhebrew;20AA
sheva;05B0
sheva115;05B0
sheva15;05B0
sheva22;05B0
sheva2e;05B0
shevahebrew;05B0
shevanarrowhebrew;05B0
shevaquarterhebrew;05B0
shevawidehebrew;05B0
shhacyrillic;04BB
shimacoptic;03ED
shin;05E9
shindagesh;FB49
shindageshhebrew;FB49
shindageshshindot;FB2C
shindageshshindothebrew;FB2C
shindageshsindot;FB2D
shindageshsindothebrew;FB2D
shindothebrew;05C1
shinhebrew;05E9
shinshindot;FB2A
shinshindothebrew;FB2A
shinsindot;FB2B
shinsindothebrew;FB2B
shook;0282
sigma;03C3
sigma1;03C2
sigmafinal;03C2
sigmalunatesymbolgreek;03F2
sihiragana;3057
sikatakana;30B7
sikatakanahalfwidth;FF7C
siluqhebrew;05BD
siluqlefthebrew;05BD
similar;223C
sindothebrew;05C2
siosacirclekorean;3274
siosaparenkorean;3214
sioscieuckorean;317E
sioscirclekorean;3266
sioskiyeokkorean;317A
sioskorean;3145
siosnieunkorean;317B
siosparenkorean;3206
siospieupkorean;317D
siostikeutkorean;317C
six;0036
sixarabic;0666
sixbengali;09EC
sixcircle;2465
sixcircleinversesansserif;278F
sixdeva;096C
sixgujarati;0AEC
sixgurmukhi;0A6C
sixhackarabic;0666
sixhangzhou;3026
sixideographicparen;3225
sixinferior;2086
sixmonospace;FF16
sixoldstyle;F736
sixparen;2479
sixperiod;248D
sixpersian;06F6
sixroman;2175
sixsuperior;2076
sixteencircle;246F
sixteencurrencydenominatorbengali;09F9
sixteenparen;2483
sixteenperiod;2497
sixthai;0E56
slash;002F
slashmonospace;FF0F
slong;017F
slongdotaccent;1E9B
smileface;263A
smonospace;FF53
sofpasuqhebrew;05C3
softhyphen;00AD
softsigncyrillic;044C
sohiragana;305D
sokatakana;30BD
sokatakanahalfwidth;FF7F
soliduslongoverlaycmb;0338
solidusshortoverlaycmb;0337
sorusithai;0E29
sosalathai;0E28
sosothai;0E0B
sosuathai;0E2A
space;0020
spacehackarabic;0020
spade;2660
spadesuitblack;2660
spadesuitwhite;2664
sparen;24AE
squarebelowcmb;033B
squarecc;33C4
squarecm;339D
squarediagonalcrosshatchfill;25A9
squarehorizontalfill;25A4
squarekg;338F
squarekm;339E
squarekmcapital;33CE
squareln;33D1
squarelog;33D2
squaremg;338E
squaremil;33D5
squaremm;339C
squaremsquared;33A1
squareorthogonalcrosshatchfill;25A6
squareupperlefttolowerrightfill;25A7
squareupperrighttolowerleftfill;25A8
squareverticalfill;25A5
squarewhitewithsmallblack;25A3
srsquare;33DB
ssabengali;09B7
ssadeva;0937
ssagujarati;0AB7
ssangcieuckorean;3149
ssanghieuhkorean;3185
ssangieungkorean;3180
ssangkiyeokkorean;3132
ssangnieunkorean;3165
ssangpieupkorean;3143
ssangsioskorean;3146
ssangtikeutkorean;3138
ssuperior;F6F2
sterling;00A3
sterlingmonospace;FFE1
strokelongoverlaycmb;0336
strokeshortoverlaycmb;0335
subset;2282
subsetnotequal;228A
subsetorequal;2286
succeeds;227B
suchthat;220B
suhiragana;3059
sukatakana;30B9
sukatakanahalfwidth;FF7D
sukunarabic;0652
summation;2211
sun;263C
superset;2283
supersetnotequal;228B
supersetorequal;2287
svsquare;33DC
syouwaerasquare;337C
t;0074
tabengali;09A4
tackdown;22A4
tackleft;22A3
tadeva;0924
tagujarati;0AA4
tagurmukhi;0A24
taharabic;0637
tahfinalarabic;FEC2
tahinitialarabic;FEC3
tahiragana;305F
tahmedialarabic;FEC4
taisyouerasquare;337D
takatakana;30BF
takatakanahalfwidth;FF80
tatweelarabic;0640
tau;03C4
tav;05EA
tavdages;FB4A
tavdagesh;FB4A
tavdageshhebrew;FB4A
tavhebrew;05EA
tbar;0167
tbopomofo;310A
tcaron;0165
tccurl;02A8
tcedilla;0163
tcheharabic;0686
tchehfinalarabic;FB7B
tchehinitialarabic;FB7C
tchehmedialarabic;FB7D
tchehmeeminitialarabic;FB7C FEE4
tcircle;24E3
tcircumflexbelow;1E71
tcommaaccent;0163
tdieresis;1E97
tdotaccent;1E6B
tdotbelow;1E6D
tecyrillic;0442
tedescendercyrillic;04AD
teharabic;062A
tehfinalarabic;FE96
tehhahinitialarabic;FCA2
tehhahisolatedarabic;FC0C
tehinitialarabic;FE97
tehiragana;3066
tehjeeminitialarabic;FCA1
tehjeemisolatedarabic;FC0B
tehmarbutaarabic;0629
tehmarbutafinalarabic;FE94
tehmedialarabic;FE98
tehmeeminitialarabic;FCA4
tehmeemisolatedarabic;FC0E
tehnoonfinalarabic;FC73
tekatakana;30C6
tekatakanahalfwidth;FF83
telephone;2121
telephoneblack;260E
telishagedolahebrew;05A0
telishaqetanahebrew;05A9
tencircle;2469
tenideographicparen;3229
tenparen;247D
tenperiod;2491
tenroman;2179
tesh;02A7
tet;05D8
tetdagesh;FB38
tetdageshhebrew;FB38
tethebrew;05D8
tetsecyrillic;04B5
tevirhebrew;059B
tevirlefthebrew;059B
thabengali;09A5
thadeva;0925
thagujarati;0AA5
thagurmukhi;0A25
thalarabic;0630
thalfinalarabic;FEAC
thanthakhatlowleftthai;F898
thanthakhatlowrightthai;F897
thanthakhatthai;0E4C
thanthakhatupperleftthai;F896
theharabic;062B
thehfinalarabic;FE9A
thehinitialarabic;FE9B
thehmedialarabic;FE9C
thereexists;2203
therefore;2234
theta;03B8
theta1;03D1
thetasymbolgreek;03D1
thieuthacirclekorean;3279
thieuthaparenkorean;3219
thieuthcirclekorean;326B
thieuthkorean;314C
thieuthparenkorean;320B
thirteencircle;246C
thirteenparen;2480
thirteenperiod;2494
thonangmonthothai;0E11
thook;01AD
thophuthaothai;0E12
thorn;00FE
thothahanthai;0E17
thothanthai;0E10
thothongthai;0E18
thothungthai;0E16
thousandcyrillic;0482
thousandsseparatorarabic;066C
thousandsseparatorpersian;066C
three;0033
threearabic;0663
threebengali;09E9
threecircle;2462
threecircleinversesansserif;278C
threedeva;0969
threeeighths;215C
threegujarati;0AE9
threegurmukhi;0A69
threehackarabic;0663
threehangzhou;3023
threeideographicparen;3222
threeinferior;2083
threemonospace;FF13
threenumeratorbengali;09F6
threeoldstyle;F733
threeparen;2476
threeperiod;248A
threepersian;06F3
threequarters;00BE
threequartersemdash;F6DE
threeroman;2172
threesuperior;00B3
threethai;0E53
thzsquare;3394
tihiragana;3061
tikatakana;30C1
tikatakanahalfwidth;FF81
tikeutacirclekorean;3270
tikeutaparenkorean;3210
tikeutcirclekorean;3262
tikeutkorean;3137
tikeutparenkorean;3202
tilde;02DC
tildebelowcmb;0330
tildecmb;0303
tildecomb;0303
tildedoublecmb;0360
tildeoperator;223C
tildeoverlaycmb;0334
tildeverticalcmb;033E
timescircle;2297
tipehahebrew;0596
tipehalefthebrew;0596
tippigurmukhi;0A70
titlocyrilliccmb;0483
tiwnarmenian;057F
tlinebelow;1E6F
tmonospace;FF54
toarmenian;0569
tohiragana;3068
tokatakana;30C8
tokatakanahalfwidth;FF84
tonebarextrahighmod;02E5
tonebarextralowmod;02E9
tonebarhighmod;02E6
tonebarlowmod;02E8
tonebarmidmod;02E7
tonefive;01BD
tonesix;0185
tonetwo;01A8
tonos;0384
tonsquare;3327
topatakthai;0E0F
tortoiseshellbracketleft;3014
tortoiseshellbracketleftsmall;FE5D
tortoiseshellbracketleftvertical;FE39
tortoiseshellbracketright;3015
tortoiseshellbracketrightsmall;FE5E
tortoiseshellbracketrightvertical;FE3A
totaothai;0E15
tpalatalhook;01AB
tparen;24AF
trademark;2122
trademarksans;F8EA
trademarkserif;F6DB
tretroflexhook;0288
triagdn;25BC
triaglf;25C4
triagrt;25BA
triagup;25B2
ts;02A6
tsadi;05E6
tsadidagesh;FB46
tsadidageshhebrew;FB46
tsadihebrew;05E6
tsecyrillic;0446
tsere;05B5
tsere12;05B5
tsere1e;05B5
tsere2b;05B5
tserehebrew;05B5
tserenarrowhebrew;05B5
tserequarterhebrew;05B5
tserewidehebrew;05B5
tshecyrillic;045B
tsuperior;F6F3
ttabengali;099F
ttadeva;091F
ttagujarati;0A9F
ttagurmukhi;0A1F
tteharabic;0679
ttehfinalarabic;FB67
ttehinitialarabic;FB68
ttehmedialarabic;FB69
tthabengali;09A0
tthadeva;0920
tthagujarati;0AA0
tthagurmukhi;0A20
tturned;0287
tuhiragana;3064
tukatakana;30C4
tukatakanahalfwidth;FF82
tusmallhiragana;3063
tusmallkatakana;30C3
tusmallkatakanahalfwidth;FF6F
twelvecircle;246B
twelveparen;247F
twelveperiod;2493
twelveroman;217B
twentycircle;2473
twentyhangzhou;5344
twentyparen;2487
twentyperiod;249B
two;0032
twoarabic;0662
twobengali;09E8
twocircle;2461
twocircleinversesansserif;278B
twodeva;0968
twodotenleader;2025
twodotleader;2025
twodotleadervertical;FE30
twogujarati;0AE8
twogurmukhi;0A68
twohackarabic;0662
twohangzhou;3022
twoideographicparen;3221
twoinferior;2082
twomonospace;FF12
twonumeratorbengali;09F5
twooldstyle;F732
twoparen;2475
twoperiod;2489
twopersian;06F2
tworoman;2171
twostroke;01BB
twosuperior;00B2
twothai;0E52
twothirds;2154
u;0075
uacute;00FA
ubar;0289
ubengali;0989
ubopomofo;3128
ubreve;016D
ucaron;01D4
ucircle;24E4
ucircumflex;00FB
ucircumflexbelow;1E77
ucyrillic;0443
udattadeva;0951
udblacute;0171
udblgrave;0215
udeva;0909
udieresis;00FC
udieresisacute;01D8
udieresisbelow;1E73
udieresiscaron;01DA
udieresiscyrillic;04F1
udieresisgrave;01DC
udieresismacron;01D6
udotbelow;1EE5
ugrave;00F9
ugujarati;0A89
ugurmukhi;0A09
uhiragana;3046
uhookabove;1EE7
uhorn;01B0
uhornacute;1EE9
uhorndotbelow;1EF1
uhorngrave;1EEB
uhornhookabove;1EED
uhorntilde;1EEF
uhungarumlaut;0171
uhungarumlautcyrillic;04F3
uinvertedbreve;0217
ukatakana;30A6
ukatakanahalfwidth;FF73
ukcyrillic;0479
ukorean;315C
umacron;016B
umacroncyrillic;04EF
umacrondieresis;1E7B
umatragurmukhi;0A41
umonospace;FF55
underscore;005F
underscoredbl;2017
underscoremonospace;FF3F
underscorevertical;FE33
underscorewavy;FE4F
union;222A
universal;2200
uogonek;0173
uparen;24B0
upblock;2580
upperdothebrew;05C4
upsilon;03C5
upsilondieresis;03CB
upsilondieresistonos;03B0
upsilonlatin;028A
upsilontonos;03CD
uptackbelowcmb;031D
uptackmod;02D4
uragurmukhi;0A73
uring;016F
ushortcyrillic;045E
usmallhiragana;3045
usmallkatakana;30A5
usmallkatakanahalfwidth;FF69
ustraightcyrillic;04AF
ustraightstrokecyrillic;04B1
utilde;0169
utildeacute;1E79
utildebelow;1E75
uubengali;098A
uudeva;090A
uugujarati;0A8A
uugurmukhi;0A0A
uumatragurmukhi;0A42
uuvowelsignbengali;09C2
uuvowelsigndeva;0942
uuvowelsigngujarati;0AC2
uvowelsignbengali;09C1
uvowelsigndeva;0941
uvowelsigngujarati;0AC1
v;0076
vadeva;0935
vagujarati;0AB5
vagurmukhi;0A35
vakatakana;30F7
vav;05D5
vavdagesh;FB35
vavdagesh65;FB35
vavdageshhebrew;FB35
vavhebrew;05D5
vavholam;FB4B
vavholamhebrew;FB4B
vavvavhebrew;05F0
vavyodhebrew;05F1
vcircle;24E5
vdotbelow;1E7F
vecyrillic;0432
veharabic;06A4
vehfinalarabic;FB6B
vehinitialarabic;FB6C
vehmedialarabic;FB6D
vekatakana;30F9
venus;2640
verticalbar;007C
verticallineabovecmb;030D
verticallinebelowcmb;0329
verticallinelowmod;02CC
verticallinemod;02C8
vewarmenian;057E
vhook;028B
vikatakana;30F8
viramabengali;09CD
viramadeva;094D
viramagujarati;0ACD
visargabengali;0983
visargadeva;0903
visargagujarati;0A83
vmonospace;FF56
voarmenian;0578
voicediterationhiragana;309E
voicediterationkatakana;30FE
voicedmarkkana;309B
voicedmarkkanahalfwidth;FF9E
vokatakana;30FA
vparen;24B1
vtilde;1E7D
vturned;028C
vuhiragana;3094
vukatakana;30F4
w;0077
wacute;1E83
waekorean;3159
wahiragana;308F
wakatakana;30EF
wakatakanahalfwidth;FF9C
wakorean;3158
wasmallhiragana;308E
wasmallkatakana;30EE
wattosquare;3357
wavedash;301C
wavyunderscorevertical;FE34
wawarabic;0648
wawfinalarabic;FEEE
wawhamzaabovearabic;0624
wawhamzaabovefinalarabic;FE86
wbsquare;33DD
wcircle;24E6
wcircumflex;0175
wdieresis;1E85
wdotaccent;1E87
wdotbelow;1E89
wehiragana;3091
weierstrass;2118
wekatakana;30F1
wekorean;315E
weokorean;315D
wgrave;1E81
whitebullet;25E6
whitecircle;25CB
whitecircleinverse;25D9
whitecornerbracketleft;300E
whitecornerbracketleftvertical;FE43
whitecornerbracketright;300F
whitecornerbracketrightvertical;FE44
whitediamond;25C7
whitediamondcontainingblacksmalldiamond;25C8
whitedownpointingsmalltriangle;25BF
whitedownpointingtriangle;25BD
whiteleftpointingsmalltriangle;25C3
whiteleftpointingtriangle;25C1
whitelenticularbracketleft;3016
whitelenticularbracketright;3017
whiterightpointingsmalltriangle;25B9
whiterightpointingtriangle;25B7
whitesmallsquare;25AB
whitesmilingface;263A
whitesquare;25A1
whitestar;2606
whitetelephone;260F
whitetortoiseshellbracketleft;3018
whitetortoiseshellbracketright;3019
whiteuppointingsmalltriangle;25B5
whiteuppointingtriangle;25B3
wihiragana;3090
wikatakana;30F0
wikorean;315F
wmonospace;FF57
wohiragana;3092
wokatakana;30F2
wokatakanahalfwidth;FF66
won;20A9
wonmonospace;FFE6
wowaenthai;0E27
wparen;24B2
wring;1E98
wsuperior;02B7
wturned;028D
wynn;01BF
x;0078
xabovecmb;033D
xbopomofo;3112
xcircle;24E7
xdieresis;1E8D
xdotaccent;1E8B
xeharmenian;056D
xi;03BE
xmonospace;FF58
xparen;24B3
xsuperior;02E3
y;0079
yaadosquare;334E
yabengali;09AF
yacute;00FD
yadeva;092F
yaekorean;3152
yagujarati;0AAF
yagurmukhi;0A2F
yahiragana;3084
yakatakana;30E4
yakatakanahalfwidth;FF94
yakorean;3151
yamakkanthai;0E4E
yasmallhiragana;3083
yasmallkatakana;30E3
yasmallkatakanahalfwidth;FF6C
yatcyrillic;0463
ycircle;24E8
ycircumflex;0177
ydieresis;00FF
ydotaccent;1E8F
ydotbelow;1EF5
yeharabic;064A
yehbarreearabic;06D2
yehbarreefinalarabic;FBAF
yehfinalarabic;FEF2
yehhamzaabovearabic;0626
yehhamzaabovefinalarabic;FE8A
yehhamzaaboveinitialarabic;FE8B
yehhamzaabovemedialarabic;FE8C
yehinitialarabic;FEF3
yehmedialarabic;FEF4
yehmeeminitialarabic;FCDD
yehmeemisolatedarabic;FC58
yehnoonfinalarabic;FC94
yehthreedotsbelowarabic;06D1
yekorean;3156
yen;00A5
yenmonospace;FFE5
yeokorean;3155
yeorinhieuhkorean;3186
yerahbenyomohebrew;05AA
yerahbenyomolefthebrew;05AA
yericyrillic;044B
yerudieresiscyrillic;04F9
yesieungkorean;3181
yesieungpansioskorean;3183
yesieungsioskorean;3182
yetivhebrew;059A
ygrave;1EF3
yhook;01B4
yhookabove;1EF7
yiarmenian;0575
yicyrillic;0457
yikorean;3162
yinyang;262F
yiwnarmenian;0582
ymonospace;FF59
yod;05D9
yoddagesh;FB39
yoddageshhebrew;FB39
yodhebrew;05D9
yodyodhebrew;05F2
yodyodpatahhebrew;FB1F
yohiragana;3088
yoikorean;3189
yokatakana;30E8
yokatakanahalfwidth;FF96
yokorean;315B
yosmallhiragana;3087
yosmallkatakana;30E7
yosmallkatakanahalfwidth;FF6E
yotgreek;03F3
yoyaekorean;3188
yoyakorean;3187
yoyakthai;0E22
yoyingthai;0E0D
yparen;24B4
ypogegrammeni;037A
ypogegrammenigreekcmb;0345
yr;01A6
yring;1E99
ysuperior;02B8
ytilde;1EF9
yturned;028E
yuhiragana;3086
yuikorean;318C
yukatakana;30E6
yukatakanahalfwidth;FF95
yukorean;3160
yusbigcyrillic;046B
yusbigiotifiedcyrillic;046D
yuslittlecyrillic;0467
yuslittleiotifiedcyrillic;0469
yusmallhiragana;3085
yusmallkatakana;30E5
yusmallkatakanahalfwidth;FF6D
yuyekorean;318B
yuyeokorean;318A
yyabengali;09DF
yyadeva;095F
z;007A
zaarmenian;0566
zacute;017A
zadeva;095B
zagurmukhi;0A5B
zaharabic;0638
zahfinalarabic;FEC6
zahinitialarabic;FEC7
zahiragana;3056
zahmedialarabic;FEC8
zainarabic;0632
zainfinalarabic;FEB0
zakatakana;30B6
zaqefgadolhebrew;0595
zaqefqatanhebrew;0594
zarqahebrew;0598
zayin;05D6
zayindagesh;FB36
zayindageshhebrew;FB36
zayinhebrew;05D6
zbopomofo;3117
zcaron;017E
zcircle;24E9
zcircumflex;1E91
zcurl;0291
zdot;017C
zdotaccent;017C
zdotbelow;1E93
zecyrillic;0437
zedescendercyrillic;0499
zedieresiscyrillic;04DF
zehiragana;305C
zekatakana;30BC
zero;0030
zeroarabic;0660
zerobengali;09E6
zerodeva;0966
zerogujarati;0AE6
zerogurmukhi;0A66
zerohackarabic;0660
zeroinferior;2080
zeromonospace;FF10
zerooldstyle;F730
zeropersian;06F0
zerosuperior;2070
zerothai;0E50
zerowidthjoiner;FEFF
zerowidthnonjoiner;200C
zerowidthspace;200B
zeta;03B6
zhbopomofo;3113
zhearmenian;056A
zhebrevecyrillic;04C2
zhecyrillic;0436
zhedescendercyrillic;0497
zhedieresiscyrillic;04DD
zihiragana;3058
zikatakana;30B8
zinorhebrew;05AE
zlinebelow;1E95
zmonospace;FF5A
zohiragana;305E
zokatakana;30BE
zparen;24B5
zretroflexhook;0290
zstroke;01B6
zuhiragana;305A
zukatakana;30BA
a100;275E
a101;2761
a102;2762
a103;2763
a104;2764
a105;2710
a106;2765
a107;2766
a108;2767
a109;2660
a10;2721
a110;2665
a111;2666
a112;2663
a117;2709
a118;2708
a119;2707
a11;261B
a120;2460
a121;2461
a122;2462
a123;2463
a124;2464
a125;2465
a126;2466
a127;2467
a128;2468
a129;2469
a12;261E
a130;2776
a131;2777
a132;2778
a133;2779
a134;277A
a135;277B
a136;277C
a137;277D
a138;277E
a139;277F
a13;270C
a140;2780
a141;2781
a142;2782
a143;2783
a144;2784
a145;2785
a146;2786
a147;2787
a148;2788
a149;2789
a14;270D
a150;278A
a151;278B
a152;278C
a153;278D
a154;278E
a155;278F
a156;2790
a157;2791
a158;2792
a159;2793
a15;270E
a160;2794
a161;2192
a162;27A3
a163;2194
a164;2195
a165;2799
a166;279B
a167;279C
a168;279D
a169;279E
a16;270F
a170;279F
a171;27A0
a172;27A1
a173;27A2
a174;27A4
a175;27A5
a176;27A6
a177;27A7
a178;27A8
a179;27A9
a17;2711
a180;27AB
a181;27AD
a182;27AF
a183;27B2
a184;27B3
a185;27B5
a186;27B8
a187;27BA
a188;27BB
a189;27BC
a18;2712
a190;27BD
a191;27BE
a192;279A
a193;27AA
a194;27B6
a195;27B9
a196;2798
a197;27B4
a198;27B7
a199;27AC
a19;2713
a1;2701
a200;27AE
a201;27B1
a202;2703
a203;2750
a204;2752
a205;276E
a206;2770
a20;2714
a21;2715
a22;2716
a23;2717
a24;2718
a25;2719
a26;271A
a27;271B
a28;271C
a29;2722
a2;2702
a30;2723
a31;2724
a32;2725
a33;2726
a34;2727
a35;2605
a36;2729
a37;272A
a38;272B
a39;272C
a3;2704
a40;272D
a41;272E
a42;272F
a43;2730
a44;2731
a45;2732
a46;2733
a47;2734
a48;2735
a49;2736
a4;260E
a50;2737
a51;2738
a52;2739
a53;273A
a54;273B
a55;273C
a56;273D
a57;273E
a58;273F
a59;2740
a5;2706
a60;2741
a61;2742
a62;2743
a63;2744
a64;2745
a65;2746
a66;2747
a67;2748
a68;2749
a69;274A
a6;271D
a70;274B
a71;25CF
a72;274D
a73;25A0
a74;274F
a75;2751
a76;25B2
a77;25BC
a78;25C6
a79;2756
a7;271E
a81;25D7
a82;2758
a83;2759
a84;275A
a85;276F
a86;2771
a87;2772
a88;2773
a89;2768
a8;271F
a90;2769
a91;276C
a92;276D
a93;276A
a94;276B
a95;2774
a96;2775
a97;275B
a98;275C
a99;275D
a9;2720
"""
# string table management
#
class StringTable:
    """Collect names into one NUL-separated C `char' array.

    `indices` maps each name to its byte offset inside the master table,
    so dump_sublist() can later emit `short' tables of offsets into it.
    """

    def __init__( self, name_list, master_table_name ):
        # name_list: sequence of glyph-name strings
        # master_table_name: C identifier used for the generated char array
        self.names = name_list
        self.master_table = master_table_name
        self.indices = {}
        index = 0
        # each name occupies len( name ) + 1 bytes (trailing NUL)
        for name in name_list:
            self.indices[name] = index
            index += len( name ) + 1
        # total byte size of the master table
        self.total = index

    def dump( self, file ):
        """Write the master `char' table as C source to `file'."""
        write = file.write
        # extern declaration unless DEFINE_PS_TABLES requests the definition
        write( "#ifndef DEFINE_PS_TABLES\n" )
        write( "#ifdef __cplusplus\n" )
        write( ' extern "C"\n' )
        write( "#else\n" )
        write( " extern\n" )
        write( "#endif\n" )
        write( "#endif\n" )
        write( " const char " + self.master_table +
               "[" + repr( self.total ) + "]\n" )
        write( "#ifdef DEFINE_PS_TABLES\n" )
        write( " =\n" )
        write( " {\n" )
        line = ""
        # emit every name as quoted characters followed by a 0 terminator
        # (string.join / re.findall: this script is Python 2)
        for name in self.names:
            line += " '"
            line += string.join( ( re.findall( ".", name ) ), "','" )
            line += "', 0,\n"
        write( line )
        write( " }\n" )
        write( "#endif /* DEFINE_PS_TABLES */\n" )
        write( " ;\n\n\n" )

    def dump_sublist( self, file, table_name, macro_name, sublist ):
        """Write a `short' table holding, for each name in `sublist',
        its offset into the master table; `macro_name' becomes a #define
        holding the entry count."""
        write = file.write
        write( "#define " + macro_name + " " + repr( len( sublist ) ) + "\n\n" )
        write( " /* Values are offsets into the `" +
               self.master_table + "' table */\n\n" )
        # same extern/definition boilerplate as dump()
        write( "#ifndef DEFINE_PS_TABLES\n" )
        write( "#ifdef __cplusplus\n" )
        write( ' extern "C"\n' )
        write( "#else\n" )
        write( " extern\n" )
        write( "#endif\n" )
        write( "#endif\n" )
        write( " const short " + table_name +
               "[" + macro_name + "]\n" )
        write( "#ifdef DEFINE_PS_TABLES\n" )
        write( " =\n" )
        write( " {\n" )
        line = " "
        comma = ""
        col = 0
        # 14 offsets per output row
        for name in sublist:
            line += comma
            line += "%4d" % self.indices[name]
            col += 1
            comma = ","
            if col == 14:
                col = 0
                comma = ",\n "
        write( line )
        write( "\n" )
        write( " }\n" )
        write( "#endif /* DEFINE_PS_TABLES */\n" )
        write( " ;\n\n\n" )
# We now store the Adobe Glyph List in compressed form. The list is put
# into a data structure called `trie' (because it has a tree-like
# appearance). Consider, for example, that you want to store the
# following name mapping:
#
# A => 1
# Aacute => 6
# Abalon => 2
# Abstract => 4
#
# It is possible to store the entries as follows.
#
# A => 1
# |
# +-acute => 6
# |
# +-b
# |
# +-alon => 2
# |
# +-stract => 4
#
# We see that each node in the trie has:
#
# - one or more `letters'
# - an optional value
# - zero or more child nodes
#
# The first step is to call
#
# root = StringNode( "", 0 )
# for word in map.values():
# root.add( word, map[word] )
#
# which creates a large trie where each node has only one child.
#
# Executing
#
# root = root.optimize()
#
# optimizes the trie by merging the letters of successive nodes whenever
# possible.
#
# Each node of the trie is stored as follows.
#
# - First the node's letter, according to the following scheme. We
# use the fact that in the AGL no name contains character codes > 127.
#
# name bitsize description
# ----------------------------------------------------------------
# notlast 1 Set to 1 if this is not the last letter
# in the word.
# ascii 7 The letter's ASCII value.
#
# - The letter is followed by a children count and the value of the
# current key (if any). Again we can do some optimization because all
# AGL entries are from the BMP; this means that 16 bits are sufficient
# to store its Unicode values. Additionally, no node has more than
# 127 children.
#
# name bitsize description
# -----------------------------------------
# hasvalue 1 Set to 1 if a 16-bit Unicode value follows.
# num_children 7 Number of children. Can be 0 only if
# `hasvalue' is set to 1.
# value 16 Optional Unicode value.
#
# - A node is finished by a list of 16bit absolute offsets to the
# children, which must be sorted in increasing order of their first
# letter.
#
# For simplicity, all 16bit quantities are stored in big-endian order.
#
# The root node has first letter = 0, and no value.
#
class StringNode:
    """One node of the trie used to compress the Adobe Glyph List.

    A node carries one or more letters, an optional 16-bit Unicode value
    (0 meaning `no value') and a dict of child nodes keyed by their first
    letter.  The serialized byte layout is described in the large comment
    above this class.
    """

    def __init__( self, letter, value ):
        # letter: node label; a single char initially, possibly several
        #         after optimize() merges single-child chains
        # value: Unicode value, or 0 when no glyph name ends here
        self.letter = letter
        self.value = value
        self.children = {}

    def __cmp__( self, other ):
        # Python 2 comparison hook: nodes sort by their first letter,
        # which is the child order the C binary search relies on
        return ord( self.letter[0] ) - ord( other.letter[0] )

    def add( self, word, value ):
        """Insert `word' with `value' below this node, one letter per level."""
        if len( word ) == 0:
            self.value = value
            return
        letter = word[0]
        word = word[1:]
        # dict.has_key: Python 2 only
        if self.children.has_key( letter ):
            child = self.children[letter]
        else:
            child = StringNode( letter, 0 )
            self.children[letter] = child
        child.add( word, value )

    def optimize( self ):
        """Merge single-child chains so one node may hold several letters."""
        # optimize all children first
        children = self.children.values()
        self.children = {}
        for child in children:
            self.children[child.letter[0]] = child.optimize()
        # don't optimize if there's a value,
        # if we don't have any child or if we
        # have more than one child
        if ( self.value != 0 ) or ( not children ) or len( children ) > 1:
            return self
        # exactly one child and no value: absorb the child into this node
        child = children[0]
        self.letter += child.letter
        self.value = child.value
        self.children = child.children
        return self

    def dump_debug( self, write, margin ):
        """Pretty-print this subtree (debugging aid only)."""
        # this is used during debugging
        line = margin + "+-"
        if len( self.letter ) == 0:
            line += "<NOLETTER>"
        else:
            line += self.letter
        if self.value:
            line += " => " + repr( self.value )
        write( line + "\n" )
        if self.children:
            margin += "| "
            for child in self.children.values():
                child.dump_debug( write, margin )

    def locate( self, index ):
        """Assign absolute byte offsets to this subtree, starting at
        `index'; returns the first free offset after the subtree.
        The byte counts here must mirror exactly what store() emits."""
        self.index = index
        # letter bytes (1 each) plus the count/value header byte;
        # an empty letter is stored as a single 0 byte, hence the 2
        if len( self.letter ) > 0:
            index += len( self.letter ) + 1
        else:
            index += 2
        # optional 16-bit Unicode value
        if self.value != 0:
            index += 2
        children = self.children.values()
        children.sort()
        # one 16-bit absolute offset per child
        index += 2 * len( children )
        for child in children:
            index = child.locate( index )
        return index

    def store( self, storage ):
        """Append this subtree's serialized bytes to `storage' and return it."""
        # write the letters; bit 7 set means `more letters follow'
        l = len( self.letter )
        if l == 0:
            storage += struct.pack( "B", 0 )
        else:
            for n in range( l ):
                val = ord( self.letter[n] )
                if n < l - 1:
                    val += 128
                storage += struct.pack( "B", val )
        # write the count; bit 7 set means a 16-bit value follows
        children = self.children.values()
        children.sort()
        count = len( children )
        if self.value != 0:
            storage += struct.pack( "!BH", count + 128, self.value )
        else:
            storage += struct.pack( "B", count )
        # 16-bit big-endian absolute offsets of the sorted children
        for child in children:
            storage += struct.pack( "!H", child.index )
        for child in children:
            storage = child.store( storage )
        return storage
def adobe_glyph_values():
    """return the list of glyph names and their unicode values"""
    glyphs = []
    values = []
    # one `name;value' record per line; entries that map to several
    # Unicode values (more than one field after the `;') are skipped
    for record in string.split( adobe_glyph_list, '\n' ):
        if not record:
            continue
        fields = string.split( record, ';' )
        # print fields[1] + ' - ' + fields[0]
        if len( string.split( fields[1], ' ' ) ) == 1:
            glyphs.append( fields[0] )
            values.append( fields[1] )
    return glyphs, values
def filter_glyph_names( alist, filter ):
    """filter `alist' by taking _out_ all glyph names that are in `filter'"""
    extras = []
    for name in alist:
        # `filter' is a plain list, so a membership test replaces the old
        # index()/bare-except dance; the bare `except:' also swallowed
        # unrelated exceptions such as KeyboardInterrupt
        if name not in filter:
            extras.append( name )
    return extras
def dump_encoding( file, encoding_name, encoding_list ):
    """Dump a given encoding as a C `unsigned short' index table."""
    emit = file.write

    # boilerplate: declare the table `extern' unless DEFINE_PS_TABLES is
    # set, in which case the definition below is compiled in
    header = [
        " /* the following are indices into the SID name table */\n",
        "#ifndef DEFINE_PS_TABLES\n",
        "#ifdef __cplusplus\n",
        ' extern "C"\n',
        "#else\n",
        " extern\n",
        "#endif\n",
        "#endif\n",
        " const unsigned short " + encoding_name
        + "[" + repr( len( encoding_list ) ) + "]\n",
        "#ifdef DEFINE_PS_TABLES\n",
        " =\n",
        " {\n",
    ]
    for text in header:
        emit( text )

    # 16 values per row, comma-separated
    cells = []
    for position, value in enumerate( encoding_list ):
        if position == 0:
            separator = ""
        elif position % 16 == 0:
            separator = ",\n "
        else:
            separator = ","
        cells.append( separator + "%3d" % value )
    emit( " " + "".join( cells ) )

    emit( "\n" )
    emit( " }\n" )
    emit( "#endif /* DEFINE_PS_TABLES */\n" )
    emit( " ;\n\n\n" )
def dump_array( the_array, write, array_name ):
    """Dump a byte string as a C `unsigned char' array named `array_name'."""
    # same extern/definition boilerplate as the other dump helpers
    for text in ( "#ifndef DEFINE_PS_TABLES\n",
                  "#ifdef __cplusplus\n",
                  ' extern "C"\n',
                  "#else\n",
                  " extern\n",
                  "#endif\n",
                  "#endif\n",
                  " const unsigned char " + array_name
                  + "[" + repr( len( the_array ) ) + "L]\n",
                  "#ifdef DEFINE_PS_TABLES\n",
                  " =\n",
                  " {\n" ):
        write( text )

    buffered = ""
    separator = " "
    for position, item in enumerate( the_array ):
        buffered += separator + "%3d" % ord( item )
        # 16 bytes per row; position + 1 is how many were written so far
        separator = ",\n " if ( position + 1 ) % 16 == 0 else ","
        # flush periodically so the pending string never grows too large
        if len( buffered ) > 1024:
            write( buffered )
            buffered = ""
    write( buffered )

    write( "\n" )
    write( " }\n" )
    write( "#endif /* DEFINE_PS_TABLES */\n" )
    write( " ;\n\n\n" )
def main():
"""main program body"""
if len( sys.argv ) != 2:
print __doc__ % sys.argv[0]
sys.exit( 1 )
file = open( sys.argv[1], "w\n" )
write = file.write
count_sid = len( sid_standard_names )
# `mac_extras' contains the list of glyph names in the Macintosh standard
# encoding which are not in the SID Standard Names.
#
mac_extras = filter_glyph_names( mac_standard_names, sid_standard_names )
# `base_list' contains the names of our final glyph names table.
# It consists of the `mac_extras' glyph names, followed by the SID
# standard names.
#
mac_extras_count = len( mac_extras )
base_list = mac_extras + sid_standard_names
write( "/***************************************************************************/\n" )
write( "/* */\n" )
write( "/* %-71s*/\n" % os.path.basename( sys.argv[1] ) )
write( "/* */\n" )
write( "/* PostScript glyph names. */\n" )
write( "/* */\n" )
write( "/* Copyright 2005-2017 by */\n" )
write( "/* David Turner, Robert Wilhelm, and Werner Lemberg. */\n" )
write( "/* */\n" )
write( "/* This file is part of the FreeType project, and may only be used, */\n" )
write( "/* modified, and distributed under the terms of the FreeType project */\n" )
write( "/* license, LICENSE.TXT. By continuing to use, modify, or distribute */\n" )
write( "/* this file you indicate that you have read the license and */\n" )
write( "/* understand and accept it fully. */\n" )
write( "/* */\n" )
write( "/***************************************************************************/\n" )
write( "\n" )
write( "\n" )
write( " /* This file has been generated automatically -- do not edit! */\n" )
write( "\n" )
write( "\n" )
# dump final glyph list (mac extras + sid standard names)
#
st = StringTable( base_list, "ft_standard_glyph_names" )
st.dump( file )
st.dump_sublist( file, "ft_mac_names",
"FT_NUM_MAC_NAMES", mac_standard_names )
st.dump_sublist( file, "ft_sid_names",
"FT_NUM_SID_NAMES", sid_standard_names )
dump_encoding( file, "t1_standard_encoding", t1_standard_encoding )
dump_encoding( file, "t1_expert_encoding", t1_expert_encoding )
# dump the AGL in its compressed form
#
agl_glyphs, agl_values = adobe_glyph_values()
dict = StringNode( "", 0 )
for g in range( len( agl_glyphs ) ):
dict.add( agl_glyphs[g], eval( "0x" + agl_values[g] ) )
dict = dict.optimize()
dict_len = dict.locate( 0 )
dict_array = dict.store( "" )
write( """\
/*
* This table is a compressed version of the Adobe Glyph List (AGL),
* optimized for efficient searching. It has been generated by the
* `glnames.py' python script located in the `src/tools' directory.
*
* The lookup function to get the Unicode value for a given string
* is defined below the table.
*/
#ifdef FT_CONFIG_OPTION_ADOBE_GLYPH_LIST
""" )
dump_array( dict_array, write, "ft_adobe_glyph_list" )
# write the lookup routine now
#
write( """\
#ifdef DEFINE_PS_TABLES
/*
* This function searches the compressed table efficiently.
*/
static unsigned long
ft_get_adobe_glyph_index( const char* name,
const char* limit )
{
int c = 0;
int count, min, max;
const unsigned char* p = ft_adobe_glyph_list;
if ( name == 0 || name >= limit )
goto NotFound;
c = *name++;
count = p[1];
p += 2;
min = 0;
max = count;
while ( min < max )
{
int mid = ( min + max ) >> 1;
const unsigned char* q = p + mid * 2;
int c2;
q = ft_adobe_glyph_list + ( ( (int)q[0] << 8 ) | q[1] );
c2 = q[0] & 127;
if ( c2 == c )
{
p = q;
goto Found;
}
if ( c2 < c )
min = mid + 1;
else
max = mid;
}
goto NotFound;
Found:
for (;;)
{
/* assert (*p & 127) == c */
if ( name >= limit )
{
if ( (p[0] & 128) == 0 &&
(p[1] & 128) != 0 )
return (unsigned long)( ( (int)p[2] << 8 ) | p[3] );
goto NotFound;
}
c = *name++;
if ( p[0] & 128 )
{
p++;
if ( c != (p[0] & 127) )
goto NotFound;
continue;
}
p++;
count = p[0] & 127;
if ( p[0] & 128 )
p += 2;
p++;
for ( ; count > 0; count--, p += 2 )
{
int offset = ( (int)p[0] << 8 ) | p[1];
const unsigned char* q = ft_adobe_glyph_list + offset;
if ( c == ( q[0] & 127 ) )
{
p = q;
goto NextIter;
}
}
goto NotFound;
NextIter:
;
}
NotFound:
return 0;
}
#endif /* DEFINE_PS_TABLES */
#endif /* FT_CONFIG_OPTION_ADOBE_GLYPH_LIST */
""" )
if 0: # generate unit test, or don't
#
# now write the unit test to check that everything works OK
#
write( "#ifdef TEST\n\n" )
write( "static const char* const the_names[] = {\n" )
for name in agl_glyphs:
write( ' "' + name + '",\n' )
write( " 0\n};\n" )
write( "static const unsigned long the_values[] = {\n" )
for val in agl_values:
write( ' 0x' + val + ',\n' )
write( " 0\n};\n" )
write( """
#include <stdlib.h>
#include <stdio.h>
int
main( void )
{
int result = 0;
const char* const* names = the_names;
const unsigned long* values = the_values;
for ( ; *names; names++, values++ )
{
const char* name = *names;
unsigned long reference = *values;
unsigned long value;
value = ft_get_adobe_glyph_index( name, name + strlen( name ) );
if ( value != reference )
{
result = 1;
fprintf( stderr, "name '%s' => %04x instead of %04x\\n",
name, value, reference );
}
}
return result;
}
""" )
write( "#endif /* TEST */\n" )
write("\n/* END */\n")
# Now run the main routine, but only when executed as a script so the
# module can also be imported without side effects
#
if __name__ == "__main__":
    main()
# END
|
gpl-3.0
| -6,469,612,608,725,052,000
| 18.251625
| 92
| 0.750156
| false
| 2.271272
| false
| false
| false
|
bbfamily/abu
|
abupy/WidgetBu/ABuWGBase.py
|
1
|
8919
|
# -*- encoding:utf-8 -*-
"""股票基本信息图形可视化"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import logging
import ipywidgets as widgets
from abc import ABCMeta, abstractmethod
from IPython.display import display
from ..CoreBu.ABuFixes import six, partial
from ..UtilBu.ABuStrUtil import to_unicode
from ..UtilBu.ABuOsUtil import show_msg
from ..MarketBu.ABuSymbol import search_to_symbol_dict
__author__ = '阿布'
__weixin__ = 'abu_quant'
show_msg_func = logging.info
"""基于不同系统的提示框使用partial包装title以及显示log"""
show_msg_toast_func = partial(show_msg, u'提示', log=True)
def accordion_shut(accordion):
    """Collapse an accordion widget, tolerating ipywidgets API differences.

    Different ipywidgets versions disagree on the sentinel that means
    'no open panel' (-1 vs None), so both are attempted in turn.
    """
    try:
        accordion.selected_index = -1
    except Exception:
        # -1 rejected by this widget version: fall back to None
        try:
            accordion.selected_index = None
        except Exception:
            # give up silently: failing to fold the panel is cosmetic only;
            # note the old bare `except:' also swallowed KeyboardInterrupt
            pass
# noinspection PyUnresolvedReferences,PyProtectedMember
class WidgetBase(object):
    """UI component base class; subclasses must store their final
    ipywidgets object in ``self.widget``."""

    def __call__(self):
        # calling the instance yields the underlying widget object
        return self.widget

    def display(self):
        """Render the widget uniformly through IPython's display()."""
        display(self.widget)
class WidgetFactorBase(six.with_metaclass(ABCMeta, WidgetBase)):
    """Abstract base class for strategy-factor visualization widgets."""

    def __init__(self, wg_manager):
        # manager that owns this factor widget
        self.wg_manager = wg_manager
        # the concrete subclass builds the actual widget in _init_widget()
        self.widget = None
        # shared ipywidgets layouts reused by subclasses
        self.label_layout = widgets.Layout(width='300px', align_items='stretch')
        self.description_layout = widgets.Layout(height='150px')
        self.widget_layout = widgets.Layout(align_items='stretch', justify_content='space-between')

    @abstractmethod
    def _init_widget(self):
        """Build the subclass-specific factor UI."""
        pass

    @abstractmethod
    def delegate_class(self):
        """Return the concrete factor class this widget delegates to."""
        pass
class WidgetFactorManagerBase(six.with_metaclass(ABCMeta, WidgetBase)):
    """Abstract base class managing a pool of strategy-factor widgets."""

    def __init__(self, show_add_buy=True, add_button_style='default'):
        # mapping: factor description string -> factor construction dict
        self.factor_dict = {}
        self.factor_wg_array = []
        # candidate factor pool scrolls horizontally along the x axis
        self.factor_layout = widgets.Layout(overflow_x='scroll',
                                            # flex_direction='row',
                                            display='flex')
        self.selected_factors = widgets.SelectMultiple(
            options=[],
            description=u'已添加策略:',
            disabled=False,
            layout=widgets.Layout(width='100%', align_items='stretch')
        )
        # clicking an already-added global strategy removes it
        self.selected_factors.observe(self.remove_factor, names='value')
        # observers to notify whenever the global strategy pool changes
        self.selected_factors_obs = set()
        self.factor_box = None
        # the scrollable factor UI is disabled by default because widget
        # versions and operating systems in the wild are not uniform
        self.scroll_factor_box = False
        self._sub_children_group_cnt = 3
        self.show_add_buy = show_add_buy
        self.add_button_style = add_button_style
        # concrete subclass builds its own UI here
        self._init_widget()
        if self.factor_box is None:
            raise RuntimeError('_init_widget must build factor_box!')
        self.widget = widgets.VBox([self.factor_box, self.selected_factors])

    def _sub_children(self, children, n_split):
        """Partition ``children`` into consecutive groups of
        ``len(children) // n_split`` items; any remainder forms an extra
        trailing group."""
        sub_children_cnt = int(len(children) / n_split)
        if sub_children_cnt == 0:
            sub_children_cnt = 1
        # zip(*[iter(a)] * k) yields consecutive k-sized tuples of a
        group_adjacent = lambda a, k: zip(*([iter(a)] * k))
        children_group = list(group_adjacent(children, sub_children_cnt))
        # negative offset of the first leftover element, 0 when none remain
        residue_ind = -(len(children) % sub_children_cnt) if sub_children_cnt > 0 else 0
        if residue_ind < 0:
            children_group.append(children[residue_ind:])
        return children_group

    def register_subscriber(self, observe):
        """Register an observer for selected-pool updates; cooperates with
        BFSubscriberMixin."""
        self.selected_factors_obs.add(observe)

    def unregister_subscriber(self, observe):
        """Unregister an observer of selected-pool updates; cooperates with
        BFSubscriberMixin."""
        self.selected_factors_obs.remove(observe)

    def notify_subscriber(self):
        """Notify every registered observer that the selected pool changed."""
        for observe in self.selected_factors_obs:
            if hasattr(observe, 'notify_subscriber'):
                observe.notify_subscriber()

    @abstractmethod
    def _init_widget(self):
        """Subclass UI construction; must assign self.factor_box."""
        pass

    def refresh_factor(self):
        """Refresh the selected strategy pool and notify other views."""
        self.selected_factors.options = list(self.factor_dict.keys())
        self.notify_subscriber()

    def remove_factor(self, select):
        """Remove the clicked strategies from the selected pool."""
        for st_key in list(select['new']):
            self.factor_dict.pop(st_key)
        self.selected_factors.options = list(self.factor_dict.keys())
        # let other dependent views refresh themselves
        self.notify_subscriber()

    def add_factor(self, factor_dict, factor_desc_key, only_one=False):
        """Add a strategy (construction dict + description key) to the pool."""
        if factor_desc_key in self.factor_dict:
            msg = u'{} 策略已经添加过,重复添加!'.format(to_unicode(factor_desc_key))
            show_msg_toast_func(msg)
            return
        if only_one:
            # non-repeatable container strategies: e.g. a buy strategy maps
            # to only one position-management strategy, while most containers
            # allow several buy / sell / stock-pick factors at once;
            # avoid dict.clear() -- unsupported on some old py2 builds
            self.factor_dict = {}
        self.factor_dict[factor_desc_key] = factor_dict
        self.selected_factors.options = list(self.factor_dict.keys())
        # let other dependent views refresh themselves
        self.notify_subscriber()
        msg = u'{}策略已添加成功!'.format(to_unicode(factor_desc_key))
        show_msg_toast_func(msg)
class WidgetSearchBox(WidgetBase):
    """Symbol search-box UI widget."""

    # noinspection PyProtectedMember
    def __init__(self, search_result_callable):
        """Build the stock-pool search UI.

        :param search_result_callable: observer invoked when the user
            selects entries in the result list
        """
        if not callable(search_result_callable):
            raise TypeError('search_result_select_func must callable!')
        # symbol search button and text input
        self.search_bt = widgets.Button(description=u'搜索:', layout=widgets.Layout(height='10%', width='7%'))
        self.search_input = widgets.Text(
            value='',
            placeholder=u'交易代码/公司名称/拼音首字母',
            description='',
            disabled=False
        )
        self.search_input.observe(self._search_input_change, names='value')
        # symbol search result list
        self.search_result = widgets.SelectMultiple(
            options=[],
            description=u'搜索结果:',
            disabled=False,
            layout=widgets.Layout(width='300px', align_items='stretch', justify_content='space-between')
        )
        self.search_result.observe(search_result_callable, names='value')
        self.search_bt.on_click(self._do_search)
        # assemble: (button + input) row stacked above the result list
        sc_hb = widgets.HBox([self.search_bt, self.search_input])
        self.widget = widgets.VBox([sc_hb, self.search_result])

    # noinspection PyUnusedLocal
    def _do_search(self, bt):
        """Run the symbol search when the button is clicked."""
        result_dict = search_to_symbol_dict(self.search_input.value)
        result_options = [u'{}:{}'.format(to_unicode(result_dict[symbol]), to_unicode(symbol))
                          for symbol in result_dict]
        self.search_result.options = result_options

    def _search_input_change(self, change):
        """Auto-search once more than one character has been typed."""
        search_word = change['new']
        if len(search_word) > 1:
            # unlike _do_search, use fast_mode here to keep typing responsive
            result_dict = search_to_symbol_dict(self.search_input.value, fast_mode=True)
            result_options = [u'{}:{}'.format(to_unicode(result_dict[symbol]), to_unicode(symbol))
                              for symbol in result_dict]
            self.search_result.options = result_options
# noinspection PyUnusedLocal
def permission_denied(*arg, **kwargs):
    """Placeholder handler shown to users lacking execution permission."""
    # message text: 'insufficient permission for the requested operation'
    show_msg_toast_func(u'所执行的操作权限不足!')
|
gpl-3.0
| 5,627,444,927,889,660,000
| 33.44
| 108
| 0.61324
| false
| 2.701883
| false
| false
| false
|
michellab/Sire
|
wrapper/Tools/DCDFile.py
|
1
|
10606
|
import struct, time, array, os
from math import pi
from Sire.Maths import Vector
from Sire.Mol import *
from Sire.IO import *
#
# Adapted from Peter Eastman's code in OpenMM python API to write a DCD file
#
class DCDFile(object):
    """DCDFile provides methods for creating DCD files.

    DCD is a binary trajectory format supported by many programs, such as
    CHARMM, NAMD and X-PLOR.  Different programs produce subtly different
    variants; this class writes the CHARMM variant, always with
    little-endian byte ordering.

    Use: create a DCDFile, then call writeModel() once per frame, or
    writeBufferedModels() to flush frames buffered on the molecules.
    Adapted from Peter Eastman's DCD writer in the OpenMM python API.
    """
    def __init__(self, strfile, group, space, dt, firstStep=0, interval=1):
        """Create the DCD file and write out the header.

        Parameters:
         - strfile (str)   path of the DCD file to create
         - group           molecule group whose atoms will be written
         - space           simulation space; periodicity sets the unit-cell flag
         - dt              integration time step (object exposing .value())
         - firstStep (int=0)  index of the first step in the trajectory
         - interval (int=1)   number of time steps between saved frames
        """
        file = open(strfile,'wb')
        #PDB().write(group, "%s.pdb" % strfile)
        self._file = file
        self._group = group
        self._space = space
        self._firstStep = firstStep
        self._interval = interval
        # Frames written so far; patched back into the header on every write.
        self._modelCount = 0
        #if is_quantity(dt):
        #    dt = dt.value_in_unit(picoseconds)
        #dt /= 0.04888821
        dt = dt.value()
        # Count atoms across all molecules; the header needs the total up front.
        natoms = 0
        molecules = group.molecules()
        molnums = molecules.molNums()
        for molnum in molnums:
            mol = molecules.molecule(molnum)[0].molecule()
            nat = mol.nAtoms()
            natoms += nat
        print("There are %s atoms in the group " % natoms)
        #sys.exit(-1)
        # Unit-cell flag: 1 means box information precedes each frame.
        boxFlag = 0
        if space.isPeriodic():
            boxFlag = 1
        # CHARMM-style header: record length 84, 'CORD' magic, frame count
        # (0 for now, updated as frames are written), first step, save
        # interval, six zero ints, then the time step as a float.
        header = struct.pack(b'<i4c9if', 84, b'C', b'O', b'R', b'D', 0, firstStep, interval, 0, 0, 0, 0, 0, 0, dt)
        header += struct.pack(b'<13i', boxFlag, 0, 0, 0, 0, 0, 0, 0, 0, 24, 84, 164, 2)
        header += struct.pack(b'<80s', b'Created by OpenMM')
        header += struct.pack(b'<80s', bytes('Created '+time.asctime(time.localtime(time.time())),"utf-8"))
        header += struct.pack(b'<4i', 164, 4, natoms, 4)
        file.write( header )
    def writeModel(self, group, space):
        """Append one frame with the current coordinates of ``group``.

        Parameters:
         - group   molecule group to write (atom count must match the header)
         - space   simulation space; if periodic, unit-cell data is written
                   before the coordinates
        """
        #if len(list(self._topology.atoms())) != len(positions):
        #    raise ValueError('The number of positions must match the number of atoms')
        #if is_quantity(positions):
        #    positions = positions.value_in_unit(nanometers)
        file = self._file
        # Update the header: byte offset 8 holds the frame count, offset 20
        # the index of the last step written.
        self._modelCount += 1
        file.seek(8, os.SEEK_SET)
        file.write(struct.pack('<i', self._modelCount))
        file.seek(20, os.SEEK_SET)
        file.write(struct.pack('<i', self._firstStep+self._modelCount*self._interval))
        # Write the data.
        file.seek(0, os.SEEK_END)
        if space.isPeriodic():
            # PeriodicBox: rectangular cell, zero angles.
            try:
                boxSize = space.dimensions()
                file.write(struct.pack('<i6di', 48, boxSize[0], 0, boxSize[1], 0, 0, boxSize[2], 48))
            # TriclinicBox: spaces without dimensions() raise and fall through
            # here; write cell vector lengths plus angles (degrees) instead.
            except:
                v0 = space.vector0()
                v1 = space.vector1()
                v2 = space.vector2()
                rad2deg = 180 / pi
                alpha = Vector.angle(v1, v2).value() * rad2deg
                beta = Vector.angle(v0, v2).value() * rad2deg
                gamma = Vector.angle(v1, v0).value() * rad2deg
                file.write(struct.pack('<i6di', 48, v0.magnitude(), gamma, v1.magnitude(), beta, alpha, v2.magnitude(), 48))
        # Record length (bytes) of one coordinate block: 4 bytes per atom.
        natoms = 0
        for i in range(0,group.nMolecules()):
            mol = group[MolIdx(i)][0].molecule()
            nat = mol.nAtoms()
            natoms += nat
        length = struct.pack('<i', 4*natoms)
        # To get the positions...
        # Loop over that group
        nmols = group.nMolecules()
        coords = []
        # JM 10/14 bugfix change of behavior of QSet in QT5: molNums() order
        # is not stable, so sort to keep atom order consistent across frames.
        molnums = group.molNums()
        molnums.sort()
        for i in range(0,group.nMolecules()):
            #mol = group[MolIdx(i)].molecule()
            mol = group[molnums[i]][0].molecule()
            molcoords = mol.property("coordinates")
            # (Disabled molecule/atom coordinate-wrapping experiments omitted.)
            coords += molcoords.toVector()
        # DCD stores coordinates as three fixed-length blocks -- all x, all y,
        # then all z -- each bracketed by the block length.
        for i in range(3):
            file.write(length)
            data = array.array('f', (x[i] for x in coords))
            data.tofile(file)
        file.write(length)
    def writeBufferedModels(self, group, dimensions):
        """Write every buffered snapshot stored on the molecules of ``group``.

        Each molecule is expected to carry properties named
        ``buffered_coord_<i>``; ``dimensions`` may carry matching
        ``buffered_space_<i>`` entries with the unit cell for each snapshot.

        Parameters:
         - group       molecule group holding buffered coordinates
         - dimensions  dict of buffered space properties keyed by
                       ``buffered_space_<i>``
        """
        #if len(list(self._topology.atoms())) != len(positions):
        #    raise ValueError('The number of positions must match the number of atoms')
        #if is_quantity(positions):
        #    positions = positions.value_in_unit(nanometers)
        file = self._file
        # Find the number of buffered frames we have by inspecting the first molecule in the group
        # assuming all molecules have same number of buffered coordinates...
        mol = group.first()[0].molecule()
        molprops = mol.propertyKeys()
        nbuf = 0
        for molprop in molprops:
            if molprop.startswith("buffered_coord"):
                nbuf += 1
        if nbuf <= 0:
            print("Could not find any buffered coordinates in the passed group ! ")
            return
        #
        # Should be more efficient to loop over all mols once
        #
        for x in range(0,nbuf):
            # Update the header (frame count at byte 8, last step at byte 20).
            self._modelCount += 1
            file.seek(8, os.SEEK_SET)
            file.write(struct.pack('<i', self._modelCount))
            file.seek(20, os.SEEK_SET)
            file.write(struct.pack('<i', self._firstStep+self._modelCount*self._interval))
            # Write the data.
            file.seek(0, os.SEEK_END)
            # Get buffered space...
            boxSize = None
            if ("buffered_space_%s" % x) in dimensions:
                # PeriodicBox.
                try:
                    boxSize = dimensions["buffered_space_%s" % x].dimensions()
                    #print "buffered_space_%s" % x, boxSize
                    if boxSize is not None:
                        file.write(struct.pack('<i6di', 48, boxSize[0], 0, boxSize[1], 0, 0, boxSize[2], 48))
                # TriclinicBox: no dimensions() -> write vectors and angles.
                except:
                    v0 = dimensions["buffered_space_%s" % x].vector0()
                    v1 = dimensions["buffered_space_%s" % x].vector1()
                    v2 = dimensions["buffered_space_%s" % x].vector2()
                    rad2deg = 180 / pi
                    alpha = Vector.angle(v1, v2).value() * rad2deg
                    beta = Vector.angle(v0, v2).value() * rad2deg
                    gamma = Vector.angle(v1, v0).value() * rad2deg
                    file.write(struct.pack('<i6di', 48, v0.magnitude(), gamma, v1.magnitude(), beta, alpha, v2.magnitude(), 48))
            natoms = 0
            for i in range(0,group.nMolecules()):
                mol = group[MolIdx(i)][0].molecule()
                nat = mol.nAtoms()
                natoms += nat
            length = struct.pack('<i', 4*natoms)
            # To get the positions...
            # Loop over that group
            nmols = group.nMolecules()
            coords = []
            # JM 10/14 bugfix change of behavior of QSet in QT5
            molnums = group.molNums()
            molnums.sort()
            for i in range(0,group.nMolecules()):
                #mol = group[MolIdx(i)].molecule()
                mol = group[molnums[i]][0]
                molcoords = mol.property("buffered_coord_%s" % x)
                coords += molcoords.toVector()
            # Coordinates go out as three blocks (all x, all y, all z), each
            # bracketed by the block length.
            for i in range(3):
                file.write(length)
                data = array.array('f', (x[i] for x in coords))
                data.tofile(file)
            file.write(length)
        #rewind
        file.seek(0, os.SEEK_SET)
|
gpl-2.0
| 6,188,089,025,012,289,000
| 37.427536
| 136
| 0.528569
| false
| 3.812365
| false
| false
| false
|
amit0701/rally
|
tests/unit/common/objects/test_task.py
|
1
|
12985
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for db.task layer."""
import datetime as dt
import json
import ddt
import jsonschema
import mock
from rally.common import objects
from rally import consts
from rally import exceptions
from tests.unit import test
@ddt.ddt
class TaskTestCase(test.TestCase):
    """Unit tests for rally.common.objects.Task with the db layer mocked."""

    def setUp(self):
        super(TaskTestCase, self).setUp()
        # Minimal task row shared by the tests below.
        self.task = {
            "uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca1",
            "status": consts.TaskStatus.INIT,
            "verification_log": "",
        }
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_init_with_create(self, mock_task_create):
        mock_task_create.return_value = self.task
        task = objects.Task(status=consts.TaskStatus.FAILED)
        mock_task_create.assert_called_once_with({
            "status": consts.TaskStatus.FAILED})
        self.assertEqual(task["uuid"], self.task["uuid"])
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_init_without_create(self, mock_task_create):
        task = objects.Task(task=self.task)
        self.assertFalse(mock_task_create.called)
        self.assertEqual(task["uuid"], self.task["uuid"])
    @mock.patch("rally.common.objects.task.uuid.uuid4",
                return_value="some_uuid")
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_init_with_fake_true(self, mock_task_create, mock_uuid4):
        task = objects.Task(temporary=True)
        self.assertFalse(mock_task_create.called)
        self.assertTrue(mock_uuid4.called)
        self.assertEqual(task["uuid"], mock_uuid4.return_value)
    @mock.patch("rally.common.objects.task.db.task_get")
    def test_get(self, mock_task_get):
        mock_task_get.return_value = self.task
        task = objects.Task.get(self.task["uuid"])
        mock_task_get.assert_called_once_with(self.task["uuid"])
        self.assertEqual(task["uuid"], self.task["uuid"])
    @mock.patch("rally.common.objects.task.db.task_get_status")
    def test_get_status(self, mock_task_get_status):
        task = objects.Task(task=self.task)
        status = task.get_status(task["uuid"])
        self.assertEqual(status, mock_task_get_status.return_value)
    @mock.patch("rally.common.objects.task.db.task_delete")
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_create_and_delete(self, mock_task_create, mock_task_delete):
        mock_task_create.return_value = self.task
        task = objects.Task()
        task.delete()
        mock_task_delete.assert_called_once_with(
            self.task["uuid"], status=None)
    @mock.patch("rally.common.objects.task.db.task_delete")
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_create_and_delete_status(self, mock_task_create,
                                      mock_task_delete):
        mock_task_create.return_value = self.task
        task = objects.Task()
        task.delete(status=consts.TaskStatus.FINISHED)
        mock_task_delete.assert_called_once_with(
            self.task["uuid"], status=consts.TaskStatus.FINISHED)
    @mock.patch("rally.common.objects.task.db.task_delete")
    def test_delete_by_uuid(self, mock_task_delete):
        objects.Task.delete_by_uuid(self.task["uuid"])
        mock_task_delete.assert_called_once_with(
            self.task["uuid"], status=None)
    @mock.patch("rally.common.objects.task.db.task_delete")
    def test_delete_by_uuid_status(self, mock_task_delete):
        objects.Task.delete_by_uuid(self.task["uuid"],
                                    consts.TaskStatus.FINISHED)
        mock_task_delete.assert_called_once_with(
            self.task["uuid"], status=consts.TaskStatus.FINISHED)
    @mock.patch("rally.common.objects.task.db.task_list",
                return_value=[{"uuid": "a",
                               "created_at": "b",
                               "status": consts.TaskStatus.FAILED,
                               "tag": "d",
                               "deployment_name": "some_name"}])
    def test_list(self, mock_db_task_list):
        # NOTE: this was previously named "list" (no "test_" prefix), so the
        # runner never executed it and the broken assertion below it went
        # unnoticed.
        tasks = objects.Task.list(status="somestatus")
        mock_db_task_list.assert_called_once_with("somestatus", None)
        self.assertIs(type(tasks), list)
        self.assertIsInstance(tasks[0], objects.Task)
        # return_value is a one-element list of dicts: index the row first
        # (the old code did return_value["uuis"], a TypeError plus a typo).
        self.assertEqual(mock_db_task_list.return_value[0]["uuid"],
                         tasks[0]["uuid"])
    @mock.patch("rally.common.objects.deploy.db.task_update")
    @mock.patch("rally.common.objects.task.db.task_create")
    def test_update(self, mock_task_create, mock_task_update):
        mock_task_create.return_value = self.task
        mock_task_update.return_value = {"opt": "val2"}
        deploy = objects.Task(opt="val1")
        deploy._update({"opt": "val2"})
        mock_task_update.assert_called_once_with(
            self.task["uuid"], {"opt": "val2"})
        self.assertEqual(deploy["opt"], "val2")
    @ddt.data(
        {
            "status": "some_status", "allowed_statuses": ("s_1", "s_2")
        },
        {
            "status": "some_status", "allowed_statuses": None
        }
    )
    @ddt.unpack
    @mock.patch("rally.common.objects.task.db.task_update_status")
    @mock.patch("rally.common.objects.task.db.task_update")
    def test_update_status(self, mock_task_update, mock_task_update_status,
                           status, allowed_statuses):
        # With allowed_statuses the guarded update path must be used;
        # otherwise the plain update path.
        task = objects.Task(task=self.task)
        task.update_status(consts.TaskStatus.FINISHED, allowed_statuses)
        if allowed_statuses:
            self.assertFalse(mock_task_update.called)
            mock_task_update_status.assert_called_once_with(
                self.task["uuid"],
                consts.TaskStatus.FINISHED,
                allowed_statuses
            )
        else:
            self.assertFalse(mock_task_update_status.called)
            mock_task_update.assert_called_once_with(
                self.task["uuid"],
                {"status": consts.TaskStatus.FINISHED},
            )
    @mock.patch("rally.common.objects.task.db.task_update")
    def test_update_verification_log(self, mock_task_update):
        mock_task_update.return_value = self.task
        task = objects.Task(task=self.task)
        task.update_verification_log({"a": "fake"})
        mock_task_update.assert_called_once_with(
            self.task["uuid"],
            {"verification_log": json.dumps({"a": "fake"})}
        )
    def test_extend_results(self):
        self.assertRaises(TypeError, objects.Task.extend_results)
        now = dt.datetime.now()
        iterations = [
            {"timestamp": i + 2, "duration": i + 5,
             "scenario_output": {"errors": "", "data": {}},
             "error": [], "idle_duration": i,
             "atomic_actions": {
                 "keystone.create_user": i + 10}} for i in range(10)]
        obsolete = [
            {"task_uuid": "foo_uuid", "created_at": now, "updated_at": None,
             "id": 11, "key": {"kw": {"foo": 42},
                               "name": "Foo.bar", "pos": 0},
             "data": {"raw": iterations, "sla": [],
                      "full_duration": 40, "load_duration": 32}}]
        expected = [
            {"iterations": "foo_iterations", "sla": [],
             "key": {"kw": {"foo": 42}, "name": "Foo.bar", "pos": 0},
             "info": {
                 "atomic": {"keystone.create_user": {"max_duration": 19,
                                                     "min_duration": 10}},
                 "iterations_count": 10, "iterations_failed": 0,
                 "max_duration": 14, "min_duration": 5,
                 "tstamp_start": 2, "full_duration": 40, "load_duration": 32}}]
        # serializable is default
        results = objects.Task.extend_results(obsolete)
        self.assertIsInstance(results[0]["iterations"], type(iter([])))
        self.assertEqual(list(results[0]["iterations"]), iterations)
        results[0]["iterations"] = "foo_iterations"
        self.assertEqual(results, expected)
        # serializable is False
        results = objects.Task.extend_results(obsolete, serializable=False)
        self.assertIsInstance(results[0]["iterations"], type(iter([])))
        self.assertEqual(list(results[0]["iterations"]), iterations)
        results[0]["iterations"] = "foo_iterations"
        self.assertEqual(results, expected)
        # serializable is True
        results = objects.Task.extend_results(obsolete, serializable=True)
        self.assertEqual(list(results[0]["iterations"]), iterations)
        # NOTE(review): "%Y-%d-%m..." swaps day and month relative to ISO
        # 8601; it mirrors the serialization in objects.task, so change both
        # together if that is ever corrected.
        expected[0]["created_at"] = now.strftime("%Y-%d-%mT%H:%M:%S")
        expected[0]["updated_at"] = None
        jsonschema.validate(results[0],
                            objects.task.TASK_EXTENDED_RESULT_SCHEMA)
        results[0]["iterations"] = "foo_iterations"
        self.assertEqual(results, expected)
    @mock.patch("rally.common.objects.task.db.task_result_get_all_by_uuid",
                return_value="foo_results")
    def test_get_results(self, mock_task_result_get_all_by_uuid):
        task = objects.Task(task=self.task)
        results = task.get_results()
        mock_task_result_get_all_by_uuid.assert_called_once_with(
            self.task["uuid"])
        self.assertEqual(results, "foo_results")
    @mock.patch("rally.common.objects.task.db.task_result_create")
    def test_append_results(self, mock_task_result_create):
        task = objects.Task(task=self.task)
        task.append_results("opt", "val")
        mock_task_result_create.assert_called_once_with(
            self.task["uuid"], "opt", "val")
    @mock.patch("rally.common.objects.task.db.task_update")
    def test_set_failed(self, mock_task_update):
        mock_task_update.return_value = self.task
        task = objects.Task(task=self.task)
        task.set_failed()
        mock_task_update.assert_called_once_with(
            self.task["uuid"],
            {"status": consts.TaskStatus.FAILED, "verification_log": "\"\""},
        )
    @ddt.data(
        {
            "soft": True, "status": consts.TaskStatus.INIT
        },
        {
            "soft": True, "status": consts.TaskStatus.VERIFYING
        },
        {
            "soft": False, "status": consts.TaskStatus.INIT
        },
        {
            "soft": False, "status": consts.TaskStatus.VERIFYING
        }
    )
    @ddt.unpack
    def test_abort_with_init_and_verifying_states(self, soft, status):
        # Aborting a task that has not started running must raise.
        task = objects.Task(mock.MagicMock(), fake=True)
        task.get_status = mock.MagicMock(
            side_effect=(status, status, "running"))
        task._update_status_in_abort = mock.MagicMock()
        self.assertRaises(exceptions.RallyException, task.abort, soft)
        self.assertEqual(1, task.get_status.call_count)
        self.assertFalse(task._update_status_in_abort.called)
    @ddt.data(
        {
            "soft": True, "status": consts.TaskStatus.ABORTED
        },
        {
            "soft": True, "status": consts.TaskStatus.FINISHED
        },
        {
            "soft": True, "status": consts.TaskStatus.FAILED
        },
        {
            "soft": False, "status": consts.TaskStatus.ABORTED
        },
        {
            "soft": False, "status": consts.TaskStatus.FINISHED
        },
        {
            "soft": False, "status": consts.TaskStatus.FAILED
        }
    )
    @ddt.unpack
    def test_abort_with_finished_states(self, soft, status):
        # Aborting an already-finished task must raise and not update status.
        task = objects.Task(mock.MagicMock(), fake=True)
        task.get_status = mock.MagicMock(return_value=status)
        task.update_status = mock.MagicMock()
        self.assertRaises(exceptions.RallyException, task.abort, soft)
        self.assertEqual(1, task.get_status.call_count)
        self.assertFalse(task.update_status.called)
    @ddt.data(True, False)
    def test_abort_with_running_state(self, soft):
        # A running task transitions to (SOFT_)ABORTING depending on `soft`.
        task = objects.Task(mock.MagicMock(), fake=True)
        task.get_status = mock.MagicMock(return_value="running")
        task.update_status = mock.MagicMock()
        task.abort(soft)
        if soft:
            status = consts.TaskStatus.SOFT_ABORTING
        else:
            status = consts.TaskStatus.ABORTING
        task.update_status.assert_called_once_with(
            status,
            allowed_statuses=(consts.TaskStatus.RUNNING,
                              consts.TaskStatus.SOFT_ABORTING)
        )
|
apache-2.0
| -7,038,468,983,800,166,000
| 39.451713
| 78
| 0.595302
| false
| 3.681599
| true
| false
| false
|
lc525/cmake-project
|
docs/ext/breathe-1.0.0/breathe/renderer/rst/doxygen/compound.py
|
1
|
26721
|
from breathe.renderer.rst.doxygen.base import Renderer
class DoxygenTypeSubRenderer(Renderer):
    """Renderer for the root doxygen node."""

    def render(self):
        """Delegate to the compounddef renderer and wrap it in a block quote."""
        sub = self.renderer_factory.create_renderer(self.data_object,
                                                    self.data_object.compounddef)
        return [self.node_factory.block_quote("", *sub.render())]
class CompoundDefTypeSubRenderer(Renderer):
    """Renderer for a compounddef: descriptions, member sections in a fixed
    canonical order, then inner classes and namespaces."""

    # Canonical output order for member sections; kinds not listed are dropped.
    section_titles = [
        "user-defined",
        "public-type",
        "public-func",
        "public-attrib",
        "public-slot",
        "signal",
        "dcop-func",
        "property",
        "event",
        "public-static-func",
        "public-static-attrib",
        "protected-type",
        "protected-func",
        "protected-attrib",
        "protected-slot",
        "protected-static-func",
        "protected-static-attrib",
        "package-type",
        "package-attrib",
        "package-static-func",
        "package-static-attrib",
        "private-type",
        "private-func",
        "private-attrib",
        "private-slot",
        "private-static-func",
        "private-static-attrib",
        "friend",
        "related",
        "define",
        "prototype",
        "typedef",
        "enum",
        "func",
        "var"
    ]

    def render(self):
        nodelist = []

        # Brief description first, then the detailed one.
        for description in (self.data_object.briefdescription,
                            self.data_object.detaileddescription):
            if description:
                sub = self.renderer_factory.create_renderer(self.data_object, description)
                nodelist.append(self.node_factory.paragraph("", "", *sub.render()))

        # Group rendered sections by kind ("user-defined" may repeat).
        section_nodelists = {}
        for sectiondef in self.data_object.sectiondef:
            sub = self.renderer_factory.create_renderer(self.data_object, sectiondef)
            section_nodelists.setdefault(sectiondef.kind, []).extend(sub.render())

        # Emit the sections in the canonical order defined above.
        for kind in self.section_titles:
            nodelist.extend(section_nodelists.get(kind, []))

        # Inner classes, each in its own paragraph when non-empty.
        for innerclass in self.data_object.innerclass:
            sub = self.renderer_factory.create_renderer(self.data_object, innerclass)
            rendered = sub.render()
            if rendered:
                nodelist.append(self.node_factory.paragraph("", "", *rendered))

        # Inner namespaces, likewise.
        for innernamespace in self.data_object.innernamespace:
            sub = self.renderer_factory.create_renderer(self.data_object, innernamespace)
            rendered = sub.render()
            if rendered:
                nodelist.append(self.node_factory.paragraph("", "", *rendered))

        return nodelist
class SectionDefTypeSubRenderer(Renderer):
    """Renderer for a sectiondef: an emphasised title followed by an indented
    block of its description and members. Empty sections render to nothing."""

    # Human-readable titles for each doxygen section kind.
    section_titles = {
        "user-defined": "User Defined",
        "public-type": "Public Type",
        "public-func": "Public Functions",
        "public-attrib": "Public Members",
        "public-slot": "Public Slot",
        "signal": "Signal",
        "dcop-func": "DCOP Function",
        "property": "Property",
        "event": "Event",
        "public-static-func": "Public Static Functions",
        "public-static-attrib": "Public Static Attributes",
        "protected-type": "Protected Types",
        "protected-func": "Protected Functions",
        "protected-attrib": "Protected Attributes",
        "protected-slot": "Protected Slots",
        "protected-static-func": "Protected Static Functions",
        "protected-static-attrib": "Protected Static Attributes",
        "package-type": "Package Types",
        "package-attrib": "Package Attributes",
        "package-static-func": "Package Static Functions",
        "package-static-attrib": "Package Static Attributes",
        "private-type": "Private Types",
        "private-func": "Private Functions",
        "private-attrib": "Private Members",
        "private-slot": "Private Slots",
        "private-static-func": "Private Static Functions",
        "private-static-attrib": "Private Static Attributes",
        "friend": "Friends",
        "related": "Related",
        "define": "Defines",
        "prototype": "Prototypes",
        "typedef": "Typedefs",
        "enum": "Enums",
        "func": "Functions",
        "var": "Variables",
    }

    def render(self):
        contents = []

        if self.data_object.description:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.description)
            contents.append(self.node_factory.paragraph("", "", *sub.render()))

        # Render every member definition in the section.
        for memberdef in self.data_object.memberdef:
            sub = self.renderer_factory.create_renderer(self.data_object, memberdef)
            contents.extend(sub.render())

        if not contents:
            return []

        text = self.section_titles[self.data_object.kind]
        # User-defined sections take their name from the XML header, falling
        # back to "Unnamed Group".  (Doxygen itself would number them
        # Group1, Group2, ... instead.)
        if self.data_object.kind == "user-defined":
            text = self.data_object.header or "Unnamed Group"
        title = self.node_factory.emphasis(text=text)
        return [title, self.node_factory.block_quote("", *contents)]
class MemberDefTypeSubRenderer(Renderer):
    """Base renderer for memberdef entries (variables, functions, typedefs...).

    Subclasses customise title(), create_target() and create_domain_id()."""

    def create_target(self, refid):
        """Create the reST target node(s) for this member's refid."""
        return self.target_handler.create_target(refid)

    def create_domain_id(self):
        """Return a sphinx-domain id for the member; empty by default."""
        return ""

    def title(self):
        """Return title nodes: the member's type (if any), a space, and its
        name in bold."""
        nodes = []
        # Variable type or function return type.
        if self.data_object.type_:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.type_)
            nodes.extend(sub.render())
        nodes.append(self.node_factory.Text(" "))
        nodes.append(self.node_factory.strong(text=self.data_object.name))
        return nodes

    def description(self):
        """Return paragraphs for the brief and detailed descriptions."""
        paragraphs = []
        for description in (self.data_object.briefdescription,
                            self.data_object.detaileddescription):
            if description:
                sub = self.renderer_factory.create_renderer(self.data_object, description)
                paragraphs.append(self.node_factory.paragraph("", "", *sub.render()))
        return paragraphs

    def render(self):
        """Render as a target/title paragraph plus an indented description."""
        # Prefix the refid with the project name to keep ids unique.
        refid = "%s%s" % (self.project_info.name(), self.data_object.id)
        domain_id = self.create_domain_id()
        title = self.title()
        target = self.create_target(refid)
        target.extend(title)
        term = self.node_factory.paragraph("", "", ids=[domain_id, refid], *target)
        definition = self.node_factory.paragraph("", "", *self.description())
        return [term, self.node_factory.block_quote("", definition)]
class FuncMemberDefTypeSubRenderer(MemberDefTypeSubRenderer):
    """Renderer for function members: registers a sphinx-domain target and
    renders the full signature (template clause, return type, name, params)."""

    def create_target(self, refid):
        # Register the function with the domain handler before creating the
        # generic target.
        self.domain_handler.create_function_target(self.data_object)
        return MemberDefTypeSubRenderer.create_target(self, refid)

    def create_domain_id(self):
        return self.domain_handler.create_function_id(self.data_object)

    def title(self):
        lines = []

        # Optional "template < ... >" clause on its own line.
        if self.data_object.templateparamlist:
            sub = self.renderer_factory.create_renderer(
                self.data_object,
                self.data_object.templateparamlist
            )
            template = [self.node_factory.Text("template < ")]
            template.extend(sub.render())
            template.append(self.node_factory.Text(" >"))
            # The leading empty Text works around emphasis being rendered as
            # plain text otherwise (same trick below).
            lines.append(
                self.node_factory.line("", self.node_factory.Text(""), *template))

        # Return type and name, then the parenthesised parameter list.
        signature = MemberDefTypeSubRenderer.title(self)
        signature.append(self.node_factory.Text("("))
        for index, parameter in enumerate(self.data_object.param):
            if index:
                signature.append(self.node_factory.Text(", "))
            sub = self.renderer_factory.create_renderer(self.data_object, parameter)
            signature.extend(sub.render())
        signature.append(self.node_factory.Text(")"))
        lines.append(
            self.node_factory.line("", self.node_factory.Text(""), *signature))

        # Wrap everything in a line block.
        return [self.node_factory.line_block("", *lines)]
class DefineMemberDefTypeSubRenderer(MemberDefTypeSubRenderer):
    """Renderer for #define members (macros), including macro parameters.

    The redundant description() override that merely delegated to the base
    class has been dropped; the inherited implementation is identical.
    """

    def title(self):
        """Return the bold macro name plus a parenthesised parameter list,
        when the macro takes parameters."""
        title = [self.node_factory.strong(text=self.data_object.name)]

        if self.data_object.param:
            title.append(self.node_factory.Text("("))
            for i, parameter in enumerate(self.data_object.param):
                if i:
                    title.append(self.node_factory.Text(", "))
                renderer = self.renderer_factory.create_renderer(self.data_object, parameter)
                title.extend(renderer.render())
            title.append(self.node_factory.Text(")"))

        return title
class EnumMemberDefTypeSubRenderer(MemberDefTypeSubRenderer):
    """Renderer for enum members, listing their values as bullets."""

    def title(self):
        # Doxygen names anonymous enums "@<number>".
        if self.data_object.name.startswith("@"):
            return [self.node_factory.strong(text="Anonymous enum")]
        return [self.node_factory.strong(text="%s enum" % self.data_object.name)]

    def description(self):
        nodes = MemberDefTypeSubRenderer.description(self)
        # "Values:" heading followed by a bullet list of the enum values.
        heading = self.node_factory.emphasis("", self.node_factory.Text("Values:"))
        nodes.append(self.node_factory.paragraph("", "", heading))
        items = []
        for value in self.data_object.enumvalue:
            sub = self.renderer_factory.create_renderer(self.data_object, value)
            items.extend(sub.render())
        nodes.append(self.node_factory.bullet_list("", classes=["breatheenumvalues"], *items))
        return nodes
class TypedefMemberDefTypeSubRenderer(MemberDefTypeSubRenderer):
    """Renderer for typedef members."""

    def title(self):
        nodes = [self.node_factory.Text("typedef ")]
        nodes.extend(MemberDefTypeSubRenderer.title(self))
        # argsstring carries e.g. array or function-pointer syntax.
        if self.data_object.argsstring:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.argsstring)
            nodes.extend(sub.render())
        return nodes
class VariableMemberDefTypeSubRenderer(MemberDefTypeSubRenderer):
    """Renderer for variable members."""

    def title(self):
        nodes = MemberDefTypeSubRenderer.title(self)
        # argsstring carries e.g. array dimensions.
        if self.data_object.argsstring:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.argsstring)
            nodes.extend(sub.render())
        return nodes
class EnumvalueTypeSubRenderer(Renderer):
    """Renderer for a single enum value: name, optional initializer, then its
    descriptions, emitted as one bullet-list item."""

    def render(self):
        nodes = [self.node_factory.literal(text=self.data_object.name)]

        # Optional " = <initializer>" in literal style.
        if self.data_object.initializer:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.initializer)
            initializer = [self.node_factory.Text(" = ")]
            initializer.extend(sub.render())
            nodes.append(self.node_factory.literal("", "", *initializer))

        # Separator between the value and its description.
        nodes.append(self.node_factory.Text(" - "))

        for description in (self.data_object.briefdescription,
                            self.data_object.detaileddescription):
            if description:
                sub = self.renderer_factory.create_renderer(self.data_object, description)
                nodes.extend(sub.render())

        # Build the list item
        return [self.node_factory.list_item("", *nodes)]
class DescriptionTypeSubRenderer(Renderer):
    """Renderer for a description node: the concatenation of its children."""

    def render(self):
        rendered = []
        for child in self.data_object.content_:
            sub = self.renderer_factory.create_renderer(self.data_object, child)
            rendered.extend(sub.render())
        return rendered
class LinkedTextTypeSubRenderer(Renderer):
    """Renderer for linked text: children joined with single spaces."""

    def render(self):
        nodes = []
        for index, entry in enumerate(self.data_object.content_):
            # Space-separate consecutive children.
            if index:
                nodes.append(self.node_factory.Text(" "))
            sub = self.renderer_factory.create_renderer(self.data_object, entry)
            nodes.extend(sub.render())
        return nodes
class ParamTypeSubRenderer(Renderer):
    """Renderer for a parameter: its type, declared/defined name, and an
    optional default value."""

    def __init__(self, output_defname, *args):
        Renderer.__init__(self, *args)
        # Whether to also emit the definition name (defname).
        self.output_defname = output_defname

    def render(self):
        nodes = []

        # Parameter type
        if self.data_object.type_:
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.type_)
            nodes.extend(sub.render())

        # Parameter name(s), space-separated from anything already emitted.
        if self.data_object.declname:
            if nodes:
                nodes.append(self.node_factory.Text(" "))
            nodes.append(self.node_factory.Text(self.data_object.declname))
        if self.output_defname and self.data_object.defname:
            if nodes:
                nodes.append(self.node_factory.Text(" "))
            nodes.append(self.node_factory.Text(self.data_object.defname))

        # Default value
        if self.data_object.defval:
            nodes.append(self.node_factory.Text(" = "))
            sub = self.renderer_factory.create_renderer(self.data_object,
                                                        self.data_object.defval)
            nodes.extend(sub.render())

        return nodes
class DocRefTextTypeSubRenderer(Renderer):
    """Renderer for a cross-reference: rendered children wrapped in a sphinx
    pending_xref node."""

    def render(self):
        inner = []
        for item in self.data_object.content_:
            sub = self.renderer_factory.create_renderer(self.data_object, item)
            inner.extend(sub.render())
        for item in self.data_object.para:
            sub = self.renderer_factory.create_renderer(self.data_object, item)
            inner.extend(sub.render())

        # Prefix with the project name so refids stay unique across projects.
        refid = "%s%s" % (self.project_info.name(), self.data_object.refid)
        xref = self.node_factory.pending_xref(
            "",
            reftype="ref",
            refdomain="std",
            refexplicit=True,
            refid=refid,
            reftarget=refid,
            *inner
        )
        return [xref]
class DocParaTypeSubRenderer(Renderer):
    """Renderer for a paragraph: inline content followed by a definition list
    of simple sections (returns etc.) and parameter/exception lists."""

    def render(self):
        contents = []
        for item in self.data_object.content:  # Description
            sub = self.renderer_factory.create_renderer(self.data_object, item)
            contents.extend(sub.render())

        definitions = []
        for item in self.data_object.simplesects:  # Returns, user par's, etc
            sub = self.renderer_factory.create_renderer(self.data_object, item)
            definitions.extend(sub.render())
        for entry in self.data_object.parameterlist:  # Parameters/Exceptions
            sub = self.renderer_factory.create_renderer(self.data_object, entry)
            definitions.extend(sub.render())

        if definitions:
            contents.append(self.node_factory.definition_list("", *definitions))

        return [self.node_factory.paragraph("", "", *contents)]
class DocMarkupTypeSubRenderer(Renderer):
    """Renderer for inline markup: wraps rendered children in the node type
    supplied by ``creator`` (e.g. emphasis, strong)."""

    def __init__(self, creator, *args):
        Renderer.__init__(self, *args)
        self.creator = creator

    def render(self):
        children = []
        for item in self.data_object.content_:
            sub = self.renderer_factory.create_renderer(self.data_object, item)
            children.extend(sub.render())
        return [self.creator("", "", *children)]
class DocParamListTypeSubRenderer(Renderer):
    """Parameter/exception documentation as one definition-list item."""

    # Doxygen "kind" attribute -> heading text shown in the output.
    lookup = {
        "param" : "Parameters",
        "exception" : "Exceptions",
        "templateparam" : "Templates",
        "retval" : "Return Value",
    }

    def render(self):
        items = []
        for parameter_item in self.data_object.parameteritem:
            items.extend(
                self.renderer_factory.create_renderer(self.data_object, parameter_item).render()
            )

        # Field-list style entry: a bold heading over a bulleted list of
        # the individual parameter descriptions.
        bullets = self.node_factory.bullet_list("", classes=["breatheparameterlist"], *items)
        heading = self.node_factory.strong("", self.lookup[self.data_object.kind])
        term = self.node_factory.term("", "", heading)
        definition = self.node_factory.definition('', bullets)
        return [self.node_factory.definition_list_item('', term, definition)]
class DocParamListItemSubRenderer(Renderer):
    """Render one parameter as a list item: ``name - description``."""

    def render(self):
        # Parameter name(s), set in literal (monospace) text.
        name_nodes = []
        for name in self.data_object.parameternamelist:
            name_nodes.extend(
                self.renderer_factory.create_renderer(self.data_object, name).render()
            )
        term = self.node_factory.literal("", "", *name_nodes)
        separator = self.node_factory.Text(" - ")

        # Description is optional.
        description = []
        if self.data_object.parameterdescription:
            description = self.renderer_factory.create_renderer(
                self.data_object, self.data_object.parameterdescription
            ).render()

        return [self.node_factory.list_item("", term, separator, *description)]
class DocParamNameListSubRenderer(Renderer):
    """Render the list of names belonging to a single parameter."""

    def render(self):
        nodes = []
        for name in self.data_object.parametername:
            nodes += self.renderer_factory.create_renderer(self.data_object, name).render()
        return nodes
class DocParamNameSubRenderer(Renderer):
    """Render the content of a single parameter name."""

    def render(self):
        nodes = []
        for child in self.data_object.content_:
            nodes += self.renderer_factory.create_renderer(self.data_object, child).render()
        return nodes
class DocSect1TypeSubRenderer(Renderer):
    # Sections (sect1) are deliberately not rendered: emit no nodes.
    def render(self):
        return []
class DocSimpleSectTypeSubRenderer(Renderer):
    """Render "simple sect" documentation (Warning, Note, Returns, etc.)
    as a definition-list item with a bold title."""

    def title(self):
        # Default title: the capitalised doxygen kind, e.g. "warning" -> "Warning".
        text = self.node_factory.Text(self.data_object.kind.capitalize())
        return [self.node_factory.strong("", text)]

    def render(self):
        paragraphs = []
        for para in self.data_object.para:
            rendered = self.renderer_factory.create_renderer(self.data_object, para).render()
            paragraphs.append(self.node_factory.paragraph("", "", *rendered))

        term = self.node_factory.term("", "", *self.title())
        definition = self.node_factory.definition("", *paragraphs)
        return [self.node_factory.definition_list_item("", term, definition)]
class ParDocSimpleSectTypeSubRenderer(DocSimpleSectTypeSubRenderer):
    # "par" sections carry their own title element; render that instead of
    # the capitalised kind used by the base class.
    def title(self):
        renderer = self.renderer_factory.create_renderer(self.data_object, self.data_object.title)
        return [self.node_factory.strong( "", *renderer.render() )]
class DocTitleTypeSubRenderer(Renderer):
    """Render the content of a title node."""

    def render(self):
        nodes = []
        for child in self.data_object.content_:
            nodes += self.renderer_factory.create_renderer(self.data_object, child).render()
        return nodes
class DocForumlaTypeSubRenderer(Renderer):
    """Render a Doxygen formula as a Sphinx math node.

    Doxygen wraps inline formulas in ``$...$`` and display formulas in
    ``\\[...\\]``; both wrappers are stripped before the LaTeX source is
    stored on the node, mirroring sphinx.ext.mathbase's directive code.
    """

    def render(self):
        nodelist = []
        for item in self.data_object.content_:
            latex = item.getValue()

            # Somewhat hacky if statements to strip out the doxygen markup
            # that slips through.
            if latex.startswith("$") and latex.endswith("$"):
                latex = latex[1:-1]
                # Inline: create a math node like the :math: role does.
                node = self.node_factory.math()
            else:
                # Multiline display math.
                node = self.node_factory.displaymath()

            # Fix: use raw strings -- the previous "\[" / "\]" were invalid
            # escape sequences that only matched a literal backslash by
            # accident (and warn on modern Python).
            if latex.startswith(r"\[") and latex.endswith(r"\]"):
                latex = latex[2:-2]

            # Core of the mathbase "math" directive handling, taken from
            # sphinx.ext.mathbase.
            node["latex"] = latex
            # Required parameters which we don't have values for.
            node["label"] = None
            node["nowrap"] = False
            node["docname"] = self.state.document.settings.env.docname
            nodelist.append(node)

        return nodelist
class TemplateParamListRenderer(Renderer):
    """Render a template parameter list as comma-separated inline nodes."""

    def render(self):
        nodes = []
        first = True
        for param in self.data_object.param:
            # Separate consecutive parameters with ", ".
            if not first:
                nodes.append(self.node_factory.Text(", "))
            first = False
            nodes.extend(
                self.renderer_factory.create_renderer(self.data_object, param).render()
            )
        return nodes
class IncTypeSubRenderer(Renderer):
    """Render an ``#include`` line as emphasised text: quoted for local
    includes, angle-bracketed for system includes."""

    def render(self):
        header = self.data_object.content_[0].getValue()
        if self.data_object.local == u"yes":
            text = '#include "%s"' % header
        else:
            text = '#include <%s>' % header
        return [self.node_factory.emphasis(text=text)]
class RefTypeSubRenderer(Renderer):
    """Render an inner reference (innerclass/innernamespace) by parsing the
    referenced compound's own XML file and rendering its contents under a
    small "kind name" heading paragraph."""

    # Doxygen node name -> label shown before the entity name.
    ref_types = {
        "innerclass" : "class",
        "innernamespace" : "namespace",
    }

    def __init__(self, compound_parser, *args):
        Renderer.__init__(self, *args)
        # Parser used to load the referenced compound's XML description.
        self.compound_parser = compound_parser

    def render(self):
        # Read in the corresponding xml file and process
        file_data = self.compound_parser.parse(self.data_object.refid)
        data_renderer = self.renderer_factory.create_renderer(self.data_object, file_data)
        child_nodes = data_renderer.render()

        # Only render the header with refs if we've definitely got content to
        # put underneath it. Otherwise return an empty list
        if child_nodes:
            refid = "%s%s" % (self.project_info.name(), self.data_object.refid)
            # NOTE(review): the target nodes created here are discarded by the
            # "nodelist = []" below -- confirm whether create_target is kept
            # only for its side effects or the targets were meant to be output.
            nodelist = self.target_handler.create_target(refid)

            # Set up the title and a reference for it (refid)
            type_ = self.ref_types[self.data_object.node_name]
            kind = self.node_factory.emphasis(text=type_)

            # Display only the last component of a qualified name.
            name_text = self.data_object.content_[0].getValue()
            name_text = name_text.rsplit("::", 1)[-1]
            name = self.node_factory.strong(text=name_text)

            nodelist = []
            nodelist.append(
                self.node_factory.paragraph(
                    "",
                    "",
                    kind,
                    self.node_factory.Text(" "),
                    name,
                    ids=[refid]
                )
            )
            nodelist.extend(child_nodes)
            return nodelist

        return []
class VerbatimTypeSubRenderer(Renderer):
    """Render a Doxygen verbatim block.

    Plain verbatim text becomes a preformatted literal block; text that
    starts with ``embed:rst`` is parsed as reStructuredText instead.
    """

    def __init__(self, content_creator, *args):
        Renderer.__init__(self, *args)
        # Callable converting raw verbatim text into nested-parse input.
        self.content_creator = content_creator

    def render(self):
        if not self.data_object.text.strip().startswith("embed:rst"):
            # Remove trailing new lines. Purely subjective call from viewing results
            text = self.data_object.text.rstrip()

            # Present the text as a preformatted literal block.
            return [self.node_factory.literal_block(text, text)]

        rst = self.content_creator(self.data_object.text)

        # Parent node for the generated node subtree
        node = self.node_factory.paragraph()
        node.document = self.state.document

        # Generate node subtree
        self.state.nested_parse(rst, 0, node)

        # NOTE(review): this branch returns a bare node while every other
        # renderer (and the branch above) returns a list -- confirm callers
        # handle both shapes.
        return node
class MixedContainerRenderer(Renderer):
    """Unwrap a mixed-content container and render its underlying value."""

    def render(self):
        return self.renderer_factory.create_renderer(
            self.data_object, self.data_object.getValue()
        ).render()
|
bsd-2-clause
| -5,708,813,989,462,306,000
| 31.116587
| 117
| 0.587066
| false
| 4.394179
| false
| false
| false
|
fastcoinproject/fastcoin
|
contrib/bitrpc/bitrpc.py
|
1
|
9669
|
from jsonrpc import ServiceProxy
import sys
import string
import getpass
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======

# Connect to the local fastcoin RPC server on the default port (9332),
# embedding the credentials in the URL only when a password is configured.
if rpcpass == "":
	access = ServiceProxy("http://127.0.0.1:9332")
else:
	access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")

# The first command-line argument selects which RPC command to run.
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "encryptwallet":
try:
pwd = getpass.getpass(prompt="Enter passphrase: ")
pwd2 = getpass.getpass(prompt="Repeat passphrase: ")
if pwd == pwd2:
access.encryptwallet(pwd)
print "\n---Wallet encrypted. Server stopping, restart to run with encrypted wallet---\n"
else:
print "\n---Passphrases do not match---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = getpass.getpass(prompt="Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = getpass.getpass(prompt="Enter old wallet passphrase: ")
pwd2 = getpass.getpass(prompt="Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
mit
| -1,961,335,208,077,969,200
| 27.522124
| 101
| 0.568104
| false
| 3.954601
| false
| false
| false
|
insomnia-lab/calibre
|
src/calibre/ebooks/docx/char_styles.py
|
1
|
8194
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
from collections import OrderedDict
from calibre.ebooks.docx.block_styles import ( # noqa
inherit, simple_color, LINE_STYLES, simple_float, binary_property, read_shd)
from calibre.ebooks.docx.names import XPath, get
# Read from XML {{{
def read_text_border(parent, dest):
    """Parse the run border (<w:bdr>) into border/padding attributes on *dest*."""
    color = style = width = pad = inherit
    elements = XPath('./w:bdr')(parent)
    if elements:
        # A bare <w:bdr> element implies a 1pt solid border in the
        # automatic colour even before any attributes are read.
        color = simple_color('auto')
        style = 'solid'
        width = 1
    for element in elements:
        val = get(element, 'w:color')
        if val is not None:
            color = simple_color(val)
        val = get(element, 'w:val')
        if val is not None:
            style = LINE_STYLES.get(val, 'solid')
        val = get(element, 'w:space')
        if val is not None:
            try:
                pad = float(val)
            except (ValueError, TypeError):
                pass
        val = get(element, 'w:sz')
        if val is not None:
            # we dont care about art borders (they are only used for page
            # borders); clamp so that a border of less than 1pt (which
            # WebKit would not render) is never produced.
            try:
                width = min(96, max(8, float(val))) / 8
            except (ValueError, TypeError):
                pass
    dest.border_color = color
    dest.border_style = style
    dest.border_width = width
    dest.padding = pad
def read_color(parent, dest):
    """Read the last non-empty <w:color w:val> into ``dest.color``."""
    value = inherit
    for elem in XPath('./w:color[@w:val]')(parent):
        raw = get(elem, 'w:val')
        if raw:
            value = simple_color(raw)
    dest.color = value
def read_highlight(parent, dest):
    """Read the last non-empty <w:highlight w:val> into ``dest.highlight``."""
    value = inherit
    for elem in XPath('./w:highlight[@w:val]')(parent):
        raw = get(elem, 'w:val')
        if not raw:
            continue
        # Word's "none" means no highlight: map it to transparent.
        value = 'transparent' if raw == 'none' else raw
    dest.highlight = value
def read_lang(parent, dest):
    """Read the run language; hex values are treated as Windows LCID codes
    and mapped to language names via the lcid table."""
    value = inherit
    for elem in XPath('./w:lang[@w:val]')(parent):
        raw = get(elem, 'w:val')
        if not raw:
            continue
        try:
            code = int(raw, 16)
        except (ValueError, TypeError):
            # Not a hex LCID: use the value verbatim.
            value = raw
        else:
            from calibre.ebooks.docx.lcid import lcid
            mapped = lcid.get(code, None)
            if mapped:
                value = mapped
    dest.lang = value
def read_letter_spacing(parent, dest):
    """Read <w:spacing w:val> (scaled by 0.05) into ``dest.letter_spacing``."""
    value = inherit
    for elem in XPath('./w:spacing[@w:val]')(parent):
        parsed = simple_float(get(elem, 'w:val'), 0.05)
        if parsed is not None:
            value = parsed
    dest.letter_spacing = value
def read_sz(parent, dest):
    """Read <w:sz w:val> (half-points, scaled by 0.5) into ``dest.font_size``."""
    value = inherit
    for elem in XPath('./w:sz[@w:val]')(parent):
        parsed = simple_float(get(elem, 'w:val'), 0.5)
        if parsed is not None:
            value = parsed
    dest.font_size = value
def read_underline(parent, dest):
    """Map <w:u w:val> to text_decoration: 'none' passes through, any other
    underline style collapses to plain 'underline'."""
    value = inherit
    for elem in XPath('./w:u[@w:val]')(parent):
        raw = get(elem, 'w:val')
        if raw:
            value = 'none' if raw == 'none' else 'underline'
    dest.text_decoration = value
def read_vert_align(parent, dest):
    """Read subscript/superscript alignment from <w:vertAlign w:val>."""
    value = inherit
    allowed = {'baseline', 'subscript', 'superscript'}
    for elem in XPath('./w:vertAlign[@w:val]')(parent):
        raw = get(elem, 'w:val')
        if raw and raw in allowed:
            value = raw
    dest.vert_align = value
def read_font_family(parent, dest):
    """Read the run font from <w:rFonts>: a theme font (wrapped in pipes
    for later resolution) takes precedence over the plain ascii name."""
    value = inherit
    for elem in XPath('./w:rFonts')(parent):
        theme = get(elem, 'w:asciiTheme')
        if theme:
            value = '|%s|' % theme
        else:
            ascii_name = get(elem, 'w:ascii')
            if ascii_name:
                value = ascii_name
    dest.font_family = value
# }}}
class RunStyle(object):
    """Character (run-level) formatting parsed from a <w:rPr> element,
    convertible to CSS declarations via the :attr:`css` property."""

    # Every property a run style can carry; ``inherit`` marks "not set here".
    all_properties = {
        'b', 'bCs', 'caps', 'cs', 'dstrike', 'emboss', 'i', 'iCs', 'imprint',
        'rtl', 'shadow', 'smallCaps', 'strike', 'vanish', 'webHidden',
        'border_color', 'border_style', 'border_width', 'padding', 'color', 'highlight', 'background_color',
        'letter_spacing', 'font_size', 'text_decoration', 'vert_align', 'lang', 'font_family',
    }

    # Boolean properties that Word treats as toggles when styles combine.
    toggle_properties = {
        'b', 'bCs', 'caps', 'emboss', 'i', 'iCs', 'imprint', 'shadow', 'smallCaps', 'strike', 'dstrike', 'vanish',
    }

    def __init__(self, rPr=None):
        # Name of the linked character style (<w:rStyle>), if any.
        self.linked_style = None
        if rPr is None:
            # No XML: every property is inherited.
            for p in self.all_properties:
                setattr(self, p, inherit)
        else:
            # On/off (binary) properties.
            for p in (
                'b', 'bCs', 'caps', 'cs', 'dstrike', 'emboss', 'i', 'iCs', 'imprint', 'rtl', 'shadow',
                'smallCaps', 'strike', 'vanish', 'webHidden',
            ):
                setattr(self, p, binary_property(rPr, p))

            # Valued properties, each parsed by its read_* helper.
            for x in ('text_border', 'color', 'highlight', 'shd', 'letter_spacing', 'sz', 'underline', 'vert_align', 'lang', 'font_family'):
                f = globals()['read_%s' % x]
                f(rPr, self)

            for s in XPath('./w:rStyle[@w:val]')(rPr):
                self.linked_style = get(s, 'w:val')

        # Lazily computed CSS cache; see :attr:`css`.
        self._css = None

    def update(self, other):
        """Overlay *other* onto self: any property that other sets wins."""
        for prop in self.all_properties:
            nval = getattr(other, prop)
            if nval is not inherit:
                setattr(self, prop, nval)
        if other.linked_style is not None:
            self.linked_style = other.linked_style

    def resolve_based_on(self, parent):
        """Fill still-inherited properties from the parent (basedOn) style."""
        for p in self.all_properties:
            val = getattr(self, p)
            if val is inherit:
                setattr(self, p, getattr(parent, p))

    def get_border_css(self, ans):
        """Copy border properties into the css dict *ans* and return it.

        Bug fix: this previously returned None, which made
        :meth:`same_border` always compare ``None == None`` and therefore
        report every pair of styles as having the same border.
        """
        for x in ('color', 'style', 'width'):
            val = getattr(self, 'border_'+x)
            if x == 'width' and val is not inherit:
                val = '%.3gpt' % val
            if val is not inherit:
                ans['border-%s' % x] = val
        return ans

    def clear_border_css(self):
        # Reset all border properties to inherited.
        for x in ('color', 'style', 'width'):
            setattr(self, 'border_'+x, inherit)

    @property
    def css(self):
        """CSS declarations for this run style, computed once and cached."""
        if self._css is None:
            c = self._css = OrderedDict()
            # strike and dstrike both map onto line-through.
            td = set()
            if self.text_decoration is not inherit:
                td.add(self.text_decoration)
            if self.strike and self.strike is not inherit:
                td.add('line-through')
            if self.dstrike and self.dstrike is not inherit:
                td.add('line-through')
            if td:
                c['text-decoration'] = ' '.join(td)
            if self.caps is True:
                c['text-transform'] = 'uppercase'
            if self.i is True:
                c['font-style'] = 'italic'
            if self.shadow and self.shadow is not inherit:
                c['text-shadow'] = '2px 2px'
            if self.smallCaps is True:
                c['font-variant'] = 'small-caps'
            if self.vanish is True or self.webHidden is True:
                # Hidden text is simply not displayed.
                c['display'] = 'none'

            self.get_border_css(c)
            if self.padding is not inherit:
                c['padding'] = '%.3gpt' % self.padding

            for x in ('color', 'background_color'):
                val = getattr(self, x)
                if val is not inherit:
                    c[x.replace('_', '-')] = val

            for x in ('letter_spacing', 'font_size'):
                val = getattr(self, x)
                if val is not inherit:
                    c[x.replace('_', '-')] = '%.3gpt' % val

            if self.highlight is not inherit and self.highlight != 'transparent':
                c['background-color'] = self.highlight

            # NOTE(review): unlike the checks above, this does not guard
            # against ``inherit`` -- confirm inherit is falsy here.
            if self.b:
                c['font-weight'] = 'bold'

            if self.font_family is not inherit:
                c['font-family'] = self.font_family

        return self._css

    def same_border(self, other):
        """Return True iff both styles produce identical border CSS."""
        return self.get_border_css({}) == other.get_border_css({})
|
gpl-3.0
| 1,767,311,756,068,505,300
| 32.720165
| 140
| 0.520625
| false
| 3.633703
| false
| false
| false
|
nCoda/macOS
|
.eggs/py2app-0.14-py2.7.egg/py2app/recipes/PIL/prescript.py
|
1
|
1297
|
def _recipes_pil_prescript(plugins):
    """py2app bootstrap: replace ``Image.init`` so a frozen app can load
    the bundled PIL image *plugins*, whether PIL is installed as the old
    flat ``Image`` module or as the ``PIL`` package (Pillow)."""
    try:
        import Image                # old flat PIL layout
        have_PIL = False
    except ImportError:
        from PIL import Image       # packaged layout (Pillow)
        have_PIL = True

    import sys

    def init():
        # Already fully initialised: nothing to do.
        if Image._initialized >= 2:
            return

        if have_PIL:
            try:
                import PIL.JpegPresets
                # Also expose the module under its flat legacy name.
                sys.modules['JpegPresets'] = PIL.JpegPresets
            except ImportError:
                pass

        for plugin in plugins:
            try:
                if have_PIL:
                    try:
                        # First try absolute import through PIL (for
                        # Pillow support) only then try relative imports
                        m = __import__(
                            'PIL.' + plugin, globals(), locals(), [])
                        m = getattr(m, plugin)
                        sys.modules[plugin] = m
                        continue
                    except ImportError:
                        pass
                __import__(plugin, globals(), locals(), [])
            except ImportError:
                if Image.DEBUG:
                    print('Image: failed to import')

        # Mark initialisation complete only if some codecs registered.
        if Image.OPEN or Image.SAVE:
            Image._initialized = 2
            return 1

    Image.init = init
|
gpl-3.0
| 5,566,119,026,648,664,000
| 27.822222
| 72
| 0.43485
| false
| 5.404167
| false
| false
| false
|
Caoimhinmg/PmagPy
|
programs/lowrie.py
|
1
|
4077
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from builtins import input
from builtins import range
from past.utils import old_div
import sys
import matplotlib
if matplotlib.get_backend() != "TKAgg":
matplotlib.use("TKAgg")
import pmagpy.pmag as pmag
import pmagpy.pmagplotlib as pmagplotlib
def main():
    """
    NAME
        lowrie.py

    DESCRIPTION
        plots intensity decay curves for Lowrie experiments

    SYNTAX
        lowrie -h [command line options]

    INPUT
        takes SIO formatted input files

    OPTIONS
        -h prints help message and quits
        -f FILE: specify input file
        -N do not normalize by maximum magnetization
        -fmt [svg, pdf, eps, png] specify fmt, default is svg
        -sav save plots and quit
    """
    fmt,plot='svg',0
    FIG={} # plot dictionary
    FIG['lowrie']=1 # demag is figure 1
    pmagplotlib.plot_init(FIG['lowrie'],6,6)
    norm=1 # default is to normalize by maximum axis
    if len(sys.argv)>1:
        if '-h' in sys.argv:
            print(main.__doc__)
            sys.exit()
        if '-N' in sys.argv: norm=0 # don't normalize
        if '-sav' in sys.argv: plot=1 # save plots and quit without prompting
        if '-fmt' in sys.argv: # sets output image format
            ind=sys.argv.index("-fmt")
            fmt=sys.argv[ind+1]
        if '-f' in sys.argv: # sets input filename
            ind=sys.argv.index("-f")
            in_file=sys.argv[ind+1]
        else:
            print(main.__doc__)
            print('you must supply a file name')
            sys.exit()
    else:
        print(main.__doc__)
        print('you must supply a file name')
        sys.exit()
    data=open(in_file).readlines() # open the SIO format file
    PmagRecs=[] # set up a list for the results
    # Expected whitespace-separated columns in each SIO line.
    keys=['specimen','treatment','csd','M','dec','inc']
    for line in data:
        PmagRec={}
        rec=line.replace('\n','').split()
        for k in range(len(keys)):
            PmagRec[keys[k]]=rec[k]
        PmagRecs.append(PmagRec)
    specs=pmag.get_dictkey(PmagRecs,'specimen','')
    sids=[]
    for spec in specs:
        if spec not in sids:sids.append(spec) # get list of unique specimen names
    for spc in sids: # step through the specimen names
        print(spc)
        specdata=pmag.get_dictitem(PmagRecs,'specimen',spc,'T') # get all this one's data
        DIMs,Temps=[],[]
        for dat in specdata: # step through the data
            # dec, inc, moment (moment converted from mA/m-style units by 1e-3)
            DIMs.append([float(dat['dec']),float(dat['inc']),float(dat['M'])*1e-3])
            Temps.append(float(dat['treatment']))
        carts=pmag.dir2cart(DIMs).transpose()
        #if norm==1: # want to normalize
        #    nrm=max(max(abs(carts[0])),max(abs(carts[1])),max(abs(carts[2]))) # by maximum of x,y,z values
        #    ylab="M/M_max"
        if norm==1: # want to normalize
            nrm=(DIMs[0][2]) # normalize by NRM
            ylab="M/M_o"
        else:
            nrm=1. # don't normalize
            ylab="Magnetic moment (Am^2)"
        xlab="Temperature (C)"
        # Plot each cartesian component as a line plus symbols.
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[0]),nrm),sym='r-')
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[0]),nrm),sym='ro') # X direction
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[1]),nrm),sym='c-')
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[1]),nrm),sym='cs') # Y direction
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[2]),nrm),sym='k-')
        pmagplotlib.plotXY(FIG['lowrie'],Temps,old_div(abs(carts[2]),nrm),sym='k^',title=spc,xlab=xlab,ylab=ylab) # Z direction
        files={'lowrie':'lowrie:_'+spc+'_.'+fmt}
        if plot==0:
            pmagplotlib.drawFIGS(FIG)
            ans=input('S[a]ve figure? [q]uit, <return> to continue ')
            if ans=='a':
                pmagplotlib.saveP(FIG,files)
            elif ans=='q':
                sys.exit()
        else:
            pmagplotlib.saveP(FIG,files)
        # Clear the figure before plotting the next specimen.
        pmagplotlib.clearFIG(FIG['lowrie'])

if __name__ == "__main__":
    main()
|
bsd-3-clause
| 519,150,418,447,072,960
| 35.72973
| 127
| 0.575914
| false
| 3.238284
| false
| false
| false
|
lsaffre/blog
|
docs/blog/2017/2.py
|
1
|
3105
|
import sys
from PyQt5.QtWidgets import (QApplication, QWidget, QPushButton,
QMessageBox, QDesktopWidget, QMainWindow,
QAction, qApp, QTextEdit, QHBoxLayout,
QVBoxLayout)
# from PyQt5.QtCore import QCoreApplication
from PyQt5.QtGui import QIcon
class DetailForm(QWidget):
    """Small secondary window with OK/Cancel buttons pinned bottom-right."""

    def __init__(self, title="Detail Form"):
        super().__init__()
        self.setWindowTitle(title)
        self.initUI()

    def initUI(self):
        # A stretch before the buttons pushes them to the right edge.
        button_row = QHBoxLayout()
        button_row.addStretch(1)
        button_row.addWidget(QPushButton("OK"))
        button_row.addWidget(QPushButton("Cancel"))

        # A stretch above the row pushes it to the bottom edge.
        outer = QVBoxLayout()
        outer.addStretch(1)
        outer.addLayout(button_row)
        self.setLayout(outer)

        self.setGeometry(300, 300, 300, 150)
        # The caller decides when to show() this window.
class Example(QMainWindow):
    """Main window: a text editor with menu bar, toolbar and status bar."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        # A QTextEdit fills the entire central area.
        textEdit = QTextEdit()
        self.setCentralWidget(textEdit)

        self.setGeometry(300, 300, 300, 220)
        self.center()
        self.setWindowTitle('2.py')
        self.setWindowIcon(QIcon('../../.static/logo.png'))
        self.setToolTip('This is a <b>QWidget</b> widget')

        menubar = self.menuBar()
        fileMenu = menubar.addMenu('&File')

        # File -> Exit (Ctrl+Q) quits the application.
        exitAction = QAction(QIcon('exit.png'), '&Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.setStatusTip('Exit application')
        exitAction.triggered.connect(qApp.quit)
        fileMenu.addAction(exitAction)

        # File -> Detail opens the secondary DetailForm window.
        a = QAction(QIcon('detail.png'), '&Detail', self)
        a.triggered.connect(self.show_detail)
        fileMenu.addAction(a)

        # Toolbar duplicates the Exit action.
        self.toolbar = self.addToolBar('Exit')
        self.toolbar.addAction(exitAction)

        # btn = QPushButton('Quit', self)
        # btn.clicked.connect(QCoreApplication.instance().quit)
        # btn.setToolTip('This is a <b>QPushButton</b> widget')
        # btn.resize(btn.sizeHint())
        # btn.move(50, 50)

        self.show()
        self.statusBar().showMessage('Ready')

    def show_detail(self, event):
        # Keep a reference on self so the window is not garbage collected.
        self.detail_form = DetailForm()
        self.detail_form.show()

    def closeEvent(self, event):
        # Ask for confirmation before allowing the window to close.
        reply = QMessageBox.question(self, 'MessageBox',
                                     "This will close the window! Are you sure?",
                                     QMessageBox.Yes |
                                     QMessageBox.No, QMessageBox.Yes)
        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()

    def center(self):
        # Move the window so its frame is centred on the available screen.
        qr = self.frameGeometry()
        cp = QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, construct the main
    # window (which shows itself), and run the event loop until exit.
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
|
gpl-3.0
| 2,632,101,182,666,969,000
| 27.75
| 70
| 0.544605
| false
| 4.26511
| false
| false
| false
|
stphivos/django-mock-queries
|
examples/users/users/settings.py
|
1
|
3188
|
"""
Django settings for users project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import django
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3sy3!6p&s&g2@t922(%@4z+(np+yc#amz0id80vyk03$x8&38$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'analytics',
)
# Django 1.x uses the MIDDLEWARE_CLASSES setting; Django >= 2 renamed it
# to MIDDLEWARE (and dropped SessionAuthenticationMiddleware), so choose
# based on the installed Django version.
if django.VERSION[0] == 1:
    MIDDLEWARE_CLASSES = (
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
        'django.middleware.security.SecurityMiddleware',
    )
else:
    MIDDLEWARE = [
        'django.middleware.security.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    ]
ROOT_URLCONF = 'users.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'users.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
mit
| 4,385,639,867,455,560,700
| 26.721739
| 73
| 0.686324
| false
| 3.651775
| false
| false
| false
|
crichardson17/emgtemp
|
Metals_Sims/Dusty_sims/z_0.5_2.0/z_0.5_2.0_metal_sim_plots.py
|
1
|
18682
|
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.colors as colors
import urllib
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import matplotlib.cm as cm
# Marker colors for the three temperature bins (see getColor below) and for
# the raw Cloudy simulation points.
Low_Temp_Color = 'k'
Mid_Temp_Color = 'g'
High_Temp_Color = 'r'
#Temp_Color = 0.5
Cloudy_Sim_Color = 'cyan'
markersize = 20
# Observed SDSS emission-line fluxes (dereddened sample with [OIII]4363 S/N > 5).
SDSS_File = '/Users/Sam/Documents/emgtemp/data/4363_gr_5_0_err_dered.csv'
SDSS_Data = np.genfromtxt(SDSS_File,skip_header=1, delimiter = ',',dtype=float,unpack=True,names=True)
NII_6584 = SDSS_Data['Flux_NII_6583']
Ha_6562 = SDSS_Data['Flux_Ha_6562']
OI_6300 = SDSS_Data['Flux_OI_6300']
OIII_5006 = SDSS_Data['Flux_OIII_5006']
Hb_4861 = SDSS_Data['Flux_Hb_4861']
OIII_4363 = SDSS_Data['Flux_OIII_4363']
SII_6716 = SDSS_Data['Flux_SII_6716']
SII_6731 = SDSS_Data['Flux_SII_6730']
# [OII]3727 is the sum of the unresolved 3726/3728 doublet.
OII_3727 = SDSS_Data['Flux_OII_3726'] + SDSS_Data['Flux_OII_3728']
# Standard log line-ratio diagnostics used on the plot axes below.
OIII_Hb = np.log10(OIII_5006/Hb_4861)
NII_Ha = np.log10(NII_6584/Ha_6562)
Temp_Ratio = np.log10(OIII_5006/OIII_4363)
S_Ratio = np.log10(SII_6716/SII_6731)
NO_Ratio = np.log10(NII_6584/OII_3727)
OI_Ratio = np.log10(OI_6300/Ha_6562)
O_Ratio = np.log10(OIII_5006/OII_3727)
S_Ha_Ratio = np.log10((SII_6716+SII_6731)/Ha_6562)
# Cloudy photoionization model predictions for the same lines.
Cloudy_File = '/Users/Sam/Documents/emgtemp/Metals_Sims/Dusty_sims/z_0.5_2.0/z_0.5_2.0_sims.pun'
Cloudy_Data = np.genfromtxt(Cloudy_File, delimiter = '\t',dtype=float,unpack=True,names=True)
Cloudy_NII_6584 = Cloudy_Data['N__2__6584A']
Cloudy_Ha_6562 = Cloudy_Data['H__1__6563A']
Cloudy_OIII_5006 = Cloudy_Data['O__3__5007A']
Cloudy_Hb_4861 = Cloudy_Data['TOTL__4861A']
Cloudy_OIII_4363 = Cloudy_Data['TOTL__4363A']
Cloudy_SII_6716 = Cloudy_Data['S_II__6716A']
Cloudy_SII_6731 = Cloudy_Data['S_II__6731A']
Cloudy_OII_3727 = Cloudy_Data['TOTL__3727A']
Cloudy_OI_6300 = Cloudy_Data['O__1__6300A']
Cloudy_OIII_Hb = np.log10(Cloudy_OIII_5006/Cloudy_Hb_4861)
Cloudy_NII_Ha = np.log10(Cloudy_NII_6584/Cloudy_Ha_6562)
Cloudy_Temp_Ratio = np.log10(Cloudy_OIII_5006/Cloudy_OIII_4363)
Cloudy_S_Ratio = np.log10(Cloudy_SII_6716/Cloudy_SII_6731)
Cloudy_NO_Ratio = np.log10(Cloudy_NII_6584/Cloudy_OII_3727)
Cloudy_OI_Ratio = np.log10(Cloudy_OI_6300/Cloudy_Ha_6562)
Cloudy_O_Ratio = np.log10(Cloudy_OIII_5006/Cloudy_OII_3727)
Cloudy_S_Ha_Ratio = np.log10((Cloudy_SII_6716+Cloudy_SII_6731)/Cloudy_Ha_6562)
# NOTE(review): the grid file points at the z_0.6_2.0 run while every other
# path in this script uses z_0.5_2.0 -- confirm this is intentional.
Grid_File = '/Users/Sam/Documents/emgtemp/Metals_Sims/Dusty_sims/z_0.6_2.0/z_0.6_2.0_sims.grd'
Grid_Data = np.genfromtxt(Grid_File,skip_header=1,delimiter = '\t',dtype=float,unpack=True)
Cloudy_Metals = Grid_Data[8,:]
Cloudy_Den = Grid_Data[6,:]
# Reshape the flat simulation output into a 2-D grid; the hardcoded 6 is the
# number of metallicity values in the grid (see the note after the colormaps).
Cloudy_NII_Ha_array = np.reshape(Cloudy_NII_Ha,(6,-1))
Cloudy_OI_Ratio_array = np.reshape(Cloudy_OI_Ratio,(6,-1))
Cloudy_OIII_Hb_array = np.reshape(Cloudy_OIII_Hb,(6,-1))
Cloudy_Temp_Ratio_array = np.reshape(Cloudy_Temp_Ratio,(6,-1))
Cloudy_S_Ratio_array = np.reshape(Cloudy_S_Ratio,(6,-1))
Cloudy_NO_Ratio_array = np.reshape(Cloudy_NO_Ratio,(6,-1))
Cloudy_O_Ratio_array = np.reshape(Cloudy_O_Ratio,(6,-1))
Cloudy_S_Ha_Ratio_array = np.reshape(Cloudy_S_Ha_Ratio,(6,-1))
# Transposed views: rows become constant-U tracks instead of constant-Z tracks.
Cloudy_NII_Ha_transpose = np.transpose(Cloudy_NII_Ha_array)
Cloudy_OI_Ratio_transpose = np.transpose(Cloudy_OI_Ratio_array)
Cloudy_OIII_Hb_transpose = np.transpose(Cloudy_OIII_Hb_array)
Cloudy_Temp_Ratio_transpose = np.transpose(Cloudy_Temp_Ratio_array)
Cloudy_S_Ratio_transpose = np.transpose(Cloudy_S_Ratio_array)
Cloudy_NO_Ratio_transpose = np.transpose(Cloudy_NO_Ratio_array)
Cloudy_O_Ratio_transpose = np.transpose(Cloudy_O_Ratio_array)
Cloudy_S_Ha_Ratio_transpose = np.transpose(Cloudy_S_Ha_Ratio_array)
#cold_data_colors = [plt.cm.Blues(i) for i in np.linspace(0,1,len(SDSS_Data['z']))]
#mid_data_colors = [plt.cm.Greens(i) for i in np.linspace(0,1,len(SDSS_Data['z']))]
#hot_data_colors = [plt.cm.Reds(i) for i in np.linspace(0,1,len(SDSS_Data['z']))]
# Discrete color cycles: purples for ionization-parameter tracks, blues for
# metallicity tracks.
u_colors = [plt.cm.Purples(i) for i in np.linspace(0.5,1,7)]
metal_colors = [plt.cm.Blues(i) for i in np.linspace(0.25,1,6)]
def truncate_colormap(cmap, minval=0.15, maxval=1.0, n=100):
    """Build a new colormap restricted to the [minval, maxval] slice of *cmap*.

    Sampling *n* evenly spaced colors from the slice avoids the washed-out
    low end of the sequential matplotlib colormaps.
    """
    samples = cmap(np.linspace(minval, maxval, n))
    label = 'trunc({n},{a:.2f},{b:.2f})'.format(n=cmap.name, a=minval, b=maxval)
    return colors.LinearSegmentedColormap.from_list(label, samples)
# Continuous versions of the track colors, used only for the inset colorbars.
u_colors_map = truncate_colormap(cm.Purples)
metal_colors_map = truncate_colormap(cm.Blues)
# NOTE(review): the counts above (6 and 7 in u_colors/metal_colors and the
# reshape) are the grid dimensions of the Cloudy run; ideally they would be
# derived from the .grd file instead of hardcoded.
#sf_count = 0.0
#comp_count = 0.0
#agn_count = 0.0
#liner_count = 0.0
#amb_count = 0.0
# Default marker; overwritten per-galaxy by getShape().
shape = ['v']
#####################################################################################################
def getShape(NII_Ha, OIII_Hb, S_Ha_Ratio, OI_Ratio):
    """Return the scatter-marker character encoding a galaxy's BPT class.

    Classification uses the positions relative to the standard demarcation
    curves in the [NII]/Ha, [SII]/Ha and [OI]/Ha diagnostic diagrams:
    'x' star-forming, '+' composite, 'D' AGN, 's' LINER, '*' ambiguous.
    All arguments are log10 line ratios.
    """
    # Star-forming: below all three demarcation curves.
    if (OIII_Hb < 0.61/(NII_Ha-0.05)+1.3 and
            OIII_Hb < 0.72/(S_Ha_Ratio-0.32)+1.30 and
            OIII_Hb < 0.73/(OI_Ratio+0.59)+1.33):
        return 'x'
    # Composite: between the two [NII]/Ha curves.
    if (0.61/(NII_Ha-0.05)+1.3 < OIII_Hb and
            0.61/(NII_Ha-0.47)+1.19 > OIII_Hb):
        return '+'
    # AGN: above every curve, including both Seyfert/LINER dividing lines.
    if (0.61/(NII_Ha-0.47)+1.19 < OIII_Hb and
            0.72/(S_Ha_Ratio-0.32)+1.30 < OIII_Hb and
            0.73/(OI_Ratio+0.59)+1.33 < OIII_Hb and
            (1.89*S_Ha_Ratio)+0.76 < OIII_Hb and
            (1.18*OI_Ratio)+1.30 < OIII_Hb):
        return 'D'
    # LINER: above the curved lines but below both straight dividing lines.
    if (0.61/(NII_Ha-0.47)+1.19 < OIII_Hb and
            0.72/(S_Ha_Ratio-0.32)+1.30 < OIII_Hb and
            OIII_Hb < (1.89*S_Ha_Ratio)+0.76 and
            0.73/(OI_Ratio+0.59)+1.33 < OIII_Hb and
            OIII_Hb < (1.18*OI_Ratio)+1.30):
        return 's'
    # Anything else is ambiguous.
    return '*'
#####################################################################################################
#####################################################################################################
def getColor(OIII_5006, OIII_4363):
    """Map the [OIII] 5007/4363 flux ratio to a grayscale marker color.

    Per the commented-out legend below, the three bins correspond to
    electron-temperature ranges (lower 5007/4363 -> darker marker).

    BUG FIX: the original chain used strict comparisons ('<50',
    '>50 and <100', '>100'), so a ratio of exactly 50 or 100 fell through
    to an error branch and silently got the default black. The bins are
    now half-open and cover every value.
    """
    ratio = OIII_5006 / OIII_4363
    if ratio < 50:
        return plt.cm.gray(0.2)
    elif ratio < 100:
        return plt.cm.gray(0.5)
    else:
        return plt.cm.gray(0.75)
#####################################################################################################
# Figure 21: 2x3 grid of diagnostic diagrams. In every panel, SDSS galaxies
# are drawn as gray markers (shape = BPT class, shade = temperature bin) and
# the Cloudy grid as dashed purple (constant U) / solid blue (constant Z)
# tracks.
fig = plt.figure(21)
fig.subplots_adjust(wspace=0.8,hspace=0.4)
# --- Panel 1: BPT diagram, log [NII]/Ha vs log [OIII]/Hb ---
sp1 = plt.subplot(231)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    #print(Temp_Color)
    plt.scatter(NII_Ha[i],OIII_Hb[i],s = markersize, marker = shape, color = Temp_Color, edgecolor = 'none')
#print (Temp_Color)
#print(sf_count)
#print(comp_count)
#print(agn_count)
#print(liner_count)
#print(amb_count)
#print(red)
#print(green)
#print(black)
#print(counter)
plt.xlim(-1.5,0.5)
plt.ylim(0,1.3)
plt.ylabel(r"log([OIII] $\lambda$5007/H$\beta$)")
plt.xlabel(r"log ([NII] $\lambda$6584/H$\alpha$)")
#plt.title("BPT Diagram")
#plt.scatter(Cloudy_NII_Ha,Cloudy_OIII_Hb,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp1.set_color_cycle(u_colors)
plt.plot(Cloudy_NII_Ha_array,Cloudy_OIII_Hb_array,linestyle = '--')
sp1.set_color_cycle(metal_colors)
plt.plot(Cloudy_NII_Ha_transpose,Cloudy_OIII_Hb_transpose, lw = '1.5')
# Demarcation curves separating the star-forming/composite/AGN regions
# (same curves as in getShape).
x=np.linspace(-1.5,0.3,50)
y=((.61/(x-.47))+1.19)
plt.plot(x,y,color=Low_Temp_Color)
x3=np.linspace(-1,-0.2,50)
y3=((.61/(x3-.05)+1.3))
plt.plot(x3,y3,linestyle='--',color='k')
plt.xticks(np.arange(min(x), max(x)+1,1.0))
#plt.legend([plt.scatter([],[],color='.75', s = markersize, marker = 'x', edgecolor = 'none'),plt.scatter([],[],color='0.75', s = markersize, marker = '+', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = 'D', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = 's', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = '*', edgecolor = 'none')], ("Star-Forming","Composite","AGN","LINER","Ambiguous"),scatterpoints = 1, loc = 'lower left',fontsize =8)
#counter=0
# Inset colorbar keyed to the metallicity (Z) color scale.
sm = plt.cm.ScalarMappable(norm=colors.Normalize(vmin=0.5, vmax=2.0),cmap=metal_colors_map)
sm._A = []
smaxes = inset_axes(sp1, width=0.06, height=0.4, loc=3, bbox_to_anchor=(0.14, 0.1), bbox_transform=sp1.figure.transFigure)
#smaxes = inset_axes(sp1, width="3%", height="20%", loc=3, bbox_to_anchor=(0.1, 0.1), bbox_transform=ax.figure.transFigure)
cbar = plt.colorbar(sm,cax=smaxes)
cbar.ax.set_title('Z',fontsize=8)
cbar.set_ticks([0.5,2.0])
cbar.set_ticklabels([0.5,2.0])
cbar.ax.tick_params(labelsize=8)
# --- Panel 2: temperature diagnostic, [OIII] 5007/4363 ---
sp2 = plt.subplot(232)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(NII_Ha[i],Temp_Ratio[i], s = markersize, marker = shape, color = Temp_Color, edgecolor = 'none')
#print(counter)
plt.ylabel(r"log([OIII] $\lambda$5007/4363)")
plt.xlabel(r"log ([NII] $\lambda$6584/H$\alpha$)")
#plt.title("Temperature")
plt.ylim(1,2.5)
plt.xlim(-1.5,0.5)
#plt.scatter(Cloudy_NII_Ha,Cloudy_Temp_Ratio,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp2.set_color_cycle(u_colors)
plt.plot(Cloudy_NII_Ha_array,Cloudy_Temp_Ratio_array,linestyle = '--')
sp2.set_color_cycle(metal_colors)
plt.plot(Cloudy_NII_Ha_transpose,Cloudy_Temp_Ratio_transpose, lw = '1.5')
plt.xticks(np.arange(min(x), max(x)+1,1.0))
#plt.legend([plt.scatter([],[],color='0.75', s = markersize), plt.scatter([],[],color='0.5', s = markersize), plt.scatter([],[],color='0.25', s = markersize)], (r"T$_e$<1.17*10$^4$",r"1.17*10$^4$<T$_e$<1.54*10$^4$",r"T$_e$>1.54*10$^4$"),scatterpoints = 1, loc = 'lower left',fontsize =8)
# Inset colorbar keyed to the ionization-parameter (U) color scale.
sm = plt.cm.ScalarMappable(norm=colors.Normalize(vmin=-3.5, vmax=-0.5),cmap=u_colors_map)
sm._A = []
smaxes = inset_axes(sp2, width=0.06, height=0.4, loc=3, bbox_to_anchor=(0.3, .1), bbox_transform=sp2.figure.transFigure)
cbar = plt.colorbar(sm,cax=smaxes)
cbar.ax.set_title('U',fontsize=8)
cbar.set_ticks([-3.5,-0.5])
cbar.set_ticklabels([-3.5,-0.5])
cbar.ax.tick_params(labelsize=8)
# --- Panel 4 (lower-left): density diagnostic, [SII] 6717/6731 ---
sp3 = plt.subplot(234)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(NII_Ha[i],S_Ratio[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([SII] $\lambda$6717/6731)")
plt.xlabel(r"log ([NII] $\lambda$6584/H$\alpha$)")
plt.ylim(-0.3,0.3)
plt.xlim(-1.5,0.5)
#plt.title("Density")
#plt.scatter(Cloudy_NII_Ha,Cloudy_S_Ratio,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp3.set_color_cycle(u_colors)
plt.plot(Cloudy_NII_Ha_array,Cloudy_S_Ratio_array,linestyle = '--')
sp3.set_color_cycle(metal_colors)
plt.plot(Cloudy_NII_Ha_transpose,Cloudy_S_Ratio_transpose, lw = '1.5')
plt.xticks(np.arange(min(x), max(x)+1,1.0))
#plt.legend([plt.scatter([],[],color=Low_Temp_Color, s = markersize), plt.scatter([],[],color=Mid_Temp_Color, s = markersize), plt.scatter([],[],color=High_Temp_Color, s = markersize),plt.scatter([],[],c=Cloudy_Sim_Color, s = markersize, edgecolor = 'none')], (r"$\frac{OIII[5007]}{OIII[4363]}$<50.0",r"$50.0<\frac{OIII[5007]}{OIII[4363]}<100.0$",r"$\frac{OIII[5007]}{OIII[4363]}$>100.0","Cloudy Simulation"),scatterpoints = 1, loc = 'lower left',fontsize =8)
# --- Panel 3 (upper-right): metallicity diagnostic, [NII]/[OII] ---
sp4 = plt.subplot(233)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(NII_Ha[i],NO_Ratio[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([NII] $\lambda$6584/[OII] $\lambda$3727)")
plt.xlabel(r"log ([NII] $\lambda$6584/H$\alpha$)")
#plt.title("Metallicity")
plt.xlim(-1.5,0.5)
plt.ylim(-1,0.5)
#plt.scatter(Cloudy_NII_Ha,Cloudy_NO_Ratio,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp4.set_color_cycle(u_colors)
plt.plot(Cloudy_NII_Ha_array,Cloudy_NO_Ratio_array,linestyle = '--')
sp4.set_color_cycle(metal_colors)
plt.plot(Cloudy_NII_Ha_transpose,Cloudy_NO_Ratio_transpose, lw = '1.5')
plt.xticks(np.arange(min(x), max(x)+1,1.0))
#plt.legend([plt.scatter([],[],color=Low_Temp_Color, s = markersize), plt.scatter([],[],color=Mid_Temp_Color, s = markersize), plt.scatter([],[],color=High_Temp_Color, s = markersize),plt.scatter([],[],c=Cloudy_Sim_Color, s = markersize, edgecolor = 'none')], (r"$\frac{OIII[5007]}{OIII[4363]}$<50.0",r"$50.0<\frac{OIII[5007]}{OIII[4363]}<100.0$",r"$\frac{OIII[5007]}{OIII[4363]}$>100.0","Cloudy Simulation"),scatterpoints = 1, loc = 'lower left',fontsize =8)
plt.suptitle('n$_H$ = 3.5, -0.5 < U < -3.5, 0.5 < Z < 2.0')
plt.savefig("Z_0.5_2.0_Sims_Plots.pdf", dpi = 600)
plt.show()
# Figure 22: 2x2 grid of [OI]-based diagnostics, same marker conventions as
# figure 21.
fig2 = plt.figure(22)
# --- Panel 1: log [NII]/Ha vs log [OI]/Ha ---
sp5 = plt.subplot(221)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(NII_Ha[i],OI_Ratio[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([OI] $\lambda$6300/H$\alpha$)")
plt.xlabel(r"log ([NII] $\lambda$6584/H$\alpha$)")
plt.title("OI_6300")
plt.xlim(-2.5,0.5)
plt.ylim(-2.5,0)
#plt.scatter(Cloudy_NII_Ha,Cloudy_OI_Ratio,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp5.set_color_cycle(u_colors)
plt.plot(Cloudy_NII_Ha_array,Cloudy_OI_Ratio_array,linestyle = '--', lw = '2')
sp5.set_color_cycle(metal_colors)
plt.plot(Cloudy_NII_Ha_transpose,Cloudy_OI_Ratio_transpose, lw = '2')
# Marker-shape legend (shared meaning across all panels).
plt.legend([plt.scatter([],[],color='.75', s = markersize, marker = 'x', edgecolor = 'none'),plt.scatter([],[],color='0.75', s = markersize, marker = '+', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = 'D', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = 's', edgecolor = 'none'), plt.scatter([],[],color='.75', s = markersize, marker = '*', edgecolor = 'none')], ("Star-Forming","Composite","AGN","LINER","Ambiguous"),scatterpoints = 1, loc = 'lower left',fontsize =8)
# --- Panel 2: log [OI]/Ha vs log [OIII]/Hb with demarcation lines ---
sp6 = plt.subplot(222)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(OI_Ratio[i],OIII_Hb[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([OIII] $\lambda$5007/H$\beta$)")
plt.xlabel(r"log ([OI] $\lambda$6300/H$\alpha$)")
plt.title("OI_6300 vs. OIII_5007")
#plt.scatter(Cloudy_OI_Ratio,Cloudy_OIII_Hb,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp6.set_color_cycle(u_colors)
plt.plot(Cloudy_OI_Ratio_array,Cloudy_OIII_Hb_array,linestyle = '--', lw = '2')
sp6.set_color_cycle(metal_colors)
plt.plot(Cloudy_OI_Ratio_transpose,Cloudy_OIII_Hb_transpose, lw = '2')
# Curved (black) and straight (blue) dividing lines, as used in getShape.
x6 = np.linspace(-2.5,-0.6,50)
y6 = ((.73/(x6+0.59))+1.33)
plt.plot(x6,y6,color = 'k')
x7 = np.linspace(-1.125,0.25,50)
y7 = (1.18*x7) + 1.30
plt.plot(x7,y7, color = 'b')
plt.ylim(-1,1.5)
plt.xlim(-2.5,0.5)
#plt.legend([plt.scatter([],[],color=Low_Temp_Color, s = markersize), plt.scatter([],[],color=Mid_Temp_Color, s = markersize), plt.scatter([],[],color=High_Temp_Color, s = markersize),plt.scatter([],[],c=Cloudy_Sim_Color, s = markersize, edgecolor = 'none')], (r"$\frac{OIII[5007]}{OIII[4363]}$<50.0",r"$50.0<\frac{OIII[5007]}{OIII[4363]}<100.0$",r"$\frac{OIII[5007]}{OIII[4363]}$>100.0","Cloudy Simulation"),scatterpoints = 1, loc = 'lower left',fontsize =8)
# --- Panel 3: Groves diagram, log [OI]/Ha vs log [OIII]/[OII] ---
sp7 = plt.subplot(223)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(OI_Ratio[i],O_Ratio[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([OIII] $\lambda$5007/[OII]$\lambda$3727)")
plt.xlabel(r"log ([OI] $\lambda$6300/H$\alpha$)")
plt.title("Groves Diagram")
#plt.scatter(Cloudy_OI_Ratio,Cloudy_O_Ratio,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp7.set_color_cycle(u_colors)
plt.plot(Cloudy_OI_Ratio_array,Cloudy_O_Ratio_array,linestyle = '--', lw = '2')
sp7.set_color_cycle(metal_colors)
plt.plot(Cloudy_OI_Ratio_transpose,Cloudy_O_Ratio_transpose, lw = '2')
x1 = np.linspace(-2.0,-.25,50)
y1 = ((-1.701*x1)-2.163)
x2 = np.linspace(-1.05998,0,50)
y2 = x2 + 0.7
plt.plot(x2,y2, color = 'k')
plt.plot(x1,y1, color = 'k')
plt.xlim(-2.5,0)
plt.ylim(-1.5,1)
#plt.legend([plt.scatter([],[],color=Low_Temp_Color, s = markersize), plt.scatter([],[],color=Mid_Temp_Color, s = markersize), plt.scatter([],[],color=High_Temp_Color, s = markersize),plt.scatter([],[],c=Cloudy_Sim_Color, s = markersize, edgecolor = 'none')], (r"$\frac{OIII[5007]}{OIII[4363]}$<50.0",r"$50.0<\frac{OIII[5007]}{OIII[4363]}<100.0$",r"$\frac{OIII[5007]}{OIII[4363]}$>100.0","Cloudy Simulation"),scatterpoints = 1, loc = 'lower left',fontsize =8)
# --- Panel 4: log [SII]/Ha vs log [OIII]/Hb with demarcation lines ---
sp8 = plt.subplot(224)
for i in range(0,len(SDSS_Data['z'])):
    shape = getShape(NII_Ha[i], OIII_Hb[i], S_Ha_Ratio[i], OI_Ratio[i])
    Temp_Color = getColor(OIII_5006[i], OIII_4363[i])
    plt.scatter(S_Ha_Ratio[i],OIII_Hb[i], s = markersize, marker = shape, c = Temp_Color, edgecolor = 'none')
plt.ylabel(r"log([OIII] $\lambda$5007/H$\beta$)")
plt.xlabel(r"log ([SII]/H$\alpha$)")
plt.title("OIII_5007 vs. SII")
plt.ylim(-1,1.5)
# Straight Seyfert/LINER line (blue) and curved star-forming line (black),
# matching the curves used in getShape.
x4 = np.linspace(-0.32,0.25,50)
y4 = ((1.89*x4)+0.76)
x5 = np.linspace(-1.5,0.25,50)
# BUG FIX: y5 was previously computed from the unrelated BPT grid `x`
# (np.linspace(-1.5,0.3,50)) but plotted against x5, which skewed the
# demarcation curve. Compute it from x5 so ordinate matches abscissa.
y5 = ((0.72/(x5 - 0.32))+1.3)
plt.plot(x5,y5,color = 'k')
plt.plot(x4,y4,color = 'b')
#plt.scatter(Cloudy_S_Ha_Ratio,Cloudy_OIII_Hb,c=Cloudy_Sim_Color, s = markersize, edgecolor ='none')
sp8.set_color_cycle(u_colors)
plt.plot(Cloudy_S_Ha_Ratio_array,Cloudy_OIII_Hb_array,linestyle = '--', lw = '2')
sp8.set_color_cycle(metal_colors)
plt.plot(Cloudy_S_Ha_Ratio_transpose,Cloudy_OIII_Hb_transpose, lw = '2')
plt.suptitle('n$_H$ = 3.5, -0.5 < U < -3.5, 0.5 < Z < 2.0')
#plt.savefig("Metallicity Sim Plots1.pdf")
#plt.show()
|
mit
| 2,493,449,929,102,906,400
| 49.631436
| 529
| 0.640938
| false
| 2.316429
| false
| false
| false
|
cjaniake/ionicweb
|
webapp/area/views.py
|
1
|
10040
|
from django.shortcuts import render
from django.views.generic.base import TemplateView
from area.forms import AreaForm, LocationForm, BuildingForm
from area.models import Area, Location, Building
from django.http import HttpResponseRedirect
import logging
# GET /areas
class ListAreaView(TemplateView):
    """GET /areas -- render the list of all Areas.

    A transient, unsaved Area named "new" is appended to the list so the
    template can render a create-new entry alongside the real records.
    """
    template_name = "area/area_list.html"

    def get_context_data(self, **kwargs):
        logger = logging.getLogger('webapp')
        logger.info('run get_context_data run')
        context = super(ListAreaView, self).get_context_data(**kwargs)
        areas = list(Area.objects.all())
        placeholder = Area()
        placeholder.name = "new"
        areas.append(placeholder)
        context['object_list'] = areas
        return context
# GET/POST /area
def handle_area(request):
    """GET/POST /area -- create a new Area.

    GET renders an empty form; POST validates and saves a new record, then
    redirects to the area list. An invalid POST re-renders the bound form.
    """
    logger = logging.getLogger('webapp')
    logger.info('run handle_area run')
    if request.method == 'POST':
        form = AreaForm(request.POST, request.FILES)
        if form.is_valid():
            area = Area()
            # Copy each validated field onto the new model instance.
            for field in ('adminEmail', 'areaStatus', 'createdDate',
                          'folderName', 'language', 'logoFile', 'name',
                          'paymentIntegration', 'paymentId', 'plan'):
                setattr(area, field, form.cleaned_data[field])
            area.save()
            return HttpResponseRedirect('/areas/')
    else:
        form = AreaForm()
    return render(request, 'area/area_detail.html', {'form': form, 'action':'/area/', 'http_method':'POST'})
# GET/POST /area/<areacode>
def edit_area(request, areacode=None):
    """GET/POST /area/<areacode> -- edit an existing Area.

    GET renders the form pre-filled from the record; a valid POST updates
    the record and redirects to the list. Missing areacode redirects away.
    """
    logger = logging.getLogger('webapp')
    logger.info('run edit_area run')
    if not areacode:
        return HttpResponseRedirect('/areas/')
    area = Area.objects.get(id=int(areacode))
    if request.method == 'POST':
        # Update the record with the submitted values.
        logger.info('run submit_edit run')
        form = AreaForm(request.POST, request.FILES, instance=area)
        if form.is_valid():
            logger.info('updating area')
            logger.info(form.cleaned_data)
            for field in ('adminEmail', 'areaStatus', 'createdDate',
                          'folderName', 'language', 'logoFile', 'name',
                          'paymentIntegration', 'paymentId', 'plan'):
                setattr(area, field, form.cleaned_data[field])
            area.save()
            return HttpResponseRedirect('/areas/')
    else:
        # Load the record to allow edition.
        form = AreaForm(instance=area)
    return render(request, 'area/area_detail.html', {'form': form, 'action':'/area/' + areacode + '/', 'http_method':'POST'})
# GET /area/<areacode>/locations
class ListLocationView(TemplateView):
    """GET /area/<areacode>/locations -- list the Locations of one Area."""
    template_name = "area/location_list.html"

    def get_context_data(self, **kwargs):
        logger = logging.getLogger('webapp')
        areacode = kwargs['areacode']
        #logger.info('get locations', areacode)
        context = super(ListLocationView, self).get_context_data(**kwargs)
        area = Area.objects.get(id=int(areacode))
        context['area'] = area
        # Note: the bound .values callable is passed; the template invokes it.
        context['object_list'] = area.location_set.values
        return context
# GET/POST /area/<areacode>/location
# GET/POST /area/<areacode>/location
def handle_location(request, areacode=None):
    """Create a new Location under the given Area.

    GET renders an empty form; a valid POST attaches the new location to
    the area and redirects to the location list.
    """
    logger = logging.getLogger('webapp')
    logger.info('run handle_location run')
    area = Area.objects.get(id=int(areacode))
    if request.method == 'POST':
        form = LocationForm(request.POST)
        if form.is_valid():
            loc = Location()
            loc.name = form.cleaned_data['name']
            loc.city = form.cleaned_data['city']
            loc.state = form.cleaned_data['state']
            loc.adminEmail = form.cleaned_data['adminEmail']
            # FIX: reuse the Area fetched above instead of issuing a second,
            # identical database query for the same id.
            area.location_set.add(loc)
            return HttpResponseRedirect('/area/' + areacode + '/locations')
    else:
        form = LocationForm()
    return render(request, 'area/location_detail.html', {'form': form, 'action':'/area/' + areacode + '/location/', 'http_method':'POST', 'area': area})
# GET/POST /area/<areacode>/location/<locationid>
def edit_location(request, areacode=None, locationid=None):
    """GET/POST /area/<areacode>/location/<locationid> -- edit a Location."""
    logger = logging.getLogger('webapp')
    logger.info('run edit_location run')
    if not (areacode and locationid):
        # Fall back to the closest sensible list page.
        return HttpResponseRedirect('/area/' + areacode + '/locations') if areacode else HttpResponseRedirect('/areas/')
    area = Area.objects.get(id=int(areacode))
    loc = Location.objects.get(id=int(locationid))
    if request.method == 'POST':
        # Update the record with the submitted values.
        form = LocationForm(request.POST, instance=loc)
        if form.is_valid():
            for field in ('name', 'city', 'state', 'adminEmail'):
                setattr(loc, field, form.cleaned_data[field])
            loc.save()
            return HttpResponseRedirect('/area/' + areacode + '/locations')
    else:
        # Load the record to allow edition.
        form = LocationForm(instance=loc)
    return render(request, 'area/location_detail.html', {'form': form, 'action':'/area/' + areacode + '/location/' + locationid + '/', 'http_method':'POST', 'area': area})
# GET /area/<areacode>/location/<locationid>/buildings
class ListBuildingView(TemplateView):
    """GET /area/<areacode>/location/<locationid>/buildings -- list Buildings."""
    template_name = "area/building_list.html"

    def get_context_data(self, **kwargs):
        logger = logging.getLogger('webapp')
        areacode = kwargs['areacode']
        locationid = kwargs['locationid']
        #logger.info('get buildings', areacode, locationid)
        context = super(ListBuildingView, self).get_context_data(**kwargs)
        area = Area.objects.get(id=int(areacode))
        context['area'] = area
        location = Location.objects.get(id=int(locationid))
        context['location'] = location
        # Note: the bound .values callable is passed; the template invokes it.
        context['object_list'] = location.building_set.values
        return context
# GET/POST /area/<areacode>/location/<locationid>/building
def handle_building(request, areacode=None, locationid=None):
    """GET/POST .../building -- create a new Building under a Location."""
    logger = logging.getLogger('webapp')
    logger.info('run handle_building run')
    area = Area.objects.get(id=int(areacode))
    if request.method == 'POST':
        form = BuildingForm(request.POST)
        if form.is_valid():
            building = Building()
            for field in ('name', 'address', 'zipcode', 'phone',
                          'cellphone', 'adminEmail'):
                setattr(building, field, form.cleaned_data[field])
            location = Location.objects.get(id=int(locationid))
            location.building_set.add(building)
            return HttpResponseRedirect('/area/' + areacode + '/location/' + locationid + '/buildings')
    else:
        form = BuildingForm()
    return render(request, 'area/building_detail.html', {'form': form, 'action':'/area/' + areacode + '/location/' + locationid + '/building/', 'http_method':'POST', 'area': area})
# GET/POST /area/<areacode>/location/<locationid>/building/<buildingid>
def edit_building(request, areacode=None, locationid=None, buildingid=None):
    """GET/POST .../building/<buildingid> -- edit an existing Building."""
    logger = logging.getLogger('webapp')
    logger.info('run edit_building run')
    if not (areacode and locationid and buildingid):
        # Fall back to the closest sensible list page.
        return HttpResponseRedirect('/area/' + areacode + '/location/' + locationid + '/buildings') if areacode and locationid else HttpResponseRedirect('/areas/')
    area = Area.objects.get(id=int(areacode))
    loc = Location.objects.get(id=int(locationid))
    building = Building.objects.get(id=int(buildingid))
    if request.method == 'POST':
        # Update the record with the submitted values.
        form = BuildingForm(request.POST, instance=building)
        if form.is_valid():
            for field in ('name', 'address', 'zipcode', 'phone',
                          'cellphone', 'adminEmail'):
                setattr(building, field, form.cleaned_data[field])
            building.save()
            return HttpResponseRedirect('/area/' + areacode + '/location/' + locationid + '/buildings')
    else:
        # Load the record to allow edition.
        form = BuildingForm(instance=building)
    return render(request, 'area/building_detail.html', {'form': form, 'action':'/area/' + areacode + '/location/' + locationid + '/building/' + buildingid + '/', 'http_method':'POST', 'area': area, 'location': loc})
|
gpl-2.0
| 3,064,346,551,269,282,300
| 40.147541
| 222
| 0.609263
| false
| 3.977813
| false
| false
| false
|
loggerhead/Easy-Karabiner
|
easy_karabiner/basexml.py
|
1
|
3477
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import lxml.etree as etree
import xml.dom.minidom as minidom
import xml.sax.saxutils as saxutils
from . import exception
from .fucking_string import ensure_utf8
class BaseXML(object):
    """Base class for objects serializable to XML.

    Subclasses implement :meth:`to_xml` returning an lxml element tree;
    this class provides parsing helpers, CDATA handling and pretty-printing.
    """
    # Parser configured to preserve CDATA sections rather than merging them
    # into plain text nodes.
    xml_parser = etree.XMLParser(strip_cdata=False)

    @classmethod
    def unescape(cls, s):
        """Undo XML entity escaping, including the quote entities."""
        # saxutils.unescape handles &amp;/&lt;/&gt;; the extra map adds quotes.
        # NOTE(review): the entity keys were mangled in this copy of the file
        # and have been reconstructed as &quot;/&apos; -- confirm upstream.
        return saxutils.unescape(s, {
            "&quot;": '"',
            "&apos;": "'",
        })

    @classmethod
    def parse(cls, filepath):
        """Parse an XML file and return its root element."""
        return etree.parse(filepath).getroot()

    @classmethod
    def parse_string(cls, xml_str):
        """Parse an XML string (CDATA preserved) and return the root element."""
        return etree.fromstring(xml_str, cls.xml_parser)

    @classmethod
    def get_class_name(cls):
        """Return the (sub)class name, e.g. for use as a tag name."""
        return cls.__name__

    @classmethod
    def is_cdata_text(cls, text):
        """Return True if *text* is wrapped in CDATA markers."""
        return text.startswith('<![CDATA[') and text.endswith(']]>')

    @classmethod
    def remove_cdata_mark(cls, text):
        """Strip the surrounding CDATA markers from *text*."""
        return text[len('<![CDATA['):-len(']]>')]

    @classmethod
    def create_cdata_text(cls, text):
        """Wrap *text* in literal CDATA markers."""
        # do NOT use `etree.CDATA`
        return '<![CDATA[%s]]>' % text

    @classmethod
    def assign_text_attribute(cls, etree_element, text):
        """Set the element's text, UTF-8-encoding non-None values."""
        if text is not None:
            etree_element.text = ensure_utf8(text)
        else:
            etree_element.text = text

    @classmethod
    def create_tag(cls, name, text=None, **kwargs):
        """Create a new element *name* with optional text and attributes."""
        et = etree.Element(name, **kwargs)
        cls.assign_text_attribute(et, text)
        return et

    @classmethod
    def pretty_text(cls, elem, indent=" ", level=0):
        """WARNING: This method would change the construct of XML tree"""
        # Re-indents multi-line text content of leaf elements in place so the
        # final output lines up with the surrounding tags.
        i = "\n" + level * indent
        if len(elem) == 0:
            if elem.text is not None:
                lines = elem.text.split('\n')
                if len(lines) > 1:
                    if not lines[0].startswith(' '):
                        lines[0] = (i + indent) + lines[0]
                    if lines[-1].strip() == '':
                        lines.pop()
                    elem.text = (i + indent).join(lines) + i
        else:
            # Recurse into children, one indent level deeper.
            for subelem in elem:
                BaseXML.pretty_text(subelem, indent, level + 1)
        return elem

    @classmethod
    def to_format_str(cls, xml_tree, pretty_text=True):
        """Serialize *xml_tree* to a pretty-printed, unescaped XML string."""
        indent = " "
        if pretty_text:
            BaseXML.pretty_text(xml_tree, indent=indent)
        xml_string = etree.tostring(xml_tree)
        xml_string = minidom.parseString(xml_string).toprettyxml(indent=indent)
        xml_string = cls.unescape(xml_string)
        return xml_string

    def to_xml(self):
        """NOTICE: This method must be a REENTRANT function, which means
        it should NOT change status or modify any member of `self` object.
        Because other methods may change the construct of the XML tree.
        """
        raise exception.NeedOverrideError()

    def to_str(self, pretty_text=True, remove_first_line=False):
        """Serialize to string; optionally drop the XML declaration line."""
        xml_str = self.to_format_str(self.to_xml(), pretty_text=pretty_text)
        if remove_first_line:
            lines = xml_str.split('\n')
            if len(lines[-1].strip()) == 0:
                # remove last blank line
                lines = lines[1:-1]
            else:
                lines = lines[1:]
            xml_str = '\n'.join(lines)
        return xml_str

    def __str__(self):
        # `remove_first_line=True` is used to remove version tag in the first line
        return self.to_str(remove_first_line=True)
|
mit
| 7,036,706,235,977,799,000
| 30.324324
| 82
| 0.561979
| false
| 3.871938
| false
| false
| false
|
jfterpstra/bluebottle
|
bluebottle/test/factory_models/accounting.py
|
1
|
2678
|
import factory
from datetime import date, timedelta
from decimal import Decimal
from bluebottle.accounting.models import (BankTransaction, BankTransactionCategory,
RemoteDocdataPayout, RemoteDocdataPayment)
from bluebottle.test.factory_models.payouts import ProjectPayoutFactory
from .payments import PaymentFactory
DEFAULT_CURRENCY = 'EUR'
# Captured once at import time; all date fields below are relative to this.
TODAY = date.today()
class RemoteDocdataPayoutFactory(factory.DjangoModelFactory):
    """Test fixture factory for RemoteDocdataPayout records."""

    class Meta(object):
        model = RemoteDocdataPayout

    # Unique reference per generated payout.
    payout_reference = factory.Sequence(lambda n: 'Reference_{0}'.format(n))
    payout_date = TODAY
    # Collection window straddling the payout date.
    start_date = TODAY - timedelta(days=10)
    end_date = TODAY + timedelta(days=10)
    collected_amount = Decimal('10')
    payout_amount = Decimal('10')
class RemoteDocdataPaymentFactory(factory.DjangoModelFactory):
    """Test fixture factory for RemoteDocdataPayment records."""

    class Meta(object):
        model = RemoteDocdataPayment

    merchant_reference = 'merchant reference'
    triple_deal_reference = 'triple deal reference'
    payment_type = 1
    amount_collected = Decimal('10')
    currency_amount_collected = DEFAULT_CURRENCY
    docdata_fee = Decimal('0.25')
    currency_docdata_fee = DEFAULT_CURRENCY
    # Related objects are generated on demand by their own factories.
    local_payment = factory.SubFactory(PaymentFactory)
    remote_payout = factory.SubFactory(RemoteDocdataPayoutFactory)
    status = 'valid'  # or 'missing' or 'mismatch' as in RemoteDocdataPayment.IntegretyStatus
    # status_remarks, tpcd, currency_tpcd, tpci, currency_tpci
class BankTransactionCategoryFactory(factory.DjangoModelFactory):
    """Test fixture factory for BankTransactionCategory (unique per name)."""

    class Meta(object):
        model = BankTransactionCategory
        # Reuse an existing category with the same name instead of duplicating.
        django_get_or_create = ('name',)

    name = factory.Sequence(lambda n: 'Category_{0}'.format(n))
class BankTransactionFactory(factory.DjangoModelFactory):
    """Test fixture factory for BankTransaction records."""

    class Meta(object):
        model = BankTransaction

    category = factory.SubFactory(BankTransactionCategoryFactory)
    # only one of these three make sense, so set 2 on None when using this factory
    payout = factory.SubFactory(ProjectPayoutFactory)
    remote_payout = factory.SubFactory(RemoteDocdataPayoutFactory)
    remote_payment = factory.SubFactory(RemoteDocdataPaymentFactory)
    sender_account = 'NL24RABO0133443493'
    currency = DEFAULT_CURRENCY
    interest_date = TODAY + timedelta(days=30)
    credit_debit = 'C'  # or 'D'
    amount = Decimal('100')
    counter_account = 'NL91ABNA0417164300'
    counter_name = 'Counter name'
    book_date = TODAY
    book_code = 'bg'
    status = 'valid'  # or 'unknown', 'mismatch' # BankTransaction.IntegrityStatus.choices
    # description1 (t/m description6), end_to_end_id, id_recipient, mandate_id, status_remarks, filler
|
bsd-3-clause
| -7,912,064,976,835,343,000
| 32.898734
| 102
| 0.724048
| false
| 3.831187
| false
| false
| false
|
automl/SpySMAC
|
cave/analyzer/parameter_importance/fanova.py
|
1
|
5790
|
import operator
import os
from collections import OrderedDict
from pandas import DataFrame
from cave.analyzer.parameter_importance.base_parameter_importance import BaseParameterImportance
class Fanova(BaseParameterImportance):
    """
    fANOVA (functional analysis of variance) computes the fraction of the variance in the cost space explained by
    changing a parameter by marginalizing over all other parameters, for each parameter (or for pairs of
    parameters). Parameters with high importance scores will have a large impact on the performance. To this end, a
    random forest is trained as an empirical performance model on the available empirical data from the available
    runhistories.
    """

    def __init__(self,
                 runscontainer,
                 marginal_threshold=0.05):
        """Wrapper for parameter_importance to save the importance-object/ extract the results. We want to show the
        top X most important parameter-fanova-plots.

        Parameters
        ----------
        runscontainer: RunsContainer
            contains all important information about the configurator runs
        marginal_threshold: float
            parameter/s must be at least this important to be mentioned
        """
        super().__init__(runscontainer)
        self.marginal_threshold = marginal_threshold
        self.parameter_importance("fanova")

    def get_name(self):
        """Human-readable analyzer name used in reports."""
        return 'fANOVA'

    def postprocess(self, pimp, output_dir):
        """Collect fANOVA importances from the PIMP evaluator.

        Parameters
        ----------
        pimp:
            importance object whose ``evaluator`` holds the evaluated
            (and optionally uncertainty-annotated) parameter importances
        output_dir: str
            directory under which the fanova plots were written

        Returns
        -------
        OrderedDict
            keys 'Importance' (html table), 'Marginals' and
            'Pairwise Marginals' (paths to existing plot files)
        """
        result = OrderedDict()

        def parse_pairwise(p):
            """parse pimp's way of having pairwise parameters as key as str and return list of individuals"""
            res = [tmp.strip('\' ') for tmp in p.strip('[]').split(',')]
            return res

        # Scale fractional importances to percent for display.
        parameter_imp = {k: v * 100 for k, v in pimp.evaluator.evaluated_parameter_importance.items()}
        param_imp_std = {}
        if hasattr(pimp.evaluator, 'evaluated_parameter_importance_uncertainty'):
            param_imp_std = {k: v * 100 for k, v in pimp.evaluator.evaluated_parameter_importance_uncertainty.items()}

        for k in parameter_imp.keys():
            self.logger.debug("fanova-importance for %s: mean (over trees): %f, std: %s", k, parameter_imp[k],
                              str(param_imp_std[k]) if param_imp_std else 'N/A')

        # Split single and pairwise (pairwise are string: "['p1','p2']")
        single_imp = {k: v for k, v in parameter_imp.items() if not k.startswith('[') and v > self.marginal_threshold}
        pairwise_imp = {k: v for k, v in parameter_imp.items() if k.startswith('[') and v > self.marginal_threshold}

        # Set internal parameter importance for further analysis (such as parallel coordinates)
        self.fanova_single_importance = single_imp
        # BUG FIX: this previously stored single_imp as well, so downstream
        # consumers never saw the pairwise importances.
        self.fanova_pairwise_importance = pairwise_imp

        # Dicts to lists of tuples, sorted descending after importance
        single_imp = OrderedDict(sorted(single_imp.items(), key=operator.itemgetter(1), reverse=True))
        pairwise_imp = OrderedDict(sorted(pairwise_imp.items(), key=operator.itemgetter(1), reverse=True))

        # Create table (section header rows followed by name/value rows)
        table = []
        if len(single_imp) > 0:
            table.extend([(20*"-"+" Single importance: "+20*"-", 20*"-")])
            for k, v in single_imp.items():
                value = str(round(v, 4))
                if param_imp_std:
                    value += " +/- " + str(round(param_imp_std[k], 4))
                table.append((k, value))
        if len(pairwise_imp) > 0:
            table.extend([(20*"-"+" Pairwise importance: "+20*"-", 20*"-")])
            for k, v in pairwise_imp.items():
                name = ' & '.join(parse_pairwise(k))
                value = str(round(v, 4))
                if param_imp_std:
                    value += " +/- " + str(round(param_imp_std[k], 4))
                table.append((name, value))

        keys, fanova_table = [k[0] for k in table], [k[1:] for k in table]
        df = DataFrame(data=fanova_table, index=keys)
        result['Importance'] = {'table': df.to_html(escape=False, header=False, index=True, justify='left')}

        # Get plot-paths
        result['Marginals'] = {p: {'figure': os.path.join(output_dir, "fanova", p + '.png')} for p in single_imp.keys()}
        # Right now no way to access paths of the plots -> file issue
        pairwise_plots = {" & ".join(parse_pairwise(p)):
                          os.path.join(output_dir, 'fanova', '_'.join(parse_pairwise(p)) + '.png')
                          for p in pairwise_imp.keys()}
        # Only report pairwise plots that actually exist on disk.
        result['Pairwise Marginals'] = {p: {'figure': path}
                                        for p, path in pairwise_plots.items() if os.path.exists(path)}
        return result

    def get_jupyter(self):
        """Display the importance table and marginal plots inline in Jupyter."""
        from IPython.core.display import HTML, Image, display
        for b, result in self.result.items():
            error = self.result[b]['else'] if 'else' in self.result[b] else None
            if error:
                display(HTML(error))
            else:
                # Show table
                display(HTML(self.result[b]["Importance"]["table"]))
                # Show plots
                display(*list([Image(filename=d["figure"]) for d in self.result[b]['Marginals'].values()]))
                display(*list([Image(filename=d["figure"]) for d in self.result[b]['Pairwise Marginals'].values()]))
                # While working for a prettier solution, this might be an option:
                # display(HTML(figure_to_html([d["figure"] for d in self.result[b]['Marginals'].values()] +
                #                             [d["figure"] for d in self.result[b]['Pairwise Marginals'].values()],
                #                             max_in_a_row=3, true_break_between_rows=True)))
|
bsd-3-clause
| 8,205,455,852,385,124,000
| 48.487179
| 120
| 0.589465
| false
| 3.979381
| false
| false
| false
|
rfriedlein/zenoss
|
ZenPacks/ZenPacks.Coredial.Baytech/ZenPacks/Coredial/Baytech/BaytechPduBank.py
|
1
|
3595
|
##########################################################################
# Author: rfriedlein, rfriedlein@coredial.com
# Date: February 4th, 2011
# Revised:
#
# BaytechPduBank object class
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
##########################################################################
__doc__="""BaytechPduBank
BaytechPduBank is a component of a BaytechPduDevice Device
$Id: $"""
__version__ = "$Revision: $"[11:-2]
from Globals import DTMLFile
from Globals import InitializeClass
from Products.ZenRelations.RelSchema import *
from Products.ZenModel.ZenossSecurity import ZEN_VIEW, ZEN_CHANGE_SETTINGS
from Products.ZenModel.DeviceComponent import DeviceComponent
from Products.ZenModel.ManagedEntity import ManagedEntity
import logging
log = logging.getLogger('BaytechPduBank')
class BaytechPduBank(DeviceComponent, ManagedEntity):
    """Baytech PDU Bank object"""

    # Zope/Zenoss type identifiers for this component class.
    portal_type = meta_type = 'BaytechPduBank'

    #**************Custom data Variables here from modeling************************
    # bankNumber: index of this bank on the PDU (filled by the modeler)
    # bankState: numeric bank state as modeled from the device
    # bankStateText: human-readable version of bankState
    bankNumber = 0
    bankState = 0
    bankStateText = ''
    #**************END CUSTOM VARIABLES *****************************

    #************* Those should match this list below *******************
    _properties = (
        {'id':'bankNumber', 'type':'int', 'mode':''},
        {'id':'bankState', 'type':'int', 'mode':''},
        {'id':'bankStateText', 'type':'string', 'mode':''},
    )
    #****************

    # NOTE(review): the relation targets module path
    # "ZenPacks.ZenSystems.Baytech..." while this pack appears to live under
    # ZenPacks.Coredial.Baytech -- confirm the path is intentional.
    _relations = (
        ("BaytechPduDevBan", ToOne(ToManyCont,
            "ZenPacks.ZenSystems.Baytech.BaytechPduDevice", "BaytechPduBan")),
    )

    # Zenoss UI wiring: immediate view, graph/template/history menu actions.
    factory_type_information = (
        {
            'id'             : 'BaytechPduBank',
            'meta_type'      : 'BaytechPduBank',
            'description'    : """Baytech PDU Bank info""",
            'product'        : 'BaytechPdu',
            'immediate_view' : 'viewBaytechPduBank',
            'actions'        :
            (
                { 'id'            : 'status'
                , 'name'          : 'Baytech PDU Bank Graphs'
                , 'action'        : 'viewBaytechPduBank'
                , 'permissions'   : (ZEN_VIEW, )
                },
                { 'id'            : 'perfConf'
                , 'name'          : 'Baytech PDU Bank Template'
                , 'action'        : 'objTemplates'
                , 'permissions'   : (ZEN_CHANGE_SETTINGS, )
                },
                { 'id'            : 'viewHistory'
                , 'name'          : 'Modifications'
                , 'action'        : 'viewHistory'
                , 'permissions'   : (ZEN_VIEW, )
                },
            )
        },
    )

    # True adds SAVE & CANCEL buttons to the Details menu (see isUserCreated).
    isUserCreatedFlag = True

    def isUserCreated(self):
        """
        Returns the value of isUserCreated. True adds SAVE & CANCEL buttons to Details menu
        """
        return self.isUserCreatedFlag

    def viewName(self):
        """Pretty version human readable version of this object"""
        # return str( self.bankNumber )
        return self.id

    # use viewName as titleOrId because that method is used to display a human
    # readable version of the object in the breadcrumbs
    titleOrId = name = viewName

    def device(self):
        """Return the containing PDU device via the BaytechPduDevBan relation."""
        return self.BaytechPduDevBan()

    def monitored(self):
        """
        Dummy
        """
        return True


# Register Zope security declarations for this class.
InitializeClass(BaytechPduBank)
|
gpl-3.0
| 5,278,256,024,859,762,000
| 29.991379
| 91
| 0.502921
| false
| 4.141705
| false
| false
| false
|
alexisrolland/data-quality
|
scripts/init/validity.py
|
1
|
3496
|
"""Manage class and methods for data validity indicators."""
import logging
import pandas
from indicator import Indicator
from session import update_session_status
# Load logging configuration
log = logging.getLogger(__name__)
class Validity(Indicator):
    """Class used to compute indicators of type validity."""

    def __init__(self):
        # No state of its own; everything needed comes from the session dict
        # passed to execute() and from the Indicator base class.
        pass

    def execute(self, session: dict):
        """Execute indicator of type validity.

        Runs the full session lifecycle: mark the session Running, verify
        parameters, fetch target data, evaluate validity, compute the session
        result, send an alert e-mail when records breach the threshold, and
        finally mark the session Succeeded.
        """
        # Update session status to running
        session_id = session['id']
        indicator_id = session['indicatorId']
        log.info('Start execution of session Id %i for indicator Id %i.', session_id, indicator_id)
        log.debug('Update session status to Running.')
        update_session_status(session_id, 'Running')

        # Verify if the list of indicator parameters is valid
        indicator_type_id = session['indicatorByIndicatorId']['indicatorTypeId']
        parameters = session['indicatorByIndicatorId']['parametersByIndicatorId']['nodes']
        parameters = super().verify_indicator_parameters(indicator_type_id, parameters)

        # Get target data
        # NOTE(review): the numeric indexes below assume the parameter order
        # produced by verify_indicator_parameters -- confirm in Indicator.
        dimensions = parameters[4]
        measures = parameters[5]
        target = parameters[8]
        target_request = parameters[9]
        target_data = super().get_data_frame(target, target_request, dimensions, measures)

        # Evaluate validity
        alert_operator = parameters[1]  # Alert operator
        alert_threshold = parameters[2]  # Alert threshold
        log.info('Evaluate validity of target data source.')
        result_data = self.evaluate_validity(
            target_data, measures, alert_operator, alert_threshold)

        # Compute session result
        nb_records_alert = super().compute_session_result(
            session_id, alert_operator, alert_threshold, result_data)

        # Send e-mail alert only when at least one record breached the threshold
        if nb_records_alert != 0:
            indicator_name = session['indicatorByIndicatorId']['name']
            distribution_list = parameters[3]  # Distribution list
            super().send_alert(indicator_id, indicator_name, session_id, distribution_list,
                               alert_operator, alert_threshold, nb_records_alert, result_data)

        # Update session status to succeeded
        log.debug('Update session status to Succeeded.')
        update_session_status(session_id, 'Succeeded')
        log.info('Session Id %i for indicator Id %i completed successfully.', session_id, indicator_id)

    def evaluate_validity(self, target_data: pandas.DataFrame, measures: str, alert_operator: str, alert_threshold: str):
        """Compute specificities of validity indicator and return results in a data frame.

        Adds a boolean 'Alert' column that is True for every row where at
        least one measure triggers ``self.is_alert`` against the threshold.
        """
        # No tranformation needed for this data frame
        result_data = target_data
        result_data = result_data.fillna(value=0)  # Replace NaN values per 0

        # Formatting data to improve readability
        for measure in measures:
            result_data[measure] = round(result_data[measure], 2).astype(float)

        # For each record and measure in data frame test if alert must be sent and update alert column
        result_data['Alert'] = False
        for measure in measures:
            for row_num in result_data.index:
                measure_value = result_data.loc[row_num, measure]
                if self.is_alert(measure_value, alert_operator, alert_threshold):
                    result_data.loc[row_num, 'Alert'] = True
        return result_data
|
apache-2.0
| 4,305,772,009,656,806,400
| 43.253165
| 121
| 0.657609
| false
| 4.459184
| false
| false
| false
|
lidavidm/mathics-heroku
|
venv/lib/python2.7/site-packages/sympy/polys/polytools.py
|
1
|
163118
|
"""User-friendly public interface to polynomial functions. """
from sympy.core import (
S, Basic, Expr, I, Integer, Add, Mul, Dummy, Tuple, Rational
)
from sympy.core.mul import _keep_coeff
from sympy.core.basic import preorder_traversal
from sympy.core.sympify import (
sympify, SympifyError,
)
from sympy.core.decorators import (
_sympifyit,
)
from sympy.polys.polyclasses import DMP
from sympy.polys.polyutils import (
basic_from_dict,
_sort_gens,
_unify_gens,
_dict_reorder,
_dict_from_expr,
_parallel_dict_from_expr,
)
from sympy.polys.rationaltools import (
together,
)
from sympy.polys.rootisolation import (
dup_isolate_real_roots_list,
)
from sympy.polys.groebnertools import groebner as _groebner
from sympy.polys.fglmtools import matrix_fglm
from sympy.polys.monomialtools import (
Monomial, monomial_key,
)
from sympy.polys.polyerrors import (
OperationNotSupported, DomainError,
CoercionFailed, UnificationFailed,
GeneratorsNeeded, PolynomialError,
MultivariatePolynomialError,
ExactQuotientFailed,
PolificationFailed,
ComputationFailed,
GeneratorsError,
)
from sympy.utilities import group
import sympy.polys
import sympy.mpmath
from sympy.polys.domains import FF, QQ
from sympy.polys.constructor import construct_domain
from sympy.polys import polyoptions as options
from sympy.core.compatibility import iterable
class Poly(Expr):
"""Generic class for representing polynomial expressions. """
__slots__ = ['rep', 'gens']
is_commutative = True
is_Poly = True
def __new__(cls, rep, *gens, **args):
    """Create a new polynomial instance out of something useful.

    ``rep`` may be a dict (monomial -> coefficient), an iterable of
    coefficients, another Poly, or any sympifiable expression; dispatch
    happens on that shape.
    """
    opt = options.build_options(gens, args)
    if 'order' in opt:
        raise NotImplementedError("'order' keyword is not implemented yet")
    # Strings are iterable but must go through sympify, hence the exclusion.
    if iterable(rep, exclude=str):
        if isinstance(rep, dict):
            return cls._from_dict(rep, opt)
        else:
            return cls._from_list(list(rep), opt)
    else:
        rep = sympify(rep)
        if rep.is_Poly:
            return cls._from_poly(rep, opt)
        else:
            return cls._from_expr(rep, opt)
@classmethod
def new(cls, rep, *gens):
    """Construct :class:`Poly` instance from raw representation.

    ``rep`` must already be a ``DMP`` whose level equals ``len(gens) - 1``;
    no conversion or validation beyond that is performed.
    """
    if not isinstance(rep, DMP):
        raise PolynomialError(
            "invalid polynomial representation: %s" % rep)
    elif rep.lev != len(gens) - 1:
        # The DMP level is the number of generators minus one.
        raise PolynomialError("invalid arguments: %s, %s" % (rep, gens))
    obj = Basic.__new__(cls)
    obj.rep = rep
    obj.gens = gens
    return obj
@classmethod
def from_dict(cls, rep, *gens, **args):
    """Public constructor: build a polynomial out of a ``dict``. """
    return cls._from_dict(rep, options.build_options(gens, args))
@classmethod
def from_list(cls, rep, *gens, **args):
    """Public constructor: build a polynomial out of a ``list``. """
    return cls._from_list(rep, options.build_options(gens, args))
@classmethod
def from_poly(cls, rep, *gens, **args):
    """Public constructor: build a polynomial out of another polynomial. """
    return cls._from_poly(rep, options.build_options(gens, args))
@classmethod
def from_expr(cls, rep, *gens, **args):
    """Public constructor: build a polynomial out of an expression. """
    return cls._from_expr(rep, options.build_options(gens, args))
@classmethod
def _from_dict(cls, rep, opt):
    """Construct a polynomial from a ``dict``.

    Requires generators in ``opt``; the ground domain is inferred from the
    coefficients unless one was given explicitly, in which case every
    coefficient is converted into it (mutating ``rep`` in place).
    """
    gens = opt.gens
    if not gens:
        raise GeneratorsNeeded(
            "can't initialize from 'dict' without generators")
    level = len(gens) - 1
    domain = opt.domain
    if domain is None:
        domain, rep = construct_domain(rep, opt=opt)
    else:
        # Python 2 idiom (dict.iteritems); coerce each coefficient.
        for monom, coeff in rep.iteritems():
            rep[monom] = domain.convert(coeff)
    return cls.new(DMP.from_dict(rep, level, domain), *gens)
@classmethod
def _from_list(cls, rep, opt):
    """Construct a polynomial from a ``list``.

    Only the univariate case is supported, since a flat coefficient list
    has no way to encode multiple generators.
    """
    gens = opt.gens
    if not gens:
        raise GeneratorsNeeded(
            "can't initialize from 'list' without generators")
    elif len(gens) != 1:
        raise MultivariatePolynomialError(
            "'list' representation not supported")
    level = len(gens) - 1
    domain = opt.domain
    if domain is None:
        domain, rep = construct_domain(rep, opt=opt)
    else:
        # Python 2: map returns a list here.
        rep = map(domain.convert, rep)
    return cls.new(DMP.from_list(rep, level, domain), *gens)
@classmethod
def _from_poly(cls, rep, opt):
    """Construct a polynomial from a polynomial.

    Reuses ``rep`` where possible: re-wraps it under ``cls``, reorders
    generators, and re-sets the domain or field as requested by ``opt``;
    falls back to re-parsing the expression when the generator sets differ.
    """
    if cls != rep.__class__:
        rep = cls.new(rep.rep, *rep.gens)
    gens = opt.gens
    field = opt.field
    domain = opt.domain
    if gens and rep.gens != gens:
        if set(rep.gens) != set(gens):
            # Entirely different generators -- rebuild from the expression.
            return cls._from_expr(rep.as_expr(), opt)
        else:
            rep = rep.reorder(*gens)
    # An explicit domain wins over the field flag.
    if 'domain' in opt and domain:
        rep = rep.set_domain(domain)
    elif field is True:
        rep = rep.to_field()
    return rep
@classmethod
def _from_expr(cls, rep, opt):
    """Build a polynomial from an expression via its dict representation. """
    dict_rep, dict_opt = _dict_from_expr(rep, opt)
    return cls._from_dict(dict_rep, dict_opt)
def _hashable_content(self):
"""Allow SymPy to hash Poly instances. """
return (self.rep, self.gens)
def __hash__(self):
return super(Poly, self).__hash__()
@property
def free_symbols(self):
    """
    Free symbols of a polynomial expression.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + 1).free_symbols
    set([x])
    >>> Poly(x**2 + y).free_symbols
    set([x, y])
    >>> Poly(x**2 + y, x).free_symbols
    set([x, y])

    """
    # Union of the free symbols of every generator, plus those hiding
    # in the ground domain.
    gen_symbols = set().union(*[gen.free_symbols for gen in self.gens])
    return gen_symbols | self.free_symbols_in_domain
@property
def free_symbols_in_domain(self):
    """
    Free symbols of the domain of ``self``.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + 1).free_symbols_in_domain
    set()
    >>> Poly(x**2 + y).free_symbols_in_domain
    set()
    >>> Poly(x**2 + y, x).free_symbols_in_domain
    set([y])

    """
    domain = self.rep.dom
    # Pick where domain symbols can come from: the generators of a
    # composite domain, or the coefficients over the expression domain.
    if domain.is_Composite:
        sources = domain.gens
    elif domain.is_EX:
        sources = self.coeffs()
    else:
        sources = []
    collected = set()
    for item in sources:
        collected |= item.free_symbols
    return collected
@property
def args(self):
"""
Don't mess up with the core.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).args
(x**2 + 1,)
"""
return (self.as_expr(),)
@property
def gen(self):
"""
Return the principal generator.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).gen
x
"""
return self.gens[0]
@property
def domain(self):
"""Get the ground domain of ``self``. """
return self.get_domain()
@property
def zero(self):
"""Return zero polynomial with ``self``'s properties. """
return self.new(self.rep.zero(self.rep.lev, self.rep.dom), *self.gens)
@property
def one(self):
"""Return one polynomial with ``self``'s properties. """
return self.new(self.rep.one(self.rep.lev, self.rep.dom), *self.gens)
@property
def unit(self):
"""Return unit polynomial with ``self``'s properties. """
return self.new(self.rep.unit(self.rep.lev, self.rep.dom), *self.gens)
def unify(f, g):
"""
Make ``f`` and ``g`` belong to the same domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f, g = Poly(x/2 + 1), Poly(2*x + 1)
>>> f
Poly(1/2*x + 1, x, domain='QQ')
>>> g
Poly(2*x + 1, x, domain='ZZ')
>>> F, G = f.unify(g)
>>> F
Poly(1/2*x + 1, x, domain='QQ')
>>> G
Poly(2*x + 1, x, domain='QQ')
"""
_, per, F, G = f._unify(g)
return per(F), per(G)
def _unify(f, g):
g = sympify(g)
if not g.is_Poly:
try:
return f.rep.dom, f.per, f.rep, f.rep.per(f.rep.dom.from_sympy(g))
except CoercionFailed:
raise UnificationFailed("can't unify %s with %s" % (f, g))
if isinstance(f.rep, DMP) and isinstance(g.rep, DMP):
gens = _unify_gens(f.gens, g.gens)
dom, lev = f.rep.dom.unify(g.rep.dom, gens), len(gens) - 1
if f.gens != gens:
f_monoms, f_coeffs = _dict_reorder(
f.rep.to_dict(), f.gens, gens)
if f.rep.dom != dom:
f_coeffs = [ dom.convert(c, f.rep.dom) for c in f_coeffs ]
F = DMP(dict(zip(f_monoms, f_coeffs)), dom, lev)
else:
F = f.rep.convert(dom)
if g.gens != gens:
g_monoms, g_coeffs = _dict_reorder(
g.rep.to_dict(), g.gens, gens)
if g.rep.dom != dom:
g_coeffs = [ dom.convert(c, g.rep.dom) for c in g_coeffs ]
G = DMP(dict(zip(g_monoms, g_coeffs)), dom, lev)
else:
G = g.rep.convert(dom)
else:
raise UnificationFailed("can't unify %s with %s" % (f, g))
cls = f.__class__
def per(rep, dom=dom, gens=gens, remove=None):
if remove is not None:
gens = gens[:remove] + gens[remove + 1:]
if not gens:
return dom.to_sympy(rep)
return cls.new(rep, *gens)
return dom, per, F, G
def per(f, rep, gens=None, remove=None):
"""
Create a Poly out of the given representation.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x, y
>>> from sympy.polys.polyclasses import DMP
>>> a = Poly(x**2 + 1)
>>> a.per(DMP([ZZ(1), ZZ(1)], ZZ), gens=[y])
Poly(y + 1, y, domain='ZZ')
"""
if gens is None:
gens = f.gens
if remove is not None:
gens = gens[:remove] + gens[remove + 1:]
if not gens:
return f.rep.dom.to_sympy(rep)
return f.__class__.new(rep, *gens)
def set_domain(f, domain):
"""Set the ground domain of ``f``. """
opt = options.build_options(f.gens, {'domain': domain})
return f.per(f.rep.convert(opt.domain))
def get_domain(f):
"""Get the ground domain of ``f``. """
return f.rep.dom
def set_modulus(f, modulus):
"""
Set the modulus of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(5*x**2 + 2*x - 1, x).set_modulus(2)
Poly(x**2 + 1, x, modulus=2)
"""
modulus = options.Modulus.preprocess(modulus)
return f.set_domain(FF(modulus))
def get_modulus(f):
"""
Get the modulus of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, modulus=2).get_modulus()
2
"""
domain = f.get_domain()
if domain.is_FiniteField:
return Integer(domain.characteristic())
else:
raise PolynomialError("not a polynomial over a Galois field")
def _eval_subs(f, old, new):
"""Internal implementation of :func:`subs`. """
if old in f.gens:
if new.is_number:
return f.eval(old, new)
else:
try:
return f.replace(old, new)
except PolynomialError:
pass
return f.as_expr().subs(old, new)
def exclude(f):
    """
    Remove unnecessary generators from ``f``.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import a, b, c, d, x

    >>> Poly(a + x, a, b, c, d, x).exclude()
    Poly(a + x, a, x, domain='ZZ')

    """
    # The low-level representation reports which generator indices (J)
    # were dropped; keep only the generators that survived.
    J, stripped_rep = f.rep.exclude()
    kept_gens = [gen for j, gen in enumerate(f.gens) if j not in J]
    return f.per(stripped_rep, gens=kept_gens)
def replace(f, x, y=None):
    """
    Replace ``x`` with ``y`` in generators list.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + 1, x).replace(x, y)
    Poly(y**2 + 1, y, domain='ZZ')

    """
    if y is None:
        # Single-argument form: replace the only generator with ``x``.
        if f.is_univariate:
            x, y = f.gen, x
        else:
            raise PolynomialError(
                "syntax supported only in univariate case")
    if x == y:
        return f
    if x in f.gens and y not in f.gens:
        dom = f.get_domain()
        # Refuse when ``y`` already appears in a composite ground domain,
        # which would silently merge two distinct symbols.
        if not dom.is_Composite or y not in dom.gens:
            gens = list(f.gens)
            gens[gens.index(x)] = y
            return f.per(f.rep, gens=gens)
    raise PolynomialError("can't replace %s with %s in %s" % (x, y, f))
def reorder(f, *gens, **args):
"""
Efficiently apply new order of generators.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + x*y**2, x, y).reorder(y, x)
Poly(y**2*x + x**2, y, x, domain='ZZ')
"""
opt = options.Options((), args)
if not gens:
gens = _sort_gens(f.gens, opt=opt)
elif set(f.gens) != set(gens):
raise PolynomialError(
"generators list can differ only up to order of elements")
rep = dict(zip(*_dict_reorder(f.rep.to_dict(), f.gens, gens)))
return f.per(DMP(rep, f.rep.dom, len(gens) - 1), gens=gens)
def ltrim(f, gen):
"""
Remove dummy generators from the "left" of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> Poly(y**2 + y*z**2, x, y, z).ltrim(y)
Poly(y**2 + y*z**2, y, z, domain='ZZ')
"""
rep = f.as_dict(native=True)
j = f._gen_to_level(gen)
terms = {}
for monom, coeff in rep.iteritems():
monom = monom[j:]
if monom not in terms:
terms[monom] = coeff
else:
raise PolynomialError("can't left trim %s" % f)
gens = f.gens[j:]
return f.new(DMP.from_dict(terms, len(gens) - 1, f.rep.dom), *gens)
def has_only_gens(f, *gens):
    """
    Return ``True`` if ``Poly(f, *gens)`` retains ground domain.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y, z

    >>> Poly(x*y + 1, x, y, z).has_only_gens(x, y)
    True
    >>> Poly(x*y + z, x, y, z).has_only_gens(x, y)
    False

    """
    f_gens = list(f.gens)
    # Map each requested generator to its position in f.gens,
    # rejecting anything f does not actually have.
    allowed = set([])
    for gen in gens:
        if gen not in f_gens:
            raise GeneratorsError(
                "%s doesn't have %s as generator" % (f, gen))
        allowed.add(f_gens.index(gen))
    # f uses only the allowed generators iff no monomial has a non-zero
    # exponent at a disallowed position.
    return not any(elt and i not in allowed
                   for monom in f.monoms()
                   for i, elt in enumerate(monom))
def to_ring(f):
"""
Make the ground domain a ring.
Examples
========
>>> from sympy import Poly, QQ
>>> from sympy.abc import x
>>> Poly(x**2 + 1, domain=QQ).to_ring()
Poly(x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'to_ring'):
result = f.rep.to_ring()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_ring')
return f.per(result)
def to_field(f):
"""
Make the ground domain a field.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x, domain=ZZ).to_field()
Poly(x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'to_field'):
result = f.rep.to_field()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_field')
return f.per(result)
def to_exact(f):
"""
Make the ground domain exact.
Examples
========
>>> from sympy import Poly, RR
>>> from sympy.abc import x
>>> Poly(x**2 + 1.0, x, domain=RR).to_exact()
Poly(x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'to_exact'):
result = f.rep.to_exact()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_exact')
return f.per(result)
def retract(f, field=None):
"""
Recalculate the ground domain of a polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2 + 1, x, domain='QQ[y]')
>>> f
Poly(x**2 + 1, x, domain='QQ[y]')
>>> f.retract()
Poly(x**2 + 1, x, domain='ZZ')
>>> f.retract(field=True)
Poly(x**2 + 1, x, domain='QQ')
"""
dom, rep = construct_domain(f.as_dict(zero=True), field=field)
return f.from_dict(rep, f.gens, domain=dom)
def slice(f, x, m, n=None):
"""Take a continuous subsequence of terms of ``f``. """
if n is None:
j, m, n = 0, x, m
else:
j = f._gen_to_level(x)
m, n = int(m), int(n)
if hasattr(f.rep, 'slice'):
result = f.rep.slice(m, n, j)
else: # pragma: no cover
raise OperationNotSupported(f, 'slice')
return f.per(result)
def coeffs(f, order=None):
"""
Returns all non-zero coefficients from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x + 3, x).coeffs()
[1, 2, 3]
See Also
========
all_coeffs
coeff_monomial
nth
"""
return [ f.rep.dom.to_sympy(c) for c in f.rep.coeffs(order=order) ]
def monoms(f, order=None):
"""
Returns all non-zero monomials from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).monoms()
[(2, 0), (1, 2), (1, 1), (0, 1)]
See Also
========
all_monoms
"""
return f.rep.monoms(order=order)
def terms(f, order=None):
"""
Returns all non-zero terms from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).terms()
[((2, 0), 1), ((1, 2), 2), ((1, 1), 1), ((0, 1), 3)]
See Also
========
all_terms
"""
return [ (m, f.rep.dom.to_sympy(c)) for m, c in f.rep.terms(order=order) ]
def all_coeffs(f):
"""
Returns all coefficients from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_coeffs()
[1, 0, 2, -1]
"""
return [ f.rep.dom.to_sympy(c) for c in f.rep.all_coeffs() ]
def all_monoms(f):
"""
Returns all monomials from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_monoms()
[(3,), (2,), (1,), (0,)]
See Also
========
all_terms
"""
return f.rep.all_monoms()
def all_terms(f):
"""
Returns all terms from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_terms()
[((3,), 1), ((2,), 0), ((1,), 2), ((0,), -1)]
"""
return [ (m, f.rep.dom.to_sympy(c)) for m, c in f.rep.all_terms() ]
def termwise(f, func, *gens, **args):
    """
    Apply a function to all terms of ``f``.

    ``func(monom, coeff)`` may return either a new ``(monom, coeff)`` pair
    or just a new coefficient; terms whose coefficient maps to a falsy
    value are dropped.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> def func((k,), coeff):
    ...     return coeff//10**(2-k)

    >>> Poly(x**2 + 20*x + 400).termwise(func)
    Poly(x**2 + 2*x + 4, x, domain='ZZ')

    """
    # NOTE: the doctest above uses Python 2 tuple-parameter syntax.
    terms = {}

    for monom, coeff in f.terms():
        result = func(monom, coeff)

        if isinstance(result, tuple):
            monom, coeff = result
        else:
            coeff = result

        if coeff:
            if monom not in terms:
                terms[monom] = coeff
            else:
                # Two input terms mapped onto the same monomial -- ambiguous.
                raise PolynomialError(
                    "%s monomial was generated twice" % monom)

    return f.from_dict(terms, *(gens or f.gens), **args)
def length(f):
    """
    Returns the number of non-zero terms in ``f``.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 + 2*x - 1).length()
    3

    """
    # The dict representation holds only non-zero terms, so its size
    # is exactly the term count.
    return len(f.as_dict())
def as_dict(f, native=False, zero=False):
    """
    Switch to a ``dict`` representation.

    With ``native=True`` the coefficients stay in the ground-domain
    representation instead of being converted to SymPy objects.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + 2*x*y**2 - y, x, y).as_dict()
    {(0, 1): -1, (1, 2): 2, (2, 0): 1}

    """
    converter = f.rep.to_dict if native else f.rep.to_sympy_dict
    return converter(zero=zero)
def as_list(f, native=False):
    """Switch to a ``list`` representation (native or SymPy coefficients). """
    return f.rep.to_list() if native else f.rep.to_sympy_list()
def as_expr(f, *gens):
"""
Convert a Poly instance to an Expr instance.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2 + 2*x*y**2 - y, x, y)
>>> f.as_expr()
x**2 + 2*x*y**2 - y
>>> f.as_expr({x: 5})
10*y**2 - y + 25
>>> f.as_expr(5, 6)
379
"""
if not gens:
gens = f.gens
elif len(gens) == 1 and isinstance(gens[0], dict):
mapping = gens[0]
gens = list(f.gens)
for gen, value in mapping.iteritems():
try:
index = gens.index(gen)
except ValueError:
raise GeneratorsError(
"%s doesn't have %s as generator" % (f, gen))
else:
gens[index] = value
return basic_from_dict(f.rep.to_sympy_dict(), *gens)
def lift(f):
"""
Convert algebraic coefficients to rationals.
Examples
========
>>> from sympy import Poly, I
>>> from sympy.abc import x
>>> Poly(x**2 + I*x + 1, x, extension=I).lift()
Poly(x**4 + 3*x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'lift'):
result = f.rep.lift()
else: # pragma: no cover
raise OperationNotSupported(f, 'lift')
return f.per(result)
def deflate(f):
"""
Reduce degree of ``f`` by mapping ``x_i**m`` to ``y_i``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**6*y**2 + x**3 + 1, x, y).deflate()
((3, 2), Poly(x**2*y + x + 1, x, y, domain='ZZ'))
"""
if hasattr(f.rep, 'deflate'):
J, result = f.rep.deflate()
else: # pragma: no cover
raise OperationNotSupported(f, 'deflate')
return J, f.per(result)
def inject(f, front=False):
"""
Inject ground domain generators into ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2*y + x*y**3 + x*y + 1, x)
>>> f.inject()
Poly(x**2*y + x*y**3 + x*y + 1, x, y, domain='ZZ')
>>> f.inject(front=True)
Poly(y**3*x + y*x**2 + y*x + 1, y, x, domain='ZZ')
"""
dom = f.rep.dom
if dom.is_Numerical:
return f
elif not dom.is_Poly:
raise DomainError("can't inject generators over %s" % dom)
if hasattr(f.rep, 'inject'):
result = f.rep.inject(front=front)
else: # pragma: no cover
raise OperationNotSupported(f, 'inject')
if front:
gens = dom.gens + f.gens
else:
gens = f.gens + dom.gens
return f.new(result, *gens)
def eject(f, *gens):
"""
Eject selected generators into the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2*y + x*y**3 + x*y + 1, x, y)
>>> f.eject(x)
Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
>>> f.eject(y)
Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')
"""
dom = f.rep.dom
if not dom.is_Numerical:
raise DomainError("can't eject generators over %s" % dom)
n, k = len(f.gens), len(gens)
if f.gens[:k] == gens:
_gens, front = f.gens[k:], True
elif f.gens[-k:] == gens:
_gens, front = f.gens[:-k], False
else:
raise NotImplementedError(
"can only eject front or back generators")
dom = dom.inject(*gens)
if hasattr(f.rep, 'eject'):
result = f.rep.eject(dom, front=front)
else: # pragma: no cover
raise OperationNotSupported(f, 'eject')
return f.new(result, *_gens)
def terms_gcd(f):
"""
Remove GCD of terms from the polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**6*y**2 + x**3*y, x, y).terms_gcd()
((3, 1), Poly(x**3*y + 1, x, y, domain='ZZ'))
"""
if hasattr(f.rep, 'terms_gcd'):
J, result = f.rep.terms_gcd()
else: # pragma: no cover
raise OperationNotSupported(f, 'terms_gcd')
return J, f.per(result)
def add_ground(f, coeff):
"""
Add an element of the ground domain to ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).add_ground(2)
Poly(x + 3, x, domain='ZZ')
"""
if hasattr(f.rep, 'add_ground'):
result = f.rep.add_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'add_ground')
return f.per(result)
def sub_ground(f, coeff):
"""
Subtract an element of the ground domain from ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).sub_ground(2)
Poly(x - 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'sub_ground'):
result = f.rep.sub_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'sub_ground')
return f.per(result)
def mul_ground(f, coeff):
"""
Multiply ``f`` by a an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).mul_ground(2)
Poly(2*x + 2, x, domain='ZZ')
"""
if hasattr(f.rep, 'mul_ground'):
result = f.rep.mul_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'mul_ground')
return f.per(result)
def quo_ground(f, coeff):
"""
Quotient of ``f`` by a an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x + 4).quo_ground(2)
Poly(x + 2, x, domain='ZZ')
>>> Poly(2*x + 3).quo_ground(2)
Poly(x + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'quo_ground'):
result = f.rep.quo_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'quo_ground')
return f.per(result)
def exquo_ground(f, coeff):
"""
Exact quotient of ``f`` by a an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x + 4).exquo_ground(2)
Poly(x + 2, x, domain='ZZ')
>>> Poly(2*x + 3).exquo_ground(2)
Traceback (most recent call last):
...
ExactQuotientFailed: 2 does not divide 3 in ZZ
"""
if hasattr(f.rep, 'exquo_ground'):
result = f.rep.exquo_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'exquo_ground')
return f.per(result)
def abs(f):
"""
Make all coefficients in ``f`` positive.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).abs()
Poly(x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'abs'):
result = f.rep.abs()
else: # pragma: no cover
raise OperationNotSupported(f, 'abs')
return f.per(result)
def neg(f):
"""
Negate all coefficients in ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).neg()
Poly(-x**2 + 1, x, domain='ZZ')
>>> -Poly(x**2 - 1, x)
Poly(-x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'neg'):
result = f.rep.neg()
else: # pragma: no cover
raise OperationNotSupported(f, 'neg')
return f.per(result)
def add(f, g):
"""
Add two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).add(Poly(x - 2, x))
Poly(x**2 + x - 1, x, domain='ZZ')
>>> Poly(x**2 + 1, x) + Poly(x - 2, x)
Poly(x**2 + x - 1, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.add_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'add'):
result = F.add(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'add')
return per(result)
def sub(f, g):
"""
Subtract two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).sub(Poly(x - 2, x))
Poly(x**2 - x + 3, x, domain='ZZ')
>>> Poly(x**2 + 1, x) - Poly(x - 2, x)
Poly(x**2 - x + 3, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.sub_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'sub'):
result = F.sub(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'sub')
return per(result)
def mul(f, g):
"""
Multiply two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).mul(Poly(x - 2, x))
Poly(x**3 - 2*x**2 + x - 2, x, domain='ZZ')
>>> Poly(x**2 + 1, x)*Poly(x - 2, x)
Poly(x**3 - 2*x**2 + x - 2, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.mul_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'mul'):
result = F.mul(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'mul')
return per(result)
def sqr(f):
"""
Square a polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x - 2, x).sqr()
Poly(x**2 - 4*x + 4, x, domain='ZZ')
>>> Poly(x - 2, x)**2
Poly(x**2 - 4*x + 4, x, domain='ZZ')
"""
if hasattr(f.rep, 'sqr'):
result = f.rep.sqr()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqr')
return f.per(result)
def pow(f, n):
"""
Raise ``f`` to a non-negative power ``n``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x - 2, x).pow(3)
Poly(x**3 - 6*x**2 + 12*x - 8, x, domain='ZZ')
>>> Poly(x - 2, x)**3
Poly(x**3 - 6*x**2 + 12*x - 8, x, domain='ZZ')
"""
n = int(n)
if hasattr(f.rep, 'pow'):
result = f.rep.pow(n)
else: # pragma: no cover
raise OperationNotSupported(f, 'pow')
return f.per(result)
def pdiv(f, g):
"""
Polynomial pseudo-division of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).pdiv(Poly(2*x - 4, x))
(Poly(2*x + 4, x, domain='ZZ'), Poly(20, x, domain='ZZ'))
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pdiv'):
q, r = F.pdiv(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'pdiv')
return per(q), per(r)
def prem(f, g):
"""
Polynomial pseudo-remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).prem(Poly(2*x - 4, x))
Poly(20, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'prem'):
result = F.prem(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'prem')
return per(result)
def pquo(f, g):
"""
Polynomial pseudo-quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).pquo(Poly(2*x - 4, x))
Poly(2*x + 4, x, domain='ZZ')
>>> Poly(x**2 - 1, x).pquo(Poly(2*x - 2, x))
Poly(2*x + 2, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pquo'):
result = F.pquo(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'pquo')
return per(result)
def pexquo(f, g):
"""
Polynomial exact pseudo-quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).pexquo(Poly(2*x - 2, x))
Poly(2*x + 2, x, domain='ZZ')
>>> Poly(x**2 + 1, x).pexquo(Poly(2*x - 4, x))
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pexquo'):
try:
result = F.pexquo(G)
except ExactQuotientFailed, exc:
raise exc.new(f.as_expr(), g.as_expr())
else: # pragma: no cover
raise OperationNotSupported(f, 'pexquo')
return per(result)
def div(f, g, auto=True):
"""
Polynomial division with remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x))
(Poly(1/2*x + 1, x, domain='QQ'), Poly(5, x, domain='QQ'))
>>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x), auto=False)
(Poly(0, x, domain='ZZ'), Poly(x**2 + 1, x, domain='ZZ'))
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'div'):
q, r = F.div(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'div')
if retract:
try:
Q, R = q.to_ring(), r.to_ring()
except CoercionFailed:
pass
else:
q, r = Q, R
return per(q), per(r)
def rem(f, g, auto=True):
"""
Computes the polynomial remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly, ZZ, QQ
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x))
Poly(5, x, domain='ZZ')
>>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x), auto=False)
Poly(x**2 + 1, x, domain='ZZ')
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'rem'):
r = F.rem(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'rem')
if retract:
try:
r = r.to_ring()
except CoercionFailed:
pass
return per(r)
def quo(f, g, auto=True):
"""
Computes polynomial quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).quo(Poly(2*x - 4, x))
Poly(1/2*x + 1, x, domain='QQ')
>>> Poly(x**2 - 1, x).quo(Poly(x - 1, x))
Poly(x + 1, x, domain='ZZ')
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'quo'):
q = F.quo(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'quo')
if retract:
try:
q = q.to_ring()
except CoercionFailed:
pass
return per(q)
def exquo(f, g, auto=True):
"""
Computes polynomial exact quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).exquo(Poly(x - 1, x))
Poly(x + 1, x, domain='ZZ')
>>> Poly(x**2 + 1, x).exquo(Poly(2*x - 4, x))
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'exquo'):
try:
q = F.exquo(G)
except ExactQuotientFailed, exc:
raise exc.new(f.as_expr(), g.as_expr())
else: # pragma: no cover
raise OperationNotSupported(f, 'exquo')
if retract:
try:
q = q.to_ring()
except CoercionFailed:
pass
return per(q)
def _gen_to_level(f, gen):
"""Returns level associated with the given generator. """
if isinstance(gen, int):
length = len(f.gens)
if -length <= gen < length:
if gen < 0:
return length + gen
else:
return gen
else:
raise PolynomialError("-%s <= gen < %s expected, got %s" %
(length, length, gen))
else:
try:
return list(f.gens).index(sympify(gen))
except ValueError:
raise PolynomialError(
"a valid generator expected, got %s" % gen)
def degree(f, gen=0):
"""
Returns degree of ``f`` in ``x_j``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).degree()
2
>>> Poly(x**2 + y*x + y, x, y).degree(y)
1
"""
j = f._gen_to_level(gen)
if hasattr(f.rep, 'degree'):
return f.rep.degree(j)
else: # pragma: no cover
raise OperationNotSupported(f, 'degree')
def degree_list(f):
"""
Returns a list of degrees of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).degree_list()
(2, 1)
"""
if hasattr(f.rep, 'degree_list'):
return f.rep.degree_list()
else: # pragma: no cover
raise OperationNotSupported(f, 'degree_list')
def total_degree(f):
"""
Returns the total degree of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).total_degree()
2
>>> Poly(x + y**5, x, y).total_degree()
5
"""
if hasattr(f.rep, 'total_degree'):
return f.rep.total_degree()
else: # pragma: no cover
raise OperationNotSupported(f, 'total_degree')
def homogeneous_order(f):
"""
Returns the homogeneous order of ``f``.
A homogeneous polynomial is a polynomial whose all monomials with
non-zero coefficients have the same total degree. This degree is
the homogeneous order of ``f``. If you only want to check if a
polynomial is homogeneous, then use :func:`Poly.is_homogeneous`.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**5 + 2*x**3*y**2 + 9*x*y**4)
>>> f.homogeneous_order()
5
"""
if hasattr(f.rep, 'homogeneous_order'):
return f.rep.homogeneous_order()
else: # pragma: no cover
raise OperationNotSupported(f, 'homogeneous_order')
def LC(f, order=None):
"""
Returns the leading coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(4*x**3 + 2*x**2 + 3*x, x).LC()
4
"""
if order is not None:
return f.coeffs(order)[0]
if hasattr(f.rep, 'LC'):
result = f.rep.LC()
else: # pragma: no cover
raise OperationNotSupported(f, 'LC')
return f.rep.dom.to_sympy(result)
def TC(f):
"""
Returns the trailing coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x**2 + 3*x, x).TC()
0
"""
if hasattr(f.rep, 'TC'):
result = f.rep.TC()
else: # pragma: no cover
raise OperationNotSupported(f, 'TC')
return f.rep.dom.to_sympy(result)
def EC(f, order=None):
"""
Returns the last non-zero coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x**2 + 3*x, x).EC()
3
"""
if hasattr(f.rep, 'coeffs'):
return f.coeffs(order)[-1]
else: # pragma: no cover
raise OperationNotSupported(f, 'EC')
    def coeff_monomial(f, monom):
        """
        Returns the coefficient of ``monom`` in ``f`` if there, else None.

        Examples
        ========

        >>> from sympy import Poly, exp
        >>> from sympy.abc import x, y

        >>> p = Poly(24*x*y*exp(8) + 23*x, x, y)

        >>> p.coeff_monomial(x)
        23
        >>> p.coeff_monomial(y)
        0
        >>> p.coeff_monomial(x*y)
        24*exp(8)

        Note that ``Expr.coeff()`` behaves differently, collecting terms
        if possible; the Poly must be converted to an Expr to use that
        method, however:

        >>> p.as_expr().coeff(x)
        24*y*exp(8) + 23
        >>> p.as_expr().coeff(y)
        24*x*exp(8)
        >>> p.as_expr().coeff(x*y)
        24*exp(8)

        See Also
        ========
        nth: more efficient query using exponents of the monomial's generators

        """
        # Convert the monomial to its exponent vector over f.gens and
        # delegate to nth(), which does the actual coefficient lookup.
        return f.nth(*Monomial(monom, f.gens).exponents)
def nth(f, *N):
"""
Returns the ``n``-th coefficient of ``f`` where ``N`` are the
exponents of the generators in the term of interest.
Examples
========
>>> from sympy import Poly, sqrt
>>> from sympy.abc import x, y
>>> Poly(x**3 + 2*x**2 + 3*x, x).nth(2)
2
>>> Poly(x**3 + 2*x*y**2 + y**2, x, y).nth(1, 2)
2
>>> Poly(4*sqrt(x)*y)
Poly(4*y*sqrt(x), y, sqrt(x), domain='ZZ')
>>> _.nth(1, 1)
4
See Also
========
coeff_monomial
"""
if hasattr(f.rep, 'nth'):
result = f.rep.nth(*map(int, N))
else: # pragma: no cover
raise OperationNotSupported(f, 'nth')
return f.rep.dom.to_sympy(result)
def coeff(f, x, n=1, right=False):
# the semantics of coeff_monomial and Expr.coeff are different;
# if someone is working with a Poly, they should be aware of the
# differences and chose the method best suited for the query.
# Alternatively, a pure-polys method could be written here but
# at this time the ``right`` keyword would be ignored because Poly
# doesn't work with non-commutatives.
raise NotImplementedError(
'Either convert to Expr with `as_expr` method '
'to use Expr\'s coeff method or else use the '
'`coeff_monomial` method of Polys.')
def LM(f, order=None):
"""
Returns the leading monomial of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LM()
x**2*y**0
"""
return Monomial(f.monoms(order)[0], f.gens)
def EM(f, order=None):
"""
Returns the last non-zero monomial of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).EM()
x**0*y**1
"""
return Monomial(f.monoms(order)[-1], f.gens)
def LT(f, order=None):
"""
Returns the leading term of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LT()
(x**2*y**0, 4)
"""
monom, coeff = f.terms(order)[0]
return Monomial(monom, f.gens), coeff
def ET(f, order=None):
"""
Returns the last non-zero term of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).ET()
(x**0*y**1, 3)
"""
monom, coeff = f.terms(order)[-1]
return Monomial(monom, f.gens), coeff
def max_norm(f):
"""
Returns maximum norm of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(-x**2 + 2*x - 3, x).max_norm()
3
"""
if hasattr(f.rep, 'max_norm'):
result = f.rep.max_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'max_norm')
return f.rep.dom.to_sympy(result)
def l1_norm(f):
"""
Returns l1 norm of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(-x**2 + 2*x - 3, x).l1_norm()
6
"""
if hasattr(f.rep, 'l1_norm'):
result = f.rep.l1_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'l1_norm')
return f.rep.dom.to_sympy(result)
    def clear_denoms(f, convert=False):
        """
        Clear denominators, but keep the ground domain.

        Returns ``(coeff, g)`` such that ``coeff*f == g`` and ``g`` has no
        denominators.  With ``convert=True``, ``g`` is also converted to
        the associated ring (e.g. QQ -> ZZ).

        Examples
        ========

        >>> from sympy import Poly, S, QQ
        >>> from sympy.abc import x

        >>> f = Poly(x/2 + S(1)/3, x, domain=QQ)

        >>> f.clear_denoms()
        (6, Poly(3*x + 2, x, domain='QQ'))
        >>> f.clear_denoms(convert=True)
        (6, Poly(3*x + 2, x, domain='ZZ'))

        """
        # Over a ring there are no denominators to clear.
        if not f.rep.dom.has_Field:
            return S.One, f

        dom = f.get_domain()

        # Convert the multiplier into the associated ring when one exists,
        # so e.g. over QQ the returned coefficient is an integer.
        if dom.has_assoc_Ring:
            dom = f.rep.dom.get_ring()

        if hasattr(f.rep, 'clear_denoms'):
            coeff, result = f.rep.clear_denoms()
        else:  # pragma: no cover
            raise OperationNotSupported(f, 'clear_denoms')

        coeff, f = dom.to_sympy(coeff), f.per(result)

        if not convert:
            return coeff, f
        else:
            return coeff, f.to_ring()
def rat_clear_denoms(f, g):
"""
Clear denominators in a rational function ``f/g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2/y + 1, x)
>>> g = Poly(x**3 + y, x)
>>> p, q = f.rat_clear_denoms(g)
>>> p
Poly(x**2 + y, x, domain='ZZ[y]')
>>> q
Poly(y*x**3 + y**2, x, domain='ZZ[y]')
"""
dom, per, f, g = f._unify(g)
f = per(f)
g = per(g)
if not (dom.has_Field and dom.has_assoc_Ring):
return f, g
a, f = f.clear_denoms(convert=True)
b, g = g.clear_denoms(convert=True)
f = f.mul_ground(b)
g = g.mul_ground(a)
return f, g
def integrate(f, *specs, **args):
"""
Computes indefinite integral of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x + 1, x).integrate()
Poly(1/3*x**3 + x**2 + x, x, domain='QQ')
>>> Poly(x*y**2 + x, x, y).integrate((0, 1), (1, 0))
Poly(1/2*x**2*y**2 + 1/2*x**2, x, y, domain='QQ')
"""
if args.get('auto', True) and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'integrate'):
if not specs:
return f.per(f.rep.integrate(m=1))
rep = f.rep
for spec in specs:
if type(spec) is tuple:
gen, m = spec
else:
gen, m = spec, 1
rep = rep.integrate(int(m), f._gen_to_level(gen))
return f.per(rep)
else: # pragma: no cover
raise OperationNotSupported(f, 'integrate')
def diff(f, *specs):
"""
Computes partial derivative of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x + 1, x).diff()
Poly(2*x + 2, x, domain='ZZ')
>>> Poly(x*y**2 + x, x, y).diff((0, 0), (1, 1))
Poly(2*x*y, x, y, domain='ZZ')
"""
if hasattr(f.rep, 'diff'):
if not specs:
return f.per(f.rep.diff(m=1))
rep = f.rep
for spec in specs:
if type(spec) is tuple:
gen, m = spec
else:
gen, m = spec, 1
rep = rep.diff(int(m), f._gen_to_level(gen))
return f.per(rep)
else: # pragma: no cover
raise OperationNotSupported(f, 'diff')
    def eval(f, x, a=None, auto=True):
        """
        Evaluate ``f`` at ``a`` in the given variable.

        ``x`` may be a generator (or its index) with value ``a``, a dict
        mapping generators to values, or a tuple/list of values matched
        against ``f.gens`` in order.  Each substituted generator is removed
        from the result; evaluating all generators yields a plain value.

        Examples
        ========

        >>> from sympy import Poly
        >>> from sympy.abc import x, y, z

        >>> Poly(x**2 + 2*x + 3, x).eval(2)
        11

        >>> Poly(2*x*y + 3*x + y + 2, x, y).eval(x, 2)
        Poly(5*y + 8, y, domain='ZZ')

        >>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z)

        >>> f.eval({x: 2})
        Poly(5*y + 2*z + 6, y, z, domain='ZZ')
        >>> f.eval({x: 2, y: 5})
        Poly(2*z + 31, z, domain='ZZ')
        >>> f.eval({x: 2, y: 5, z: 7})
        45

        >>> f.eval((2, 5))
        Poly(2*z + 31, z, domain='ZZ')
        >>> f(2, 5)
        Poly(2*z + 31, z, domain='ZZ')

        """
        if a is None:
            if isinstance(x, dict):
                # Substitute each generator in turn via recursive calls.
                mapping = x

                for gen, value in mapping.iteritems():
                    f = f.eval(gen, value)

                return f
            elif isinstance(x, (tuple, list)):
                # Positional values are matched against f.gens in order.
                values = x

                if len(values) > len(f.gens):
                    raise ValueError("too many values provided")

                for gen, value in zip(f.gens, values):
                    f = f.eval(gen, value)

                return f
            else:
                # Single value: evaluate in the main (first) generator.
                j, a = 0, x
        else:
            j = f._gen_to_level(x)

        if not hasattr(f.rep, 'eval'):  # pragma: no cover
            raise OperationNotSupported(f, 'eval')

        try:
            result = f.rep.eval(a, j)
        except CoercionFailed:
            # ``a`` does not fit the current domain; with auto=True, build
            # a domain containing ``a``, unify, and retry the evaluation.
            if not auto:
                raise DomainError("can't evaluate at %s in %s" % (a, f.rep.dom))
            else:
                a_domain, [a] = construct_domain([a])
                new_domain = f.get_domain().unify(a_domain, gens=f.gens)

                f = f.set_domain(new_domain)
                a = new_domain.convert(a, a_domain)

                result = f.rep.eval(a, j)

        # remove=j drops the substituted generator from the result.
        return f.per(result, remove=j)
def __call__(f, *values):
"""
Evaluate ``f`` at the give values.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z)
>>> f(2)
Poly(5*y + 2*z + 6, y, z, domain='ZZ')
>>> f(2, 5)
Poly(2*z + 31, z, domain='ZZ')
>>> f(2, 5, 7)
45
"""
return f.eval(values)
def half_gcdex(f, g, auto=True):
"""
Half extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15
>>> g = x**3 + x**2 - 4*x - 4
>>> Poly(f).half_gcdex(Poly(g))
(Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(x + 1, x, domain='QQ'))
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'half_gcdex'):
s, h = F.half_gcdex(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'half_gcdex')
return per(s), per(h)
def gcdex(f, g, auto=True):
"""
Extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15
>>> g = x**3 + x**2 - 4*x - 4
>>> Poly(f).gcdex(Poly(g))
(Poly(-1/5*x + 3/5, x, domain='QQ'),
Poly(1/5*x**2 - 6/5*x + 2, x, domain='QQ'),
Poly(x + 1, x, domain='QQ'))
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'gcdex'):
s, t, h = F.gcdex(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'gcdex')
return per(s), per(t), per(h)
def invert(f, g, auto=True):
"""
Invert ``f`` modulo ``g`` when possible.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).invert(Poly(2*x - 1, x))
Poly(-4/3, x, domain='QQ')
>>> Poly(x**2 - 1, x).invert(Poly(x - 1, x))
Traceback (most recent call last):
...
NotInvertible: zero divisor
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'invert'):
result = F.invert(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'invert')
return per(result)
def revert(f, n):
"""Compute ``f**(-1)`` mod ``x**n``. """
if hasattr(f.rep, 'revert'):
result = f.rep.revert(int(n))
else: # pragma: no cover
raise OperationNotSupported(f, 'revert')
return f.per(result)
def subresultants(f, g):
"""
Computes the subresultant PRS of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).subresultants(Poly(x**2 - 1, x))
[Poly(x**2 + 1, x, domain='ZZ'),
Poly(x**2 - 1, x, domain='ZZ'),
Poly(-2, x, domain='ZZ')]
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'subresultants'):
result = F.subresultants(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'subresultants')
return map(per, result)
def resultant(f, g, includePRS=False):
"""
Computes the resultant of ``f`` and ``g`` via PRS.
If includePRS=True, it includes the subresultant PRS in the result.
Because the PRS is used to calculate the resultant, this is more
efficient than calling :func:`subresultants` separately.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = Poly(x**2 + 1, x)
>>> f.resultant(Poly(x**2 - 1, x))
4
>>> f.resultant(Poly(x**2 - 1, x), includePRS=True)
(4, [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'),
Poly(-2, x, domain='ZZ')])
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'resultant'):
if includePRS:
result, R = F.resultant(G, includePRS=includePRS)
else:
result = F.resultant(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'resultant')
if includePRS:
return (per(result, remove=0), map(per, R))
return per(result, remove=0)
def discriminant(f):
"""
Computes the discriminant of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 2*x + 3, x).discriminant()
-8
"""
if hasattr(f.rep, 'discriminant'):
result = f.rep.discriminant()
else: # pragma: no cover
raise OperationNotSupported(f, 'discriminant')
return f.per(result, remove=0)
def cofactors(f, g):
"""
Returns the GCD of ``f`` and ``g`` and their cofactors.
Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors
of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).cofactors(Poly(x**2 - 3*x + 2, x))
(Poly(x - 1, x, domain='ZZ'),
Poly(x + 1, x, domain='ZZ'),
Poly(x - 2, x, domain='ZZ'))
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'cofactors'):
h, cff, cfg = F.cofactors(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'cofactors')
return per(h), per(cff), per(cfg)
def gcd(f, g):
"""
Returns the polynomial GCD of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).gcd(Poly(x**2 - 3*x + 2, x))
Poly(x - 1, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'gcd'):
result = F.gcd(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'gcd')
return per(result)
def lcm(f, g):
"""
Returns polynomial LCM of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).lcm(Poly(x**2 - 3*x + 2, x))
Poly(x**3 - 2*x**2 - x + 2, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'lcm'):
result = F.lcm(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'lcm')
return per(result)
def trunc(f, p):
"""
Reduce ``f`` modulo a constant ``p``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**3 + 3*x**2 + 5*x + 7, x).trunc(3)
Poly(-x**3 - x + 1, x, domain='ZZ')
"""
p = f.rep.dom.convert(p)
if hasattr(f.rep, 'trunc'):
result = f.rep.trunc(p)
else: # pragma: no cover
raise OperationNotSupported(f, 'trunc')
return f.per(result)
def monic(f, auto=True):
"""
Divides all coefficients by ``LC(f)``.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x
>>> Poly(3*x**2 + 6*x + 9, x, domain=ZZ).monic()
Poly(x**2 + 2*x + 3, x, domain='QQ')
>>> Poly(3*x**2 + 4*x + 2, x, domain=ZZ).monic()
Poly(x**2 + 4/3*x + 2/3, x, domain='QQ')
"""
if auto and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'monic'):
result = f.rep.monic()
else: # pragma: no cover
raise OperationNotSupported(f, 'monic')
return f.per(result)
def content(f):
"""
Returns the GCD of polynomial coefficients.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(6*x**2 + 8*x + 12, x).content()
2
"""
if hasattr(f.rep, 'content'):
result = f.rep.content()
else: # pragma: no cover
raise OperationNotSupported(f, 'content')
return f.rep.dom.to_sympy(result)
def primitive(f):
"""
Returns the content and a primitive form of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**2 + 8*x + 12, x).primitive()
(2, Poly(x**2 + 4*x + 6, x, domain='ZZ'))
"""
if hasattr(f.rep, 'primitive'):
cont, result = f.rep.primitive()
else: # pragma: no cover
raise OperationNotSupported(f, 'primitive')
return f.rep.dom.to_sympy(cont), f.per(result)
def compose(f, g):
"""
Computes the functional composition of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + x, x).compose(Poly(x - 1, x))
Poly(x**2 - x, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'compose'):
result = F.compose(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'compose')
return per(result)
def decompose(f):
"""
Computes a functional decomposition of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**4 + 2*x**3 - x - 1, x, domain='ZZ').decompose()
[Poly(x**2 - x - 1, x, domain='ZZ'), Poly(x**2 + x, x, domain='ZZ')]
"""
if hasattr(f.rep, 'decompose'):
result = f.rep.decompose()
else: # pragma: no cover
raise OperationNotSupported(f, 'decompose')
return map(f.per, result)
def shift(f, a):
"""
Efficiently compute Taylor shift ``f(x + a)``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 2*x + 1, x).shift(2)
Poly(x**2 + 2*x + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'shift'):
result = f.rep.shift(a)
else: # pragma: no cover
raise OperationNotSupported(f, 'shift')
return f.per(result)
def sturm(f, auto=True):
"""
Computes the Sturm sequence of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 - 2*x**2 + x - 3, x).sturm()
[Poly(x**3 - 2*x**2 + x - 3, x, domain='QQ'),
Poly(3*x**2 - 4*x + 1, x, domain='QQ'),
Poly(2/9*x + 25/9, x, domain='QQ'),
Poly(-2079/4, x, domain='QQ')]
"""
if auto and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'sturm'):
result = f.rep.sturm()
else: # pragma: no cover
raise OperationNotSupported(f, 'sturm')
return map(f.per, result)
def gff_list(f):
"""
Computes greatest factorial factorization of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**5 + 2*x**4 - x**3 - 2*x**2
>>> Poly(f).gff_list()
[(Poly(x, x, domain='ZZ'), 1), (Poly(x + 2, x, domain='ZZ'), 4)]
"""
if hasattr(f.rep, 'gff_list'):
result = f.rep.gff_list()
else: # pragma: no cover
raise OperationNotSupported(f, 'gff_list')
return [ (f.per(g), k) for g, k in result ]
def sqf_norm(f):
"""
Computes square-free norm of ``f``.
Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and
``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
where ``a`` is the algebraic extension of the ground domain.
Examples
========
>>> from sympy import Poly, sqrt
>>> from sympy.abc import x
>>> s, f, r = Poly(x**2 + 1, x, extension=[sqrt(3)]).sqf_norm()
>>> s
1
>>> f
Poly(x**2 - 2*sqrt(3)*x + 4, x, domain='QQ<sqrt(3)>')
>>> r
Poly(x**4 - 4*x**2 + 16, x, domain='QQ')
"""
if hasattr(f.rep, 'sqf_norm'):
s, g, r = f.rep.sqf_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_norm')
return s, f.per(g), f.per(r)
def sqf_part(f):
"""
Computes square-free part of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 - 3*x - 2, x).sqf_part()
Poly(x**2 - x - 2, x, domain='ZZ')
"""
if hasattr(f.rep, 'sqf_part'):
result = f.rep.sqf_part()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_part')
return f.per(result)
def sqf_list(f, all=False):
"""
Returns a list of square-free factors of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = 2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16
>>> Poly(f).sqf_list()
(2, [(Poly(x + 1, x, domain='ZZ'), 2),
(Poly(x + 2, x, domain='ZZ'), 3)])
>>> Poly(f).sqf_list(all=True)
(2, [(Poly(1, x, domain='ZZ'), 1),
(Poly(x + 1, x, domain='ZZ'), 2),
(Poly(x + 2, x, domain='ZZ'), 3)])
"""
if hasattr(f.rep, 'sqf_list'):
coeff, factors = f.rep.sqf_list(all)
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_list')
return f.rep.dom.to_sympy(coeff), [ (f.per(g), k) for g, k in factors ]
def sqf_list_include(f, all=False):
"""
Returns a list of square-free factors of ``f``.
Examples
========
>>> from sympy import Poly, expand
>>> from sympy.abc import x
>>> f = expand(2*(x + 1)**3*x**4)
>>> f
2*x**7 + 6*x**6 + 6*x**5 + 2*x**4
>>> Poly(f).sqf_list_include()
[(Poly(2, x, domain='ZZ'), 1),
(Poly(x + 1, x, domain='ZZ'), 3),
(Poly(x, x, domain='ZZ'), 4)]
>>> Poly(f).sqf_list_include(all=True)
[(Poly(2, x, domain='ZZ'), 1),
(Poly(1, x, domain='ZZ'), 2),
(Poly(x + 1, x, domain='ZZ'), 3),
(Poly(x, x, domain='ZZ'), 4)]
"""
if hasattr(f.rep, 'sqf_list_include'):
factors = f.rep.sqf_list_include(all)
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_list_include')
return [ (f.per(g), k) for g, k in factors ]
def factor_list(f):
    """
    Returns a list of irreducible factors of ``f``.

    The result is a pair ``(coeff, factors)`` where ``coeff`` is the
    content/leading coefficient as a SymPy expression and ``factors``
    is a list of ``(Poly, multiplicity)`` pairs.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> f = 2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y

    >>> Poly(f).factor_list()
    (2, [(Poly(x + y, x, y, domain='ZZ'), 1),
         (Poly(x**2 + 1, x, y, domain='ZZ'), 2)])

    """
    if hasattr(f.rep, 'factor_list'):
        try:
            coeff, factors = f.rep.factor_list()
        except DomainError:
            # Factorization is not implemented over this ground domain:
            # fall back to reporting ``f`` itself as irreducible.
            return S.One, [(f, 1)]
    else:  # pragma: no cover
        raise OperationNotSupported(f, 'factor_list')

    # Lift the ground coefficient and the low-level factors back to
    # SymPy expressions / Poly instances.
    return f.rep.dom.to_sympy(coeff), [ (f.per(g), k) for g, k in factors ]
def factor_list_include(f):
    """
    Returns a list of irreducible factors of ``f``.

    Like :func:`factor_list`, but the numeric content is absorbed into
    the factor list instead of being returned separately.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> f = 2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y

    >>> Poly(f).factor_list_include()
    [(Poly(2*x + 2*y, x, y, domain='ZZ'), 1),
     (Poly(x**2 + 1, x, y, domain='ZZ'), 2)]

    """
    if hasattr(f.rep, 'factor_list_include'):
        try:
            factors = f.rep.factor_list_include()
        except DomainError:
            # No factorization algorithm over this domain: treat ``f``
            # itself as a single irreducible factor.
            return [(f, 1)]
    else:  # pragma: no cover
        raise OperationNotSupported(f, 'factor_list_include')

    return [ (f.per(g), k) for g, k in factors ]
def intervals(f, all=False, eps=None, inf=None, sup=None, fast=False, sqf=False):
    """
    Compute isolating intervals for roots of ``f``.

    With ``sqf=True`` the polynomial is assumed square-free and bare
    intervals are returned; otherwise each interval carries the root's
    multiplicity.  With ``all=True`` complex roots are isolated too and
    a pair ``(real_intervals, complex_rectangles)`` is returned.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 - 3, x).intervals()
    [((-2, -1), 1), ((1, 2), 1)]
    >>> Poly(x**2 - 3, x).intervals(eps=1e-2)
    [((-26/15, -19/11), 1), ((19/11, 26/15), 1)]

    """
    # Normalize precision and bounds into QQ, the rational field the
    # low-level isolation routines work over.
    if eps is not None:
        eps = QQ.convert(eps)

        if eps <= 0:
            raise ValueError("'eps' must be a positive rational")

    if inf is not None:
        inf = QQ.convert(inf)
    if sup is not None:
        sup = QQ.convert(sup)

    if hasattr(f.rep, 'intervals'):
        result = f.rep.intervals(
            all=all, eps=eps, inf=inf, sup=sup, fast=fast, sqf=sqf)
    else:  # pragma: no cover
        raise OperationNotSupported(f, 'intervals')

    if sqf:
        # Square-free case: intervals come without multiplicities.
        def _real(interval):
            s, t = interval
            return (QQ.to_sympy(s), QQ.to_sympy(t))

        if not all:
            return map(_real, result)

        # Complex roots are isolated by rectangles given by two corners.
        def _complex(rectangle):
            (u, v), (s, t) = rectangle
            return (QQ.to_sympy(u) + I*QQ.to_sympy(v),
                    QQ.to_sympy(s) + I*QQ.to_sympy(t))

        real_part, complex_part = result

        return map(_real, real_part), map(_complex, complex_part)
    else:
        # General case: each interval/rectangle is paired with the
        # multiplicity ``k`` of the isolated root.
        def _real(interval):
            (s, t), k = interval
            return ((QQ.to_sympy(s), QQ.to_sympy(t)), k)

        if not all:
            return map(_real, result)

        def _complex(rectangle):
            ((u, v), (s, t)), k = rectangle
            return ((QQ.to_sympy(u) + I*QQ.to_sympy(v),
                     QQ.to_sympy(s) + I*QQ.to_sympy(t)), k)

        real_part, complex_part = result

        return map(_real, real_part), map(_complex, complex_part)
def refine_root(f, s, t, eps=None, steps=None, fast=False, check_sqf=False):
    """
    Refine an isolating interval of a root to the given precision.

    ``(s, t)`` must be an isolating interval for a root of ``f``; the
    refined interval is returned as a pair of rationals.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 - 3, x).refine_root(1, 2, eps=1e-2)
    (19/11, 26/15)

    """
    # Refinement assumes a square-free polynomial; verify on request.
    if check_sqf and not f.is_sqf:
        raise PolynomialError("only square-free polynomials supported")

    s, t = QQ.convert(s), QQ.convert(t)

    if eps is not None:
        eps = QQ.convert(eps)

        if eps <= 0:
            raise ValueError("'eps' must be a positive rational")

    if steps is not None:
        steps = int(steps)
    elif eps is None:
        # Neither a precision target nor a step count given:
        # do a single bisection step by default.
        steps = 1

    if hasattr(f.rep, 'refine_root'):
        # NOTE: these locals shadow the global ``S`` singleton within
        # this function; they are just the refined interval endpoints.
        S, T = f.rep.refine_root(s, t, eps=eps, steps=steps, fast=fast)
    else:  # pragma: no cover
        raise OperationNotSupported(f, 'refine_root')

    return QQ.to_sympy(S), QQ.to_sympy(T)
def count_roots(f, inf=None, sup=None):
    """
    Return the number of roots of ``f`` in ``[inf, sup]`` interval.

    If either bound has a non-zero imaginary part, roots are counted in
    the complex rectangle spanned by the two bounds instead.

    Examples
    ========

    >>> from sympy import Poly, I
    >>> from sympy.abc import x

    >>> Poly(x**4 - 4, x).count_roots(-3, 3)
    2
    >>> Poly(x**4 - 4, x).count_roots(0, 1 + 3*I)
    1

    """
    inf_real, sup_real = True, True

    # Normalize each bound: infinities become ``None`` (unbounded), real
    # bounds become single QQ values, complex bounds become (re, im)
    # pairs of QQ values and flip the corresponding *_real flag.
    if inf is not None:
        inf = sympify(inf)

        if inf is S.NegativeInfinity:
            inf = None
        else:
            re, im = inf.as_real_imag()

            if not im:
                inf = QQ.convert(inf)
            else:
                inf, inf_real = map(QQ.convert, (re, im)), False

    if sup is not None:
        sup = sympify(sup)

        if sup is S.Infinity:
            sup = None
        else:
            re, im = sup.as_real_imag()

            if not im:
                sup = QQ.convert(sup)
            else:
                sup, sup_real = map(QQ.convert, (re, im)), False

    if inf_real and sup_real:
        if hasattr(f.rep, 'count_real_roots'):
            count = f.rep.count_real_roots(inf=inf, sup=sup)
        else:  # pragma: no cover
            raise OperationNotSupported(f, 'count_real_roots')
    else:
        # Mixed case: promote any remaining real bound to a complex
        # point with zero imaginary part before counting in a rectangle.
        if inf_real and inf is not None:
            inf = (inf, QQ.zero)

        if sup_real and sup is not None:
            sup = (sup, QQ.zero)

        if hasattr(f.rep, 'count_complex_roots'):
            count = f.rep.count_complex_roots(inf=inf, sup=sup)
        else:  # pragma: no cover
            raise OperationNotSupported(f, 'count_complex_roots')

    return Integer(count)
def root(f, index, radicals=True):
    """
    Get an indexed root of a polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> f = Poly(2*x**3 - 7*x**2 + 4*x + 4)

    >>> f.root(0)
    -1/2
    >>> f.root(1)
    2
    >>> f.root(2)
    2
    >>> f.root(3)
    Traceback (most recent call last):
    ...
    IndexError: root index out of [-3, 2] range, got 3

    >>> Poly(x**5 + x + 1).root(0)
    RootOf(x**3 - x**2 + 1, 0)

    """
    # RootOf handles index validation and evaluation in radicals.
    RootOf = sympy.polys.rootoftools.RootOf
    return RootOf(f, index, radicals=radicals)
def real_roots(f, multiple=True, radicals=True):
    """
    Return a list of real roots with multiplicities.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(2*x**3 - 7*x**2 + 4*x + 4).real_roots()
    [-1/2, 2, 2]
    >>> Poly(x**3 + x + 1).real_roots()
    [RootOf(x**3 + x + 1, 0)]

    """
    roots = sympy.polys.rootoftools.RootOf.real_roots(f, radicals=radicals)

    # When ``multiple`` is False, collapse repeated roots into
    # (root, multiplicity) pairs.
    if not multiple:
        roots = group(roots, multiple=False)

    return roots
def all_roots(f, multiple=True, radicals=True):
    """
    Return a list of real and complex roots with multiplicities.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(2*x**3 - 7*x**2 + 4*x + 4).all_roots()
    [-1/2, 2, 2]
    >>> Poly(x**3 + x + 1).all_roots()
    [RootOf(x**3 + x + 1, 0),
     RootOf(x**3 + x + 1, 1),
     RootOf(x**3 + x + 1, 2)]

    """
    found = sympy.polys.rootoftools.RootOf.all_roots(f, radicals=radicals)

    # When ``multiple`` is False, collapse repeated roots into
    # (root, multiplicity) pairs.
    if not multiple:
        found = group(found, multiple=False)

    return found
def nroots(f, n=15, maxsteps=50, cleanup=True, error=False):
    """
    Compute numerical approximations of roots of ``f``.

    ``n`` is the number of digits of working precision; if ``error``
    is True, a pair ``(roots, max_error)`` is returned instead of just
    the list of roots.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 - 3).nroots(n=15)
    [-1.73205080756888, 1.73205080756888]
    >>> Poly(x**2 - 3).nroots(n=30)
    [-1.73205080756887729352744634151, 1.73205080756887729352744634151]

    """
    if f.is_multivariate:
        raise MultivariatePolynomialError(
            "can't compute numerical roots of %s" % f)

    if f.degree() <= 0:
        return []

    # Evaluate each coefficient numerically and split into (re, im)
    # so they can be fed to mpmath as complex numbers.
    coeffs = [ coeff.evalf(n=n).as_real_imag()
               for coeff in f.all_coeffs() ]

    # Temporarily raise mpmath's working precision; restore it in the
    # ``finally`` clause no matter how root finding exits.
    dps = sympy.mpmath.mp.dps
    sympy.mpmath.mp.dps = n

    try:
        try:
            coeffs = [ sympy.mpmath.mpc(*coeff) for coeff in coeffs ]
        except TypeError:
            # Symbolic coefficients survive evalf() and fail mpc().
            raise DomainError(
                "numerical domain expected, got %s" % f.rep.dom)

        result = sympy.mpmath.polyroots(
            coeffs, maxsteps=maxsteps, cleanup=cleanup, error=error)

        # polyroots() returns (roots, error) only when error=True;
        # note the local ``error`` is rebound to the numeric bound here.
        if error:
            roots, error = result
        else:
            roots, error = result, None

        roots = map(sympify, sorted(roots, key=lambda r: (r.real, r.imag)))
    finally:
        sympy.mpmath.mp.dps = dps

    if error is not None:
        return roots, sympify(error)
    else:
        return roots
def ground_roots(f):
    """
    Compute roots of ``f`` by factorization in the ground domain.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**6 - 4*x**4 + 4*x**3 - x**2).ground_roots()
    {0: 2, 1: 2}

    """
    if f.is_multivariate:
        raise MultivariatePolynomialError(
            "can't compute ground roots of %s" % f)

    # Every linear factor a*x + b of multiplicity k contributes the
    # root -b/a with multiplicity k; non-linear factors are ignored.
    result = {}

    for factor, multiplicity in f.factor_list()[1]:
        if not factor.is_linear:
            continue

        a, b = factor.all_coeffs()
        result[-b/a] = multiplicity

    return result
def nth_power_roots_poly(f, n):
    """
    Construct a polynomial with n-th powers of roots of ``f``.

    The result is computed as the resultant of ``f(x)`` and
    ``x**n - t`` with respect to ``x``, with ``t`` renamed back to the
    original generator.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> f = Poly(x**4 - x**2 + 1)

    >>> f.nth_power_roots_poly(2)
    Poly(x**4 - 2*x**3 + 3*x**2 - 2*x + 1, x, domain='ZZ')
    >>> f.nth_power_roots_poly(3)
    Poly(x**4 + 2*x**2 + 1, x, domain='ZZ')
    >>> f.nth_power_roots_poly(4)
    Poly(x**4 + 2*x**3 + 3*x**2 + 2*x + 1, x, domain='ZZ')
    >>> f.nth_power_roots_poly(12)
    Poly(x**4 - 4*x**3 + 6*x**2 - 4*x + 1, x, domain='ZZ')

    """
    if f.is_multivariate:
        raise MultivariatePolynomialError(
            "must be a univariate polynomial")

    # ``n`` must be a positive integer; anything else is rejected.
    N = sympify(n)

    if N.is_Integer and N >= 1:
        n = int(N)
    else:
        # BUGFIX: error message previously read "'n' must an integer".
        raise ValueError("'n' must be an integer and n >= 1, got %s" % n)

    x = f.gen
    t = Dummy('t')

    r = f.resultant(f.__class__.from_expr(x**n - t, x, t))

    return r.replace(t, x)
def cancel(f, g, include=False):
    """
    Cancel common factors in a rational function ``f/g``.

    With ``include=False`` the result is ``(cp/cq, p, q)`` where
    ``cp/cq`` is the ratio of extracted contents; with ``include=True``
    the contents are multiplied back into the returned polynomials.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x))
    (1, Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))

    >>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x), include=True)
    (Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))

    """
    # Bring both polynomials to a common domain/generators first.
    dom, per, F, G = f._unify(g)

    if hasattr(F, 'cancel'):
        result = F.cancel(G, include=include)
    else:  # pragma: no cover
        raise OperationNotSupported(f, 'cancel')

    if not include:
        # Contents live in the associated ring when one exists
        # (e.g. ZZ for QQ), so convert before lifting to SymPy.
        if dom.has_assoc_Ring:
            dom = dom.get_ring()

        cp, cq, p, q = result

        cp = dom.to_sympy(cp)
        cq = dom.to_sympy(cq)

        return cp/cq, per(p), per(q)
    else:
        return tuple(map(per, result))
# --- Structural predicates -------------------------------------------------
# All of these are thin delegations to the low-level representation
# (``f.rep``), except ``is_univariate``/``is_multivariate`` which only
# inspect the number of generators.

@property
def is_zero(f):
    """
    Returns ``True`` if ``f`` is a zero polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(0, x).is_zero
    True
    >>> Poly(1, x).is_zero
    False

    """
    return f.rep.is_zero

@property
def is_one(f):
    """
    Returns ``True`` if ``f`` is a unit polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(0, x).is_one
    False
    >>> Poly(1, x).is_one
    True

    """
    return f.rep.is_one

@property
def is_sqf(f):
    """
    Returns ``True`` if ``f`` is a square-free polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 - 2*x + 1, x).is_sqf
    False
    >>> Poly(x**2 - 1, x).is_sqf
    True

    """
    return f.rep.is_sqf

@property
def is_monic(f):
    """
    Returns ``True`` if the leading coefficient of ``f`` is one.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x + 2, x).is_monic
    True
    >>> Poly(2*x + 2, x).is_monic
    False

    """
    return f.rep.is_monic

@property
def is_primitive(f):
    """
    Returns ``True`` if GCD of the coefficients of ``f`` is one.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(2*x**2 + 6*x + 12, x).is_primitive
    False
    >>> Poly(x**2 + 3*x + 6, x).is_primitive
    True

    """
    return f.rep.is_primitive

@property
def is_ground(f):
    """
    Returns ``True`` if ``f`` is an element of the ground domain.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x, x).is_ground
    False
    >>> Poly(2, x).is_ground
    True
    >>> Poly(y, x).is_ground
    True

    """
    return f.rep.is_ground

@property
def is_linear(f):
    """
    Returns ``True`` if ``f`` is linear in all its variables.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x + y + 2, x, y).is_linear
    True
    >>> Poly(x*y + 2, x, y).is_linear
    False

    """
    return f.rep.is_linear

@property
def is_quadratic(f):
    """
    Returns ``True`` if ``f`` is quadratic in all its variables.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x*y + 2, x, y).is_quadratic
    True
    >>> Poly(x*y**2 + 2, x, y).is_quadratic
    False

    """
    return f.rep.is_quadratic

@property
def is_monomial(f):
    """
    Returns ``True`` if ``f`` is zero or has only one term.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(3*x**2, x).is_monomial
    True
    >>> Poly(3*x**2 + 1, x).is_monomial
    False

    """
    return f.rep.is_monomial

@property
def is_homogeneous(f):
    """
    Returns ``True`` if ``f`` is a homogeneous polynomial.

    A homogeneous polynomial is a polynomial whose all monomials with
    non-zero coefficients have the same total degree. If you want not
    only to check if a polynomial is homogeneous but also compute its
    homogeneous order, then use :func:`Poly.homogeneous_order`.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + x*y, x, y).is_homogeneous
    True
    >>> Poly(x**3 + x*y, x, y).is_homogeneous
    False

    """
    return f.rep.is_homogeneous

@property
def is_irreducible(f):
    """
    Returns ``True`` if ``f`` has no factors over its domain.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> Poly(x**2 + x + 1, x, modulus=2).is_irreducible
    True
    >>> Poly(x**2 + 1, x, modulus=2).is_irreducible
    False

    """
    return f.rep.is_irreducible

@property
def is_univariate(f):
    """
    Returns ``True`` if ``f`` is a univariate polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + x + 1, x).is_univariate
    True
    >>> Poly(x*y**2 + x*y + 1, x, y).is_univariate
    False
    >>> Poly(x*y**2 + x*y + 1, x).is_univariate
    True
    >>> Poly(x**2 + x + 1, x, y).is_univariate
    False

    """
    return len(f.gens) == 1

@property
def is_multivariate(f):
    """
    Returns ``True`` if ``f`` is a multivariate polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x, y

    >>> Poly(x**2 + x + 1, x).is_multivariate
    False
    >>> Poly(x*y**2 + x*y + 1, x, y).is_multivariate
    True
    >>> Poly(x*y**2 + x*y + 1, x).is_multivariate
    False
    >>> Poly(x**2 + x + 1, x, y).is_multivariate
    True

    """
    return len(f.gens) != 1

@property
def is_cyclotomic(f):
    """
    Returns ``True`` if ``f`` is a cyclotomic polynomial.

    Examples
    ========

    >>> from sympy import Poly
    >>> from sympy.abc import x

    >>> f = x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1

    >>> Poly(f).is_cyclotomic
    False

    >>> g = x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1

    >>> Poly(g).is_cyclotomic
    True

    """
    return f.rep.is_cyclotomic
# --- Arithmetic operators --------------------------------------------------
# Each binary operator first tries to coerce the other operand into a
# polynomial over the same generators; if polification fails, the
# computation falls back to plain Expr arithmetic via ``as_expr()``.

def __abs__(f):
    # Componentwise absolute value of the coefficients.
    return f.abs()

def __neg__(f):
    return f.neg()

@_sympifyit('g', NotImplemented)
def __add__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            # ``g`` cannot be expressed in ``f``'s generators:
            # fall back to symbolic addition.
            return f.as_expr() + g

    return f.add(g)

@_sympifyit('g', NotImplemented)
def __radd__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            return g + f.as_expr()

    return g.add(f)

@_sympifyit('g', NotImplemented)
def __sub__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            return f.as_expr() - g

    return f.sub(g)

@_sympifyit('g', NotImplemented)
def __rsub__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            return g - f.as_expr()

    return g.sub(f)

@_sympifyit('g', NotImplemented)
def __mul__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            return f.as_expr()*g

    return f.mul(g)

@_sympifyit('g', NotImplemented)
def __rmul__(f, g):
    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens)
        except PolynomialError:
            return g*f.as_expr()

    return g.mul(f)

@_sympifyit('n', NotImplemented)
def __pow__(f, n):
    # Only non-negative integer powers keep the result a polynomial.
    if n.is_Integer and n >= 0:
        return f.pow(n)
    else:
        return f.as_expr()**n
# --- Division operators ----------------------------------------------------
# divmod/%/// delegate to polynomial division (div/rem/quo), while true
# division always degrades to an Expr ratio, since the quotient of two
# polynomials is generally not a polynomial.

@_sympifyit('g', NotImplemented)
def __divmod__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return f.div(g)

@_sympifyit('g', NotImplemented)
def __rdivmod__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return g.div(f)

@_sympifyit('g', NotImplemented)
def __mod__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return f.rem(g)

@_sympifyit('g', NotImplemented)
def __rmod__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return g.rem(f)

@_sympifyit('g', NotImplemented)
def __floordiv__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return f.quo(g)

@_sympifyit('g', NotImplemented)
def __rfloordiv__(f, g):
    if not g.is_Poly:
        g = f.__class__(g, *f.gens)

    return g.quo(f)

@_sympifyit('g', NotImplemented)
def __div__(f, g):
    return f.as_expr()/g.as_expr()

@_sympifyit('g', NotImplemented)
def __rdiv__(f, g):
    return g.as_expr()/f.as_expr()

# Python 3 uses __truediv__/__rtruediv__; alias the Python 2 names.
__truediv__ = __div__
__rtruediv__ = __rdiv__
# --- Equality --------------------------------------------------------------

@_sympifyit('g', NotImplemented)
def __eq__(f, g):
    # Non-Poly operands are coerced over the same gens/domain;
    # failure to coerce means the two can't be equal.
    if not g.is_Poly:
        try:
            g = f.__class__(g, f.gens, domain=f.get_domain())
        except (PolynomialError, DomainError, CoercionFailed):
            return False

    if f.gens != g.gens:
        return False

    if f.rep.dom != g.rep.dom:
        # Compare over a unified domain so e.g. ZZ and QQ
        # representations of the same polynomial are equal.
        try:
            dom = f.rep.dom.unify(g.rep.dom, f.gens)
        except UnificationFailed:
            return False

        f = f.set_domain(dom)
        g = g.set_domain(dom)

    return f.rep == g.rep

@_sympifyit('g', NotImplemented)
def __ne__(f, g):
    return not f.__eq__(g)

def __nonzero__(f):
    # Truthiness: a Poly is falsy iff it is the zero polynomial.
    return not f.is_zero

def eq(f, g, strict=False):
    # ``strict=True`` requires identical class, gens and representation
    # (no domain unification) — see _strict_eq.
    if not strict:
        return f.__eq__(g)
    else:
        return f._strict_eq(sympify(g))

def ne(f, g, strict=False):
    return not f.eq(g, strict=strict)

def _strict_eq(f, g):
    return isinstance(g, f.__class__) and f.gens == g.gens and f.rep.eq(g.rep, strict=True)
class PurePoly(Poly):
    """Class for representing pure polynomials.

    Unlike :class:`Poly`, a ``PurePoly`` treats its generators as
    anonymous: two pure polynomials compare equal if they have the same
    structure, regardless of the generator symbols used.
    """

    def _hashable_content(self):
        """Allow SymPy to hash Poly instances. """
        # Generators are deliberately excluded: hash depends only on
        # the coefficient representation, consistent with __eq__.
        return (self.rep,)

    def __hash__(self):
        return super(PurePoly, self).__hash__()

    @property
    def free_symbols(self):
        """
        Free symbols of a polynomial.

        Generator symbols never count as free; only symbols occurring
        in the coefficient domain do.

        Examples
        ========

        >>> from sympy import PurePoly
        >>> from sympy.abc import x, y

        >>> PurePoly(x**2 + 1).free_symbols
        set()
        >>> PurePoly(x**2 + y).free_symbols
        set()
        >>> PurePoly(x**2 + y, x).free_symbols
        set([y])

        """
        return self.free_symbols_in_domain

    @_sympifyit('g', NotImplemented)
    def __eq__(f, g):
        if not g.is_Poly:
            try:
                g = f.__class__(g, f.gens, domain=f.get_domain())
            except (PolynomialError, DomainError, CoercionFailed):
                return False

        # Only the *number* of generators must agree, not their names —
        # this is what makes the polynomial "pure".
        if len(f.gens) != len(g.gens):
            return False

        if f.rep.dom != g.rep.dom:
            try:
                dom = f.rep.dom.unify(g.rep.dom, f.gens)
            except UnificationFailed:
                return False

            f = f.set_domain(dom)
            g = g.set_domain(dom)

        return f.rep == g.rep

    def _strict_eq(f, g):
        # Strict comparison still ignores generator names.
        return isinstance(g, f.__class__) and f.rep.eq(g.rep, strict=True)

    def _unify(f, g):
        """Unify ``f`` and ``g`` into a common domain, keeping ``f``'s gens."""
        g = sympify(g)

        if not g.is_Poly:
            # Ground elements are lifted into f's representation.
            try:
                return f.rep.dom, f.per, f.rep, f.rep.per(f.rep.dom.from_sympy(g))
            except CoercionFailed:
                raise UnificationFailed("can't unify %s with %s" % (f, g))

        if len(f.gens) != len(g.gens):
            raise UnificationFailed("can't unify %s with %s" % (f, g))

        if not (isinstance(f.rep, DMP) and isinstance(g.rep, DMP)):
            raise UnificationFailed("can't unify %s with %s" % (f, g))

        cls = f.__class__
        gens = f.gens

        dom = f.rep.dom.unify(g.rep.dom, gens)

        F = f.rep.convert(dom)
        G = g.rep.convert(dom)

        def per(rep, dom=dom, gens=gens, remove=None):
            # Rebuild a polynomial from a low-level rep; ``remove``
            # drops one generator, degrading to a ground element if
            # none remain.
            if remove is not None:
                gens = gens[:remove] + gens[remove + 1:]

                if not gens:
                    return dom.to_sympy(rep)

            return cls.new(rep, *gens)

        return dom, per, F, G
def poly_from_expr(expr, *gens, **args):
    """Construct a polynomial from an expression. """
    # Fold gens/keyword flags into an Options object and delegate.
    return _poly_from_expr(expr, options.build_options(gens, args))
def _poly_from_expr(expr, opt):
    """Construct a polynomial from an expression.

    Returns ``(poly, opt)`` — ``opt`` is updated in place with the
    generators and domain actually used, and with a ``polys`` flag
    recording whether the input already was a Poly.
    """
    orig, expr = expr, sympify(expr)

    if not isinstance(expr, Basic):
        raise PolificationFailed(opt, orig, expr)
    elif expr.is_Poly:
        # Already a Poly: just re-wrap under the requested options.
        poly = expr.__class__._from_poly(expr, opt)

        opt['gens'] = poly.gens
        opt['domain'] = poly.domain

        if opt.polys is None:
            opt['polys'] = True

        return poly, opt
    elif opt.expand:
        expr = expr.expand()

    try:
        # Decompose the expression into a {monomial: coefficient} dict.
        rep, opt = _dict_from_expr(expr, opt)
    except GeneratorsNeeded:
        raise PolificationFailed(opt, orig, expr)

    monoms, coeffs = zip(*rep.items())
    domain = opt.domain

    if domain is None:
        # No explicit domain given: infer the smallest one containing
        # all coefficients.
        domain, coeffs = construct_domain(coeffs, opt=opt)
    else:
        coeffs = map(domain.from_sympy, coeffs)

    level = len(opt.gens) - 1

    poly = Poly.new(
        DMP.from_monoms_coeffs(monoms, coeffs, level, domain), *opt.gens)

    opt['domain'] = domain

    if opt.polys is None:
        opt['polys'] = False

    return poly, opt
def parallel_poly_from_expr(exprs, *gens, **args):
    """Construct polynomials from expressions. """
    # Fold gens/keyword flags into an Options object and delegate.
    return _parallel_poly_from_expr(exprs, options.build_options(gens, args))
def _parallel_poly_from_expr(exprs, opt):
    """Construct polynomials from expressions.

    All results share the same generators and domain.  Returns
    ``(polys, opt)``; ``opt`` is updated with the chosen domain and a
    ``polys`` flag recording whether any input already was a Poly.
    """
    # Fast path: a pair of Polys only needs unification.
    if len(exprs) == 2:
        f, g = exprs

        if isinstance(f, Poly) and isinstance(g, Poly):
            f = f.__class__._from_poly(f, opt)
            g = g.__class__._from_poly(g, opt)

            f, g = f.unify(g)

            opt['gens'] = f.gens
            opt['domain'] = f.domain

            if opt.polys is None:
                opt['polys'] = True

            return [f, g], opt

    origs, exprs = list(exprs), []
    _exprs, _polys = [], []   # indices of plain expressions / Polys

    failed = False

    for i, expr in enumerate(origs):
        expr = sympify(expr)

        if isinstance(expr, Basic):
            if expr.is_Poly:
                _polys.append(i)
            else:
                _exprs.append(i)

                if opt.expand:
                    expr = expr.expand()
        else:
            failed = True

        exprs.append(expr)

    if failed:
        raise PolificationFailed(opt, origs, exprs, True)

    if _polys:
        # XXX: this is a temporary solution
        for i in _polys:
            exprs[i] = exprs[i].as_expr()

    try:
        reps, opt = _parallel_dict_from_expr(exprs, opt)
    except GeneratorsNeeded:
        raise PolificationFailed(opt, origs, exprs, True)

    # Flatten all coefficients into one list so a single common domain
    # can be constructed, remembering each polynomial's length.
    coeffs_list, lengths = [], []

    all_monoms = []
    all_coeffs = []

    for rep in reps:
        monoms, coeffs = zip(*rep.items())

        coeffs_list.extend(coeffs)
        all_monoms.append(monoms)

        lengths.append(len(coeffs))

    domain = opt.domain

    if domain is None:
        domain, coeffs_list = construct_domain(coeffs_list, opt=opt)
    else:
        coeffs_list = map(domain.from_sympy, coeffs_list)

    # Split the converted coefficients back per polynomial.
    for k in lengths:
        all_coeffs.append(coeffs_list[:k])
        coeffs_list = coeffs_list[k:]

    polys, level = [], len(opt.gens) - 1

    for monoms, coeffs in zip(all_monoms, all_coeffs):
        rep = DMP.from_monoms_coeffs(monoms, coeffs, level, domain)
        polys.append(Poly.new(rep, *opt.gens))

    opt['domain'] = domain

    if opt.polys is None:
        opt['polys'] = bool(_polys)

    return polys, opt
def _update_args(args, key, value):
"""Add a new ``(key, value)`` pair to arguments ``dict``. """
args = dict(args)
if key not in args:
args[key] = value
return args
def degree(f, *gens, **args):
    """
    Return the degree of ``f`` in the given variable.

    Examples
    ========

    >>> from sympy import degree
    >>> from sympy.abc import x, y

    >>> degree(x**2 + y*x + 1, gen=x)
    2
    >>> degree(x**2 + y*x + 1, gen=y)
    1

    """
    options.allowed_flags(args, ['gen', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:   # Python 2 except-binding syntax
        raise ComputationFailed('degree', 1, exc)

    return Integer(F.degree(opt.gen))

def degree_list(f, *gens, **args):
    """
    Return a list of degrees of ``f`` in all variables.

    Examples
    ========

    >>> from sympy import degree_list
    >>> from sympy.abc import x, y

    >>> degree_list(x**2 + y*x + 1)
    (2, 1)

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('degree_list', 1, exc)

    degrees = F.degree_list()

    return tuple(map(Integer, degrees))
# Leading coefficient / monomial / term with respect to ``opt.order``.

def LC(f, *gens, **args):
    """
    Return the leading coefficient of ``f``.

    Examples
    ========

    >>> from sympy import LC
    >>> from sympy.abc import x, y

    >>> LC(4*x**2 + 2*x*y**2 + x*y + 3*y)
    4

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('LC', 1, exc)

    return F.LC(order=opt.order)

def LM(f, *gens, **args):
    """
    Return the leading monomial of ``f``.

    Examples
    ========

    >>> from sympy import LM
    >>> from sympy.abc import x, y

    >>> LM(4*x**2 + 2*x*y**2 + x*y + 3*y)
    x**2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('LM', 1, exc)

    monom = F.LM(order=opt.order)

    return monom.as_expr()

def LT(f, *gens, **args):
    """
    Return the leading term of ``f``.

    Examples
    ========

    >>> from sympy import LT
    >>> from sympy.abc import x, y

    >>> LT(4*x**2 + 2*x*y**2 + x*y + 3*y)
    4*x**2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('LT', 1, exc)

    monom, coeff = F.LT(order=opt.order)

    return coeff*monom.as_expr()
# Pseudo-division: division after scaling by a power of the leading
# coefficient of ``g``, so the computation stays in the ground domain.

def pdiv(f, g, *gens, **args):
    """
    Compute polynomial pseudo-division of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import pdiv
    >>> from sympy.abc import x

    >>> pdiv(x**2 + 1, 2*x - 4)
    (2*x + 4, 20)

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('pdiv', 2, exc)

    q, r = F.pdiv(G)

    if not opt.polys:
        return q.as_expr(), r.as_expr()
    else:
        return q, r

def prem(f, g, *gens, **args):
    """
    Compute polynomial pseudo-remainder of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import prem
    >>> from sympy.abc import x

    >>> prem(x**2 + 1, 2*x - 4)
    20

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('prem', 2, exc)

    r = F.prem(G)

    if not opt.polys:
        return r.as_expr()
    else:
        return r

def pquo(f, g, *gens, **args):
    """
    Compute polynomial pseudo-quotient of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import pquo
    >>> from sympy.abc import x

    >>> pquo(x**2 + 1, 2*x - 4)
    2*x + 4
    >>> pquo(x**2 - 1, 2*x - 1)
    2*x + 1

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('pquo', 2, exc)

    try:
        q = F.pquo(G)
    except ExactQuotientFailed:
        # Re-raise in terms of the user's original expressions.
        raise ExactQuotientFailed(f, g)

    if not opt.polys:
        return q.as_expr()
    else:
        return q

def pexquo(f, g, *gens, **args):
    """
    Compute polynomial exact pseudo-quotient of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import pexquo
    >>> from sympy.abc import x

    >>> pexquo(x**2 - 1, 2*x - 2)
    2*x + 2

    >>> pexquo(x**2 + 1, 2*x - 4)
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('pexquo', 2, exc)

    q = F.pexquo(G)

    if not opt.polys:
        return q.as_expr()
    else:
        return q
# Ordinary polynomial division; ``auto=True`` (the default flag) lets
# the domain be extended to a field so the division can proceed.

def div(f, g, *gens, **args):
    """
    Compute polynomial division of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import div, ZZ, QQ
    >>> from sympy.abc import x

    >>> div(x**2 + 1, 2*x - 4, domain=ZZ)
    (0, x**2 + 1)
    >>> div(x**2 + 1, 2*x - 4, domain=QQ)
    (x/2 + 1, 5)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('div', 2, exc)

    q, r = F.div(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr(), r.as_expr()
    else:
        return q, r

def rem(f, g, *gens, **args):
    """
    Compute polynomial remainder of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import rem, ZZ, QQ
    >>> from sympy.abc import x

    >>> rem(x**2 + 1, 2*x - 4, domain=ZZ)
    x**2 + 1
    >>> rem(x**2 + 1, 2*x - 4, domain=QQ)
    5

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('rem', 2, exc)

    r = F.rem(G, auto=opt.auto)

    if not opt.polys:
        return r.as_expr()
    else:
        return r

def quo(f, g, *gens, **args):
    """
    Compute polynomial quotient of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import quo
    >>> from sympy.abc import x

    >>> quo(x**2 + 1, 2*x - 4)
    x/2 + 1
    >>> quo(x**2 - 1, x - 1)
    x + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('quo', 2, exc)

    q = F.quo(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr()
    else:
        return q

def exquo(f, g, *gens, **args):
    """
    Compute polynomial exact quotient of ``f`` and ``g``.

    Raises :exc:`ExactQuotientFailed` if ``g`` does not divide ``f``.

    Examples
    ========

    >>> from sympy import exquo
    >>> from sympy.abc import x

    >>> exquo(x**2 - 1, x - 1)
    x + 1

    >>> exquo(x**2 + 1, 2*x - 4)
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('exquo', 2, exc)

    q = F.exquo(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr()
    else:
        return q
# Extended Euclidean algorithms.  When polification fails (pure ground
# inputs such as integers), the computation is retried directly in the
# constructed coefficient domain.

def half_gcdex(f, g, *gens, **args):
    """
    Half extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.

    Examples
    ========

    >>> from sympy import half_gcdex
    >>> from sympy.abc import x

    >>> half_gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x + 1)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        # Ground-domain fallback for non-polynomial inputs.
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            s, h = domain.half_gcdex(a, b)
        except NotImplementedError:
            raise ComputationFailed('half_gcdex', 2, exc)
        else:
            return domain.to_sympy(s), domain.to_sympy(h)

    s, h = F.half_gcdex(G, auto=opt.auto)

    if not opt.polys:
        return s.as_expr(), h.as_expr()
    else:
        return s, h

def gcdex(f, g, *gens, **args):
    """
    Extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.

    Examples
    ========

    >>> from sympy import gcdex
    >>> from sympy.abc import x

    >>> gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x**2/5 - 6*x/5 + 2, x + 1)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            s, t, h = domain.gcdex(a, b)
        except NotImplementedError:
            raise ComputationFailed('gcdex', 2, exc)
        else:
            return domain.to_sympy(s), domain.to_sympy(t), domain.to_sympy(h)

    s, t, h = F.gcdex(G, auto=opt.auto)

    if not opt.polys:
        return s.as_expr(), t.as_expr(), h.as_expr()
    else:
        return s, t, h

def invert(f, g, *gens, **args):
    """
    Invert ``f`` modulo ``g`` when possible.

    Examples
    ========

    >>> from sympy import invert
    >>> from sympy.abc import x

    >>> invert(x**2 - 1, 2*x - 1)
    -4/3

    >>> invert(x**2 - 1, x - 1)
    Traceback (most recent call last):
    ...
    NotInvertible: zero divisor

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            return domain.to_sympy(domain.invert(a, b))
        except NotImplementedError:
            raise ComputationFailed('invert', 2, exc)

    h = F.invert(G, auto=opt.auto)

    if not opt.polys:
        return h.as_expr()
    else:
        return h
def subresultants(f, g, *gens, **args):
    """
    Compute subresultant PRS of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import subresultants
    >>> from sympy.abc import x

    >>> subresultants(x**2 + 1, x**2 - 1)
    [x**2 + 1, x**2 - 1, -2]

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('subresultants', 2, exc)

    result = F.subresultants(G)

    if not opt.polys:
        return [ r.as_expr() for r in result ]
    else:
        return result

def resultant(f, g, *gens, **args):
    """
    Compute resultant of ``f`` and ``g``.

    With ``includePRS=True`` the subresultant PRS computed along the
    way is returned as well, as ``(resultant, prs)``.

    Examples
    ========

    >>> from sympy import resultant
    >>> from sympy.abc import x

    >>> resultant(x**2 + 1, x**2 - 1)
    4

    """
    # Pop the non-standard flag before validating the remaining ones.
    includePRS = args.pop('includePRS', False)
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('resultant', 2, exc)

    if includePRS:
        result, R = F.resultant(G, includePRS=includePRS)
    else:
        result = F.resultant(G)

    if not opt.polys:
        if includePRS:
            return result.as_expr(), [r.as_expr() for r in R]
        return result.as_expr()
    else:
        if includePRS:
            return result, R
        return result

def discriminant(f, *gens, **args):
    """
    Compute discriminant of ``f``.

    Examples
    ========

    >>> from sympy import discriminant
    >>> from sympy.abc import x

    >>> discriminant(x**2 + 2*x + 3)
    -8

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed, exc:
        raise ComputationFailed('discriminant', 1, exc)

    result = F.discriminant()

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def cofactors(f, g, *gens, **args):
    """
    Compute GCD and cofactors of ``f`` and ``g``.

    Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
    ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors
    of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import cofactors
    >>> from sympy.abc import x

    >>> cofactors(x**2 - 1, x**2 - 3*x + 2)
    (x - 1, x + 1, x - 2)

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed, exc:
        # Ground-domain fallback for non-polynomial inputs
        # (e.g. plain integers or rationals).
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            h, cff, cfg = domain.cofactors(a, b)
        except NotImplementedError:
            raise ComputationFailed('cofactors', 2, exc)
        else:
            return domain.to_sympy(h), domain.to_sympy(cff), domain.to_sympy(cfg)

    h, cff, cfg = F.cofactors(G)

    if not opt.polys:
        return h.as_expr(), cff.as_expr(), cfg.as_expr()
    else:
        return h, cff, cfg
def gcd_list(seq, *gens, **args):
    """
    Compute GCD of a list of polynomials.

    Examples
    ========

    >>> from sympy import gcd_list
    >>> from sympy.abc import x

    >>> gcd_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
    x - 1

    """
    seq = sympify(seq)

    # Pure-number input with no explicit generators: work in the ground
    # domain directly instead of building Poly instances.
    if not gens and not args:
        domain, numbers = construct_domain(seq)

        if not numbers:
            return domain.zero
        elif domain.is_Numerical:
            result, numbers = numbers[0], numbers[1:]

            for number in numbers:
                result = domain.gcd(result, number)

                # gcd can never shrink below one, so stop early.
                if domain.is_one(result):
                    break

            return domain.to_sympy(result)

    options.allowed_flags(args, ['polys'])

    try:
        polys, opt = parallel_poly_from_expr(seq, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('gcd_list', len(seq), exc)

    if not polys:
        if not opt.polys:
            return S.Zero
        else:
            return Poly(0, opt=opt)

    result, polys = polys[0], polys[1:]

    for poly in polys:
        result = result.gcd(poly)

        if result.is_one:
            break

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def gcd(f, g=None, *gens, **args):
    """
    Compute GCD of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import gcd
    >>> from sympy.abc import x

    >>> gcd(x**2 - 1, x**2 - 3*x + 2)
    x - 1

    """
    # A single iterable argument means "GCD of a sequence"; in that case
    # the second positional argument, if any, is really a generator.
    if hasattr(f, '__iter__'):
        if g is not None:
            gens = (g,) + gens

        return gcd_list(f, *gens, **args)
    elif g is None:
        raise TypeError("gcd() takes 2 arguments or a sequence of arguments")

    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Non-polynomial input: fall back to the ground domain's gcd.
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            return domain.to_sympy(domain.gcd(a, b))
        except NotImplementedError:
            raise ComputationFailed('gcd', 2, exc)

    result = F.gcd(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def lcm_list(seq, *gens, **args):
    """
    Compute LCM of a list of polynomials.

    Examples
    ========

    >>> from sympy import lcm_list
    >>> from sympy.abc import x

    >>> lcm_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
    x**5 - x**4 - 2*x**3 - x**2 + x + 2

    """
    seq = sympify(seq)

    # Pure-number input with no explicit generators: work in the ground
    # domain directly instead of building Poly instances.
    if not gens and not args:
        domain, numbers = construct_domain(seq)

        if not numbers:
            return domain.one
        elif domain.is_Numerical:
            result, numbers = numbers[0], numbers[1:]

            for number in numbers:
                result = domain.lcm(result, number)

            return domain.to_sympy(result)

    options.allowed_flags(args, ['polys'])

    try:
        polys, opt = parallel_poly_from_expr(seq, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('lcm_list', len(seq), exc)

    if not polys:
        if not opt.polys:
            return S.One
        else:
            return Poly(1, opt=opt)

    result, polys = polys[0], polys[1:]

    for poly in polys:
        result = result.lcm(poly)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def lcm(f, g=None, *gens, **args):
    """
    Compute LCM of ``f`` and ``g``.

    Examples
    ========

    >>> from sympy import lcm
    >>> from sympy.abc import x

    >>> lcm(x**2 - 1, x**2 - 3*x + 2)
    x**3 - 2*x**2 - x + 2

    """
    # A single iterable argument means "LCM of a sequence"; in that case
    # the second positional argument, if any, is really a generator.
    if hasattr(f, '__iter__'):
        if g is not None:
            gens = (g,) + gens

        return lcm_list(f, *gens, **args)
    elif g is None:
        raise TypeError("lcm() takes 2 arguments or a sequence of arguments")

    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Non-polynomial input: fall back to the ground domain's lcm.
        domain, (a, b) = construct_domain(exc.exprs)

        try:
            return domain.to_sympy(domain.lcm(a, b))
        except NotImplementedError:
            raise ComputationFailed('lcm', 2, exc)

    result = F.lcm(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def terms_gcd(f, *gens, **args):
    """
    Remove GCD of terms from ``f``.

    If the ``deep`` flag is True, then the arguments of ``f`` will have
    terms_gcd applied to them.

    If a fraction is factored out of ``f`` and ``f`` is an Add, then
    an unevaluated Mul will be returned so that automatic simplification
    does not redistribute it. The hint ``clear``, when set to False, can be
    used to prevent such factoring when all coefficients are not fractions.

    Examples
    ========

    >>> from sympy import terms_gcd, cos, pi
    >>> from sympy.abc import x, y

    >>> terms_gcd(x**6*y**2 + x**3*y, x, y)
    x**3*y*(x**3*y + 1)

    The default action of polys routines is to expand the expression
    given to them. terms_gcd follows this behavior:

    >>> terms_gcd((3+3*x)*(x+x*y))
    3*x*(x*y + x + y + 1)

    If this is not desired then the hint ``expand`` can be set to False.
    In this case the expression will be treated as though it were comprised
    of one or more terms:

    >>> terms_gcd((3+3*x)*(x+x*y), expand=False)
    (3*x + 3)*(x*y + x)

    In order to traverse factors of a Mul or the arguments of other
    functions, the ``deep`` hint can be used:

    >>> terms_gcd((3 + 3*x)*(x + x*y), expand=False, deep=True)
    3*x*(x + 1)*(y + 1)
    >>> terms_gcd(cos(x + x*y), deep=True)
    cos(x*(y + 1))

    Rationals are factored out by default:

    >>> terms_gcd(x + y/2)
    (2*x + y)/2

    Only the y-term had a coefficient that was a fraction; if one
    does not want to factor out the 1/2 in cases like this, the
    flag ``clear`` can be set to False:

    >>> terms_gcd(x + y/2, clear=False)
    x + y/2
    >>> terms_gcd(x*y/2 + y**2, clear=False)
    y*(x/2 + y)

    The ``clear`` flag is ignored if all coefficients are fractions:

    >>> terms_gcd(x/3 + y/2, clear=False)
    (2*x + 3*y)/6

    See Also
    ========
    sympy.core.exprtools.gcd_terms, sympy.core.exprtools.factor_terms

    """
    if not isinstance(f, Expr) or f.is_Atom:
        return sympify(f)

    if args.get('deep', False):
        # Recurse into the arguments first, then treat the rebuilt
        # expression as a single term (expand=False).
        new = f.func(*[terms_gcd(a, *gens, **args) for a in f.args])
        args.pop('deep')
        args['expand'] = False
        return terms_gcd(new, *gens, **args)

    clear = args.pop('clear', True)
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        return exc.expr

    J, f = F.terms_gcd()

    if opt.domain.has_Ring:
        if opt.domain.has_Field:
            denom, f = f.clear_denoms(convert=True)

        coeff, f = f.primitive()

        if opt.domain.has_Field:
            coeff /= denom
    else:
        coeff = S.One

    term = Mul(*[ x**j for x, j in zip(f.gens, J) ])

    if clear:
        return _keep_coeff(coeff, term*f.as_expr())
    # base the clearing on the form of the original expression, not
    # the (perhaps) Mul that we have now
    coeff, f = _keep_coeff(coeff, f.as_expr(), clear=False).as_coeff_Mul()
    return _keep_coeff(coeff, term*f, clear=False)
def trunc(f, p, *gens, **args):
    """
    Reduce ``f`` modulo a constant ``p``.

    Examples
    ========

    >>> from sympy import trunc
    >>> from sympy.abc import x

    >>> trunc(2*x**3 + 3*x**2 + 5*x + 7, 3)
    -x**3 - x + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('trunc', 1, exc)

    result = F.trunc(sympify(p))

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def monic(f, *gens, **args):
    """
    Divide all coefficients of ``f`` by ``LC(f)``.

    Examples
    ========

    >>> from sympy import monic
    >>> from sympy.abc import x

    >>> monic(3*x**2 + 4*x + 2)
    x**2 + 4*x/3 + 2/3

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('monic', 1, exc)

    result = F.monic(auto=opt.auto)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def content(f, *gens, **args):
    """
    Compute GCD of coefficients of ``f``.

    Examples
    ========

    >>> from sympy import content
    >>> from sympy.abc import x

    >>> content(6*x**2 + 8*x + 12)
    2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('content', 1, exc)

    return F.content()
def primitive(f, *gens, **args):
    """
    Compute content and the primitive form of ``f``.

    Examples
    ========

    >>> from sympy.polys.polytools import primitive
    >>> from sympy.abc import x, y

    >>> primitive(6*x**2 + 8*x + 12)
    (2, 3*x**2 + 4*x + 6)

    >>> eq = (2 + 2*x)*x + 2

    Expansion is performed by default:

    >>> primitive(eq)
    (2, x**2 + x + 1)

    Set ``expand`` to False to shut this off. Note that the
    extraction will not be recursive; use the as_content_primitive method
    for recursive, non-destructive Rational extraction.

    >>> primitive(eq, expand=False)
    (1, x*(2*x + 2) + 2)

    >>> eq.as_content_primitive()
    (2, x*(x + 1) + 1)

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('primitive', 1, exc)

    cont, result = F.primitive()

    if not opt.polys:
        return cont, result.as_expr()
    else:
        return cont, result
def compose(f, g, *gens, **args):
    """
    Compute functional composition ``f(g)``.

    Examples
    ========

    >>> from sympy import compose
    >>> from sympy.abc import x

    >>> compose(x**2 + x, x - 1)
    x**2 - x

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('compose', 2, exc)

    result = F.compose(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def decompose(f, *gens, **args):
    """
    Compute functional decomposition of ``f``.

    Examples
    ========

    >>> from sympy import decompose
    >>> from sympy.abc import x

    >>> decompose(x**4 + 2*x**3 - x - 1)
    [x**2 - x - 1, x**2 + x]

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('decompose', 1, exc)

    result = F.decompose()

    if not opt.polys:
        return [ r.as_expr() for r in result ]
    else:
        return result
def sturm(f, *gens, **args):
    """
    Compute Sturm sequence of ``f``.

    Examples
    ========

    >>> from sympy import sturm
    >>> from sympy.abc import x

    >>> sturm(x**3 - 2*x**2 + x - 3)
    [x**3 - 2*x**2 + x - 3, 3*x**2 - 4*x + 1, 2*x/9 + 25/9, -2079/4]

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sturm', 1, exc)

    result = F.sturm(auto=opt.auto)

    if not opt.polys:
        return [ r.as_expr() for r in result ]
    else:
        return result
def gff_list(f, *gens, **args):
    """
    Compute a list of greatest factorial factors of ``f``.

    Examples
    ========

    >>> from sympy import gff_list, ff
    >>> from sympy.abc import x

    >>> f = x**5 + 2*x**4 - x**3 - 2*x**2

    >>> gff_list(f)
    [(x, 1), (x + 2, 4)]

    >>> (ff(x, 1)*ff(x + 2, 4)).expand() == f
    True

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('gff_list', 1, exc)

    factors = F.gff_list()

    if not opt.polys:
        return [ (g.as_expr(), k) for g, k in factors ]
    else:
        return factors
def gff(f, *gens, **args):
    """Compute greatest factorial factorization of ``f``.

    Not implemented for symbolic input; see :func:`gff_list` for the
    list form of the factorization.
    """
    raise NotImplementedError('symbolic falling factorial')
def sqf_norm(f, *gens, **args):
    """
    Compute square-free norm of ``f``.

    Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and
    ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
    where ``a`` is the algebraic extension of the ground domain.

    Examples
    ========

    >>> from sympy import sqf_norm, sqrt
    >>> from sympy.abc import x

    >>> sqf_norm(x**2 + 1, extension=[sqrt(3)])
    (1, x**2 - 2*sqrt(3)*x + 4, x**4 - 4*x**2 + 16)

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_norm', 1, exc)

    s, g, r = F.sqf_norm()

    if not opt.polys:
        return Integer(s), g.as_expr(), r.as_expr()
    else:
        return Integer(s), g, r
def sqf_part(f, *gens, **args):
    """
    Compute square-free part of ``f``.

    Examples
    ========

    >>> from sympy import sqf_part
    >>> from sympy.abc import x

    >>> sqf_part(x**3 - 3*x - 2)
    x**2 - x - 2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_part', 1, exc)

    result = F.sqf_part()

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def _sorted_factors(factors, method):
"""Sort a list of ``(expr, exp)`` pairs. """
if method == 'sqf':
def key(obj):
poly, exp = obj
rep = poly.rep.rep
return (exp, len(rep), rep)
else:
def key(obj):
poly, exp = obj
rep = poly.rep.rep
return (len(rep), exp, rep)
return sorted(factors, key=key)
def _factors_product(factors):
    """Multiply a list of ``(expr, exp)`` pairs back into a single Mul."""
    return Mul(*(base.as_expr()**exp for base, exp in factors))
def _symbolic_factor_list(expr, opt, method):
    """Helper function for :func:`_symbolic_factor`.

    Splits ``expr`` (treated as a Mul) into a numeric coefficient and a
    list of ``(factor, exponent)`` pairs obtained via the Poly method
    named by ``method`` (``'sqf'`` or ``'factor'``).
    """
    coeff, factors = S.One, []

    for arg in Mul.make_args(expr):
        if arg.is_Number:
            coeff *= arg
            continue
        elif arg.is_Pow:
            base, exp = arg.args

            if base.is_Number:
                factors.append((base, exp))
                continue
        else:
            base, exp = arg, S.One

        try:
            poly, _ = _poly_from_expr(base, opt)
        except PolificationFailed as exc:
            # Not polifiable: keep the original expression as-is.
            factors.append((exc.expr, exp))
        else:
            func = getattr(poly, method + '_list')
            _coeff, _factors = func()

            if _coeff is not S.One:
                if exp.is_Integer:
                    coeff *= _coeff**exp
                elif _coeff.is_positive:
                    factors.append((_coeff, exp))
                else:
                    # Negative non-integer-power coefficient must stay
                    # attached to the factor list; mark it with None.
                    _factors.append((_coeff, None))

            if exp is S.One:
                factors.extend(_factors)
            elif exp.is_integer or len(_factors) == 1:
                factors.extend([ (f, k*exp) for f, k in _factors ])
            else:
                # Fractional power of a product: only provably positive
                # factors may be distributed; the rest stay grouped.
                other = []

                for f, k in _factors:
                    if f.as_expr().is_positive:
                        factors.append((f, k*exp))
                    elif k is not None:
                        other.append((f, k))
                    else:
                        other.append((f, S.One))

                if len(other) == 1:
                    f, k = other[0]
                    factors.append((f, k*exp))
                else:
                    factors.append((_factors_product(other), exp))

    return coeff, factors
def _symbolic_factor(expr, opt, method):
    """Helper function for :func:`_factor`.

    Factors an Expr directly; for other objects, recurses into their
    arguments or elements and rebuilds the container.
    """
    # Non-relational Expr: combine over a common denominator and factor.
    if isinstance(expr, Expr) and not expr.is_Relational:
        coeff, factors = _symbolic_factor_list(together(expr), opt, method)
        return _keep_coeff(coeff, _factors_product(factors))

    # Other sympy objects (relations, etc.): rebuild from factored args.
    if hasattr(expr, 'args'):
        new_args = [_symbolic_factor(arg, opt, method) for arg in expr.args]
        return expr.func(*new_args)

    # Plain iterables: map over the elements, preserving the type.
    if hasattr(expr, '__iter__'):
        return expr.__class__([_symbolic_factor(arg, opt, method) for arg in expr])

    return expr
def _generic_factor_list(expr, gens, args, method):
    """Helper function for :func:`sqf_list` and :func:`factor_list`.

    Factors numerator and denominator separately; a non-trivial
    denominator is only allowed when the ``frac`` flag is set, in which
    case ``(coeff, fp, fq)`` is returned instead of ``(coeff, fp)``.
    """
    options.allowed_flags(args, ['frac', 'polys'])
    opt = options.build_options(gens, args)

    expr = sympify(expr)

    if isinstance(expr, Expr) and not expr.is_Relational:
        numer, denom = together(expr).as_numer_denom()

        cp, fp = _symbolic_factor_list(numer, opt, method)
        cq, fq = _symbolic_factor_list(denom, opt, method)

        if fq and not opt.frac:
            raise PolynomialError("a polynomial expected, got %s" % expr)

        _opt = opt.clone(dict(expand=True))

        # Normalize: make sure every factor is a Poly before sorting.
        for factors in (fp, fq):
            for i, (f, k) in enumerate(factors):
                if not f.is_Poly:
                    f, _ = _poly_from_expr(f, _opt)
                    factors[i] = (f, k)

        fp = _sorted_factors(fp, method)
        fq = _sorted_factors(fq, method)

        if not opt.polys:
            fp = [ (f.as_expr(), k) for f, k in fp ]
            fq = [ (f.as_expr(), k) for f, k in fq ]

        coeff = cp/cq

        if not opt.frac:
            return coeff, fp
        else:
            return coeff, fp, fq
    else:
        raise PolynomialError("a polynomial expected, got %s" % expr)
def _generic_factor(expr, gens, args, method):
    """Shared driver for :func:`sqf` and :func:`factor`."""
    # No extra flags are accepted at this level.
    options.allowed_flags(args, [])
    opt = options.build_options(gens, args)
    sympified = sympify(expr)
    return _symbolic_factor(sympified, opt, method)
def to_rational_coeffs(f):
    """
    try to transform a polynomial to have rational coefficients

    try to find a transformation ``x = alpha*y``

    ``f(x) = lc*alpha**n * g(y)`` where ``g`` is a polynomial with
    rational coefficients, ``lc`` the leading coefficient.

    If this fails, try ``x = y + beta``
    ``f(x) = g(y)``

    Returns ``None`` if ``g`` not found;
    ``(lc, alpha, None, g)`` in case of rescaling
    ``(None, None, beta, g)`` in case of translation

    Notes
    =====

    Currently it transforms only polynomials without roots larger than 2.

    Examples
    ========

    >>> from sympy import sqrt, Poly, simplify, expand
    >>> from sympy.polys.polytools import to_rational_coeffs
    >>> from sympy.abc import x
    >>> p = Poly(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))}), x, domain='EX')
    >>> lc, r, _, g = to_rational_coeffs(p)
    >>> lc, r
    (7 + 5*sqrt(2), -2*sqrt(2) + 2)
    >>> g
    Poly(x**3 + x**2 - 1/4*x - 1/4, x, domain='QQ')
    >>> r1 = simplify(1/r)
    >>> Poly(lc*r**3*(g.as_expr()).subs({x:x*r1}), x, domain='EX') == p
    True

    """
    from sympy.simplify.simplify import simplify

    def _try_rescale(f):
        """
        try rescaling ``x -> alpha*x`` to convert f to a polynomial
        with rational coefficients.

        On success returns ``(lc, rescale_x, f)`` where ``lc`` is the
        original leading coefficient, ``rescale_x`` the rescaling factor
        and ``f`` the rescaled monic polynomial; on failure returns
        ``None``.
        """
        from sympy.core.add import Add
        # NOTE(review): this path returns a 2-tuple ``(None, f)`` which is
        # truthy, unlike the bare ``return None`` below; the caller's
        # ``if r:`` test would then index ``r[2]``. Only reachable for
        # multivariate / non-Atom generators — confirm callers avoid it.
        if not len(f.gens) == 1 or not (f.gens[0]).is_Atom:
            return None, f
        n = f.degree()
        lc = f.LC()
        coeffs = f.monic().all_coeffs()[1:]
        coeffs = [simplify(coeffx) for coeffx in coeffs]
        if coeffs[-2] and not all(coeffx.is_rational for coeffx in coeffs):
            # Candidate scale factor from the ratio of the two lowest
            # coefficients of the monic polynomial.
            rescale1_x = simplify(coeffs[-2]/coeffs[-1])
            coeffs1 = []
            for i in range(len(coeffs)):
                coeffx = simplify(coeffs[i]*rescale1_x**(i + 1))
                if not coeffx.is_rational:
                    break
                coeffs1.append(coeffx)
            else:
                # All rescaled coefficients are rational: rebuild the
                # polynomial from them.
                rescale_x = simplify(1/rescale1_x)
                x = f.gens[0]
                v = [x**n]
                for i in range(1, n + 1):
                    v.append(coeffs1[i - 1]*x**(n - i))
                f = Add(*v)
                f = Poly(f)
                return lc, rescale_x, f
        return None

    def _try_translate(f):
        """
        try translating ``x -> x + alpha`` to convert f to a polynomial
        with rational coefficients.

        On success returns ``(alpha, f)`` where ``alpha`` is the shift
        and ``f`` the shifted polynomial; on failure returns ``None``.
        """
        from sympy.core.add import Add
        from sympy.utilities.iterables import sift
        # NOTE(review): same truthy ``(None, f)`` inconsistency as in
        # _try_rescale above.
        if not len(f.gens) == 1 or not (f.gens[0]).is_Atom:
            return None, f
        n = f.degree()
        f1 = f.monic()
        coeffs = f1.all_coeffs()[1:]
        c = simplify(coeffs[0])
        if c and not c.is_rational:
            if c.is_Add:
                args = c.args
            else:
                args = [c]
            # Shift by the mean of the irrational part of the subleading
            # coefficient to cancel it.
            sifted = sift(args, lambda z: z.is_rational)
            c1, c2 = sifted[True], sifted[False]
            alpha = -Add(*c2)/n
            f2 = f1.shift(alpha)
            return alpha, f2
        return None

    def _has_square_roots(p):
        """
        Return True if ``f`` is a sum with square roots but no other root
        """
        from sympy.core.exprtools import Factors
        coeffs = p.coeffs()
        has_sq = False
        for y in coeffs:
            for x in Add.make_args(y):
                f = Factors(x).factors
                r = [wx.q for wx in f.values() if wx.is_Rational and wx.q >= 2]
                if not r:
                    continue
                if min(r) == 2:
                    has_sq = True
                if max(r) > 2:
                    # A root of index > 2 disqualifies the polynomial.
                    return False
        return has_sq

    # Only attempt the transformations over EX when the coefficients
    # involve square roots (and nothing of higher index).
    if f.get_domain().is_EX and _has_square_roots(f):
        rescale_x = None
        translate_x = None
        r = _try_rescale(f)
        if r:
            return r[0], r[1], None, r[2]
        else:
            r = _try_translate(f)
            if r:
                return None, None, r[0], r[1]
    return None
def _torational_factor_list(p, x):
    """
    helper function to factor polynomial using to_rational_coeffs

    Examples
    ========

    >>> from sympy.polys.polytools import _torational_factor_list
    >>> from sympy.abc import x
    >>> from sympy import sqrt, expand, Mul
    >>> p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))}))
    >>> factors = _torational_factor_list(p, x); factors
    (-2, [(-x*(1 + sqrt(2))/2 + 1, 1), (-x*(1 + sqrt(2)) - 1, 1), (-x*(1 + sqrt(2)) + 1, 1)])
    >>> expand(factors[0]*Mul(*[z[0] for z in factors[1]])) == p
    True
    >>> p = expand(((x**2-1)*(x-2)).subs({x:x + sqrt(2)}))
    >>> factors = _torational_factor_list(p, x); factors
    (1, [(x - 2 + sqrt(2), 1), (x - 1 + sqrt(2), 1), (x + 1 + sqrt(2), 1)])
    >>> expand(factors[0]*Mul(*[z[0] for z in factors[1]])) == p
    True
    """
    from sympy.simplify.simplify import simplify
    poly = Poly(p, x, domain='EX')
    deg = poly.degree()
    res = to_rational_coeffs(poly)
    if not res:
        return None
    lc, r, t, g = res
    factors = factor_list(g.as_expr())
    pairs = []
    if lc:
        # Rescaling case: fold lc*r**deg into the coefficient and undo
        # the substitution x -> x/r inside every factor.
        c = simplify(factors[0]*lc*r**deg)
        r1 = simplify(1/r)
        for base, exp in factors[1]:
            pairs.append((simplify(base.subs({x: x*r1})), exp))
    else:
        # Translation case: shift each factor back by t.
        c = factors[0]
        for base, exp in factors[1]:
            pairs.append((base.subs({x: x - t}), exp))
    return (c, pairs)
def sqf_list(f, *gens, **args):
    """
    Compute a list of square-free factors of ``f``.

    Examples
    ========

    >>> from sympy import sqf_list
    >>> from sympy.abc import x

    >>> sqf_list(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
    (2, [(x + 1, 2), (x + 2, 3)])

    """
    # Delegate to the shared factor-list driver in square-free mode.
    return _generic_factor_list(f, gens, args, 'sqf')
def sqf(f, *gens, **args):
    """
    Compute square-free factorization of ``f``.

    Examples
    ========

    >>> from sympy import sqf
    >>> from sympy.abc import x

    >>> sqf(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
    2*(x + 1)**2*(x + 2)**3

    """
    # Delegate to the shared symbolic-factor driver in square-free mode.
    return _generic_factor(f, gens, args, 'sqf')
def factor_list(f, *gens, **args):
    """
    Compute a list of irreducible factors of ``f``.

    Examples
    ========

    >>> from sympy import factor_list
    >>> from sympy.abc import x, y

    >>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
    (2, [(x + y, 1), (x**2 + 1, 2)])

    """
    # Delegate to the shared factor-list driver in full-factor mode.
    return _generic_factor_list(f, gens, args, 'factor')
def factor(f, *gens, **args):
    """
    Compute the factorization of expression, ``f``, into irreducibles. (To
    factor an integer into primes, use ``factorint``.)

    There two modes implemented: symbolic and formal. If ``f`` is not an
    instance of :class:`Poly` and generators are not specified, then the
    former mode is used. Otherwise, the formal mode is used.

    In symbolic mode, :func:`factor` will traverse the expression tree and
    factor its components without any prior expansion, unless an instance
    of :class:`Add` is encountered (in this case formal factorization is
    used). This way :func:`factor` can handle large or symbolic exponents.

    By default, the factorization is computed over the rationals. To factor
    over other domain, e.g. an algebraic or finite field, use appropriate
    options: ``extension``, ``modulus`` or ``domain``.

    Examples
    ========

    >>> from sympy import factor, sqrt
    >>> from sympy.abc import x, y

    >>> factor(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
    2*(x + y)*(x**2 + 1)**2

    >>> factor(x**2 + 1)
    x**2 + 1
    >>> factor(x**2 + 1, modulus=2)
    (x + 1)**2
    >>> factor(x**2 + 1, gaussian=True)
    (x - I)*(x + I)

    >>> factor(x**2 - 2, extension=sqrt(2))
    (x - sqrt(2))*(x + sqrt(2))

    >>> factor((x**2 - 1)/(x**2 + 4*x + 4))
    (x - 1)*(x + 1)/(x + 2)**2
    >>> factor((x**2 + 4*x + 4)**10000000*(x**2 + 1))
    (x + 2)**20000000*(x**2 + 1)

    By default, factor deals with an expression as a whole:

    >>> eq = 2**(x**2 + 2*x + 1)
    >>> factor(eq)
    2**(x**2 + 2*x + 1)

    If the ``deep`` flag is True then subexpressions will
    be factored:

    >>> factor(eq, deep=True)
    2**((x + 1)**2)

    See Also
    ========
    sympy.ntheory.factor_.factorint

    """
    f = sympify(f)
    if args.pop('deep', False):
        # Factor every Mul/Add subexpression and substitute the results
        # back into the original tree.
        partials = {}
        muladd = f.atoms(Mul, Add)
        for p in muladd:
            fac = factor(p, *gens, **args)
            if (fac.is_Mul or fac.is_Pow) and fac != p:
                partials[p] = fac
        return f.xreplace(partials)

    try:
        return _generic_factor(f, gens, args, method='factor')
    except PolynomialError as msg:
        if not f.is_commutative:
            from sympy.core.exprtools import factor_nc
            return factor_nc(f)
        else:
            raise PolynomialError(msg)
def intervals(F, all=False, eps=None, inf=None, sup=None, strict=False, fast=False, sqf=False):
    """
    Compute isolating intervals for roots of ``f``.

    ``F`` may be a single expression or an iterable of expressions;
    in the iterable case all polynomials must share one generator and
    roots are isolated collectively (the ``strict`` flag only applies
    there).

    Examples
    ========

    >>> from sympy import intervals
    >>> from sympy.abc import x

    >>> intervals(x**2 - 3)
    [((-2, -1), 1), ((1, 2), 1)]
    >>> intervals(x**2 - 3, eps=1e-2)
    [((-26/15, -19/11), 1), ((19/11, 26/15), 1)]

    """
    if not hasattr(F, '__iter__'):
        try:
            F = Poly(F)
        except GeneratorsNeeded:
            # A constant has no roots to isolate.
            return []

        return F.intervals(all=all, eps=eps, inf=inf, sup=sup, fast=fast, sqf=sqf)
    else:
        polys, opt = parallel_poly_from_expr(F, domain='QQ')

        if len(opt.gens) > 1:
            raise MultivariatePolynomialError

        # Work on the low-level dense representations.
        for i, poly in enumerate(polys):
            polys[i] = poly.rep.rep

        if eps is not None:
            eps = opt.domain.convert(eps)

            if eps <= 0:
                raise ValueError("'eps' must be a positive rational")

        if inf is not None:
            inf = opt.domain.convert(inf)
        if sup is not None:
            sup = opt.domain.convert(sup)

        intervals = dup_isolate_real_roots_list(polys, opt.domain,
            eps=eps, inf=inf, sup=sup, strict=strict, fast=fast)

        result = []

        # Convert the interval endpoints back to sympy numbers.
        for (s, t), indices in intervals:
            s, t = opt.domain.to_sympy(s), opt.domain.to_sympy(t)
            result.append(((s, t), indices))

        return result
def refine_root(f, s, t, eps=None, steps=None, fast=False, check_sqf=False):
    """
    Refine an isolating interval of a root to the given precision.

    Examples
    ========

    >>> from sympy import refine_root
    >>> from sympy.abc import x

    >>> refine_root(x**2 - 3, 1, 2, eps=1e-2)
    (19/11, 26/15)

    """
    try:
        poly = Poly(f)
    except GeneratorsNeeded:
        raise PolynomialError(
            "can't refine a root of %s, not a polynomial" % f)

    return poly.refine_root(s, t, eps=eps, steps=steps, fast=fast,
        check_sqf=check_sqf)
def count_roots(f, inf=None, sup=None):
    """
    Return the number of roots of ``f`` in ``[inf, sup]`` interval.

    If one of ``inf`` or ``sup`` is complex, it will return the number of roots
    in the complex rectangle with corners at ``inf`` and ``sup``.

    Examples
    ========

    >>> from sympy import count_roots, I
    >>> from sympy.abc import x

    >>> count_roots(x**4 - 4, -3, 3)
    2
    >>> count_roots(x**4 - 4, 0, 1 + 3*I)
    1

    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError("can't count roots of %s, not a polynomial" % f)

    return poly.count_roots(inf=inf, sup=sup)
def real_roots(f, multiple=True):
    """
    Return a list of real roots with multiplicities of ``f``.

    Examples
    ========

    >>> from sympy import real_roots
    >>> from sympy.abc import x

    >>> real_roots(2*x**3 - 7*x**2 + 4*x + 4)
    [-1/2, 2, 2]

    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(
            "can't compute real roots of %s, not a polynomial" % f)

    return poly.real_roots(multiple=multiple)
def nroots(f, n=15, maxsteps=50, cleanup=True, error=False):
    """
    Compute numerical approximations of roots of ``f``.

    Examples
    ========

    >>> from sympy import nroots
    >>> from sympy.abc import x

    >>> nroots(x**2 - 3, n=15)
    [-1.73205080756888, 1.73205080756888]
    >>> nroots(x**2 - 3, n=30)
    [-1.73205080756887729352744634151, 1.73205080756887729352744634151]

    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(
            "can't compute numerical roots of %s, not a polynomial" % f)

    return poly.nroots(n=n, maxsteps=maxsteps, cleanup=cleanup, error=error)
def ground_roots(f, *gens, **args):
    """
    Compute roots of ``f`` by factorization in the ground domain.

    Examples
    ========

    >>> from sympy import ground_roots
    >>> from sympy.abc import x

    >>> ground_roots(x**6 - 4*x**4 + 4*x**3 - x**2)
    {0: 2, 1: 2}

    """
    options.allowed_flags(args, [])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('ground_roots', 1, exc)

    return F.ground_roots()
def nth_power_roots_poly(f, n, *gens, **args):
    """
    Construct a polynomial with n-th powers of roots of ``f``.

    Examples
    ========

    >>> from sympy import nth_power_roots_poly, factor, roots
    >>> from sympy.abc import x

    >>> f = x**4 - x**2 + 1
    >>> g = factor(nth_power_roots_poly(f, 2))

    >>> g
    (x**2 - x + 1)**2

    >>> R_f = [ (r**2).expand() for r in roots(f) ]
    >>> R_g = roots(g).keys()

    >>> set(R_f) == set(R_g)
    True

    """
    options.allowed_flags(args, [])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('nth_power_roots_poly', 1, exc)

    result = F.nth_power_roots_poly(n)

    if not opt.polys:
        return result.as_expr()
    else:
        return result
def cancel(f, *gens, **args):
    """
    Cancel common factors in a rational function ``f``.

    Examples
    ========

    >>> from sympy import cancel, sqrt, Symbol
    >>> from sympy.abc import x
    >>> A = Symbol('A', commutative=False)

    >>> cancel((2*x**2 - 2)/(x**2 - 2*x + 1))
    (2*x + 2)/(x - 1)
    >>> cancel((sqrt(3) + sqrt(15)*A)/(sqrt(2) + sqrt(10)*A))
    sqrt(6)/2

    """
    from sympy.core.exprtools import factor_terms
    options.allowed_flags(args, ['polys'])

    f = sympify(f)

    # Accept either an expression or an explicit (numerator, denominator)
    # pair.
    if not isinstance(f, (tuple, Tuple)):
        if f.is_Number:
            return f
        f = factor_terms(f, radical=True)
        p, q = f.as_numer_denom()
    elif len(f) == 2:
        p, q = f
    elif isinstance(f, Tuple):
        return factor_terms(f)
    else:
        raise ValueError('unexpected argument: %s' % f)

    try:
        (F, G), opt = parallel_poly_from_expr((p, q), *gens, **args)
    except PolificationFailed:
        if not isinstance(f, (tuple, Tuple)):
            return f
        else:
            return S.One, p, q
    except PolynomialError as msg:
        if f.is_commutative:
            raise PolynomialError(msg)
        # non-commutative
        if f.is_Mul:
            c, nc = f.args_cnc(split_1=False)
            nc = [cancel(i) for i in nc]
            return cancel(Mul._from_args(c))*Mul(*nc)
        elif f.is_Add:
            c = []
            nc = []
            for i in f.args:
                if i.is_commutative:
                    c.append(i)
                else:
                    nc.append(cancel(i))
            return cancel(Add(*c)) + Add(*nc)
        else:
            # Walk the tree and cancel whatever subexpressions we can.
            reps = []
            pot = preorder_traversal(f)
            next(pot)
            for e in pot:
                if isinstance(e, (tuple, Tuple)):
                    continue
                try:
                    reps.append((e, cancel(e)))
                    pot.skip()  # this was handled successfully
                except NotImplementedError:
                    pass
            return f.xreplace(dict(reps))

    c, P, Q = F.cancel(G)

    if not isinstance(f, (tuple, Tuple)):
        return c*(P.as_expr()/Q.as_expr())
    else:
        if not opt.polys:
            return c, P.as_expr(), Q.as_expr()
        else:
            return c, P, Q
def reduced(f, G, *gens, **args):
    """
    Reduces a polynomial ``f`` modulo a set of polynomials ``G``.

    Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
    computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
    such that ``f = q_1*f_1 + ... + q_n*f_n + r``, where ``r`` vanishes or ``r``
    is a completely reduced polynomial with respect to ``G``.

    Examples
    ========

    >>> from sympy import reduced
    >>> from sympy.abc import x, y

    >>> reduced(2*x**4 + y**2 - x**2 + y**3, [x**3 - x, y**3 - y])
    ([2*x, 1], x**2 + y**2 + y)

    """
    options.allowed_flags(args, ['polys', 'auto'])

    try:
        polys, opt = parallel_poly_from_expr([f] + list(G), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('reduced', 0, exc)

    domain = opt.domain
    retract = False

    # Division requires a field; lift a plain ring to its field of
    # fractions and try to retract the result afterwards.
    if opt.auto and domain.has_Ring and not domain.has_Field:
        opt = opt.clone(dict(domain=domain.get_field()))
        retract = True

    from sympy.polys.rings import xring
    _ring, _ = xring(opt.gens, opt.domain, opt.order)

    for i, poly in enumerate(polys):
        poly = poly.set_domain(opt.domain).rep.to_dict()
        polys[i] = _ring.from_dict(poly)

    Q, r = polys[0].div(polys[1:])

    Q = [ Poly._from_dict(dict(q), opt) for q in Q ]
    r = Poly._from_dict(dict(r), opt)

    if retract:
        try:
            _Q, _r = [ q.to_ring() for q in Q ], r.to_ring()
        except CoercionFailed:
            # Result does not fit back into the original ring; keep the
            # field version.
            pass
        else:
            Q, r = _Q, _r

    if not opt.polys:
        return [ q.as_expr() for q in Q ], r.as_expr()
    else:
        return Q, r
def groebner(F, *gens, **args):
    """
    Computes the reduced Groebner basis for a set of polynomials.

    Use the ``order`` argument to set the monomial ordering that will be
    used to compute the basis. Allowed orders are ``lex``, ``grlex`` and
    ``grevlex``. If no order is specified, it defaults to ``lex``.

    For more information on Groebner bases, see the references and the docstring
    of `solve_poly_system()`.

    Examples
    ========

    Example taken from [1].

    >>> from sympy import groebner
    >>> from sympy.abc import x, y

    >>> F = [x*y - 2*y, 2*y**2 - x**2]

    >>> groebner(F, x, y, order='lex')
    GroebnerBasis([x**2 - 2*y**2, x*y - 2*y, y**3 - 2*y], x, y,
                  domain='ZZ', order='lex')
    >>> groebner(F, x, y, order='grlex')
    GroebnerBasis([y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y], x, y,
                  domain='ZZ', order='grlex')
    >>> groebner(F, x, y, order='grevlex')
    GroebnerBasis([y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y], x, y,
                  domain='ZZ', order='grevlex')

    By default, an improved implementation of the Buchberger algorithm is
    used. Optionally, an implementation of the F5B algorithm can be used.
    The algorithm can be set using ``method`` flag or with the :func:`setup`
    function from :mod:`sympy.polys.polyconfig`:

    >>> F = [x**2 - x - 1, (2*x - 1) * y - (x**10 - (1 - x)**10)]

    >>> groebner(F, x, y, method='buchberger')
    GroebnerBasis([x**2 - x - 1, y - 55], x, y, domain='ZZ', order='lex')
    >>> groebner(F, x, y, method='f5b')
    GroebnerBasis([x**2 - x - 1, y - 55], x, y, domain='ZZ', order='lex')

    References
    ==========

    1. [Buchberger01]_
    2. [Cox97]_

    """
    # All of the actual work happens in the GroebnerBasis constructor.
    return GroebnerBasis(F, *gens, **args)
def is_zero_dimensional(F, *gens, **args):
    """
    Checks if the ideal generated by a Groebner basis is zero-dimensional.

    The algorithm checks if the set of monomials not divisible by the
    leading monomial of any element of ``F`` is bounded.

    References
    ==========

    David A. Cox, John B. Little, Donal O'Shea. Ideals, Varieties and
    Algorithms, 3rd edition, p. 230

    """
    basis = GroebnerBasis(F, *gens, **args)
    return basis.is_zero_dimensional
class GroebnerBasis(Basic):
    """Represents a reduced Groebner basis. """
    # _basis: tuple of Poly instances; _options: the build options
    # (gens, domain, order, ...) the basis was computed with.
    __slots__ = ['_basis', '_options']
    def __new__(cls, F, *gens, **args):
        """Compute a reduced Groebner basis for a system of polynomials. """
        options.allowed_flags(args, ['polys', 'method'])
        try:
            polys, opt = parallel_poly_from_expr(F, *gens, **args)
        # NOTE: Python 2 except syntax; matches the rest of this file.
        except PolificationFailed, exc:
            raise ComputationFailed('groebner', len(F), exc)
        domain = opt.domain
        # Groebner bases are computed over a field; lift a ring (e.g. ZZ)
        # to its associated field (QQ) and clear denominators afterwards.
        if domain.has_assoc_Field:
            opt.domain = domain.get_field()
        else:
            raise DomainError("can't compute a Groebner basis over %s" % opt.domain)
        from sympy.polys.rings import xring
        _ring, _ = xring(opt.gens, opt.domain, opt.order)
        # Convert every Poly to the low-level sparse ring representation.
        for i, poly in enumerate(polys):
            poly = poly.set_domain(opt.domain).rep.to_dict()
            polys[i] = _ring.from_dict(poly)
        G = _groebner(polys, _ring, method=opt.method)
        G = [ Poly._from_dict(g, opt) for g in G ]
        # Retract to the original ring domain if we lifted to a field above.
        if not domain.has_Field:
            G = [ g.clear_denoms(convert=True)[1] for g in G ]
            opt.domain = domain
        return cls._new(G, opt)
    @classmethod
    def _new(cls, basis, options):
        # Internal constructor: wrap an already-computed basis without
        # recomputing anything.
        obj = Basic.__new__(cls)
        obj._basis = tuple(basis)
        obj._options = options
        return obj
    @property
    def args(self):
        # Basic-protocol args: the basis polynomials and the generators.
        return (Tuple(*self._basis), Tuple(*self._options.gens))
    @property
    def exprs(self):
        # Basis elements as plain Expr objects.
        return [ poly.as_expr() for poly in self._basis ]
    @property
    def polys(self):
        # Basis elements as Poly objects (fresh list each call).
        return list(self._basis)
    @property
    def gens(self):
        return self._options.gens
    @property
    def domain(self):
        return self._options.domain
    @property
    def order(self):
        return self._options.order
    def __len__(self):
        return len(self._basis)
    def __iter__(self):
        # The 'polys' flag chooses between Poly and Expr iteration.
        if self._options.polys:
            return iter(self.polys)
        else:
            return iter(self.exprs)
    def __getitem__(self, item):
        if self._options.polys:
            basis = self.polys
        else:
            basis = self.exprs
        return basis[item]
    def __hash__(self):
        return hash((self._basis, tuple(self._options.items())))
    def __eq__(self, other):
        # Equal to another GroebnerBasis (same basis and options), or to any
        # iterable whose elements match either the Poly or the Expr view.
        if isinstance(other, self.__class__):
            return self._basis == other._basis and self._options == other._options
        elif iterable(other):
            return self.polys == list(other) or self.exprs == list(other)
        else:
            return False
    def __ne__(self, other):
        return not self.__eq__(other)
    @property
    def is_zero_dimensional(self):
        """
        Checks if the ideal generated by a Groebner basis is zero-dimensional.
        The algorithm checks if the set of monomials not divisible by the
        leading monomial of any element of ``F`` is bounded.
        References
        ==========
        David A. Cox, John B. Little, Donal O'Shea. Ideals, Varieties and
        Algorithms, 3rd edition, p. 230
        """
        def single_var(monomial):
            # True iff exactly one exponent in the monomial is non-zero.
            return sum(map(bool, monomial)) == 1
        exponents = Monomial([0]*len(self.gens))
        order = self._options.order
        # Accumulate, per generator, a pure power appearing as some leading
        # monomial; a pure power of every generator bounds the quotient.
        for poly in self.polys:
            monomial = poly.LM(order=order)
            if single_var(monomial):
                exponents *= monomial
        # If any element of the exponents vector is zero, then there's
        # a variable for which there's no degree bound and the ideal
        # generated by this Groebner basis isn't zero-dimensional.
        return all(exponents)
    def fglm(self, order):
        """
        Convert a Groebner basis from one ordering to another.
        The FGLM algorithm converts reduced Groebner bases of zero-dimensional
        ideals from one ordering to another. This method is often used when it
        is infeasible to compute a Groebner basis with respect to a particular
        ordering directly.
        Examples
        ========
        >>> from sympy.abc import x, y
        >>> from sympy import groebner
        >>> F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]
        >>> G = groebner(F, x, y, order='grlex')
        >>> list(G.fglm('lex'))
        [2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7]
        >>> list(groebner(F, x, y, order='lex'))
        [2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7]
        References
        ==========
        J.C. Faugere, P. Gianni, D. Lazard, T. Mora (1994). Efficient
        Computation of Zero-dimensional Groebner Bases by Change of
        Ordering
        """
        opt = self._options
        src_order = opt.order
        dst_order = monomial_key(order)
        # Nothing to do if the target ordering equals the current one.
        if src_order == dst_order:
            return self
        # FGLM only applies to zero-dimensional ideals.
        if not self.is_zero_dimensional:
            raise NotImplementedError("can't convert Groebner bases of ideals with positive dimension")
        polys = list(self._basis)
        domain = opt.domain
        opt = opt.clone(dict(
            domain=domain.get_field(),
            order=dst_order,
        ))
        from sympy.polys.rings import xring
        # NOTE: the ring is built with the *source* order; the conversion
        # to dst_order happens inside matrix_fglm.
        _ring, _ = xring(opt.gens, opt.domain, src_order)
        for i, poly in enumerate(polys):
            poly = poly.set_domain(opt.domain).rep.to_dict()
            polys[i] = _ring.from_dict(poly)
        G = matrix_fglm(polys, _ring, dst_order)
        G = [ Poly._from_dict(dict(g), opt) for g in G ]
        # Retract to the original ring domain, as in __new__.
        if not domain.has_Field:
            G = [ g.clear_denoms(convert=True)[1] for g in G ]
            opt.domain = domain
        return self._new(G, opt)
    def reduce(self, expr, auto=True):
        """
        Reduces a polynomial modulo a Groebner basis.
        Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
        computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
        such that ``f = q_1*f_1 + ... + q_n*f_n + r``, where ``r`` vanishes or ``r``
        is a completely reduced polynomial with respect to ``G``.
        Examples
        ========
        >>> from sympy import groebner, expand
        >>> from sympy.abc import x, y
        >>> f = 2*x**4 - x**2 + y**3 + y**2
        >>> G = groebner([x**3 - x, y**3 - y])
        >>> G.reduce(f)
        ([2*x, 1], x**2 + y**2 + y)
        >>> Q, r = _
        >>> expand(sum(q*g for q, g in zip(Q, G)) + r)
        2*x**4 - x**2 + y**3 + y**2
        >>> _ == f
        True
        """
        poly = Poly._from_expr(expr, self._options)
        # polys[0] is the dividend; the rest are the basis divisors.
        polys = [poly] + list(self._basis)
        opt = self._options
        domain = opt.domain
        retract = False
        # With auto=True, temporarily lift a ring domain to its field so the
        # division is exact, then try to retract the result below.
        if auto and domain.has_Ring and not domain.has_Field:
            opt = opt.clone(dict(domain=domain.get_field()))
            retract = True
        from sympy.polys.rings import xring
        _ring, _ = xring(opt.gens, opt.domain, opt.order)
        for i, poly in enumerate(polys):
            poly = poly.set_domain(opt.domain).rep.to_dict()
            polys[i] = _ring.from_dict(poly)
        Q, r = polys[0].div(polys[1:])
        Q = [ Poly._from_dict(dict(q), opt) for q in Q ]
        r = Poly._from_dict(dict(r), opt)
        if retract:
            try:
                _Q, _r = [ q.to_ring() for q in Q ], r.to_ring()
            # If coefficients don't fit back in the ring, keep the field
            # result (best effort; deliberate silent fallback).
            except CoercionFailed:
                pass
            else:
                Q, r = _Q, _r
        if not opt.polys:
            return [ q.as_expr() for q in Q ], r.as_expr()
        else:
            return Q, r
    def contains(self, poly):
        """
        Check if ``poly`` belongs the ideal generated by ``self``.
        Examples
        ========
        >>> from sympy import groebner
        >>> from sympy.abc import x, y
        >>> f = 2*x**3 + y**3 + 3*y
        >>> G = groebner([x**2 + y**2 - 1, x*y - 2])
        >>> G.contains(f)
        True
        >>> G.contains(f + 1)
        False
        """
        # A polynomial is in the ideal iff its remainder modulo the
        # Groebner basis is zero.
        return self.reduce(poly)[1] == 0
def poly(expr, *gens, **args):
    """
    Efficiently transform an expression into a polynomial.

    Unlike ``Poly(expr.expand(), ...)``, this builds the Poly bottom-up from
    the expression tree without fully expanding products and powers first.
    Examples
    ========
    >>> from sympy import poly
    >>> from sympy.abc import x
    >>> poly(x*(x**2 + x - 1)**2)
    Poly(x**5 + 2*x**4 - x**3 - 2*x**2 + x, x, domain='ZZ')
    """
    options.allowed_flags(args, [])
    def _poly(expr, opt):
        # Recursively convert an Expr to a Poly, multiplying/adding Poly
        # objects directly instead of expanding symbolically.
        terms, poly_terms = [], []
        for term in Add.make_args(expr):
            factors, poly_factors = [], []
            for factor in Mul.make_args(term):
                if factor.is_Add:
                    # Sums become sub-polynomials.
                    poly_factors.append(_poly(factor, opt))
                elif factor.is_Pow and factor.base.is_Add and factor.exp.is_Integer:
                    # Integer power of a sum: convert the base, then use
                    # Poly exponentiation.
                    poly_factors.append(
                        _poly(factor.base, opt).pow(factor.exp))
                else:
                    # Anything else (symbols, numbers, opaque factors) is
                    # kept symbolic for now.
                    factors.append(factor)
            if not poly_factors:
                # Term contains no polynomial structure; defer it.
                terms.append(term)
            else:
                # Multiply the polynomial factors together, then fold in the
                # remaining symbolic factors.
                product = poly_factors[0]
                for factor in poly_factors[1:]:
                    product = product.mul(factor)
                if factors:
                    factor = Mul(*factors)
                    if factor.is_Number:
                        product = product.mul(factor)
                    else:
                        product = product.mul(Poly._from_expr(factor, opt))
                poly_terms.append(product)
        if not poly_terms:
            # No polynomial structure anywhere: fall back to direct conversion.
            result = Poly._from_expr(expr, opt)
        else:
            # Sum the polynomial terms, then fold in the deferred symbolic terms.
            result = poly_terms[0]
            for term in poly_terms[1:]:
                result = result.add(term)
            if terms:
                term = Add(*terms)
                if term.is_Number:
                    result = result.add(term)
                else:
                    result = result.add(Poly._from_expr(term, opt))
        # Reorder generators to match any explicitly requested ordering.
        return result.reorder(*opt.get('gens', ()), **args)
    expr = sympify(expr)
    if expr.is_Poly:
        return Poly(expr, *gens, **args)
    # Suppress automatic expansion: avoiding expand() is the whole point.
    if 'expand' not in args:
        args['expand'] = False
    opt = options.build_options(gens, args)
    return _poly(expr, opt)
|
gpl-3.0
| 801,035,838,103,991,400
| 24.114396
| 103
| 0.490173
| false
| 3.482897
| false
| false
| false
|
Kanabanarama/Skrubba
|
skrubba/scheduler.py
|
1
|
6277
|
#!/usr/bin/env python
"""
File scheduler.py
Job scheduler for managing configured events
by Kana kanabanarama@googlemail.com
"""
import time
import atexit
import logging
from datetime import datetime
from apscheduler.schedulers.background import BackgroundScheduler # pylint: disable=import-error
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR # pylint: disable=import-error
from skrubba.shiftregister import Shiftregister
from skrubba.relay import Relay
from skrubba.display import Display
from skrubba.db import DB
class Scheduler():
    """
    Job scheduler for the configured watering events.

    Wraps an APScheduler BackgroundScheduler and drives the hardware:
    a shift register selecting the valves, a relay switching the pump,
    and a TFT display for status output.  All of these are class-level
    singletons shared by every instance.
    """
    SCHEDULER = BackgroundScheduler(standalone=True)
    STORE = DB()
    VALVES = Shiftregister()
    PUMP = Relay()
    TFT = Display()
    def __init__(self):
        logging.basicConfig()
        # Show the splash screen while the application boots.
        self.TFT.display_image('static/gfx/lcd-skrubba-color.png',
                               pos_x=67, pos_y=10, clear_screen=True)
    def unload_scheduler(self):
        """
        Scheduler cleanups: stop the scheduler and make sure no valve is
        left open.  Registered with atexit by start_scheduler().
        """
        self.SCHEDULER.shutdown()
        self.VALVES.disable()
        return True
    def is_running(self):
        # True while the background scheduler thread is alive.
        return self.SCHEDULER.running
    def valve_job(self, valve_setting): #(valve, onDuration)
        """
        Open a valve specified with settings: prime the pump, open the
        valve for roughly ``on_duration`` seconds, then shut everything
        down and log the run.
        """
        self.TFT.mark_active_job(valve_setting['id'], True)
        # +2 so the countdown loop below (which stops at 2) sleeps about
        # on_duration seconds in total.
        duration_left = int(valve_setting['on_duration']) + 2
        #binaryValveList = map(int, list(format(valve_setting['valve'], '08b')))
        # Start the pump first and give it a second to build pressure.
        self.PUMP.switch_on()
        time.sleep(1)
        #VALVES = Shiftregister()
        #shiftreg.output_list(binaryValveList)
        self.VALVES.output_decimal(valve_setting['valve'])
        self.VALVES.enable()
        while duration_left > 2:
            time.sleep(1)
            duration_left -= 1
        # Shut down in reverse order: pump off, then close/clear the valves.
        self.PUMP.switch_off()
        self.VALVES.disable()
        self.VALVES.reset()
        time.sleep(1)
        self.TFT.mark_active_job(valve_setting['id'], False)
        # Record the completed watering run in the database.
        self.STORE.add_log_line(valve_setting, datetime.now())
        return True
    def start_scheduler(self):
        """
        start scheduler if not already running (debug mode has 2 threads, so we
        have to make sure it only starts once)
        """
        self.SCHEDULER.start()
        self.SCHEDULER.add_listener(self.scheduler_job_event_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
        # Guarantee hardware cleanup on interpreter exit.
        atexit.register(self.unload_scheduler)
        return True
    def scheduler_job_event_listener(self, event):
        """
        Event listener for scheduler, do emergency stuff when something goes wrong
        """
        if event.exception:
            print('The scheduler job crashed.')
        #else:
        #    print('The scheduler job finished successfully.')
    def restart_job_manager(self):
        """
        Remove all scheduled jobs and re-create them from the valve
        configurations stored in the database.
        """
        display_time = time.time()
        for job in self.SCHEDULER.get_jobs():
            self.SCHEDULER.remove_job(job.id)
        self.TFT.clear_job_display()
        # Add all jobs that are stored in database
        valve_configs = self.STORE.load_valve_configs()
        for config in valve_configs:
            if config['on_time'] and config['on_duration'] and config['is_active']:
                self.TFT.display_job(config)
                # on_time is an "HH:MM:SS" string.
                time_components = [int(x) for x in config['on_time'].split(':')]
                if config['interval_type'] == 'daily':
                    self.SCHEDULER.add_job(self.valve_job,
                                           'cron',
                                           day_of_week='mon-sun',
                                           hour=time_components[0],
                                           minute=time_components[1],
                                           second=time_components[2],
                                           args=[config])
                    #print('Scheduled daily job [%i:%i]'
                    #      % (time_components[0], time_components[1]))
                if config['interval_type'] == 'weekly':
                    # Weekly jobs always run on Sunday.
                    self.SCHEDULER.add_job(self.valve_job,
                                           'cron',
                                           day_of_week='sun',
                                           hour=time_components[0],
                                           minute=time_components[1],
                                           second=time_components[2],
                                           args=[config])
                    #print('Scheduled weekly job [sun %i:%i]'
                    #      % (time_components[0], time_components[1]))
                if config['interval_type'] == 'monthly':
                    # Monthly jobs always run on the 1st.
                    self.SCHEDULER.add_job(self.valve_job,
                                           'cron',
                                           day=1,
                                           hour=time_components[0],
                                           minute=time_components[1],
                                           second=time_components[2],
                                           args=[config])
                    #print('Scheduled monthly job [1st of the month %i:%i]'
                    #      % (time_components[0], time_components[1]))
        # print('JOBS:')
        # print(SCHEDULER.get_jobs())
        # Keep the job list on screen for at least 5 seconds before
        # switching back to the normal UI background.
        while time.time() - display_time < 5:
            time.sleep(1)
        self.TFT.clear()
        self.TFT.set_background_image('static/gfx/lcd-ui-background.png', pos_x=0, pos_y=0)
        self.add_tft_job()
        return True
    def add_tft_job(self):
        """
        Job for updating tft display: refreshes the clock and job list
        once per second.
        """
        def tft_job():
            #if(os.getenv('SSH_CLIENT')):
            #    os.environ.get('SSH_CLIENT')
            #    os.environ['SSH_CLIENT'] // nothing ?
            #    TFT.display_text(os.getenv('SSH_CLIENT'),
            #                     24,
            #                     (205, 30),
            #                     (249, 116, 75),
            #                     (0, 110, 46))
            # text, size, pos_x, pos_y, color, bg_color
            self.TFT.display_text(time.strftime('%H:%M:%S'), 40, 205, 10, (255, 255, 255), (0, 110, 46))
            self.TFT.update_job_display()
        self.SCHEDULER.add_job(tft_job, 'interval', seconds=1)
        return True
|
gpl-2.0
| 6,901,141,346,179,683,000
| 35.494186
| 108
| 0.507249
| false
| 4.089251
| true
| false
| false
|
cmattoon/today-i-learned
|
arduino-ekg/replay.py
|
1
|
1929
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy
import sys, struct
from scipy.fftpack import fft, fftfreq, fftshift
def getlines(filename):
    """Read *filename* and return its lines with surrounding whitespace stripped."""
    with open(filename) as handle:
        stripped = []
        for raw in handle:
            stripped.append(raw.strip())
        return stripped
def replay_packets(filename):
    """Yield each logged packet as a tuple of ints.

    Each line is expected to look like ``(1,2,...)``; the surrounding
    delimiters are stripped and the comma-separated fields converted.
    """
    for record in getlines(filename):
        fields = record[1:-1].split(',')
        yield tuple(map(int, fields))
def replay_bytes(filename):
    """Reconstructs binary packets based on reading in
    17-element tuples each representing a packet.

    Each tuple from :func:`replay_packets` is packed back into its
    original 17-byte wire form.
    """
    for packet in replay_packets(filename):
        # '17B' is equivalent to 'B' repeated 17 times, but the repeat
        # count cannot silently drift out of sync when edited.
        yield struct.pack('17B', *packet)
# Per-channel sample buffers, one per EKG channel carried in the packets.
ch1 = []
ch2 = []
ch3 = []
ch4 = []
ch5 = []
ch6 = []
def do_fft(ch):
    # Compute and plot the single-sided amplitude spectrum of one channel.
    # NOTE(review): Python 2 script (bare print statements, integer '/').
    samples = len(ch)
    # Sample spacing assumes the whole capture spans one unit of time —
    # TODO confirm against the actual sampling rate.
    spacing = 1.0 / len(ch)
    x = numpy.linspace(0.0, samples * spacing, samples)
    y = [c for c in ch]
    # Frequency axis up to the Nyquist limit 1/(2*spacing).
    xf = numpy.linspace(0.0, 1.0/(2.0*spacing), samples/2)
    yf = fft(ch)
    print x
    print y
    print("-----------")
    print xf
    print yf
    # Plot the magnitude of the positive-frequency half, normalized.
    plt.plot(xf, 2.0/samples * numpy.abs(yf[0:samples/2]))
    plt.grid()
    plt.show()
# Main replay loop: read packets from the log given on the command line,
# accumulate per-channel samples while sequence numbers increase, and run
# an FFT on channels 1 and 2 whenever the sequence counter wraps around.
last = -1
for pkt in replay_packets(sys.argv[1]):
    # pkt[3] is the sequence counter; odd positions 5..15 hold the high
    # byte of channels 1..6 — TODO confirm against the capture format.
    seq = int(pkt[3])
    if seq > last:
        ch1.append(pkt[5])
        ch2.append(pkt[7])
        ch3.append(pkt[9])
        ch4.append(pkt[11])
        ch5.append(pkt[13])
        ch6.append(pkt[15])
        """
        group[seq] = {
            'CH1': { 'low': pkt[4], 'high': pkt[5] },
            'CH2': { 'low': pkt[6], 'high': pkt[7] },
            'CH3': { 'low': pkt[8], 'high': pkt[9] },
            'CH4': { 'low': pkt[10], 'high': pkt[11] },
            'CH5': { 'low': pkt[12], 'high': pkt[13] },
            'CH6': { 'low': pkt[14], 'high': pkt[15] }
        }
        """
        last = int(seq)
    else:
        # Sequence wrapped: analyze the collected window and start over.
        print ch1
        last = -1
        do_fft(ch1)
        do_fft(ch2)
        ch1 = []
        ch2 = []
        ch3 = []
        ch4 = []
        ch5 = []
        ch6 = []
|
mit
| 3,888,901,447,269,513,700
| 22.814815
| 58
| 0.505962
| false
| 3.028257
| false
| false
| false
|
douglaskastle/AcraNetwork
|
examples/benchmark_pcap.py
|
1
|
4560
|
# -------------------------------------------------------------------------------
# Name: benchmark_pcap
# Purpose: Benchmark the creation and parsing of a biug pcap file
#
# Author:
#
# Created:
#
# Copyright 2014 Diarmuid Collins
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
sys.path.append("..")
import time
import struct
import AcraNetwork.IENA as iena
import AcraNetwork.iNetX as inetx
import AcraNetwork.Pcap as pcap
import AcraNetwork.SimpleEthernet as SimpleEthernet
import argparse
# Benchmark script: writes PACKETS_TO_WRITE packets of the chosen payload
# type (plain udp, iena or inetx) to a pcap file and reports the throughput.
parser = argparse.ArgumentParser(description='Benchmark the creation of pcap files containing packets')
parser.add_argument('--type', required=True, type=str,choices=["udp","iena","inetx"], help='The type of payload, udp iena or inetx')
parser.add_argument('--ignoretime',required=False, action='store_true', default=False)
args = parser.parse_args()
# constants
PCAP_FNAME = "output_test.pcap"
PACKETS_TO_WRITE = 50000
PAYLOAD_SIZE = 1300 # size of the payload in bytes
# Per-type total header overhead in bytes, used for the Mbps figures below.
HEADER_SIZE = {'udp' : 58 , 'inetx' :86 ,'iena':74}
# Write out a pcapfile with each inetx and iena packet generated
mypcap = pcap.Pcap(PCAP_FNAME,forreading=False)
mypcap.writeGlobalHeader()
# Fixed Ethernet/IP/UDP scaffolding reused for every packet.
ethernet_packet = SimpleEthernet.Ethernet()
ethernet_packet.srcmac = 0x001122334455
ethernet_packet.dstmac = 0x554433221100
ethernet_packet.type = SimpleEthernet.Ethernet.TYPE_IP
ip_packet = SimpleEthernet.IP()
ip_packet.dstip = "235.0.0.2"
ip_packet.srcip = "127.0.0.1"
udp_packet = SimpleEthernet.UDP()
udp_packet.dstport = 4422
# Fixed payload for both
payload = (struct.pack(">B",5) * PAYLOAD_SIZE)
if args.type == "inetx":
    # Create an inetx packet
    avionics_packet = inetx.iNetX()
    avionics_packet.inetxcontrol = inetx.iNetX.DEF_CONTROL_WORD
    avionics_packet.pif = 0
    avionics_packet.streamid = 0xdc
    avionics_packet.sequence = 0
    avionics_packet.payload = payload
elif args.type == "iena":
    # Create an iena packet
    avionics_packet = iena.IENA()
    avionics_packet.key = 0xdc
    avionics_packet.keystatus = 0
    avionics_packet.endfield = 0xbeef
    avionics_packet.sequence = 0
    avionics_packet.payload = payload
    avionics_packet.status = 0
packets_written = 0
start_time = time.time()
while packets_written < PACKETS_TO_WRITE:
    if args.type == "udp":
        # Plain UDP: no avionics wrapper, payload goes in directly.
        udp_packet.srcport = 4999
        udp_packet.payload = payload
    else:
        # --ignoretime pins timestamps to zero for reproducible output.
        if args.ignoretime:
            currenttime = 0
        else:
            currenttime = int(time.time())
        # IENA sequence is 16-bit; iNetX sequence is 32-bit.
        if args.type == "iena":
            avionics_packet.sequence = (avionics_packet.sequence +1) % 65536
            udp_packet.srcport = 5000
        else:
            avionics_packet.sequence = (avionics_packet.sequence +1) % 0x100000000
            udp_packet.srcport = 5001
        avionics_packet.setPacketTime(currenttime)
        udp_packet.payload = avionics_packet.pack()
    # Assemble the full frame bottom-up and write it as one pcap record.
    ip_packet.payload = udp_packet.pack()
    ethernet_packet.payload = ip_packet.pack()
    record = pcap.PcapRecord()
    if args.ignoretime:
        record.usec = 0
        record.sec = 0
    else:
        record.setCurrentTime()
    record.packet = ethernet_packet.pack()
    mypcap.writeARecord(record)
    packets_written += 1
mypcap.close()
end_time = time.time()
print("INFO: Wrote {} packets of type {} with payload of {} bytes in {} seconds".format(PACKETS_TO_WRITE,args.type,PAYLOAD_SIZE,end_time-start_time))
print("INFO: Wrote {} bytes in {}".format((HEADER_SIZE[args.type]+PAYLOAD_SIZE)*PACKETS_TO_WRITE,end_time-start_time))
print("INFO: Wrote {} packets per second".format(PACKETS_TO_WRITE/(end_time-start_time)))
print("INFO: Wrote {:.2f} Mbps".format((HEADER_SIZE[args.type]+PAYLOAD_SIZE)*PACKETS_TO_WRITE*8/((end_time-start_time)*1024*1024)))
|
gpl-2.0
| -101,800,711,013,690,860
| 33.905512
| 149
| 0.669956
| false
| 3.316364
| false
| false
| false
|
unitedstack/rock
|
rock/rules/rule_parser.py
|
1
|
10238
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import datetime
import uuid
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import timeutils
from rock.db import api as db_api
from rock.rules import rule_utils
from rock.tasks.manager import run_flow
LOG = logging.getLogger(__name__)
def data_get_by_obj_time(obj_name, delta):
    """Fetch records of the given data model created in the last *delta* seconds."""
    model_name = 'model_' + obj_name
    class_path = 'rock.db.sqlalchemy.%s.%s' % (
        model_name, rule_utils.underline_to_camel(model_name))
    model = importutils.import_class(class_path)
    window = datetime.timedelta(seconds=delta)
    return db_api.get_period_records(model,
                                     timeutils.utcnow() - window,
                                     end_time=timeutils.utcnow(),
                                     sort_key='created_at')
class RuleParser(object):
    """Evaluates a JSON-encoded rule: collects data per target, applies the
    per-target (l1) and global (l2) rules, and triggers recovery actions.

    NOTE(review): Python 2 code throughout (``basestring``, ``unicode``,
    subscripting the result of ``map``); do not run under Python 3 as-is.
    """
    def __init__(self, rule):
        # Accept either a JSON string or an already-decoded dict.
        if isinstance(rule, basestring):
            self.rule = json.loads(rule)
        else:
            self.rule = rule
        self.raw_data = {}
        self.target_data = {}
        self.l1_data = {}
        self.l2_data = {}
        self.all_data = {}
    def calculate(self):
        # Full pipeline: collect -> per-target l1 + global l2 -> action.
        LOG.info("Starting collect data.")
        self._collect_data()
        LOG.info("Got target data %s", self.target_data)
        l2_result = self._rule_mapping_per_target()
        LOG.info("Got l1 data %s", self.l1_data)
        LOG.info("Got l2 result %s", l2_result)
        # Actions run only if the global (l2) rule evaluates truthy.
        if l2_result:
            self._action()
    def _collect_data(self):
        funcs = self.Functions()
        splited_raw_data = {}
        # raw_data organized by data_model.
        # {"data_model": [list_of_all_target_resources]}
        for key, value in self.rule["collect_data"].items():
            rule = copy.deepcopy(value['data'])
            self.raw_data[key] = self._calculate(rule, funcs)
            splited_raw_data[key] = {}
        # splited_raw_data organized by data_model first, then target.
        # {"data_model": {"target": [list_of_single_target_resources]}}
        for key, value in self.raw_data.items():
            for item in value:
                if not splited_raw_data[key].get(item['target']):
                    splited_raw_data[key][item['target']] = [item]
                else:
                    splited_raw_data[key][item['target']].append(item)
        # target_data organized by target first, then data_model, and each
        # data_model show calc_result.
        # {'target': {'data_model': {'last_piece','judge_result'}
        for key, value in self.rule["collect_data"].items():
            for target, target_data in splited_raw_data[key].items():
                if not self.target_data.get(target):
                    self.target_data[target] = {}
                self.target_data[target][key] = {}
                # The judge sub-rule gets the target's record list appended
                # as its final argument.
                judge_rule = value["judge"] + [target_data]
                self.target_data[target][key]['judge_result'] = \
                    self._calculate(judge_rule, funcs)
                self.target_data[target][key].update(target_data[0])
        # Drop targets that don't have data for every configured model.
        # NOTE(review): popping while iterating .items() is only safe in
        # Python 2, where items() returns a list.
        target_required_len = len(self.rule["collect_data"])
        for key, value in self.target_data.items():
            if len(self.target_data[key]) != target_required_len:
                self.target_data.pop(key)
                LOG.warning("Find host %s do not match.", key)
    def _rule_mapping_per_target(self):
        # Evaluate the l1 rule once per target (with $variables substituted
        # from that target's data), then evaluate the global l2 rule.
        def _replace_variate(target, rule):
            # In-place substitution of '$a.b.c' placeholders with values
            # looked up in self.target_data[target].
            for posi, value in enumerate(rule):
                if isinstance(value, unicode) and value.startswith('$'):
                    dict_args = value[1:].split('.')
                    rule[posi] = self.target_data[target][dict_args[0]]
                    dict_args.pop(0)
                    while dict_args:
                        rule[posi] = rule[posi][dict_args[0]]
                        dict_args.pop(0)
                elif isinstance(value, list):
                    _replace_variate(target, value)
        funcs = self.Functions()
        for target, data in self.target_data.items():
            l1_rule = copy.deepcopy(self.rule['l1_rule'])
            _replace_variate(target, l1_rule)
            self.l1_data[target] = \
                {'l1_result': self._calculate(l1_rule, funcs)}
        l2_rule = copy.deepcopy(self.rule['l2_rule'])
        l2_rule = self._replace_rule_parameter(l2_rule)
        return self._calculate(l2_rule, funcs)
    def _action(self):
        # Launch the configured task flow for every target whose l1 rule
        # failed, unless one of the action filters rejects it.
        funcs = self.Functions()
        actions = self.rule['action']['tasks']
        for target in self.l1_data:
            if not self.l1_data[target]['l1_result']:
                filter_flag = False
                for each_filter in self.rule['action']['filters']:
                    rule = copy.deepcopy(each_filter)
                    rule = self._replace_rule_parameter(
                        rule, self.target_data[target])
                    if not self._calculate(rule, funcs):
                        filter_flag = True
                        LOG.info('Skipped target %s due to filter %s.',
                                 target, each_filter)
                if filter_flag:
                    continue
                LOG.info("Triggered action on %s.", target)
                tasks = []
                task_uuid = str(uuid.uuid4())
                store_spec = {'taskflow_uuid': task_uuid,
                              'target': target}
                # Each task entry is [task_name, "key:value", ...]; the
                # key/value pairs are merged into the flow's store.
                for task in actions:
                    tasks.append(task[0])
                    for input_params in task[1:]:
                        input_kv = input_params.split(':')
                        store_spec[input_kv[0]] = input_kv[1]
                run_flow(task_uuid, store_spec, tasks)
    def _calculate(self, rule, funcs):
        # Evaluate an s-expression-style rule: rule[0] is an operator name
        # ('%...'), the rest are arguments (evaluated recursively).
        def _recurse_calc(arg):
            if isinstance(arg, list) and isinstance(arg[0], unicode) \
                    and arg[0].startswith('%') and arg[0] != '%map':
                return self._calculate(arg, funcs)
            # '%map' is special-cased: arg[1] maps targets to data, arg[2]
            # is the sub-rule applied per target.
            elif isinstance(arg, list)and arg[0] == '%map':
                ret = {}
                for k, v in arg[1].items():
                    map_rule = self._replace_map_para(arg[2], arg[1], k)
                    ret[k] = {}
                    ret[k]['map_result'] = self._calculate(map_rule, funcs)
                return ret
            else:
                return arg
        # NOTE(review): relies on Python 2 map() returning a list (it is
        # subscripted below).
        r = map(_recurse_calc, rule)
        # Translate the '%op' alias to a Functions method name.
        r[0] = self.Functions.ALIAS.get(r[0]) or r[0]
        func = getattr(funcs, r[0])
        return func(*r[1:])
    def _replace_rule_parameter(self, rule, input=None):
        # Substitute '$a.b.c' placeholders with attributes of self (or keys
        # of *input* when given), recursing into nested '%...' rules.
        if not isinstance(rule, list):
            return
        def _recurse_replace(arg):
            if isinstance(arg, list) and isinstance(arg[0], unicode) \
                    and arg[0].startswith('%'):
                return self._replace_rule_parameter(arg, input)
            elif isinstance(arg, unicode) and arg.startswith('$'):
                args = arg[1:].split('.')
                if not input:
                    ret = getattr(self, args[0])
                else:
                    ret = input[args[0]]
                args.pop(0)
                while args:
                    ret = ret[args[0]]
                    args.pop(0)
                return ret
            else:
                return arg
        r = map(_recurse_replace, rule)
        return r
    def _replace_map_para(self, map_rule, para_dict, target):
        # Substitute 'map.a.b' placeholders with values from the current
        # map target's entry in para_dict.
        def _recurse_replace(arg):
            if isinstance(arg, list) and isinstance(arg[0], unicode) \
                    and arg[0].startswith('%'):
                return self._replace_map_para(arg, para_dict, target)
            elif isinstance(arg, unicode) and arg.startswith('map.'):
                map_para_list = arg.split('.')
                map_para_list.pop(0)
                ret = para_dict[target][map_para_list[0]]
                map_para_list.pop(0)
                while map_para_list:
                    ret = ret[map_para_list[0]]
                    map_para_list.pop(0)
                return ret
            else:
                return arg
        r = map(_recurse_replace, map_rule)
        return r
    class Functions(object):
        # Operator implementations; ALIAS maps rule operator tokens to
        # method names.
        # NOTE(review): '%map' aliases to '_map', but no _map method exists —
        # '%map' is intercepted in _calculate._recurse_calc before dispatch,
        # so the alias entry appears to be dead; confirm before removing.
        ALIAS = {
            '%==': 'eq',
            '%<=': 'lt_or_eq',
            '%and': '_and',
            '%or': '_or',
            '%not': '_not',
            '%get_by_time': 'get_data_by_time',
            '%false_end_count_lt': 'false_end_count_lt',
            '%count': 'count',
            '%map': '_map'
        }
        def eq(self, *args):
            return args[0] == args[1]
        def lt_or_eq(self, *args):
            return args[0] <= args[1]
        def _and(self, *args):
            return all(args)
        def _or(self, *args):
            return any(args)
        def _not(self, *args):
            return not args[0]
        def get_data_by_time(self, *args):
            # args: (model_name, seconds_back)
            return data_get_by_obj_time(args[0], args[1])
        def false_end_count_lt(self, *args):
            # True if fewer than args[0] consecutive most-recent records
            # (args[1], newest first) have a falsy 'result'.
            boundary = int(args[0])
            count = 0
            for item in args[1]:
                if item['result'] in [False, 'false', 'False']:
                    count += 1
                else:
                    break
            return count < boundary
        def count(self, *args):
            # Count entries of mapping args[0] whose value at key args[1]
            # matches the truthiness selector args[2]; with any other
            # selector, fall back to len(args[1]).
            # NOTE(review): args[1] is used both as a dict key and as the
            # len() operand in the fallback — confirm intended.
            if args[2] in [False, 'false', 'False']:
                count_type = False
            elif args[2] in [True, 'true', 'True']:
                count_type = True
            else:
                return len(args[1])
            count = 0
            for k, v in args[0].items():
                if v.get(args[1]) == count_type:
                    count += 1
            return count
|
apache-2.0
| -6,026,574,041,103,912,000
| 35.695341
| 75
| 0.504298
| false
| 3.983658
| false
| false
| false
|
rushiprasad/stwitto
|
stwitto/twitt.py
|
1
|
1227
|
import tweepy
import argparse
import ConfigParser
# Twitter OAuth credentials are read once at import time from the
# system-wide config file; all four values come from the
# [twitter_auth_config] section.
config = ConfigParser.ConfigParser()
config.read('/etc/stwitto/config.ini')
consumer_key = config.get('twitter_auth_config', 'consumer_key')
consumer_secret = config.get('twitter_auth_config', 'consumer_secret')
access_token = config.get('twitter_auth_config', 'access_token')
access_token_secret = config.get('twitter_auth_config', 'access_token_secret')
def get_api():
    """Build an authenticated tweepy API client from the module-level credentials."""
    handler = tweepy.OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    api = tweepy.API(handler)
    return api
def main():
    """Command-line entry point: parse arguments and post the tweet.

    ``--tweet`` takes the tweet words; ``--image`` optionally attaches a
    picture.  With no ``--tweet`` argument the usage help is printed.
    """
    parser = argparse.ArgumentParser(description='Automatic Twitter Tweets through CLI')
    parser.add_argument('--tweet', nargs='+', help='your tweet goes here')
    parser.add_argument('--image', help='your tweet image goes here')
    args = parser.parse_args()
    api = get_api()
    if args.tweet and args.image:
        # Tweet with an attached image.
        tweet = ' '.join(args.tweet)
        api.update_with_media(args.image, tweet)
    elif args.tweet:
        # Text-only tweet.  (The unused 'status' locals from the original
        # were dropped; the API return values were never read.)
        tweet = ' '.join(args.tweet)
        api.update_status(status=tweet)
    else:
        print('Expected an Argument')
        parser.print_help()
if __name__ == "__main__":
    main()
|
gpl-2.0
| -6,080,710,177,383,759,000
| 32.162162
| 88
| 0.679707
| false
| 3.556522
| true
| false
| false
|
hyc/HyperDex
|
test/doc-extract.py
|
1
|
2556
|
# Copyright (c) 2013, Cornell University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of HyperDex nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
# Per-language preamble written at the top of every generated doctest file.
# {code} and {infile} are filled in by transform(); the generated file reads
# HOST and PORT from argv so the tests can target an arbitrary coordinator.
PREAMBLE = {
    'python':
'''# File generated from {code} blocks in "{infile}"
>>> import sys
>>> HOST = sys.argv[2]
>>> PORT = int(sys.argv[3])
'''}
def transform(code, infile, outfile):
    """Extract ``\\begin{<code>code}`` ... ``\\end{<code>code}`` blocks from
    *infile* into *outfile*, prefixed with the language preamble.

    Lines between the begin/end markers are copied through verbatim, except
    that hard-coded local coordinator addresses are rewritten to use the
    HOST/PORT variables defined by the preamble.
    """
    begin = '\\begin{%scode}\n' % code
    end = '\\end{%scode}\n' % code
    # 'with' guarantees both file handles are closed even on error
    # (the original code leaked them).
    with open(infile, 'r') as fin, open(outfile, 'w') as fout:
        fout.write(PREAMBLE[code].format(code=code, infile=infile))
        output = False
        for line in fin:
            if line == begin:
                # Nested begin markers would be a malformed document.
                assert not output
                output = True
            elif line == end:
                output = False
            elif output:
                if line == ">>> a = hyperdex.admin.Admin('127.0.0.1', 1982)\n":
                    fout.write(">>> a = hyperdex.admin.Admin(HOST, PORT)\n")
                elif line == ">>> c = hyperdex.client.Client('127.0.0.1', 1982)\n":
                    fout.write(">>> c = hyperdex.client.Client(HOST, PORT)\n")
                else:
                    fout.write(line)
# CLI entry point: transform(<code-language>, <input .tex>, <output file>).
transform(sys.argv[1], sys.argv[2], sys.argv[3])
|
bsd-3-clause
| 4,422,403,885,164,071,400
| 43.068966
| 80
| 0.678013
| false
| 4.057143
| false
| false
| false
|
unho/translate
|
translate/convert/test_po2ts.py
|
1
|
4424
|
# -*- coding: utf-8 -*-
from io import BytesIO
from translate.convert import po2ts, test_convert
from translate.storage import po
class TestPO2TS:
def po2ts(self, posource):
"""helper that converts po source to ts source without requiring files"""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
convertor = po2ts.po2ts()
output = BytesIO()
convertor.convertstore(inputpo, output)
return output.getvalue().decode('utf-8')
def singleelement(self, storage):
"""checks that the pofile contains a single non-header element, and returns it"""
assert len(storage.units) == 1
return storage.units[0]
def test_simpleunit(self):
"""checks that a simple po entry definition converts properly to a ts entry"""
minipo = r'''#: term.cpp
msgid "Term"
msgstr "asdf"'''
tsfile = self.po2ts(minipo)
print(tsfile)
assert "<name>term.cpp</name>" in tsfile
assert "<source>Term</source>" in tsfile
assert "<translation>asdf</translation>" in tsfile
assert "<comment>" not in tsfile
def test_simple_unicode_unit(self):
"""checks that a simple unit with unicode strings"""
minipo = r'''#: unicode.cpp
msgid "ßource"
msgstr "†arget"'''
tsfile = self.po2ts(minipo)
print(tsfile)
print(type(tsfile))
assert u"<name>unicode.cpp</name>" in tsfile
assert u"<source>ßource</source>" in tsfile
assert u"<translation>†arget</translation>" in tsfile
def test_fullunit(self):
"""check that an entry with various settings is converted correctly"""
posource = '''# Translator comment
#. Automatic comment
#: location.cpp:100
msgid "Source"
msgstr "Target"
'''
tsfile = self.po2ts(posource)
print(tsfile)
# The other section are a duplicate of test_simplentry
# FIXME need to think about auto vs trans comments maybe in TS v1.1
assert "<comment>Translator comment</comment>" in tsfile
def test_fuzzyunit(self):
"""check that we handle fuzzy units correctly"""
posource = '''#: term.cpp
#, fuzzy
msgid "Source"
msgstr "Target"'''
tsfile = self.po2ts(posource)
print(tsfile)
assert '''<translation type="unfinished">Target</translation>''' in tsfile
def test_obsolete(self):
"""test that we can take back obsolete messages"""
posource = '''#. (obsolete)
#: term.cpp
msgid "Source"
msgstr "Target"'''
tsfile = self.po2ts(posource)
print(tsfile)
assert '''<translation type="obsolete">Target</translation>''' in tsfile
def test_duplicates(self):
"""test that we can handle duplicates in the same context block"""
posource = '''#: @@@#1
msgid "English"
msgstr "a"
#: @@@#3
msgid "English"
msgstr "b"
'''
tsfile = self.po2ts(posource)
print(tsfile)
assert tsfile.find("English") != tsfile.rfind("English")
def test_linebreak(self):
"""test that we can handle linebreaks"""
minipo = r'''#: linebreak.cpp
msgid "Line 1\n"
"Line 2"
msgstr "Linea 1\n"
"Linea 2"'''
tsfile = self.po2ts(minipo)
print(tsfile)
print(type(tsfile))
assert u"<name>linebreak.cpp</name>" in tsfile
assert r'''<source>Line 1
Line 2</source>''' in tsfile
assert r'''<translation>Linea 1
Linea 2</translation>''' in tsfile
def test_linebreak_consecutive(self):
    """Check that consecutive \\n linebreaks (blank lines) are preserved."""
    minipo = r'''#: linebreak.cpp
msgid "Line 1\n"
"\n"
"Line 3"
msgstr "Linea 1\n"
"\n"
"Linea 3"'''
    tsfile = self.po2ts(minipo)
    print(tsfile)
    print(type(tsfile))
    assert u"<name>linebreak.cpp</name>" in tsfile
    assert r'''<source>Line 1

Line 3</source>''' in tsfile
    assert r'''<translation>Linea 1

Linea 3</translation>''' in tsfile
class TestPO2TSCommand(test_convert.TestConvertCommand, TestPO2TS):
    """Tests running actual po2ts commands on files."""

    # Module under test, consumed by the TestConvertCommand base class.
    convertmodule = po2ts

    def test_help(self, capsys):
        """Check that --help lists the po2ts-specific options."""
        options = test_convert.TestConvertCommand.test_help(self, capsys)
        options = self.help_check(options, "-c CONTEXT, --context=CONTEXT")
        options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE", last=True)
|
gpl-2.0
| -6,105,735,019,182,657,000
| 30.333333
| 89
| 0.62947
| false
| 3.687813
| true
| false
| false
|
ksons/gltf-blender-importer
|
addons/io_scene_gltf_ksons/animation/material.py
|
1
|
2432
|
import bpy
from . import quote
from .curve import Curve
def add_material_animation(op, anim_info, material_id):
    """Create a Blender action animating one glTF material's properties.

    Builds F-curves for animated scalar/color properties and for texture
    transforms (KHR_texture_transform), grouping them by purpose, and
    registers the action in anim_info.material_actions.
    """
    anim_id = anim_info.anim_id
    data = anim_info.material[material_id]
    animation = op.gltf['animations'][anim_id]
    material = op.get('material', material_id)

    name = '%s@%s (Material)' % (
        animation.get('name', 'animations[%d]' % anim_id),
        material.name,
    )
    action = bpy.data.actions.new(name)
    anim_info.material_actions[material_id] = action

    # Plain animated material properties (eg. base color factor).
    fcurves = []
    for prop, sampler in data.get('properties', {}).items():
        curve = Curve.for_sampler(op, sampler)
        # paths maps glTF property names onto node-tree data paths; a
        # missing entry means the node setup has no input to drive.
        data_path = op.material_infos[material_id].paths.get(prop)
        if not data_path:
            print('no place to put animated property %s in material node tree' % prop)
            continue
        fcurves += curve.make_fcurves(op, action, data_path)
    if fcurves:
        group = action.groups.new('Material Property')
        for fcurve in fcurves:
            fcurve.group = group

    # Texture-transform animations: offset/rotation/scale per texture slot.
    for texture_type, samplers in data.get('texture_transform', {}).items():
        base_path = op.material_infos[material_id].paths[texture_type + '-transform']
        fcurves = []
        if 'offset' in samplers:
            curve = Curve.for_sampler(op, samplers['offset'])
            data_path = base_path + '.translation'
            fcurves += curve.make_fcurves(op, action, data_path)
        if 'rotation' in samplers:
            curve = Curve.for_sampler(op, samplers['rotation'])
            data_path = [(base_path + '.rotation', 2)]  # animate rotation around Z-axis
            # glTF rotates the opposite way from Blender's mapping node,
            # hence the sign flip.
            fcurves += curve.make_fcurves(op, action, data_path, transform=lambda theta: -theta)
        if 'scale' in samplers:
            curve = Curve.for_sampler(op, samplers['scale'])
            data_path = base_path + '.scale'
            fcurves += curve.make_fcurves(op, action, data_path)
        group_name = {
            'normalTexture': 'Normal',
            'occlusionTexture': 'Occlusion',
            'emissiveTexture': 'Emissive',
            'baseColorTexture': 'Base Color',
            'metallicRoughnessTexture': 'Metallic-Roughness',
            'diffuseTexture': 'Diffuse',
            'specularGlossinessTexture': 'Specular-Glossiness',
        }[texture_type] + ' Texture Transform'
        group = action.groups.new(group_name)
        for fcurve in fcurves:
            fcurve.group = group
|
mit
| -374,302,995,571,520,960
| 36.415385
| 95
| 0.598684
| false
| 3.701674
| false
| false
| false
|
EsqWiggles/SLA-bot
|
SLA_bot/module/alertfeed.py
|
1
|
2516
|
"""Collect hourly event announcements for PSO2
Take announcement data from a third party source, convert it into a string,
and store it for later reads. Update at least once before reading.
"""
# The current source is already translated into English. In the event that the
# flyergo source is discontinued, this module must adapt to a new source. If no
# other alternatives can be found, then this will fall back to using twitter
# bot at https://twitter.com/pso2_emg_hour and translating it.
import aiohttp
import asyncio
import json
import re
import SLA_bot.util as ut
cache = ''
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def update():
    """Download the latest announcement and store it in the module cache.

    On any network or format error the cache is left unchanged and a
    note is logged — reads fall back to the previous announcement.
    """
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(source_url) as response:
                global cache
                # content_type=None skips aiohttp's MIME check in case the
                # endpoint serves JSON with a non-JSON content type.
                data = await response.json(content_type=None)
                # data is a list of announcements; presumably index 0 is
                # the newest — confirm against the flyergo API.
                cache = data[0]['text']
    except ut.GetErrors as e:
        ut.note('Failed to GET: ' + source_url)
    except (json.decoder.JSONDecodeError, IndexError, KeyError) as e:
        ut.note('Unexpected data format from: ' + source_url)
def read():
    """Return the string of the most recent announcement."""
    if not cache:
        return '[ ?? JST Emergency Quest Notice ]\nNot found.'
    # Unscheduled notices get their ship labels monospaced for alignment.
    body = align_shiplabels(cache) if is_unscheduled() else cache
    # NOTE(review): the trailing bold-empty marker presumably forces a
    # blank line in Discord rendering — confirm.
    return body + '\n** **'
def is_unscheduled():
    """Return whether the last announcement looks like an unscheduled event."""
    # Checks if 50% or more of the lines (-1 from the header) start with what
    # looks like a ship label.
    # Unscheduled Ship label = ##:Event Name
    # Scheduled Time        = ##:## Event Name
    # The regex matches if there are 2 numbers and a semicolon at the start of
    # the line but not if 2 more numbers and a space follows it. In case an
    # event name is added that starts with 2 digits and a space in the future,
    # exclude the common minute times of 00 and 30 instead.
    if not cache:
        return True
    newline_count = cache.count('\n')
    if not newline_count:
        # Single-line notice: there are no ship-label lines below a header,
        # and dividing by zero would crash (the original did).
        return False
    # Raw string avoids the invalid-escape warning '\d' triggers on py3.6+.
    ship_labels = re.findall(r'^\d\d:(?!\d\d\s)', cache, flags=re.MULTILINE)
    return len(ship_labels) / newline_count >= 0.50
def align_shiplabels(text):
    """Return *text* with unscheduled ship labels wrapped in backticks.

    Backticks render the ``##:`` labels in monospace so they line up
    vertically; scheduled times (``##:## Event``) are left untouched.
    """
    # See comment in is_unscheduled() for a description of the regex.
    # Raw string avoids the invalid-escape warning '\d' triggers on py3.6+.
    def monospace(matched):
        return '`{}`'.format(matched.group(0))
    return re.sub(r'^\d\d:(?!\d\d\s)', monospace, text, flags=re.MULTILINE)
|
mit
| -8,873,405,941,662,445,000
| 37.707692
| 79
| 0.667329
| false
| 3.744048
| false
| false
| false
|
CognitionGuidedSurgery/msml-gui
|
src/msmlgui/text/mainframe.py
|
1
|
5476
|
__author__ = 'weigl'
from path import path
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import msmlgui.rcc
from .flycheck import build_overview
from .editor_ui import Ui_MainWindow
from ..help import *
icon = lambda x: QIcon(r':/icons/tango/16x16/%s.png' % x)
class MainFrame(Ui_MainWindow, QMainWindow):
    """Main window of the MSML text editor (PyQt4, Python 2)."""

    def __init__(self):
        QMainWindow.__init__(self)
        self.setupUi(self)
        self._setupActions()
        self.setupToolBar()
        self.readSettings()
        # Wire editor signals to the breadcrumb, help pane, problem table
        # and structure overview.
        self.textEditor.firePositionStackUpdate.connect(self.breadcrump.positionStackUpdate)
        self.textEditor.firePositionStackUpdate.connect(self.openHelp)
        self.textEditor.problemsChanged.connect(self.updateProblems)
        self.textEditor.contentChanged.connect(self.updateOverview)
        # NOTE(review): hard-coded developer path opened at startup —
        # should be removed or made configurable.
        self.open("/org/share/home/weigl/workspace/msml/examples/BunnyExample/bunnyCGAL.msml.xml")
        self.oldHelp = None

    def _setupActions(self):
        """Create the file actions and bind their shortcuts/handlers."""
        self.actionNew = QAction(icon("document-new"), "New", self)
        self.actionNew.setShortcut(QKeySequence.New)
        self.actionOpen = QAction(icon("document-open"), "Open", self)
        self.actionOpen.setShortcut(QKeySequence.Open)
        self.actionOpen.triggered.connect(self.search_open_file)
        self.actionSave = QAction(icon("document-save"), "Save", self)
        self.actionSave.setShortcut(QKeySequence.Save)
        self.actionSave.triggered.connect(self.save_file)
        self.actionSaveAs = QAction(icon("document-save-as"), "Save as...", self)
        self.actionSaveAs.setShortcut(QKeySequence.SaveAs)
        self.actionSaveAs.triggered.connect(self.save_file_as)
        self.actionClose = QAction(icon("document-close"), "Close", self)
        # NOTE(review): Qt key-sequence strings use '+' separators
        # ("Alt+F4"); "Alt-F4" likely parses as no shortcut — confirm.
        self.actionClose.setShortcut(QKeySequence("Alt-F4"))

    def setupToolBar(self):
        """Populate the toolbar and the File/Window menus with the actions."""
        file = [self.actionNew, self.actionOpen, self.actionSave, self.actionSaveAs, self.actionClose]
        self.toolBar.addActions(file)
        self.menuFile.clear()
        self.menuFile.addActions(file)
        self.menuWindow.addAction(self.dockHelp.toggleViewAction())

    def open_file(self, filename):
        # TODO(review): stub — search_open_file() calls this, so chosen
        # files are currently never loaded.
        pass

    def closeEvent(self, event=QCloseEvent()):
        """Persist window geometry/state on close."""
        # NOTE(review): mutable default argument (QCloseEvent()) is shared
        # across calls; Qt always passes an event, so it is latent only.
        settings = QSettings("CoginitionGuidedSurgery", "msml-gui-editor")
        settings.setValue("geometry", self.saveGeometry())
        settings.setValue("windowState", self.saveState())
        QMainWindow.closeEvent(self, event)

    def readSettings(self):
        """Restore window geometry/state saved by closeEvent()."""
        settings = QSettings("CoginitionGuidedSurgery", "msml-gui-editor")
        self.restoreGeometry(settings.value("geometry").toByteArray())
        self.restoreState(settings.value("windowState").toByteArray())

    def search_open_file(self):
        """Ask the user for an MSML file and open it."""
        MSML_FILE_FILTER = "MSML (*.xml *.msml *.msml.xml);; All types (*.*)"
        # NOTE(review): self.last_path is read before it is ever assigned —
        # first invocation raises AttributeError; initialize in __init__.
        filename, _ = QFileDialog.getOpenFileNameAndFilter(self, "Open MSML file", self.last_path, MSML_FILE_FILTER)
        if filename:
            filename = path(filename)
            self.last_path = filename.dirname()
            self.open_file(filename)

    def save_file(self):
        """Serialize the model to XML at last_save_path."""
        # NOTE(review): self.last_save_path is never initialized, and after
        # save_file_as() returns the save still proceeds even if the user
        # cancelled — verify intended flow.
        if self.last_save_path is None:
            self.save_file_as()
        from msmlgui.helper.writer import to_xml, save_xml
        xml = to_xml(self.msml_model)
        save_xml(self.last_save_path, xml)

    def save_file_as(self):
        """Ask for a target file, remember it, then delegate to save_file()."""
        MSML_FILE_FILTER = "MSML (*.xml *.msml *.msml.xml);; All types (*.*)"
        last_dir = ""
        if self.last_save_path:
            last_dir = self.last_save_path.dirname()
        filename = QFileDialog.getSaveFileName(self, "Open MSML file", last_dir, MSML_FILE_FILTER)
        if filename:
            self.last_save_path = path(filename)
            self.save_file()

    def openHelp(self, toks):
        """Show help for the token under the cursor in the help web view."""
        try:
            tok = toks[-1]
            c = get_help(tok.value)
            # Skip re-rendering when the help content did not change.
            if c == self.oldHelp: return
            self.oldHelp = c
            if c.startswith("http://"):
                print "Open Help: %s" % c
                self.webView.setUrl(QUrl(c))
            else:
                self.webView.setHtml(c)
        except IndexError: pass  # empty position stack: nothing to show

    def updateOverview(self, tokens, char2line):
        """Rebuild the structure overview tree from the token stream."""
        overview = build_overview(tokens)
        self.treeOverview.clear()
        def appendUnder(name, seq):
            # One top-level node per category, children from seq.
            item = QTreeWidgetItem(self.treeOverview)
            item.setText(0, name)
            item.addChildren(seq)
        appendUnder("Variables", overview.variables)
        appendUnder("Objects", overview.objects)
        appendUnder("Tasks", overview.tasks)
        appendUnder("Environment", overview.environment)

    def updateProblems(self, problems):
        """Refresh the problems table; level 1 entries are shown in red."""
        print "updateProblems", problems
        self.tableProblems.clear()
        self.tableProblems.setColumnCount(3)
        self.tableProblems.setRowCount(len(problems))
        for i, p in enumerate(problems):
            c2 = QTableWidgetItem(p.message)
            c3 = QTableWidgetItem(str(p.position))
            if p.level == 1:
                c2.setForeground(QBrush(Qt.darkRed))
                c3.setForeground(QBrush(Qt.darkRed))
            else:
                c2.setForeground(QBrush(Qt.darkBlue))
                c3.setForeground(QBrush(Qt.darkBlue))
            self.tableProblems.setItem(i, 0, c2)
            self.tableProblems.setItem(i, 1, c3)

    def open(self, filename):
        """Load *filename*'s text into the editor (builtin open(), not recursion)."""
        with open(filename) as fp:
            content = fp.read()
        self.textEditor.insertPlainText(content)
|
gpl-3.0
| -8,745,189,579,018,515,000
| 30.291429
| 116
| 0.6313
| false
| 3.677636
| false
| false
| false
|
nkmk/python-snippets
|
notebook/arithmetic_operator_num.py
|
1
|
1336
|
# Demonstration of Python's arithmetic operators; the comment after each
# print() shows its exact output.

# --- Basic binary operators ---
print(10 + 3)
# 13

print(10 - 3)
# 7

print(10 * 3)
# 30

print(10 / 3)
# 3.3333333333333335

print(10 // 3)
# 3

print(10 % 3)
# 1

print(10 ** 3)
# 1000

# Fractional and negative exponents produce floats.
print(2 ** 0.5)
# 1.4142135623730951

print(10 ** -2)
# 0.01

print(0 ** 0)
# 1

# --- Division/modulo by zero raises ---
# print(10 / 0)
# ZeroDivisionError: integer division or modulo by zero

# print(10 // 0)
# ZeroDivisionError: integer division or modulo by zero

# print(10 % 0)
# ZeroDivisionError: integer division or modulo by zero

# print(0 ** -1)
# ZeroDivisionError: 0.0 cannot be raised to a negative power

# --- Operators with variables ---
a = 10
b = 3

c = a + b

print('a:', a)
print('b:', b)
print('c:', c)
# a: 10
# b: 3
# c: 13

# --- Augmented assignment (a op= b) ---
a = 10
b = 3

a += b

print('a:', a)
print('b:', b)
# a: 13
# b: 3

a = 10
b = 3

a %= b

print('a:', a)
print('b:', b)
# a: 1
# b: 3

a = 10
b = 3

a **= b

print('a:', a)
print('b:', b)
# a: 1000
# b: 3

# --- Result types: int vs float ---
print(2 + 3.0)
print(type(2 + 3.0))
# 5.0
# <class 'float'>

# True division always yields float, even for exact results.
print(10 / 2)
print(type(10 / 2))
# 5.0
# <class 'float'>

print(2 ** 3)
print(type(2 ** 3))
# 8
# <class 'int'>

print(2.0 ** 3)
print(type(2.0 ** 3))
# 8.0
# <class 'float'>

print(25 ** 0.5)
print(type(25 ** 0.5))
# 5.0
# <class 'float'>

print(0.01 ** -2)
print(type(0.01 ** -2))
# 10000.0
# <class 'float'>

# --- Operator precedence: ** binds tightest, then * / // %, then + - ---
print(100 / 10 ** 2 + 2 * 3 - 5)
# 2.0

print(100 / (10 ** 2) + (2 * 3) - 5)
# 2.0

print((100 / 10) ** 2 + 2 * (3 - 5))
# 96.0
|
mit
| 8,110,380,658,963,956,000
| 10.322034
| 61
| 0.528443
| false
| 2.107256
| false
| false
| false
|
grimoirelab/arthur
|
arthur/server.py
|
1
|
4264
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Santiago Dueñas <sduenas@bitergia.com>
# Alvaro del Castillo San Felix <acs@bitergia.com>
#
import logging
import threading
import time
import cherrypy
from grimoirelab_toolkit.datetime import str_to_datetime
from .arthur import Arthur
from .utils import JSONEncoder
logger = logging.getLogger(__name__)
def json_encoder(*args, **kwargs):
    """Custom JSON encoder handler for cherrypy's json_out tool.

    Delegates to cherrypy's inner JSON handler to obtain the response
    object, then streams it through the project's JSONEncoder so custom
    types serialize correctly, yielding UTF-8 encoded chunks.
    """
    obj = cherrypy.serving.request._json_inner_handler(*args, **kwargs)
    for chunk in JSONEncoder().iterencode(obj):
        yield chunk.encode('utf-8')
class ArthurServer(Arthur):
    """Arthur REST server.

    Exposes the task scheduler through CherryPy endpoints (`add`,
    `remove`, `tasks`) and optionally runs a background thread that
    drains fetched items to a writer.
    """

    def __init__(self, *args, **kwargs):
        # Fix: the original only assigned `writer` when the keyword was
        # present, so `if writer:` raised NameError otherwise.  pop() with
        # a default keeps the keyword out of Arthur's kwargs either way.
        writer = kwargs.pop('writer', None)

        super().__init__(*args, **kwargs)

        if writer:
            self.writer_th = threading.Thread(target=self.write_items,
                                              args=(writer, self.items))
        else:
            self.writer_th = None

        # Run start() once the CherryPy engine starts (priority 100).
        cherrypy.engine.subscribe('start', self.start, 100)

    def start(self):
        """Start the server and, when configured, the writer thread."""
        super().start()
        if self.writer_th:
            self.writer_th.start()

    @classmethod
    def write_items(cls, writer, items_generator):
        """Write items to the queue, polling once per second.

        :param writer: the writer object
        :param items_generator: callable producing items to be written
        """
        while True:
            items = items_generator()
            writer.write(items)
            time.sleep(1)

    @cherrypy.expose
    @cherrypy.tools.json_in()
    def add(self):
        """Add tasks from the JSON request body.

        :raises KeyError: when a task is missing a mandatory field
        """
        payload = cherrypy.request.json

        logger.debug("Reading tasks...")
        for task_data in payload['tasks']:
            try:
                category = task_data['category']
                backend_args = task_data['backend_args']
                archive_args = task_data.get('archive', None)
                sched_args = task_data.get('scheduler', None)
            except KeyError as ex:
                logger.error("Task badly formed")
                raise ex

            # ISO date strings must be converted before the backend sees them.
            from_date = backend_args.get('from_date', None)

            if from_date:
                backend_args['from_date'] = str_to_datetime(from_date)

            super().add_task(task_data['task_id'],
                             task_data['backend'],
                             category,
                             backend_args,
                             archive_args=archive_args,
                             sched_args=sched_args)
        logger.debug("Done. Ready to work!")

        return "Tasks added"

    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out(handler=json_encoder)
    def remove(self):
        """Remove the tasks listed in the JSON request body.

        Returns a mapping of task_id -> whether removal succeeded.
        """
        payload = cherrypy.request.json

        logger.debug("Reading tasks to remove...")
        task_ids = {}
        for task_data in payload['tasks']:
            task_id = task_data['task_id']
            removed = super().remove_task(task_id)
            task_ids[task_id] = removed

        result = {'tasks': task_ids}

        return result

    @cherrypy.expose
    @cherrypy.tools.json_out(handler=json_encoder)
    def tasks(self):
        """List the registered tasks as JSON."""
        logger.debug("API 'tasks' method called")

        result = [task.to_dict() for task in self._tasks.tasks]
        result = {'tasks': result}

        logger.debug("Tasks registry read")

        return result
|
gpl-3.0
| -4,247,991,870,050,024,000
| 27.610738
| 76
| 0.588318
| false
| 4.122824
| false
| false
| false
|
sein-tao/trash-cli
|
trashcli/trash.py
|
1
|
25907
|
# Copyright (C) 2007-2011 Andrea Francia Trivolzio(PV) Italy
from __future__ import absolute_import
# Package version string (the trailing '~' presumably marks a development
# snapshot — confirm against the release process).
version='0.12.10.3~'

import os
import logging

from .fstab import Fstab

# Module-wide logger; defaults to WARNING with a stderr handler so the
# command-line tools surface problems without configuration.
logger=logging.getLogger('trashcli.trash')
logger.setLevel(logging.WARNING)
logger.addHandler(logging.StreamHandler())

# Error codes (from os on *nix, hard coded for Windows):
EX_OK    = getattr(os, 'EX_OK'   ,  0)
EX_USAGE = getattr(os, 'EX_USAGE', 64)
EX_IOERR = getattr(os, 'EX_IOERR', 74)
from .fs import list_files_in_dir
import os
from .fs import remove_file
from .fs import move, mkdirs
class TrashDirectory:
    """A single trash directory (info/ + files/ pair) on one volume."""

    def __init__(self, path, volume):
        self.path = os.path.normpath(path)
        self.volume = volume
        self.logger = logger
        self.info_dir = os.path.join(self.path, 'info')
        self.files_dir = os.path.join(self.path, 'files')
        def warn_non_trashinfo():
            self.logger.warning("Non .trashinfo file in info dir")
        # Hook invoked when a stray file is found in info/; replaceable.
        self.on_non_trashinfo_found = warn_non_trashinfo

    def trashed_files(self):
        """Yield a TrashedFile for each parsable .trashinfo entry."""
        # Only used by trash-restore
        for info_file in self.all_info_files():
            try:
                yield self._create_trashed_file_from_info_file(info_file)
            except ValueError:
                self.logger.warning("Non parsable trashinfo file: %s" % info_file)
            except IOError as e:
                self.logger.warning(str(e))

    def all_info_files(self):
        """Yield the paths of all .trashinfo files in this trash dir."""
        try:
            for info_file in list_files_in_dir(self.info_dir):
                if not os.path.basename(info_file).endswith('.trashinfo'):
                    self.on_non_trashinfo_found()
                else:
                    yield info_file
        except OSError:  # when directory does not exist
            pass

    def _create_trashed_file_from_info_file(self, trashinfo_file_path):
        # Parsing is lazy: the file contents are read only when the
        # parser is queried for location/date.
        trash_info2 = LazyTrashInfoParser(
            lambda: contents_of(trashinfo_file_path), self.volume)
        original_location = trash_info2.original_location()
        deletion_date     = trash_info2.deletion_date()
        backup_file_path  = backup_file_path_from(trashinfo_file_path)
        return TrashedFile(original_location, deletion_date,
                           trashinfo_file_path, backup_file_path, self)
def backup_file_path_from(trashinfo_file_path):
    """Map an info/<name>.trashinfo path to its sibling files/<name> path."""
    info_dir, trashinfo_name = os.path.split(trashinfo_file_path)
    trash_dir = os.path.dirname(info_dir)
    backup_name = trashinfo_name[:-len('.trashinfo')]
    return os.path.join(trash_dir, 'files', backup_name)
class HomeTrashCan:
    """Derive the per-user home trash directory from the environment."""

    def __init__(self, environ):
        self.environ = environ

    def path_to(self, out):
        """Invoke *out* with the home trash path; no call if underivable.

        $XDG_DATA_HOME/Trash wins; otherwise $HOME/.local/share/Trash.
        """
        if 'XDG_DATA_HOME' in self.environ:
            out(self.environ['XDG_DATA_HOME'] + '/Trash')
        elif 'HOME' in self.environ:
            out(self.environ['HOME'] + '/.local/share/Trash')
class TrashDirectories:
    """Enumerate every trash directory (home + per-volume) for this user."""

    def __init__(self, volume_of, getuid, mount_points, environ):
        self.home_trashcan = HomeTrashCan(environ)
        self.volume_of = volume_of
        self.getuid = getuid
        self.mount_points = mount_points

    def all_trashed_files(self):
        """Yield every TrashedFile across all known trash directories."""
        for trash_dir in self.all_trash_directories():
            for trashedfile in trash_dir.trashed_files():
                yield trashedfile

    def all_trash_directories(self):
        """Return TrashDirectory objects for home and each mount point."""
        collected = []
        def add_trash_dir(path, volume):
            collected.append(TrashDirectory(path, volume))
        self.home_trash_dir(add_trash_dir)
        for volume in self.mount_points:
            self.volume_trash_dir1(volume, add_trash_dir)
            self.volume_trash_dir2(volume, add_trash_dir)
        return collected

    def home_trash_dir(self, out):
        # Report the home trash dir along with the volume it lives on.
        self.home_trashcan.path_to(lambda path:
            out(path, self.volume_of(path)))

    def volume_trash_dir1(self, volume, out):
        # freedesktop.org "$topdir/.Trash/$uid" variant.
        out(
            path = os.path.join(volume, '.Trash/%s' % self.getuid()),
            volume = volume)

    def volume_trash_dir2(self, volume, out):
        # freedesktop.org "$topdir/.Trash-$uid" fallback variant.
        out(
            path = os.path.join(volume, ".Trash-%s" % self.getuid()),
            volume = volume)
class TrashedFile:
    """Represent a trashed file.

    Each trashed file is persisted in two files:
     - $trash_dir/info/$id.trashinfo
     - $trash_dir/files/$id

    Properties:
     - path : the original path from where the file has been trashed
     - deletion_date : the time when the file has been trashed (instance of
                       datetime)
     - info_file : the file that contains information (instance of Path)
     - actual_path : the path where the trashed file has been placed after the
                     trash operation (instance of Path)
     - trash_directory : the TrashDirectory containing this entry
    """

    def __init__(self, path, deletion_date, info_file, actual_path,
                 trash_directory):
        self.path = path
        self.deletion_date = deletion_date
        self.info_file = info_file
        self.actual_path = actual_path
        self.trash_directory = trash_directory
        # Alias kept for restore(); same object as actual_path.
        self.original_file = actual_path

    def restore(self, dest=None):
        """Move the file back to its original location and drop its metadata.

        :raises IOError: when a file already exists at the original path
        :raises NotImplementedError: when *dest* is given (unsupported)
        """
        if dest is not None:
            raise NotImplementedError("not yet supported")
        if os.path.exists(self.path):
            raise IOError('Refusing to overwrite existing file "%s".' % os.path.basename(self.path))
        else:
            # Recreate missing parent directories before moving back.
            parent = os.path.dirname(self.path)
            mkdirs(parent)
            move(self.original_file, self.path)
        # Only remove the metadata after a successful restore.
        remove_file(self.info_file)
def getcwd_as_realpath():
    """Return the current working directory with symlinks resolved."""
    return os.path.realpath(os.curdir)
import sys
class RestoreCmd:
    """Implementation of the interactive `trash-restore` command."""

    def __init__(self, stdout, stderr, environ, exit, input,
                 curdir = getcwd_as_realpath, version = version):
        self.out   = stdout
        self.err   = stderr
        self.exit  = exit
        self.input = input
        fstab = Fstab()
        self.trashcan = TrashDirectories(
            volume_of    = fstab.volume_of,
            getuid       = os.getuid,
            mount_points = fstab.mount_points(),
            environ      = environ)
        self.curdir = curdir
        self.version = version

    def run(self, args = sys.argv):
        """List files trashed from the current dir and restore the chosen ones."""
        if '--version' in args[1:]:
            command = os.path.basename(args[0])
            self.println('%s %s' % (command, self.version))
            return
        trashed_files = []
        self.for_all_trashed_file_in_dir(trashed_files.append, self.curdir())
        if not trashed_files:
            self.report_no_files_found()
        else:
            for i, trashedfile in enumerate(trashed_files):
                self.println("%4d %s %s" % (i, trashedfile.deletion_date, trashedfile.path))
            index = self.input("What file to restore [0..%d]: " % (len(trashed_files) - 1))
            if index == "":
                self.println("Exiting")
            else:
                # Several whitespace-separated indexes may be given at once.
                files = (trashed_files[int(i)] for i in index.split())
                for file in files:
                    try:
                        file.restore()
                    except IOError as e:
                        self.printerr(e)
                        self.exit(1)

    def for_all_trashed_file_in_dir(self, action, dir):
        """Apply *action* to each trashed file originating under *dir*."""
        def is_trashed_from_curdir(trashedfile):
            # The trailing separator prevents '/foo' matching '/foobar'.
            return trashedfile.path.startswith(dir + os.path.sep)
        for trashedfile in filter(is_trashed_from_curdir,
                                  self.trashcan.all_trashed_files()):
            action(trashedfile)

    def report_no_files_found(self):
        self.println("No files trashed from current dir ('%s')" % self.curdir())

    def println(self, line):
        self.out.write(line + '\n')

    def printerr(self, msg):
        self.err.write('%s\n' % msg)
from .fs import FileSystemReader, contents_of, FileRemover
class ListCmd:
    """Implementation of the `trash-list` command."""

    def __init__(self, out, err, environ, list_volumes, getuid,
                 file_reader = FileSystemReader(),
                 version = version):
        # NOTE(review): FileSystemReader() as a default argument is a
        # single shared instance across all ListCmd objects — harmless if
        # stateless; confirm.
        self.output      = self.Output(out, err)
        self.err         = self.output.err
        self.contents_of = file_reader.contents_of
        self.version     = version
        top_trashdir_rules = TopTrashDirRules(file_reader)
        self.trashdirs = TrashDirs(environ, getuid,
                                   list_volumes = list_volumes,
                                   top_trashdir_rules=top_trashdir_rules)
        self.harvester = Harvester(file_reader)

    def run(self, *argv):
        """Parse command-line options and list the trash contents."""
        parse = Parser()
        parse.on_help(PrintHelp(self.description, self.output.println))
        parse.on_version(PrintVersion(self.output.println, self.version))
        parse.as_default(self.list_trash)
        parse(argv)

    def list_trash(self):
        # Wire harvester/trashdirs events to the printing callbacks, then
        # walk every trash directory.
        self.harvester.on_volume = self.output.set_volume_path
        self.harvester.on_trashinfo_found = self._print_trashinfo
        self.trashdirs.on_trashdir_skipped_because_parent_not_sticky = self.output.top_trashdir_skipped_because_parent_not_sticky
        self.trashdirs.on_trashdir_skipped_because_parent_is_symlink = self.output.top_trashdir_skipped_because_parent_is_symlink
        self.trashdirs.on_trash_dir_found = self.harvester._analize_trash_directory
        self.trashdirs.list_trashdirs()

    def _print_trashinfo(self, path):
        """Print one '<date> <original path>' line for a .trashinfo file."""
        try:
            contents = self.contents_of(path)
        except IOError as e:
            self.output.print_read_error(e)
        else:
            deletion_date = parse_deletion_date(contents) or unknown_date()
            try:
                path = parse_path(contents)
            except ParseError:
                self.output.print_parse_path_error(path)
            else:
                self.output.print_entry(deletion_date, path)

    def description(self, program_name, printer):
        """Feed the --help text through *printer*."""
        printer.usage('Usage: %s [OPTIONS...]' % program_name)
        printer.summary('List trashed files')
        printer.options(
            " --version show program's version number and exit",
            " -h, --help show this help message and exit")
        printer.bug_reporting()

    class Output:
        """Collects all writing done by trash-list (stdout and stderr)."""

        def __init__(self, out, err):
            self.out = out
            self.err = err

        def println(self, line):
            self.out.write(line+'\n')

        def error(self, line):
            self.err.write(line+'\n')

        def print_read_error(self, error):
            self.error(str(error))

        def print_parse_path_error(self, offending_file):
            self.error("Parse Error: %s: Unable to parse Path." % (offending_file))

        def top_trashdir_skipped_because_parent_not_sticky(self, trashdir):
            self.error("TrashDir skipped because parent not sticky: %s"
                       % trashdir)

        def top_trashdir_skipped_because_parent_is_symlink(self, trashdir):
            self.error("TrashDir skipped because parent is symlink: %s"
                       % trashdir)

        def set_volume_path(self, volume_path):
            # Remembered so print_entry can rebuild absolute paths.
            self.volume_path = volume_path

        def print_entry(self, maybe_deletion_date, relative_location):
            import os
            original_location = os.path.join(self.volume_path, relative_location)
            self.println("%s %s" % (maybe_deletion_date, original_location))
def do_nothing(*args, **kwargs):
    """No-op default callback: accepts any arguments and does nothing."""
class Parser:
    """Tiny getopt-based command-line dispatcher (Python 2 syntax).

    Options registered via add_option() run their action and stop
    processing; otherwise each positional argument runs argument_action
    and finally default_action runs.
    """

    def __init__(self):
        self.default_action = do_nothing
        self.argument_action = do_nothing
        self.short_options = ''
        self.long_options = []
        self.actions = dict()
        self._on_invalid_option = do_nothing

    def __call__(self, argv):
        program_name = argv[0]
        from getopt import getopt, GetoptError
        try:
            options, arguments = getopt(argv[1:],
                                        self.short_options,
                                        self.long_options)
        except GetoptError, e:
            invalid_option = e.opt
            self._on_invalid_option(program_name, invalid_option)
        else:
            for option, value in options:
                if option in self.actions:
                    # First recognized option wins; skip arguments entirely.
                    self.actions[option](program_name)
                    return
            for argument in arguments:
                self.argument_action(argument)
            self.default_action()

    def on_invalid_option(self, action):
        self._on_invalid_option = action

    def on_help(self, action):
        self.add_option('help', action, 'h')

    def on_version(self, action):
        self.add_option('version', action)

    def add_option(self, long_option, action, short_aliases=''):
        """Register --long_option (and optional one-letter aliases) -> action."""
        self.long_options.append(long_option)
        self.actions['--' + long_option] = action
        for short_alias in short_aliases:
            self.add_short_option(short_alias, action)

    def add_short_option(self, short_option, action):
        self.short_options += short_option
        self.actions['-' + short_option] = action

    def on_argument(self, argument_action):
        self.argument_action = argument_action

    def as_default(self, default_action):
        self.default_action = default_action
class CleanableTrashcan:
    """Delete trashed entries (backup copy plus .trashinfo) via a remover."""

    def __init__(self, file_remover):
        self._file_remover = file_remover

    def delete_orphan(self, path_to_backup_copy):
        """Remove a backup copy that has no matching .trashinfo file."""
        self._file_remover.remove_file(path_to_backup_copy)

    def delete_trashinfo_and_backup_copy(self, trashinfo_path):
        """Remove an entry: its backup copy (if any) first, then its metadata."""
        backup_copy = self._path_of_backup_copy(trashinfo_path)
        self._file_remover.remove_file_if_exists(backup_copy)
        self._file_remover.remove_file(trashinfo_path)

    def _path_of_backup_copy(self, path_to_trashinfo):
        # info/<name>.trashinfo  ->  files/<name> under the same trash dir.
        info_dir, info_name = os.path.split(path_to_trashinfo)
        trash_dir = os.path.dirname(info_dir)
        return os.path.join(trash_dir, 'files', info_name[:-len('.trashinfo')])
class ExpiryDate:
    """Decide whether a trashed entry is old enough to be purged.

    Without set_max_age_in_days() every entry is deleted unconditionally;
    with it, only entries whose DeletionDate is older than the limit go.
    """

    def __init__(self, contents_of, now, trashcan):
        self._contents_of = contents_of
        self._now         = now
        # Strategy slot: swapped to _delete_according_date when a max age
        # is configured.
        self._maybe_delete = self._delete_unconditionally
        self._trashcan = trashcan

    def set_max_age_in_days(self, arg):
        self.max_age_in_days = int(arg)
        self._maybe_delete = self._delete_according_date

    def delete_if_expired(self, trashinfo_path):
        self._maybe_delete(trashinfo_path)

    def _delete_according_date(self, trashinfo_path):
        contents = self._contents_of(trashinfo_path)
        # ParseTrashInfo invokes on_deletion_date with the parsed date;
        # IfDate then deletes only when it is older than the limit.
        ParseTrashInfo(
            on_deletion_date=IfDate(
                OlderThan(self.max_age_in_days, self._now),
                lambda: self._delete_unconditionally(trashinfo_path)
            ),
        )(contents)

    def _delete_unconditionally(self, trashinfo_path):
        self._trashcan.delete_trashinfo_and_backup_copy(trashinfo_path)
class TrashDirs:
    """Enumerate trash directories, reporting each through event callbacks."""

    def __init__(self, environ, getuid, list_volumes, top_trashdir_rules):
        self.getuid             = getuid
        self.mount_points       = list_volumes
        self.top_trashdir_rules = top_trashdir_rules
        self.home_trashcan      = HomeTrashCan(environ)
        # events (callers overwrite these to receive results)
        self.on_trash_dir_found = lambda trashdir, volume: None
        self.on_trashdir_skipped_because_parent_not_sticky = lambda trashdir: None
        self.on_trashdir_skipped_because_parent_is_symlink = lambda trashdir: None

    def list_trashdirs(self):
        """Walk the home trash dir plus both per-volume variants."""
        self.emit_home_trashcan()
        self._for_each_volume_trashcan()

    def emit_home_trashcan(self):
        def return_result_with_volume(trashcan_path):
            # The home trash is conventionally reported on volume '/'.
            self.on_trash_dir_found(trashcan_path, '/')
        self.home_trashcan.path_to(return_result_with_volume)

    def _for_each_volume_trashcan(self):
        for volume in self.mount_points():
            self.emit_trashcans_for(volume)

    def emit_trashcans_for(self, volume):
        self.emit_trashcan_1_for(volume)
        self.emit_trashcan_2_for(volume)

    def emit_trashcan_1_for(self, volume):
        # "$topdir/.Trash/$uid" — only valid when the parent passes the
        # freedesktop.org sticky/non-symlink checks.
        top_trashdir_path = os.path.join(volume, '.Trash/%s' % self.getuid())
        class IsValidOutput:
            # '_' is the (unused) self of this ad-hoc callback object; the
            # closed-over names come from the enclosing method.
            def not_valid_parent_should_not_be_a_symlink(_):
                self.on_trashdir_skipped_because_parent_is_symlink(top_trashdir_path)
            def not_valid_parent_should_be_sticky(_):
                self.on_trashdir_skipped_because_parent_not_sticky(top_trashdir_path)
            def is_valid(_):
                self.on_trash_dir_found(top_trashdir_path, volume)
        self.top_trashdir_rules.valid_to_be_read(top_trashdir_path, IsValidOutput())

    def emit_trashcan_2_for(self, volume):
        # "$topdir/.Trash-$uid" — reported unconditionally.
        alt_top_trashdir = os.path.join(volume, '.Trash-%s' % self.getuid())
        self.on_trash_dir_found(alt_top_trashdir, volume)
from datetime import datetime
class EmptyCmd:
    """Implementation of the `trash-empty` command."""

    def __init__(self, out, err, environ, list_volumes,
                 now          = datetime.now,
                 file_reader  = FileSystemReader(),
                 getuid       = os.getuid,
                 file_remover = FileRemover(),
                 version      = version):
        # NOTE(review): FileSystemReader()/FileRemover() defaults are
        # single shared instances across EmptyCmd objects — harmless if
        # stateless; confirm.
        self.out         = out
        self.err         = err
        self.file_reader = file_reader
        top_trashdir_rules = TopTrashDirRules(file_reader)
        self.trashdirs = TrashDirs(environ, getuid,
                                   list_volumes = list_volumes,
                                   top_trashdir_rules = top_trashdir_rules)
        self.harvester = Harvester(file_reader)
        self.version   = version
        self._cleaning = CleanableTrashcan(file_remover)
        self._expiry_date = ExpiryDate(file_reader.contents_of, now,
                                       self._cleaning)

    def run(self, *argv):
        """Parse options and purge; returns the process exit code."""
        self.exit_code = EX_OK
        parse = Parser()
        parse.on_help(PrintHelp(self.description, self.println))
        parse.on_version(PrintVersion(self.println, self.version))
        # A bare positional argument is the max age in days.
        parse.on_argument(self._expiry_date.set_max_age_in_days)
        parse.as_default(self._empty_all_trashdirs)
        parse.on_invalid_option(self.report_invalid_option_usage)
        parse(argv)
        return self.exit_code

    def report_invalid_option_usage(self, program_name, option):
        self.err.write(
            "{program_name}: invalid option -- '{option}'\n".format(**locals()))
        self.exit_code |= EX_USAGE

    def description(self, program_name, printer):
        """Feed the --help text through *printer*."""
        printer.usage('Usage: %s [days]' % program_name)
        printer.summary('Purge trashed files.')
        printer.options(
            " --version show program's version number and exit",
            " -h, --help show this help message and exit")
        printer.bug_reporting()

    def _empty_all_trashdirs(self):
        # Wire the harvester to the expiry/cleanup logic and walk all dirs.
        self.harvester.on_trashinfo_found = self._expiry_date.delete_if_expired
        self.harvester.on_orphan_found = self._cleaning.delete_orphan
        self.trashdirs.on_trash_dir_found = self.harvester._analize_trash_directory
        self.trashdirs.list_trashdirs()

    def println(self, line):
        self.out.write(line + '\n')
class Harvester:
    """Scan a trash directory, emitting events for trashinfos and orphans."""

    def __init__(self, file_reader):
        self.file_reader = file_reader
        self.trashdir = TrashDir(self.file_reader)
        # Event hooks — callers overwrite these no-ops to receive results.
        self.on_orphan_found     = do_nothing
        self.on_trashinfo_found  = do_nothing
        self.on_volume           = do_nothing

    def _analize_trash_directory(self, trash_dir_path, volume_path):
        # Signature matches TrashDirs.on_trash_dir_found.
        self.on_volume(volume_path)
        self.trashdir.open(trash_dir_path, volume_path)
        self.trashdir.each_trashinfo(self.on_trashinfo_found)
        self.trashdir.each_orphan(self.on_orphan_found)
class IfDate:
    """Callable that runs a thunk only when a date meets a criterion."""

    def __init__(self, date_criteria, then):
        self.date_criteria = date_criteria
        self.then = then

    def __call__(self, candidate_date):
        if not self.date_criteria(candidate_date):
            return
        self.then()
class OlderThan:
    """Predicate: is a deletion date strictly before now() - days_ago days?"""

    def __init__(self, days_ago, now):
        from datetime import timedelta
        # Compute the cutoff once at construction time.
        self.limit_date = now() - timedelta(days_ago)

    def __call__(self, deletion_date):
        return deletion_date < self.limit_date
class PrintHelp:
    """Callable that renders a command's --help text.

    The command supplies a description(program_name, printer) callback;
    the nested Printer formats the usage/summary/options sections through
    the given println.
    """

    def __init__(self, description, println):
        class Printer:
            def __init__(self, println):
                self.println = println
            def usage(self, usage):
                self.println(usage)
                self.println('')
            def summary(self, summary):
                self.println(summary)
                self.println('')
            def options(self, *line_describing_option):
                self.println('Options:')
                for line in line_describing_option:
                    self.println(line)
                self.println('')
            def bug_reporting(self):
                self.println("Report bugs to http://code.google.com/p/trash-cli/issues")
        self.description = description
        self.printer = Printer(println)

    def __call__(self, program_name):
        self.description(program_name, self.printer)
class PrintVersion:
    """Emit "<program> <version>" through the given println callable."""
    def __init__(self, println, version):
        self.println = println
        self.version = version
    def __call__(self, program_name):
        message = "%s %s" % (program_name, self.version)
        self.println(message)
class TopTrashDirRules:
    """Decide whether a top-level ``$topdir/.Trash/$uid`` dir is safe to read.

    Per the FreeDesktop trash spec, the parent ``.Trash`` directory must be
    sticky and must not be a symlink. Verdicts are reported on *output*.
    """
    def __init__(self, fs):
        self.fs = fs
    def valid_to_be_read(self, path, output):
        parent_trashdir = os.path.dirname(path)
        # A missing trash dir is silently ignored: no callback at all.
        if not self.fs.exists(path):
            return
        if not self.fs.is_sticky_dir(parent_trashdir):
            output.not_valid_parent_should_be_sticky()
        elif self.fs.is_symlink(parent_trashdir):
            output.not_valid_parent_should_not_be_a_symlink()
        else:
            output.is_valid()
class Dir:
    """A directory path paired with a function that lists its entries."""
    def __init__(self, path, entries_if_dir_exists):
        self.path = path
        self.entries_if_dir_exists = entries_if_dir_exists
    def entries(self):
        """List the names inside this directory."""
        lister = self.entries_if_dir_exists
        return lister(self.path)
    def full_path(self, entry):
        """Join *entry* onto this directory's path."""
        return os.path.join(self.path, entry)
class TrashDir:
    """Read-only view over one trash directory (its files/ and info/ subdirs)."""
    def __init__(self, file_reader):
        # file_reader supplies entries_if_dir_exists() and exists();
        # injected so the filesystem can be faked in tests.
        self.file_reader = file_reader
    def open(self, path, volume_path):
        """Bind this instance to a concrete trash dir; must precede iteration."""
        self.trash_dir_path = path
        self.volume_path = volume_path
        self.files_dir = Dir(self._files_dir(),
                             self.file_reader.entries_if_dir_exists)
    def each_orphan(self, action):
        """Invoke *action* on every file in files/ lacking a .trashinfo in info/."""
        for entry in self.files_dir.entries():
            trashinfo_path = self._trashinfo_path_from_file(entry)
            file_path = self.files_dir.full_path(entry)
            if not self.file_reader.exists(trashinfo_path): action(file_path)
    def _entries_if_dir_exists(self, path):
        return self.file_reader.entries_if_dir_exists(path)
    def each_trashinfo(self, action):
        """Invoke *action* with the full path of every .trashinfo in info/."""
        for entry in self._trashinfo_entries():
            action(os.path.join(self._info_dir(), entry))
    def _info_dir(self):
        # $trash_dir/info holds the .trashinfo metadata files.
        return os.path.join(self.trash_dir_path, 'info')
    def _trashinfo_path_from_file(self, file_entry):
        return os.path.join(self._info_dir(), file_entry + '.trashinfo')
    def _files_dir(self):
        # $trash_dir/files holds the trashed files themselves.
        return os.path.join(self.trash_dir_path, 'files')
    def _trashinfo_entries(self, on_non_trashinfo=do_nothing):
        # Yield only *.trashinfo names; anything else triggers the
        # (default no-op) on_non_trashinfo callback.
        for entry in self._entries_if_dir_exists(self._info_dir()):
            if entry.endswith('.trashinfo'):
                yield entry
            else:
                on_non_trashinfo()
class ParseError(ValueError):
    """Raised when a .trashinfo file lacks a parsable Path line."""
class LazyTrashInfoParser:
    """Parse a .trashinfo file on demand.

    *contents* is a zero-argument callable returning the file text, so
    nothing is read until a field is actually requested.
    """
    def __init__(self, contents, volume_path):
        self.contents = contents
        self.volume_path = volume_path
    def deletion_date(self):
        """Parsed DeletionDate field of the trashinfo."""
        return parse_deletion_date(self.contents())
    def original_location(self):
        """Absolute original location: volume path joined with the stored path."""
        relative_path = self._path()
        return os.path.join(self.volume_path, relative_path)
    def _path(self):
        return parse_path(self.contents())
def maybe_parse_deletion_date(contents):
    """Return the DeletionDate parsed from *contents*, or the unknown marker.

    Both a missing and an invalid date yield ``unknown_date()``.
    """
    basket = Basket(unknown_date())
    parser = ParseTrashInfo(
        on_deletion_date = basket.collect,
        on_invalid_date = lambda: basket.collect(unknown_date())
    )
    parser(contents)
    return basket.collected
def unknown_date():
    """Placeholder shown when a trashinfo has no parsable deletion date."""
    return '????-??-?? ??:??:??'
class ParseTrashInfo:
    """Event-driven parser for .trashinfo file contents.

    Callbacks fire as ``DeletionDate=`` and ``Path=`` lines are recognized;
    a malformed date triggers ``on_invalid_date`` instead.
    """
    def __init__(self,
                 on_deletion_date = do_nothing,
                 on_invalid_date = do_nothing,
                 on_path = do_nothing):
        self.found_deletion_date = on_deletion_date
        self.found_invalid_date = on_invalid_date
        self.found_path = on_path
    def __call__(self, contents):
        from datetime import datetime
        import urllib
        for line in contents.split('\n'):
            if line.startswith('DeletionDate='):
                try:
                    parsed = datetime.strptime(line, "DeletionDate=%Y-%m-%dT%H:%M:%S")
                except ValueError:
                    self.found_invalid_date()
                else:
                    self.found_deletion_date(parsed)
            if line.startswith('Path='):
                # NOTE: urllib.unquote is Python 2 API; this module targets py2.
                decoded = urllib.unquote(line[len('Path='):])
                self.found_path(decoded)
class Basket:
    """One-slot container remembering the most recently collected value."""
    def __init__(self, initial_value = None):
        self.collected = initial_value
    def collect(self, value):
        # Overwrites whatever was collected before.
        self.collected = value
def parse_deletion_date(contents):
    """Return the parsed DeletionDate from *contents*, or None when absent."""
    basket = Basket()
    parse = ParseTrashInfo(on_deletion_date=basket.collect)
    parse(contents)
    return basket.collected
def parse_path(contents):
    """Extract and URL-decode the ``Path=`` field; raise ParseError if missing."""
    import urllib
    for line in contents.split('\n'):
        if not line.startswith('Path='):
            continue
        # NOTE: urllib.unquote is Python 2 API; this module targets py2.
        return urllib.unquote(line[len('Path='):])
    raise ParseError('Unable to parse Path')
|
gpl-2.0
| 731,896,114,119,365,900
| 38.673813
| 129
| 0.59289
| false
| 3.711073
| false
| false
| false
|
emonty/burrow
|
burrow/tests/__init__.py
|
1
|
1565
|
# Copyright (C) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Unittests for burrow.'''
import atexit
import os
import signal
import sys
import time
import burrow
def start_server():
    '''Fork and start the server, saving the pid in a file.'''
    # Make sure no stale server from a previous run is still alive.
    kill_server()
    pid = os.fork()
    if pid == 0:
        # Child process: run the burrow server until it is signalled.
        server = burrow.Server(add_default_log_handler=False)
        # default_ttl=0 keeps messages from expiring during the tests.
        # NOTE(review): assumes frontends[0] is the HTTP frontend - confirm.
        server.frontends[0].default_ttl = 0
        server.run()
        sys.exit(0)
    # Parent process: record the child's pid so kill_server() can find it.
    pid_file = open('TestHTTP.pid', 'w')
    pid_file.write(str(pid))
    pid_file.close()
    # Guarantee teardown when the test process exits.
    atexit.register(kill_server)
    # Give the child a moment to start before tests hit it.
    time.sleep(1)
def kill_server():
    '''Try killing the server if the pid file exists.'''
    try:
        pid_file = open('TestHTTP.pid', 'r')
        pid = pid_file.read()
        pid_file.close()
        try:
            # SIGUSR1 first, then SIGTERM after a one-second grace period.
            # NOTE(review): presumably SIGUSR1 requests a graceful shutdown
            # of burrow.Server - confirm against the server's signal handling.
            os.kill(int(pid), signal.SIGUSR1)
            time.sleep(1)
            os.kill(int(pid), signal.SIGTERM)
        except OSError:
            # Process already gone; nothing left to kill.
            pass
        os.unlink('TestHTTP.pid')
    except IOError:
        # No pid file: no server was started (or it was already cleaned up).
        pass
# Launch the shared test server as soon as this test package is imported;
# cleanup is registered via atexit inside start_server().
start_server()
|
apache-2.0
| -4,100,098,598,457,448,400
| 25.982759
| 74
| 0.646645
| false
| 3.648019
| false
| false
| false
|
CopyChat/Plotting
|
Downscaling/bias.RSDS.GCMs.py
|
1
|
5090
|
#!/usr/bin/env python
########################################
#Globale Karte fuer tests
# from Rabea Amther
########################################
# http://gfesuite.noaa.gov/developer/netCDFPythonInterface.html
import math
import numpy as np
import pylab as pl
import Scientific.IO.NetCDF as IO
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import matplotlib.lines as lines
from mpl_toolkits.basemap import Basemap , addcyclic
from matplotlib.colors import LinearSegmentedColormap
import textwrap
pl.close('all')
########################## for CMIP5 charactors
# Input/selection parameters for the CMIP5 historical runs over the
# South-West Indian Ocean (SWIO) domain.
DIR='/Users/tang/climate/CMIP5/hist/SWIO'
VARIABLE='rsds'
PRODUCT='Amon'
ENSEMBLE='r1i1p1'
EXPERIMENT='hist'
TIME='195001-200512'
#OBS='CRU'
OBS='CERES'
season='summer'
#season='winter'
K=0
# Models with no usable input file; the main loop substitutes the
# observation field for them (producing a zero-bias placeholder panel).
NonData=['EC-EARTH-XXXX','CSIRO-Mk3-6-0-XXXXXX']
GCMs=[\
        'ACCESS1-0',\
        'BNU-ESM',\
        'CCSM4',\
        'CESM1-BGC',\
        'CESM1-CAM5',\
        'CESM1-FASTCHEM',\
        'CESM1-WACCM',\
        'CMCC-CESM',\
        'CNRM-CM5',\
        'CSIRO-Mk3-6-0',\
        'CanESM2',\
        'EC-EARTH',\
        'FIO-ESM',\
        'GFDL-ESM2M',\
        'GISS-E2-H',\
        'HadGEM2-AO',\
        'HadGEM2-ES',\
        'IPSL-CM5A-LR',\
        'IPSL-CM5A-MR',\
        'MIROC-ESM-CHEM',\
        'MIROC-ESM',\
        'MIROC5',\
        'MPI-ESM-LR',\
        'MPI-ESM-MR',\
        'MPI-ESM-P',\
        'MRI-CGCM3',\
        'MRI-ESM1',\
        'NorESM1-ME',\
        'NorESM1-M',\
        'bcc-csm1-1-m',\
        'bcc-csm1-1',\
        'inmcm4',\
        ]
# NOTE(review): this list rebinds (shadows) the scalar ENSEMBLE defined
# above; entry i is the ensemble member used for GCMs[i].
ENSEMBLE=[ \
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r12i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r2i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        'r1i1p1',\
        ]
COLOR=['darkred','darkblue','darkgreen','deeppink',\
        'black','orangered','cyan','magenta']
# read CERES data:
# (Python 2 script: uses print statements and Scientific.IO.NetCDF.)
# Pick the observational reference depending on OBS: CERES surface
# downwelling shortwave radiation, or MODIS cloud fraction otherwise.
if OBS == 'CERES':
    oVar='rsds'
    obs1='/Users/tang/climate/GLOBALDATA/OBSDATA/CERES/rsds_CERES-EBAF_L3B_Ed2-8_2001-2005.NDJFMA.SWIO.nc'
else:
    # read MODIS data:
    oVar='clt'
    obs1='/Users/tang/climate/GLOBALDATA/OBSDATA/MODIS/clt_MODIS_L3_C5_200101-200512.ymonmean.NDJFMA.SWIO.nc'
print obs1
obsfile1=IO.NetCDFFile(obs1,'r')
# First time slice of the observed field, kept as the comparison baseline.
ObsVar=obsfile1.variables[oVar][0][:][:].copy()
# One map panel per model: read the remapped model field, subtract the
# observations, and draw the bias on an 8x4 grid of subplots.
for idx,Model in enumerate(GCMs):
    if OBS == 'CERES':
        infile1=DIR+\
                '/rsds_Amon_'+Model+'_historical_'+ENSEMBLE[idx]+\
                '_200101-200512.summer.remap.CERES.SWIO.nc'
        #GFDL-ESM2M/clt_Amon_GFDL-ESM2M_historical_r1i1p1_200101-200512.nc.summer.mean.nc.remap.nc
        #rsds_Amon_bcc-csm1-1_historical_r1i1p1_200101-200512.summer.remap.CERES.SWIO.nc
    else:
        infile1=DIR+'/'+\
                'clt_Amon_'+Model+'_historical_'+ENSEMBLE[idx]+\
                '_200101-200512.'+season+'.remap.modis.SWIO.nc'
    print infile1
    # Models without data reuse the observation file, so their panel
    # shows zero bias (placeholder).
    if Model in NonData:
        infile1=obsfile1
        VAR=infile1.variables[oVar][0,:,:].copy()
    else:
        print 'k=',idx
        infile1=IO.NetCDFFile(infile1,'r')
        VAR=infile1.variables[VARIABLE][0,:,:].copy()
    print 'the variable tas ===============: '
    print VAR
    #open input files
    # read the variables:
    lat = infile1.variables['lat'][:].copy()
    lon = infile1.variables['lon'][:].copy()
    print np.shape(VAR)
    print np.shape(ObsVar)
    # Model minus observation on the common (remapped) grid.
    Bias=VAR-ObsVar
    print np.shape(Bias)
    #quit()
    CoLev=10 #number of levels of colorbar
    #=================================================== to plot
    fig=plt.subplot(8,4,idx+1,aspect='equal')
    print "============="
    print idx; print Model
    # NOTE(review): 'map' shadows the builtin; kept as in the original.
    map=Basemap(projection='cyl',llcrnrlat=np.min(lat),urcrnrlat=np.max(lat),\
            llcrnrlon=np.min(lon),urcrnrlon=np.max(lon),resolution='l')
    map.drawcoastlines(linewidth=0.35)
    map.drawparallels(np.arange(-90.,91.,15.),labels=[1,0,0,0],linewidth=0.35)
    map.drawmeridians(np.arange(-180.,181.,20.),labels=[0,0,0,1],linewidth=0.35)
    map.drawmapboundary()
    x,y=map(lon,lat)
    cmap=plt.get_cmap('bwr')
    #cmap=plt.get_cmap('RdBu_r')
    pic=map.pcolormesh(x,y,Bias,cmap=cmap)
    plt.title(GCMs[idx])
    #plt.figtext(0.68,0.73,timestamp, size="small")
    #set the same colorbar range
    pic.set_clim(vmin=-100,vmax=100)
    plt.subplots_adjust(bottom=0.1, right=0.8, top=0.9)
    cax = plt.axes([0.85, 0.1, 0.01, 0.8])
    plt.colorbar(cax=cax)
    #if idx > 11:
    #plt.colorbar(orientation='horizontal') # draw colorbar
    #plt.legend(loc=2)
plt.suptitle('seasonal mean bias of Surface Downwelling SW radiation (W m-2) vs CERES',fontsize=18)
plt.show()
quit()
|
gpl-3.0
| -1,949,927,725,766,545,200
| 24.323383
| 109
| 0.550295
| false
| 2.649662
| false
| false
| false
|
pumbaEO/undiff1c
|
setup.py
|
1
|
3623
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Based on https://github.com/pypa/sampleproject/blob/master/setup.py."""
from __future__ import unicode_literals
# To use a consistent encoding
import codecs
import os
from setuptools import setup, find_packages
import sys
# Shortcut for building/publishing to Pypi
# ("python setup.py publish" builds sdist+wheel, uploads, and exits
# before the setup() call below ever runs).
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist bdist_wheel upload')
    sys.exit()
def parse_reqs(req_path='./requirements.txt'):
    """Recursively parse requirements from nested pip files.

    Blank lines and ``#`` comments are ignored; ``-r <path>`` lines are
    followed recursively and their entries spliced in.
    """
    install_requires = []
    with codecs.open(req_path, 'r') as handle:
        stripped = (raw.strip() for raw in handle)
        # Keep only meaningful lines: non-empty and not comments.
        meaningful = [entry for entry in stripped
                      if entry and not entry.startswith('#')]
    for entry in meaningful:
        if entry.startswith('-r'):
            # Nested requirements file: recurse into the referenced path.
            install_requires.extend(parse_reqs(req_path=entry[3:]))
        else:
            install_requires.append(entry)
    return install_requires
def parse_readme():
    """Return the contents of README.md next to this setup script (UTF-8)."""
    here = os.path.abspath(os.path.dirname(__file__))
    readme_path = os.path.join(here, 'README.md')
    with codecs.open(readme_path, encoding='utf-8') as handle:
        return handle.read()
setup(
    name='undiff1c',

    # Versions should comply with PEP440. For a discussion on
    # single-sourcing the version across setup.py and the project code,
    # see http://packaging.python.org/en/latest/tutorial.html#version
    version='1.0.1',

    # NOTE(review): leftover template text ("Vanguard contains all the
    # boilerplate...") - should describe undiff1c itself; confirm intent
    # before rewording a published description.
    description='Vanguard contains all the boilerplate you need to bootstrap a modern Python package.',
    long_description=parse_readme(),

    # What does your project relate to? Separate with spaces.
    keywords='undiff1c',
    author='Shenja Sosna',
    author_email='shenja@sosna.zp.ua',
    license='Apache 2.0',

    # The project's main homepage
    url='https://github.com/pumbaEO/undiff1c',

    packages=find_packages(exclude=('tests*', 'docs', 'examples')),

    # If there are data files included in your packages that need to be
    # installed, specify them here.
    include_package_data=True,
    zip_safe=False,

    # Install requirements loaded from ``requirements.txt``
    install_requires=parse_reqs(),
    test_suite='tests',

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and
    # allow pip to create the appropriate form of executable for the
    # target platform.
    entry_points=dict(
        console_scripts=[
            'undiff1c = undiff1c.undiff1c:main',
        ],
    ),

    # See: http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are:
        # 3 - Alpha
        # 4 - Beta
        # 5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development',

        # Pick your license as you wish (should match "license" above).
        # Fixed: previously declared "MIT License", contradicting
        # license='Apache 2.0' above and the project's actual license.
        'License :: OSI Approved :: Apache Software License',

        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 3.4',
        'Environment :: Console',
    ],
)
|
apache-2.0
| 4,677,452,139,056,141,000
| 31.63964
| 103
| 0.646426
| false
| 4.079955
| false
| false
| false
|
matevzmihalic/wlansi-store
|
wlansi_store/migrations/0003_auto__add_field_product_language.py
|
1
|
5905
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the ``language`` field to Product.

    Auto-generated by South; the frozen ORM state in ``models`` below
    should not be edited by hand.
    """

    def forwards(self, orm):
        # Adding field 'Product.language'
        # default='en' backfills existing rows; keep_default=False drops the
        # default from the schema afterwards (South convention).
        db.add_column('wlansi_store_product', 'language',
                      self.gf('django.db.models.fields.CharField')(default='en', max_length=15),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Product.language'
        db.delete_column('wlansi_store_product', 'language')

    # Frozen snapshot of every model this migration depends on.
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'shop.product': {
            'Meta': {'object_name': 'Product'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
        },
        'wlansi_store.item': {
            'Meta': {'object_name': 'Item'},
            'has_nodewatcher_firmware': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"}),
            'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        'wlansi_store.price': {
            'Meta': {'object_name': 'Price'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'}),
            'price_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"})
        },
        'wlansi_store.product': {
            'Meta': {'object_name': 'Product', '_ormbases': ['shop.Product']},
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
            'placeholders': ('djangocms_utils.fields.M2MPlaceholderField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'})
        },
        'wlansi_store.productplugin': {
            'Meta': {'object_name': 'ProductPlugin', 'db_table': "'cmsplugin_productplugin'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"})
        }
    }

    complete_apps = ['wlansi_store']
|
agpl-3.0
| 71,648,788,243,909,860
| 65.359551
| 193
| 0.55597
| false
| 3.695244
| false
| false
| false
|
andredalton/bcc
|
2014/MAC0242/miniep5/test.py
|
1
|
8640
|
#! /usr/bin/env python3
import os, random, unittest, subprocess
from miniep5 import Calc
def gera_var():
    """Generate a random variable name of 1 to 10 vowel characters.

    The RNG consumption pattern (one randint, then one choice per
    character) is kept identical to remain seed-compatible.
    """
    vowels = ["a", "e", "i", "o", "u"]
    length = random.randint(1, 10)
    return "".join(random.choice(vowels) for _ in range(length))
def gera_teste(num, op, vr=0):
    """
    Generate a random RPN test expression, where:
        num: number of floats and variables generated
        op:  number of operators generated
        vr:  probability of inserting a new variable
    """
    lst = []
    var = []
    var_atrr = set()
    i = 0 # Number of floats and variables
    j = 0 # Number of operators
    # NOTE(review): p is never used below.
    p = random.random() # Probability of an operator being inserted at the start of the expression
    pws = random.random() # Probability of inserting whitespace
    pvr = vr # Probability of a variable occurring
    patr = random.random() # Probability of assigning to a variable.
    tokens = ["+", "-", "*", "/"]
    while i < num:
        r = random.random()
        if r < pws:
            # Insert a random run of blanks (spaces or tabs)
            lst.append( random.choice([" ", "\t"]) * random.randint(1, 30) )
        if r < patr:
            if len(var) > 0 and var[-1]['num']==1:
                # Assign to a variable
                v = var.pop()
                var_atrr.add(v['nome'])
                lst.append("=")
                if len(var)>0:
                    var[-1]['num'] += 1
                j += 1
        elif i > j + 1 + len(var) + len(var_atrr):
            # Insert an operator.
            if len(var) == 0 or ( len(var)>0 and var[-1]['num']>0 ):
                if len(var) > 0:
                    var[-1]['num'] -= 1
                lst.append( random.choice(tokens) )
                j += 1
        if i < num-1 and r < pvr:
            # Insert a variable.
            v = gera_var()
            var.append({'nome': v, 'num': 0})
            lst.append(v)
        else:
            # Insert a number
            if len(var) > 0:
                var[-1]['num'] += 1
            lst.append( str(random.random()*10**random.randint(-30,30)) )
            i += 1
    # Close any variables still open: assign them ("=") or consume their
    # pending operands with extra operators.
    while len(var)>0:
        if var[-1]['num'] <= 1:
            var.pop()
            lst.append("=")
            if len(var) > 0:
                var[-1]['num'] += 1
        else:
            lst.append( random.choice(tokens) )
            var[-1]['num'] -= 1
            j += 1
    # Pad with operators until the requested operator count is reached.
    for j in range(j, op):
        lst.append( random.choice(tokens) )
    return " ".join(lst)
class Test(unittest.TestCase):
    """Randomized tests for the RPN calculator (Calc) from miniep5."""
    def setUp(self):
        """Create a fresh calculator (debug mode) before each test."""
        self.calc = Calc(True)
    # Tests for inf, -inf and nan
    def test_inf(self):
        """An oversized integer literal overflows to inf."""
        inf = self.calc.analisar("1" * 1000)
        self.assertEqual( inf, float("inf") )
    def test_minf(self):
        """Zero minus an oversized literal yields -inf."""
        minf = self.calc.analisar("0 %s -" % ("1"*1000))
        self.assertEqual( minf, float("-inf") )
    def test_nan(self):
        """inf + (-inf) yields nan (nan is not equal to itself)."""
        out = self.calc.analisar("%(b)s 0 %(b)s - +" % {'b': "1"*1000})
        self.assertNotEqual( out, out )
    # Basic algebraic properties of the four operations.
    def test_som_comutativa(self):
        """Addition commutes: 'a b +' equals 'b a +'."""
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f +" % dic)
        bout = self.calc.analisar("%(b)f %(a)f +" % dic)
        self.assertEqual( aout, bout )
    def test_som_elemento_neutro(self):
        """Zero is the additive identity."""
        a = random.random()
        aout = self.calc.analisar("%.100f 0 +" % a)
        self.assertEqual( a, aout )
    def test_som_anulamento(self):
        """a + (0 - a) == 0."""
        dic = {'a': random.random()}
        aout = self.calc.analisar("%(a)f 0 %(a)f - +" %dic)
        self.assertEqual( 0, aout )
    def test_sub_comutativa(self):
        """Subtraction does not commute."""
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f -" % dic)
        bout = self.calc.analisar("%(b)f %(a)f -" % dic)
        self.assertNotEqual( aout, bout )
    def test_sub_elemento_neutro(self):
        """Subtracting zero is the identity."""
        a = random.random()
        aout = self.calc.analisar("%.100f 0 -" % a)
        self.assertEqual( a, aout )
    def test_sub_anulamento(self):
        """a - a == 0."""
        dic = {'a': random.random()}
        aout = self.calc.analisar("%(a)f %(a)f -" % dic)
        self.assertEqual( 0, aout )
    def test_mul_comutativa(self):
        """Multiplication commutes.

        NOTE(review): both expressions are identical ('a b *' twice), so
        this does not actually exercise commutativity; 'bout' was probably
        meant to use "%(b)f %(a)f *".
        """
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f *" % dic)
        bout = self.calc.analisar("%(a)f %(b)f *" % dic)
        self.assertEqual( aout, bout )
    def test_mul_elemento_neutro(self):
        """One is the multiplicative identity."""
        a = random.random()
        aout = self.calc.analisar("%.100f 1 *" % a)
        self.assertEqual( a, aout )
    def test_mul_elemento_nulo(self):
        """Zero is the absorbing element of multiplication."""
        aout = self.calc.analisar("%.100f 0 *" % random.random())
        self.assertEqual( 0, aout )
    def test_div_comutativa(self):
        """Division does not commute."""
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f /" % dic)
        bout = self.calc.analisar("%(b)f %(a)f /" % dic)
        self.assertNotEqual( aout, bout )
    def test_div_elemento_neutro(self):
        """Dividing by one is the identity."""
        a = random.random()
        aout = self.calc.analisar("%.100f 1 /" % a)
        self.assertEqual( a, aout )
    def test_div_zero(self):
        """Division by zero raises ZeroDivisionError."""
        a = random.random()
        self.assertRaises(ZeroDivisionError, self.calc.analisar, "%.100f 0 /" % a)
    # Syntax tests.
    def test_sintaxe(self):
        """
        Valid syntax when numbers == operations - 1 (well-formed RPN):
        the result must convert cleanly to float.
        """
        n = random.randint(1, 10000)
        s = gera_teste(n, n-1)
        try:
            out = float(self.calc.analisar(s))
            conv = True
        except ValueError:
            conv = False
        self.assertTrue(conv)
    def test_erro_sintaxe1(self):
        """Syntax error when there are more numbers than operations - 1."""
        n = random.randint(1, 10000)
        s = gera_teste(n + random.randint(2, 100), n)
        self.assertRaises(LookupError, self.calc.analisar, s)
    def test_erro_sintaxe2(self):
        """Syntax error when there are fewer numbers than operations - 1."""
        n = random.randint(1, 10000)
        s = gera_teste(n, n + random.randint(0, 100))
        self.assertRaises(LookupError, self.calc.analisar, s)
    def test_caracter_incorreto(self):
        """An unspecified character raises an error."""
        self.assertRaises(TypeError, self.calc.analisar, random.choice(["!", "@", "$", "?"]) )
    # Variable tests.
    def test_variavel_nao_inicializada(self):
        """Using an uninitialized variable raises an error."""
        self.assertRaises(KeyError, self.calc.analisar, gera_var())
    def test_sintaxe_atribuicao(self):
        """
        Valid syntax when numbers + variables == operations - 1:
        the result must convert cleanly to float.
        """
        n = random.randint(1, 10000)
        s = gera_teste(n, n-1, 0.3)
        try:
            out = float(self.calc.analisar(s))
            conv = True
        except ValueError:
            conv = False
        self.assertTrue(conv)
    def test_atribuicao(self):
        """Assign a variable and reuse its value within the same expression."""
        a = random.random()
        dic = {'a': a, 'v': gera_var()}
        self.assertEqual(a*a+a, self.calc.analisar("%(v)s %(a).100f = %(v)s %(v)s * +" % dic))
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
apache-2.0
| -2,664,951,957,438,070,300
| 36.421739
| 118
| 0.537881
| false
| 3.065907
| true
| false
| false
|
graphql-python/graphql-core
|
src/graphql/validation/rules/unique_operation_names.py
|
1
|
1401
|
from typing import Any, Dict
from ...error import GraphQLError
from ...language import NameNode, OperationDefinitionNode, VisitorAction, SKIP
from . import ASTValidationContext, ASTValidationRule
__all__ = ["UniqueOperationNamesRule"]
class UniqueOperationNamesRule(ASTValidationRule):
"""Unique operation names
A GraphQL document is only valid if all defined operations have unique names.
"""
def __init__(self, context: ASTValidationContext):
super().__init__(context)
self.known_operation_names: Dict[str, NameNode] = {}
def enter_operation_definition(
self, node: OperationDefinitionNode, *_args: Any
) -> VisitorAction:
operation_name = node.name
if operation_name:
known_operation_names = self.known_operation_names
if operation_name.value in known_operation_names:
self.report_error(
GraphQLError(
"There can be only one operation"
f" named '{operation_name.value}'.",
[known_operation_names[operation_name.value], operation_name],
)
)
else:
known_operation_names[operation_name.value] = operation_name
return SKIP
@staticmethod
def enter_fragment_definition(*_args: Any) -> VisitorAction:
return SKIP
|
mit
| 7,453,067,494,000,858,000
| 34.025
| 86
| 0.619557
| false
| 4.654485
| false
| false
| false
|
goshow-jp/Kraken
|
Python/kraken_examples/bob_rig.py
|
1
|
10457
|
from kraken.core.maths import Vec3, Quat, Xfo
from kraken.core.objects.rig import Rig
from kraken_components.generic.mainSrt_component import MainSrtComponentRig
from kraken_components.biped.head_component import HeadComponentRig
from kraken_components.biped.clavicle_component import ClavicleComponentGuide, ClavicleComponentRig
from kraken_components.biped.arm_component import ArmComponentGuide, ArmComponentRig
from kraken_components.biped.leg_component import LegComponentGuide, LegComponentRig
from kraken_components.biped.spine_component import SpineComponentRig
from kraken_components.biped.neck_component import NeckComponentGuide, NeckComponentRig
from kraken.core.profiler import Profiler
class BobRig(Rig):
    """Simple biped test rig.

    This example shows how to create a simple scripted biped rig that loads data
    onto component rig classes and also onto guide classes. It also demonstrates
    how to make connections between components.
    """

    def __init__(self, name):
        """Build the whole biped rig under the given name.

        Components are created in two styles: some rig components are fed data
        directly via loadData(), others are driven from a guide component whose
        getRigBuildData() output is passed to the rig component.
        """
        Profiler.getInstance().push("Construct BobRig:" + name)
        super(BobRig, self).__init__(name)

        # Add Components
        mainSrtComponent = MainSrtComponentRig("mainSrt", self)

        # Spine: positions are given directly in world space.
        spineComponent = SpineComponentRig("spine", self)
        spineComponent.loadData(data={
            'cogPosition': Vec3(0.0, 11.1351, -0.1382),
            'spine01Position': Vec3(0.0, 11.1351, -0.1382),
            'spine02Position': Vec3(0.0, 11.8013, -0.1995),
            'spine03Position': Vec3(0.0, 12.4496, -0.3649),
            'spine04Position': Vec3(0.0, 13.1051, -0.4821),
            'numDeformers': 4
        })

        # Neck: loaded through a guide, then converted to rig build data.
        neckComponentGuide = NeckComponentGuide("neck")
        neckComponentGuide.loadData({
            "location": "M",
            "neckXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 16.0, -0.75), sc=Vec3(1.00000011921, 1.0, 1.00000011921)),
            "neckMidXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 16.5, -0.5), sc=Vec3(1.00000011921, 1.0, 1.00000011921)),
            "neckEndXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 17.0, -0.25), sc=Vec3(1.0, 1.0, 1.0))
        })

        neckComponent = NeckComponentRig("neck", self)
        neckComponent.loadData(neckComponentGuide.getRigBuildData())

        headComponent = HeadComponentRig("head", self)
        headComponent.loadData(data={
            "headXfo": Xfo(Vec3(0.0, 17.5, -0.5)),
            "eyeLeftXfo": Xfo(tr=Vec3(0.375, 18.5, 0.5), ori=Quat(Vec3(-0.0, -0.707106769085, -0.0), 0.707106769085)),
            "eyeRightXfo": Xfo(tr=Vec3(-0.375, 18.5, 0.5), ori=Quat(Vec3(-0.0, -0.707106769085, -0.0), 0.707106769085)),
            "jawXfo": Xfo(Vec3(0.0, 17.875, -0.275))
        })

        # Left/right clavicles share the component name; side is set via "location".
        clavicleLeftComponentGuide = ClavicleComponentGuide("clavicle")
        clavicleLeftComponentGuide.loadData({
            "location": "L",
            "clavicleXfo": Xfo(Vec3(0.1322, 15.403, -0.5723)),
            "clavicleUpVXfo": Xfo(Vec3(0.0, 1.0, 0.0)),
            "clavicleEndXfo": Xfo(Vec3(2.27, 15.295, -0.753))
        })

        clavicleLeftComponent = ClavicleComponentRig("clavicle", self)
        clavicleLeftComponent.loadData(data=clavicleLeftComponentGuide.getRigBuildData())

        clavicleRightComponentGuide = ClavicleComponentGuide("clavicle")
        clavicleRightComponentGuide.loadData({
            "location": "R",
            "clavicleXfo": Xfo(Vec3(-0.1322, 15.403, -0.5723)),
            "clavicleUpVXfo": Xfo(Vec3(0.0, 1.0, 0.0)),
            "clavicleEndXfo": Xfo(Vec3(-2.27, 15.295, -0.753))
        })

        clavicleRightComponent = ClavicleComponentRig("clavicle", self)
        clavicleRightComponent.loadData(data=clavicleRightComponentGuide.getRigBuildData())

        armLeftComponentGuide = ArmComponentGuide("arm")
        armLeftComponentGuide.loadData({
            "location": "L",
            "bicepXfo": Xfo(Vec3(2.27, 15.295, -0.753)),
            "forearmXfo": Xfo(Vec3(5.039, 13.56, -0.859)),
            "wristXfo": Xfo(Vec3(7.1886, 12.2819, 0.4906)),
            "handXfo": Xfo(tr=Vec3(7.1886, 12.2819, 0.4906),
                           ori=Quat(Vec3(-0.0865, -0.2301, -0.2623), 0.9331)),
            "bicepFKCtrlSize": 1.75,
            "forearmFKCtrlSize": 1.5
        })

        armLeftComponent = ArmComponentRig("arm", self)
        armLeftComponent.loadData(data=armLeftComponentGuide.getRigBuildData())

        armRightComponentGuide = ArmComponentGuide("arm")
        armRightComponentGuide.loadData({
            "location": "R",
            "bicepXfo": Xfo(Vec3(-2.27, 15.295, -0.753)),
            "forearmXfo": Xfo(Vec3(-5.039, 13.56, -0.859)),
            "wristXfo": Xfo(Vec3(-7.1886, 12.2819, 0.4906)),
            "handXfo": Xfo(tr=Vec3(-7.1886, 12.2819, 0.4906),
                           ori=Quat(Vec3(-0.2301, -0.0865, -0.9331), 0.2623)),
            "bicepFKCtrlSize": 1.75,
            "forearmFKCtrlSize": 1.5
        })

        armRightComponent = ArmComponentRig("arm", self)
        armRightComponent.loadData(data=armRightComponentGuide.getRigBuildData())

        legLeftComponentGuide = LegComponentGuide("leg")
        legLeftComponentGuide.loadData({
            "name": "Leg",
            "location": "L",
            "femurXfo": Xfo(Vec3(0.9811, 9.769, -0.4572)),
            "kneeXfo": Xfo(Vec3(1.4488, 5.4418, -0.5348)),
            "ankleXfo": Xfo(Vec3(1.841, 1.1516, -1.237)),
            "toeXfo": Xfo(Vec3(1.85, 0.4, 0.25)),
            "toeTipXfo": Xfo(Vec3(1.85, 0.4, 1.5))
        })

        legLeftComponent = LegComponentRig("leg", self)
        legLeftComponent.loadData(data=legLeftComponentGuide.getRigBuildData())

        legRightComponentGuide = LegComponentGuide("leg")
        legRightComponentGuide.loadData({
            "name": "Leg",
            "location": "R",
            "femurXfo": Xfo(Vec3(-0.9811, 9.769, -0.4572)),
            "kneeXfo": Xfo(Vec3(-1.4488, 5.4418, -0.5348)),
            "ankleXfo": Xfo(Vec3(-1.85, 1.1516, -1.237)),
            "toeXfo": Xfo(Vec3(-1.85, 0.4, 0.25)),
            "toeTipXfo": Xfo(Vec3(-1.85, 0.4, 1.5))
        })

        legRightComponent = LegComponentRig("leg", self)
        legRightComponent.loadData(data=legRightComponentGuide.getRigBuildData())

        # ============
        # Connections
        # ============
        # Each component exposes named inputs/outputs; wiring an input to an
        # output drives the downstream component from the upstream one.

        # Spine to Main SRT
        mainSrtRigScaleOutput = mainSrtComponent.getOutputByName('rigScale')
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')
        spineGlobalSrtInput = spineComponent.getInputByName('globalSRT')
        spineGlobalSrtInput.setConnection(mainSrtOffsetOutput)
        spineRigScaleInput = spineComponent.getInputByName('rigScale')
        spineRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Neck to Main SRT
        neckGlobalSrtInput = neckComponent.getInputByName('globalSRT')
        neckGlobalSrtInput.setConnection(mainSrtOffsetOutput)

        # Neck to Spine
        spineEndOutput = spineComponent.getOutputByName('spineEnd')
        neckSpineEndInput = neckComponent.getInputByName('neckBase')
        neckSpineEndInput.setConnection(spineEndOutput)

        # Head to Main SRT
        headGlobalSrtInput = headComponent.getInputByName('globalSRT')
        headGlobalSrtInput.setConnection(mainSrtOffsetOutput)
        headBaseInput = headComponent.getInputByName('worldRef')
        headBaseInput.setConnection(mainSrtOffsetOutput)

        # Head to Neck
        neckEndOutput = neckComponent.getOutputByName('neckEnd')
        headBaseInput = headComponent.getInputByName('neckRef')
        headBaseInput.setConnection(neckEndOutput)

        # Clavicle to Spine
        spineEndOutput = spineComponent.getOutputByName('spineEnd')
        clavicleLeftSpineEndInput = clavicleLeftComponent.getInputByName('spineEnd')
        clavicleLeftSpineEndInput.setConnection(spineEndOutput)
        clavicleRightSpineEndInput = clavicleRightComponent.getInputByName('spineEnd')
        clavicleRightSpineEndInput.setConnection(spineEndOutput)

        # Arm to Global SRT
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')
        armLeftGlobalSRTInput = armLeftComponent.getInputByName('globalSRT')
        armLeftGlobalSRTInput.setConnection(mainSrtOffsetOutput)
        armLeftRigScaleInput = armLeftComponent.getInputByName('rigScale')
        armLeftRigScaleInput.setConnection(mainSrtRigScaleOutput)

        armRightGlobalSRTInput = armRightComponent.getInputByName('globalSRT')
        armRightGlobalSRTInput.setConnection(mainSrtOffsetOutput)
        armRightRigScaleInput = armRightComponent.getInputByName('rigScale')
        armRightRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Arm To Clavicle Connections
        clavicleLeftEndOutput = clavicleLeftComponent.getOutputByName('clavicleEnd')
        armLeftClavicleEndInput = armLeftComponent.getInputByName('root')
        armLeftClavicleEndInput.setConnection(clavicleLeftEndOutput)
        clavicleRightEndOutput = clavicleRightComponent.getOutputByName('clavicleEnd')
        armRightClavicleEndInput = armRightComponent.getInputByName('root')
        armRightClavicleEndInput.setConnection(clavicleRightEndOutput)

        # Leg to Global SRT
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')
        legLeftGlobalSRTInput = legLeftComponent.getInputByName('globalSRT')
        legLeftGlobalSRTInput.setConnection(mainSrtOffsetOutput)
        legLeftRigScaleInput = legLeftComponent.getInputByName('rigScale')
        legLeftRigScaleInput.setConnection(mainSrtRigScaleOutput)

        legRightGlobalSRTInput = legRightComponent.getInputByName('globalSRT')
        legRightGlobalSRTInput.setConnection(mainSrtOffsetOutput)
        legRightRigScaleInput = legRightComponent.getInputByName('rigScale')
        legRightRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Leg To Pelvis Connections
        spinePelvisOutput = spineComponent.getOutputByName('pelvis')
        legLeftPelvisInput = legLeftComponent.getInputByName('pelvisInput')
        legLeftPelvisInput.setConnection(spinePelvisOutput)
        legRightPelvisInput = legRightComponent.getInputByName('pelvisInput')
        legRightPelvisInput.setConnection(spinePelvisOutput)

        Profiler.getInstance().pop()
|
bsd-3-clause
| 4,258,437,837,112,419,000
| 44.663755
| 182
| 0.670556
| false
| 3.114056
| false
| false
| false
|
trevor/calendarserver
|
txdav/who/idirectory.py
|
1
|
5509
|
# -*- test-case-name: txdav.who.test -*-
##
# Copyright (c) 2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
from __future__ import absolute_import
"""
Calendar and contacts directory extensions to L{twext.who.idirectory}.
"""
__all__ = [
"AutoScheduleMode",
"RecordType",
"FieldName",
]
from twisted.python.constants import Names, NamedConstant
from twext.who.idirectory import FieldName as BaseFieldName
#
# Data types
#
class AutoScheduleMode(Names):
    """
    Constants for automatic scheduling modes.

    @cvar none: Invitations are not automatically handled.

    @cvar accept: Accept all invitations.

    @cvar decline: Decline all invitations.

    @cvar acceptIfFree: Accept invitations that do not conflict with a busy
        time slot.  Other invitations are not automatically handled.

    @cvar declineIfBusy: Decline invitations that conflict with a busy time
        slot.  Other invitations are not automatically handled.

    @cvar acceptIfFreeDeclineIfBusy: Accept invitations that do not conflict
        with a busy time slot.  Decline invitations that conflict with a busy
        time slot.  Other invitations are not automatically handled.
    """

    # Each constant carries a human-readable description used for display.
    none = NamedConstant()
    none.description = u"no action"

    accept = NamedConstant()
    accept.description = u"accept"

    decline = NamedConstant()
    decline.description = u"decline"

    acceptIfFree = NamedConstant()
    acceptIfFree.description = u"accept if free"

    declineIfBusy = NamedConstant()
    declineIfBusy.description = u"decline if busy"

    acceptIfFreeDeclineIfBusy = NamedConstant()
    acceptIfFreeDeclineIfBusy.description = u"accept if free, decline if busy"
class RecordType(Names):
    """
    Constants for calendar and contacts directory record types.

    @cvar location: Location record.
        Represents a schedulable location (eg. a meeting room).

    @cvar resource: Resource record.
        Represents a schedulable resource (eg. a projector, conference line,
        etc.).

    @cvar address: Address record.
        Represents a physical address (street address and/or geolocation).
    """

    # These extend the base record types from twext.who.idirectory.
    location = NamedConstant()
    location.description = u"location"

    resource = NamedConstant()
    resource.description = u"resource"

    address = NamedConstant()
    address.description = u"physical address"
class FieldName(Names):
    """
    Constants for calendar and contacts directory record field names.

    Fields as associated with either a single value or an iterable of values.

    @cvar serviceNodeUID: For a calendar and contacts service with multiple
        nodes, this denotes the node that the user's data resides on.
        The associated value must be a L{unicode}.

    @cvar loginAllowed: Determines whether a record can log in.
        The associated value must be a L{bool}.

    @cvar hasCalendars: Determines whether a record has calendar data.
        The associated value must be a L{bool}.

    @cvar hasContacts: Determines whether a record has contact data.
        The associated value must be a L{bool}.

    @cvar autoScheduleMode: Determines the auto-schedule mode for a record.
        The associated value must be a L{NamedConstant}.

    @cvar autoAcceptGroup: Contains the UID for a group record which contains
        members for whom auto-accept will behave as "accept if free", even if
        auto-accept is set to "manual".
        The associated value must be a L{NamedConstant}.
    """

    serviceNodeUID = NamedConstant()
    serviceNodeUID.description = u"service node UID"

    loginAllowed = NamedConstant()
    loginAllowed.description = u"login permitted"
    loginAllowed.valueType = bool

    hasCalendars = NamedConstant()
    hasCalendars.description = u"has calendars"
    hasCalendars.valueType = bool

    hasContacts = NamedConstant()
    hasContacts.description = u"has contacts"
    hasContacts.valueType = bool

    autoScheduleMode = NamedConstant()
    autoScheduleMode.description = u"auto-schedule mode"
    autoScheduleMode.valueType = AutoScheduleMode

    # NOTE(review): the docstring above says autoAcceptGroup holds a
    # NamedConstant, but its valueType mirrors the base uid field's value type
    # (a UID string) -- the docstring looks inaccurate; confirm with callers.
    autoAcceptGroup = NamedConstant()
    autoAcceptGroup.description = u"auto-accept group"
    autoAcceptGroup.valueType = BaseFieldName.valueType(BaseFieldName.uid)

    # For "locations", i.e., scheduled spaces:

    associatedAddress = NamedConstant()
    associatedAddress.description = u"associated address UID"

    capacity = NamedConstant()
    capacity.description = u"room capacity"
    capacity.valueType = int

    floor = NamedConstant()
    floor.description = u"building floor"

    # For "addresses", i.e., non-scheduled areas containing locations:

    abbreviatedName = NamedConstant()
    abbreviatedName.description = u"abbreviated name"

    geographicLocation = NamedConstant()
    geographicLocation.description = u"geographic location URI"

    streetAddress = NamedConstant()
    streetAddress.description = u"street address"
|
apache-2.0
| -5,969,269,903,024,639,000
| 29.605556
| 78
| 0.721002
| false
| 4.250772
| false
| false
| false
|
bparzella/secsgem
|
secsgem/secs/functions/s01f00.py
|
1
|
1407
|
#####################################################################
# s01f00.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Class for stream 01 function 00."""
from secsgem.secs.functions.base import SecsStreamFunction
class SecsS01F00(SecsStreamFunction):
    """
    abort transaction stream 1.

    **Structure**::

        >>> import secsgem.secs
        >>> secsgem.secs.functions.SecsS01F00
        Header only

    **Example**::

        >>> import secsgem.secs
        >>> secsgem.secs.functions.SecsS01F00()
        S1F0 .

    :param value: function has no parameters
    :type value: None
    """

    # SECS-II message identity: stream 1, function 0 (abort transaction).
    _stream = 1
    _function = 0

    # Function 0 messages are header-only; there is no data item.
    _data_format = None

    # An abort may be sent in either direction.
    _to_host = True
    _to_equipment = True

    # Function 0 never expects or requires a reply.
    _has_reply = False
    _is_reply_required = False

    _is_multi_block = False
|
lgpl-2.1
| 1,722,601,378,394,116,000
| 26.057692
| 69
| 0.61194
| false
| 4.1261
| false
| false
| false
|
mangpo/cacheall-proxy-server
|
httpmessage/_headers.py
|
1
|
2137
|
import _setup
import inspect
import pprint
from httpmessage._multidict import MultiDict
def header_case(header_key):
    """Canonicalize a header name, e.g. 'content-length' -> 'Content-Length'."""
    parts = header_key.split("-")
    return "-".join(part.capitalize() for part in parts)
def _key_wrap(func):
    """creates a function where the value of the 'key' argument, if there
    is one, has the function 'header_case' run on it.
    """
    # NOTE: Python 2 only -- func_code/func_name/func_doc were renamed to
    # __code__/__name__/__doc__ in Python 3.
    varnames = func.func_code.co_varnames

    def key_filter(kv):
        # kv is a (parameter-name, value) pair; canonicalize only 'key'.
        name, value = kv
        if name == 'key':
            return header_case(value)
        else:
            return value

    def wrapped(*args):
        # The filter only applies when the call supplies every positional
        # argument, so names can be zipped with values one-to-one.
        if len(args) == len(varnames):
            args = [key_filter(kv) for kv in zip(varnames, args)]
        return func(*args)

    # Preserve the wrapped function's name and docstring (manual functools.wraps).
    wrapped.func_name = func.func_name
    wrapped.func_doc = func.func_doc
    return wrapped
class Headers(MultiDict):
    # At class-creation time, wrap every inherited MultiDict method that has a
    # 'key' parameter so the key is canonicalized via header_case() before use.
    # (Python 2 only: relies on im_func / func_code introspection.)
    for attrname in dir(MultiDict):
        attrvalue = getattr(MultiDict, attrname)
        if inspect.ismethod(attrvalue):
            attrvalue = attrvalue.im_func
        if inspect.isfunction(attrvalue) and \
            'key' in attrvalue.func_code.co_varnames:
            locals()[attrname] = _key_wrap(attrvalue)

    #---------------------------------------------------------------
    def iteritems(self):
        # Yield (name, value) pairs sorted by canonical header name.
        return iter(sorted(super(Headers,self).iteritems()))

    #---------------------------------------------------------------
    def __repr__(self):
        # Indent pprint's multi-line output so the repr stays readable.
        data = pprint.pformat(list(self.iteritems()))
        if '\n' in data:
            data = ''.join([data[0], '\n ', data[1:-1], '\n', data[-1]])
        return '<%s(%s)>' % (
            type(self).__name__, data
        )

    #---------------------------------------------------------------
    def __copy__(self):
        # Shallow copy preserving duplicate header entries.
        dup = Headers()
        for k,v in self.iteritems():
            dup.append_at(k,v)
        return dup
if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2 print statements): differently-cased
    # spellings of a header name address the same canonical entry.
    h = Headers()
    h['foo'] = 'bar'
    h['content-lenGth'] = 5
    print h
    h['CONTENT-length'] = 10
    print h
    del h['foO']
    print h
    h['content-type'] = 'wack wack wackiness'
    h['rover-dookie'] = 'oh yah, lots'
    print h
|
bsd-2-clause
| -1,192,584,434,577,835,800
| 26.766234
| 74
| 0.504913
| false
| 3.979516
| false
| false
| false
|
mikoro/pymazing
|
pymazing/level_loader.py
|
1
|
4241
|
"""Load and generate world mesh data from files."""
# Copyright © 2014 Mikko Ronkainen <firstname@mikkoronkainen.com>
# License: MIT, see the LICENSE file.
from pymazing import color, mesh
# http://en.wikipedia.org/wiki/Truevision_TGA
def generate_blocks_from_tga(file_name):
    """
    Generate block data from a TGA formatted image file - each pixel corresponds to one block.

    :param string file_name: A path to the image file.
    :return: A two dimensional array of colors representing the blocks.
    """
    with open(file_name, "rb") as handle:
        handle.read(1)  # image ID length
        handle.read(1)  # color map type

        # Only uncompressed true-color images (type 2) are supported.
        if handle.read(1) != b"\x02":
            raise Exception("Invalid file format")

        handle.read(5)  # color map specification (first entry, length, depth)
        handle.read(4)  # x-origin and y-origin

        width = int.from_bytes(handle.read(2), byteorder="little")
        height = int.from_bytes(handle.read(2), byteorder="little")
        depth = handle.read(1)[0]

        if width < 1 or height < 1 or depth != 32:
            raise Exception("Invalid file format")

        handle.read(1)  # image descriptor

        grid = [[None] * width for _ in range(height)]

        for row in range(height):
            for column in range(width):
                pixel = handle.read(4)

                if len(pixel) != 4:
                    raise Exception("Invalid file format")

                # TGA stores pixels as BGRA.
                blue, green, red, alpha = pixel

                # Fully transparent pixels leave the cell empty.
                if alpha > 0:
                    grid[row][column] = color.from_int(red, green, blue, alpha)

    return grid
def generate_full_meshes(blocks):
    """
    Generate mesh data from the block data.

    :param blocks: A two dimensional array of colors.
    :return: A list of meshes.
    """
    height = len(blocks)
    width = len(blocks[0])

    # Floor plane, slightly larger than the block grid.
    floor = mesh.create_partial_cube(color.from_int(80, 80, 80), mesh.TOP)
    floor.scale = [width / 2.0 + 2.0, 1.0, height / 2.0 + 2.0]
    floor.position = [width / 2.0, -1.0, -height / 2.0]
    meshes = [floor]

    # One full cube per occupied cell; grid rows extend along -z.
    for row_index, row in enumerate(blocks):
        for col_index, cell_color in enumerate(row):
            if cell_color is None:
                continue
            cube = mesh.create_cube(cell_color)
            cube.scale = [0.5, 0.5, 0.5]
            cube.position[0] = 1.0 * col_index + 0.5
            cube.position[1] = 0.5
            cube.position[2] = -1.0 * row_index - 0.5
            meshes.append(cube)

    return meshes
def generate_partial_meshes(blocks):
    """
    Generate mesh data from the block data - but leave out sides that are not visible.

    :param blocks: A two dimensional array of colors.
    :return: A list of meshes.
    """
    height = len(blocks)
    width = len(blocks[0])

    # Floor plane, slightly larger than the block grid.
    floor = mesh.create_partial_cube(color.from_int(80, 80, 80), mesh.TOP)
    floor.scale = [width / 2.0 + 2.0, 1.0, height / 2.0 + 2.0]
    floor.position = [width / 2.0, -1.0, -height / 2.0]
    meshes = [floor]

    for y, row in enumerate(blocks):
        for x, cell_color in enumerate(row):
            if cell_color is None:
                continue

            # Emit only the faces that border an empty cell or the grid edge;
            # the top face is always visible.
            visible = mesh.TOP
            if x == 0 or blocks[y][x - 1] is None:
                visible |= mesh.LEFT
            if x == (width - 1) or blocks[y][x + 1] is None:
                visible |= mesh.RIGHT
            if y == 0 or blocks[y - 1][x] is None:
                visible |= mesh.FRONT
            if y == (height - 1) or blocks[y + 1][x] is None:
                visible |= mesh.BACK

            cube = mesh.create_partial_cube(cell_color, visible)
            cube.scale = [0.5, 0.5, 0.5]
            cube.position[0] = 1.0 * x + 0.5
            cube.position[1] = 0.5
            cube.position[2] = -1.0 * y - 0.5
            meshes.append(cube)

    return meshes
|
mit
| -3,347,367,948,999,858,000
| 28.724638
| 95
| 0.504245
| false
| 3.611584
| false
| false
| false
|
foone/7gen
|
code/obj.py
|
1
|
9542
|
#!/usr/env python
from error import LoadError
from bmdl import BRenderModel
import pygame
from colorsys import rgb_to_hsv,hsv_to_rgb
import os
COLORWIDTH=8
class WavefrontModel:
def __init__(self,filename=None):
if filename is not None:
self.load(filename)
def load(self,filename):
fop=open(filename,'r')
self.verts=[(0,0,0)] # Start from 1 my ass. IDIOTS.
#self.tris=[]
self.colors={'DefaultColor':(255,255,255)}
self.color_order=['DefaultColor']
self.trispiles={}
current_pile_name='DefaultColor'
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
for i,line in enumerate(fop):
self.linenumber=i
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='v':
x,y,z=[float(x) for x in stuff[1].split(' ')][:3] # ignore w
self.verts.append((x,y,z))
elif command=='f':
pieces=stuff[1].split(' ')
if len(pieces)==3:
verts,tex,normals=self.faceref(pieces)
current_pile.append(verts)
elif len(pieces)==4:
verts,tex,normals=self.faceref(pieces)
current_pile.append((verts[0],verts[1],verts[2]))
current_pile.append((verts[0],verts[2],verts[3]))
elif command=='usemtl':
current_pile_name=stuff[1].strip()
if current_pile_name not in self.trispiles:
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
if current_pile_name not in self.colors:
self.colors[current_pile_name]=(255,255,255) # default to white.
if current_pile_name not in self.color_order:
self.color_order.append(current_pile_name)
elif command=='mtllib':
try:
self.loadMTL(stuff[1])
except IOError:
pass # Couldn't load colors/textures. OH WELL.
def loadMTL(self,filename):
current_name=None
fop=open(filename,'r')
for line in fop:
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='newmtl':
current_name=stuff[1]
elif command=='kd':
if current_name is not None:
r,g,b=[int(float(x)*255.0) for x in stuff[1].strip().split(' ')]
self.colors[current_name]=(r,g,b)
if current_name not in self.color_order:
self.color_order.append(current_name)
def dump(self):
print 'Verts:',len(self.verts)
print 'Tris:',len(self.tris)
def faceref(self,pieces):
verts,tex,normal=[],[],[]
for piece in pieces:
parts=piece.split('/')
if len(parts)>3:
raise LoadError('Too many parts in faceref, line %i' % (self.linenumber))
if len(parts)==0:
raise LoadError('Too few parts in faceref, line %i' % (self.linenumber))
if len(parts)==1:
verts.append(self.vref(int(parts[0])))
tex.append(None)
normal.append(None)
elif len(parts)==2:
verts.append(self.vref(int(parts[0])))
tex.append(None) # TODO: Fix. Create tref?
normal.append(None)
elif len(parts)==3:
verts.append(self.vref(int(parts[0])))
tex.append(None) # TODO: Fix. Create tref?
normal.append(None) # TODO: Fix. Create nref?
return verts,tex,normal
def vref(self,v):
if v<0:
return len(self.verts)+v
else:
return v
def makeBMDL(self,statusfunc=None):
bmdl=BRenderModel()
bmdl.tris_normals=[]
bmdl.filename='<JKL>'
bmdl.normals=True
# for x,y,z in self.verts:
# bmdl.verts.append((x,y,z,0,0))
width=float(len(self.color_order))
for x,color in enumerate(self.color_order):
u=(x+0.5)/width
if color in self.trispiles:
r,g,b=self.colors[color]
else:
r,g,b=(255,0,255) # default to white if we are missing this color.
if statusfunc is not None:
statusfunc('Converting %i verts in %s' % (len(self.trispiles[color]),color))
for v1,v2,v3 in self.trispiles[color]:
x,y,z=self.verts[v1]
a=bmdl.happyVertex(x,y,z,u,0.5)
x,y,z=self.verts[v2]
b=bmdl.happyVertex(x,y,z,u,0.5)
x,y,z=self.verts[v3]
c=bmdl.happyVertex(x,y,z,u,0.5)
bmdl.tris.append((a,b,c))
if statusfunc is not None:
statusstr='%i verts, %i tris' % (len(bmdl.verts),len(bmdl.tris))
statusfunc(statusstr)
return bmdl
def makeTexture(self,palette_surf,enhance_color=True):
size=(len(self.color_order)*COLORWIDTH,COLORWIDTH)
surf=pygame.Surface(size,pygame.SWSURFACE,palette_surf)
surf.set_palette(palette_surf.get_palette())
for x,color in enumerate(self.color_order):
r,g,b=self.colors[color]
if enhance_color:
h,s,v=rgb_to_hsv(r/255.0,g/255.0,b/255.0)
s=min(1.0,s+0.1)
r,g,b=[int(temp*255.0) for temp in hsv_to_rgb(h,s,v)]
nearest=None
ndiff=None
for i,(nr,ng,nb) in enumerate(palette_surf.get_palette()):
rdelta=r-nr
gdelta=g-ng
bdelta=b-nb
diff=rdelta**2 + gdelta**2 + bdelta**2
if nearest is None or diff<ndiff:
ndiff=diff
nearest=i
surf.fill(nearest,(x*COLORWIDTH,0,COLORWIDTH,COLORWIDTH))
return surf
class WavefrontModelTextured:
def __init__(self,filename=None):
if filename is not None:
self.load(filename)
def load(self,filename):
fop=open(filename,'r')
self.verts=[(0,0,0)] # Start from 1 my ass. IDIOTS.
self.texverts=[(0,0,0)]
self.colors={'DefaultColor':(255,255,255)}
self.color_order=['DefaultColor']
self.textures={}
self.trispiles={}
current_pile_name='DefaultColor'
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
for i,line in enumerate(fop):
self.linenumber=i
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='v':
x,y,z=[float(x) for x in stuff[1].split(' ')][:3] # ignore w
self.verts.append((x,y,z))
elif command=='vt':
u,v=[float(x) for x in stuff[1].split(' ')]
self.texverts.append((u,v))
elif command=='usemtl':
current_pile_name=stuff[1].strip()
if current_pile_name not in self.trispiles:
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
if current_pile_name not in self.colors:
self.colors[current_pile_name]=(255,255,255) # default to white.
if current_pile_name not in self.color_order:
self.color_order.append(current_pile_name)
elif command=='f':
pieces=stuff[1].split(' ')
if len(pieces)==3:
verts,tex,normals=self.faceref(pieces)
current_pile.append(verts+tex)
elif len(pieces)==4:
verts,tex,normals=self.faceref(pieces)
current_pile.append((verts[0],verts[1],verts[2],tex[0],tex[1],tex[2]))
current_pile.append((verts[0],verts[2],verts[3],tex[0],tex[2],tex[3]))
elif command=='mtllib':
try:
self.loadMTL(stuff[1])
except IOError:
pass # Couldn't load colors/textures. OH WELL.
def loadMTL(self,filename):
current_name=None
fop=open(filename,'r')
for line in fop:
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='newmtl':
current_name=stuff[1]
elif command=='kd':
if current_name is not None:
r,g,b=[int(float(x)*255.0) for x in stuff[1].strip().split(' ')]
self.colors[current_name]=(r,g,b)
if current_name not in self.color_order:
self.color_order.append(current_name)
elif command=='map_kd':
filename=stuff[1]
if not os.path.exists(filename):
raise LoadError('Texture Missing: ' +filename)
self.textures[current_name]=filename
def dump(self):
print 'Verts:',len(self.verts)
print 'Tris:',len(self.tris)
print 'Textures:'
for texname in self.textures:
r,g,b=self.colors[texname]
print ' %s:%s (%i,%i,%i)' % (texname,self.textures[texname],r,g,b)
def faceref(self,pieces):
verts,tex,normal=[],[],[]
for piece in pieces:
parts=piece.split('/')
if len(parts)>3:
raise LoadError('Too many parts in faceref, line %i' % (self.linenumber))
if len(parts)==0:
raise LoadError('Too few parts in faceref, line %i' % (self.linenumber))
if len(parts)==1:
verts.append(self.vref(int(parts[0])))
tex.append(None)
normal.append(None)
elif len(parts)==2:
verts.append(self.vref(int(parts[0])))
tex.append(self.tref(int(parts[1])))
tex.append(None) # TODO: Fix. Create tref?
normal.append(None)
elif len(parts)==3:
verts.append(self.vref(int(parts[0])))
tex.append(self.tref(int(parts[1])))
normal.append(None) # TODO: Fix. Create nref?
return verts,tex,normal
def vref(self,v):
if v<0:
return len(self.verts)+v
else:
return v
def tref(self,t):
if t<0:
return len(self.texterts)+t
else:
return t
def getTextureGroups(self):
out=[]
for key in self.trispiles:
if len(self.trispiles[key])>0:
out.append(key)
return out
def getTextureNames(self):
out={}
for key in self.trispiles:
if len(self.trispiles[key])>0:
out[self.textures[key]]=True
return out.keys()
def makeBMDL(self,pile,statusfunc=None):
bmdl=BRenderModel()
bmdl.tris_normals=[]
bmdl.filename='<JKL>'
bmdl.normals=True
# for x,y,z in self.verts:
# bmdl.verts.append((x,y,z,0,0))
width=float(len(self.color_order))
for pilename in self.trispiles:
if self.textures.has_key(pilename) and self.textures[pilename]==pile:
for v1,v2,v3,t1,t2,t3 in self.trispiles[pilename]:
vs=[]
for vi,ti in ((v1,t1),(v2,t2),(v3,t3)):
x,y,z=self.verts[vi]
u,v=self.texverts[ti]
vs.append(bmdl.happyVertex(x,y,z,u,v))
bmdl.tris.append(vs)
return bmdl
|
gpl-2.0
| -6,269,479,059,025,849,000
| 31.455782
| 80
| 0.646301
| false
| 2.571275
| false
| false
| false
|
jpardobl/naman
|
naman/core/pypelib/persistence/backends/rawfile/RAWFile.py
|
1
|
1316
|
try:
import cPickle as pickle
except:
import pickle
from threading import Lock
from naman.core.pypelib.resolver.Resolver import Resolver
'''
@author: lbergesio,omoya,cbermudo
@organization: i2CAT, OFELIA FP7
RAWFile
Implementes persistence engine to a raw file for RuleTables
'''
class RAWFile():
    """Persistence engine that pickles RuleTable objects to a raw file."""

    # Serializes concurrent save/load calls on the backing file.
    # XXX: lbergesio: Is it necessary to use a mutex here?
    _mutex = Lock()

    @staticmethod
    def save(obj, parser, **kwargs):
        """Pickle `obj` (preferably a clone of it) into kwargs['fileName'].

        `parser` is unused here but is part of the backend interface.
        Raises if 'fileName' is not supplied.
        """
        if "fileName" not in kwargs:
            raise Exception("FileName is required")
        with RAWFile._mutex:
            fileObj = open(kwargs["fileName"], "wb")
            try:
                try:
                    cObj = obj.clone()
                except Exception as e:
                    # Bug fix: fall back to pickling the original object;
                    # previously this path crashed with a NameError because
                    # cObj was left unbound.
                    print("Could not clone original obj %s\n%s" % (str(obj), str(e)))
                    cObj = obj
                pickle.dump(cObj, fileObj)
            finally:
                # Bug fix: close the file even if pickling fails.
                fileObj.close()

    @staticmethod
    def load(tableName, resolverMappings, parser, **kwargs):
        """Unpickle a table from kwargs['fileName'] and rewire its runtime
        state (mutex, resolver mappings), verifying the table name matches.
        """
        with RAWFile._mutex:
            if not kwargs.get("fileName"):
                raise Exception("FileName is required")
            # Bug fix: pickles are binary; "r" breaks on Windows and with
            # binary pickle protocols, so open in "rb".
            fileObj = open(kwargs["fileName"], "rb")
            try:
                table = pickle.load(fileObj)
            finally:
                fileObj.close()
            # Locks and resolvers are not picklable; rebuild them fresh.
            table._mutex = Lock()
            table._mappings = resolverMappings
            table._resolver = Resolver(table._mappings)
            if table.name != tableName:
                raise Exception("Table name mismatch; did you specify the correct file?")
            return table
|
bsd-3-clause
| 2,492,388,815,307,089,400
| 22.5
| 77
| 0.663374
| false
| 3.357143
| false
| false
| false
|
bmya/tkobr-addons
|
tko_l10n_br_point_of_sale/res_company.py
|
1
|
1548
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, fields, _
from openerp.osv import osv
class res_compamy(models.Model):
    # NOTE(review): the class name "res_compamy" looks like a typo for
    # "res_company". Odoo resolves models via _inherit, so the Python class
    # name is inert; left unchanged to avoid touching any external reference.
    _inherit = 'res.company'

    # Average federal tax burden as a percentage in [0, 100];
    # company_dependent makes the value per-company (stored as a property).
    average_federal_tax = fields.Float(
        'Average Federal Tax [%]',
        company_dependent=True,
        help='The average federal tax percentage [0..100]')
    # Average state tax burden as a percentage in [0, 100].
    average_state_tax = fields.Float(
        'Average State Tax Value [%]',
        company_dependent=True,
        help='The average state tax percentage [0..100]')
|
agpl-3.0
| -3,421,115,839,997,290,000
| 38.692308
| 78
| 0.618217
| false
| 4.195122
| false
| false
| false
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractHeyitsmehyupperstranslationsCom.py
|
1
|
1096
|
def extractHeyitsmehyupperstranslationsCom(item):
    '''
    Parser for 'heyitsmehyupperstranslations.com'
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)

    # Skip teaser posts and posts carrying no volume/chapter number at all.
    if "preview" in title.lower() or not (chp or vol):
        return None

    if item['tags'] == ['Uncategorized']:
        # Uncategorized posts are recognized by series-specific title prefixes.
        title_rules = [
            ('TIAOFM Chapcter ', 'This is an Obvious Fraudulent Marriage', 'translated'),
            ('TIAOFM Chapter ', 'This is an Obvious Fraudulent Marriage', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        lowered_title = title.lower()
        for needle, series, tl_type in title_rules:
            if needle.lower() in lowered_title:
                return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)

    # Everything else is matched on the post's tags.
    tag_rules = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)

    return False
|
bsd-3-clause
| -5,802,952,744,336,902,000
| 33.28125
| 105
| 0.643248
| false
| 3.070028
| false
| false
| false
|
kgullikson88/General
|
StellarModel.py
|
1
|
48138
|
__author__ = 'Kevin Gullikson'
import os
import sys
import re
from collections import defaultdict
import warnings
from collections import OrderedDict
import itertools
import FittingUtilities
import logging
from astropy import units
from scipy.interpolate import InterpolatedUnivariateSpline as spline, LinearNDInterpolator, NearestNDInterpolator, \
interp1d
import pandas
import numpy as np
import h5py
import DataStructures
import HelperFunctions
import Broaden
"""
This code provides the GetModelList function.
It is used in GenericSearch.py and SensitivityAnalysis.py
"""
if "darwin" in sys.platform:
modeldir = "/Volumes/DATADRIVE/Stellar_Models/Sorted/Stellar/Vband/"
HDF5_FILE = '/Volumes/DATADRIVE/PhoenixGrid/Search_Grid.hdf5'
elif "linux" in sys.platform:
modeldir = "/media/FreeAgent_Drive/SyntheticSpectra/Sorted/Stellar/Vband/"
HDF5_FILE = '/media/ExtraSpace/PhoenixGrid/Search_Grid.hdf5'
else:
modeldir = raw_input("sys.platform not recognized. Please enter model directory below: ")
if not modeldir.endswith("/"):
modeldir = modeldir + "/"
def GetModelList(type='phoenix',
                 metal=[-0.5, 0, 0.5],
                 logg=[4.5, ],
                 temperature=range(3000, 6900, 100),
                 alpha=[0, 0.2],
                 model_directory=modeldir,
                 hdf5_file=HDF5_FILE):
    """This function searches the model directory for stellar
    models with the appropriate parameters.

    :param type: the type of models to get. Right now, only 'phoenix', 'kurucz', and 'hdf5' are implemented
    :param metal: a list of the metallicities to include
    :param logg: a list of the surface gravity values to include
    :param temperature: a list of the temperatures to include
    :param alpha: a list of [alpha/Fe] values to include (kurucz/hdf5 only)
    :param model_directory: The absolute path to the model directory (only used for type=phoenix or kurucz)
    :param hdf5_file: The absolute path to the HDF5 file with the models (only used for type=hdf5)
    :return: a list of filenames (phoenix/kurucz) or of parameter dicts (hdf5) for the requested models
    :raises NotImplementedError: for any other model type
    """
    # Accept scalars as well as lists for every grid parameter.
    # (The list defaults above are read-only, so the shared-mutable-default
    # pitfall does not apply here.)
    metal = np.atleast_1d(metal)
    logg = np.atleast_1d(logg)
    temperature = np.atleast_1d(temperature)
    alpha = np.atleast_1d(alpha)
    if type.lower() == 'phoenix':
        all_models = sorted([f for f in os.listdir(model_directory) if 'phoenix' in f.lower()])
        chosen_models = []
        for model in all_models:
            Teff, gravity, metallicity = ClassifyModel(model)
            if Teff in temperature and gravity in logg and metallicity in metal:
                chosen_models.append("{:s}{:s}".format(model_directory, model))
    elif type.lower() == "kurucz":
        # BUGFIX: this previously listed the module-level `modeldir` instead of
        # the `model_directory` argument, silently ignoring a caller-supplied
        # directory in the kurucz branch.
        all_models = [f for f in os.listdir(model_directory) if f.startswith("t") and f.endswith(".dat.bin.asc")]
        chosen_models = []
        for model in all_models:
            Teff, gravity, metallicity, a = ClassifyModel(model, type='kurucz')
            if Teff in temperature and gravity in logg and metallicity in metal and a in alpha:
                chosen_models.append("{:s}{:s}".format(model_directory, model))
    elif type.lower() == 'hdf5':
        hdf5_int = HDF5Interface(hdf5_file)
        chosen_models = []
        for par in hdf5_int.list_grid_points:
            if par['temp'] in temperature and par['logg'] in logg and par['Z'] in metal and par['alpha'] in alpha:
                chosen_models.append(par)
    else:
        raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))

    return chosen_models
def ClassifyModel(filename, type='phoenix'):
    """Get the effective temperature, log(g), and [Fe/H] of a stellar model from the filename.

    :param filename: the model filename (may include a leading path)
    :param type: 'phoenix' or 'kurucz'
    :return: (temp, gravity, metallicity) for phoenix models;
             (temp, gravity, metallicity, alpha) for kurucz models
    :raises TypeError: if filename is not a string
    :raises NotImplementedError: for any other model type
    """
    # `basestring` only exists on Python 2; fall back to `str` so the same
    # check also works on Python 3.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if not isinstance(filename, string_types):
        raise TypeError("Filename must be a string!")

    if type.lower() == 'phoenix':
        # e.g. 'lte030-4.50-0.5.PHOENIX...' -> ['lte030', '4.50', '0.5.PHOENIX...']
        # (raw string so the escaped '+' is unambiguous to the regex engine)
        segments = re.split(r"-|\+", filename.split("/")[-1])
        temp = float(segments[0].split("lte")[-1]) * 100
        gravity = float(segments[1])
        metallicity = float(segments[2][:3])
        # A '+' in the filename marks positive [Fe/H]; otherwise negative.
        if not "+" in filename and metallicity > 0:
            metallicity *= -1
        return temp, gravity, metallicity
    elif type.lower() == 'kurucz':
        # Fixed-position filename layout; sign characters 'm'/'p' sit at
        # indices 13 (metallicity) and 17 (alpha).
        fname = filename.split('/')[-1]
        temp = float(fname[1:6])
        gravity = float(fname[8:12])
        metallicity = float(fname[14:16]) / 10.0
        alpha = float(fname[18:20]) / 10.0
        if fname[13] == "m":
            metallicity *= -1
        if fname[17] == "m":
            alpha *= -1
        return temp, gravity, metallicity, alpha
    else:
        # (the unreachable trailing `return` that followed this raise in the
        # original has been removed)
        raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))
def MakeModelDicts(model_list, vsini_values=[10, 20, 30, 40], type='phoenix',
vac2air=True, logspace=False, hdf5_file=HDF5_FILE, get_T_sens=False):
"""This will take a list of models, and output two dictionaries that are
used by GenericSearch.py and Sensitivity.py
:param model_list: A list of model filenames
:param vsini_values: a list of vsini values to broaden the spectrum by (we do that later!)
:param type: the type of models. Currently, phoenix, kurucz, and hdf5 are implemented
:param vac2air: If true, assumes the model is in vacuum wavelengths and converts to air
:param logspace: If true, it will rebin the data to a constant log-spacing
:param hdf5_file: The absolute path to the HDF5 file with the models. Only used if type=hdf5
:param get_T_sens: Boolean flag for getting the temperature sensitivity.
If true, it finds the derivative of each pixel dF/dT
:return: A dictionary containing the model with keys of temperature, gravity, metallicity, and vsini,
and another one with a processed flag with the same keys
"""
vsini_values = np.atleast_1d(vsini_values)
if type.lower() == 'phoenix':
modeldict = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint))))
processed = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool))))
for fname in model_list:
temp, gravity, metallicity = ClassifyModel(fname)
print "Reading in file %s" % fname
data = pandas.read_csv(fname,
header=None,
names=["wave", "flux"],
usecols=(0, 1),
sep=' ',
skipinitialspace=True)
x, y = data['wave'].values, data['flux'].values
# x, y = np.loadtxt(fname, usecols=(0, 1), unpack=True)
if vac2air:
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=10 ** y)
if logspace:
xgrid = np.logspace(np.log(model.x[0]), np.log(model.x[-1]), model.size(), base=np.e)
model = FittingUtilities.RebinData(model, xgrid)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][vsini] = model
processed[temp][gravity][metallicity][vsini] = False
elif type.lower() == 'kurucz':
modeldict = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
processed = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool)))))
for fname in model_list:
temp, gravity, metallicity, a = ClassifyModel(fname)
print "Reading in file %s" % fname
data = pandas.read_csv(fname,
header=None,
names=["wave", "flux"],
usecols=(0, 1),
sep=' ',
skipinitialspace=True)
x, y = data['wave'].values, data['flux'].values
# x, y = np.loadtxt(fname, usecols=(0, 1), unpack=True)
if vac2air:
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=10 ** y)
if logspace:
xgrid = np.logspace(np.log(model.x[0]), np.log(model.x[-1]), model.size(), base=np.e)
model = FittingUtilities.RebinData(model, xgrid)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][a][vsini] = model
processed[temp][gravity][metallicity][a][vsini] = False
elif type.lower() == 'hdf5':
hdf5_int = HDF5Interface(hdf5_file)
x = hdf5_int.wl
wave_hdr = hdf5_int.wl_header
if vac2air:
if not wave_hdr['air']:
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
elif wave_hdr['air']:
raise GridError(
'HDF5 grid is in air wavelengths, but you requested vacuum wavelengths. You need a new grid!')
modeldict = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
processed = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool)))))
for pars in model_list:
temp, gravity, metallicity, a = pars['temp'], pars['logg'], pars['Z'], pars['alpha']
y = hdf5_int.load_flux(pars)
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=y)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][a][vsini] = model
processed[temp][gravity][metallicity][a][vsini] = False
else:
raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))
if get_T_sens:
# Get the temperature sensitivity. Warning! This assumes the wavelength grid is the same in all models.
sensitivity = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
Tvals = sorted(modeldict.keys())
for i, T in enumerate(Tvals):
gvals = sorted(modeldict[T].keys())
for gravity in gvals:
metal_vals = sorted(modeldict[T][gravity].keys())
for metal in metal_vals:
alpha_vals = sorted(modeldict[T][gravity][metal].keys())
for alpha in alpha_vals:
# get the temperature just under this one
lower, l_idx = get_model(modeldict, Tvals, i, gravity, metal, vsini_values[0], alpha, mode='lower')
upper, u_idx = get_model(modeldict, Tvals, i, gravity, metal, vsini_values[0], alpha, mode='upper')
T_low = Tvals[l_idx]
T_high = Tvals[u_idx]
slope = (upper.y - lower.y) / (T_high - T_low)
for vsini in vsini_values:
sensitivity[T][gravity][metal][alpha][vsini] = slope**2
return modeldict, processed, sensitivity
return modeldict, processed
def get_model(mdict, Tvals, i, logg, metal, vsini, alpha=None, mode='same'):
    """
    Get the model with the requested parameters.

    :param mdict: nested model dictionary, keyed [T][logg][metal][(alpha)][vsini]
    :param Tvals: sorted list of the temperatures present in mdict
    :param i: index into Tvals of the requested temperature
    :param alpha: [alpha/Fe] value; if None, mdict is assumed to have no alpha level
    :param mode: How to get the model. valid options:
        - 'same': Get the model with the exact requested parameters.
        - 'lower': Get model with the exact values of everything except temperature (find the next lowest temperature)
        - 'upper': Get model with the exact values of everything except temperature (find the next highest temperature)
    :return: the model for mode='same'; a (model, index) tuple for 'lower'/'upper'
    """
    if mode == 'same':
        if alpha is None:
            # BUGFIX: the original evaluated this lookup but never returned
            # it, so mode='same' with alpha=None always yielded None.
            return mdict[Tvals[i]][logg][metal][vsini]
        else:
            return mdict[Tvals[i]][logg][metal][alpha][vsini]
    elif mode == 'lower':
        # Walk downward from i-1 until a grid point exists
        # (a missing point surfaces as KeyError on a plain dict).
        idx = i - 1
        idx = max(0, idx)
        # BUGFIX: clamp to the last valid index (len-1), not len, matching
        # the 'upper' branch.
        idx = min(len(Tvals) - 1, idx)
        while True:
            if idx == 0 or idx == len(Tvals) - 1:
                return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
            try:
                return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
            except KeyError:
                idx -= 1
    elif mode == 'upper':
        # Same walk, upward from i+1.
        idx = i + 1
        idx = max(0, idx)
        idx = min(len(Tvals) - 1, idx)
        while True:
            if idx == 0 or idx == len(Tvals) - 1:
                return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
            try:
                return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
            except KeyError:
                idx += 1
class HDF5Interface:
    '''
    Connect to an HDF5 file that stores spectra. Stolen shamelessly from Ian Czekala's Starfish code
    '''

    def __init__(self, filename, ranges={"temp": (0, np.inf),
                                         "logg": (-np.inf, np.inf),
                                         "Z": (-np.inf, np.inf),
                                         "alpha": (-np.inf, np.inf)}):
        '''
        :param filename: the name of the HDF5 file
        :type param: string
        :param ranges: optionally select a smaller part of the grid to use.
        :type ranges: dict

        .. note:: `ranges` is a mutable default, but it is only read here,
           never mutated, so the shared-default pitfall does not apply.
        '''
        self.filename = filename
        # Template for the HDF5 dataset name of each flux spectrum.
        self.flux_name = "t{temp:.0f}g{logg:.1f}z{Z:.1f}a{alpha:.1f}"
        grid_parameters = ("temp", "logg", "Z", "alpha")  # Allowed grid parameters
        grid_set = frozenset(grid_parameters)

        with h5py.File(self.filename, "r") as hdf5:
            self.wl = hdf5["wl"][:]
            self.wl_header = dict(hdf5["wl"].attrs.items())

            grid_points = []
            for key in hdf5["flux"].keys():
                # assemble all temp, logg, Z, alpha keywords into a giant list
                hdr = hdf5['flux'][key].attrs
                params = {k: hdr[k] for k in grid_set}

                # Check whether the parameters are within the range
                # (for/else: the else runs only if no parameter broke out).
                for kk, vv in params.items():
                    low, high = ranges[kk]
                    if (vv < low) or (vv > high):
                        break
                else:
                    # If all parameters have passed successfully through the ranges, allow.
                    grid_points.append(params)
            self.list_grid_points = grid_points

        # determine the bounding regions of the grid by sorting the grid_points
        temp, logg, Z, alpha = [], [], [], []
        for param in self.list_grid_points:
            temp.append(param['temp'])
            logg.append(param['logg'])
            Z.append(param['Z'])
            alpha.append(param['alpha'])

        # Min/max of every parameter actually present in the (filtered) grid.
        self.bounds = {"temp": (min(temp), max(temp)),
                       "logg": (min(logg), max(logg)),
                       "Z": (min(Z), max(Z)),
                       "alpha": (min(alpha), max(alpha))}

        # Sorted unique values per parameter, used by IndexInterpolator.
        self.points = {"temp": np.unique(temp),
                       "logg": np.unique(logg),
                       "Z": np.unique(Z),
                       "alpha": np.unique(alpha)}

        self.ind = None  # Overwritten by other methods using this as part of a ModelInterpolator

    def load_flux(self, parameters):
        '''
        Load just the flux from the grid, with possibly an index truncation.

        :param parameters: the stellar parameters
        :type parameters: dict

        :raises GridError: if the spectrum is not found in the HDF5 file
            (the underlying KeyError is wrapped).

        :returns: flux array
        '''
        key = self.flux_name.format(**parameters)
        with h5py.File(self.filename, "r") as hdf5:
            try:
                if self.ind is not None:
                    # Truncate to the wavelength chunk selected by an Interpolator.
                    fl = hdf5['flux'][key][self.ind[0]:self.ind[1]]
                else:
                    fl = hdf5['flux'][key][:]
            except KeyError as e:
                raise GridError(e)

        # Note: will raise a KeyError if the file is not found.
        return fl
# Grid parameters recognized by the HDF5 spectral grid / Interpolator.
grid_parameters = ("temp", "logg", "Z", "alpha")  # Allowed grid parameters
grid_set = frozenset(grid_parameters)

# Default values for every stellar / nuisance parameter; the Interpolator
# falls back to var_default["alpha"] when no alpha is requested.
var_default = {"temp": 5800, "logg": 4.5, "Z": 0.0, "alpha": 0.0,
               "vsini": 0.0, "FWHM": 0.0, "vz": 0.0, "Av": 0.0, "logOmega": 0.0}
class IndexInterpolator:
    '''
    Object to return fractional distance between grid points of a single grid variable.

    :param parameter_list: list of parameter values
    :type parameter_list: 1-D list
    '''

    def __init__(self, parameter_list):
        # Sort and de-duplicate so index i maps monotonically to a value.
        self.parameter_list = np.unique(parameter_list)
        # Linear map from parameter value -> fractional index into the list.
        self.index_interpolator = interp1d(self.parameter_list, np.arange(len(self.parameter_list)), kind='linear')

    def __call__(self, value):
        '''
        Evaluate the interpolator at a parameter.

        :param value:
        :type value: float
        :raises InterpolationError: if *value* is out of bounds.
        :returns: ((low_val, high_val), (frac_low, frac_high)), the lower and higher bounding points in the grid
        and the fractional distance (0 - 1) between them and the value.
        '''
        try:
            index = self.index_interpolator(value)
        except ValueError as e:
            # interp1d raises ValueError outside its x-range by default.
            raise InterpolationError("Requested value {} is out of bounds. {}".format(value, e))
        # BUGFIX: cast the bracketing indices to int — np.floor/np.ceil return
        # floats, and modern NumPy refuses non-integer array indices.
        high = int(np.ceil(index))
        low = int(np.floor(index))
        frac_index = index - low
        return ((self.parameter_list[low], self.parameter_list[high]), ((1 - frac_index), frac_index))
class Interpolator:
    '''
    Quickly and efficiently interpolate a synthetic spectrum for use in an MCMC simulation. Caches spectra for
    easier memory load.

    :param interface: :obj:`HDF5Interface` (recommended) or :obj:`RawGridInterface` to load spectra
    :param DataSpectrum: data spectrum that you are trying to fit. Used for truncating the synthetic spectra to the relevant region for speed.
    :type DataSpectrum: :obj:`spectrum.DataSpectrum`
    :param cache_max: maximum number of spectra to hold in cache
    :type cache_max: int
    :param cache_dump: how many spectra to purge from the cache once :attr:`cache_max` is reached
    :type cache_dump: int
    :param trilinear: Should this interpolate in temp, logg, and [Fe/H] AND [alpha/Fe], or just the first three parameters.
    :type trilinear: bool

    Setting :attr:`trilinear` to **True** is useful for when you want to do a run with [Fe/H] > 0.0
    '''

    def __init__(self, interface, DataSpectrum, cache_max=256, cache_dump=64, trilinear=False, log=True):
        '''
        Param log decides how to chunk up the returned spectrum. If we are using a pre-instrument convolved grid,
        then we want to use log=True.

        If we are using the raw synthetic grid, then we want to use log=False.
        '''
        self.interface = interface
        self.DataSpectrum = DataSpectrum

        # If alpha only includes one value, then do trilinear interpolation
        (alow, ahigh) = self.interface.bounds['alpha']
        if (alow == ahigh) or trilinear:
            self.parameters = grid_set - set(("alpha",))
        else:
            self.parameters = grid_set

        self.wl = self.interface.wl
        self.wl_dict = self.interface.wl_header
        if log:
            self._determine_chunk_log()
        else:
            self._determine_chunk()

        self.setup_index_interpolators()
        # OrderedDict so popitem(False) evicts the oldest entry (FIFO).
        self.cache = OrderedDict([])
        self.cache_max = cache_max
        self.cache_dump = cache_dump  # how many to clear once the maximum cache has been reached

    def _determine_chunk_log(self, tol=50):
        '''
        Using the DataSpectrum, determine the minimum chunksize that we can use and then truncate the synthetic
        wavelength grid and the returned spectra.

        Assumes HDF5Interface is LogLambda spaced, because otherwise you shouldn't need a grid with 2^n points,
        because you would need to interpolate in wl space after this anyway.
        '''
        wave_grid = self.interface.wl
        wl_min, wl_max = np.min(self.DataSpectrum.wls) - tol, np.max(self.DataSpectrum.wls) + tol
        # Length of the raw synthetic spectrum
        len_wg = len(wave_grid)
        # ind_wg = np.arange(len_wg) #Labels of pixels
        # Length of the data
        len_data = np.sum(
            (self.wl > wl_min - tol) & (self.wl < wl_max + tol))  # How much of the synthetic spectrum do we need?

        # Find the smallest length synthetic spectrum that is a power of 2 in length and larger than the data spectrum
        chunk = len_wg
        self.interface.ind = (0, chunk)  # Set to be the full spectrum

        while chunk > len_data:
            if chunk / 2 > len_data:
                chunk = chunk // 2
            else:
                break

        # NOTE(review): `np.int` was removed in NumPy >= 1.24; this assert
        # will raise AttributeError there — confirm the pinned NumPy version.
        assert type(chunk) == np.int, "Chunk is no longer integer!. Chunk is {}".format(chunk)

        if chunk < len_wg:
            # Now that we have determined the length of the chunk of the synthetic spectrum, determine indices
            # that straddle the data spectrum.

            # What index corresponds to the wl at the center of the data spectrum?
            median_wl = np.median(self.DataSpectrum.wls)
            median_ind = (np.abs(wave_grid - median_wl)).argmin()

            # Take the chunk that straddles either side.
            ind = [median_ind - chunk // 2, median_ind + chunk // 2]
            if ind[0] < 0:
                # Shift the window right so it starts at pixel 0.
                ind[1] -= ind[0]
                ind[0] = 0
            elif ind[1] >= len_wg:
                # Shift the window left so it ends inside the grid.
                ind[0] -= (ind[1] - len_wg - 1)
                ind[1] -= (ind[1] - len_wg - 1)
            ind = tuple(ind)

            self.wl = self.wl[ind[0]:ind[1]]
            assert min(self.wl) < wl_min and max(self.wl) > wl_max, "ModelInterpolator chunking ({:.2f}, {:.2f}) " \
                "didn't encapsulate full DataSpectrum range ({:.2f}, {:.2f}).".format(
                min(self.wl),
                max(self.wl), wl_min, wl_max)

            self.interface.ind = ind
        print("Determine Chunk Log: Wl is {}".format(len(self.wl)))

    def _determine_chunk(self):
        '''
        Using the DataSpectrum, set the bounds of the interpolator to +/- 50 Ang
        '''
        wave_grid = self.interface.wl
        wl_min, wl_max = np.min(self.DataSpectrum.wls), np.max(self.DataSpectrum.wls)

        ind_low = (np.abs(wave_grid - (wl_min - 50.))).argmin()
        ind_high = (np.abs(wave_grid - (wl_max + 50.))).argmin()

        self.wl = self.wl[ind_low:ind_high]

        assert min(self.wl) < wl_min and max(self.wl) > wl_max, "ModelInterpolator chunking ({:.2f}, {:.2f}) " \
            "didn't encapsulate full DataSpectrum range ({:.2f}, {:.2f}).".format(
            min(self.wl),
            max(self.wl), wl_min, wl_max)

        self.interface.ind = (ind_low, ind_high)
        print("Wl is {}".format(len(self.wl)))

    def __call__(self, parameters):
        '''
        Interpolate a spectrum

        :param parameters: stellar parameters
        :type parameters: dict

        Automatically pops :attr:`cache_dump` items from cache if full.
        '''
        if len(self.cache) > self.cache_max:
            # Evict the `cache_dump` oldest entries (FIFO order).
            [self.cache.popitem(False) for i in range(self.cache_dump)]
            # NOTE(review): cache_counter is written here but never read or
            # initialized elsewhere in this class — possibly vestigial.
            self.cache_counter = 0
        try:
            return self.interpolate(parameters)
        except:
            # NOTE(review): bare except silently masks ALL interpolation
            # failures and returns a flat spectrum — confirm intended.
            logging.warning('Warning! Interpolation error found! Returning ones array!')
            return np.ones_like(self.wl)

    def setup_index_interpolators(self):
        # create an interpolator between grid points indices. Given a temp, produce fractional index between two points
        self.index_interpolators = {key: IndexInterpolator(self.interface.points[key]) for key in self.parameters}

        lenF = self.interface.ind[1] - self.interface.ind[0]
        self.fluxes = np.empty((2 ** len(self.parameters), lenF))  # 8 rows, for temp, logg, Z

    def interpolate(self, parameters):
        '''
        Interpolate a spectrum without clearing cache. Recommended to use :meth:`__call__` instead.

        :param parameters: stellar parameters
        :type parameters: dict
        :raises InterpolationError: if parameters are out of bounds.
        '''
        # Here it really would be nice to return things in a predictable order
        # (temp, logg, Z)
        odict = OrderedDict()
        for key in ("temp", "logg", "Z"):
            odict[key] = parameters[key]

        try:
            edges = OrderedDict()
            for key, value in odict.items():
                edges[key] = self.index_interpolators[key](value)
        except InterpolationError as e:
            raise InterpolationError("Parameters {} are out of bounds. {}".format(parameters, e))

        # Edges is a dictionary of {"temp": ((6000, 6100), (0.2, 0.8)), "logg": (())..}
        names = [key for key in edges.keys()]  # list of ["temp", "logg", "Z"],
        params = [edges[key][0] for key in names]  # [(6000, 6100), (4.0, 4.5), ...]
        weights = [edges[key][1] for key in names]  # [(0.2, 0.8), (0.4, 0.6), ...]

        param_combos = itertools.product(*params)  # Selects all the possible combinations of parameters
        # [(6000, 4.0, 0.0), (6100, 4.0, 0.0), (6000, 4.5, 0.0), ...]
        weight_combos = itertools.product(*weights)
        # [(0.2, 0.4, 1.0), (0.8, 0.4, 1.0), ...]

        parameter_list = [dict(zip(names, param)) for param in param_combos]
        if "alpha" not in parameters.keys():
            # Fill in the default alpha for grids keyed on all four parameters.
            [param.update({"alpha": var_default["alpha"]}) for param in parameter_list]
        key_list = [self.interface.flux_name.format(**param) for param in parameter_list]
        weight_list = np.array([np.prod(weight) for weight in weight_combos])

        # The multilinear weights over all corner combinations must sum to 1.
        assert np.allclose(np.sum(weight_list), np.array(1.0)), "Sum of weights must equal 1, {}".format(
            np.sum(weight_list))

        # Assemble flux vector from cache
        for i, param in enumerate(parameter_list):
            key = key_list[i]
            if key not in self.cache.keys():
                try:
                    fl = self.interface.load_flux(param)  # This method allows loading only the relevant region from HDF5
                except KeyError as e:
                    raise InterpolationError("Parameters {} not in master HDF5 grid. {}".format(param, e))
                self.cache[key] = fl
                # Note: if we are dealing with a ragged grid, a GridError will be raised here because a Z=+1, alpha!=0 spectrum can't be found.

            # Weighted sum of the corner spectra = multilinear interpolation.
            self.fluxes[i, :] = self.cache[key] * weight_list[i]

        return np.sum(self.fluxes, axis=0)
class DataSpectrum:
    '''
    Object to manipulate the data spectrum.

    :param wls: wavelength (in AA)
    :type wls: 1D or 2D np.array
    :param fls: flux (in f_lam)
    :type fls: 1D or 2D np.array
    :param sigmas: Poisson noise (in f_lam)
    :type sigmas: 1D or 2D np.array
    :param masks: Mask to blot out bad pixels or emission regions.
    :type masks: 1D or 2D np.array of boolean values

    If the wl, fl, are provided as 1D arrays (say for a single order), they will be converted to 2D arrays with length 1
    in the 0-axis.

    .. note::

       For now, the DataSpectrum wls, fls, sigmas, and masks must be a rectangular grid. No ragged Echelle orders allowed.
    '''

    def __init__(self, wls, fls, sigmas, masks=None, orders='all', name=None):
        # Promote 1D inputs to 2D so a single order behaves like a 1-row grid.
        self.wls = np.atleast_2d(wls)
        self.fls = np.atleast_2d(fls)
        self.sigmas = np.atleast_2d(sigmas)
        if masks is None:
            # Default mask keeps every pixel (byte array of ones).
            self.masks = np.ones_like(self.wls, dtype='b')
        else:
            self.masks = np.atleast_2d(masks)
        self.shape = self.wls.shape

        # All four arrays must form the same rectangular grid.
        for arr, label in ((self.fls, "flux"), (self.sigmas, "sigma"), (self.masks, "mask")):
            assert arr.shape == self.shape, "{} array incompatible shape.".format(label)

        if orders == 'all':
            self.orders = np.arange(self.shape[0])
        else:
            # Keep only the requested echelle orders.
            # can either be a numpy array or a list
            orders = np.array(orders)  # just to make sure
            self.wls = self.wls[orders]
            self.fls = self.fls[orders]
            self.sigmas = self.sigmas[orders]
            self.masks = self.masks[orders]
            self.shape = self.wls.shape
            self.orders = orders

        self.name = name
class GridError(Exception):
    '''
    Raised when a spectrum cannot be found in the grid.
    '''

    def __init__(self, msg):
        # Message is also kept on .msg for callers that read it directly;
        # str(e) still works because BaseException.__new__ records the args.
        self.msg = msg
class InterpolationError(Exception):
    '''
    Raised when the :obj:`Interpolator` or :obj:`IndexInterpolator` cannot properly interpolate a spectrum,
    usually grid bounds.
    '''

    def __init__(self, msg):
        # Message is also kept on .msg for callers that read it directly;
        # str(e) still works because BaseException.__new__ records the args.
        self.msg = msg
class KuruczGetter():
    """Read a directory (or directories) of Kurucz models and interpolate
    spectra at arbitrary T/logg/[Fe/H]/[alpha/Fe] within the grid."""

    def __init__(self, modeldir, rebin=True, T_min=7000, T_max=9000, logg_min=3.5, logg_max=4.5, metal_min=-0.5,
                 metal_max=0.5, alpha_min=0.0, alpha_max=0.4, wavemin=0, wavemax=np.inf, debug=False):
        """
        This class will read in a directory with Kurucz models

        The associated methods can be used to interpolate a model at any
        temperature, gravity, metallicity, and [alpha/Fe] value that
        falls within the grid

        modeldir: The directory where the models are stored. Can be a list of model directories too!
        rebin: If True, it will rebin the models to a constant x-spacing
        other args: The minimum and maximum values for the parameters to search.
        You need to keep this as small as possible to avoid memory issues!
        The whole grid would take about 36 GB of RAM!
        """
        self.rebin = rebin
        self.debug = debug

        # First, read in the grid
        if HelperFunctions.IsListlike(modeldir):
            # There are several directories to combine
            Tvals = []
            loggvals = []
            metalvals = []
            alphavals = []
            for i, md in enumerate(modeldir):
                if i == 0:
                    # The first directory defines the master wavelength axis
                    # (self.xaxis, set inside read_grid).
                    T, G, Z, A, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max, logg_min=logg_min,
                                                   logg_max=logg_max, metal_min=metal_min, metal_max=metal_max,
                                                   alpha_min=alpha_min, alpha_max=alpha_max, wavemin=wavemin,
                                                   wavemax=wavemax,
                                                   xaxis=None)
                    spectra = np.array(S)
                else:
                    # Subsequent directories are rebinned onto the master axis.
                    T, G, Z, A, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max, logg_min=logg_min,
                                                   logg_max=logg_max, metal_min=metal_min, metal_max=metal_max,
                                                   alpha_min=alpha_min, alpha_max=alpha_max, wavemin=wavemin,
                                                   wavemax=wavemax,
                                                   xaxis=self.xaxis)
                    S = np.array(S)
                    spectra = np.vstack((spectra, S))
                Tvals = np.hstack((Tvals, T))
                loggvals = np.hstack((loggvals, G))
                metalvals = np.hstack((metalvals, Z))
                alphavals = np.hstack((alphavals, A))
        else:
            Tvals, loggvals, metalvals, alphavals, spectra = self.read_grid(modeldir,
                                                                            rebin=rebin,
                                                                            T_min=T_min,
                                                                            T_max=T_max,
                                                                            logg_min=logg_min,
                                                                            logg_max=logg_max,
                                                                            metal_min=metal_min,
                                                                            metal_max=metal_max,
                                                                            alpha_min=alpha_min,
                                                                            alpha_max=alpha_max,
                                                                            wavemin=wavemin,
                                                                            wavemax=wavemax,
                                                                            xaxis=None)

        # Check if there are actually two different values of alpha/Fe
        alpha_varies = True if max(alphavals) - min(alphavals) > 0.1 else False

        # Scale the variables so they all have about the same range
        # (each scale is (midpoint, span); values map to roughly [-0.5, 0.5]).
        self.T_scale = ((max(Tvals) + min(Tvals)) / 2.0, max(Tvals) - min(Tvals))
        self.metal_scale = ((max(metalvals) + min(metalvals)) / 2.0, max(metalvals) - min(metalvals))
        self.logg_scale = ((max(loggvals) + min(loggvals)) / 2.0, max(loggvals) - min(loggvals))
        if alpha_varies:
            self.alpha_scale = ((max(alphavals) + min(alphavals)) / 2.0, max(alphavals) - min(alphavals))
        Tvals = (np.array(Tvals) - self.T_scale[0]) / self.T_scale[1]
        loggvals = (np.array(loggvals) - self.logg_scale[0]) / self.logg_scale[1]
        metalvals = (np.array(metalvals) - self.metal_scale[0]) / self.metal_scale[1]
        if alpha_varies:
            alphavals = (np.array(alphavals) - self.alpha_scale[0]) / self.alpha_scale[1]
        print self.T_scale
        print self.metal_scale
        print self.logg_scale
        if alpha_varies:
            print self.alpha_scale

        # Make the grid and interpolator instances
        if alpha_varies:
            self.grid = np.array((Tvals, loggvals, metalvals, alphavals)).T
        else:
            self.grid = np.array((Tvals, loggvals, metalvals)).T
        self.spectra = np.array(spectra)
        self.interpolator = LinearNDInterpolator(self.grid, self.spectra)  # , rescale=True)
        self.NN_interpolator = NearestNDInterpolator(self.grid, self.spectra)  # , rescale=True)
        self.alpha_varies = alpha_varies

    def read_grid(self, modeldir, rebin=True, T_min=7000, T_max=9000, logg_min=3.5, logg_max=4.5, metal_min=-0.5,
                  metal_max=0.5, alpha_min=0.0, alpha_max=0.4, wavemin=0, wavemax=np.inf, xaxis=None):
        """Read every model file in `modeldir` whose parameters fall inside
        the requested ranges; returns parallel lists of parameters plus the
        spectra. Sets self.xaxis from the first kept file (or `xaxis`)."""
        Tvals = []
        loggvals = []
        metalvals = []
        alphavals = []
        spectra = []
        firstkeeper = True
        modelfiles = [f for f in os.listdir(modeldir) if f.startswith("t") and f.endswith(".dat.bin.asc")]
        for i, fname in enumerate(modelfiles):
            # Fixed-position filename parsing (same layout as ClassifyModel's
            # kurucz branch); 'm' at index 13/17 marks a negative sign.
            T = float(fname[1:6])
            logg = float(fname[8:12])
            metal = float(fname[14:16]) / 10.0
            alpha = float(fname[18:20]) / 10.0
            if fname[13] == "m":
                metal *= -1
            if fname[17] == "m":
                alpha *= -1

            # Read in and save file if it falls in the correct parameter range
            if (T_min <= T <= T_max and
                    logg_min <= logg <= logg_max and
                    metal_min <= metal <= metal_max and
                    alpha_min <= alpha <= alpha_max):
                if self.debug:
                    print "Reading in file {:s}".format(fname)
                data = pandas.read_csv("{:s}/{:s}".format(modeldir, fname),
                                       header=None,
                                       names=["wave", "norm"],
                                       usecols=(0, 3),
                                       sep=' ',
                                       skipinitialspace=True)
                x, y = data['wave'].values, data['norm'].values
                # x, y = np.loadtxt("{:s}/{:s}".format(modeldir, fname), usecols=(0, 3), unpack=True)
                x *= units.angstrom.to(units.nm)
                y[np.isnan(y)] = 0.0

                # Restrict to the requested wavelength window.
                left = np.searchsorted(x, wavemin)
                right = np.searchsorted(x, wavemax)
                x = x[left:right]
                y = y[left:right]

                if rebin:
                    # Rebin onto a log-spaced axis (the first file's axis
                    # becomes the master grid for all later files).
                    if firstkeeper:
                        xgrid = np.logspace(np.log10(x[0]), np.log10(x[-1]), x.size)
                    else:
                        xgrid = self.xaxis
                    fcn = spline(x, y)
                    x = xgrid
                    y = fcn(xgrid)

                if firstkeeper:
                    self.xaxis = x if xaxis is None else xaxis
                    firstkeeper = False
                elif np.max(np.abs(self.xaxis - x) > 1e-4):
                    warnings.warn("x-axis for file {:s} is different from the master one! Not saving!".format(fname))
                    continue

                Tvals.append(T)
                loggvals.append(logg)
                metalvals.append(metal)
                alphavals.append(alpha)
                spectra.append(y)

        return Tvals, loggvals, metalvals, alphavals, spectra

    def __call__(self, T, logg, metal, alpha, vsini=0.0, return_xypoint=True, **kwargs):
        """
        Given parameters, return an interpolated spectrum

        If return_xypoint is False, then it will only return
        a numpy.ndarray with the spectrum

        Before interpolating, we will do some error checking to make
        sure the requested values fall within the grid
        """

        # Scale the requested values (same midpoint/span scaling as __init__)
        if self.debug:
            print T, logg, metal, alpha, vsini
        T = (T - self.T_scale[0]) / self.T_scale[1]
        logg = (logg - self.logg_scale[0]) / self.logg_scale[1]
        metal = (metal - self.metal_scale[0]) / self.metal_scale[1]
        if self.alpha_varies:
            alpha = (alpha - self.alpha_scale[0]) / self.alpha_scale[1]

        # Get the minimum and maximum values in the grid
        T_min = min(self.grid[:, 0])
        T_max = max(self.grid[:, 0])
        logg_min = min(self.grid[:, 1])
        logg_max = max(self.grid[:, 1])
        metal_min = min(self.grid[:, 2])
        metal_max = max(self.grid[:, 2])
        alpha_min = min(self.grid[:, 3]) if self.alpha_varies else 0.0
        alpha_max = max(self.grid[:, 3]) if self.alpha_varies else 0.0
        if self.alpha_varies:
            input_list = (T, logg, metal, alpha)
        else:
            input_list = (T, logg, metal)

        # Check to make sure the requested values fall within the grid
        if (T_min <= T <= T_max and
                logg_min <= logg <= logg_max and
                metal_min <= metal <= metal_max and
                (not self.alpha_varies or alpha_min <= alpha <= alpha_max)):
            y = self.interpolator(input_list)
        else:
            # Out of bounds: fall back to nearest-neighbor interpolation.
            if self.debug:
                warnings.warn("The requested parameters fall outside the model grid. Results may be unreliable!")
                # print T, T_min, T_max
                # print logg, logg_min, logg_max
                # print metal, metal_min, metal_max
                # print alpha, alpha_min, alpha_max
            y = self.NN_interpolator(input_list)

        # Test to make sure the result is valid. If the requested point is
        # outside the Delaunay triangulation, it will return NaN's
        if np.any(np.isnan(y)):
            if self.debug:
                warnings.warn("Found NaNs in the interpolated spectrum! Falling back to Nearest Neighbor")
            y = self.NN_interpolator(input_list)

        model = DataStructures.xypoint(x=self.xaxis, y=y)
        vsini *= units.km.to(units.cm)
        model = Broaden.RotBroad(model, vsini, linear=self.rebin)

        # Return the appropriate object
        if return_xypoint:
            return model
        else:
            return model.y
"""
=======================================================================
=======================================================================
=======================================================================
"""
class PhoenixGetter():
def __init__(self, modeldir, rebin=True, T_min=3000, T_max=6800, metal_min=-0.5,
metal_max=0.5, wavemin=0, wavemax=np.inf, debug=False):
"""
This class will read in a directory with Phoenix models
The associated methods can be used to interpolate a model at any
temperature, and metallicity value that
falls within the grid
modeldir: The directory where the models are stored. Can be a list of model directories too!
rebin: If True, it will rebin the models to a constant x-spacing
other args: The minimum and maximum values for the parameters to search.
You need to keep this as small as possible to avoid memory issues!
"""
self.rebin = rebin
self.debug = debug
# First, read in the grid
if HelperFunctions.IsListlike(modeldir):
# There are several directories to combine
Tvals = []
metalvals = []
for i, md in enumerate(modeldir):
if i == 0:
T, Z, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=None)
spectra = np.array(S)
else:
T, Z, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=self.xaxis)
S = np.array(S)
spectra = np.vstack((spectra, S))
Tvals = np.hstack((Tvals, T))
metalvals = np.hstack((metalvals, Z))
else:
Tvals, metalvals, spectra = self.read_grid(modeldir, rebin=rebin,
T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=None)
# Scale the variables so they all have about the same range
self.T_scale = ((max(Tvals) + min(Tvals)) / 2.0, max(Tvals) - min(Tvals))
self.metal_scale = ((max(metalvals) + min(metalvals)) / 2.0, max(metalvals) - min(metalvals))
Tvals = (np.array(Tvals) - self.T_scale[0]) / self.T_scale[1]
metalvals = (np.array(metalvals) - self.metal_scale[0]) / self.metal_scale[1]
# Make the grid and interpolator instances
self.grid = np.array((Tvals, metalvals)).T
self.spectra = np.array(spectra)
self.interpolator = LinearNDInterpolator(self.grid, self.spectra) # , rescale=True)
self.NN_interpolator = NearestNDInterpolator(self.grid, self.spectra) # , rescale=True)
def read_grid(self, modeldir, rebin=True, T_min=3000, T_max=6800, metal_min=-0.5,
metal_max=0.5, wavemin=0, wavemax=np.inf, xaxis=None, debug=False):
Tvals = []
metalvals = []
spectra = []
firstkeeper = True
modelfiles = [f for f in os.listdir(modeldir) if
f.startswith("lte") and "PHOENIX" in f and f.endswith(".sorted")]
for i, fname in enumerate(modelfiles):
T, logg, metal = ClassifyModel(fname)
# Read in and save file if it falls in the correct parameter range
if (T_min <= T <= T_max and
metal_min <= metal <= metal_max and
logg == 4.5):
if self.debug:
print "Reading in file {:s}".format(fname)
data = pandas.read_csv("{:s}{:s}".format(modeldir, fname),
header=None,
names=["wave", "flux", "continuum"],
usecols=(0, 1, 2),
sep=' ',
skipinitialspace=True)
x, y, c = data['wave'].values, data['flux'].values, data['continuum'].values
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
x *= units.angstrom.to(units.nm)
y = 10 ** y / 10 ** c
left = np.searchsorted(x, wavemin)
right = np.searchsorted(x, wavemax)
x = x[left:right]
y = y[left:right]
if rebin:
if firstkeeper:
xgrid = np.logspace(np.log10(x[0]), np.log10(x[-1]), x.size)
else:
xgrid = self.xaxis
fcn = spline(x, y)
x = xgrid
y = fcn(xgrid)
if firstkeeper:
self.xaxis = x if xaxis is None else xaxis
firstkeeper = False
elif np.max(np.abs(self.xaxis - x) > 1e-4):
warnings.warn("x-axis for file {:s} is different from the master one! Not saving!".format(fname))
continue
Tvals.append(T)
metalvals.append(metal)
spectra.append(y)
return Tvals, metalvals, spectra
def __call__(self, T, metal, vsini=0.0, return_xypoint=True, **kwargs):
"""
Given parameters, return an interpolated spectrum
If return_xypoint is False, then it will only return
a numpy.ndarray with the spectrum
Before interpolating, we will do some error checking to make
sure the requested values fall within the grid
"""
# Scale the requested values
T = (T - self.T_scale[0]) / self.T_scale[1]
metal = (metal - self.metal_scale[0]) / self.metal_scale[1]
# Get the minimum and maximum values in the grid
T_min = min(self.grid[:, 0])
T_max = max(self.grid[:, 0])
metal_min = min(self.grid[:, 1])
metal_max = max(self.grid[:, 1])
input_list = (T, metal)
# Check to make sure the requested values fall within the grid
if (T_min <= T <= T_max and
metal_min <= metal <= metal_max):
y = self.interpolator(input_list)
else:
if self.debug:
warnings.warn("The requested parameters fall outside the model grid. Results may be unreliable!")
print T, T_min, T_max
print metal, metal_min, metal_max
y = self.NN_interpolator(input_list)
# Test to make sure the result is valid. If the requested point is
# outside the Delaunay triangulation, it will return NaN's
if np.any(np.isnan(y)):
if self.debug:
warnings.warn("Found NaNs in the interpolated spectrum! Falling back to Nearest Neighbor")
y = self.NN_interpolator(input_list)
model = DataStructures.xypoint(x=self.xaxis, y=y)
vsini *= units.km.to(units.cm)
model = Broaden.RotBroad(model, vsini, linear=self.rebin)
# Return the appropriate object
if return_xypoint:
return model
else:
return model.y
|
gpl-3.0
| 7,087,829,652,987,181,000
| 42.563801
| 144
| 0.544809
| false
| 3.843353
| false
| false
| false
|
kstilwell/tcex
|
tcex/threat_intelligence/mappings/victim.py
|
1
|
14882
|
"""ThreatConnect TI Victim"""
from .mappings import Mappings
# import local modules for dynamic reference
module = __import__(__name__)
class Victim(Mappings):
    """Unique API calls for Victim API Endpoints."""

    def __init__(self, tcex, **kwargs):
        """Initialize Class properties.

        Args:
            tcex (TcEx): An instantiated instance of TcEx object.
            owner (str, kwargs): The owner for this Victim. Default to default Org when not provided
            name (str, kwargs): [Required for Create] The name for this Victim.
        """
        super().__init__(tcex, 'Victim', 'victims', None, 'victim', None, kwargs.pop('owner', None))
        self.name = None
        # Remaining keyword args become Victim attributes (see add_key_value).
        for arg, value in kwargs.items():
            self.add_key_value(arg, value)

    def _set_unique_id(self, json_response):
        """Set the unique id of the Group."""
        self.unique_id = json_response.get('id', '')

    def add_asset(self, asset_type, body):
        """Add an asset to the Victim.

        Valid asset_type values: email, network, phone, social, web.

        Args:
            asset_type: (str) Either email, network, phone, social, or web.
            body: (dict) the body of the asset being added.

        Return:
            requests.Response: The response from the API call.
        """
        if not self.can_update():
            self._tcex.handle_error(910, [self.type])
        if body is None:
            body = {}
        if asset_type is None:
            self._tcex.handle_error(
                925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
            )
        return self.tc_requests.victim_add_asset(self.unique_id, asset_type, body)

    def add_email_asset(self, address, address_type):
        """Add an email asset to the Victim.

        Args:
            address: (str) The asset address.
            address_type: (str) The asset address_type

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {'address': address, 'addressType': address_type}
        return self.add_asset('EMAIL', asset_data)

    def add_key_value(self, key, value):
        """Add the key-value to the Victim."""
        key = self._metadata_map.get(key, key)
        if key in ['unique_id', 'id']:
            self._unique_id = str(value)
        else:
            self._data[key] = value

    @property
    def _metadata_map(self):
        """Return metadata map for Group objects (python name -> API name)."""
        return {'work_location': 'workLocation'}

    def add_network_asset(self, account, network):
        """Add a network asset to the Victim.

        Args:
            account: (str) The asset account.
            network: (str) The asset network

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {'account': account, 'network': network}
        return self.add_asset('NETWORK', asset_data)

    def add_phone_asset(self, phone_type):
        """Add a phone asset to the Victim.

        Args:
            phone_type: (str) The asset phone type.

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {'phoneType': phone_type}
        return self.add_asset('PHONE', asset_data)

    def add_social_asset(self, account, network):
        """Add a social asset to the Victim.

        Args:
            account: (str) The asset account.
            network: (str) The asset network

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {'account': account, 'network': network}
        return self.add_asset('SOCIAL', asset_data)

    def add_web_asset(self, web_site):
        """Add a web asset to the Victim.

        Args:
            web_site: (str) The asset account.

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {'webSite': web_site}
        return self.add_asset('WEB', asset_data)

    @property
    def as_entity(self):
        """Return the entity representation of the Victim."""
        return {
            'type': 'Victim',
            'value': self.name,
            'id': int(self.unique_id) if self.unique_id else None,
        }

    def assets(self, asset_type=None):
        """Get the assets of a Victim.

        Valid asset_type values: email, network, phone, social, web.

        Args:
            asset_type: (str) The type of asset to be retrieved. Defaults to all of them.

        Yield:
            Json: The asset being retrieved.
        """
        if not self.can_update():
            self._tcex.handle_error(910, [self.type])
        return self.tc_requests.victim_assets(
            self.api_type, self.api_branch, self.unique_id, asset_type
        )

    def can_create(self):
        """Return True if victim can be created (a name is required)."""
        return self.data.get('name') is not None

    def delete_asset(self, asset_id, asset_type):
        """Delete an asset of the Victim.

        Valid asset_type values: email, network, phone, social, web.

        Args:
            asset_id: (int) the id of the asset being deleted.
            asset_type: (str) Either email, network, phone, social, or web.

        Return:
            requests.Response: The response from the API call.
        """
        if not self.can_update():
            self._tcex.handle_error(910, [self.type])
        if asset_type is None:
            self._tcex.handle_error(
                925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
            )
        return self.tc_requests.victim_delete_asset(self.unique_id, asset_type, asset_id)

    def delete_email_asset(self, asset_id):
        """Delete an email asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being deleted.

        Return:
            requests.Response: The response from the API call.
        """
        return self.delete_asset(asset_id, 'EMAIL')

    def delete_network_asset(self, asset_id):
        """Delete a network asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being deleted.

        Return:
            requests.Response: The response from the API call.
        """
        return self.delete_asset(asset_id, 'NETWORK')

    def delete_phone_asset(self, asset_id):
        """Delete a phone asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being deleted.

        Return:
            requests.Response: The response from the API call.
        """
        return self.delete_asset(asset_id, 'PHONE')

    def delete_social_asset(self, asset_id):
        """Delete a social asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being deleted.

        Return:
            requests.Response: The response from the API call.
        """
        return self.delete_asset(asset_id, 'SOCIAL')

    def delete_web_asset(self, asset_id):
        """Delete a web asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being deleted.

        Return:
            requests.Response: The response from the API call.
        """
        return self.delete_asset(asset_id, 'WEB')

    def email_assets(self):
        """Get the email assets of a Victim.

        Yield:
            Json: The asset being retrieved.
        """
        return self.assets(asset_type='EMAIL')

    def get_asset(self, asset_id, asset_type):
        """Get a single asset of a Victim.

        Valid asset_type values: email, network, phone, social, web.

        Args:
            asset_id: (int) the id of the asset being retrieved.
            asset_type: (str) The type of asset to be retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        if not self.can_update():
            self._tcex.handle_error(910, [self.type])
        if asset_type is None:
            self._tcex.handle_error(
                925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
            )
        return self.tc_requests.victim_get_asset(self.unique_id, asset_type, asset_id)

    def get_email_asset(self, asset_id):
        """Retrieve an email asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        return self.get_asset(asset_id, 'EMAIL')

    def get_network_asset(self, asset_id):
        """Retrieve a network asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        return self.get_asset(asset_id, 'NETWORK')

    def get_phone_asset(self, asset_id):
        """Retrieve a phone asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        return self.get_asset(asset_id, 'PHONE')

    def get_social_asset(self, asset_id):
        """Retrieve a social asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        return self.get_asset(asset_id, 'SOCIAL')

    def get_web_asset(self, asset_id):
        """Retrieve a web asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being retrieved.

        Return:
            requests.Response: The response from the API call.
        """
        return self.get_asset(asset_id, 'WEB')

    @staticmethod
    def is_victim():
        """Return True if object is a victim."""
        return True

    @property
    def name(self):
        """Return the Victim name."""
        return self._data.get('name')

    @name.setter
    def name(self, name):
        """Set the Victim name."""
        self._data['name'] = name

    def network_assets(self):
        """Get the network assets of a Victim.

        Yield:
            Json: The asset being retrieved.
        """
        return self.assets(asset_type='NETWORK')

    def social_assets(self):
        """Get the social assets of a Victim.

        Yield:
            Json: The asset being retrieved.
        """
        return self.assets(asset_type='SOCIAL')

    def phone_assets(self):
        """Get the phone assets of a Victim.

        Yield:
            Json: The asset being retrieved.
        """
        return self.assets(asset_type='PHONE')

    def update_asset(self, asset_type, asset_id, body=None):
        """Update an asset of a Victim.

        Valid asset_type values: email, network, phone, social, web.

        Args:
            asset_type: (str) The type of asset to be updated.
            asset_id: (int) the id of the asset being updated.
            body: (dict) the body of the asset being updated.

        Return:
            requests.Response: The response from the API call.
        """
        # Consistency fix: like add_asset/get_asset/delete_asset, refuse to
        # operate without a unique_id instead of issuing a malformed API call.
        if not self.can_update():
            self._tcex.handle_error(910, [self.type])
        if body is None:
            body = {}
        if asset_type is None:
            self._tcex.handle_error(
                925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
            )
        return self.tc_requests.victim_update_asset(self.unique_id, asset_type, asset_id, body)

    def update_email_asset(self, asset_id, address=None, address_type=None):
        """Update an email asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being updated.
            address: (str) The asset address.
            address_type: (str) The asset address type

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {}
        if address:
            asset_data['address'] = address
        if address_type:
            asset_data['addressType'] = address_type
        return self.update_asset('EMAIL', asset_id, asset_data)

    def update_network_asset(self, asset_id, account=None, network=None):
        """Update a network asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being updated.
            account: (str) The asset account.
            network: (str) The asset network

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {}
        if account:
            asset_data['account'] = account
        if network:
            asset_data['network'] = network
        return self.update_asset('NETWORK', asset_id, asset_data)

    def update_phone_asset(self, asset_id, phone_type=None):
        """Update a phone asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being updated.
            phone_type: (str) The phone type account.

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {}
        if phone_type:
            asset_data['phoneType'] = phone_type
        return self.update_asset('PHONE', asset_id, asset_data)

    def update_social_asset(self, asset_id, account=None, network=None):
        """Update a social asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being updated.
            account: (str) The asset account.
            network: (str) The asset network

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {}
        if account:
            asset_data['account'] = account
        if network:
            asset_data['network'] = network
        return self.update_asset('SOCIAL', asset_id, asset_data)

    def update_web_asset(self, asset_id, web_site=None):
        """Update a web asset of the Victim.

        Args:
            asset_id: (int) the id of the asset being updated.
            web_site: (str) The asset web_site.

        Return:
            requests.Response: The response from the API call.
        """
        asset_data = {}
        if web_site:
            asset_data['webSite'] = web_site
        return self.update_asset('WEB', asset_id, asset_data)

    def web_assets(self):
        """Get the web assets of a Victim.

        Yield:
            Json: The asset being retrieved.
        """
        return self.assets(asset_type='WEB')
|
apache-2.0
| 6,182,735,286,011,776,000
| 28.237721
| 100
| 0.556511
| false
| 4.0639
| false
| false
| false
|
wraithan/rplay
|
replayswithfriends/profiles/backends/urls.py
|
1
|
2568
|
"""
URLconf for registration and activation, using django-registration's
default backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.default.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead.
"""
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from registration.views import activate
from registration.views import register
from replayswithfriends.profiles.backends.forms import Sc2RegForm
# URLconf wiring django-registration's default backend views. Route order
# matters: specific activate/register routes precede the catch-all include.
urlpatterns = patterns('',
# Static landing page shown after a successful account activation.
url(r'^activate/complete/$',
direct_to_template,
{'template': 'registration/activation_complete.html'},
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get to the view;
# that way it can return a sensible "invalid key" message instead of a
# confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
activate,
{'backend': 'registration.backends.default.DefaultBackend'},
name='registration_activate'),
# Registration form, handled by the default backend.
url(r'^register/$',
register,
{
'backend': 'registration.backends.default.DefaultBackend',
},
name='registration_register'),
# Static page shown after the registration form was submitted.
url(r'^register/complete/$',
direct_to_template,
{'template': 'registration/registration_complete.html'},
name='registration_complete'),
# Static page shown when registration is disabled.
url(r'^register/closed/$',
direct_to_template,
{'template': 'registration/registration_closed.html'},
name='registration_disallowed'),
# Fallback: django.contrib.auth views (login, logout, password reset, ...).
(r'', include('registration.auth_urls')),
)
|
mit
| -2,084,832,934,221,354,800
| 44.857143
| 99
| 0.551791
| false
| 5.429175
| false
| false
| false
|
maximebf/budgettracker
|
budgettracker/categories.py
|
1
|
3249
|
from collections import namedtuple
from .data import filter_transactions_period
import re
class Category(namedtuple('Category', ['name', 'color', 'keywords', 'warning_threshold'])):
    """A spending category: a name plus its display color, matching keywords
    and an optional spending warning threshold."""

    @classmethod
    def from_dict(cls, dct):
        """Build a Category from a plain dict; only 'name' is required."""
        return cls(name=dct['name'],
                   color=dct.get('color'),
                   keywords=dct.get('keywords', []),
                   warning_threshold=dct.get('warning_threshold'))

    def to_dict(self):
        """Serialize back to a plain dict (inverse of from_dict)."""
        return {'name': self.name,
                'color': self.color,
                'keywords': self.keywords,
                'warning_threshold': self.warning_threshold}
class ComputedCategory(namedtuple('ComputedCategory',
                                  ['name', 'color', 'keywords', 'warning_threshold', 'amount', 'pct'])):
    """A Category augmented with the amount spent and its share of the total."""

    @classmethod
    def from_category(cls, category, **kwargs):
        """Build from a Category-like object.

        The optional 'warning_threshold_multiplier' keyword scales the
        threshold (e.g. for multi-month periods); remaining kwargs
        (amount, pct) are forwarded to the constructor.
        """
        multiplier = kwargs.pop('warning_threshold_multiplier', 1)
        threshold = category.warning_threshold * multiplier if category.warning_threshold else None
        return cls(name=category.name, color=category.color, keywords=category.keywords,
                   warning_threshold=threshold, **kwargs)

    @property
    def has_warning(self):
        """Truthy when spending exceeded the (scaled) warning threshold."""
        return self.warning_threshold and self.amount > self.warning_threshold

    def to_str(self, famount):
        """One-line human-readable summary; famount formats an amount."""
        label = self.name or 'Uncategorized'
        warning = ' /!\ %s' % (famount(self.warning_threshold)) if self.has_warning else ''
        return "%s = %s (%s%%)%s" % (label, famount(self.amount), self.pct, warning)
def compute_categories(transactions, categories=None, start_date=None, end_date=None, warning_threshold_multiplier=1):
    """Aggregate expense transactions into per-category ComputedCategory totals.

    transactions: iterable of objects with .amount (negative = expense) and
        .categories (list of category names, or falsy for uncategorized).
    categories: optional iterable of Category objects used for colors,
        keywords and warning thresholds.
    start_date/end_date: forwarded to filter_transactions_period.
    warning_threshold_multiplier: scales each category's warning threshold.

    Returns a list of ComputedCategory, uncategorized (name=None) first,
    then named categories sorted by name; categories with no spending are
    appended with amount=0.
    """
    categories = {c.name: c for c in categories or []}
    amounts = {}
    total = 0
    for tx in filter_transactions_period(transactions, start_date, end_date):
        if tx.amount >= 0:
            # Only expenses are categorized; skip income.
            continue
        if not tx.categories:
            # Uncategorized expense: counts toward the total only.
            total += abs(tx.amount)
            continue
        # NOTE: a transaction with several categories contributes its full
        # amount to each of them AND to the total once per category
        # (preserved from the original implementation).
        for name in sorted(tx.categories or []):
            amounts.setdefault(name, 0)
            amounts[name] += abs(tx.amount)
            total += abs(tx.amount)

    categorized_total = sum(amounts.values())
    if total - categorized_total > 0:
        # Track the uncategorized remainder under the None key.
        amounts[None] = total - categorized_total

    final = []
    # Bug fix: sorting on the raw key raises TypeError on Python 3 when the
    # None (uncategorized) key is mixed with str keys. Sort None first, then
    # names alphabetically (same order Python 2 produced).
    for name, amount in sorted(amounts.items(), key=lambda t: (t[0] is not None, t[0] or '')):
        pct = round(amount * 100 / total, 0)
        if name in categories:
            final.append(ComputedCategory.from_category(categories[name], amount=amount, pct=pct,
                                                        warning_threshold_multiplier=warning_threshold_multiplier))
        else:
            final.append(ComputedCategory(name=name, color=None, keywords=[],
                                          warning_threshold=None, amount=amount, pct=pct))

    # Known categories with no spending still appear, zeroed out.
    for category in categories.values():
        if category.name not in amounts:
            final.append(ComputedCategory.from_category(category, amount=0, pct=0,
                                                        warning_threshold_multiplier=warning_threshold_multiplier))
    return final
def match_categories(categories, label):
    """Return the names of all categories with a keyword matching label.

    Keywords are matched as whole words, case-insensitively. A category is
    reported at most once even if several of its keywords match.

    NOTE(review): keywords are interpolated into the regex unescaped, so
    regex metacharacters in a keyword are interpreted as regex syntax.
    """
    matches = []
    for category in categories:
        for keyword in (category.keywords or []):
            if re.search(r"\b%s\b" % keyword, label, re.I):
                matches.append(category.name)
                # Bug fix: the original used `continue`, which only moved to
                # the next keyword, appending the same category once per
                # matching keyword. `break` reports each category once.
                break
    return matches
|
mit
| 3,897,346,843,493,143,000
| 40.139241
| 125
| 0.63558
| false
| 4.066333
| false
| false
| false
|
tellapart/taba
|
src/taba/handlers/totals_counter.py
|
1
|
2149
|
# Copyright 2014 TellApart, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple Tab Handler that accepts single numeric inputs and tracks the count
and total values.
"""
from taba.handlers.tab_handler import TabHandler
class TotalsCounterState(object):
    """Mutable state for TotalsCounter: an event count and a running total."""

    def __init__(self, count, total):
        # Number of events folded in so far.
        self.count = count
        # Sum of all folded event values.
        self.total = total
class TotalsCounter(TabHandler):
    """Simple Tab Handler that accepts single numeric inputs and tracks the
    count and total values."""

    CURRENT_VERSION = 0

    def NewState(self, client_id, name):
        """See base class definition."""
        return TotalsCounterState(0, 0.0)

    def FoldEvents(self, state, events):
        """See base class definition."""
        # Accumulate directly onto the state; each event payload carries a
        # single numeric value in its first slot.
        for event in events:
            state.count += 1
            state.total += int(event.payload[0])
        return state

    def Reduce(self, states):
        """See base class definition."""
        if not states:
            return None
        merged = states[0]
        for other in states[1:]:
            merged.count += other.count
            merged.total += other.total
        return merged

    def Render(self, state, accept):
        """See base class definition."""
        # Guard against division by zero for empty states.
        average = state.total / state.count if state.count != 0 else 0
        return '{"count": %d, "total": %.2f, "average": %.2f}' % (
            state.count, state.total, average)

    def Upgrade(self, state, version):
        """See base class definition."""
        return state

    def ShouldPrune(self, state):
        """See base class definition."""
        return (state.count == 0)
|
apache-2.0
| -6,829,965,901,479,492,000
| 27.276316
| 79
| 0.669614
| false
| 3.91439
| false
| false
| false
|
jrrpanix/master
|
examples/python/math/numpyExample.py
|
1
|
6283
|
import numpy as np
"""
by jrr
"""
#
# output array as csv
#
def printArray(A):
    """Print a 1-D or 2-D numpy array as comma-separated %5.2f values.

    Python 2 style: the trailing comma after print(...) suppresses the
    newline so each row stays on one line.
    """
    if len(A.shape) == 1:
        # Single row: every element on one line, then a newline.
        for i in range(A.shape[0]):
            print('%5.2f,' % A[i]),
        print('')
    else:
        rows, cols = A.shape
        for i in range(rows):
            for j in range(cols):
                print('%5.2f,' % A[i, j]),
            print('')
#
# np.array sizes
#
def getNumRows(A):
    """Number of rows (first dimension) of a numpy array."""
    return A.shape[0]


def getNumCols(A):
    """Number of columns; defined as 1 for a 1-D array."""
    if len(A.shape) == 1:
        return 1
    return A.shape[1]


def getNumElements(A):
    """Total number of elements in the array."""
    return A.size


def getNumDim(A):
    """Number of array dimensions."""
    return A.ndim
#
# different ways to create a numpy array
#
def simple(N=4):
    """1-D array [0, 1, ..., N-1]."""
    return np.arange(N)


def nbyN(N=4):
    """N x N array filled with 0..N*N-1 row by row."""
    return np.arange(N * N).reshape(N, N)


def nbyN_identity(N=4, dtype=float):
    """N x N identity matrix."""
    return np.identity(N, dtype=dtype)


def nbyN_ones(N=4, dtype=float):
    """N x N matrix of ones (flat array + reshape)."""
    return np.ones(N * N, dtype=dtype).reshape(N, N)


def nbyN_ones_alternate(N=4, dtype=float):
    """N x N matrix of ones (shape tuple passed directly)."""
    return np.ones((N, N), dtype=dtype)


def nbyN_zeros(N=4, dtype=float):
    """N x N matrix of zeros."""
    return np.zeros(N * N, dtype=dtype).reshape(N, N)


def random_uniform(rows, cols):
    """rows x cols matrix of uniform random values in [0, 1)."""
    return np.random.rand(rows, cols)


def fromList_Example0(dtype=float):
    """2 x 2 swap matrix built from a nested Python list."""
    swap = [[0, 1],   # row 0
            [1, 0]]   # row 1
    return np.array(swap, dtype=dtype)


def fromList_Example1(dtype=float):
    """3 x 3 matrix of 1..9 from a nested list."""
    return np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=dtype)


def fromList_Example2(dtype=float):
    """4 x 4 matrix of 0..15 from a flat list + reshape.

    NOTE(review): the dtype argument is accepted but never applied here
    (unlike the sibling examples); the result keeps the default int dtype.
    Behavior preserved.
    """
    vec = [i for i in range(0, 16)]
    return np.array(vec).reshape(4, 4)


def fromList_Example3(dtype=float):
    """4 x 4 float matrix of 0..15 built directly from a range."""
    return np.array(range(16), float).reshape(4, 4)
#
# Examples of Math for rows or colums for 2D matricies
# Note that is just done by setting axis parameter
# for 2-D matrix
# axis=0 are for operations column wise
# axis=1 are for operations row wise
# this extends to higher dimension matricies via the axis parameter
def meanCols(A):
    """Mean of each column (axis=0)."""
    return np.mean(A, axis=0)


def maxInEachCol(A):
    """Maximum of each column (axis=0)."""
    return np.max(A, axis=0)


def minInEachCol(A):
    """Minimum of each column (axis=0)."""
    return np.min(A, axis=0)


def sumCols(A):
    """Sum of each column (axis=0)."""
    return np.sum(A, axis=0)


def stdCols(A):
    """Standard deviation of each column (axis=0)."""
    return np.std(A, axis=0)


def sumRows(A):
    """Sum of each row (axis=1)."""
    return np.sum(A, axis=1)


def minInEachRow(A):
    """Minimum of each row (axis=1)."""
    return np.min(A, axis=1)


def maxInEachRow(A):
    """Maximum of each row (axis=1)."""
    return np.max(A, axis=1)
#
# run creation examples
#
def creationExamples():
    """Demonstrate each of the array-creation helpers in turn."""
    # (label, zero-argument factory) pairs, printed in the original order.
    demos = [
        ('one dimensional ', simple),
        ('n x n', nbyN),
        ('idenity ', nbyN_identity),
        ("nxn of 0's", nbyN_zeros),
        ("nxn of 1's", nbyN_ones),
        ("nxn of 1's alternative way of creating", nbyN_ones_alternate),
    ]
    for label, factory in demos:
        print(label)
        printArray(factory())
    print('random uniform 4x3 matrix')
    printArray(random_uniform(4, 3))
    list_demos = [
        ('create from python list,example0', fromList_Example0),
        ('create from python list,example1', fromList_Example1),
        ('create from python list example2', fromList_Example2),
        ('create from python list example3', fromList_Example3),
    ]
    for label, factory in list_demos:
        print(label)
        printArray(factory())
#
# run math Examples
#
def mathExamples(A):
    """Demonstrate column-wise and row-wise reductions on matrix A."""
    print('math examples input matrix')
    printArray(A)
    # (label, reduction) pairs; columns first, then rows, original order.
    demos = [
        ('mean col', meanCols),
        ('std dev col', stdCols),
        ('sum col', sumCols),
        ('min col', minInEachCol),
        ('max col', maxInEachCol),
        ('sum rows', sumRows),
        ('min rows', minInEachRow),
        ('max rows', maxInEachRow),
    ]
    for label, reduction in demos:
        print(label)
        printArray(reduction(A))
#
# size Examples
#
def sizeExamples(A):
    """Demonstrate the size/shape helper functions on array A."""
    print('size examples')
    printArray(A)
    print('rows =', getNumRows(A))
    print('cols =', getNumCols(A))
    print('size =', getNumElements(A))
    print('dim =', getNumDim(A))
#
# slice examples
#
# for a 2 Dimensional npArray
# suppose X is a 2 Dimensional Array
# all the rows -> X[:,some_col] , use ':'
# example :
# X[:,1] -> get every thing from col 1 (all the rows, just col 1)
#
# n0:n1 -> interpreted as range(n0,n1): start at n0, stop at n1 but don't include n1
# example :
# X[1:3,:] -> get rows 1 and 2 and all the colums
#
# Advantage of numpy array is that one can get rows or columns via random access
# example get first and last column of Matrix (this cant' be done with python list
# X[:,[0,-1]]
def sliceExamples(X):
    """Demonstrate numpy slicing and fancy indexing on a 2-D matrix X."""
    print('slice examples')
    printArray(X)
    cols = getNumCols(X)
    # (label, slice result) pairs, printed in the original order; slicing
    # has no side effects so computing the views up front is equivalent.
    demos = [
        ('get all elements in col 0', X[:, 0]),
        ('get all elements in row 1', X[1, :]),
        ('get last column', X[:, -1]),
        ('get last row', X[-1, :]),
        ('get first 2 elements in row 0', X[0, 0:2]),
        ('get last 2 elements in row 2', X[2, -2:cols]),
        ('get column 1 values in 1st and 2nd rows', X[1:3, 1]),
        ('get colum 0 and last column', X[:, [0, -1]]),
        ('get 1st,3rd rows', X[[1, 3], :]),
    ]
    for label, view in demos:
        print(label)
        printArray(view)
#
# conditional examples
#
def conditionalExamples(X, cutoff=5):
    """Demonstrate np.where: index lookup, masking and conditional replace.

    X: input matrix; cutoff: threshold used in the comparisons.
    """
    # Bug fix: corrected typos in the user-facing message
    # ('contitonal' -> 'conditional', 'vlaue' -> 'value').
    print('conditional examples input matrix X,cutoff value=', cutoff)
    printArray(X)
    # Tuple of (row indices, col indices) where the condition holds.
    a = np.where(X < cutoff)
    print('np.where(X < cutoff)')
    print(a)
    # Boolean-mask selection of the matching elements.
    print('X[np.where(X < cutoff)]')
    a = X[np.where(X < cutoff)]
    print(a)
    # Element-wise select: keep X where < cutoff, otherwise put in 99.
    print('np.where(X < cutoff,X,99)')
    a = np.where(X < cutoff, X, 99)
    print(a)
    # Map uniform [0, 1) samples to -1/1 at the 0.5 threshold.
    U = random_uniform(5, 5)
    print('random 5 x 5 matrix')
    printArray(U)
    print('change matrix for -1 or 1 if < .5 -1 otherwise 1')
    print('U2=np.where(U<.5,-1,1)')
    U2 = np.where(U < .5, -1, 1)
    printArray(U2)
if __name__ == '__main__':
    # Run every demo when executed as a script.
    creationExamples()
    mathExamples(nbyN())
    sizeExamples(nbyN(3))
    sliceExamples(nbyN(5))
    conditionalExamples(nbyN(4), cutoff=6)
|
gpl-3.0
| 1,206,430,020,656,617,500
| 20.891986
| 82
| 0.614515
| false
| 2.918254
| false
| false
| false
|
axsemantics/rohrpost
|
tests/test_ping.py
|
1
|
1151
|
from rohrpost.handlers import handle_ping
def test_ping(consumer):
    """A ping without a payload gets a pong echoing the id, with no data."""
    handle_ping(consumer=consumer, request={"id": 123})
    assert consumer.closed is False
    assert len(consumer.data) == 1
    reply = consumer.data[-1]
    assert reply["id"] == 123
    assert reply["type"] == "pong"
    assert "data" not in reply
def test_ping_additional_data(consumer):
    """A dict payload is echoed back inside the pong's data field."""
    payload = {"some": "data", "other": "data", "handler": "foo"}
    handle_ping(
        consumer=consumer,
        request={"id": 123, "type": "ping", "data": payload},
    )
    assert consumer.closed is False
    assert len(consumer.data) == 1
    reply = consumer.data[-1]
    assert reply["id"] == 123
    assert reply["type"] == "pong"
    assert reply["data"]["some"] == "data"
    assert reply["data"]["handler"] == "foo"
def test_ping_additional_non_dict_data(consumer):
    """A non-dict payload is wrapped under the 'data' key of the pong data."""
    handle_ping(consumer=consumer, request={"id": 123, "type": "ping", "data": 1})
    assert consumer.closed is False
    assert len(consumer.data) == 1
    reply = consumer.data[-1]
    assert reply["id"] == 123
    assert reply["type"] == "pong"
    assert reply["data"]["data"] == 1
|
mit
| 487,192,422,104,183,300
| 26.404762
| 82
| 0.586447
| false
| 3.630915
| false
| false
| false
|
project-oak/hafnium-verification
|
experiments/ownership-inference/infer/infer/lib/python/inferlib/capture/ant.py
|
1
|
2742
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
from . import util
from inferlib import jwlib
# Identification of this capture module, used by infer's CLI integration.
MODULE_NAME = __name__
# User-facing help text for this module (string content must stay as-is).
MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
ant [options] [target]
Analysis examples:
infer -- ant compile'''
# Languages this capture module produces analysis input for.
LANG = ['java']
def gen_instance(*args):
    """Return an AntCapture constructed with *args* (module factory hook)."""
    return AntCapture(*args)
# This creates an empty argparser for the module, which provides only
# description/usage information and no arguments.
create_argparser = util.base_argparser(MODULE_DESCRIPTION, MODULE_NAME)
class AntCapture:
    """Drive an ``ant -verbose`` build and turn each javac invocation found
    in its output into an infer capture command."""

    def __init__(self, args, cmd):
        """``args``: parsed infer options; ``cmd``: the original ant command line."""
        self.args = args
        util.log_java_version()
        logging.info(util.run_cmd_ignore_fail([cmd[0], '-version']))
        # TODO: make the extraction of targets smarter
        self.build_cmd = [cmd[0], '-verbose'] + cmd[1:]

    def is_interesting(self, content):
        """A token is interesting if it is quoted or names a Java source file."""
        return self.is_quoted(content) or content.endswith('.java')

    def is_quoted(self, argument):
        """Return True when ``argument`` is wrapped in single quotes ('...')."""
        quote = '\''
        return (len(argument) > 2 and argument[0] == quote
                and argument[-1] == quote)

    def remove_quotes(self, argument):
        """Strip one layer of single quotes, if present."""
        if self.is_quoted(argument):
            return argument[1:-1]
        return argument

    def _flush_command(self, javac_arguments, calls):
        """Append one infer command for the accumulated javac arguments (if
        any) to ``calls`` and return a fresh, empty accumulator."""
        if javac_arguments:
            calls.append(jwlib.create_infer_command(javac_arguments))
        return []

    def get_infer_commands(self, verbose_output):
        """Parse ant's verbose output and return one infer capture command
        per javac invocation found in it."""
        javac_pattern = '[javac]'
        argument_start_pattern = 'Compilation arguments'
        calls = []
        javac_arguments = []
        collect = False
        for line in verbose_output.split('\n'):
            if javac_pattern in line:
                if argument_start_pattern in line:
                    # A new javac invocation starts: flush the previous one.
                    collect = True
                    javac_arguments = self._flush_command(javac_arguments, calls)
                if collect:
                    pos = line.index(javac_pattern) + len(javac_pattern)
                    content = line[pos:].strip()
                    if self.is_interesting(content):
                        javac_arguments.append(self.remove_quotes(content))
        # Flush the trailing invocation (output need not end with a marker).
        self._flush_command(javac_arguments, calls)
        return calls

    def capture(self):
        """Run the ant build; on success, run infer over the extracted
        javac compilation commands. Returns the build's exit code on failure."""
        (code, (verbose_out, _)) = util.get_build_output(self.build_cmd)
        if code != os.EX_OK:
            return code
        cmds = self.get_infer_commands(verbose_out)
        return util.run_compilation_commands(cmds)
|
apache-2.0
| 8,061,196,830,654,131,000
| 31.642857
| 77
| 0.588257
| false
| 4.038292
| false
| false
| false
|
us-ignite/us_ignite
|
us_ignite/testbeds/views.py
|
1
|
2088
|
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from us_ignite.common import pagination
from us_ignite.common.response import json_response
from us_ignite.maps.utils import get_location_dict
from us_ignite.testbeds.models import Testbed
from us_ignite.testbeds.forms import TestbedFilterForm
def testbed_detail(request, slug):
    """Render the detail page for a single ``Testbed``, or raise 404."""
    testbed = get_object_or_404(
        Testbed.objects.select_related('contact'), slug__exact=slug)
    # Hide testbeds the current user is not allowed to see.
    if not testbed.is_visible_by(request.user):
        raise Http404
    return TemplateResponse(request, 'testbed/object_detail.html', {
        'object': testbed,
        'is_editable': testbed.is_editable_by(request.user),
        'app_list': testbed.applications.all(),
    })
def get_testbed_query(data):
    """Turn cleaned form data into ``Testbed`` filter kwargs.

    Falsy values are dropped entirely; ``passes_*`` fields become
    greater-than-or-equal lookups.
    """
    def lookup(name):
        # 'passes_*' values filter as minimum thresholds.
        return '%s__gte' % name if name.startswith('passes_') else name

    return {lookup(key): value for key, value in data.items() if value}
def testbed_list(request):
    """Paginated listing of active testbeds, optionally filtered."""
    filters = {}
    if request.GET:
        # Bound form: apply the user's filters only when they validate.
        form = TestbedFilterForm(request.GET)
        if form.is_valid():
            filters = get_testbed_query(form.cleaned_data)
    else:
        form = TestbedFilterForm()
    page = pagination.get_page(
        Testbed.active.filter(**filters),
        pagination.get_page_no(request.GET))
    return TemplateResponse(request, 'testbed/object_list.html', {
        'page': page,
        'form': form,
    })
def get_app_list(testbed):
    """Map each application attached to *testbed* to its map-marker dict."""
    markers = []
    for app in testbed.applications.all():
        markers.append(get_location_dict(app, 'app'))
    return markers
def testbed_locations_json(request, slug):
    """JSON payload (wrapped in the 'map.render' callback) with the testbed's
    own marker followed by markers for its applications."""
    testbed = get_object_or_404(Testbed.active, slug__exact=slug)
    markers = [get_location_dict(testbed, 'testbed')] + get_app_list(testbed)
    return json_response(markers, callback='map.render')
|
bsd-3-clause
| 2,731,798,187,636,176,400
| 31.625
| 76
| 0.670019
| false
| 3.527027
| true
| false
| false
|
team23/django_backend
|
django_backend/backend/form_tabs.py
|
1
|
7071
|
from .renderable import Renderable
class BaseFormElement(Renderable):
    """Common behaviour shared by tabs, rows and fields of the form layout."""

    def __init__(self, template_name=None, position=0):
        # ``position`` controls ordering among sibling elements.
        self.position = position
        super(BaseFormElement, self).__init__(template_name=template_name)

    def resolve_help_text(self, context):
        # No help text by default; subclasses may override.
        return None

    @property
    def states(self):
        """
        A helper so that you can call in the template::
            {% render tab.states %}
        """
        element = self

        class RenderableStates(object):
            # Tiny adapter exposing a ``render`` method for the template tag.
            def render(self, context=None):
                return ' '.join(element.get_states(context))

        return RenderableStates()

    def get_states(self, context):
        """
        Return a list of states that this element is in. This could be
        ``error`` for example if a containing field has an error. Those
        states can be added as css classes in the template and used to
        style the element accordingly.

        Where and whether the css classes are added to the template is up
        to the subclass like tabs, rows, etc.
        """
        return []
class FormTab(BaseFormElement):
    """A tab in the form layout, holding an ordered collection of rows."""

    template_name = 'django_backend/formlayout/table.html'

    def __init__(self, name, rows, *args, **kwargs):
        self.name = name
        # Normalize eagerly into a list so add_row() can append later
        # (a lazy ``map`` object would break this under Python 3).
        self._rows = [self._initialize_row(row) for row in rows]
        super(FormTab, self).__init__(*args, **kwargs)

    def add_row(self, row):
        """Append a row (dict / list / row object). Returns self for chaining."""
        self._rows.append(self._initialize_row(row))
        # Make calls chainable.
        return self

    def _initialize_row(self, row):
        # Accept plain data structures and coerce them into FormRow objects.
        if isinstance(row, dict):
            return FormRow(row.get('label', ''), row.get('fields', []))
        # TODO: Add possibility to just add field list directly
        # (row should be created on the fly, using the first field label)
        if isinstance(row, list):
            return FormRow(None, row)
        return row

    def resolve_has_error(self, context):
        """
        Return ``True`` if one of the containing rows contains a form
        validation error.
        """
        return any(
            row.resolve_has_error(context)
            for row in self._rows
            if hasattr(row, 'resolve_has_error'))

    def get_states(self, context):
        states = list(super(FormTab, self).get_states(context))
        if self.resolve_has_error(context):
            states += ['has-error']
        return states

    def get_context_data(self, context, **kwargs):
        kwargs.update({
            'tab': self,
            'tab_rows': self.rows,
        })
        return super(FormTab, self).get_context_data(context, **kwargs)

    @property
    def rows(self):
        # Stable sort by ``position`` (ties keep insertion order). Uses
        # ``key=`` instead of the Python-2-only ``cmp=`` parameter.
        return sorted(self._rows, key=lambda row: row.position)

    @property
    def fields(self):
        # Flattened list of all fields across all rows, in display order.
        fields = []
        for row in self.rows:
            fields = fields + row.fields
        return fields
class FormRow(BaseFormElement):
    """A row inside a tab: a label plus one or more form fields."""

    template_name = 'django_backend/formlayout/tr.html'

    def __init__(self, label, fields, help_text=None, *args, **kwargs):
        self.label = label
        # Materialize as a list so add_field() can append later
        # (a lazy ``map`` object would break this under Python 3).
        self._fields = [self._initialize_field(field) for field in fields]
        self.help_text = help_text
        super(FormRow, self).__init__(*args, **kwargs)

    def add_field(self, field):
        """Append a field (name or field object). Returns self for chaining."""
        self._fields.append(self._initialize_field(field))
        # Make calls chainable.
        return self

    def _initialize_field(self, field):
        # Accept a bare field name and wrap it in a FormField. Works on
        # Python 2 (where unicode names must match ``basestring``) and 3.
        try:
            string_types = basestring  # noqa: F821 -- Python 2
        except NameError:
            string_types = str  # Python 3
        if isinstance(field, string_types):
            return FormField(field)
        return field

    def resolve_has_error(self, context):
        """
        Return ``True`` if one of the containing fields contains a form
        validation error.
        """
        return any(
            field.resolve_has_error(context)
            for field in self._fields
            if hasattr(field, 'resolve_has_error'))

    def get_states(self, context):
        states = list(super(FormRow, self).get_states(context))
        if self.resolve_has_error(context):
            states += ['has-error']
        return states

    def resolve_default_label(self, context):
        # Explicit label wins; a single-field row borrows its field's label.
        if self.label:
            return self.label
        if len(self.fields) == 1:
            return self.fields[0].resolve_label(context)
        return ''

    def resolve_help_text(self, context):
        # Same precedence as the label: explicit, then single field's, else ''.
        if self.help_text:
            return self.help_text
        if len(self.fields) == 1:
            return self.fields[0].resolve_help_text(context)
        return ''

    def resolve_required(self, context):
        # The row counts as "required" if any of its fields is.
        return any(f.resolve_required(context) for f in self._fields)

    def get_context_data(self, context, **kwargs):
        kwargs.update({
            'row': self,
            'row_label': self.resolve_default_label(context),
            'row_fields': self.fields,
            # I think that's not required anymore.
            #'row_form_fields': [f.resolve_field(context) for f in self.fields],
            'row_help_text': self.resolve_help_text(context),
            'row_required': self.resolve_required(context),
        })
        return kwargs

    @property
    def fields(self):
        # Stable sort by ``position``; ``key=`` replaces Python 2's ``cmp=``.
        return sorted(self._fields, key=lambda field: field.position)

    def field_names(self):
        return [field.field for field in self.fields]
class FormField(BaseFormElement):
    """A single form field, looked up by name on the context's form."""

    template_name = 'django_backend/formlayout/field.html'

    def __init__(self, field, *args, **kwargs):
        # ``field`` is the name of the form field to render.
        self.field = field
        super(FormField, self).__init__(*args, **kwargs)

    def get_states(self, context):
        states = list(super(FormField, self).get_states(context))
        if self.resolve_has_error(context):
            states += ['has-error']
        return states

    def resolve_has_error(self, context):
        # In error state when the bound form field carries errors.
        bound_field = self.resolve_field(context)
        if bound_field and hasattr(bound_field, 'errors'):
            return bool(bound_field.errors)
        return False

    def resolve_form(self, context):
        if 'form' in context:
            return context['form']

    def resolve_field(self, context):
        # Returns None when there is no form or no field of that name.
        form = self.resolve_form(context)
        if form is None:
            return
        try:
            return form[self.field]
        except KeyError:
            return

    def resolve_label(self, context):
        return self.resolve_field(context).label

    def resolve_help_text(self, context):
        return self.resolve_field(context).help_text

    def resolve_required(self, context):
        return self.resolve_field(context).field.required

    def get_context_data(self, context, **kwargs):
        kwargs.update({
            'field': self,
            'field_name': self.field,
            'field_form_field': self.resolve_field(context),
        })
        return super(FormField, self).get_context_data(context, **kwargs)

    def render(self, context):
        # Skip rendering entirely when the form has no such field.
        if self.resolve_field(context) is None:
            return ''
        return super(FormField, self).render(context)
|
bsd-3-clause
| -1,468,785,885,214,893,300
| 30.426667
| 84
| 0.589733
| false
| 4.087283
| false
| false
| false
|
edx/credentials
|
credentials/apps/credentials/models.py
|
1
|
12660
|
"""
Models for the credentials service.
"""
import logging
import uuid
import bleach
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.db import models
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django_extensions.db.models import TimeStampedModel
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from simple_history.models import HistoricalRecords
from credentials.apps.catalog.api import get_program_details_by_uuid
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.utils import _choices
from credentials.apps.credentials import constants
from credentials.apps.credentials.exceptions import NoMatchingProgramException
log = logging.getLogger(__name__)
def signatory_assets_path(instance, filename):
    """
    Returns the upload path for signatory assets.

    Arguments:
        instance(Signatory): Signatory object
        filename(str): file to upload

    Returns:
        Path to asset, namespaced by the signatory's primary key.
    """
    # Fix: the uploaded file's name was dropped from the path (the parameter
    # was unused), so every upload for a signatory collided on the same key.
    return f"signatories/{instance.id}/{filename}"
def validate_image(image):
    """
    Validates that a particular image is small enough (under 250KB).
    """
    max_bytes = 250 * 1024
    if image.size > max_bytes:
        raise ValidationError(_("The image file size must be less than 250KB."))
def validate_course_key(course_key):
    """
    Validate that ``course_key`` parses as an opaque course key.

    Raises:
        ValidationError: if the key is malformed.
    """
    try:
        CourseKey.from_string(course_key)
    except InvalidKeyError as err:
        # Chain the original parse error for easier debugging (PEP 3134);
        # the bare re-raise previously discarded it.
        raise ValidationError(_("Invalid course key.")) from err
class AbstractCredential(TimeStampedModel):
    """
    Abstract Credentials configuration model.

    .. no_pii: This model has no PII.
    """

    # Every credential configuration is scoped to a Django Site.
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
    # Defaults to False: new configurations start inactive.
    is_active = models.BooleanField(default=False)

    class Meta:
        abstract = True
class Signatory(TimeStampedModel):
    """
    Signatory model to add certificate signatories.

    .. no_pii: This model has no learner PII. The name used here is the name of the professor who signed the
    certificate.
    """

    name = models.CharField(max_length=255)
    title = models.CharField(max_length=255)
    # Shown instead of the issuing organization's name when set.
    organization_name_override = models.CharField(
        max_length=255,
        null=True,
        blank=True,
        help_text=_("Signatory organization name if its different from issuing organization."),
    )
    # Stored under signatory_assets_path (keyed by this row's PK);
    # validate_image caps the file size at 250KB.
    image = models.ImageField(
        help_text=_("Image must be square PNG files. The file size should be under 250KB."),
        upload_to=signatory_assets_path,
        validators=[validate_image],
    )

    class Meta:
        verbose_name_plural = "Signatories"

    def __str__(self):
        return f"{self.name}, {self.title}"

    def save(self, *args, **kwargs):
        """
        A primary key/ID will not be assigned until the model is written to
        the database. Given that our file path relies on this ID, save the
        model initially with no file. After the initial save, update the file
        and save again. All subsequent saves will write to the database only
        once.
        """
        # First save of a brand-new row: stash the image so the row gets a PK
        # before the upload path (which embeds the PK) is computed.
        if self.pk is None:
            temp_image = self.image
            self.image = None
            super().save(*args, **kwargs)
            self.image = temp_image
        super().save(force_update=True)
class AbstractCertificate(AbstractCredential):
    """
    Abstract Certificate configuration to support multiple type of certificates
    i.e. Programs, Courses.

    .. no_pii: This model has no PII.
    """

    signatories = models.ManyToManyField(Signatory)
    # Optional display override for the certificate title.
    title = models.CharField(
        max_length=255,
        null=True,
        blank=True,
        help_text="Custom certificate title to override default display_name for a course/program.",
    )

    class Meta:
        abstract = True
class UserCredential(TimeStampedModel):
    """
    Credentials issued to a learner.

    .. pii: Stores username for a user.
        pii values: username
    .. pii_types: username
    .. pii_retirement: retained
    """

    # NOTE(review): these class-level constants duplicate the values used by
    # constants.UserCredentialStatus below (revoke() uses REVOKED here) --
    # presumably the same strings; verify against the constants module.
    AWARDED, REVOKED = (
        "awarded",
        "revoked",
    )
    STATUSES_CHOICES = (
        (AWARDED, _("awarded")),
        (REVOKED, _("revoked")),
    )

    # Generic FK to either a CourseCertificate or a ProgramCertificate.
    credential_content_type = models.ForeignKey(
        ContentType,
        limit_choices_to={"model__in": ("coursecertificate", "programcertificate")},
        on_delete=models.CASCADE,
    )
    credential_id = models.PositiveIntegerField()
    credential = GenericForeignKey("credential_content_type", "credential_id")
    username = models.CharField(max_length=255, db_index=True)
    status = models.CharField(
        max_length=255,
        choices=_choices(constants.UserCredentialStatus.AWARDED, constants.UserCredentialStatus.REVOKED),
        default=constants.UserCredentialStatus.AWARDED,
    )
    download_url = models.CharField(
        max_length=255, blank=True, null=True, help_text=_("URL at which the credential can be downloaded")
    )
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)

    class Meta:
        # One credential per (user, certificate) pair.
        unique_together = (("username", "credential_content_type", "credential_id"),)

    def get_absolute_url(self):
        # The render URL takes the dash-less hex form of the UUID.
        return reverse("credentials:render", kwargs={"uuid": self.uuid.hex})

    def revoke(self):
        """Sets the status to revoked, and saves this instance."""
        self.status = UserCredential.REVOKED
        self.save()
class CourseCertificate(AbstractCertificate):
    """
    Configuration for Course Certificates.

    .. no_pii: This model has no PII.
    """

    course_id = models.CharField(max_length=255, validators=[validate_course_key])
    course_run = models.OneToOneField(CourseRun, null=True, on_delete=models.PROTECT)
    certificate_available_date = models.DateTimeField(
        null=True,
        blank=True,
        help_text=_(
            "The certificate available date and time that is set in Studio and copied to Credentials. "
            "This should be edited in Studio."
        ),
    )
    certificate_type = models.CharField(
        max_length=255,
        choices=_choices(
            constants.CertificateType.HONOR,
            constants.CertificateType.PROFESSIONAL,
            constants.CertificateType.VERIFIED,
            constants.CertificateType.NO_ID_PROFESSIONAL,
            constants.CertificateType.MASTERS,
        ),
    )
    # Reverse side of UserCredential.credential for course certificates.
    user_credentials = GenericRelation(
        UserCredential,
        content_type_field="credential_content_type",
        object_id_field="credential_id",
        related_query_name="course_credentials",
    )

    class Meta:
        unique_together = (("course_id", "certificate_type", "site"),)
        verbose_name = "Course certificate configuration"

    @cached_property
    def course_key(self):
        # Parsed CourseKey for course_id, computed once per instance.
        return CourseKey.from_string(self.course_id)
class ProgramCertificate(AbstractCertificate):
    """
    Configuration for Program Certificates.

    .. no_pii: This model has no PII.
    """

    program_uuid = models.UUIDField(db_index=True, null=False, blank=False, verbose_name=_("Program UUID"))
    # PROTECT prevents the Program from being delete if it's being used for a program cert. This allows copy_catalog
    # to be safer when deleting
    program = models.OneToOneField(Program, null=True, on_delete=models.PROTECT)
    # Reverse side of UserCredential.credential for program certificates.
    user_credentials = GenericRelation(
        UserCredential,
        content_type_field="credential_content_type",
        object_id_field="credential_id",
        related_query_name="program_credentials",
    )
    use_org_name = models.BooleanField(
        default=False,
        help_text=_(
            "Display the associated organization's name (e.g. ACME University) "
            "instead of its short name (e.g. ACMEx)"
        ),
        verbose_name=_("Use organization name"),
    )
    include_hours_of_effort = models.BooleanField(
        default=False,
        help_text="Display the estimated total number of hours needed to complete all courses in the program. This "
        "feature will only be displayed in the certificate if the attribute 'Total hours of effort' has "
        "been set for the program in Discovery.",
    )
    language = models.CharField(
        max_length=8, null=True, help_text="Locale in which certificates for this program will be rendered"
    )

    def __str__(self):
        return f"ProgramCertificate: {self.program_uuid}"

    class Meta:
        verbose_name = "Program certificate configuration"
        unique_together = (("site", "program_uuid"),)

    @cached_property
    def program_details(self):
        """Returns details about the program associated with this certificate."""
        program_details = get_program_details_by_uuid(uuid=self.program_uuid, site=self.site)
        if not program_details:
            msg = f"No matching program with UUID [{self.program_uuid}] in credentials catalog for program certificate"
            raise NoMatchingProgramException(msg)
        # Apply this configuration's display options to the catalog data.
        if self.use_org_name:
            for org in program_details.organizations:
                org.display_name = org.name
        if not self.include_hours_of_effort:
            program_details.hours_of_effort = None
        program_details.credential_title = self.title
        return program_details
class UserCredentialAttribute(TimeStampedModel):
    """
    Different attributes of User's Credential such as white list, grade etc.

    .. no_pii: This model has no PII.
    """

    user_credential = models.ForeignKey(UserCredential, related_name="attributes", on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    value = models.CharField(max_length=255)

    class Meta:
        # At most one value per attribute name per credential.
        unique_together = (("user_credential", "name"),)
class ProgramCompletionEmailConfiguration(TimeStampedModel):
    """
    Template to add additional content into the program completion emails.

    identifier should either be a:
    - UUID <string> (for a specific program)
    - program type <string> (for a program type)
    - or "default" (the DEFAULT_TEMPLATE_IDENTIFIER) to be the global template used for all programs

    html_template should be the HTML version of the email
    plaintext_template should be the plaintext version of the email
    enabled is what determines if we send the emails at all

    .. no_pii: This model has no PII.
    """

    DEFAULT_TEMPLATE_IDENTIFIER = "default"

    # identifier will either be a:
    # - UUID <string> (for a specific program)
    # - program type <string> (for a program type)
    # - or "default" (the DEFAULT_TEMPLATE_IDENTIFIER) to be the global template used for all programs
    identifier = models.CharField(
        max_length=50,
        unique=True,
        help_text=(
            """Should be either "default" to affect all programs, the program type slug, or the UUID of the program. """
            """Values are unique."""
        ),
    )
    html_template = models.TextField(
        help_text=("For HTML emails." "Allows tags include (a, b, blockquote, div, em, i, li, ol, span, strong, ul)")
    )
    plaintext_template = models.TextField(help_text="For plaintext emails. No formatting tags. Text will send as is.")
    enabled = models.BooleanField(default=False)
    # Full audit history of template changes (django-simple-history).
    history = HistoricalRecords()

    def save(self, **kwargs):
        # Sanitize the HTML template down to the allowed tag whitelist
        # before persisting.
        self.html_template = bleach.clean(self.html_template, tags=settings.ALLOWED_EMAIL_HTML_TAGS)
        super().save(**kwargs)

    @classmethod
    def get_email_config_for_program(cls, program_uuid, program_type_slug):
        """
        Gets the email config for the program, with the most specific match being returned,
        or None of there are no matches

        Because the UUID of the program will have hyphens, but we want to make it easy on PCs copying values,
        we will check both the hyphenated version, and an unhyphenated version (.hex)
        """
        # By converting the uuid parameter to a string then back to a UUID we can guarantee it will be a UUID later on
        converted_program_uuid = uuid.UUID(str(program_uuid))
        # Most-specific first: program UUID (both spellings), then program
        # type, then the global default template.
        return (
            cls.objects.filter(identifier=converted_program_uuid).first()
            or cls.objects.filter(identifier=converted_program_uuid.hex).first()
            or cls.objects.filter(identifier=program_type_slug).first()
            or cls.objects.filter(identifier=cls.DEFAULT_TEMPLATE_IDENTIFIER).first()
        )
|
agpl-3.0
| 5,030,639,838,359,889,000
| 32.76
| 120
| 0.670379
| false
| 4.249748
| true
| false
| false
|
jmosky12/huxley
|
scripts/assignment_db.py
|
1
|
1357
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import os
from os import environ
from os.path import abspath, dirname
import sys
sys.path.append(abspath(dirname(dirname(__file__))))
os.environ['DJANGO_SETTINGS_MODULE'] = 'huxley.settings'
from huxley.core.models import Country, Committee, Assignment
from xlrd import open_workbook
# Load the first sheet of the country/committee assignment matrix.
# NOTE(review): this is Python 2 code (print statements below).
s = open_workbook('Country Matrix.xlsx').sheet_by_index(0)
# Countries occupy rows 3..nrows-3; committees occupy columns 1..21.
country_range = s.nrows-2
committee_range = 22
# Create countries; rows past 204 are flagged as "special".
for row in range(3, country_range):
    Country.objects.get_or_create(name=s.cell(row, 0).value, special=(True if row > 204 else False))
# Create committees; row 0 marks SINGLE vs double delegations,
# and columns past 7 are flagged as "special".
for col in range(1, committee_range):
    Committee.objects.get_or_create(name=s.cell(1, col).value, full_name=s.cell(2, col).value, delegation_size=(1 if s.cell(0, col).value == 'SINGLE' else 2), special=(True if col > 7 else False))
# A non-empty matrix cell means the country is assigned to that committee.
for row in range(3, country_range):
    for col in range(1, committee_range):
        if s.cell(row, col).value:
            print s.cell(1, col).value
            print s.cell(2, col).value
            print s.cell(row, 0).value
            print s.cell(row,col).value
            print
            country = Country.objects.get(name=s.cell(row, 0).value)
            committee = Committee.objects.get(name=s.cell(1, col).value)
            assignment = Assignment(committee=committee, country=country)
            assignment.save()
|
bsd-3-clause
| 3,738,094,941,541,147,600
| 34.710526
| 193
| 0.725866
| false
| 2.982418
| false
| false
| false
|
Naught0/qtbot
|
cogs/osrs.py
|
1
|
15138
|
import json
from urllib.parse import quote_plus
from typing import Union
import discord
from discord.ext import commands
from utils import aiohttp_wrap as aw
from utils import dict_manip as dm
from utils.user_funcs import PGDB
class OSRS(commands.Cog):
    def __init__(self, bot):
        """Wire up shared bot services and static lookup data for the OSRS cog."""
        self.bot = bot
        self.db = PGDB(bot.pg_con)
        self.aio_session = bot.aio_session
        self.redis_client = bot.redis_client
        # RSBuddy endpoints: item-name index and GE price summary.
        self.items_uri = "https://rsbuddy.com/exchange/names.json"
        # self.api_uri = 'https://api.rsbuddy.com/grandExchange?a=guidePrice&i={}'
        self.prices_uri = "https://storage.googleapis.com/osb-exchange/summary.json"
        # Official hiscores: raw API and the human-facing page linked in embeds.
        self.player_uri = (
            "http://services.runescape.com/m=hiscore_oldschool/index_lite.ws?player={}"
        )
        self.player_click_uri = "http://services.runescape.com/m=hiscore_oldschool/hiscorepersonal.ws?user1={}"
        # Order matters: get_user_info() zips this list against the API
        # response, so it must match the hiscores output order exactly.
        self.skills = [
            "Overall",
            "Attack",
            "Defense",
            "Strength",
            "Hitpoints",
            "Ranged",
            "Prayer",
            "Magic",
            "Cooking",
            "Woodcutting",
            "Fletching",
            "Fishing",
            "Firemaking",
            "Crafting",
            "Smithing",
            "Mining",
            "Herblore",
            "Agility",
            "Thieving",
            "Slayer",
            "Farming",
            "Runecrafting",
            "Hunter",
            "Construction",
            "Clue (Easy)",
            "Clue (Medium)",
            "Clue (All)",
            "Bounty Hunter: Rogue",
            "Bounty Hunter: Hunter",
            "Clue (Hard)",
            "LMS",
            "Clue (Elite)",
            "Clue (Master)",
        ]
        # Emoji shown next to each skill name (used by the `stat` command).
        self.statmoji = {
            "attack": ":dagger:",
            "strength": ":fist:",
            "defense": ":shield:",
            "ranged": ":bow_and_arrow:",
            "prayer": ":pray:",
            "magic": ":sparkles:",
            "runecrafting": ":crystal_ball:",
            "construction": ":house:",
            "hitpoints": ":heart:",
            "agility": ":runner:",
            "herblore": ":herb:",
            "thieving": ":spy:",
            "crafting": ":hammer_pick:",
            "fletching": ":cupid:",
            "slayer": ":skull_crossbones:",
            "hunter": ":feet:",
            "mining": ":pick:",
            "fishing": ":fish:",
            "cooking": ":cooking:",
            "firemaking": ":fire:",
            "woodcutting": ":deciduous_tree:",
            "farming": ":corn:",
        }
        # Canned user-facing error messages.
        self.user_missing = "Please either add a username or supply one."
        self.user_not_exist = "Couldn't find a user matching {}"
        self.color = discord.Color.dark_gold()
        # Item name -> metadata (including GE item id), loaded once at startup.
        with open("data/item-data.json") as f:
            self.item_data = json.load(f)
@staticmethod
def get_level(stat: str) -> int:
"""Helps parse player level from strings that look like 0,0,0"""
return int(stat.split(",")[1])
def calc_combat(self, user_info: dict) -> str:
"""Helper method which returns the player's combat level
Formula here: http://oldschoolrunescape.wikia.com/wiki/Combat_level"""
at = self.get_level(user_info["Attack"])
st = self.get_level(user_info["Strength"])
de = self.get_level(user_info["Defense"])
hp = self.get_level(user_info["Hitpoints"])
rn = self.get_level(user_info["Ranged"])
mg = self.get_level(user_info["Magic"])
pr = self.get_level(user_info["Prayer"])
base = 0.25 * (de + hp + (pr // 2))
melee = 0.325 * (at + st)
range = 0.325 * ((rn // 2) + rn)
mage = 0.325 * ((mg // 2) + mg)
return str(int(base + max(melee, range, mage)))
async def get_user_info(self, username: str) -> Union[dict, None]:
"""Helper method to see whether a user exists, if so, retrieves the data and formats it in a dict
returns None otherwise"""
user_info = await aw.aio_get_text(
self.aio_session, self.player_uri.format(quote_plus(username))
)
if user_info is None:
return None
# Player data is returned like so:
# Rank, Level, XP
# For clues, LMS, and Bounty Hunter it's:
# Rank, Score
# -1's denote no rank or xp
return dict(zip(self.skills, user_info.split()))
@commands.group(
name="osrs", aliases=["hiscores", "hiscore", "rs"], invoke_without_command=True
)
async def _osrs(self, ctx, *, username: str = None):
"""Get information about your OSRS stats"""
image = None
if username is None:
username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
image = await self.db.fetch_user_info(ctx.author.id, "osrs_pic")
# No users found
if not username:
return await ctx.error(self.user_missing)
# User doesn't exist
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
# Create embed
em = discord.Embed(
title=f":bar_chart: {username}",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
# See get_user_info for why things are wonky and split like this
overall = user_info["Overall"].split(",")
em.add_field(
name="Combat Level", value=self.calc_combat(user_info), inline=False
)
em.add_field(name="Total Level", value=f"{int(overall[1]):,}")
em.add_field(name="Overall Rank", value=f"{int(overall[0]):,}")
# Set image if one exists & if the player == the author
if image:
em.set_image(url=image)
await ctx.send(embed=em)
@_osrs.command()
async def user(self, ctx, *, username: str):
"""Save your OSRS username so that you don't have to supply it later"""
await self.db.insert_user_info(ctx.author.id, "osrs_name", username)
await ctx.success(f"Added {username} ({ctx.author.display_name}) to database!")
@_osrs.command()
async def rmuser(self, ctx):
"""Remove your OSRS username from the database"""
await self.db.remove_user_info(ctx.author.id, "osrs_name")
await ctx.success(f"Removed username from the database.")
@_osrs.command(aliases=["avatar", "pic"])
async def picture(self, ctx, *, url: str):
"""Add a custom picture of your OSRS character to appear in the osrs command
(Only when called by you)"""
await self.db.insert_user_info(ctx.author.id, "osrs_pic", url)
await ctx.success(f"Added picture successfully")
@_osrs.command(aliases=["rmavatar", "rmpic"])
async def rmpicture(self, ctx):
"""Remove your custom OSRS picture from the database"""
await self.db.remove_user_info(ctx.author.id, "osrs_pic")
await ctx.success(f"Removed picture.")
    @_osrs.command(aliases=["clues", "clu", "cluescroll", "cluescrolls"])
    async def clue(self, ctx, *, username: str = None):
        """Get your clue scroll counts & ranks"""
        # Fall back to the caller's saved username.
        if username is None:
            username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
        if not username:
            return await ctx.error(self.user_missing)
        user_info = await self.get_user_info(username)
        if user_info is None:
            return await ctx.error(self.user_not_exist.format(username))
        em = discord.Embed(
            title=f":scroll: {username}'s clues",
            url=self.player_click_uri.format(quote_plus(username)),
            color=self.color,
        )
        # Pick out the clue-related hiscore entries; values are "rank,count".
        for item in user_info:
            if {"clue"} & set(item.lower().split()):
                v = user_info[item].split(",")
                # Handle no rank
                if v == ["-1", "-1"]:
                    v = ["n/a", "0"]
                    em.add_field(name=item, value=f"Rank: {v[0]} ({v[1]} clues)")
                # Cast to int for str formatting otherwise
                else:
                    v = [int(x) for x in v]
                    em.add_field(name=item, value=f"Rank: {v[0]:,} ({v[1]:,} clues)")
        # Now to swap Clue (All) to the first field
        # NOTE(review): relies on discord.Embed's private ``_fields`` list and
        # on "Clue (All)" being the third clue entry added -- fragile.
        overall = em._fields.pop(2)
        em._fields.insert(0, overall)
        await ctx.send(embed=em)
    @_osrs.command(aliases=["cb"])
    async def combat(self, ctx, *, username: str = None):
        """Check the combat stats of yourself or someone else"""
        # Fall back to the caller's saved username.
        if username is None:
            username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
        if not username:
            return await ctx.error(self.user_missing)
        user_info = await self.get_user_info(username)
        if user_info is None:
            return await ctx.error(self.user_not_exist.format(username))
        em = discord.Embed(
            title=f":right_facing_fist::left_facing_fist: {username}'s Combat Stats",
            url=self.player_click_uri.format(quote_plus(username)),
            color=self.color,
        )
        # Two display columns of stats, rendered as embed fields below.
        col1 = [
            f":crossed_swords: Combat `{self.calc_combat(user_info)}`",
            f':heart: Hitpoints `{self.get_level(user_info["Hitpoints"])}`',
            f':dagger: Attack `{self.get_level(user_info["Attack"])}`',
            f':fist: Strength `{self.get_level(user_info["Strength"])}`',
        ]
        col2 = [
            f':shield: Defence `{self.get_level(user_info["Defense"])}`',
            f':bow_and_arrow: Range `{self.get_level(user_info["Ranged"])}`',
            f':sparkles: Magic `{self.get_level(user_info["Magic"])}`',
            f':pray: Prayer `{self.get_level(user_info["Prayer"])}`',
        ]
        # Zero-width-space names render as blank column headers.
        em.add_field(name="\u200B", value="\n".join(col1))
        em.add_field(name="\u200B", value="\n".join(col2))
        await ctx.send(embed=em)
@_osrs.command(aliases=["stats"])
async def stat(self, ctx, username: str, stat_name: str):
"""Get a specific stat for a user
Note:
Be sure to wrap the username in quotation marks if it has spaces
Username is required here per the limitations of Discord, sorry"""
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
# If input doesn't match exactly
# Hopefully this handles common abbreviations (but I'm nearly sure it won't)
if stat_name.lower() not in self.statmoji:
stat_name = dm.get_closest(self.statmoji, stat_name)
em = discord.Embed(
title=f"{self.statmoji[stat_name.lower()]} {stat_name.title()} - {username}",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
labels = ["Rank", "Level", "XP"]
stat_list = user_info[stat_name.title()].split(",")
for idx, label in enumerate(labels):
em.add_field(name=label, value=f"{int(stat_list[idx]):,}")
await ctx.send(embed=em)
    @_osrs.command(name="ge", invoke_without_command=True)
    async def ge_search(self, ctx, *, query):
        """ Get the buying/selling price and quantity of an OSRS item """
        # All items in the JSON are lowercase
        item = query.lower()
        # Checks whether item in json file
        if item in self.item_data:
            item_id = self.item_data[item]["id"]
        # Uses closest match to said item if no exact match
        else:
            item = dm.get_closest(self.item_data, item)
            item_id = self.item_data[item]["id"]
        # Prefer the cached price summary; otherwise hit the API and cache
        # the result for five minutes.
        if await self.redis_client.exists("osrs_prices"):
            item_prices = json.loads((await self.redis_client.get("osrs_prices")))
        else:
            item_prices = await aw.aio_get_json(self.aio_session, self.prices_uri)
            if not item_prices:
                return await ctx.error(
                    "The RSBuddy API is dead yet again. Try again in a bit."
                )
            await self.redis_client.set(
                "osrs_prices", json.dumps(item_prices), ex=(5 * 60)
            )
        # Create pretty embed
        em = discord.Embed(title=item.capitalize(), color=self.color)
        em.url = f"https://rsbuddy.com/exchange?id={item_id}"
        em.set_thumbnail(
            url=f"https://services.runescape.com/m=itemdb_oldschool/obj_big.gif?id={item_id}"
        )
        em.add_field(
            name="Buying Price", value=f'{item_prices[item_id]["buy_average"]:,}gp'
        )
        em.add_field(
            name="Selling Price", value=f'{item_prices[item_id]["sell_average"]:,}gp'
        )
        em.add_field(
            name="Buying Quantity", value=f'{item_prices[item_id]["buy_quantity"]:,}/hr'
        )
        em.add_field(
            name="Selling Quantity",
            value=f'{item_prices[item_id]["sell_quantity"]:,}/hr',
        )
        await ctx.send(embed=em)
@commands.command(name="geupdate")
@commands.is_owner()
async def _update(self, ctx):
"""A command to update the OSRS GE item list"""
new_items = await aw.aio_get_json(self.aio_session, self.items_uri)
# This 503's a lot, if not every time, not sure yet
if new_items is None:
em = discord.Embed(
title=":no_entry_sign: RS buddy is serving up a 503!",
color=discord.Color.dark_red(),
)
return await ctx.send(embed=em)
if len(new_items) == len(self.item_data):
em = discord.Embed(
title=":no_entry_sign: Items already up-to-date boss!",
color=discord.Color.dark_red(),
)
return await ctx.send(embed=em)
filtered_items = {}
for item in new_items:
filtered_items[new_items[item]["name"].lower()] = {
"id": item,
"name": new_items[item]["name"],
}
with open("data/item-data.json", "w") as f:
json.dump(filtered_items, f, indent=2)
self.item_data = filtered_items
num_updated = len(new_items) - len(self.item_data)
await ctx.success(f"Updated `{num_updated}` item(s).")
# The osbuddy api just 503s every time, keeping this commented in the hopes that it works in the future
# em = discord.Embed(title=':white_check_mark: Check here',
# url='https://rsbuddy.com/exchange/names.json',
# color=self.color)
# em.description = ("```py\n"
# "data = requests.get('https://rsbuddy.com/exchange/names.json').json() d = {}\n\n"
# "for item in data:\n"
# "\td[data[item]['name'].lower()] = {'id': item, 'name': data[item]['name']}"
# "```")
# await ctx.send(embed=em)
def setup(bot):
    # Standard discord.py extension entry point: register the OSRS cog.
    bot.add_cog(OSRS(bot))
|
mit
| 6,672,994,157,585,110,000
| 36.93985
| 111
| 0.544854
| false
| 3.583807
| false
| false
| false
|
ndp-systemes/odoo-addons
|
account_invoice_dunning/models/dunning.py
|
1
|
9802
|
# -*- coding: utf8 -*-
#
# Copyright (C) 2017 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp import fields, models, api, _, osv
class AccountInvoiceRelanceConfig(models.Model):
    """Dunning type: configuration of one dunning (reminder) level.

    Each type defines the report and mail template used when a dunning of
    this level is printed or e-mailed, plus an optional sequence used to
    name the generated dunnings.
    """
    _name = 'account.invoice.dunning.type'

    def _get_domain_mail_template(self):
        # Restrict selectable templates to those bound to the dunning model.
        return [('model_id', '=', self.env.ref('account_invoice_dunning.model_account_invoice_dunning').id)]

    name = fields.Char(required=True, string=u"Name")
    # Rank of this dunning level (1st reminder, 2nd reminder, ...).
    number = fields.Integer(u"Dunning number", default=1, required=True)
    sequence_id = fields.Many2one('ir.sequence', string=u"Sequence")
    report_id = fields.Many2one('ir.actions.report.xml', u"Report", domain=[('model', '=', 'account.invoice.dunning')],
                                required=True)
    mail_template_id = fields.Many2one('email.template', u"Mail Template", domain=_get_domain_mail_template,
                                       required=True)
    company_id = fields.Many2one('res.company', u"Company", groups='base.group_multi_company',
                                 default=lambda self: self.env.user.company_id)

    @api.multi
    def _get_dunning_name(self):
        # Next number from the configured sequence; empty string if no sequence.
        return self.ensure_one().sequence_id and self.sequence_id._next() or ""

    _sql_constraints = [
        ('dunning_number_unique', 'unique (number, company_id)', u"The Dunning number must be unique per company !"),
    ]
class AccountInvoiceRelance(models.Model):
    """A dunning (payment reminder) grouping open invoices of one partner.

    One draft dunning exists per partner / dunning type / company; it is
    marked as sent once the report is printed or the e-mail wizard is
    confirmed.
    """
    _name = 'account.invoice.dunning'
    name = fields.Char(u"Name")
    date_done = fields.Date(u"Dunning date done", readonly=True)
    state = fields.Selection([
        ('draft', u"Draft"),
        ('send', u"Send"),
        ('cancel', u"Cancel"),
        ('done', u"Done")], string=u"State", readonly=True, default='draft')
    partner_id = fields.Many2one('res.partner', u"Partner")
    company_id = fields.Many2one('res.company', u"Company", groups='base.group_multi_company',
                                 default=lambda self: self.env.user.company_id)
    dunning_type_id = fields.Many2one('account.invoice.dunning.type', string=u"Dunning Type")
    # Report/sequence/template are taken from the dunning type (related fields).
    report_id = fields.Many2one('ir.actions.report.xml', u"Report", related='dunning_type_id.report_id', readonly=True)
    sequence_id = fields.Many2one('ir.sequence', related='dunning_type_id.sequence_id', readonly=True)
    mail_template_id = fields.Many2one('email.template', u"Mail Template",
                                      related='dunning_type_id.mail_template_id', readonly=True)
    invoice_ids = fields.Many2many('account.invoice', string=u"Invoices")
    amount_total_signed = fields.Float(u"Total", compute='_compute_amounts')
    residual_signed = fields.Float(u"Residual", compute='_compute_amounts')

    @api.multi
    def _compute_amounts(self):
        """Sum the signed total and residual of all linked invoices."""
        for rec in self:
            amount = 0
            residual = 0
            for invoice in rec.invoice_ids:
                amount += invoice.amount_total_signed
                residual += invoice.residual_signed
            rec.amount_total_signed = amount
            rec.residual_signed = residual

    @api.model
    def _get_existing_dunning(self, invoice_id, dunning_config_id):
        """Return the open draft dunning matching the invoice's partner/type/company."""
        return self.search(self._get_existing_dunning_domain(invoice_id, dunning_config_id))

    @api.multi
    def action_done(self):
        self.write({'state': 'done'})

    @api.multi
    def action_cancel(self):
        self.write({'state': 'cancel'})

    @api.model
    def _get_existing_dunning_domain(self, invoice_id, dunning_type_id):
        # Only draft dunnings can still accept additional invoices.
        return [('partner_id', '=', invoice_id.partner_id.id),
                ('dunning_type_id', '=', dunning_type_id.id),
                ('company_id', '=', invoice_id.company_id.id),
                ('state', '=', 'draft')
                ]

    @api.multi
    def action_print_dunning(self):
        """Print the dunning report and mark the dunning as sent."""
        self.ensure_one()
        res = self.env['report'].with_context(active_ids=self.ids).get_action(self, self.report_id.report_name)
        self.write({
            'state': 'send',
            'date_done': fields.Date.today(),
        })
        return res

    @api.multi
    def action_send_mail(self):
        """Open the mail composer pre-filled with the dunning's mail template."""
        self.ensure_one()
        compose_form = self.env.ref('mail.email_compose_message_wizard_form', False)
        ctx = dict(
            default_model=self._name,
            default_res_id=self.id,
            default_composition_mode='comment',
            default_template_id=self.mail_template_id.ensure_one().id,
        )
        # final_dunning_state is read back by mail.compose.message.send_mail().
        ctx.update(self._default_dict_send_mail_action())
        return {
            'name': _(u"Send a message"),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form.id, 'form')],
            'view_id': compose_form.id,
            'target': 'new',
            'context': ctx,
        }

    def _default_dict_send_mail_action(self):
        # State the dunning moves to once the composer actually sends the mail.
        return {'final_dunning_state': 'send'}

    @api.multi
    def _get_action_view(self):
        """Return an act_window showing these dunnings (list if several, form if one)."""
        if len(self.ids) > 1:
            ctx = dict(self.env.context,
                       search_default_group_partner_id=True,
                       search_default_group_dunning_type_id=True)
            res = {
                'name': _(u"Dunning"),
                'view_type': 'form',
                'view_mode': 'tree,form',
                'res_model': 'account.invoice.dunning',
                'type': 'ir.actions.act_window',
                'context': ctx,
                'domain': [('id', 'in', self.ids)]
            }
        else:
            res = {
                'name': self.name,
                'view_type': 'form',
                'view_mode': 'form',
                'view_id': self.env.ref("account_invoice_dunning.invoice_dunning_form_view").id,
                'res_model': 'account.invoice.dunning',
                'res_id': self.id,
                'type': 'ir.actions.act_window',
            }
        return res
class AccountInvoice(models.Model):
    """Adds dunning tracking and creation to customer invoices."""
    _inherit = 'account.invoice'
    invoice_dunning_ids = fields.Many2many('account.invoice.dunning', string=u"Dunnings")
    dunning_number = fields.Integer(u"Number of Dunning send", compute='_compute_dunning_number')

    @api.multi
    def _compute_dunning_number(self):
        """Count the dunnings already sent for each invoice."""
        for rec in self:
            rec.dunning_number = len(rec.invoice_dunning_ids.filtered(lambda it: it.state == 'send'))

    @api.multi
    def action_create_dunning(self):
        """Create (or join) the next-level draft dunning for each invoice.

        Returns an act_window displaying the impacted dunnings.
        """
        result = self.env['account.invoice.dunning']
        for rec in self:
            rec._validate_to_create_dunning()
            next_dunning_type = rec._get_next_dunning_type()
            if next_dunning_type:
                existing_dunning = self.env['account.invoice.dunning']._get_existing_dunning(rec, next_dunning_type)
                if existing_dunning:
                    # Attach the invoice to the already-open draft dunning.
                    existing_dunning.invoice_ids = [(4, rec.id, {})]
                else:
                    existing_dunning = self.env['account.invoice.dunning'].create(
                        rec._prepare_invoice_dunning(next_dunning_type))
                result |= existing_dunning
            else:
                rec._no_next_dunning()
        return result._get_action_view()

    @api.multi
    def _no_next_dunning(self):
        # BUG FIX: interpolate AFTER _() so the literal string can be found in
        # the translation table; _(u"... %s" % x) looks up the already-filled
        # string, which never matches a translation entry.
        raise osv.osv.except_orm(_(u"Error !"), _(u"No next Dunning Type for the invoice %s") % self.number)

    @api.multi
    def _validate_to_create_dunning(self):
        """Raise unless the invoice is an open customer invoice."""
        if self.state != 'open':
            raise osv.osv.except_orm(_(u"Error !"), _(u"You can't create a Dunning on an invoice with the state draft"))
        if self.type != 'out_invoice':
            raise osv.osv.except_orm(_(u"Error !"), _(u"You can only create a Dunning on an Sale Invoice"))

    @api.multi
    def _get_next_dunning_type(self):
        """Return the lowest-numbered dunning type not yet sent for this invoice."""
        dunning_type_ids = self.invoice_dunning_ids.filtered(lambda it: it.state == 'send').mapped('dunning_type_id')
        return self.env['account.invoice.dunning.type'].search(
            [('id', 'not in', dunning_type_ids.ids), ('company_id', '=', self.company_id.id)],
            order='number asc', limit=1)

    @api.multi
    def _prepare_invoice_dunning(self, dunning_type_id):
        """Values for creating a new dunning holding this invoice."""
        self.ensure_one()
        return {
            'dunning_type_id': dunning_type_id.id,
            'invoice_ids': [(4, self.id, {})],
            'partner_id': self.partner_id.id,
            'company_id': self.company_id.id,
            'name': dunning_type_id._get_dunning_name()
        }
class MailComposeMessage(models.TransientModel):
    """Mark the related dunning as sent once its e-mail wizard is confirmed."""
    _inherit = 'mail.compose.message'

    @api.multi
    def send_mail(self):
        ctx = self.env.context or {}
        # The dunning's action_send_mail() puts final_dunning_state in the
        # composer context; apply it to the dunning when the mail goes out.
        is_dunning_mail = (
            ctx.get('default_model') == 'account.invoice.dunning'
            and ctx.get('default_res_id', -1) > 0
            and ctx.get('final_dunning_state')
        )
        if is_dunning_mail:
            dunning = self.env['account.invoice.dunning'].browse(ctx['default_res_id'])
            dunning.write({
                'state': ctx.get('final_dunning_state'),
                'date_done': fields.Date.today(),
            })
        return super(MailComposeMessage, self).send_mail()
|
agpl-3.0
| -1,621,267,915,577,783,600
| 40.180672
| 120
| 0.584124
| false
| 3.552374
| false
| false
| false
|
jleivaizq/freesquare
|
freesquare/geo/feeds/gadm.py
|
1
|
4921
|
# -*- coding: utf-8 -*-
import logging
import os
import zipfile
from collections import defaultdict
from django.contrib.gis.gdal import DataSource
from .utils import download
from ..models import Region
logger = logging.getLogger(__name__)
LEVELS = [r[1] for r in Region.REGION_LEVELS]  # human-readable name per admin level
# Map GADM spellings to the names used in our Region table.
LOCAL_DICT = {
    'Catalu\xf1a': 'Catalonia'
}
# GADM features whose names should never be imported as regions.
REGIONS_TO_IGNORE = ('las palmas', 'ceuta y melilla')
class Gadm:
    """
    Download and import Region Borders from Global Administrative Areas site
    """
    # Static attributes
    app_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + '/../..')
    url = 'http://data.biogeo.ucdavis.edu/data/gadm2/shp/'
    # Memoizes Region lookups per administrative level: {level: {name: Region}}.
    feature_cache = defaultdict(dict)

    def __init__(self, country_code, max_level=0):
        # country_code may be falsy, meaning: import every country in the DB.
        if country_code:
            self.country_code = country_code.upper()
        self.max_level = int(max_level)

    def find_region(self, iso_code, raw_name, level, parent_names):
        """Resolve a GADM feature name to a Region row, recursing up parents."""
        logger.debug('Searching for region: raw_name => {} iso_code => {} level => {}'
                     .format(raw_name, iso_code, level))
        result = None
        if raw_name in LOCAL_DICT:
            name = LOCAL_DICT[raw_name].lower()
        else:
            name = raw_name.lower()
        if level == 3:
            # Level-3 features are matched through their level-2 parent.
            return self.find_region(iso_code,
                                    parent_names[-1],
                                    level - 1,
                                    parent_names[:-1])
        if name in REGIONS_TO_IGNORE:
            return None
        if raw_name.lower() in self.feature_cache[level]:
            result = self.feature_cache[level][name]
        else:
            if parent_names:
                parent = self.find_region(iso_code, parent_names[-1], level - 1, parent_names[:-1])
                candidates = Region.objects.filter(level=min(level, Region.CITY))
                if parent:
                    candidates = Region.objects.filter(parent=parent)
                candidates = candidates.filter(name__iexact=name)
            else:
                candidates = Region.objects.filter(level=Region.COUNTRY)\
                    .filter(iso3=iso_code).filter(name__iexact=name)
            if candidates:
                result = candidates[0]
            # Cache misses too (None), to avoid repeating failed lookups.
            self.feature_cache[level][name] = result
        return result

    def _load_country_level_border(self, data_dir, name, level):
        """Load one shapefile level and attach its geometries to Regions."""
        try:
            # BUG FIX: removed a leftover `import ipdb; ipdb.set_trace()`
            # debugging breakpoint that stopped (or crashed) every import run.
            datasource = DataSource(os.path.join(data_dir, '{0}{1}.shp'.format(name, level)))
            layer = datasource[0]
            logger.info('Loading boundaries for {} ({})'.format(name, LEVELS[level]))
            for feature in layer:
                code = feature.get('ISO')
                if level:
                    region_names = [feature.get('NAME_{0}'.format(l)) for l in range(level + 1)]
                else:
                    region_names = [feature.get('NAME_ISO')]
                if 'n.a.' not in region_names[-1]:
                    region = self.find_region(code, region_names[-1], level, region_names[:-1])
                    if feature.geom:
                        if region:
                            region.geom = feature.geom
                            region.save()
                        else:
                            others = Region.objects.filter(name__iexact=region_names[-1])
                            if others:
                                logger.warning('Region not found for {} but {}'
                                               .format(region_names, others))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate.
            logger.exception('Could not load border of '
                             'level {} for country {}'.format(level, name))

    def load(self, country_code):
        """Download, unzip and import all requested admin levels of a country."""
        try:
            data_dir = os.path.join(self.app_dir, 'geo/data/gadm/{}'.format(country_code))
            filename = "{}_adm.zip".format(country_code)
            name, _ = filename.rsplit('.', 1)
            # Download the zip file from thematicmapping
            download(data_dir, filename, self.url + filename)
            # FIX: the archive file handle was previously never closed (leak);
            # context managers close both the zip and the underlying file.
            with open(os.path.join(data_dir, filename), mode='rb') as file_:
                with zipfile.ZipFile(file_) as zipfile_:
                    zipfile_.extractall(data_dir)
            for level in range(self.max_level + 1):
                self._load_country_level_border(data_dir, name, level)
        except Exception:
            logger.exception('Could not load borders of country {}'.format(country_code))

    def run(self):
        """Entry point: import the configured countries (or every country)."""
        if hasattr(self, 'country_code'):
            for code in self.country_code.split(','):
                self.load(code)
        else:
            for country in Region.objects.filter(level=Region.COUNTRY):
                self.load(country.iso3)
|
mit
| 6,486,755,235,228,144,000
| 33.412587
| 99
| 0.527535
| false
| 4.163283
| false
| false
| false
|
tensorflow/tfx
|
tfx/tools/cli/testdata/test_pipeline_local_1.py
|
1
|
2688
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Chicago taxi example using TFX on Local orchestrator."""
import os
from typing import Text
from absl import logging
from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen
from tfx.components.schema_gen.component import SchemaGen
from tfx.components.statistics_gen.component import StatisticsGen
from tfx.orchestration import metadata
from tfx.orchestration import pipeline
from tfx.orchestration.local import local_dag_runner
_pipeline_name = 'chicago_taxi_local'
# This example assumes the taxi data lives under $HOME/taxi/data/simple.
_taxi_root = os.path.join(os.environ['HOME'], 'taxi')
_data_root = os.path.join(_taxi_root, 'data', 'simple')
# Pipeline artifacts and metadata are kept under $HOME/tfx.
_tfx_root = os.path.join(os.environ['HOME'], 'tfx')
_pipeline_root = os.path.join(_tfx_root, 'pipelines', _pipeline_name)
# Sqlite ML-metadata db path.
_metadata_path = os.path.join(_tfx_root, 'metadata', _pipeline_name,
                              'metadata.db')
def _create_pipeline(pipeline_name: Text, pipeline_root: Text, data_root: Text,
                     metadata_path: Text) -> pipeline.Pipeline:
  """Implements the chicago taxi pipeline with TFX."""
  # Ingest the raw CSV training data into the pipeline.
  csv_ingest = CsvExampleGen(input_base=data_root)
  # Compute statistics for visualization and example validation.
  stats = StatisticsGen(examples=csv_ingest.outputs['examples'])
  # Infer a schema from the computed statistics.
  schema = SchemaGen(statistics=stats.outputs['statistics'])
  components = [csv_ingest, stats, schema]
  return pipeline.Pipeline(
      pipeline_name=pipeline_name,
      pipeline_root=pipeline_root,
      components=components,
      enable_cache=True,
      metadata_connection_config=metadata.sqlite_metadata_connection_config(
          metadata_path),
      additional_pipeline_args={},
  )
if __name__ == '__main__':
  logging.set_verbosity(logging.INFO)
  # Build and run the pipeline on the local (in-process) orchestrator.
  local_dag_runner.LocalDagRunner().run(
      _create_pipeline(
          pipeline_name=_pipeline_name,
          pipeline_root=_pipeline_root,
          data_root=_data_root,
          metadata_path=_metadata_path))
|
apache-2.0
| 2,677,516,085,262,274,600
| 37.956522
| 79
| 0.725446
| false
| 3.759441
| false
| false
| false
|
jericksanjuan/lab-student-draft
|
lab_student_draft/students/models.py
|
1
|
3490
|
from django.conf import settings
from django.db import models
from django.db.models.signals import pre_save
from model_utils.models import TimeStampedModel
from model_utils import Choices
from labs.models import Lab
_PICKVAL = 100  # score boost applied to a confirmed pick (see update_selection_score)
# TODO: Get minimum_groups from settings
# TODO: Get maximum groups from settings
class Batch(TimeStampedModel):
    """A draft batch: bounds the number of student groups per lab."""
    minimum_groups = models.IntegerField(default=1)
    maximum_groups = models.IntegerField(default=10)

    class Meta:
        verbose_name = "Batch"
        verbose_name_plural = "Batches"

    def __unicode__(self):
        # Label batches by the month/year they were created in.
        return u'{}-{}'.format(self.created.month, self.created.year)
class StudentGroup(TimeStampedModel):
    """A group of students taking part in one draft batch."""
    batch = models.ForeignKey('Batch')
    user = models.OneToOneField(settings.AUTH_USER_MODEL)
    # Lab the group was finally assigned to (set once the draft resolves).
    lab = models.ForeignKey(Lab, null=True, blank=True, related_name="assigned_set")
    # NOTE(review): null=True has no effect on ManyToManyField in Django;
    # consider dropping it in a later cleanup.
    group_preferences = models.ManyToManyField(
        Lab, through='GroupPreference', null=True, blank=True)
    has_preference = models.BooleanField('Has Submitted Preference', default=False)

    class Meta:
        verbose_name = "Student Group"
        verbose_name_plural = "Student Groups"

    def __unicode__(self):
        return u'{} group'.format(self.user)

    def students(self):
        # Comma-separated member names, for admin/display purposes.
        return ', '.join([unicode(x) for x in self.student_set.all()])
class Student(models.Model):
    """One member of a StudentGroup."""
    student_group = models.ForeignKey('StudentGroup')
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)

    class Meta:
        verbose_name = "Student"
        verbose_name_plural = "Students"

    def __unicode__(self):
        return u'{} {}'.format(self.first_name, self.last_name)
class GroupPreference(models.Model):
    """Through-model: how strongly a student group prefers a given lab."""
    student_group = models.ForeignKey('StudentGroup')
    lab = models.ForeignKey(Lab)
    preference = models.IntegerField(default=0)

    class Meta:
        verbose_name = "Group Preference"
        verbose_name_plural = "Group Preferences"
        unique_together = ('student_group', 'lab')

    def __unicode__(self):
        return u'{}={}-{}'.format(self.preference, self.student_group, self.lab)
class Selection(models.Model):
    """One lab's pick of a student group during a draft phase."""
    ITERATIONS = Choices('1', '2', '3')
    lab = models.ForeignKey(Lab)
    student_group = models.ForeignKey('StudentGroup')
    phase = models.CharField(max_length=1, choices=ITERATIONS)
    is_selected = models.BooleanField(default=False)
    # Preference score, boosted by _PICKVAL when is_selected (see the
    # update_selection_score pre_save hook below).
    selection_score = models.IntegerField(default=0)

    class Meta:
        verbose_name = "Selection"
        verbose_name_plural = "Selections"
        unique_together = ('lab', 'student_group', 'phase')

    def __unicode__(self):
        return u'{}: {}<>{}, Phase {}'.format(
            self.selection_score, self.lab,
            self.student_group, self.phase)

    @property
    def score_color(self):
        """Display color: green when the base preference exceeds 5, else yellow."""
        # Modulo strips the _PICKVAL (100) boost to recover the raw preference.
        base_score = self.selection_score % 100
        if base_score > 5:
            return 'green'
        else:
            return 'yellow'
def update_selection_score(sender, instance, raw, *args, **kwargs):
    """pre_save hook: recompute ``selection_score`` from the group preference."""
    # Skip fixture loading (raw=True) and rows missing either relation.
    if raw:
        return
    selection = instance
    if not (selection.lab and selection.student_group):
        return
    preference, _created = GroupPreference.objects.get_or_create(
        lab=selection.lab, student_group=selection.student_group)
    base = preference.preference
    # A confirmed pick is boosted by _PICKVAL so it outranks raw preferences.
    selection.selection_score = (_PICKVAL + base) if selection.is_selected else base
pre_save.connect(update_selection_score, Selection, dispatch_uid='students.Selection')
|
bsd-3-clause
| -1,875,968,631,157,454,600
| 28.82906
| 86
| 0.659885
| false
| 3.818381
| false
| false
| false
|
dceoy/fract
|
fract/model/base.py
|
1
|
23500
|
#!/usr/bin/env python
import json
import logging
import os
import signal
import time
from abc import ABCMeta, abstractmethod
from datetime import datetime
from math import ceil
from pathlib import Path
from pprint import pformat
import numpy as np
import pandas as pd
import yaml
from oandacli.util.logger import log_response
from v20 import Context, V20ConnectionError, V20Timeout
from .bet import BettingSystem
from .ewma import Ewma
from .kalman import Kalman
class APIResponseError(RuntimeError):
    # Raised when the OANDA v20 API returns an unexpected status or body.
    pass
class TraderCore(object):
    """Stateful OANDA v20 trading client.

    Keeps account, position, price and instrument-spec caches in sync with
    the API, places and closes orders (honouring ``dry_run``), sizes
    positions via the configured ``BettingSystem``, and writes
    order/transaction/rate logs under ``log_dir_path`` when one is given.
    """

    def __init__(self, config_dict, instruments, log_dir_path=None,
                 quiet=False, dry_run=False):
        # config_dict: parsed YAML configuration; instruments overrides
        # cf['instruments'] when non-empty.
        self.__logger = logging.getLogger(__name__)
        self.cf = config_dict
        self.__api = Context(
            hostname='api-fx{}.oanda.com'.format(
                self.cf['oanda']['environment']
            ),
            token=self.cf['oanda']['token']
        )
        self.__account_id = self.cf['oanda']['account_id']
        self.instruments = (instruments or self.cf['instruments'])
        self.__bs = BettingSystem(strategy=self.cf['position']['bet'])
        self.__quiet = quiet
        self.__dry_run = dry_run
        if log_dir_path:
            # Snapshot the run parameters next to the order/transaction logs.
            log_dir = Path(log_dir_path).resolve()
            self.__log_dir_path = str(log_dir)
            os.makedirs(self.__log_dir_path, exist_ok=True)
            self.__order_log_path = str(log_dir.joinpath('order.json.txt'))
            self.__txn_log_path = str(log_dir.joinpath('txn.json.txt'))
            self._write_data(
                yaml.dump(
                    {
                        'instrument': self.instruments,
                        'position': self.cf['position'],
                        'feature': self.cf['feature'],
                        'model': self.cf['model']
                    },
                    default_flow_style=False
                ).strip(),
                path=str(log_dir.joinpath('parameter.yml')),
                mode='w', append_linesep=False
            )
        else:
            self.__log_dir_path = None
            self.__order_log_path = None
            self.__txn_log_path = None
        self.__last_txn_id = None
        self.pos_dict = dict()
        self.balance = None
        self.margin_avail = None
        self.__account_currency = None
        self.txn_list = list()
        self.__inst_dict = dict()
        self.price_dict = dict()
        self.unit_costs = dict()

    def _refresh_account_dicts(self):
        """Refresh balance, available margin and open positions from the API."""
        res = self.__api.account.get(accountID=self.__account_id)
        # log_response(res, logger=self.__logger)
        if 'account' in res.body:
            acc = res.body['account']
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )
        self.balance = float(acc.balance)
        self.margin_avail = float(acc.marginAvailable)
        self.__account_currency = acc.currency
        pos_dict0 = self.pos_dict
        self.pos_dict = {
            p.instrument: (
                {'side': 'long', 'units': int(p.long.units)} if p.long.tradeIDs
                else {'side': 'short', 'units': int(p.short.units)}
            ) for p in acc.positions if p.long.tradeIDs or p.short.tradeIDs
        }
        for i, d in self.pos_dict.items():
            p0 = pos_dict0.get(i)
            if p0 and all([p0[k] == d[k] for k in ['side', 'units']]):
                # Position unchanged: keep its original entry timestamp.
                self.pos_dict[i]['dt'] = p0['dt']
            else:
                self.pos_dict[i]['dt'] = datetime.now()

    def _place_order(self, closing=False, **kwargs):
        """Close a position (closing=True) or create an order via the API.

        In dry-run mode the request is only logged, never sent.
        """
        if closing:
            p = self.pos_dict.get(kwargs['instrument'])
            # Close whichever side is currently open; leave the other alone.
            f_args = {
                'accountID': self.__account_id, **kwargs,
                **{
                    f'{k}Units': ('ALL' if p and p['side'] == k else 'NONE')
                    for k in ['long', 'short']
                }
            }
        else:
            f_args = {'accountID': self.__account_id, **kwargs}
        if self.__dry_run:
            self.__logger.info(
                os.linesep + pformat({
                    'func': ('position.close' if closing else 'order.create'),
                    'args': f_args
                })
            )
        else:
            if closing:
                res = self.__api.position.close(**f_args)
            else:
                res = self.__api.order.create(**f_args)
            log_response(res, logger=self.__logger)
            if not (100 <= res.status <= 399):
                raise APIResponseError(
                    'unexpected response:' + os.linesep + pformat(res.body)
                )
            elif self.__order_log_path:
                self._write_data(res.raw_body, path=self.__order_log_path)
            else:
                # Brief pause to avoid hammering the API when not logging.
                time.sleep(0.5)

    def refresh_oanda_dicts(self):
        """Refresh all cached API state, pacing requests to respect rate limits."""
        t0 = datetime.now()
        self._refresh_account_dicts()
        self._sleep(last=t0, sec=0.5)
        self._refresh_txn_list()
        self._sleep(last=t0, sec=1)
        self._refresh_inst_dict()
        self._sleep(last=t0, sec=1.5)
        self._refresh_price_dict()
        self._refresh_unit_costs()

    def _refresh_txn_list(self):
        """Append transactions newer than the last seen ID to self.txn_list."""
        res = (
            self.__api.transaction.since(
                accountID=self.__account_id, id=self.__last_txn_id
            ) if self.__last_txn_id
            else self.__api.transaction.list(accountID=self.__account_id)
        )
        # log_response(res, logger=self.__logger)
        if 'lastTransactionID' in res.body:
            self.__last_txn_id = res.body['lastTransactionID']
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )
        if res.body.get('transactions'):
            t_new = [t.dict() for t in res.body['transactions']]
            self.print_log(yaml.dump(t_new, default_flow_style=False).strip())
            self.txn_list = self.txn_list + t_new
            if self.__txn_log_path:
                self._write_data(json.dumps(t_new), path=self.__txn_log_path)

    def _refresh_inst_dict(self):
        """Cache the account's instrument specifications by instrument name."""
        res = self.__api.account.instruments(accountID=self.__account_id)
        # log_response(res, logger=self.__logger)
        if 'instruments' in res.body:
            self.__inst_dict = {
                c.name: vars(c) for c in res.body['instruments']
            }
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )

    def _refresh_price_dict(self):
        """Cache closeout bid/ask and tradeability for every known instrument."""
        res = self.__api.pricing.get(
            accountID=self.__account_id,
            instruments=','.join(self.__inst_dict.keys())
        )
        # log_response(res, logger=self.__logger)
        if 'prices' in res.body:
            self.price_dict = {
                p.instrument: {
                    'bid': p.closeoutBid, 'ask': p.closeoutAsk,
                    'tradeable': p.tradeable
                } for p in res.body['prices']
            }
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )

    def _refresh_unit_costs(self):
        """Cache the margin cost of one unit, in account currency, per instrument."""
        self.unit_costs = {
            i: self._calculate_bp_value(instrument=i) * float(e['marginRate'])
            for i, e in self.__inst_dict.items() if i in self.instruments
        }

    def _calculate_bp_value(self, instrument):
        """Return the value of one unit of `instrument` in the account currency.

        Converts directly when the account currency is one side of the pair;
        otherwise routes through any available cross pair.
        """
        cur_pair = instrument.split('_')
        if cur_pair[0] == self.__account_currency:
            bpv = 1 / self.price_dict[instrument]['ask']
        elif cur_pair[1] == self.__account_currency:
            bpv = self.price_dict[instrument]['ask']
        else:
            bpv = None
            for i in self.__inst_dict.keys():
                if bpv:
                    break
                elif i == cur_pair[0] + '_' + self.__account_currency:
                    bpv = self.price_dict[i]['ask']
                elif i == self.__account_currency + '_' + cur_pair[0]:
                    bpv = 1 / self.price_dict[i]['ask']
                elif i == cur_pair[1] + '_' + self.__account_currency:
                    bpv = (
                        self.price_dict[instrument]['ask']
                        * self.price_dict[i]['ask']
                    )
                elif i == self.__account_currency + '_' + cur_pair[1]:
                    bpv = (
                        self.price_dict[instrument]['ask']
                        / self.price_dict[i]['ask']
                    )
        assert bpv, f'bp value calculatiton failed:\t{instrument}'
        return bpv

    def design_and_place_order(self, instrument, act):
        """Close any conflicting position, then open `act` ('long'/'short').

        act may also be 'closing' (close only) or falsy (no action).
        """
        pos = self.pos_dict.get(instrument)
        if pos and act and (act == 'closing' or act != pos['side']):
            self.__logger.info('Close a position:\t{}'.format(pos['side']))
            self._place_order(closing=True, instrument=instrument)
            self._refresh_txn_list()
        if act in ['long', 'short']:
            limits = self._design_order_limits(instrument=instrument, side=act)
            self.__logger.debug(f'limits:\t{limits}')
            units = self._design_order_units(instrument=instrument, side=act)
            self.__logger.debug(f'units:\t{units}')
            self.__logger.info(f'Open a order:\t{act}')
            self._place_order(
                order={
                    'type': 'MARKET', 'instrument': instrument, 'units': units,
                    'timeInForce': 'FOK', 'positionFill': 'DEFAULT', **limits
                }
            )

    def _design_order_limits(self, instrument, side):
        """Build take-profit / stop-loss / trailing-stop clauses for an order.

        Distances come from cf['position']['limit_price_ratio'], clamped to
        the instrument's min/max trailing-stop distance.
        """
        ie = self.__inst_dict[instrument]
        r = self.price_dict[instrument][{'long': 'ask', 'short': 'bid'}[side]]
        ts_range = [
            float(ie['minimumTrailingStopDistance']),
            float(ie['maximumTrailingStopDistance'])
        ]
        ts_dist_ratio = int(
            r * self.cf['position']['limit_price_ratio']['trailing_stop'] /
            ts_range[0]
        )
        if ts_dist_ratio <= 1:
            trailing_stop = ie['minimumTrailingStopDistance']
        else:
            ts_dist = np.float16(ts_range[0] * ts_dist_ratio)
            if ts_dist >= ts_range[1]:
                trailing_stop = ie['maximumTrailingStopDistance']
            else:
                trailing_stop = str(ts_dist)
        tp = {
            k: str(
                np.float16(
                    r + r * v * {
                        'take_profit': {'long': 1, 'short': -1}[side],
                        'stop_loss': {'long': -1, 'short': 1}[side]
                    }[k]
                )
            ) for k, v in self.cf['position']['limit_price_ratio'].items()
            if k in ['take_profit', 'stop_loss']
        }
        tif = {'timeInForce': 'GTC'}
        return {
            'takeProfitOnFill': {'price': tp['take_profit'], **tif},
            'stopLossOnFill': {'price': tp['stop_loss'], **tif},
            'trailingStopLossOnFill': {'distance': trailing_stop, **tif}
        }

    def _design_order_units(self, instrument, side):
        """Return the signed unit count for a new order as a string.

        Size = min(betting-system size, available margin, instrument max);
        negative for short positions.
        """
        max_size = int(self.__inst_dict[instrument]['maximumOrderUnits'])
        avail_size = max(
            ceil(
                (
                    self.margin_avail - self.balance *
                    self.cf['position']['margin_nav_ratio']['preserve']
                ) / self.unit_costs[instrument]
            ), 0
        )
        self.__logger.debug(f'avail_size:\t{avail_size}')
        sizes = {
            k: ceil(self.balance * v / self.unit_costs[instrument])
            for k, v in self.cf['position']['margin_nav_ratio'].items()
            if k in ['unit', 'init']
        }
        self.__logger.debug(f'sizes:\t{sizes}')
        bet_size = self.__bs.calculate_size_by_pl(
            unit_size=sizes['unit'],
            inst_pl_txns=[
                t for t in self.txn_list if (
                    t.get('instrument') == instrument and t.get('pl') and
                    t.get('units')
                )
            ],
            init_size=sizes['init']
        )
        self.__logger.debug(f'bet_size:\t{bet_size}')
        return str(
            int(min(bet_size, avail_size, max_size)) *
            {'long': 1, 'short': -1}[side]
        )

    @staticmethod
    def _sleep(last, sec=0.5):
        # Sleep only for the remainder of `sec` since `last` (request pacing).
        rest = sec - (datetime.now() - last).total_seconds()
        if rest > 0:
            time.sleep(rest)

    def print_log(self, data):
        # In quiet mode route output to the logger instead of stdout.
        if self.__quiet:
            self.__logger.info(data)
        else:
            print(data, flush=True)

    def print_state_line(self, df_rate, add_str):
        """Print a one-line status: instrument, bid/ask, and cumulative P/L."""
        i = df_rate['instrument'].iloc[-1]
        net_pl = sum([
            float(t['pl']) for t in self.txn_list
            if t.get('instrument') == i and t.get('pl')
        ])
        self.print_log(
            '|{0:^11}|{1:^29}|{2:^15}|'.format(
                i,
                '{0:>3}:{1:>21}'.format(
                    'B/A',
                    np.array2string(
                        df_rate[['bid', 'ask']].iloc[-1].values,
                        formatter={'float_kind': lambda f: f'{f:8g}'}
                    )
                ),
                'PL:{:>8}'.format(f'{net_pl:.1g}')
            ) + (add_str or '')
        )

    def _write_data(self, data, path, mode='a', append_linesep=True):
        # NOTE(review): files are opened without an explicit encoding, so the
        # platform default applies - confirm UTF-8 is intended.
        with open(path, mode) as f:
            f.write(str(data) + (os.linesep if append_linesep else ''))

    def write_turn_log(self, df_rate, **kwargs):
        """Append the latest rates (and optional signal columns) to TSV logs."""
        i = df_rate['instrument'].iloc[-1]
        df_r = df_rate.drop(columns=['instrument'])
        self._write_log_df(name=f'rate.{i}', df=df_r)
        if kwargs:
            self._write_log_df(
                name=f'sig.{i}', df=df_r.tail(n=1).assign(**kwargs)
            )

    def _write_log_df(self, name, df):
        # Skip silently when logging is disabled or the frame is empty.
        if self.__log_dir_path and df.size:
            self.__logger.debug(f'{name} df:{os.linesep}{df}')
            p = str(Path(self.__log_dir_path).joinpath(f'{name}.tsv'))
            self.__logger.info(f'Write TSV log:\t{p}')
            self._write_df(df=df, path=p)

    def _write_df(self, df, path, mode='a'):
        # Append to CSV/TSV; write the header only on first creation.
        df.to_csv(
            path, mode=mode, sep=(',' if path.endswith('.csv') else '\t'),
            header=(not Path(path).is_file())
        )

    def fetch_candle_df(self, instrument, granularity='S5', count=5000):
        """Fetch completed candles as a DataFrame indexed by time.

        Columns: bid/ask closing prices and volume.
        """
        res = self.__api.instrument.candles(
            instrument=instrument, price='BA', granularity=granularity,
            count=int(count)
        )
        # log_response(res, logger=self.__logger)
        if 'candles' in res.body:
            return pd.DataFrame([
                {
                    'time': c.time, 'bid': c.bid.c, 'ask': c.ask.c,
                    'volume': c.volume
                } for c in res.body['candles'] if c.complete
            ]).assign(
                time=lambda d: pd.to_datetime(d['time']), instrument=instrument
            ).set_index('time', drop=True)
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )

    def fetch_latest_price_df(self, instrument):
        """Fetch the current closeout bid/ask as a one-row DataFrame."""
        res = self.__api.pricing.get(
            accountID=self.__account_id, instruments=instrument
        )
        # log_response(res, logger=self.__logger)
        if 'prices' in res.body:
            return pd.DataFrame([
                {'time': r.time, 'bid': r.closeoutBid, 'ask': r.closeoutAsk}
                for r in res.body['prices']
            ]).assign(
                time=lambda d: pd.to_datetime(d['time']), instrument=instrument
            ).set_index('time')
        else:
            raise APIResponseError(
                'unexpected response:' + os.linesep + pformat(res.body)
            )
class BaseTrader(TraderCore, metaclass=ABCMeta):
    """Abstract trading loop on top of TraderCore.

    Caches recent rates per instrument, scores trade signals with a model
    (EWMA or Kalman), and defers the actual order decision to subclasses
    via the abstract make_decision()/check_health() hooks.
    """
    def __init__(self, model, standalone=True, ignore_api_error=False,
                 **kwargs):
        super().__init__(**kwargs)
        self.__logger = logging.getLogger(__name__)
        self.__ignore_api_error = ignore_api_error
        # number of rows kept in each per-instrument rate cache
        self.__n_cache = self.cf['feature']['cache']
        # tick-level features are only usable when fed by an external stream
        self.__use_tick = (
            'TICK' in self.cf['feature']['granularities'] and not standalone
        )
        self.__granularities = [
            a for a in self.cf['feature']['granularities'] if a != 'TICK'
        ]
        self.__cache_dfs = {i: pd.DataFrame() for i in self.instruments}
        if model == 'ewma':
            self.__ai = Ewma(config_dict=self.cf)
        elif model == 'kalman':
            self.__ai = Kalman(config_dict=self.cf)
        else:
            raise ValueError(f'invalid model name:\t{model}')
        # instrument -> bool: False means "sleeping" (volatility too low)
        self.__volatility_states = dict()
        # instrument -> granularity pinned while a position/entry signal exists
        self.__granularity_lock = dict()
    def invoke(self):
        """Run the decision loop until check_health() returns falsy."""
        self.print_log('!!! OPEN DEALS !!!')
        # restore default Ctrl-C handling so the loop can be interrupted
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        while self.check_health():
            try:
                self._update_volatility_states()
                for i in self.instruments:
                    self.refresh_oanda_dicts()
                    self.make_decision(instrument=i)
            except (V20ConnectionError, V20Timeout, APIResponseError) as e:
                # optionally swallow transient API failures and keep looping
                if self.__ignore_api_error:
                    self.__logger.error(e)
                else:
                    raise e
    @abstractmethod
    def check_health(self):
        """Return truthy while the trading loop should keep running."""
        return True
    def _update_volatility_states(self):
        """Flag each instrument active/sleeping from volume-weighted
        rolling log-return volatility vs. a configured quantile."""
        if not self.cf['volatility']['sleeping']:
            # sleeping disabled: every instrument is always considered active
            self.__volatility_states = {i: True for i in self.instruments}
        else:
            self.__volatility_states = {
                i: self.fetch_candle_df(
                    instrument=i,
                    granularity=self.cf['volatility']['granularity'],
                    count=self.cf['volatility']['cache']
                ).pipe(
                    lambda d: (
                        np.log(d[['ask', 'bid']].mean(axis=1)).diff().rolling(
                            window=int(self.cf['volatility']['window'])
                        ).std(ddof=0) * d['volume']
                    )
                ).dropna().pipe(
                    lambda v: (
                        v.iloc[-1]
                        > v.quantile(self.cf['volatility']['sleeping'])
                    )
                ) for i in set(self.instruments)
            }
    @abstractmethod
    def make_decision(self, instrument):
        """Decide and (in subclasses) place or close orders for *instrument*."""
        pass
    def update_caches(self, df_rate):
        """Append the latest rate rows to the per-instrument cache,
        trimming it to the configured cache length."""
        self.__logger.info(f'Rate:{os.linesep}{df_rate}')
        i = df_rate['instrument'].iloc[-1]
        # NOTE(review): DataFrame.append was removed in pandas 2.0 —
        # confirm this project pins pandas < 2 (pd.concat is the successor)
        df_c = self.__cache_dfs[i].append(df_rate).tail(n=self.__n_cache)
        self.__logger.info('Cache length:\t{}'.format(len(df_c)))
        self.__cache_dfs[i] = df_c
    def determine_sig_state(self, df_rate):
        """Combine model signal, current position and account state into an
        action ('long'/'short'/'closing'/None), a display state string,
        and a fixed-width log string. Returns a dict merging these with
        the raw signal fields."""
        i = df_rate['instrument'].iloc[-1]
        pos = self.pos_dict.get(i)
        # position size as % of account balance, for display only
        pos_pct = (
            round(
                abs(pos['units'] * self.unit_costs[i] * 100 / self.balance), 1
            ) if pos else 0
        )
        history_dict = self._fetch_history_dict(instrument=i)
        if not history_dict:
            sig = {
                'sig_act': None, 'granularity': None, 'sig_log_str': (' ' * 40)
            }
        else:
            if self.cf['position']['side'] == 'auto':
                # 'auto' side: turn contrarian after a losing trade
                inst_pls = [
                    t['pl'] for t in self.txn_list
                    if t.get('instrument') == i and t.get('pl')
                ]
                contrary = bool(inst_pls and float(inst_pls[-1]) < 0)
            else:
                contrary = (self.cf['position']['side'] == 'contrarian')
            sig = self.__ai.detect_signal(
                history_dict=(
                    {
                        k: v for k, v in history_dict.items()
                        if k == self.__granularity_lock[i]
                    } if self.__granularity_lock.get(i) else history_dict
                ),
                pos=pos, contrary=contrary
            )
            if self.cf['feature']['granularity_lock']:
                # pin the winning granularity while in/entering a position
                self.__granularity_lock[i] = (
                    sig['granularity']
                    if pos or sig['sig_act'] in {'long', 'short'} else None
                )
        # a signal agreeing with the open position refreshes its TTL
        if pos and sig['sig_act'] and sig['sig_act'] == pos['side']:
            self.pos_dict[i]['dt'] = datetime.now()
        # priority-ordered state resolution (first matching branch wins)
        if not sig['granularity']:
            act = None
            state = 'LOADING'
        elif not self.price_dict[i]['tradeable']:
            act = None
            state = 'TRADING HALTED'
        elif (pos and sig['sig_act']
              and (sig['sig_act'] == 'closing'
                   or (not self.__volatility_states[i]
                       and sig['sig_act'] != pos['side']))):
            act = 'closing'
            state = 'CLOSING'
        elif (pos and not sig['sig_act']
              and ((datetime.now() - pos['dt']).total_seconds()
                   > self.cf['position']['ttl_sec'])):
            act = 'closing'
            state = 'POSITION EXPIRED'
        elif int(self.balance) == 0:
            act = None
            state = 'NO FUND'
        elif (pos
              and ((sig['sig_act'] and sig['sig_act'] == pos['side'])
                   or not sig['sig_act'])):
            act = None
            state = '{0:.1f}% {1}'.format(pos_pct, pos['side'].upper())
        elif self._is_margin_lack(instrument=i):
            act = None
            state = 'LACK OF FUNDS'
        elif self._is_over_spread(df_rate=df_rate):
            act = None
            state = 'OVER-SPREAD'
        elif not self.__volatility_states[i]:
            act = None
            state = 'SLEEPING'
        elif not sig['sig_act']:
            act = None
            state = '-'
        elif pos:
            act = sig['sig_act']
            state = '{0} -> {1}'.format(
                pos['side'].upper(), sig['sig_act'].upper()
            )
        else:
            act = sig['sig_act']
            state = '-> {}'.format(sig['sig_act'].upper())
        return {
            'act': act, 'state': state,
            'log_str': (
                (
                    '{:^14}|'.format('TICK:{:>5}'.format(len(df_rate)))
                    if self.__use_tick else ''
                ) + sig['sig_log_str'] + f'{state:^18}|'
            ),
            **sig
        }
    def _fetch_history_dict(self, instrument):
        """Collect feature inputs per granularity; the TICK entry is added
        only when tick mode is on and the cache is full."""
        df_c = self.__cache_dfs[instrument]
        return {
            **(
                {'TICK': df_c.assign(volume=1)}
                if self.__use_tick and len(df_c) == self.__n_cache else dict()
            ),
            **{
                g: self.fetch_candle_df(
                    instrument=instrument, granularity=g, count=self.__n_cache
                ).rename(
                    columns={'closeAsk': 'ask', 'closeBid': 'bid'}
                )[['ask', 'bid', 'volume']] for g in self.__granularities
            }
        }
    def _is_margin_lack(self, instrument):
        """True when no position is open and available margin is below the
        configured preserve ratio of the balance."""
        return (
            not self.pos_dict.get(instrument) and
            self.balance * self.cf['position']['margin_nav_ratio']['preserve']
            >= self.margin_avail
        )
    def _is_over_spread(self, df_rate):
        """True when the relative bid/ask spread of the latest row exceeds
        the configured maximum."""
        return (
            df_rate.tail(n=1).pipe(
                lambda d: (d['ask'] - d['bid']) / (d['ask'] + d['bid']) * 2
            ).values[0]
            >= self.cf['position']['limit_price_ratio']['max_spread']
        )
|
gpl-3.0
| 4,704,809,548,591,564,000
| 37.02589
| 79
| 0.481787
| false
| 3.868313
| false
| false
| false
|
buwx/logger
|
Logger.py
|
1
|
2078
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
'''
Weather-station serial logger (Python 2).

Reads lines from a Davis-style receiver on /dev/ttyUSB0, walks a small
handshake state machine (stages 0-6) to configure the receiver, then
inserts each recognized sensor line into the 'davis.logger' MySQL table.

Created on 10.03.2015

@author: micha
'''
import logging
import serial
import time
import MySQLdb as mdb
from util import formatData, description, sensor
# the main procedure
logging.basicConfig(format='%(asctime)s\t%(levelname)s\t%(message)s', level=logging.INFO)
logging.info("Starting weather station sensor logging")
con = None
port = None
try:
    con = mdb.connect('localhost', 'davis', 'davis', 'davis');
    cur = con.cursor()
    # serial timeout (310 s) is just above the 300 s watchdog below, so a
    # silent receiver produces an empty line that triggers the timeout exit
    port = serial.Serial(
        port='/dev/ttyUSB0',\
        baudrate=115200,\
        parity=serial.PARITY_NONE,\
        stopbits=serial.STOPBITS_ONE,\
        bytesize=serial.EIGHTBITS,\
        timeout=310)
    # handshake stage: 0 = waiting for '?', 1-5 = sending config commands,
    # 6 = configured, normal data logging
    stage = 0;
    # main loop
    while True:
        ts_old = int(time.time())
        line = port.readline().strip()
        ts = int(time.time())
        # watchdog: readline blocked for more than 5 minutes -> give up
        if ts - ts_old > 300:
            logging.critical("Timeout!")
            break
        # NOTE(review): an empty line (serial timeout < 300 s) would make
        # line[0] raise IndexError here — confirm the receiver never sends
        # blank lines between the watchdog checks
        if stage == 0 and line[0] == '?':
            stage = 1
        elif stage == 1 and line[0] == '#':
            port.write("x200\n") # Threshold set to -100db
            stage = 2
        elif stage == 2 and line[0] == '#':
            port.write("t1\n") # Tansmitter 1
            stage = 3
        elif stage == 3 and line[0] == '#':
            port.write("f1\n") # Filter on
            stage = 4
        elif stage == 4 and line[0] == '#':
            port.write("o0\n") # Output original data
            stage = 5
        elif stage == 5 and line[0] == '#':
            port.write("m1\n") # Frequency band 1
            stage = 6
        elif stage == 6 and len(line) > 3:
            # configured: log known sensor record types to the database
            sid = line[0]
            if sid == 'B' or sid == 'I' or sid == 'W' or sid == 'T' or sid == 'R' or sid == 'P':
                cur.execute("INSERT INTO logger(dateTime,sensor,data,description) VALUES(%s,%s,%s,%s)", (ts,sensor(line),formatData(line),description(line)))
                con.commit()
except Exception, e:
    logging.critical(str(e))
finally:
    if con:
        con.close()
    if port:
        port.close()
|
gpl-3.0
| 4,441,962,244,224,371,000
| 25.641026
| 157
| 0.532243
| false
| 3.516074
| false
| false
| false
|
bgr/quadpy
|
demo/demo.py
|
1
|
6825
|
try:
import tkinter
except ImportError:
import Tkinter as tkinter
# hsmpy is Hierarchical State Machine implementation for Python
# it's used here to implement GUI logic
import quadpy
from quadpy.rectangle import Rectangle
from hsmpy import HSM, State, T, Initial, Internal, Choice, EventBus, Event
# you can enable logging to see what's going on under the hood of HSM
#import logging
#logging.basicConfig(level=logging.DEBUG)
# tool aliases
Selection_tool, Drawing_tool = ('Select', 'Draw')
# eventbus will be used for all events
# Tkinter events will also be routed through it
eb = EventBus()
# HSM events
# Dispatched when a toolbar label is clicked; the handler reads the chosen
# tool name from evt.data (presumably set by hsmpy's Event base from the
# constructor argument — confirm in hsmpy)
class Tool_Changed(Event): pass
class Mouse_Event(Event):
    """Base for canvas mouse events; carries the cursor position as
    .x / .y and as the (x, y) tuple in .data."""
    def __init__(self, x, y):
        self.x, self.y = x, y
        self.data = (x, y)
# Concrete mouse events dispatched from the Tkinter canvas bindings below
class Canvas_Up(Mouse_Event): pass
class Canvas_Down(Mouse_Event): pass
class Canvas_Move(Mouse_Event): pass
# create Tkinter GUI
root = tkinter.Tk()
canvas = tkinter.Canvas(width=700, height=700,
                        highlightthickness=0, background='white')
canvas.pack(fill='both', expand=True, padx=6, pady=6)
frame = tkinter.Frame()
labels = []
for i, tool in enumerate([Selection_tool, Drawing_tool]):
    lbl = tkinter.Label(frame, text=tool, width=8, relief='raised')
    # bind via a factory function so each lambda captures its own tool
    # (a bare lambda here would late-bind the loop variable)
    def get_closure(for_tool):
        return lambda _: eb.dispatch(Tool_Changed(for_tool))
    lbl.bind('<Button-1>', get_closure(tool))
    lbl.pack(padx=6, pady=6 * (i % 2))
    labels.append(lbl)
frame.pack(side='left', fill='y', expand=True, pady=6)
# route raw Tkinter mouse events onto the event bus as HSM events
canvas.bind('<Button-1>', lambda e: eb.dispatch(Canvas_Down(e.x, e.y)))
canvas.bind('<B1-Motion>', lambda e: eb.dispatch(Canvas_Move(e.x, e.y)))
canvas.bind('<ButtonRelease-1>', lambda e: eb.dispatch(Canvas_Up(e.x, e.y)))
# I'll just put these here and reference them directly later, for simplicity
quad = quadpy.Node(0, 0, 700, 700, max_depth=9)  # quadtree covering the canvas
selected_elems = []  # list of (element, bounds-at-selection-time) tuples
canvas_grid = {}  # quadtree grid, mapping: bounds -> tkinter rectangle id
##### HSM state and transition actions #####
def update_chosen_tool(evt, hsm):
    """Sink the toolbar label of the chosen tool and remember it on the HSM."""
    chosen = evt.data
    for label in labels:
        if label['text'] == chosen:
            label['relief'] = 'sunken'
        else:
            label['relief'] = 'raised'
    hsm.data.canvas_tool = chosen
# quadtree grid visualization:
def update_grid():
    """Sync the drawn quadtree grid rectangles with the tree's current bounds."""
    new_bounds = set(quad._get_grid_bounds())
    old_bounds = set(canvas_grid)
    # erase rectangles for grid cells that no longer exist
    for gone in old_bounds - new_bounds:
        canvas.delete(canvas_grid.pop(gone))
    # draw rectangles for newly created grid cells
    for fresh in new_bounds - old_bounds:
        canvas_grid[fresh] = canvas.create_rectangle(fresh, outline='grey')
# drawing new rectangle:
def initialize_rectangle(evt, hsm):
    """Start drawing: insert a 1x1 rectangle at the click point into the quadtree."""
    x0, y0 = evt.data
    bounds = (x0, y0, x0 + 1, y0 + 1)
    rect = Rectangle(*bounds)
    rect.canvas_id = canvas.create_rectangle(bounds, outline='blue')
    # remember the anchor corner and the rectangle for subsequent drags
    hsm.data.canvas_temp_data = (x0, y0, rect)
    quad.insert(rect)
    update_grid()
def draw_rectangle(evt, hsm):
    """Resize the in-progress rectangle so its far corner tracks the cursor."""
    start_x, start_y, rect = hsm.data.canvas_temp_data
    rect.bounds = (start_x, start_y, evt.x, evt.y)
    canvas.coords(rect.canvas_id, rect.bounds)
    quad.reinsert(rect)
    update_grid()
# selecting and moving:
def elems_under_cursor(evt, hsm):
    """Return the quadtree children whose bounds contain the event point."""
    return quad.get_children_under_point(evt.x, evt.y)
def select_elems(elems):
    """Make *elems* the current selection, recoloring outlines accordingly.

    Stores (element, bounds-at-selection-time) pairs so a subsequent move
    can be applied as an offset from the original bounds.
    """
    global selected_elems
    # restore the previous selection's outline, then highlight the new one
    # (plain loops: the original used list comprehensions purely for side
    # effects, building throwaway lists)
    for el, _ in selected_elems:
        canvas.itemconfig(el.canvas_id, outline='blue')
    selected_elems = [(el, el.bounds) for el in elems]
    for el, _ in selected_elems:
        canvas.itemconfig(el.canvas_id, outline='red')
def select_under_cursor(evt, hsm):
    """On mouse-down, select the element under the cursor, or keep the
    current multi-selection if the click hit one of its members."""
    # remember the drag origin for move_elements' offset math
    hsm.data.moving_start = (evt.x, evt.y)
    elems = elems_under_cursor(evt, hsm)
    if not elems:
        # guarded by the HSM Choice transition: this state is entered only
        # when something is under the cursor. NOTE(review): assert is
        # stripped under `python -O`, leaving this branch a silent no-op.
        assert False, "this cannot happen"
    just_elems = set(el for el, _ in selected_elems)
    if not any(el in just_elems for el in elems):
        # clicked non-selected element, select it
        select_elems([elems[0]])
    else:
        # hack to refresh initial bounds for each tuple in selected_elems
        select_elems([el for el, _ in selected_elems])
def move_elements(evt, hsm):
    """Drag the selection: offset each element from its pre-drag bounds."""
    start_x, start_y = hsm.data.moving_start
    dx = evt.x - start_x
    dy = evt.y - start_y
    for el, (x1, y1, x2, y2) in selected_elems:
        el.bounds = (x1 + dx, y1 + dy, x2 + dx, y2 + dy)
        canvas.coords(el.canvas_id, el.bounds)
        quad.reinsert(el)
    update_grid()
# selection marquee
def create_marquee_rect(evt, hsm):
    """Begin a marquee selection at the mouse-down point and clear selection."""
    marquee_id = canvas.create_rectangle(
        (evt.x, evt.y, evt.x, evt.y), outline='orange')
    hsm.data.canvas_marquee = (evt.x, evt.y, marquee_id)
    select_elems([])
def drag_marquee_rect(evt, hsm):
    """Stretch the marquee to the cursor and select every overlapped element."""
    origin_x, origin_y, marquee_id = hsm.data.canvas_marquee
    bounds = (origin_x, origin_y, evt.x, evt.y)
    select_elems(quad.get_overlapped_children(bounds))
    canvas.coords(marquee_id, bounds)
def clear_marquee_rect(evt, hsm):
    """Remove the marquee rectangle from the canvas."""
    *_, marquee_id = hsm.data.canvas_marquee
    canvas.delete(marquee_id)
# define HSM state structure and transitions between states:
# State tree: two tool modes, each with a hovering sub-state plus the
# active interaction sub-state(s); on_enter/on_exit hooks do the canvas work.
states = {
    'app': State({
        'select_tool_chosen': State({
            'select_tool_hovering': State(),
            'dragging_marquee': State(on_enter=create_marquee_rect,
                                      on_exit=clear_marquee_rect),
            'moving_elements': State(on_enter=select_under_cursor),
        }),
        'draw_tool_chosen': State({
            'draw_tool_hovering': State(),
            'drawing': State(),
        })
    })
}
transitions = {
    'app': {
        Initial: T('draw_tool_chosen'),
        Tool_Changed: Choice({
            Selection_tool: 'select_tool_chosen',
            Drawing_tool: 'draw_tool_chosen' },
            default='select_tool_chosen',
            action=update_chosen_tool)
    },
    'select_tool_chosen': {
        Initial: T('select_tool_hovering'),
        Canvas_Up: T('select_tool_hovering'),
    },
    ####
    'select_tool_hovering': {
        # mouse-down either starts a marquee (empty space) or a move
        # (cursor over at least one element)
        Canvas_Down: Choice({
            False: 'dragging_marquee',
            True: 'moving_elements', },
            default='dragging_marquee',
            key=lambda e, h: len(elems_under_cursor(e, h)) > 0),
    },
    'dragging_marquee': {
        Canvas_Move: Internal(action=drag_marquee_rect),
    },
    'moving_elements': {
        Canvas_Move: Internal(action=move_elements),
    },
    ###
    'draw_tool_chosen': {
        Initial: T('draw_tool_hovering'),
        Canvas_Up: T('draw_tool_hovering'),
    },
    'draw_tool_hovering': {
        Canvas_Down: T('drawing', action=initialize_rectangle),
    },
    'drawing': {
        Canvas_Move: Internal(action=draw_rectangle),
    },
}
# initialize HSM with defined states and transitions and run
hsm = HSM(states, transitions)
hsm.start(eb)
# pre-select the drawing tool so the UI starts in a usable mode
eb.dispatch(Tool_Changed(Drawing_tool))
root.mainloop()
|
mit
| 7,489,607,430,619,016,000
| 27.676471
| 79
| 0.626081
| false
| 3.279673
| false
| false
| false
|
colour-science/colour-analysis
|
colour_analysis/visuals/__init__.py
|
1
|
1700
|
# -*- coding: utf-8 -*-
"""Public visuals API of :mod:`colour_analysis.visuals`."""
from __future__ import absolute_import
from .primitive import Primitive, PrimitiveVisual
from .symbol import Symbol
from .axis import axis_visual
from .box import Box, BoxVisual
from .diagrams import (CIE_1931_chromaticity_diagram,
                       CIE_1960_UCS_chromaticity_diagram,
                       CIE_1976_UCS_chromaticity_diagram)
from .image import image_visual
from .plane import Plane, PlaneVisual
from .pointer_gamut import (pointer_gamut_boundaries_visual,
                            pointer_gamut_hull_visual, pointer_gamut_visual)
from .rgb_colourspace import (RGB_identity_cube, RGB_colourspace_volume_visual,
                              RGB_colourspace_whitepoint_axis_visual,
                              RGB_colourspace_triangle_visual)
from .rgb_scatter import RGB_scatter_visual
from .spectral_locus import (spectral_locus_visual,
                             chromaticity_diagram_construction_visual)
__all__ = []
__all__ += ['Primitive', 'PrimitiveVisual']
__all__ += ['Symbol']
# Fix: only 'axis_visual' is imported from .axis above; the previous entry
# also listed 'Axis' and 'AxisVisual', which are never defined in this
# module, so `from colour_analysis.visuals import *` raised AttributeError.
__all__ += ['axis_visual']
__all__ += ['Box', 'BoxVisual']
__all__ += [
    'CIE_1931_chromaticity_diagram', 'CIE_1960_UCS_chromaticity_diagram',
    'CIE_1976_UCS_chromaticity_diagram'
]
__all__ += ['image_visual']
__all__ += ['Plane', 'PlaneVisual']
__all__ += [
    'pointer_gamut_boundaries_visual', 'pointer_gamut_hull_visual',
    'pointer_gamut_visual'
]
__all__ += [
    'RGB_identity_cube', 'RGB_colourspace_volume_visual',
    'RGB_colourspace_whitepoint_axis_visual', 'RGB_colourspace_triangle_visual'
]
__all__ += ['RGB_scatter_visual']
__all__ += [
    'spectral_locus_visual', 'chromaticity_diagram_construction_visual'
]
|
bsd-3-clause
| 7,652,326,354,322,501,000
| 36.777778
| 79
| 0.647647
| false
| 3.262956
| false
| true
| false
|
jgagneastro/FireHose_OLD
|
3-XIDL/23-XIDL/idlspec2d/bin/putils.py
|
2
|
2033
|
#!/usr/bin/env python
import sys, os, os, subprocess
import string, imp, time, shlex
import gzip
"""
putils is a set of miscellaneous python tools.
Written by Gary Kushner (LBL). Nov 2009. Latest update April 2010.
"""
def searchPath(name, paths):
    """Return the absolute path of *name* found in the first matching
    directory of *paths*, or None when no directory contains it."""
    for directory in paths:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None
def loadModuleRaw(module):
    """import a python module using a raw file name (doesn't need to end in .py)"""
    # NOTE(review): the `imp` module was removed in Python 3.12; a port
    # would need importlib (e.g. importlib.util.spec_from_file_location)
    path = searchPath(module, sys.path)
    if path == None:
        raise ImportError("No module named " + module)
    return imp.load_source(module, path)
def runCommand(cmd, echo=False, logCmd=None, prefix="", shell=False):
    """Run a command with the option to asynchronously display or log output.

    If shell=False, the cmd needs to be a list, but if you pass in a string
    it will be parsed into a list.

    echo will echo output to stdout.

    logCmd is a function pointer to use to put the output into a log.

    Returns (return code, output)."""
    # NOTE(review): Python 2 code (`print` statement below); also the
    # `output += l` accumulation is quadratic for very chatty commands
    output = ""
    # Handle the command parsing
    if isinstance(cmd, str) and not shell:
        cmd = [c for c in shlex.split(cmd)]
    # Call the process; stderr is merged into stdout so one stream is read
    p = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.STDOUT,
                         shell=shell)
    # Process output until process dies
    while True:
        l = p.stdout.readline()
        if not l: break
        output += l
        l = l[:-1]  # yea, only safe on unix...
        if echo:
            print prefix + l
        if logCmd != None:
            logCmd(prefix + l)
    return (p.wait(), output)
def openRead(filename, mode = "r"):
    """Open a gzip or normal file for text reading.  Valid modes are 'r' and 'rb'"""
    # gzip magic number; Python 2 str comparison — under Python 3 the text
    # mode 'r' read/compare would not behave the same (bytes vs str)
    gzSig = '\x1f\x8b'
    if mode != 'r' and mode != 'rb':
        raise ValueError("Illegal mode: " + mode)
    f = open(filename, mode)
    try:
        # sniff the first two bytes; reopen through gzip when compressed
        # NOTE(review): the original plain handle is not closed when the
        # gzip branch is taken — it is dropped and left to the GC
        if (f.read(2) == gzSig):
            f = gzip.open(filename, mode)
    finally:
        # rewind whichever handle we ended up with to the start
        f.seek(0)
    return f
|
gpl-2.0
| -3,833,361,437,633,648,600
| 23.493976
| 81
| 0.661584
| false
| 3.099085
| false
| false
| false
|
tiagocoutinho/bliss
|
bliss/controllers/motors/carnac.py
|
1
|
2689
|
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
"""
ID31 motion hook for the carnac motors.
"""
import logging
import gevent
from bliss.common.hook import MotionHook
class CarnacHook(MotionHook):
    """
    Motion hook specific for ID31 carnac motors.

    Configuration example:

    .. code-block:: yaml

        hooks:
          - name: carnac_hook
            class: CarnacHook
            module: motors.hooks
            plugin: bliss
        controllers:
          - name: ice313
            class: IcePAP
            host: iceid313
            plugin: emotion
            axes:
              - name: cncx
                motion_hooks:
                  - $carnac_hook
              - name: cncy
                motion_hooks:
                  - $carnac_hook
              - name: cncz
                motion_hooks:
                  - $carnac_hook
    """

    def __init__(self, name, config):
        # logger name includes the hook instance name, e.g. "CarnacHook(carnac_hook)"
        self._log = logging.getLogger('{0}({1})'.format(self.__class__.__name__,
                                                        name))
        self.debug = self._log.debug
        self.config = config
        self.name = name
        super(CarnacHook, self).__init__()

    def _wait_ready(self, axes):
        """Poll hardware state until every axis reports READY.

        Raises RuntimeError via gevent.Timeout after 1 s.
        """
        with gevent.Timeout(1, RuntimeError('not all motors ready after timeout')):
            while True:
                ready = [axis for axis in axes if axis.state(read_hw=True).READY]
                if len(ready) == len(axes):
                    break
        self._log.debug('All motors ready!')

    def pre_move(self, motion_list):
        """Power on all involved axes in parallel and wait until they are ready."""
        axes = [motion.axis for motion in motion_list]
        axes_names = ', '.join([axis.name for axis in axes])
        self._log.debug('Start power ON for %s', axes_names)
        tasks = [gevent.spawn(axis.controller.set_on, axis) for axis in axes]
        gevent.joinall(tasks, timeout=1, raise_error=True)
        self._log.debug('Finished power ON for %s', axes_names)
        # we know empirically that the carnac takes ~1.3s to reply it is
        # ready after a power on
        gevent.sleep(1.2)
        self._wait_ready(axes)

    def post_move(self, motion_list):
        """Power off all involved axes in parallel and wait until they settle."""
        axes = [motion.axis for motion in motion_list]
        axes_names = ', '.join([axis.name for axis in axes])
        self._log.debug('Start power OFF for %s', axes_names)
        tasks = [gevent.spawn(axis.controller.set_off, axis) for axis in axes]
        gevent.joinall(tasks, timeout=1, raise_error=True)
        self._log.debug('Finished power OFF for %s', axes_names)
        self._wait_ready(axes)
|
lgpl-3.0
| -3,396,081,586,227,965,400
| 31.39759
| 83
| 0.554481
| false
| 3.771388
| false
| false
| false
|
amaurywalbert/twitter
|
communities_detection/infomap/hashmap_infomap_method_v1.0.py
|
2
|
6588
|
# -*- coding: latin1 -*-
################################################################################################
import snap,datetime, sys, time, json, os, os.path, shutil, time, struct, random
import subprocess
import networkx as nx
import matplotlib.pyplot as plt
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
######################################################################################################################################################################
## Status - Versão 1 - INFOMAP - http://www.mapequation.org/code.html
##
##
## # INPUT: Grafos
##
## # OUTPUT:
## Communities
######################################################################################################################################################################
######################################################################################################################################################################
#
# Cálculos iniciais sobre o conjunto de dados lidos.
#
######################################################################################################################################################################
def calculate_alg(output,net,uw,ud,g_type,alg,graphs):
    """Run the external Infomap binary over every edge-list file in *graphs*.

    output  -- directory receiving the community files (created if missing)
    net     -- network label (display only)
    uw      -- unweighted flag; NOTE(review): currently unused in this body
    ud      -- True for undirected graphs, False for directed
    g_type  -- graph variant label, e.g. with/without ego (display only)
    alg     -- algorithm label (display only)
    graphs  -- directory of per-ego ".edge_list" files (Python 2 script)
    """
    if not os.path.exists(graphs):
        print ("\nDiretório com grafos não encontrado: "+str(graphs)+"\n")
    else:
        print
        print("######################################################################")
        print ("Os arquivos serão armazenados em: "+str(output))
        print("######################################################################")
        if not os.path.exists(output):
            os.makedirs(output)
        i=0
        for file in os.listdir(graphs):
            # ego id is the numeric filename prefix before ".edge_list"
            ego_id = file.split(".edge_list")
            ego_id = long(ego_id[0])
            i+=1
            print("Detectando comunidades: "+str(g_type)+" - "+str(alg)+" - Rede: "+str(net)+" - ego("+str(i)+"): "+str(file))
            try:
                if ud is False: # directed graph
                    execute = subprocess.Popen(["/home/amaury/algoritmos/Infomap/Infomap","-i link-list", str(graphs)+str(file), str(output), "--out-name "+str(ego_id), "-N 10", "--directed", "--two-level", "--map"], stdout=subprocess.PIPE)
                else: # undirected graph
                    execute = subprocess.Popen(["/home/amaury/algoritmos/Infomap/Infomap","-i link-list", str(graphs)+str(file), str(output), "--out-name "+str(ego_id), "-N 10", "--undirected", "--two-level", "--map"], stdout=subprocess.PIPE)
                value = execute.communicate()[0]
                print value
            except Exception as e:
                print e
        print("######################################################################")
######################################################################################################################################################################
#
# Método principal do programa.
# Realiza teste e coleta dos dados de cada user especificado no arquivo.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    """Interactive entry point: ask which network/relation to process, then
    run Infomap community detection twice (graphs with and without ego)."""
    os.system('clear')
    print "################################################################################"
    print" "
    print" Detecção de Comunidades - INFOMAP Method "
    print" "
    print"#################################################################################"
    print
    print
    print" 1 - Follow"
    print" 9 - Follwowers"
    print" 2 - Retweets"
    print" 3 - Likes"
    print" 4 - Mentions"
    print " "
    print" 5 - Co-Follow"
    print" 10 - Co-Followers"
    print" 6 - Co-Retweets"
    print" 7 - Co-Likes"
    print" 8 - Co-Mentions"
    print
    op = int(raw_input("Escolha uma opção acima: "))
    if op in (5,6,7,8,10): # co-* relations are undirected graphs
        ud = True
    elif op in (1,2,3,4,9):
        ud = False
    else:
        print("Opção inválida! Saindo...")
        sys.exit()
    # NOTE(review): 'uw' presumably means unweighted (follow/follower
    # graphs); the original comment here was a copy-paste of the test above
    if op == 1 or op == 9:
        uw = True
    else:
        uw = False
    ######################################################################
    net = "n"+str(op)
    ######################################################################################################################
    g_type1 = "graphs_with_ego"
    g_type2 = "graphs_without_ego"
    alg = "infomap"
    ######################################################################################################################
    # first pass: graphs including the ego node
    output = "/home/amaury/communities_hashmap/"+str(g_type1)+"/"+str(alg)+"/raw/"+str(net)+"/10/"
    graphs = "/home/amaury/graphs_hashmap_infomap/"+str(net)+"/"+str(g_type1)+"/"
    print ("Calculando Comunidades para a rede: "+str(net)+" - COM o ego")
    calculate_alg(output,net,uw,ud,g_type1,alg,graphs)
    ######################################################################################################################
    ######################################################################################################################
    # second pass: graphs excluding the ego node
    output = "/home/amaury/communities_hashmap/"+str(g_type2)+"/"+str(alg)+"/raw/"+str(net)+"/10/"
    graphs = "/home/amaury/graphs_hashmap_infomap/"+str(net)+"/"+str(g_type2)+"/"
    print ("Calculando Comunidades para a rede: "+str(net)+" - SEM o ego")
    calculate_alg(output,net,uw,ud,g_type2,alg,graphs)
    ######################################################################################################################
    print("######################################################################")
    print
    print("######################################################################")
    print("Script finalizado!")
    print("######################################################################\n")
######################################################################################################################################################################
#
# PROGRAM START
#
######################################################################################################################################################################
######################################################################################################################
if __name__ == "__main__": main()
|
gpl-3.0
| -3,754,045,302,539,895,300
| 43.09396
| 227
| 0.318569
| false
| 4.626761
| false
| false
| false
|
shodimaggio/SaivDr
|
appendix/pytorch/nsoltBlockDct2dLayer.py
|
1
|
2466
|
import torch
import torch.nn as nn
import torch_dct as dct
import math
from nsoltUtility import Direction
class NsoltBlockDct2dLayer(nn.Module):
    """
    NSOLTBLOCKDCT2DLAYER

    Input (image array split into decimation blocks):
        nSamples x nComponents x (Stride(1)xnRows) x (Stride(2)xnCols)

    Output, one tensor per component (nComponents):
        nSamples x nDecs x nRows x nCols

    Requirements: Python 3.7.x, PyTorch 1.7.x

    Copyright (c) 2020-2021, Shogo MURAMATSU

    All rights reserved.

    Contact address: Shogo MURAMATSU,
        Faculty of Engineering, Niigata University,
        8050 2-no-cho Ikarashi, Nishi-ku,
        Niigata, 950-2181, JAPAN

        http://msiplab.eng.niigata-u.ac.jp/
    """

    def __init__(self,
        name='',
        decimation_factor=[],
        number_of_components=1
        ):
        super(NsoltBlockDct2dLayer, self).__init__()
        # decimation (block) size as [vertical, horizontal]
        self.decimation_factor = decimation_factor
        self.name = name
        self.description = "Block DCT of size " \
            + str(self.decimation_factor[Direction.VERTICAL]) + "x" \
            + str(self.decimation_factor[Direction.HORIZONTAL])
        #self.type = ''
        self.num_outputs = number_of_components
        #self.num_inputs = 1

    def forward(self,X):
        """Apply an orthonormal 2-D DCT per decimation block and group the
        coefficients by (even/odd row, even/odd column) parity."""
        nComponents = self.num_outputs
        nSamples = X.size(0)
        height = X.size(2)
        width = X.size(3)
        stride = self.decimation_factor
        nrows = int(math.ceil(height/stride[Direction.VERTICAL]))
        ncols = int(math.ceil(width/stride[Direction.HORIZONTAL]))
        ndecs = stride[0]*stride[1] #math.prod(stride)

        # Block DCT (nSamples x nComponents x nrows x ncols) x decV x decH
        arrayshape = stride.copy()
        arrayshape.insert(0,-1)
        Y = dct.dct_2d(X.view(arrayshape),norm='ortho')
        # Rearrange the DCT Coefs. (nSamples x nComponents x nrows x ncols) x (decV x decH)
        # cee/coo/coe/ceo = coefficients at (even,even)/(odd,odd)/(odd,even)/(even,odd)
        # row-column index parity within each block, concatenated in that order
        cee = Y[:,0::2,0::2].reshape(Y.size(0),-1)
        coo = Y[:,1::2,1::2].reshape(Y.size(0),-1)
        coe = Y[:,1::2,0::2].reshape(Y.size(0),-1)
        ceo = Y[:,0::2,1::2].reshape(Y.size(0),-1)
        A = torch.cat((cee,coo,coe,ceo),dim=-1)
        Z = A.view(nSamples,nComponents,nrows,ncols,ndecs)
        # single component: drop the component axis; otherwise yield one
        # squeezed tensor per component
        if nComponents<2:
            return torch.squeeze(Z,dim=1)
        else:
            return map(lambda x: torch.squeeze(x,dim=1),torch.chunk(Z,nComponents,dim=1))
|
bsd-2-clause
| -1,141,814,849,013,926,000
| 33.457143
| 91
| 0.59204
| false
| 2.977778
| false
| false
| false
|
ctag/cpe453
|
JMRI/jython/xAPadapter.py
|
1
|
7516
|
# Adapter to xAP automation protocol
#
# Uses xAPlib to listen to the network, creating and
# maintaining internal Turnout and Sensor objects that
# reflect what is seen.
#
# The Turnouts' commanded state is updated, not the
# known state, so feedback needs to be considered in
# any more permanent implementation. Note that
# this does not yet send anything on modification,
# due to race conditions.
#
# Author: Bob Jacobsen, copyright 2010
# Part of the JMRI distribution
# Ver 1.2 01/11/2011 NW Changes to the input code
# Ver 1.3 07/11/2011 NW Added a "Type" to the BSC message format
# Ver 1.4 07/12/2011 NW Changes to xAP Tx Message area
#
#
#
#
# The next line is maintained by CVS, please don't change it
# $Revision: 27263 $
import jarray
import jmri
import xAPlib
# create the network
print "opening "
# NOTE(review): `global` at module level is a no-op; myNetwork is already
# module-scoped (the listeners below re-declare it inside their methods)
global myNetwork
myNetwork = xAPlib.xAPNetwork("listener.xap")
# display some info
properties = myNetwork.getProperties()
print "getBroadcastIP()", properties.getBroadcastIP()
print "getHeartbeatInterval()", properties.getHeartbeatInterval()
print "getInstance() ", properties.getInstance()
print "getPort() ", properties.getPort()
print "getSource() ", properties.getSource()
print "getUID() ", properties.getUID()
print "getVendor() ", properties.getVendor()
print "getxAPAddress() ", properties.getxAPAddress()
print
# Define thexAPRxEventListener: Print some
# information when event arrives
class InputListener(xAPlib.xAPRxEventListener):
    """Listen to xAP traffic and mirror xAPBSC.info/event messages into
    JMRI Turnout and Sensor objects (Jython / Python 2)."""
    def myEventOccurred(self, event, message):
        """Entry point for every received xAP message: parse it, then route
        output.state pairs to turnouts and input.state pairs to sensors."""
        print "==== rcvd ===="
        print message
        print "--------------"
        # try parsing and printing
        fmtMsg = xAPlib.xAPParser(message)
        print "source: ", fmtMsg.getSource()
        print "target: ", fmtMsg.getTarget()
        print "class: ", fmtMsg.getClassName()
        print "uid: ", fmtMsg.getUID()
        if (fmtMsg.getClassName() == "xAPBSC.info" or fmtMsg.getClassName() == "xAPBSC.event") :
            print " --- Acting on "+fmtMsg.getClassName()+" ---"
            if (fmtMsg.getNameValuePair("output.state","Type") != None) :
                print " --- Acting on output.state ---"
                pair = fmtMsg.getNameValuePair("output.state","Type")
                if (pair == None) :
                    print "No Type, ending"
                    return
                type = pair.getValue().upper()
                print "NWE Type:", type,":"
                if (type == "TURNOUT" or type == "SIGNAL") :
                    print "NWE Turnout/Signal"
                    self.processTurnout(fmtMsg, message)
            if (fmtMsg.getNameValuePair("input.state","Type") != None) :
                pair = fmtMsg.getNameValuePair("input.state","Type")
                type = pair.getValue().upper()
                if (type == "SENSOR") :
                    print "NWE Sensor"
                    print " --- Acting on input.state ---"
                    self.processSensor(fmtMsg, message)
        print "=============="
        return
    # Process Turnout
    def processTurnout(self, fmtMsg, message) :
        """Create/update the JMRI Turnout for an output.state block; ON maps
        to THROWN, anything else to CLOSED (commanded state only)."""
        pair = fmtMsg.getNameValuePair("output.state","Name")
        if (pair == None) :
            print "No Name"
            name = None
        else :
            name = pair.getValue()
            print " Name:", name
        pair = fmtMsg.getNameValuePair("output.state","Location")
        if (pair == None) :
            print "No Location"
            location = None
        else :
            location = pair.getValue()
            print " Location: ", location
        pair = fmtMsg.getNameValuePair("output.state","State")
        if (pair == None) :
            print "No State, ending"
            return
        state = pair.getValue().upper()
        print " State: ", state
        # now create a Turnout and set
        value = CLOSED
        if (state == "ON") :
            value = THROWN
        turnout = turnouts.getTurnout("IT:XAP:XAPBSC:"+fmtMsg.getSource())
        if (turnout == None) :
            # first sighting: provide the turnout and name it (once)
            print " create x turnout IT:XAP:XAPBSC:"+fmtMsg.getSource()
            turnout = turnouts.provideTurnout("IT:XAP:XAPBSC:"+fmtMsg.getSource())
            if (name != None) :
                turnout.setUserName(name)
        turnout.setCommandedState(value)
        print " set turnout IT:XAP:XAPBSC:"+fmtMsg.getSource()+" to", value
        return
    # Process Sensor
    def processSensor(self, fmtMsg, message) :
        """Create/update the JMRI Sensor for an input.state block; ON maps
        to ACTIVE, anything else to INACTIVE."""
        pair = fmtMsg.getNameValuePair("input.state","Name")
        if (pair == None) :
            print "No Name"
            name = None
        else :
            name = pair.getValue()
            print " Name:", name
        pair = fmtMsg.getNameValuePair("input.state","Location")
        if (pair == None) :
            print "No Location"
            location = None
        else :
            location = pair.getValue()
            print " Location: ", location
        pair = fmtMsg.getNameValuePair("input.state","State")
        if (pair == None) :
            print "No State, ending"
            return
        state = pair.getValue().upper()
        print " State: ", state
        # now create a Sensor and set
        value = INACTIVE
        if (state == "ON") :
            value = ACTIVE
        sensor = sensors.getSensor("IS:XAP:XAPBSC:"+fmtMsg.getSource())
        if (sensor == None) :
            # first sighting: provide the sensor and name it (once)
            print " create x sensor IS:XAP:XAPBSC:"+fmtMsg.getSource()
            sensor = sensors.provideSensor("IS:XAP:XAPBSC:"+fmtMsg.getSource())
            if (name != None) :
                sensor.setUserName(name)
        sensor.setState(value)
        print " set sensor IS:XAP:XAPBSC:"+fmtMsg.getSource()+" to ", value
        return
# Define the turnout listener class, which drives output messages
class TurnoutListener(java.beans.PropertyChangeListener):
    # Listens for JMRI turnout state changes and forwards each change to the
    # xAP network as an xAPBSC.cmd message.  Instances are configured
    # externally (see defineTurnout) with .uid, .id and .target attributes
    # before being attached to a turnout.
    def propertyChange(self, event):
        global myNetwork
        print " ************** Sending xAP Message **************"
        print "change",event.propertyName
        print "from", event.oldValue, "to", event.newValue
        print "source systemName", event.source.systemName
        print "source userName", event.source.userName
        # format and send the message
        # the final message will look like this on the wire:
        #
        # xap-header
        # {
        # v=12
        # hop=1
        # uid=FFFF0000
        # class=xAPBSC.cmd
        # source=JMRI.DecoderPro.1
        # destination=NWE.EVA485.DEFAULT:08
        # }
        # output.state.1
        # {
        # ID=08
        # State=ON
        # }
        # *
        myProperties = myNetwork.getProperties()
        myMessage = xAPlib.xAPMessage("xAPBSC.cmd", myProperties.getxAPAddress())
        myMessage.setUID(self.uid)
        myMessage.setTarget(self.target)
        # JMRI CLOSED maps to xAP State=OFF, anything else (THROWN) to ON.
        if (event.newValue == CLOSED) :
            myMessage.addNameValuePair( "output.state.1", "ID", self.id)
            myMessage.addNameValuePair( "output.state.1", "State", "OFF")
            myMessage.addNameValuePair( "output.state.1", "Text", "CLOSED") # Optional
        else :
            myMessage.addNameValuePair( "output.state.1", "ID", self.id)
            myMessage.addNameValuePair( "output.state.1", "State", "ON")
            myMessage.addNameValuePair( "output.state.1", "Text", "THROWN") # Optional
        myNetwork.sendMessage(myMessage)
        print myMessage.toString()
        return
def defineTurnout(name, uid, id, target) :
    # Create (or fetch) the named JMRI turnout and attach a TurnoutListener
    # configured with the xAP UID, device id and destination address.
    listener = TurnoutListener()
    listener.uid = uid
    listener.id = id
    listener.target = target
    turnout = turnouts.provideTurnout(name)
    turnout.addPropertyChangeListener(listener)
    return
# register xAPRxEvents listener
print "register"
myNetwork.addMyEventListener(InputListener())
# define the turnouts
# Map xAP device NWE.EVA485.DEFAULT:99 (UID FF010100) onto a JMRI turnout.
defineTurnout("IT:XAP:XAPBSC:NWE.EVA485.DEFAULT:99", "FF010100", "99", "NWE.EVA485.DEFAULT")
print "End of Script"
|
gpl-2.0
| 9,160,168,916,993,520
| 33.319635
| 110
| 0.612826
| false
| 3.484469
| false
| false
| false
|
cjgrady/pamRandomization
|
convertBackToCsv.py
|
1
|
3520
|
"""
@summary: Converts the random PAMs back into CSVs and add back in headers
"""
import concurrent.futures
import csv
import os
import numpy as np
# .............................................................................
def writeCSVfromNpy(outFn, mtxFn, headerRow, metaCols):
"""
@summary: Converts a numpy array back into a CSV file
@param outFn: The filename where to write the data
@param mtx: The numpy matrix
@param headerRow: The headers for the file
@param metaCols: Meta columns
"""
mtx = np.load(mtxFn)
# Sanity checks
numRows, numCols = mtx.shape
#print numRows, numCols, len(metaCols), len(headerRow), len(metaCols[0])
#assert numRows == len(metaCols)
#assert numCols == len(headerRow) - len(metaCols[0])
with open(outFn, 'w') as outF:
writer = csv.writer(outF, delimiter=',')
# Write header row
writer.writerow(headerRow)
# Write each row
for i in range(numRows):
row = []
row.extend(metaCols[i])
#row = metaCols[i]
row.extend(mtx[i])
writer.writerow(row)
# .............................................................................
def getHeaderRowAndMetaCols(csvFn):
    """
    @summary: Extracts the header row and the meta columns (first three
        values of each data row) from a CSV file
    @param csvFn: Path to the CSV file to read
    @return: (headerRow, metaCols) tuple
    @note: Assumes that both are present
    """
    metaCols = []
    with open(csvFn) as inF:
        reader = csv.reader(inF)
        # next() builtin works on Python 2 and 3; reader.next() is 2-only.
        headerRow = next(reader)
        for row in reader:
            metaCols.append(row[0:3])
    return headerRow, metaCols
# .............................................................................
if __name__ == "__main__":
    import glob
    # (glob pattern of randomized .npy PAMs, original CSV supplying the
    # header row and the per-row metadata columns)
    globFns = [
        ('/home/cjgrady/ecosim/rand/pam2650-*.npy', '/home/cjgrady/ecosim/csvs/pam_2650_reclass3.csv'),
        ('/home/cjgrady/ecosim/rand/pam2670-*.npy', '/home/cjgrady/ecosim/csvs/pam_2670_reclass3.csv'),
        ('/home/cjgrady/ecosim/rand/pam8550-*.npy', '/home/cjgrady/ecosim/csvs/pam_8550_reclass3.csv'),
        ('/home/cjgrady/ecosim/rand/pam8570-*.npy', '/home/cjgrady/ecosim/csvs/pam_8570_reclass3.csv')
    ]
    # Fan the per-matrix conversions out over worker processes.
    with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
        for inFn, origCsv in globFns:
            fns = glob.glob(inFn)
            print len(fns)
            #origCsv = "/home/cjgrady/ecosim/csvs/pam_presentM.csv"
            headerRow, metaCols = getHeaderRowAndMetaCols(origCsv)
            for fn in fns:
                baseFn = os.path.splitext(os.path.basename(fn))[0]
                outFn = os.path.join('/home/cjgrady/ecosim/randCsvs/', '%s.csv' % baseFn)
                # Skip outputs that already exist, so the run is resumable.
                if not os.path.exists(outFn):
                    executor.submit(writeCSVfromNpy, outFn, fn, headerRow, metaCols)
                    #writeCSVfromNpy(outFn, fn, headerRow, metaCols)

#for fn in fns:
#   print fn
#   mtx = np.load(fn)
#   baseFn = os.path.splitext(os.path.basename(fn))[0]
#   outFn = os.path.join('/home/cjgrady/ecosim/randCsvs/', '%s.csv' % baseFn)
#   print "Writing out to:", outFn
#   #outFn = '/home/cjgrady/ecosim/randCsvs/pam_presentM-200.csv'
#   writeCSVfromNpy(outFn, mtx, headerRow, metaCols)
#   mtx = None

#def runMultiprocess(myArgs):
#   with concurrent.futures.ProcessPoolExecutor(max_workers=4) as executor:
#      #for url, cl in myArgs:
#      #   executor.submit(testGetUrl, url, cl)
#      for e in executor.map(pushJobData, myArgs):
#         print e
|
gpl-2.0
| 6,710,682,274,103,524,000
| 33.174757
| 109
| 0.571591
| false
| 3.26228
| false
| false
| false
|
annoviko/pyclustering
|
pyclustering/cluster/examples/birch_examples.py
|
1
|
5638
|
"""!
@brief Examples of usage and demonstration of abilities of BIRCH algorithm in cluster analysis.
@authors Andrei Novikov (pyclustering@yandex.ru)
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.cluster import cluster_visualizer
from pyclustering.cluster.birch import birch
from pyclustering.container.cftree import measurement_type
from pyclustering.utils import read_sample
from pyclustering.samples.definitions import SIMPLE_SAMPLES, FCPS_SAMPLES
def template_clustering(number_clusters, path, branching_factor=50, max_node_entries=100, initial_diameter=0.5, type_measurement=measurement_type.CENTROID_EUCLIDEAN_DISTANCE, entry_size_limit=200, diameter_multiplier=1.5, show_result=True):
    """Run BIRCH on the sample stored at `path`.

    Optionally renders the clustering and returns the (sample, clusters)
    pair so callers can compose their own visualizations.
    """
    print("Sample: ", path)
    data = read_sample(path)

    instance = birch(data, number_clusters, branching_factor,
                     max_node_entries, initial_diameter, type_measurement,
                     entry_size_limit, diameter_multiplier)
    instance.process()
    found_clusters = instance.get_clusters()

    if show_result is True:
        visualizer = cluster_visualizer()
        visualizer.append_clusters(found_clusters, data)
        visualizer.show()

    return data, found_clusters
# Per-sample demo wrappers: each runs BIRCH on one bundled dataset with its
# known cluster count.
def cluster_sample1():
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1)
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 5, 5, 0.1, measurement_type.CENTROID_EUCLIDEAN_DISTANCE, 2) # only two entries available

def cluster_sample2():
    template_clustering(3, SIMPLE_SAMPLES.SAMPLE_SIMPLE2)

def cluster_sample3():
    template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE3)

def cluster_sample4():
    template_clustering(5, SIMPLE_SAMPLES.SAMPLE_SIMPLE4)

def cluster_sample5():
    template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE5)

def cluster_sample7():
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE7)

def cluster_sample8():
    template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE8)

def cluster_elongate():
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_ELONGATE)

def cluster_lsun():
    template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN)

def cluster_lsun_rebuilt():
    # Small entry limit forces CF-tree rebuilds during clustering.
    template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN, entry_size_limit=20, diameter_multiplier=1.5)

def cluster_target():
    template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET)

def cluster_two_diamonds():
    template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS)

def cluster_wing_nut():
    template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT)

def cluster_chainlink():
    template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK)

def cluster_hepta():
    template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA)

def cluster_tetra():
    template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA)

def cluster_engy_time():
    template_clustering(2, FCPS_SAMPLES.SAMPLE_ENGY_TIME)
def experiment_execution_time(ccore=False):
    # Runs BIRCH over every bundled sample back to back (timing experiment).
    # NOTE(review): the ccore flag is accepted but never used in this body --
    # confirm whether a C-core code path was intended.
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1)
    template_clustering(3, SIMPLE_SAMPLES.SAMPLE_SIMPLE2)
    template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE3)
    template_clustering(5, SIMPLE_SAMPLES.SAMPLE_SIMPLE4)
    template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE5)
    template_clustering(2, SIMPLE_SAMPLES.SAMPLE_ELONGATE)
    template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN)
    template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET)
    template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS)
    template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT)
    template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK)
    template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA)
    template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA)
    template_clustering(2, FCPS_SAMPLES.SAMPLE_ATOM)
def display_fcps_clustering_results():
    # Cluster all FCPS samples without individual windows, then render them
    # together on a single 8-panel figure.
    (lsun, lsun_clusters) = template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN, show_result=False)
    (target, target_clusters) = template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET, show_result=False)
    (two_diamonds, two_diamonds_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS, show_result=False)
    (wing_nut, wing_nut_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT, show_result=False)
    (chainlink, chainlink_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK, show_result=False)
    (hepta, hepta_clusters) = template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA, show_result=False)
    (tetra, tetra_clusters) = template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA, show_result=False)
    (atom, atom_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_ATOM, show_result=False)

    visualizer = cluster_visualizer(8, 4)
    visualizer.append_clusters(lsun_clusters, lsun, 0)
    visualizer.append_clusters(target_clusters, target, 1)
    visualizer.append_clusters(two_diamonds_clusters, two_diamonds, 2)
    visualizer.append_clusters(wing_nut_clusters, wing_nut, 3)
    visualizer.append_clusters(chainlink_clusters, chainlink, 4)
    visualizer.append_clusters(hepta_clusters, hepta, 5)
    visualizer.append_clusters(tetra_clusters, tetra, 6)
    visualizer.append_clusters(atom_clusters, atom, 7)
    visualizer.show()
# Run every demo when the module is executed as a script.
cluster_sample1()
cluster_sample2()
cluster_sample3()
cluster_sample4()
cluster_sample5()
cluster_sample7()
cluster_sample8()
cluster_elongate()
cluster_lsun()
cluster_lsun_rebuilt()
cluster_target()
cluster_two_diamonds()
cluster_wing_nut()
cluster_chainlink()
cluster_hepta()
cluster_tetra()
cluster_engy_time()
experiment_execution_time(True) # C++ code + Python env.
display_fcps_clustering_results()
|
gpl-3.0
| 559,214,982,133,781,200
| 35.337748
| 240
| 0.73182
| false
| 3.094402
| false
| false
| false
|
plantigrade/geni-tools
|
src/gcf/geni/util/urn_util.py
|
1
|
10380
|
#----------------------------------------------------------------------
# Copyright (c) 2010-2015 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
'''
URN creation and verification utilities.
'''
import re
from ...sfa.util.xrn import Xrn # for URN_PREFIX
class URN(object):
    """
    A class that creates and extracts values from URNs
    URN Convention:
    urn:publicid:IDN+<authority>+<type>+<name>
    Authority, type, and name are public ids transcribed into URN format
    By convention a CH's name should be "ch" and an AM's should be "am"
    The authority of the CH should be the prefix for all of your AM and user authorities
    For instance: CH authority = "gcf//gpo//bbn", AM authority = "gcf//gpo/bbn//am1", user authority = "gcf//gpo//bbn"
    EXAMPLES:
        ch_urn = URN("gcf//gpo//bbn", "authority", "sa").urn_string() for a clearinghouse URN
        am1_urn = URN("gcf//gpo//bbn//site1", "authority", "am").urn_string() for an AM at this authority
            Looks like urn:publicid:IDN+gcf:gpo:bbn:site1+authority+am
        am2_urn = URN("gcf//gpo//bbn//site2", "authority", "am").urn_string() for a second AM at this authority
            Looks like urn:publicid:IDN+gcf:gpo:bbn:site2+authority+am
        user_urn = URN("gcf//gpo//bbn", "user", "jane").urn_string() for a user made by the clearinghouse
            Looks like urn:publicid:IDN+gcf:gpo:bbn+user+jane
        slice_urn = URN("gcf//gpo//bbn", "slice", "my-great-experiment").urn_string()
            Looks like urn:publicid:IDN+gcf:gpo:bbn+slice+my-great-experiment
        resource_at_am1_urn = URN("gcf//gpo//bbn/site1", "node", "LinuxBox23").urn_string() for Linux Machine 23 managed by AM1 (at site 1)
            Looks like urn:publicid:IDN+gcf:gpo:bbn:site1+node+LinuxBox23
    """

    def __init__(self, authority=None, type=None, name=None, urn=None):
        # Either parse an existing URN string, or build one from its three
        # publicid-format parts (authority, type, name).
        if not urn is None:
            if not is_valid_urn(urn):
                raise ValueError("Invalid URN %s" % urn)
            spl = urn.split('+')
            if len(spl) < 4:
                raise ValueError("Invalid URN %s" % urn)
            self.authority = urn_to_string_format(spl[1])
            self.type = urn_to_string_format(spl[2])
            # Names may themselves contain '+', so re-join the tail.
            self.name = urn_to_string_format('+'.join(spl[3:]))
            self.urn = urn
        else:
            if not authority or not type or not name:
                raise ValueError("Must provide either all of authority, type, and name, or a urn must be provided")
            for i in [authority, type, name]:
                if i.strip() == '':
                    raise ValueError("Parameter to create_urn was empty string")
            # Keep the un-escaped values on the instance; escape copies only
            # for assembling the URN string.
            self.authority = authority
            self.type = type
            self.name = name
            # FIXME: check these are valid more?
            if not is_valid_urn_string(authority):
                authority = string_to_urn_format(authority)
            if not is_valid_urn_string(type):
                type = string_to_urn_format(type)
            if not is_valid_urn_string(name):
                name = string_to_urn_format(name)
            self.urn = '%s+%s+%s+%s' % (Xrn.URN_PREFIX, authority, type, name)
        if not is_valid_urn(self.urn):
            raise ValueError("Failed to create valid URN from args %s, %s, %s" % (self.authority, self.type, self.name))

    def __str__(self):
        return self.urn_string()

    def urn_string(self):
        '''Return the full URN as a string'''
        return self.urn

    def getAuthority(self):
        '''Get the authority in un-escaped publicid format'''
        return self.authority

    def getType(self):
        '''Get the URN type in un-escaped publicid format'''
        return self.type

    def getName(self):
        '''Get the name in un-escaped publicid format'''
        return self.name
# Translate publicids to URN format.
# The order of these rules matters
# because we want to catch things like double colons before we
# translate single colons. This is only a subset of the rules.
# See the GENI Wiki: GAPI_Identifiers
# See http://www.faqs.org/rfcs/rfc3151.html
# (plain, escaped) pairs: applied in order by string_to_urn_format and in
# reverse by urn_to_string_format.
publicid_xforms = [('%', '%25'),
                   (';', '%3B'),
                   ('+', '%2B'),
                   (' ', '+' ), # note you must first collapse WS
                   ('#', '%23'),
                   ('?', '%3F'),
                   ("'", '%27'),
                   ('::', ';' ),
                   (':', '%3A'),
                   ('//', ':' ),
                   ('/', '%2F')]

# FIXME: See sfa/util/xrn/Xrn.URN_PREFIX which is ...:IDN
publicid_urn_prefix = 'urn:publicid:'
def nameFromURN(instr):
    '''Get the name from the given URN, or empty if not a valid URN'''
    if not instr:
        return ""
    try:
        urn = URN(urn=instr)
        return urn.getName()
    # "except Exception, e:" was Python-2-only syntax and the bound name was
    # unused; URN() raises ValueError on malformed input -- map any parse
    # failure to the empty string.
    except Exception:
        # print 'exception parsing urn: %s' % e
        return ""
# validate urn
# Note that this is not sufficient but it is necessary
def is_valid_urn_string(instr):
    '''Could this string be part of a URN

    True when instr is a str/unicode containing no whitespace and none of
    '#', '?' or '/' (the character class also rejects a literal '|').
    '''
    if instr is None or not (isinstance(instr, str) or
                             isinstance(instr, unicode)):
        return False
    #No whitespace
    # no # or ? or /
    if isinstance(instr, unicode):
        instr = instr.encode('utf8')
    # Raw string: the old literal relied on invalid escape sequences
    # (\s, \?, \/, \#); the regex itself is unchanged.
    if re.search(r"[\s|\?\/\#]", instr) is None:
        return True
    return False
# Note that this is not sufficient but it is necessary
def is_valid_urn(inurn):
    '''Check that this string is a valid URN (necessary, not sufficient).'''
    # FIXME: This could pull out the type and do the type specific
    # checks that are currently below
    # FIXME: This should check for non empty authority and name pieces
    if not is_valid_urn_string(inurn):
        return False
    if not inurn.startswith(publicid_urn_prefix):
        return False
    return len(inurn.split('+')) > 3
def is_valid_urn_bytype(inurn, urntype, logger=None):
    '''Check that inurn is a valid URN whose type matches urntype.

    Also enforces the per-type name rules for slice/sliver/user URNs.  The
    first failed check is described via logger.warn (if a logger is given).
    '''
    if not is_valid_urn(inurn):
        return False
    urnObj = URN(urn=inurn)
    if not urntype:
        urntype = ""
    urntype = urntype.lower()
    if not urnObj.getType().lower() == urntype:
        if logger:
            logger.warn("URN %s not of right type: %s, not %s", inurn, urnObj.getType().lower(), urntype)
        return False
    if len(urnObj.getAuthority()) == 0:
        if logger:
            logger.warn("URN %s has empty authority", inurn)
        return False
    name = urnObj.getName()
    # All patterns below are raw strings: the old literals relied on the
    # invalid escape sequences \. and \w.  The patterns are unchanged.
    if urntype == 'slice':
        # Slice names are <=19 characters, only alphanumeric plus hyphen (no hyphen in first character): '^[a-zA-Z0-9][-a-zA-Z0-9]{0,18}$'
        if len(name) > 19:
            if logger:
                logger.warn("URN %s too long. Slice names are max 19 characters", inurn)
            return False
        if not re.match(r"^[a-zA-Z0-9][-a-zA-Z0-9]{0,18}$", name):
            if logger:
                logger.warn("Slice names may only be alphanumeric plus hyphen (no leading hyphen): %s", name)
            return False
    elif urntype == 'sliver':
        # May use only alphanumeric characters plus hyphen
        # Note that EG uses a ':' as well.
        if not re.match(r"^[-a-zA-Z0-9_\.]+$", name):
            if logger:
                logger.warn("Sliver names may only be alphanumeric plus hyphen, underscore, or period: %s", name)
            return False
    elif urntype == 'user':
        # Usernames should begin with a letter and be alphanumeric or underscores; no hyphen or '.': ('^[a-zA-Z][\w]{0,7}$').
        # Usernames are limited to 8 characters.
        if len(name) > 8:
            if logger:
                logger.warn("URN %s too long. User names are max 8 characters", inurn)
            return False
        if not re.match(r"^[a-zA-Z][\w]{0,7}$", name):
            if logger:
                logger.warn("User names may only be alphanumeric plus underscore, beginning with a letter: %s", name)
            return False
    elif len(name) == 0:
        if logger:
            logger.warn("Empty name in URN %s", inurn)
        return False
    return True
def urn_to_publicid(urn):
    '''Convert a URN like urn:publicid:... to a publicid'''
    if urn is None or not is_valid_urn(urn):
        # Erroneous urn for conversion
        raise ValueError('Invalid urn: ' + urn)
    # Strip the fixed prefix, then undo the publicid->URN escaping.
    stripped = urn[len(publicid_urn_prefix):]
    return urn_to_string_format(stripped)
def publicid_to_urn(id):
    '''Convert a publicid to a urn like urn:publicid:.....'''
    # Escape the publicid, then prepend the fixed 'urn:publicid:' prefix.
    escaped = string_to_urn_format(id)
    return publicid_urn_prefix + escaped
def string_to_urn_format(instr):
    '''Make a string URN compatible, collapsing whitespace and escaping chars'''
    if instr is None or instr.strip() == '':
        raise ValueError("Empty string cant be in a URN")
    # Collapse runs of whitespace to single spaces first (required before
    # the ' ' -> '+' transform), then apply each escape in declaration order
    # (order matters: e.g. '::' must be handled before ':').
    result = ' '.join(instr.strip().split())
    for plain, escaped in publicid_xforms:
        result = result.replace(plain, escaped)
    return result
def urn_to_string_format(urnstr):
    '''Turn a part of a URN into publicid format, undoing transforms'''
    if urnstr is None or urnstr.strip() == '':
        return urnstr
    # Undo the escapes in reverse declaration order so multi-character
    # sequences are restored correctly.
    result = urnstr
    for plain, escaped in reversed(publicid_xforms):
        result = result.replace(escaped, plain)
    return result
|
mit
| 3,436,441,151,345,414,700
| 40.854839
| 138
| 0.593353
| false
| 3.647224
| false
| false
| false
|
amfarrell/pickhost
|
src/pickhost/pickhost/settings.py
|
1
|
4605
|
"""
Django settings for pickhost project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import dj_database_url
import json
# DEBUG is opt-in via the environment; any value other than a literal
# 'true'/'True'/'TRUE' string leaves it off.
DEBUG = os.environ.get('DEBUG', False) in ['True', 'TRUE', 'true']

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
WEBPACK_BASE = os.path.join(BASE_DIR, 'static')

STATICFILES_DIRS = (
    os.path.join(WEBPACK_BASE, 'assets'),
)

WEBPACK_LOADER = {
    'DEFAULT': {
        'BUNDLE_DIR_NAME': 'bundles/',
        'STATS_FILE': os.path.join(WEBPACK_BASE, 'webpack-stats.json'),
        'POLL_INTERVAL': 0.1,
        # Raw strings: these are regexes; the previous literals relied on
        # the invalid escape sequence '\.'.  The values are unchanged.
        'IGNORE': [r'.+\.hot-update.js', r'.+\.map']
    }
}

if not DEBUG:
    # Production builds are emitted to dist/ with their own stats file.
    WEBPACK_LOADER['DEFAULT'].update({
        'BUNDLE_DIR_NAME': 'dist/',
        'STATS_FILE': os.path.join(WEBPACK_BASE, 'webpack-stats-prod.json')
    })
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the fallback 'XXX' is insecure -- ensure SECRET_KEY is set in
# every deployed environment.
SECRET_KEY = os.environ.get('SECRET_KEY', 'XXX')

# SECURITY WARNING: don't run with debug turned on in production!

# DOMAINS is a JSON list of hostnames, e.g. '["example.com"]'.
ALLOWED_HOSTS = json.loads(os.environ.get('DOMAINS', '["0.0.0.0"]'))

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'webpack_loader',
    'party'
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'pickhost.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'pickhost.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# Use DATABASE_URL (e.g. on Heroku) when present, otherwise fall back to a
# local SQLite file.
if os.environ.get('DATABASE_URL'):
    DATABASES = {
        'default': dj_database_url.config(),
    }
else:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        }
    }

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = os.environ.get('STATIC_URL', '/static/')
STATIC_ROOT = os.environ.get('STATIC_ROOT', './static_root/')

# Console-only logging; verbosity tunable via LOG_LEVEL.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': os.environ.get('LOG_LEVEL', 'INFO'),
        },
    },
}

# CITYMAPPER_API_KEY is required: settings import fails fast (KeyError)
# when it is missing from the environment.
CITYMAPPER_API_KEY = os.environ['CITYMAPPER_API_KEY']
CITYMAPPER_URL = os.environ.get('CITYMAPPER_URL', 'https://developer.citymapper.com')
|
mit
| -8,103,281,417,396,609,000
| 26.740964
| 103
| 0.6519
| false
| 3.408586
| false
| false
| false
|
hbradleyiii/ww
|
ww/main.py
|
1
|
1722
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# name: main.py
# author: Harold Bradley III
# email: harold@bradleystudio.net
# date: 12/11/2015
#
# description: A program for managing websites
#
from __future__ import absolute_import, print_function
try:
from ext_pylib.prompt import prompt, prompt_str, warn_prompt
except ImportError:
raise ImportError('ext_pylib must be installed to run ww')
import platform
import sys
from ww import Website, WebsiteDomain, Vhost, WPWebsite
__author__ = 'Harold Bradley III'
__copyright__ = 'Copyright (c) 2015-2016 Harold Bradley III'
__license__ = 'MIT'
def display_help():
    """Displays script help."""
    # Placeholder until real usage text is written.
    message = 'Help not yet implemented.'
    print(message)
def main():
    """Main entry point for the script.

    Pops sys.argv for a command ('install', 'remove', 'pack', 'unpack',
    'verify', 'repair'), an optional 'wp' flag and an optional domain, then
    dispatches to the matching Website/WPWebsite method.

    Returns None on success or when help is shown, 1 on an unknown command.
    Raises RuntimeError when not on Linux or when sys.argv is unset.
    """
    if platform.system() != 'Linux':
        # The original raised the undefined name SysError (a NameError at
        # runtime); RuntimeError carries the intended message instead.
        raise RuntimeError('ERROR: ww cannot be run from ' + platform.system() + '.')

    try:
        script = sys.argv.pop(0)  # discard the program name
    except IndexError:  # Unknown Error (UnknownError was also undefined)
        raise RuntimeError('ERROR: sys.argv was not set in main()')

    try:
        command = sys.argv.pop(0)
    except IndexError:  # No arguments given
        display_help()  # If no argmuments are given, display help
        return

    if command not in ['install', 'remove', 'pack', 'unpack', 'verify', 'repair']:
        print('ERROR: Command "' + command + '" not understood.')
        return 1

    # Optional 'wp' token selects the WordPress website flavour.
    wp = False
    if sys.argv and sys.argv[0] == 'wp':
        sys.argv.pop(0)
        wp = True

    domain = ''
    if sys.argv:
        domain = sys.argv.pop(0)

    website = WPWebsite(domain) if wp else Website(domain)
    getattr(website, command)()
# Script entry point: process exit status comes from main().
if __name__ == '__main__':
    sys.exit(main())
|
mit
| 5,718,139,895,927,511,000
| 23.956522
| 82
| 0.608595
| false
| 3.5875
| false
| false
| false
|
ajpina/uffema
|
uffema/slots/type1.py
|
1
|
7525
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
# ==========================================================================
# Copyright (C) 2016 Dr. Alejandro Pina Ortega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==========================================================================
"""
Class for slots of type 1
"""
# ==========================================================================
# Program: type1.py
# Author: ajpina
# Date: 12/23/16
# Version: 0.1.1
#
# Revision History:
# Date Version Author Description
# - 12/23/16: 0.1.1 ajpina Defines mandatory methods and properties
#
# ==========================================================================
__author__ = 'ajpina'
import numpy as np
from uffema.slots import Slot
from uffema.misc.constants import *
class Type1(Slot):
@property
def h0(self):
return self._h0
@h0.setter
def h0(self, value):
self._h0 = value
@property
def h2(self):
return self._h2
@h2.setter
def h2(self, value):
self._h2 = value
@property
def w0(self):
return self._w0
@w0.setter
def w0(self, value):
self._w0 = value
@property
def w1(self):
return self._w1
@w1.setter
def w1(self, value):
self._w1 = value
@property
def w2(self):
return self._w2
@w2.setter
def w2(self, value):
self._w2 = value
@property
def so_position(self):
return self._so_position
@so_position.setter
def so_position(self, value):
self._so_position = value
@property
def s_position(self):
return self._s_position
@s_position.setter
def s_position(self, value):
self._s_position = value
@property
def liner_thickness(self):
return self._liner_thickness
@liner_thickness.setter
def liner_thickness(self, value):
self._liner_thickness = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
def __init__(self, slot_settings, stator_mode):
super(Type1, self).__init__(slot_settings)
self.h0 = slot_settings['h0']
self.h2 = slot_settings['h2']
self.w0 = slot_settings['w0']
self.w1 = slot_settings['w1']
self.w2 = slot_settings['w2']
self.so_position = slot_settings['SOpos']
self.s_position = slot_settings['Spos']
# It is assumed an insulation liner of 0.5mm thickness
self.liner_thickness = 0.5e-3
self.type = self.type + 'Type1'
def get_slot_center(self):
return self.h0 + (2.0/5.0)*self.h2
def get_type(self):
return 'Type1'
def get_area(self):
return 0
def get_slot_total_height(self):
return self.h0 + self.h2
def get_conductor_area_width(self):
return (self.w1 + self.w2) / 2.0
def get_conductor_area_height(self):
return self.h2
def get_coil_area_base_point(self, inner_radius):
return inner_radius + self.h0
def get_slot_opening_geometry(self, inner_radius):
angle_slot_opening_bottom = np.arcsin(-(self.w0/2.0)/ inner_radius + self.h0 )
angle_slot_opening_top = np.arcsin(-(self.w0 / 2.0) / inner_radius )
points = {
'2': [inner_radius, 0, 0],
'3': [inner_radius + self.h0, 0, 0],
'4': [(inner_radius + self.h0)*np.cos(angle_slot_opening_bottom), (inner_radius + self.h0)*np.sin(angle_slot_opening_bottom) , 0],
'5': [(inner_radius)*np.cos(angle_slot_opening_bottom), (inner_radius)*np.sin(angle_slot_opening_bottom) , 0]
}
lines = {
'1': [2, 3],
'2': [3, 4],
'3': [4, 5],
'4': [5, 2]
}
return points, lines
def get_slot_wedge_geometry(self, inner_radius):
points = None
lines = None
return points, lines
def get_backiron_geometry(self, inner_radius, outer_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
angle_slot_base = np.arcsin(-(self.w2 / 2.0) / (inner_radius + self.h2))
points = {
'6': [inner_radius + self.h2, 0, 0],
'7': [outer_radius, 0, 0],
'8': [outer_radius * np.cos( -slot_pitch/2.0 ), outer_radius * np.sin( -slot_pitch/2.0 ), 0],
'9': [(inner_radius + self.h0 + self.h2) * np.cos( -slot_pitch/2.0 ),
(inner_radius + self.h0 + self.h2) * np.sin( -slot_pitch/2.0 ) , 0],
'10': [(inner_radius + self.h2) * np.cos(angle_slot_base),
(inner_radius + self.h2) * np.sin(angle_slot_base), 0]
}
lines = {
'5': [6, 7],
'6': [7, 1, 8],
'7': [8, 9],
'8': [9, 10],
'9': [10, 1, 6]
}
return points, lines
def get_tooth_geometry(self, inner_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
angle_slot_top = np.arcsin(-(self.w1 / 2.0) / (inner_radius + self.h0))
points = {
'11': [(inner_radius + self.h0 ) * np.cos( -slot_pitch/2.0 ),
(inner_radius + self.h0 ) * np.sin( -slot_pitch/2.0 ) , 0],
'12': [(inner_radius + self.h0) * np.cos(angle_slot_top), (inner_radius + self.h0) * np.sin(angle_slot_top),
0]
}
lines = {
'10': [9, 11],
'11': [11, 1, 12],
'12': [12, 10],
'-8': [0]
}
return points, lines
def get_coil_area_geometry(self, inner_radius):
points = None
lines = {
'13': [12, 1, 4],
'-2': [0],
'14': [3, 6],
'-9': [0],
'-12': [0]
}
return points, lines
def get_toothtip_geometry(self, inner_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
points = {
'14': [inner_radius * np.cos( -slot_pitch/2.0 ), inner_radius * np.sin( -slot_pitch/2.0 ) , 0]
}
lines = {
'15': [11, 14],
'16': [14, 1, 5],
'-3': [0],
'-13': [0],
'-11': [0]
}
return points, lines
def get_stator_airgap_geometry(self, airgap_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
points = {
'15': [airgap_radius * np.cos( -slot_pitch/2.0 ), airgap_radius * np.sin( -slot_pitch/2.0 ) , 0],
'16': [airgap_radius, 0, 0]
}
lines = {
'17': [14, 15],
'18': [15, 1, 16],
'19': [16, 2],
'-4': [0],
'-16': [0]
}
return points, lines
def get_stator_airgap_boundary(self):
    """Return the arc line (id '18') lying on the stator/airgap interface."""
    airgap_arc = [15, 1, 16]
    return {'18': airgap_arc}
def get_outer_stator_boundary(self):
    """Return the line ids forming the outer stator boundary."""
    outer_lines = [6]
    return outer_lines
def get_master_boundary(self):
    """Return the line ids of the master (periodic) boundary of the model."""
    master_lines = [7, 10, 15, 17]
    return master_lines
|
apache-2.0
| 8,788,168,946,986,012,000
| 27.612167
| 142
| 0.505249
| false
| 3.325232
| false
| false
| false
|
aptivate/django-registration
|
registration/models.py
|
1
|
10475
|
import datetime
import hashlib
import random
import re
from django.conf import settings
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
try:
from django.utils.timezone import now as datetime_now
except ImportError:
datetime_now = datetime.datetime.now
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
    """
    Custom manager for the ``RegistrationProfile`` model.
    The methods defined here provide shortcuts for account creation
    and activation (including generation and emailing of activation
    keys), and for cleaning out expired inactive accounts.
    """
    def activate_user(self, activation_key):
        """
        Validate an activation key and activate the corresponding
        ``User`` if valid.
        If the key is valid and has not expired, return the ``User``
        after activating.
        If the key is not valid or has expired, return ``False``.
        If the key is valid but the ``User`` is already active,
        return ``False``.
        To prevent reactivation of an account which has been
        deactivated by site administrators, the activation key is
        reset to the string constant ``RegistrationProfile.ACTIVATED``
        after successful activation.
        """
        # Make sure the key we're trying conforms to the pattern of a
        # SHA1 hash; if it doesn't, no point trying to look it up in
        # the database.
        if SHA1_RE.search(activation_key):
            try:
                profile = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                # Unknown key: treat the same as an invalid one.
                return False
            if not profile.activation_key_expired():
                user = profile.user
                user.is_active = True
                user.save()
                # Overwrite the key so the same link cannot re-activate
                # an account an admin later deactivates.
                profile.activation_key = self.model.ACTIVATED
                profile.save()
                return user
        return False
    def create_inactive_user(self, username, email, password,
                             site, send_email=True):
        """
        Create a new, inactive ``User``, generate a
        ``RegistrationProfile`` and email its activation key to the
        ``User``, returning the new ``User``.
        By default, an activation email will be sent to the new
        user. To disable this, pass ``send_email=False``.
        """
        new_user = get_user_model().objects.create_user(username, email,
                                                        password)
        # Account stays disabled until the activation key is used.
        new_user.is_active = False
        new_user.save()
        registration_profile = self.create_profile(new_user)
        if send_email:
            registration_profile.send_activation_email(site)
        return new_user
    # Wrap so user + profile creation commit (or roll back) atomically.
    # NOTE(review): commit_on_success is the pre-Django-1.6 API; presumably
    # this codebase targets Django <= 1.5 -- confirm before upgrading.
    create_inactive_user = transaction.commit_on_success(create_inactive_user)
    def create_profile(self, user):
        """
        Create a ``RegistrationProfile`` for a given
        ``User``, and return the ``RegistrationProfile``.
        The activation key for the ``RegistrationProfile`` will be a
        SHA1 hash, generated from a combination of the ``User``'s
        username and a random salt.
        """
        salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
        username = user.username
        # sha1 needs bytes; encode unicode usernames first (Python 2).
        if isinstance(username, unicode):
            username = username.encode('utf-8')
        activation_key = hashlib.sha1(salt+username).hexdigest()
        return self.create(user=user,
                           activation_key=activation_key)
    def delete_expired_users(self):
        """
        Remove expired instances of ``RegistrationProfile`` and their
        associated ``User``s.
        Accounts to be deleted are identified by searching for
        instances of ``RegistrationProfile`` with expired activation
        keys, and then checking to see if their associated ``User``
        instances have the field ``is_active`` set to ``False``; any
        ``User`` who is both inactive and has an expired activation
        key will be deleted.
        It is recommended that this method be executed regularly as
        part of your routine site maintenance; this application
        provides a custom management command which will call this
        method, accessible as ``manage.py cleanupregistration``.
        Regularly clearing out accounts which have never been
        activated serves two useful purposes:
        1. It alleviates the ocasional need to reset a
           ``RegistrationProfile`` and/or re-send an activation email
           when a user does not receive or does not act upon the
           initial activation email; since the account will be
           deleted, the user will be able to simply re-register and
           receive a new activation key.
        2. It prevents the possibility of a malicious user registering
           one or more accounts and never activating them (thus
           denying the use of those usernames to anyone else); since
           those accounts will be deleted, the usernames will become
           available for use again.
        If you have a troublesome ``User`` and wish to disable their
        account while keeping it in the database, simply delete the
        associated ``RegistrationProfile``; an inactive ``User`` which
        does not have an associated ``RegistrationProfile`` will not
        be deleted.
        """
        for profile in self.all():
            try:
                if profile.activation_key_expired():
                    user = profile.user
                    # Only reap accounts that were never activated.
                    if not user.is_active:
                        user.delete()
                        profile.delete()
            except get_user_model().DoesNotExist:
                # Orphaned profile (user row already gone): clean it up.
                profile.delete()
class RegistrationProfile(models.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation.
    """
    # Sentinel stored in activation_key once an account is activated,
    # so the original key can never be replayed.
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True,
                             verbose_name=_('user'))
    # SHA1 hex digest (40 chars) or the ACTIVATED sentinel.
    activation_key = models.CharField(_('activation key'), max_length=40)
    objects = RegistrationManager()
    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')
    def __unicode__(self):
        return u"Registration information for %s" % self.user
    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string constant ``ACTIVATED``. Re-activating
           is not permitted, and so this method returns ``True`` in
           this case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
        return self.activation_key == self.ACTIVATED or \
           (self.user.date_joined + expiration_date <= datetime_now())
    # Tells the Django admin to render this method as a boolean icon.
    activation_key_expired.boolean = True
    def send_activation_email(self, site):
        """
        Send an activation email to the user associated with this
        ``RegistrationProfile``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. Because it is used as the subject line of an email,
            this template's output **must** be only a single line of
            text; output longer than one line will be forcibly joined
            into only a single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email.
        These templates will each receive the following context
        variables:
        ``activation_key``
            The activation key for the new account.
        ``expiration_days``
            The number of days remaining during which the account may
            be activated.
        ``site``
            An object representing the site on which the user
            registered; depending on whether ``django.contrib.sites``
            is installed, this may be an instance of either
            ``django.contrib.sites.models.Site`` (if the sites
            application is installed) or
            ``django.contrib.sites.models.RequestSite`` (if
            not). Consult the documentation for the Django sites
            framework for details regarding these objects' interfaces.
        """
        ctx_dict = {'activation_key': self.activation_key,
                    'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                    'site': site}
        subject = render_to_string('registration/activation_email_subject.txt',
                                   ctx_dict)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('registration/activation_email.txt',
                                   ctx_dict)
        self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
|
bsd-3-clause
| -6,932,972,600,714,886,000
| 38.23221
| 83
| 0.621289
| false
| 4.978612
| false
| false
| false
|
umitproject/tease-o-matic
|
mediagenerator/filters/yuicompressor.py
|
1
|
1533
|
from django.conf import settings
from django.utils.encoding import smart_str
from mediagenerator.generators.bundles.base import Filter
class YUICompressor(Filter):
    """Media filter that minifies CSS/JS by piping it through the
    YUI Compressor jar (path taken from settings.YUICOMPRESSOR_PATH)."""
    def __init__(self, **kwargs):
        super(YUICompressor, self).__init__(**kwargs)
        # YUI Compressor only understands these two --type values.
        assert self.filetype in ('css', 'js'), (
            'YUICompressor only supports compilation to css and js. '
            'The parent filter expects "%s".' % self.filetype)
    def get_output(self, variation):
        """Yield one minified string per input chunk of *variation*."""
        # We import this here, so App Engine Helper users don't get import
        # errors.
        from subprocess import Popen, PIPE
        for input in self.get_input(variation):
            try:
                compressor = settings.YUICOMPRESSOR_PATH
                cmd = Popen(['java', '-jar', compressor,
                             '--charset', 'utf-8', '--type', self.filetype],
                            stdin=PIPE, stdout=PIPE, stderr=PIPE,
                            universal_newlines=True)
                # Feed the source via stdin; stdout is the minified result.
                output, error = cmd.communicate(smart_str(input))
                assert cmd.wait() == 0, 'Command returned bad result:\n%s' % error
                yield output
            except Exception, e:
                # Any failure (missing java, bad jar path, non-zero exit)
                # is surfaced as one explanatory ValueError.
                raise ValueError("Failed to execute Java VM or yuicompressor. "
                    "Please make sure that you have installed Java "
                    "and that it's in your PATH and that you've configured "
                    "YUICOMPRESSOR_PATH in your settings correctly.\n"
                    "Error was: %s" % e)
|
bsd-3-clause
| -8,061,247,068,897,401,000
| 48.451613
| 82
| 0.566862
| false
| 4.430636
| false
| false
| false
|
manaschaturvedi/oscarbuddy
|
main.py
|
1
|
22607
|
import os
import re
import random
import hashlib
import hmac
from string import letters
import mimetypes
import webapp2
import jinja2
from google.appengine.ext import db
import webbrowser
from urllib2 import urlopen
import requests
from bs4 import BeautifulSoup
import json
import html5lib
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),autoescape = True)
secret = 'fart'
def render_str(template, **params):
    """Load *template* from the module-level jinja environment and
    render it with *params*."""
    tmpl = jinja_env.get_template(template)
    return tmpl.render(params)
def make_secure_val(val):
    """Return ``val|sig`` where sig is an HMAC of *val* keyed with the
    module-level ``secret`` (default digest, i.e. MD5 on Python 2)."""
    signature = hmac.new(secret, val).hexdigest()
    return '%s|%s' % (val, signature)
def check_secure_val(secure_val):
    """Return the embedded value when the cookie's HMAC signature checks
    out; return None (implicitly) otherwise."""
    val = secure_val.split('|')[0]
    if make_secure_val(val) == secure_val:
        return val
class Handler(webapp2.RequestHandler):
    """Base request handler: template rendering, signed session cookies
    and JSON responses shared by every page handler in this app."""
    def write(self, *a, **kw):
        # Thin shortcut over the webapp2 response stream.
        self.response.out.write(*a, **kw)
    def render_str(self, template, **params):
        # Inject the logged-in user (set by initialize) into every template.
        params['user'] = self.user
        return render_str(template, **params)
    def render(self, template, **kw):
        self.write(self.render_str(template, **kw))
    def set_secure_cookie(self, name, val):
        # Store val alongside its HMAC so tampering is detectable.
        cookie_val = make_secure_val(val)
        self.response.headers.add_header('Set-Cookie','%s=%s; Path=/' % (name, cookie_val))
    def read_secure_cookie(self, name):
        # Returns the verified value, or a falsy result if absent/forged.
        cookie_val = self.request.cookies.get(name)
        return cookie_val and check_secure_val(cookie_val)
    def login(self, user):
        # Session = signed cookie carrying the datastore id of the User.
        self.set_secure_cookie('user_id', str(user.key().id()))
    def logout(self):
        # Clearing the cookie value ends the session.
        self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')
    def render_json(self, d):
        json_txt = json.dumps(d)
        self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'
        self.write(json_txt)
    def initialize(self, *a, **kw):
        # Runs before every request: resolve the cookie to a User entity
        # (self.user is falsy when nobody is logged in).
        webapp2.RequestHandler.initialize(self, *a, **kw)
        uid = self.read_secure_cookie('user_id')
        self.user = uid and User.by_id(int(uid))
class MainPage(Handler):
    """Front page: just renders the static home template."""
    def get(self):
        self.render("home.html")
def make_salt(length = 5):
    """Return a random string of ASCII letters, *length* characters long.

    NOTE(review): uses the non-cryptographic ``random`` module; fine for
    a hash salt here, but not for secrets.
    """
    chars = [random.choice(letters) for _ in xrange(length)]
    return ''.join(chars)
def make_pw_hash(name, pw, salt = None):
    """Return ``salt,hash`` for the given credentials.

    A fresh salt is generated when *salt* is falsy; passing an existing
    salt reproduces the stored hash (used by valid_pw for verification).
    """
    salt = salt or make_salt()
    digest = hashlib.sha256(name + pw + salt).hexdigest()
    return '%s,%s' % (salt, digest)
def valid_pw(name, password, h):
    """True iff *password* matches the stored ``salt,hash`` string *h*."""
    salt = h.partition(',')[0]
    return make_pw_hash(name, password, salt) == h
def users_key(group = 'default'):
    # Datastore ancestor key under which all User entities are stored.
    return db.Key.from_path('users', group)
class User(db.Model):
    """Datastore model for a registered user (salted-hash credentials)."""
    name = db.StringProperty(required = True)
    pw_hash = db.StringProperty(required = True)  # "salt,sha256" from make_pw_hash
    email = db.StringProperty()
    @classmethod
    def by_id(cls, uid):
        # Lookup within the shared users_key() ancestor group.
        return User.get_by_id(uid, parent = users_key())
    @classmethod
    def by_name(cls, name):
        # Returns the first User with this name, or None.
        u = User.all().filter('name =', name).get()
        return u
    @classmethod
    def register(cls, name, pw, email = None):
        # Builds (but does not put()) a User with a freshly salted hash.
        pw_hash = make_pw_hash(name, pw)
        return User(parent = users_key(),name = name,pw_hash = pw_hash,email = email)
    @classmethod
    def login(cls, name, pw):
        # Returns the User on valid credentials, None (implicitly) otherwise.
        u = cls.by_name(name)
        if u and valid_pw(name, pw, u.pw_hash):
            return u
USER_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")
def valid_username(username):
    """Truthy iff *username* is 3-20 chars from [a-zA-Z0-9_-]."""
    if not username:
        return username
    return USER_RE.match(username)
PASS_RE = re.compile(r"^.{3,20}$")
def valid_password(password):
    """Truthy iff *password* is between 3 and 20 characters long."""
    if not password:
        return password
    return PASS_RE.match(password)
EMAIL_RE = re.compile(r'^[\S]+@[\S]+\.[\S]+$')
def valid_email(email):
    """Truthy iff *email* is empty/absent (optional field) or looks like
    addr@host.tld."""
    if not email:
        return True
    return EMAIL_RE.match(email)
class Signup(Handler):
    """Validates the signup form; subclasses implement done() to decide
    what happens after successful validation (template method pattern)."""
    def post(self):
        have_error = False
        self.username = self.request.get('username')
        self.password = self.request.get('password')
        self.verify = self.request.get('verify')
        self.email = self.request.get('email')
        # Echo username/email back into the form on validation failure.
        params = dict(username = self.username,email = self.email)
        if not valid_username(self.username):
            params['error_username'] = "That's not a valid username."
            have_error = True
        if not valid_password(self.password):
            params['error_password'] = "That wasn't a valid password."
            have_error = True
        elif self.password != self.verify:
            # Only check the confirmation when the password itself is valid.
            params['error_verify'] = "Your passwords didn't match."
            have_error = True
        if not valid_email(self.email):
            params['error_email'] = "That's not a valid email."
            have_error = True
        if have_error:
            self.render('home.html', **params)
        else:
            self.done()
    def done(self, *a, **kw):
        # Subclass hook: called only when all fields validated.
        raise NotImplementedError
class Register(Signup):
    """Signup flow that creates the account and logs the new user in."""
    def done(self):
        # Make sure the user doesn't already exist before creating one.
        u = User.by_name(self.username)
        if u:
            msg = 'That user already exists.'
            self.render('home.html', error_username = msg)
        else:
            u = User.register(self.username, self.password, self.email)
            u.put()
            # Immediately establish the session cookie for the new account.
            self.login(u)
            self.redirect('/')
class Login(Handler):
    """Handles POSTed login credentials from the home page."""
    def post(self):
        """Authenticate the user and set the session cookie on success;
        re-render the home page with an error message otherwise."""
        username = self.request.get('username')
        password = self.request.get('password')
        u = User.login(username, password)
        if u:
            # Issue the signed user_id cookie, then back to the front page.
            # (Removed an unused local `frontuser = username` left over here.)
            self.login(u)
            self.redirect('/')
        else:
            msg = 'Invalid login'
            self.render('home.html', error = msg)
class Logout(Handler):
    """Clears the session cookie and returns to the front page."""
    def get(self):
        self.logout()
        self.redirect('/')
class NewBooks(Handler):
    """Catalogue page for new books with four optional search filters."""
    def get(self):
        self.render("newbooks.html")
    def post(self):
        """Apply whichever of the four search fields were filled in.

        The original implementation enumerated every combination of
        filled-in fields in a 15-branch elif chain; conditionally
        chaining datastore filters is equivalent (filters commute) and
        covers exactly the same cases, including "no field filled in",
        which re-renders the bare form.
        """
        branch = self.request.get("branch")
        semester = self.request.get("semester")
        publications = self.request.get("publications")
        subject = self.request.get("subject")
        if not (branch or semester or publications or subject):
            # Nothing selected: show the plain search form again.
            self.render("newbooks.html")
            return
        query = Books.all()
        if branch:
            query = query.filter("branch =", branch)
        if publications:
            query = query.filter("publisher =", publications)
        if subject:
            query = query.filter("name =", subject)
        if semester:
            # The form sends semester as a string; it is stored as an int.
            query = query.filter("semester =", int(semester))
        self.render("newbooks.html", disp = query.fetch(10))
class OldBooks(Handler):
    """Catalogue page for second-hand books with four optional filters.

    Mirrors NewBooks exactly except for the template it renders.
    """
    def get(self):
        self.render("oldbooks.html")
    def post(self):
        """Apply whichever of the four search fields were filled in.

        Replaces the original 15-branch elif chain (one branch per
        combination of filled-in fields) with conditional filter
        chaining, which yields the same query in every case.
        """
        branch = self.request.get("branch")
        semester = self.request.get("semester")
        publications = self.request.get("publications")
        subject = self.request.get("subject")
        if not (branch or semester or publications or subject):
            # Nothing selected: show the plain search form again.
            self.render("oldbooks.html")
            return
        query = Books.all()
        if branch:
            query = query.filter("branch =", branch)
        if publications:
            query = query.filter("publisher =", publications)
        if subject:
            query = query.filter("name =", subject)
        if semester:
            # The form sends semester as a string; it is stored as an int.
            query = query.filter("semester =", int(semester))
        self.render("oldbooks.html", disp = query.fetch(10))
class Books(db.Model):
    """Datastore model for a catalogue book."""
    prod_id = db.StringProperty()  # external id used as the ?key= lookup value
    name = db.StringProperty()  # book title / subject name
    semester = db.IntegerProperty()
    author = db.StringProperty()
    stock = db.IntegerProperty()
    actual_price = db.IntegerProperty()  # list price, rupees
    discount_price = db.IntegerProperty()  # selling price, rupees
    branch = db.StringProperty()  # engineering branch, e.g. "Computers"
    publisher = db.StringProperty()
    publishing_date = db.StringProperty()
    edition = db.StringProperty()
    def as_dict(self):
        # Subset of fields exposed by the /rss JSON endpoint.
        d = {'name': self.name,
             'author': self.author,
             'actual_price': self.actual_price,
             'publisher': self.publisher}
        return d
class Orders(db.Model):
    """Datastore model for a placed book order (one book per order)."""
    cust_name = db.StringProperty()
    address = db.PostalAddressProperty()
    college = db.StringProperty()
    book_name = db.StringProperty()
    quantity = db.IntegerProperty()
    total_amount = db.IntegerProperty()  # quantity * price, rupees
    contact_no = db.IntegerProperty()
    book_id = db.StringProperty()  # matches Books.prod_id
    email_id = db.EmailProperty()
"""
d = Orders(cust_name = "Manas Chaturvedi", address = "Borivali", college = "TCET",
book_name = "OOSE", quantity = 1, total_amount = 325, contact_no = 9022380436,
book_id = "oose.jpeg", email_id = "manas.oid@gmail.com")
d.put()
a = Books(prod_id = "oose.jpeg", name = "Object Oriented Software Engineering",
semester = 6, author = "Bernard Bruegge, Allen H. Dutoit", stock = 5,
actual_price = 325, discount_price = 275, branch = "Computers",
publisher = "Pearson", publishing_date = "2010", edition = "2013")
a.put()
a2 = Books(prod_id = "dwm.png", name = "Data Warehouse and Data Mining",
semester = 6, author = "Aarti Deshpande", stock = 5,
actual_price = 315, discount_price = 260, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a2.put()
a3 = Books(prod_id = "cg.jpeg", name = "Computer Graphics",
semester = 4, author = "A.P. Godse, D.A. Godse", stock = 2,
actual_price = 330, discount_price = 280, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a3.put()
a4 = Books(prod_id = "spccjohn.jpeg", name = "System Programming and Compiler Construction",
semester = 6, author = "John Donovan", stock = 2,
actual_price = 410, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a4.put()
a5 = Books(prod_id = "1.jpg", name = "Advanced Microprocessors",
semester = 6, author = "J. S. Katre", stock = 2,
actual_price = 320, discount_price = 290, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a5.put()
a6 = Books(prod_id = "ampburchandi.gif", name = "Advanced Microprocessors",
semester = 6, author = "K.M. Burchandi, A.K. Ray", stock = 2,
actual_price = 390, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a6.put()
a7 = Books(prod_id = "CN.jpg", name = "Computer Networks",
semester = 5, author = "Andrew Tenenbaum", stock = 1,
actual_price = 390, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a7.put()
a8 = Books(prod_id = "mp.jpeg", name = "Microprocessors and Interfacing",
semester = 5, author = "J. S. Katre", stock = 2,
actual_price = 320, discount_price = 290, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a8.put()
"""
class Template(Handler):
    """Book detail page; POST validates the quantity and forwards to /cart."""
    def get(self):
        # ?key= carries the Books.prod_id of the book to display.
        k = self.request.get("key")
        disp=Books.all().filter("prod_id =", k).get()
        self.render("template.html", disp = disp)
    def post(self):
        if self.user:
            qua = self.request.get("quantity")
            if not qua:
                # Missing quantity: re-render the detail page with an error.
                k = self.request.get("key")
                disp=Books.all().filter("prod_id =", k).get()
                params = dict()
                params['error_quantity'] = "That's not a valid quantity."
                self.render("template.html", disp = disp, **params)
            else:
                quantity = int(qua)
                k = self.request.get("key")
                disp=Books.all().filter("prod_id =", k).get()
                # Hand off to the cart page via query-string parameters.
                self.redirect('/cart?quantity=%s&key=%s' % (quantity,k))
        else:
            # Not logged in: show the page again with a login prompt.
            k = self.request.get("key")
            disp=Books.all().filter("prod_id =", k).get()
            params = dict()
            params['error_user'] = "please login to procced"
            self.render("template.html", disp = disp, **params)
class Cart(Handler):
    """Shopping-cart review page; POST forwards to the /order form."""
    def get(self):
        # quantity and key arrive as query-string params from /template.
        qu = self.request.get("quantity")
        quantity = int(qu)
        k = self.request.get("key")
        disp = Books.all().filter("prod_id =", k).get()
        self.render("cart.html", disp = disp, quantity = quantity)
    def post(self):
        if self.user:
            qua = self.request.get("quantity")
            quantity = int(qua)
            k = self.request.get("key")
            disp=Books.all().filter("prod_id =", k).get()
            # Proceed to the order form, carrying quantity/key along.
            self.redirect('/order?quantity=%s&key=%s' % (quantity,k))
        else:
            # Not logged in: redisplay the cart with a login prompt.
            k = self.request.get("key")
            qu = self.request.get("quantity")
            quantity = int(qu)
            disp=Books.all().filter("prod_id =", k).get()
            params = dict()
            params['error_user'] = "please login to procced"
            self.render("cart.html", disp = disp, quantity = quantity, **params)
class Order(Handler):
    """Order form page; POST persists a completed order."""
    def get(self):
        qu = self.request.get("quantity")
        quantity = int(qu)
        k = self.request.get("key")
        disp = Books.all().filter("prod_id =", k).get()
        self.render("order1.html", disp = disp, quantity = quantity)
    def post(self):
        if self.user:
            cust_name = self.request.get("cusname")
            address = self.request.get("address")
            college = self.request.get("college")
            book_name = self.request.get("book_name")
            qua = self.request.get("quant")
            tot_amount = self.request.get("tot_amount")
            cont = self.request.get("mobile")
            book_id = self.request.get("book_id")
            email_id = self.request.get("email")
            # All fields are mandatory before the order is stored.
            if(cust_name and address and college and book_name and qua and tot_amount and cont and book_id and email_id):
                quantity = int(qua)
                total_amount = int(tot_amount)
                contact_no = int(cont)
                ordered = Orders(cust_name = cust_name, address = address, college = college,
                    book_name = book_name, quantity = quantity, total_amount = total_amount,
                    contact_no = contact_no, book_id = book_id, email_id = email_id)
                ordered.put()
                self.redirect("/successful_order")
            else:
                # Incomplete form: redisplay with the book still shown.
                k = self.request.get("key")
                qu = self.request.get("quantity")
                quantity = int(qu)
                disp=Books.all().filter("prod_id =", k).get()
                params = dict()
                params['error_form'] = "please fill all the order details"
                self.render("order1.html", disp = disp, quantity = quantity, **params)
        else:
            # Not logged in: redisplay the order page with a login prompt.
            k = self.request.get("key")
            qu = self.request.get("quantity")
            quantity = int(qu)
            disp=Books.all().filter("prod_id =", k).get()
            params = dict()
            params['error_user'] = "please login to procced"
            self.render("order1.html", disp = disp, quantity = quantity, **params)
class ContactUs(Handler):
    """Static contact page."""
    def get(self):
        self.render("cont.html")
class AboutUs(Handler):
    """Static about page."""
    def get(self):
        self.render("aboutus.html")
class SuccessOrder(Handler):
    """Confirmation page shown after an order is stored."""
    def get(self):
        self.render("successorder.html")
class AjaxHandler(Handler):
    """Plain-text endpoint fetched via Ajax as a connectivity demo."""
    def get(self):
        self.response.write("If you can read this message, do know that Ajax is working tirelessly behind the scenes to load this data dynamically ! ")
class GGHandler(Handler):
    """Renders the /tcet page."""
    def get(self):
        self.render("gg.html")
class SearchHandler(Handler):
    """Searches Amazon.in and Rediff Books for a keyword by scraping
    their result pages (requests + BeautifulSoup/html5lib)."""
    def get(self):
        self.render("search.html")
    def post(self):
        keey = self.request.get('keey')
        # Build the two site-specific search URLs for the keyword.
        url = "http://www.amazon.in/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=" + str(keey)
        url1 = "http://books.rediff.com/" + str(keey) + "?sc_cid=books_inhomesrch"
        ss = requests.get(url)
        ss1 = requests.get(url1)
        src = ss.text
        src1 = ss1.text
        obj = BeautifulSoup(src, 'html5lib')
        obj1 = BeautifulSoup(src1, 'html5lib')
        li = []
        ai = []
        # Titles on Amazon result pages sit in <span class="lrg bold">.
        for e in obj.findAll("span", {'class' : 'lrg bold'}):
            title = e.string
            li.append(title)
        # Titles on Rediff result pages sit in <a class="bold">.
        for e in obj1.findAll("a", {'class' : 'bold'}):
            title = e.string
            ai.append(title)
        self.render("searchresult.html", li = li, ai = ai, keey = keey)
class RSSHandler(Handler):
    """JSON feed of the catalogue (name/author/price/publisher per book)."""
    def get(self):
        rsss = Books.all().fetch(1000)
        rss = list(rsss)
        return self.render_json([p.as_dict() for p in rss])
class EbayHandler(Handler):
def get(self):
app = webapp2.WSGIApplication([('/', MainPage),
('/signup', Register),
('/login', Login),
('/logout', Logout),
('/template', Template),
('/newbooks', NewBooks),
('/contactus', ContactUs),
('/aboutus', AboutUs),
('/oldbooks',OldBooks),
('/order',Order),
('/successful_order', SuccessOrder),
('/cart',Cart),
('/ajax', AjaxHandler),
('/tcet', GGHandler),
('/search', SearchHandler),
('/rss', RSSHandler),
('/ebay', EbayHandler)], debug=True)
|
mit
| 342,617,429,224,100,900
| 41.41651
| 154
| 0.572035
| false
| 3.659275
| false
| false
| false
|
JarbasAI/JarbasAI
|
jarbas_skills/skill_wiki/__init__.py
|
1
|
3331
|
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from random import randrange
import re
import wikipedia as wiki
from adapt.intent import IntentBuilder
from os.path import join, dirname
from mycroft.skills.core import MycroftSkill
from mycroft.util import read_stripped_lines
from mycroft.util.log import getLogger
__author__ = 'jdorleans'
LOGGER = getLogger(__name__)
class WikipediaSkill(MycroftSkill):
    """Mycroft skill that answers "wikipedia <topic>" style requests by
    speaking a short summary fetched via the ``wikipedia`` package."""
    def __init__(self):
        super(WikipediaSkill, self).__init__(name="WikipediaSkill")
        # How many search hits to consider / sentences to speak.
        self.max_results = self.config.get('max_results', 3)
        self.max_phrases = self.config.get('max_phrases', 3)
        self.question = 'Would you like to know more about ' # TODO - i10n
        # Randomly-picked phrase fragments used while the search runs.
        self.feedback_prefix = read_stripped_lines(
            join(dirname(__file__), 'dialog', self.lang,
                 'FeedbackPrefix.dialog'))
        self.feedback_search = read_stripped_lines(
            join(dirname(__file__), 'dialog', self.lang,
                 'FeedbackSearch.dialog'))
    def initialize(self):
        # Adapt intent: keyword ("wikipedia") + the article title slot.
        intent = IntentBuilder("WikipediaIntent").require(
            "WikipediaKeyword").require("ArticleTitle").build()
        self.register_intent(intent, self.handle_intent)
    def handle_intent(self, message):
        """Look up the requested title and speak a cleaned-up summary."""
        try:
            title = message.data.get("ArticleTitle")
            self.__feedback_search(title)
            results = wiki.search(title, self.max_results)
            # Strip parenthesised asides and /pronunciations/ from the text.
            summary = re.sub(
                r'\([^)]*\)|/[^/]*/', '',
                wiki.summary(results[0], self.max_phrases))
            self.speak(summary)
        except wiki.exceptions.DisambiguationError as e:
            # Ambiguous title: offer the first few alternatives instead.
            options = e.options[:self.max_results]
            LOGGER.debug("Multiple options found: " + ', '.join(options))
            self.__ask_more_about(options)
        except Exception as e:
            LOGGER.error("Error: {0}".format(e))
    def __feedback_search(self, title):
        # Speak a randomized "searching for <title>" phrase while working.
        prefix = self.feedback_prefix[randrange(len(self.feedback_prefix))]
        feedback = self.feedback_search[randrange(len(self.feedback_search))]
        sentence = feedback.replace('<prefix>', prefix).replace(
            '<title>', title)
        self.speak(sentence, metadata={"more_speech": True})
    def __ask_more_about(self, opts):
        # Build "Would you like to know more about A, B or C" from opts.
        sentence = self.question
        size = len(opts)
        for idx, opt in enumerate(opts):
            sentence += opt
            if idx < size - 2:
                sentence += ', '
            elif idx < size - 1:
                sentence += ' or ' # TODO - i10n
        self.speak(sentence)
    def stop(self):
        # Nothing to interrupt: speech is fire-and-forget.
        pass
def create_skill():
    # Entry point used by the Mycroft skill loader.
    return WikipediaSkill()
|
gpl-3.0
| -5,092,535,073,440,309,000
| 33.340206
| 77
| 0.631042
| false
| 3.877765
| false
| false
| false
|
ha1fpint/PeepingTom-Modified
|
peepingtom2.py
|
1
|
11299
|
#!/usr/bin/env python
import sys
import urllib2
import subprocess
import re
import time
import os
import hashlib
import random
import requests
import urllib2
#=================================================
# MAIN FUNCTION
#=================================================
def main():
    """Entry point: collect targets, screenshot each with PhantomJS, grab
    headers/robots.txt, and build an HTML report in the output directory."""
    # depenency check
    if not all([os.path.exists('phantomjs'), os.path.exists('/usr/bin/curl')]):
        print '[!] PhantomJS and cURL required.'
        return
    # parse options
    import argparse
    usage = """
PeepingTom - Tim Tomes (@LaNMaSteR53) (www.lanmaster53.com)

Dependencies:
- PhantomJS
- cURL

$ python ./%(prog)s <mode> <path>"""
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument('-l', help='list input mode. path to list file.', dest='list_file', action='store')
    parser.add_argument('-x', help='xml input mode. path to Nessus/Nmap XML file.', dest='xml_file', action='store')
    parser.add_argument('-s', help='single input mode. path to target, remote URL or local path.', dest='target', action='store')
    parser.add_argument('-o', help='output directory', dest='output', action='store')
    parser.add_argument('-t', help='socket timeout in seconds. default is 8 seconds.', dest='timeout', type=int, action='store')
    parser.add_argument('-v', help='verbose mode', dest='verbose', action='store_true', default=False)
    parser.add_argument('-b', help='open results in browser', dest='browser', action='store_true', default=False)
    opts = parser.parse_args()
    # process options
    # input source: exactly one of list file, XML file, or single target
    if opts.list_file:
        try:
            targets = open(opts.list_file).read().split()
        except IOError:
            print '[!] Invalid path to list file: \'%s\'' % opts.list_file
            return
    elif opts.xml_file:
        # optimized portion of Peeper (https://github.com/invisiblethreat/peeper) by Scott Walsh (@blacktip)
        import xml.etree.ElementTree as ET
        try: tree = ET.parse(opts.xml_file)
        except IOError:
            print '[!] Invalid path to XML file: \'%s\'' % opts.xml_file
            return
        except ET.ParseError:
            print '[!] Not a valid XML file: \'%s\'' % opts.xml_file
            return
        root = tree.getroot()
        # the root tag tells Nmap and Nessus exports apart
        if root.tag.lower() == 'nmaprun':
            # parse nmap file
            targets = parseNmap(root)
        elif root.tag.lower() == 'nessusclientdata_v2':
            # parse nessus file
            targets = parseNessus(root)
        print '[*] Parsed targets:'
        for x in targets: print x
    elif opts.target:
        targets = [opts.target]
    else:
        print '[!] Input mode required.'
        return
    # storage location: explicit -o, or a timestamped random directory name
    if opts.output:
        directory = opts.output
        if os.path.isdir(directory):
            print '[!] Output directory already exists: \'%s\'' % directory
            return
    else:
        random.seed()
        directory = time.strftime('%y%m%d_%H%M%S', time.localtime()) + '_%04d' % random.randint(1, 10000)
    # connection timeout
    timeout = opts.timeout if opts.timeout else 8
    print '[*] Analyzing %d targets.' % (len(targets))
    print '[*] Storing data in \'%s/\'' % (directory)
    os.mkdir(directory)
    report = 'peepingtom.html'
    outfile = '%s/%s' % (directory, report)
    # logic to gather screenshots and headers for the given targets
    db = {'targets': []}
    cnt = 0
    # each target counts twice: once for the capture, once for the headers
    tot = len(targets) * 2
    previouslen = 0
    try:
        for target in targets:
            # Displays the target name to the right of the progress bar
            if opts.verbose:
                printProgress(cnt, tot, target, previouslen)
            else:
                printProgress(cnt, tot)
            # derive filesystem-safe file names from the target URL
            imgname = '%s.png' % re.sub('\W','',target)
            srcname = '%s.txt' % re.sub('\W','',target)
            imgpath = '%s/%s' % (directory, imgname)
            srcpath = '%s/%s' % (directory, srcname)
            getCapture(target, imgpath, timeout)
            cnt += 1
            previouslen = len(target)
            target_data = {}
            target_data['url'] = target
            target_data['imgpath'] = imgname
            target_data['srcpath'] = srcname
            # 'z'*32 is a sentinel hash sorting after real md5 hex digests
            target_data['hash'] = hashlib.md5(open(imgpath).read()).hexdigest() if os.path.exists(imgpath) else 'z'*32
            target_data['headers'] = getHeaders(target, srcpath, timeout)
            #SJ edit
            if get_status(target + '/robots.txt') == 200:
                try:
                    robots = requests.get(target + "/robots.txt", verify=False)
                    print robots.headers['content-type'].split(';',2)
                    # only keep the body when it is actually a plain-text robots file
                    if robots.headers['content-type'].split(';',2)[0] == "text/plain":
                        robotText = robots.text.encode('utf-8')
                        #robots2 = robotText.splitlines()
                        target_data['robots'] = robotText
                    else:
                        target_data['robots'] = "empty robots.txt"
                except Exception:
                    target_data['robots'] = "exception empty robots.txt"
            else:
                robots = 'no robots file'
                target_data['robots'] = robots
            db['targets'].append(target_data)
            cnt += 1
        print printProgress(1,1)
    except Exception as e:
        print '[!] %s' % (e.__str__())
    # build the report and exit
    buildReport(db, outfile)
    if opts.browser:
        import webbrowser
        path = os.getcwd()
        w = webbrowser.get()
        w.open('file://%s/%s/%s' % (path, directory, report))
    print '[*] Done.'
#=================================================
# SUPPORT FUNCTIONS
#=================================================
#SJ edit - check up
def get_status(target):
try:
conn = urllib2.urlopen(target, timeout = 2)
print target
print conn.code
return conn.code
except urllib2.URLError as e:
return 123
except Exception:
return 123
def parseNmap(root):
    """Extract candidate web URLs from a parsed Nmap XML report.

    Args:
        root: ElementTree root of an <nmaprun> document.

    Returns:
        List of unique 'http(s)://host:port' strings for open web ports.
    """
    http_ports = [80, 81, 8000, 8080, 8081, 8082]
    https_ports = [443, 444, 8443]
    targets = []
    # iterate through all host nodes
    for host in root.iter('host'):
        hostname = host.find('address').get('addr')
        # hostname node doesn't always exist. when it does, it overrides the
        # address previously assigned to hostname
        hostname_node = host.find('hostnames').find('hostname')
        if hostname_node is not None:
            hostname = hostname_node.get('name')
        # iterate through all port nodes reported for the current host
        for item in host.iter('port'):
            state = item.find('state').get('state')
            if state.lower() != 'open':
                continue
            # service node doesn't always exist when a port is open
            service = item.find('service').get('name') if item.find('service') is not None else ''
            port = item.get('portid')
            if 'http' in service.lower() or int(port) in (http_ports + https_ports):
                proto = 'http'
                if 'https' in service.lower() or int(port) in https_ports:
                    proto = 'https'
                url = '%s://%s:%s' % (proto, hostname, port)
                if url not in targets:
                    targets.append(url)
            elif not service:
                # show the host and port for unknown services
                # (parenthesized print works under both Python 2 and 3)
                print('[-] Unknown service: %s:%s' % (hostname, port))
    return targets
def parseNessus(root):
    """Extract candidate web URLs from a parsed Nessus v2 XML report.

    Args:
        root: ElementTree root of a <NessusClientData_v2> document.

    Returns:
        List of unique 'http(s)://host:port' strings taken from
        'Service detection' plugin results on www/http ports.
    """
    targets = []
    for host in root.iter('ReportHost'):
        name = host.get('name')
        for item in host.iter('ReportItem'):
            svc = item.get('svc_name')
            plugname = item.get('pluginName')
            if (svc in ['www','http?','https?'] and plugname.lower().startswith('service detection')):
                port = item.get('port')
                # BUGFIX: plugin_output can be absent or empty; previously this
                # raised AttributeError. Treat missing output as "no TLS hint".
                output_node = item.find('plugin_output')
                output = output_node.text.strip() if output_node is not None and output_node.text else ''
                proto = guessProto(output)
                url = '%s://%s:%s' % (proto, name, port)
                if url not in targets:
                    targets.append(url)
    return targets
def guessProto(output):
    """Return 'https' when *output* mentions TLS/SSL, otherwise 'http'."""
    # optimized portion of Peeper (https://github.com/invisiblethreat/peeper) by Scott Walsh (@blacktip)
    return "https" if re.search('TLS|SSL', output) else "http"
def getCapture(url, imgpath, timeout):
    """Screenshot *url* into *imgpath* via PhantomJS; returns the exit code.

    NOTE(review): url and imgpath are interpolated directly into a shell
    command line -- this looks vulnerable to shell injection for hostile
    input; confirm inputs are trusted before reuse.
    """
    cookie_file = 'cookies'
    # timeout is given in seconds but capture.js expects milliseconds
    cmd = './phantomjs --ssl-protocol=any --ignore-ssl-errors=yes --cookies-file="%s" ./capture.js "%s" "%s" %d' % (cookie_file, url, imgpath, timeout*1000)
    returncode, response = runCommand(cmd)
    # delete cookie file
    #os.remove(cookie_file)
    return returncode
def getHeaders(url, srcpath, timeout):
    """Fetch *url* with curl: return the response headers and write the body
    to *srcpath* (-D - dumps headers to stdout, -o saves the body)."""
    curl_cmd = 'curl -sLkD - %s -o %s --max-time %d' % (url, srcpath, timeout)
    rc, header_text = runCommand(curl_cmd)
    return header_text
def runCommand(cmd):
    """Run *cmd* through the shell; return (exit_code, combined_output)."""
    proc = subprocess.Popen([cmd], stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, shell=True)
    stdout, stderr = proc.communicate()
    chunks = []
    if stdout:
        chunks.append(str(stdout))
    if stderr:
        chunks.append(str(stderr))
    return proc.returncode, ''.join(chunks).strip()
def printProgress(cnt, tot, target='', previouslen=0):
    """Redraw a one-line progress bar, padding over a longer previous name."""
    percent = 100 * float(cnt) / float(tot)
    pad = previouslen - len(target)
    if target and pad > 0:
        # blank out leftover characters from the previously shown target
        target = target + ' ' * pad
    bar = '=' * int(float(percent) / 100 * 40)
    sys.stdout.write('[%-40s] %d%% %s\r' % (bar, percent, target))
    sys.stdout.flush()
    return ''
def buildReport(db, outfile):
    """Render the HTML report for all captured targets into *outfile*.

    Rows are sorted by screenshot hash so that visually identical pages end
    up next to each other in the report.
    """
    live_markup = ''
    # process markup for live targets
    for live in sorted(db['targets'], key=lambda k: k['hash']):
        live_markup += "<tr><td class='tg-0ord'><a href='{0}' target='_blank'><img src='{0}' onerror=\"this.parentNode.parentNode.innerHTML='No image available.';\" /></a></td><td class='tg-0ord'><a href='{1}' target='_blank'>{1}</a> (<a href='{2}' target='_blank'>source</a>)<br/><pre>{3}</pre><pre><p>{4}</p></pre></td></tr>\n".format(live['imgpath'],live['url'],live['srcpath'],live['headers'],live['robots']) #addded robots
    # write the report; 'with' guarantees the handle is closed even on error
    # (also: removed unused error_markup/dead_markup locals and stopped
    # shadowing the builtin name 'file')
    with open(outfile, 'w') as report_file:
        report_file.write("""
<!doctype html>
<head>
<style type="text/css">
.tg {border-collapse:collapse;border-spacing:0;border-color:#ccc;}
.tg td{font-family:Arial, sans-serif;font-size:14px;padding:10px 5px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#ccc;color:#333;background-color:#fff;}
.tg th{font-family:Arial, sans-serif;font-size:14px;font-weight:normal;padding:10px 5px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#ccc;color:#333;background-color:#f0f0f0;}
.tg .tg-0ord{text-align:left;background-color:#f0f0f0;}
.tg .tg-s6z2{text-align:center;background-color:#c0c0c0;}
</style>
</head>
<body>
<table class="tg">
<tr>
<th class="tg-s6z2">Screenshot</th>
<th class="tg-s6z2">Details</th>
</tr>
%s
</table>
</body>
</html>""" % (live_markup))
#=================================================
# START
#=================================================
if __name__ == "__main__": main()
|
gpl-3.0
| -3,573,121,943,859,222,000
| 38.645614
| 427
| 0.570139
| false
| 3.693691
| false
| false
| false
|
dgarrett622/ObsDist
|
ObsDist/Population.py
|
1
|
5657
|
# -*- coding: utf-8 -*-
"""
v1: Created on November 28, 2016
author: dg622@cornell.edu
"""
import numpy as np
import astropy.units as u
class Population(object):
    """Planetary population parameters for sampling and PDF evaluation.

    Range arguments may be given as plain floats (AU for lengths) or as
    astropy Quantities, which are converted to AU.

    Args:
        a_min/a_max: population semi-major axis bounds (Quantity or AU float)
        e_min/e_max: population eccentricity bounds (float)
        R_min/R_max: planetary radius bounds (Quantity or AU float)
        p_min/p_max: geometric albedo bounds (float)

    Attributes:
        arange (ndarray): [min, max] semi-major axis in AU
        erange (ndarray): [min, max] eccentricity
        Rrange (ndarray): [min, max] planetary radius in AU
        prange (ndarray): [min, max] geometric albedo
        Phi (callable): Lambert phase function
    """

    def __init__(self, a_min=None, a_max=None, e_min=None, e_max=None,
                 R_min=None, R_max=None, p_min=None, p_max=None):
        # BUGFIX-grade cleanups: use 'is None' instead of '== None' (equality
        # against a Quantity can misbehave), and isinstance() instead of
        # 'type(x) == ...' so Quantity subclasses are accepted.
        quantity_type = u.quantity.Quantity
        # minimum semi-major axis (AU)
        if a_min is None:
            a_min = 0.5
        elif isinstance(a_min, quantity_type):
            a_min = a_min.to('AU').value
        # maximum semi-major axis (AU)
        if a_max is None:
            a_max = 5.0
        elif isinstance(a_max, quantity_type):
            a_max = a_max.to('AU').value
        # semi-major axis range
        self.arange = np.array([a_min, a_max])
        # minimum eccentricity (tiny positive value avoids e == 0 exactly)
        if e_min is None:
            e_min = np.finfo(float).eps*100.0
        # maximum eccentricity
        if e_max is None:
            e_max = 0.35
        # eccentricity range
        self.erange = np.array([e_min, e_max])
        # minimum planetary radius (stored in AU)
        if R_min is None:
            R_min = 6000*u.km
            R_min = R_min.to('AU').value
        elif isinstance(R_min, quantity_type):
            R_min = R_min.to('AU').value
        # maximum planetary radius (stored in AU)
        if R_max is None:
            R_max = 30000*u.km
            R_max = R_max.to('AU').value
        elif isinstance(R_max, quantity_type):
            R_max = R_max.to('AU').value
        self.Rrange = np.array([R_min, R_max])  # in AU
        # minimum albedo
        if p_min is None:
            p_min = 0.2
        # maximum albedo
        if p_max is None:
            p_max = 0.3
        self.prange = np.array([p_min, p_max])
        # phase function (Lambert sphere)
        self.Phi = lambda b: (1.0/np.pi)*(np.sin(b) + (np.pi-b)*np.cos(b))

    def f_a(self, a):
        """Probability density function for semi-major axis in AU.

        Args:
            a (float or ndarray): semi-major axis value(s) in AU

        Returns:
            f (ndarray): probability density (units of 1/AU); zero outside
                the population range
        """
        a = np.array(a, ndmin=1, copy=False)
        # uniform alternative:
        # f = ((a >= self.arange[0]) & (a <= self.arange[1])).astype(int)/(self.arange[1]-self.arange[0])
        # log-uniform
        f = ((a >= self.arange[0]) & (a <= self.arange[1])).astype(int)/(a*np.log(self.arange[1]/self.arange[0]))
        return f

    def f_e(self, e):
        """Probability density function for eccentricity (uniform).

        Args:
            e (float or ndarray): eccentricity value(s)

        Returns:
            f (ndarray): probability density; zero outside the range
        """
        e = np.array(e, ndmin=1, copy=False)
        # uniform
        f = ((e >= self.erange[0]) & (e <= self.erange[1])).astype(int)/(self.erange[1]-self.erange[0])
        return f

    def f_R(self, R):
        """Probability density function for planet radius in AU (log-uniform).

        Args:
            R (float or ndarray): planet radius in AU

        Returns:
            f (ndarray): probability density; zero outside the range
        """
        R = np.array(R, ndmin=1, copy=False)
        # uniform alternative:
        # f = ((R >= self.Rrange[0]) & (R <= self.Rrange[1])).astype(int)/(self.Rrange[1]-self.Rrange[0])
        # log-uniform
        f = ((R >= self.Rrange[0]) & (R <= self.Rrange[1])).astype(int)/(R*np.log(self.Rrange[1]/self.Rrange[0]))
        return f

    def f_p(self, p):
        """Probability density function for geometric albedo (uniform).

        Args:
            p (float or ndarray): geometric albedo value(s)

        Returns:
            f (ndarray): probability density; zero outside the range
        """
        p = np.array(p, ndmin=1, copy=False)
        # uniform
        f = ((p >= self.prange[0]) & (p <= self.prange[1])).astype(int)/(self.prange[1]-self.prange[0])
        return f
|
mit
| -879,130,761,740,373,000
| 31.331429
| 113
| 0.505745
| false
| 3.794098
| false
| false
| false
|
tbphu/fachkurs_2016_project
|
model.py
|
1
|
5801
|
import modeldata
import molecules as mol
import translation
import replication as rep
import transcription
class Output:
    """
    class for handling the simulation results of the different species types
    """
    def __init__(self, model):
        self.meta = {}
        self.model = model
        # one SimulationResult track per state in the model
        self.timecourses = {state: SimulationResult(model.states[state]) for state in model.states}

    def add_timepoint(self, species):
        """
        add a simulation time point for one species

        @param species: mol.BioMolecule
        @return: None
        """
        if isinstance(self.model.states[species], mol.Polymer):
            pass  # TODO: implement a useful method for Polymers
        elif isinstance(self.model.states[species], mol.BioMoleculeCount):
            # BUGFIX: SimulationResult.add_timepoint expects (time, value);
            # the arguments were previously passed as (count, timestep),
            # filling the time axis with counts and vice versa.
            self.timecourses[species].add_timepoint(self.model.timestep,
                                                    self.model.states[species].count)
class SimulationResult:
    """Stores the time course of one species as parallel time/value lists."""

    def __init__(self, species):
        """Create an empty result track labelled with the species' name."""
        self.name = species.name
        self.value = []
        self.time = []

    def add_timepoint(self, time, value):
        """Record one sample; note the argument order is (time, value)."""
        self.time.append(time)
        self.value.append(value)
class Model:
    """
    Initializes the states and processes for the model and lets the processes update their corresponding states.
    """
    def __init__(self):
        self.states = {} #dictionary with all molecules {Rib_name: Rib_object, mrna_ids: mrna_object, mrna2_id: ...}
        self.processes = {} #dictionary filled with all active processes
        self.timestep = 0
        self.mrnas = {} # all selfs should be initialized in the constructor
        self.ribosomes = {} #dictionary will be filled with 10 Ribosomes
        self.helicases = {}
        self.polymerases = {}
        self.chromosomes = {}
        self.volume = 1
        self.db = modeldata.ModelData()
        # self.chromosomes=modeldata.ModelData.createchromosomes() #list with chromosomes
        self.genes=modeldata.ModelData.creategenes() #dictionary with genes
        self.__initialize_macromolecules()
        self.__initialize_states()
        self.__initialize_processes()
        #self.results = Output(self)

    def __initialize_macromolecules(self):
        """Create the molecule/enzyme instances the processes operate on."""
        self.ribosomes = {'Ribosomes': mol.Ribosome('Ribos', 'Ribosomes', 187000)}
        self.polymerase2= mol.RNAPolymeraseII('Pol2', 'Polymerase2', 100000000)
        self.nucleotides= mol.NucleotidPool('Nucs','Nucleotides', 1000000000000)
        self.helicases = {'DnaB': rep.Helicase("Helicase", "DnaB", 100)}
        self.polymerases = {'Polymerase3' :rep.Polymerase("Polymerase", "Polymerase3", 100)}
        self.chromosomes = {x.id:x for x in modeldata.ModelData.createchromosomes()}
        #for i, mrna in enumerate(self.db.get_states(mol.MRNA)):
        #    mid, name, sequence = mrna
        #    sequence=list(sequence)
        #    sequence[0:3]='AUG'
        #sequence[12:15]='UGA'
        #sequence=''.join(sequence)
        #self.mrnas[mid] = [mol.MRNA(mid, name, sequence)]

    def __initialize_states(self):
        """
        initialize the different states
        """
        self.states.update(self.ribosomes) #adding dictionaries to self.states
        self.states.update(self.helicases)
        self.states.update(self.polymerases)
        self.states.update(self.chromosomes)
        self.states.update(self.mrnas)
        self.states["Nucleotides"] = self.nucleotides

    def __initialize_processes(self):
        """
        initialize processes
        """
        # transcription
        trsc = transcription.Transcription(0, 'Transcription')
        trsc.set_states(self.genes.keys(), self.polymerase2)
        self.processes["Transkription"] = trsc
        # translation
        trsl = translation.Translation(1, "Translation")
        trsl.set_states(self.mrnas.keys(), self.ribosomes.keys()) #states in Process are keys: Rib_name, mrna_name?!
        self.processes["Translation"] = trsl
        # replication
        repl =rep.Replication(2, "Replication")
        # BUGFIX: list.extend() returns None, so the previous
        # 'list(...).extend(list(...))' always produced None here.
        replication_enzyme_ids = list(self.helicases.keys()) + list(self.polymerases.keys())
        repl.set_states(list(self.chromosomes.keys()), replication_enzyme_ids)
        self.processes.update({"Replication":repl})

    def step(self):
        """
        Do one update step for each process.
        """
        for p in self.processes:
            self.processes[p].update(self)
        #for state in self.states:
        #    self.results.add_timepoint(state)
        self.timestep += 1

    def simulate(self, steps, log=True):
        """
        Simulate the model for some time.
        """
        for s in range(steps):
            self.step()
            if log: # This could be an entry point for further logging
                #print all states
                print(self.states.keys())
                # count states whose key starts with a digit (chromosome ids)
                a = 0
                for i in self.states.keys():
                    if str(i)[0].isdigit():
                        a = 1+a
                print("Die Anzahl der Chromosomen nach " + str(s) + " update Schritten beträgt " + str(a))
                keylist = self.states.keys()
                keylist = [str(x) for x in keylist]
                mrnalist = [x for x in keylist if "mRNA" in x]
                print("Die Anzahl der mRNAs nach " + str(s) + " update Schritten beträgt " + str(len(mrnalist)))
                print("Folgende mRNAs wurden kreiert: " + str([x for x in keylist if "mRNA" in x]))
                print("es sind noch " + str(self.states["Nucleotides"].count) + " freie vorhanden")
# Script entry point: build the model and run a 300-step logged simulation.
if __name__ == "__main__":
    c = Model()
    c.simulate(300, log=True)
|
mit
| -8,622,250,622,444,500,000
| 34.359756
| 126
| 0.594068
| false
| 3.691279
| false
| false
| false
|
dziobas/ChangesCheckstyle
|
checkstyle.py
|
1
|
3398
|
#!/usr/bin/python
import os
import re
import sys
import getopt
def run_checkstyle(file_name, checkstyle, project_dir):
    """Run the checkstyle jar on *file_name* inside *project_dir*.

    Returns the per-line messages with the banner line and the trailing
    two lines stripped off.
    """
    command = ("(cd " + project_dir + "; "
               "java -jar checkstyle.jar "
               "-c " + checkstyle + " "
               + file_name + ")")
    lines = os.popen(command).read().split("\n")
    return lines[1:len(lines) - 2]  # remove first and last line
def find_changed_lines(git_diff):
    """Return the new-file line numbers touched by the hunks in *git_diff*."""
    changed_lines_pattern = "@@ [0-9\-+,]+ ([0-9\-+,]+) @@"
    changed = []
    for spec in re.findall(changed_lines_pattern, git_diff):
        fields = spec.split(",")
        if len(fields) == 1:
            # a single changed line, e.g. "+12"
            changed.append(int(fields[0]))
        elif len(fields) == 2:
            # a run of consecutive changed lines, e.g. "+12,3"
            start = int(fields[0])
            count = int(fields[1])
            changed.extend(range(start, start + count))
    return changed
def filter_out(processed_line):
    """Return True when this `git diff --numstat` line should be skipped.

    Only java files with a positive, numeric added-line count are kept.
    """
    fields = processed_line.split("\t")
    if len(fields) != 3:
        return True
    added = fields[0]
    name = fields[2]
    keep = name.endswith(".java") and added.isdigit() and int(added) > 0
    return not keep
def get_file_name(processed_line):
    """Return the path column of a `git diff --numstat` line."""
    fields = processed_line.split("\t")
    return fields[2]
def introduced_error(error_message, changed_lines, out):
    """Classify one checkstyle message against the set of changed lines.

    Appends *error_message* to *out* when its line number is in
    *changed_lines* (a newly introduced violation); otherwise it is only
    printed as a warning.
    """
    line_pattern = ":(\d+):"
    # checkstyle messages look like "path:line:col: text"
    number = int(re.search(line_pattern, error_message).group(1))
    if number in changed_lines:
        # parenthesized print works under both Python 2 and 3
        print("Introduced Error: " + error_message)
        out.append(error_message)
    else:
        print("Warning: " + error_message)
def usage():
    # CLI usage string shown for -h and on bad options.
    return "checkstyle -c <checkstyle.xml> -d <project directory> -h <help>"
def main(argv):
    """Run checkstyle over the java files changed in the last commit.

    Exits 2 on bad options, 1 when new violations land on changed lines.
    """
    try:
        opts, args = getopt.getopt(argv, "c:d:hx")
    except getopt.GetoptError:
        print usage()
        sys.exit(2)
    checkstyle_rules = "checkstyle-rules.xml"
    project_dir = "."
    debug = False
    for opt, arg in opts:
        if opt == "-c":
            checkstyle_rules = arg
        elif opt == "-d":
            project_dir = arg
        elif opt == "-h":
            print usage()
        elif opt == "-x":
            debug = True
    if debug:
        print "dir: " + project_dir + " rules: " + checkstyle_rules
    # one numstat line per file changed in HEAD^..HEAD
    diff_command = "(cd " + project_dir + "; git diff HEAD^ --numstat)"
    print "Processing"
    errors = []
    list_of_files = os.popen(diff_command).read().split("\n")
    for file_line in list_of_files:
        if filter_out(file_line):
            # skip non java files and without added lines
            continue
        file_name = get_file_name(file_line)
        if debug:
            print "check " + file_name
        # get changed lines (-U0: hunk headers only cover changed lines)
        changes = os.popen("(cd " + project_dir + "; git diff -U0 HEAD^ " + file_name + ")").read()
        lines = find_changed_lines(changes)
        checkstyle = run_checkstyle(file_name, checkstyle_rules, project_dir)
        for item in checkstyle:
            # extract errors introduced in added lines and append errors list
            introduced_error(item, lines, errors)
    if errors:
        print "Errors in added lines:"
        for item in errors:
            print item
        sys.exit(1)
# Script entry point: pass through command-line arguments (minus argv[0]).
if __name__ == "__main__":
    main(sys.argv[1:])
|
apache-2.0
| -1,082,372,101,561,743,700
| 28.051282
| 99
| 0.572396
| false
| 3.630342
| false
| false
| false
|
mattwilliamson/webhookr
|
webhookr/sockets.py
|
1
|
3157
|
import logging
from socketio.namespace import BaseNamespace
from socketio.mixins import RoomsMixin, BroadcastMixin
class WebhookrChannelMixin(object):
    """Socket.IO namespace mixin: per-session room membership and channel emits."""

    room_key = 'rooms'

    def __init__(self, *args, **kwargs):
        super(WebhookrChannelMixin, self).__init__(*args, **kwargs)
        # a set of simple strings naming the rooms this session has joined
        if self.room_key not in self.session:
            self.session[self.room_key] = set()

    def join(self, room):
        """Lets a user join a room on a specific Namespace."""
        self.session[self.room_key].add(self._get_room_name(room))

    def leave(self, room):
        """Lets a user leave a room on a specific Namespace."""
        self.session[self.room_key].remove(self._get_room_name(room))

    def _get_room_name(self, room):
        # namespace-qualified room id, e.g. "/hooks_myroom"
        return self.ns_name + '_' + room

    def room_subscribers(self, room, include_self=False):
        """Yield (sessid, socket) pairs subscribed to *room* in this namespace."""
        room_name = self._get_room_name(room)
        for sessid, socket in self.socket.server.sockets.iteritems():
            if self.room_key not in socket.session:
                continue
            if room_name not in socket.session[self.room_key]:
                continue
            if include_self or self.socket != socket:
                yield (sessid, socket)

    def all_rooms(self):
        # room names with the leading namespace name removed (the '_'
        # separator is kept, matching historical behaviour)
        prefix_len = len(self.ns_name)
        return (name[prefix_len:] for name in self.session.get(self.room_key, []))

    def _emit_to_channel(self, room, event, include_self, *args):
        """This is sent to all in the room (in this particular Namespace)"""
        packet = dict(type="event", name=event, args=args, endpoint=self.ns_name)
        for sessid, socket in self.room_subscribers(room, include_self=include_self):
            socket.send_packet(packet)

    def emit_to_channel(self, room, event, *args):
        self._emit_to_channel(room, event, False, *args)

    def emit_to_channel_and_me(self, room, event, *args):
        self._emit_to_channel(room, event, True, *args)
class WebhookNamespace(BaseNamespace, WebhookrChannelMixin, BroadcastMixin):
    """Socket.IO endpoint for webhook channels: join handling plus
    subscriber-count broadcasts."""

    def initialize(self):
        # Called by gevent-socketio once the namespace session starts.
        self.logger = logging.getLogger("socketio.webhook")
        self.log("WebhookNamespace socketio session started: %s" % self.socket)

    def log(self, message):
        # Prefix every log line with the session id for traceability.
        self.logger.info("[{0}] {1}".format(self.socket.sessid, message))

    def emit_subscriber_count(self, room):
        """Broadcast the current subscriber count of *room* to everyone in it."""
        # Enumerate to get length of subscribers while being lazy
        i = 0
        for i, x in enumerate(self.room_subscribers(room, include_self=True)):
            self.logger.debug('[emit_subscriber_count] i= {}'.format(i))
        # NOTE(review): if the generator yields nothing, i stays 0 and this
        # still reports 1 subscriber -- confirm that is intended.
        total_subscribers = i + 1
        self.log('Emitting totalSubscribers for {}: {}'.format(room, total_subscribers))
        self.emit_to_channel_and_me(room, 'subscriber_joined', {'totalSubscribers': total_subscribers})

    def on_join(self, room):
        # Socket.IO event handler: subscribe this session to *room*.
        self.log('Connected')
        self.room = room
        self.join(room)
        self.emit_subscriber_count(room)
        return True

    def recv_disconnect(self):
        """Update counts for every room this session was in, then disconnect."""
        # Remove nickname from the list.
        self.log('Disconnected')
        # NOTE(review): all_rooms() yields names like '_room' (namespace
        # stripped but '_' kept), while emit_subscriber_count expects the bare
        # room name -- these look inconsistent; verify against join().
        for room in self.all_rooms():
            self.emit_subscriber_count(room)
        self.disconnect(silent=True)
        return True
|
mit
| -3,892,924,647,368,414,700
| 36.583333
| 103
| 0.638264
| false
| 3.675204
| false
| false
| false
|
yubbie/googleapps-message-recall
|
message_recall/frontend_views.py
|
1
|
19041
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Frontend view implementations that handle user requests."""
import os
import re
import socket
import time
import jinja2
import log_utils
from models import domain_user
from models import error_reason
from models import recall_task
from models import sharded_counter
import recall_errors
import user_retriever
import view_utils
import webapp2
import wtforms
from wtforms import validators
import xsrf_helper
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.api.taskqueue import Error as TaskQueueError
from google.appengine.api.taskqueue import Task
from google.appengine.runtime import apiproxy_errors
_APPLICATION_DIR = os.path.dirname(__file__)
_CREATE_TASK_ACTION = 'CreateTask#ns'
_GET_USER_MAX_RETRIES = 2
_LOG = log_utils.GetLogger('messagerecall.views')
_MESSAGE_ID_REGEX = re.compile(r'^[\w+-=.]+@[\w.]+$')
_MESSAGE_ID_MAX_LEN = 100
_USER_ADMIN_CACHE_NAMESPACE = 'messagerecall_useradmin#ns'
_USER_ADMIN_CACHE_TIMEOUT_S = 60 * 60 * 2 # 2 hours
_USER_BILLING_CACHE_TIMEOUT_S = 60 * 60 * 24 # 24 hours
_APPLICATION_BILLING_CACHE_NAMESPACE = 'messagerecall_billing#ns'
def _CacheUserEmailBillingEnabled(user_email):
"""Cache the user_email to avoid billing-check rountrips.
Wrapped in a separate method to aid error handling.
Args:
user_email: String email address of the form user@domain.com.
Raises:
MessageRecallMemcacheError: If the add fails so cache issues can be noticed.
"""
if not memcache.add(user_email, True, time=_USER_BILLING_CACHE_TIMEOUT_S,
namespace=_APPLICATION_BILLING_CACHE_NAMESPACE):
raise recall_errors.MessageRecallMemcacheError(
'Unexpectedly unable to add application billing information to '
'memcache. Please try again.')
def _CacheUserEmailAsAdmin(user_email):
"""Cache the admin user_email to avoid rountrips.
Wrapped in a separate method to aid error handling.
Args:
user_email: String email address of the form user@domain.com.
Raises:
MessageRecallMemcacheError: If the add fails so cache issues can be noticed.
"""
if not memcache.add(user_email, True, time=_USER_ADMIN_CACHE_TIMEOUT_S,
namespace=_USER_ADMIN_CACHE_NAMESPACE):
raise recall_errors.MessageRecallMemcacheError(
'Unexpectedly unable to add admin user information to memcache. '
'Please try again.')
def _SafelyGetCurrentUserEmail():
"""Retrieve the current user's email or raise an exception.
We set 'login: required' in app.yaml so all users should be logged-in.
But, is has been observed that users.get_current_user() *can* return None.
Therefore, this must be checked.
Returns:
String email address of the currently logged-in user.
Raises:
MessageRecallAuthenticationError: If current user is noticed as None.
"""
user = None
get_user_attempts = 0
while not user and get_user_attempts < _GET_USER_MAX_RETRIES:
user = users.get_current_user()
get_user_attempts += 1
if not user:
raise recall_errors.MessageRecallAuthenticationError(
'A logged-in user was not retrieved. Please try again.')
return user.email()
def _FailIfBillingNotEnabled(user_email):
"""Ensure Google Apps Domain has billing enabled.
Billing-enabled is required to use sockets in AppEngine.
The IMAP mail api uses sockets. So this application requires billing
to be enabled.
If billing not enabled, this is observed:
FeatureNotEnabledError: The Socket API will be enabled for this application
once billing has been enabled in the admin console.
Args:
user_email: String email address of the form user@domain.com.
Raises:
MessageRecallAuthenticationError: If user is not properly authorized.
"""
if memcache.get(user_email, namespace=_APPLICATION_BILLING_CACHE_NAMESPACE):
return
imap_host = 'imap.gmail.com'
imap_port = 993
# The socket is discarded after 2min of no use.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((imap_host, imap_port))
except apiproxy_errors.FeatureNotEnabledError as e:
raise recall_errors.MessageRecallError(
'This AppEngine application requires billing status: '
'"Billing Enabled". Please choose "Enable Billing" in the AppEngine '
'admin console for this application (%s).' % e)
except Exception as e:
# Expect "[Errno 13] Permission denied" once billing enabled.
if str(e) != '[Errno 13] Permission denied':
raise
_CacheUserEmailBillingEnabled(user_email)
def _FailIfNonAdminUser(user_email):
"""Ensure user possesses adequate Admin authority.
This AppEngine application should set Authentication Type to:
'Google Accounts API'.
Per documentation, isAdmin is True if the user is a member of the
Google Apps System: Role = Super Admin.
https://developers.google.com/admin-sdk/directory/v1/reference/
If the user is found to be a properly authorized admin-user of this
application, then cache that fact to avoid roundtrips to the Admin SDK
for a little while.
Args:
user_email: String email address of the form user@domain.com.
Raises:
MessageRecallAuthenticationError: If user is not properly authorized.
"""
if memcache.get(user_email, namespace=_USER_ADMIN_CACHE_NAMESPACE):
return
retriever = user_retriever.DomainUserRetriever(
owner_email=user_email,
user_domain=view_utils.GetUserDomain(user_email),
search_query='email:%s' % user_email)
if not retriever.GetUserAttribute(user_email, 'isAdmin'):
# User is not a super-admin...
raise recall_errors.MessageRecallAuthenticationError(
'User %s is not authorized for Message Recall in this domain.'
% user_email)
_CacheUserEmailAsAdmin(user_email)
def _PreventUnauthorizedAccess():
"""Ensure user possesses adequate Admin authority."""
current_user_email = _SafelyGetCurrentUserEmail()
_FailIfNonAdminUser(current_user_email)
_FailIfBillingNotEnabled(current_user_email)
class UIBasePageHandler(webapp2.RequestHandler):
"""Setup common template handling for derived handlers."""
def __init__(self, request, response):
"""RequestHandler initialization requires base class initialization."""
self.initialize(request, response)
self.init_time = time.time()
template_dir = os.path.join(_APPLICATION_DIR, 'templates')
self._jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(template_dir),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
def __del__(self):
_LOG.debug('Handler for %s took %.2f seconds',
self.request.url, time.time() - self.init_time)
def handle_exception(self, exception, debug): # pylint: disable=g-bad-name
"""Common exception handler for webapp2."""
_LOG.exception(exception)
_LOG.debug('Is the web application in debug mode? %s.', debug)
self._WriteTemplate(
template_file='error',
tpl_exception=exception,
tpl_unauthorized=isinstance(
exception, recall_errors.MessageRecallAuthenticationError))
def _WriteTemplate(self, template_file, **kwargs):
"""Common method to write from a template.
Args:
template_file: String name of a file that exists within the template
folder. For subdirectories the name may be 'sub/file'.
**kwargs: A dictionary of key-value pairs that will be available
within the template.
"""
kwargs['tpl_logout_url'] = users.create_logout_url('/')
kwargs['tpl_user_name'] = _SafelyGetCurrentUserEmail()
if '.' not in template_file:
template_file = '%s.html' % template_file
self.response.headers['X-Frame-Options'] = 'DENY' # Prevent clickjacking.
self.response.write(
self._jinja_env.get_template(template_file).render(kwargs))
class AboutPageHandler(UIBasePageHandler):
  """Serve '/about' requests with application information."""

  def get(self):  # pylint: disable=g-bad-name
    """Render the about page for authorized admins."""
    _PreventUnauthorizedAccess()
    self._WriteTemplate(template_file='about')
class CreateTaskForm(wtforms.Form):
  """Validated form for the user input that starts a recall task.

  The Regexp validator doubles as xss protection: no html tag characters
  can appear in a valid message-id.
  """

  message_criteria = wtforms.TextField(
      label='Message-ID',
      default='',
      validators=[
          validators.Length(
              min=1,
              max=_MESSAGE_ID_MAX_LEN,
              message=(u'message-id must be 1-%s characters.' %
                       _MESSAGE_ID_MAX_LEN)),
          validators.Regexp(
              _MESSAGE_ID_REGEX,
              message=(u'message-id format is: local-part@domain.'
                       'com (no spaces allowed).')),
      ])

  @property
  def sanitized_message_criteria(self):
    """Return the message-id field stripped of surrounding whitespace."""
    raw_value = self.message_criteria.data
    return raw_value.strip()
class CreateTaskPageHandler(UIBasePageHandler, xsrf_helper.XsrfHelper):
  """Serve '/create_task' requests to start a new recall task."""

  def get(self):  # pylint: disable=g-bad-name
    """Render the create-task form with a fresh xsrf token."""
    _PreventUnauthorizedAccess()
    self._WriteTemplate(
        template_file='create_task',
        tpl_create_task_form=CreateTaskForm(self.request.GET),
        xsrf_token=self.GetXsrfToken(user_email=_SafelyGetCurrentUserEmail(),
                                     action_id=_CREATE_TASK_ACTION))

  def _EnqueueMasterRecallTask(self, owner_email, message_criteria,
                               task_key_id):
    """Enqueue the master recall task, aborting the db task on failure.

    Args:
      owner_email: String email address of user running this recall.
      message_criteria: String criteria (message-id) to recall.
      task_key_id: Int unique id of the parent task.

    Raises:
      re-raises any task queue errors.
    """
    safe_email = view_utils.CreateSafeUserEmailForTaskName(owner_email)
    timestamp = view_utils.GetCurrentDateTimeForTaskName()
    master_task = Task(
        name='%s_%s' % (safe_email, timestamp),
        params={'owner_email': owner_email,
                'task_key_id': task_key_id,
                'message_criteria': message_criteria},
        target='0.recall-backend',
        url='/backend/recall_messages')
    try:
      master_task.add(queue_name='recall-messages-queue')
    except TaskQueueError:
      # Mark the db entity ABORTED before surfacing the queue error.
      view_utils.FailRecallTask(task_key_id=task_key_id,
                                reason_string='Failed to enqueue master task.')
      raise

  def _CreateNewTask(self, owner_email, message_criteria):
    """Persist a new RecallTaskModel and enqueue its master backend task.

    A db failure creating the entity is raised for the user to view; an
    enqueue failure moves the task state to ABORTED.

    Args:
      owner_email: String email address of the user. Used in authorization.
      message_criteria: String criteria used to find message(s) to recall.

    Returns:
      Urlsafe (String) key for the RecallTaskModel entity that was created.
    """
    task_key = recall_task.RecallTaskModel(
        owner_email=owner_email,
        message_criteria=message_criteria).put()
    self._EnqueueMasterRecallTask(owner_email=owner_email,
                                  message_criteria=message_criteria,
                                  task_key_id=task_key.id())
    return task_key.urlsafe()

  def post(self):  # pylint: disable=g-bad-name
    """Validate the submitted form and redirect to the new task's page."""
    _PreventUnauthorizedAccess()
    current_user_email = _SafelyGetCurrentUserEmail()
    create_task_form = CreateTaskForm(self.request.POST)
    xsrf_ok = self.IsXsrfTokenValid(
        user_email=current_user_email,
        action_id=_CREATE_TASK_ACTION,
        submitted_xsrf_token=self.request.get('xsrf_token'))
    if not xsrf_ok:
      raise recall_errors.MessageRecallXSRFError(
          '[%s] Cross Site Request Forgery Checks Failed!' % current_user_email)
    if not create_task_form.validate():
      # Re-render the form with validation messages and a new token.
      self._WriteTemplate(
          template_file='create_task',
          tpl_create_task_form=create_task_form,
          xsrf_token=self.GetXsrfToken(user_email=current_user_email,
                                       action_id=_CREATE_TASK_ACTION))
      return
    task_key_urlsafe = self._CreateNewTask(
        owner_email=current_user_email,
        message_criteria=create_task_form.sanitized_message_criteria)
    self.redirect('/task/%s' % task_key_urlsafe)
class DebugTaskPageHandler(UIBasePageHandler):
  """Serve '/task/debug' requests with debug counters for one task."""

  def get(self, task_key_urlsafe):  # pylint: disable=g-bad-name
    """Render sharded-counter debug info for the given task.

    Args:
      task_key_urlsafe: String representation of task key safe for urls.
    """
    _PreventUnauthorizedAccess()
    user_domain = view_utils.GetUserDomain(_SafelyGetCurrentUserEmail())
    task = recall_task.RecallTaskModel.FetchTaskFromSafeId(
        user_domain=user_domain, task_key_urlsafe=task_key_urlsafe)
    task_key_id = task.key.id() if task else 0
    counter_specs = (
        ('User Retrieval Tasks Started (Expected)',
         view_utils.MakeRetrievalStartedCounterName(task_key_id)),
        ('User Retrieval Tasks Ended (Actual)',
         view_utils.MakeRetrievalEndedCounterName(task_key_id)),
        ('Task Backend Errors (Automatically Retried)',
         view_utils.MakeBackendErrorCounterName(task_key_id)))
    counter_tuples = [
        (label, sharded_counter.GetCounterCount(counter_name))
        for label, counter_name in counter_specs]
    self._WriteTemplate(template_file='debug_task',
                        tpl_counter_tuples=counter_tuples, tpl_task=task)
class HistoryPageHandler(UIBasePageHandler):
  """Serve '/history' requests with one page of past recall tasks."""

  def get(self):  # pylint: disable=g-bad-name
    """Render one UI page of tasks for the current user's domain."""
    _PreventUnauthorizedAccess()
    previous_cursor = self.request.get('task_cursor')
    user_domain = view_utils.GetUserDomain(_SafelyGetCurrentUserEmail())
    results, cursor, more = (
        recall_task.RecallTaskModel.FetchOneUIPageOfTasksForDomain(
            user_domain=user_domain,
            urlsafe_cursor=previous_cursor))
    self._WriteTemplate(template_file='history', tpl_tasks=results,
                        tpl_previous_cursor=previous_cursor,
                        tpl_cursor=cursor, tpl_more=more)
class LandingPageHandler(UIBasePageHandler):
  """Serve '/' requests with the default landing page."""

  def get(self):  # pylint: disable=g-bad-name
    """Render the landing page for authorized admins."""
    _PreventUnauthorizedAccess()
    self._WriteTemplate(template_file='landing')
class TaskDetailsPageHandler(UIBasePageHandler):
  """Serve '/task' requests with details for a single task.

  Shows model fields such as task_state and calculated items such as
  'elapsed time'.
  """

  def get(self, task_key_urlsafe):  # pylint: disable=g-bad-name
    """Render the task details page.

    Args:
      task_key_urlsafe: String representation of task key safe for urls.
    """
    _PreventUnauthorizedAccess()
    user_domain = view_utils.GetUserDomain(_SafelyGetCurrentUserEmail())
    task = recall_task.RecallTaskModel.FetchTaskFromSafeId(
        user_domain=user_domain, task_key_urlsafe=task_key_urlsafe)
    self._WriteTemplate(template_file='task', tpl_task=task)
class TaskProblemsPageHandler(UIBasePageHandler):
  """Serve '/task/problems' requests listing errors hit during a recall."""

  def get(self, task_key_urlsafe):  # pylint: disable=g-bad-name
    """Render one UI page of error reasons for the given task.

    Args:
      task_key_urlsafe: String representation of task key safe for urls.
    """
    _PreventUnauthorizedAccess()
    previous_cursor = self.request.get('error_cursor')
    results, cursor, more = (
        error_reason.ErrorReasonModel.FetchOneUIPageOfErrorsForTask(
            task_key_urlsafe=task_key_urlsafe,
            urlsafe_cursor=previous_cursor))
    self._WriteTemplate(template_file='task_error_reasons',
                        tpl_errors=results,
                        tpl_previous_cursor=previous_cursor,
                        tpl_cursor=cursor, tpl_more=more,
                        tpl_task_key_urlsafe=task_key_urlsafe)
class TaskReportPageHandler(UIBasePageHandler):
  """Serve '/task/report' requests with summary results for a recall.

  Shows categories of user_state with counts and user email lists.
  """

  def get(self, task_key_urlsafe):  # pylint: disable=g-bad-name
    """Render the summary report page for the given task.

    Args:
      task_key_urlsafe: String representation of task key safe for urls.
    """
    _PreventUnauthorizedAccess()
    user_domain = view_utils.GetUserDomain(_SafelyGetCurrentUserEmail())
    task = recall_task.RecallTaskModel.FetchTaskFromSafeId(
        user_domain=user_domain, task_key_urlsafe=task_key_urlsafe)
    self._WriteTemplate(
        template_file='task_report',
        tpl_task=task,
        tpl_user_states=domain_user.USER_STATES,
        tpl_message_states=domain_user.MESSAGE_STATES,
        tpl_task_key_urlsafe=task_key_urlsafe)
class TaskUsersPageHandler(UIBasePageHandler):
  """Serve '/task/users' requests with full user lists for a task.

  Lets operators compare full lists of users against previous runs.
  """

  def get(self, task_key_urlsafe):  # pylint: disable=g-bad-name
    """Render one UI page of checked users for the given task.

    Args:
      task_key_urlsafe: String representation of task key safe for urls.
    """
    _PreventUnauthorizedAccess()
    previous_cursor = self.request.get('user_cursor')
    user_state_filters = self.request.params.getall('user_state')
    message_state_filters = self.request.params.getall('message_state')
    results, cursor, more = (
        domain_user.DomainUserToCheckModel.FetchOneUIPageOfUsersForTask(
            task_key_urlsafe=task_key_urlsafe,
            urlsafe_cursor=previous_cursor,
            user_state_filters=user_state_filters,
            message_state_filters=message_state_filters))
    self._WriteTemplate(template_file='task_users', tpl_users=results,
                        tpl_previous_cursor=previous_cursor,
                        tpl_cursor=cursor, tpl_more=more,
                        tpl_task_key_urlsafe=task_key_urlsafe)
|
apache-2.0
| 1,862,183,678,027,718,700
| 36.70495
| 80
| 0.686308
| false
| 3.936531
| false
| false
| false
|
globocom/database-as-a-service
|
dbaas/maintenance/migrations/0035_auto__add_field_databasemigrate_origin_environment.py
|
1
|
51269
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'DatabaseMigrate.origin_environment'
db.add_column(u'maintenance_databasemigrate', 'origin_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['physical.Environment']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'DatabaseMigrate.origin_environment'
db.delete_column(u'maintenance_databasemigrate', 'origin_environment_id')
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseconfiguressl': {
'Meta': {'object_name': 'DatabaseConfigureSSL'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'configure_ssl'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_configure_ssl'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasedestroy': {
'Meta': {'object_name': 'DatabaseDestroy'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasemigrate': {
'Meta': {'object_name': 'DatabaseMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'origin_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmigrate': {
'Meta': {'object_name': 'HostMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database_migrate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'hosts'", 'null': 'True', 'to': u"orm['maintenance.DatabaseMigrate']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'migrate'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zone': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'disable_alarms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relevance': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '1'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'metric_collector': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance']
|
bsd-3-clause
| 6,568,704,805,031,850,000
| 97.40691
| 227
| 0.566034
| false
| 3.586248
| true
| false
| false
|
RiceMunk/omnifit
|
omnifit/spectrum/tests/test_spectrumplotting.py
|
1
|
1494
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from astropy.tests.helper import pytest
import numpy as np
import os
from ..spectrum import *
import matplotlib.pyplot as plt
from ...tests.helpers import *
class TestSpectrumPlotting:
  """Smoke tests for the plotting methods of the spectrum classes."""
  def test_plotbasic(self):
    """
    Make sure that basic spectrum plotting works as expected
    """
    testspec = generate_spectrum()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    testspec.plot(ax)
    # a second call with an explicit matplotlib kwarg must also succeed
    testspec.plot(ax,drawstyle='steps-mid')
    plt.close()
  def test_plotwrong(self):
    """
    Make sure that plotting fails when it should
    """
    testspec = generate_spectrum()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    with pytest.raises(Exception):
      testspec.plot(ax,plotstyle='non-existent style')
    with pytest.raises(Exception):
      testspec.plot(ax,x='baselined')
    # close the figure like the other tests do, so it does not leak
    # into subsequent tests in the session
    plt.close()
  def test_plotnk(self):
    """
    Make sure that n and k spectrum plotting works as expected
    """
    testspec = generate_cdespectrum()
    fig = plt.figure()
    ax1 = fig.add_subplot(211)
    ax2 = fig.add_subplot(212)
    fig = testspec.plotnk(ax1,ax2)
    plt.close()
  def test_plotabs(self):
    """
    Make sure that OD spectrum plotting works as expected
    """
    testspec = generate_absspectrum()
    # exercise both the wavenumber and the wavelength x-axis branches
    fig = plt.figure()
    ax = fig.add_subplot(111)
    testspec.plotod(ax,in_wl=False)
    plt.close()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    testspec.plotod(ax,in_wl=True)
    plt.close()
|
bsd-3-clause
| -2,019,736,266,411,463,200
| 27.188679
| 63
| 0.659304
| false
| 3.490654
| true
| false
| false
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/snapshot_update_py3.py
|
1
|
2814
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_update import ResourceUpdate
class SnapshotUpdate(ResourceUpdate):
    """Describes the updatable properties of a snapshot resource.

    :param tags: Resource tags
    :type tags: dict[str, str]
    :param account_type: the storage account type of the disk. Possible values
     include: 'Standard_LRS', 'Premium_LRS'
    :type account_type: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.StorageAccountTypes
    :param os_type: the Operating System type. Possible values include:
     'Windows', 'Linux'
    :type os_type: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.OperatingSystemTypes
    :param creation_data: disk source information. CreationData information
     cannot be changed after the disk has been created.
    :type creation_data:
     ~azure.mgmt.compute.v2016_04_30_preview.models.CreationData
    :param disk_size_gb: If creationData.createOption is Empty, this field is
     mandatory and it indicates the size of the VHD to create. If this field is
     present for updates or creation with other options, it indicates a resize.
     Resizes are only allowed if the disk is not attached to a running VM, and
     can only increase the disk's size.
    :type disk_size_gb: int
    :param encryption_settings: Encryption settings for disk or snapshot
    :type encryption_settings:
     ~azure.mgmt.compute.v2016_04_30_preview.models.EncryptionSettings
    """

    # Maps each Python attribute to its wire-format key and msrest type.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'account_type': {'key': 'properties.accountType', 'type': 'StorageAccountTypes'},
        'os_type': {'key': 'properties.osType', 'type': 'OperatingSystemTypes'},
        'creation_data': {'key': 'properties.creationData', 'type': 'CreationData'},
        'disk_size_gb': {'key': 'properties.diskSizeGB', 'type': 'int'},
        'encryption_settings': {'key': 'properties.encryptionSettings', 'type': 'EncryptionSettings'},
    }

    def __init__(self, *, tags=None, account_type=None, os_type=None, creation_data=None, disk_size_gb: int=None, encryption_settings=None, **kwargs) -> None:
        # Let the base class record the common resource tags first.
        super().__init__(tags=tags, **kwargs)
        self.creation_data = creation_data
        self.disk_size_gb = disk_size_gb
        self.encryption_settings = encryption_settings
        self.account_type = account_type
        self.os_type = os_type
|
mit
| -6,854,801,527,586,538,000
| 47.517241
| 158
| 0.658138
| false
| 4.037303
| false
| false
| false
|
sistason/pa3
|
src/pa3_frontend/pa3_django/pa3/migrations/0017_auto_20180413_0948.py
|
1
|
1619
|
# Generated by Django 2.0.4 on 2018-04-13 07:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 2.0.4): updates the `choices`
    # lists of the `src` CharField on four models so they reflect the current
    # set of waiting-room sources.

    dependencies = [
        ('pa3', '0016_auto_20180413_0948'),
    ]

    operations = [
        migrations.AlterField(
            model_name='newestnumberbatch',
            name='src',
            field=models.CharField(choices=[('pa_10', 'H 10'), ('pa_13', 'H 13'), ('pa_23', 'H 23'), ('pa_02', 'H 02')], max_length=50),
        ),
        # NOTE(review): the choices below use *lists* as both the stored value
        # and the label, e.g. (['H 10'], ['H 10']). Django expects
        # (value, label) pairs of strings for a CharField; this looks like the
        # choices were generated from a grouped source list by mistake —
        # confirm against the model definition before relying on them.
        migrations.AlterField(
            model_name='statisticaldata',
            name='src',
            field=models.CharField(choices=[(['H 10'], ['H 10']), (['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11'], ['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11']), (['H 19', 'H 23', 'H 25'], ['H 19', 'H 23', 'H 25']), (['H 02'], ['H 02'])], max_length=50),
        ),
        migrations.AlterField(
            model_name='waitingnumber',
            name='src',
            field=models.CharField(choices=[(['H 10'], ['H 10']), (['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11'], ['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11']), (['H 19', 'H 23', 'H 25'], ['H 19', 'H 23', 'H 25']), (['H 02'], ['H 02'])], max_length=50),
        ),
        migrations.AlterField(
            model_name='waitingnumberbatch',
            name='src',
            # Same choices as newestnumberbatch, plus an index on the column.
            field=models.CharField(choices=[('pa_10', 'H 10'), ('pa_13', 'H 13'), ('pa_23', 'H 23'), ('pa_02', 'H 02')], db_index=True, max_length=50),
        ),
    ]
|
gpl-3.0
| -1,901,392,672,714,096,400
| 48.060606
| 332
| 0.521309
| false
| 3.131528
| false
| false
| false
|
semussan/pythfinder
|
popup_menu.py
|
1
|
23014
|
"""popup_menu.py - A low-fuss, infinitely nested popup menu with simple blocking
behavior, and more advanced non-blocking behavior.
Classes:
PopupMenu -> A blocking menu.
NonBlockingPopupMenu -> A non-blocking menu.
Menu -> The graphics and geometry for a menu panel. Note: You'll typically
want to use PopupMenu or NonBlockingPopupMenu instead.
MenuItem -> The graphics and geometry for a menu item. Note: You'll
typically want to use PopupMenu or NonBlockingPopupMenu instead.
SubmenuLabel -> A helper class for strong-typing of submenu labels. Note:
You'll typically want to use PopupMenu or NonBlockingPopupMenu instead.
Module data (can be changed after importing the module):
font -> pygame.font.Font object used to render menus.
bg_color -> pygame.Color object used for the menu panel background.
hi_color -> pygame.Color object used for the highlighted item background.
text_color -> pygame.Color object used for the text.
glint_color -> pygame.Color object used for bright beveled edge.
shadow_color -> pygame.Color object used for dark beveled edge.
margin -> int used for menu and item padding.
Example blocking menu:
menu_data = ['Main', 'Item 0', ['Submenu', 'Item 0'], 'Quit']
while 1:
# game stuff...
for e in pygame.event.get():
if e.type == MOUSEBUTTONUP and e.button == 3:
PopupMenu(menu_data)
elif e.type == USEREVENT and e.code == 'MENU':
print 'menu event: %s.%d: %s' % (e.name,e.item_id,e.text)
if (e.name,e.text) == ('Main','Quit'):
quit()
else:
# handle all game events normally
pass
Example non-blocking menu:
menu_data = ['Main', 'Item 0', ['Submenu', 'Item 0'], 'Quit']
menu = NonBlockingPopupMenu(menu_data)
while 1:
# update game
# clear screen
# draw game
menu.draw()
# update/flip screen
for e in menu.handle_events(pygame.event.get()):
if e.type == MOUSEBUTTONUP and e.button == 3:
menu.show()
elif e.type == USEREVENT and e.code == 'MENU':
if e.name is None:
menu.hide()
elif (e.name,e.text) == ('Main','Quit'):
quit()
else:
# handle all game events normally
pass
"""
# PopupMenu
# Version: v1.2.1
# Description: A low-fuss, infinitely nested popup menu for pygame.
# Author: Gummbum
# Home: http://code.google.com/p/simple-pygame-menu/
# Source: See home.
import pygame
from pygame import Color, Rect, MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION, USEREVENT
# pygame must be initialized before we can create a Font.
pygame.init()
try:
# "data.py" is a skellington-ism. The included custom version supports
# subdirectories by type.
import data
except:
print 'warning: no data.py in module path: proceeding without it'
finally:
try:
font = pygame.font.Font(data.filepath('font', 'Vera.ttf'), 14)
except:
print 'warning: cannot load font Vera.ttf: using system default'
font = pygame.font.SysFont(None, 20)
bg_color = Color('grey')
hi_color = Color(155,155,155)
text_color = Color('black')
glint_color = Color(220,220,220)
shadow_color = Color(105,105,105)
margin = 2
class PopupMenu(object):
    """popup_menu.PopupMenu
    PopupMenu(data, block=True) : return menu

    data -> list; the list of strings and nested lists.
    pos -> tuple; the xy screen coordinate for the topleft of the main menu; if
    None, the mouse position is used.
    block -> boolean; when True popup_menu will run its own event loop, blocking
    your main loop until it exits; when False popup_menu.get_events() will
    intercept events it cares about and return unhandled events to the
    caller.

    Note: For a non-blocking menu, use the NonBlockingPopupMenu instead. This
    class supports non-blocking, but it is more cumbersome to use than the
    NonBlockingPopupMenu class.

    The first string in the data list is taken as the menu title. The remaining
    strings are menu items. A nested list becomes a submenu. Submenu lists must
    also contain strings for menu title and menu items. Submenus can be
    theoretically infinitely nested.

    The menu runs a mini event loop. This will block the caller until it exits.
    Upon exiting, the screen is restored to its prior state.

    Left-clicking outside the topmost menu will quit the entire menu. Right-
    clicking anywhere will close the topmost submenu; if only the main menu
    remains the menu will exit. Left-clicking a menu item in the topmost menu
    will post a USEREVENT for the caller to process.

    The USEREVENT will have attributes: code='MENU', name=popup_menu.name,
    item_id=menu_item.item_id, text=menu_item.text. name is first element in a
    menu data list. item_id corresponds to the Nth element in a menu data list,
    incremented from 0; submenu items count as one menu_id even though they are
    never posted in an event. text is the string value of the Nth element in the
    menu data list. Thus, combinations of name and menu_id or name and text can
    be used to uniquely identify menu selections.

    Example menu data and resulting event data:

        ['Main',        # main menu title
         'Item 0',      # name='Main', menu_id=0, text='Item 0'
         ['Submenu',    # submenu title
          'Item 0',     # name='Submenu', menu_id=0, text='Item 0'
          'Item 1',     # name='Submenu', menu_id=0, text='Item 1'
         ],
         'Item 2',      # name='Main', menu_id=2, text='Item 2'
        ]

    High-level steps for a blocking menu:

    1. Fashion a nested list of strings for the PopupMenu constructor.
    2. Upon creation, the menu runs its own loop.
    3. Upon exit, control is returned to the caller.
    4. Handle the resulting USEREVENT event in the caller where
       event.name=='your menu title', event.item_id holds the selected item
       number, and event.text holds the item label.

    High-level steps for a non-blocking menu:

    Note: This usage exists to support the NonBlockingPopupMenu class and
    custom non-blocking implementations; for typical use NonBlockingPopupMenu
    is recommended.

    1. Fashion a nested list of strings for the PopupMenu constructor.
    2. Store the menu object in a variable.
    3. Devise a means for the main loop to choose whether to draw the menu and
       pass it events.
    4. Call menu.draw() to draw the menu.
    5. Pass pygame events to menu.handle_events() and process the unhandled
       events that are returned as you would pygame's events.
    6. Upon menu exit, one or two USEREVENTs are posted via pygame. Retrieve
       them and recognize they are menu events (event.code=='MENU').
       a. The menu-exit event signals the main loop it has exited, with or
          without a menu selection. Recognize this by event.name==None. Upon
          receiving this event the main loop should stop using the menu's
          draw() and get_events() (until the next time it wants to post the
          menu to the user).
       b. The menu-selection event signals the main loop that a menu item was
          selected. Recognize this by event.name=='your menu title'.
          event.menu_id holds the selected item number, and event.text holds
          the item label.
    7. Destroying the menu is not necessary. But creating and destroying it may
       be a convenient means to manage the menu state (i.e. to post it or not).
    """

    def __init__(self, data, pos=None, block=True):
        # Build the lookup structures from the nested data list, create the
        # main Menu, snapshot the screen, then (optionally) run the blocking
        # event loop.

        # list of open Menu() objects; menus[-1] is the topmost (active) menu
        self.menus = []
        # key to main menu data (the title string)
        self.top = data[0]
        # dict of menus, keyed by menu title
        self.data = {self.top:[]}
        # walk the nested list, creating the data dict for easy lookup
        self._walk(self.top, list(data))
        # make the main menu
        self._make_menu(self.data[self.top], pos)
        # Save the display surface; use to clear screen
        self.screen = pygame.display.get_surface()
        self.clear_screen = self.screen.copy()
        if block:
            # In blocking mode, _run() loops until a selection or close and
            # its result is kept on self.selection for the caller to inspect.
            self.selection=self._run(block)
            if self.selection is not None:
                return

    def handle_events(self, events, block=False):
        """Process menu-relevant events from *events*.

        Returns [(menu, item)] when an item was picked, [] when the menu
        was closed without a pick, and None when the menu stays open.

        NOTE(review): the local `unhandled` list is collected but discarded
        -- the final statement returns None instead of `unhandled`, even
        though the module docstring promises unhandled events are returned
        to the caller. Non-blocking callers that iterate the return value
        will fail on None; confirm this deviation from the documented
        contract is intended.
        """
        unhandled = []
        for e in events:
            if e.type == MOUSEBUTTONUP:
                if e.button == 1:
                    # Left click: act on the hovered item of the topmost menu.
                    menu = self.menus[-1]
                    item = menu.menu_item
                    if item:
                        if isinstance(item.text, SubmenuLabel):
                            # open submenu; strip the '...' suffix to get the key
                            key = item.text[:-3]
                            self._make_menu(self.data[key])
                        else:
                            # pick item (post event)
                            self._quit(block)
                            return [(menu, item)]
                    else:
                        # close menu
                        self._quit(block)
                        return []
                elif e.button == 3:
                    # Right click: close the topmost submenu, or exit entirely
                    # when only the main menu remains.
                    if len(self.menus) == 1:
                        self._quit(block)
                        return []
                    else:
                        self._del_menu()
            elif e.type == MOUSEMOTION:
                # Track the pointer for hover highlighting on the topmost menu.
                self.mouse_pos = e.pos
                self.menus[-1].check_collision(self.mouse_pos)
                unhandled.append(e)
            elif e.type == MOUSEBUTTONDOWN:
                # Swallowed: the menu acts on MOUSEBUTTONUP only.
                pass
            else:
                unhandled.append(e)
        return None

    def draw(self):
        """Draw every open menu, main menu first, submenus on top."""
        for menu in self.menus:
            menu.draw()

    def _pick_event(self, menu, item):
        # Build (but do not post) the USEREVENT describing a menu selection.
        # NOTE(review): not called anywhere in this class -- selections are
        # returned directly from handle_events() instead; apparently kept
        # for compatibility with the upstream event-posting version.
        event = pygame.event.Event(USEREVENT, code='MENU',
            name=menu.name, item_id=item.item_id, text=item.text)
        return event

    def _quit_event(self):
        # Build (but do not post) the USEREVENT signalling menu exit.
        # NOTE(review): also unused within this class; see _pick_event.
        event = pygame.event.Event(USEREVENT, code='MENU',
            name=None, item_id=-1, text='_MENU_EXIT_')
        return event

    def _run(self, block=True):
        """Blocking mini event loop at 60 FPS.

        Relies on handle_events() returning None while the menu stays open;
        the first non-None result (selection list or []) is returned.
        """
        screen = self.screen
        clock = pygame.time.Clock()
        self.mouse_pos = pygame.mouse.get_pos()
        self.running = True
        while self.running:
            # Restore the saved background each frame, then draw the menus.
            self.screen.blit(self.clear_screen, (0,0))
            self.draw()
            pygame.display.flip()
            ret=self.handle_events(pygame.event.get())
            if ret is not None:
                return ret
            clock.tick(60)

    def _walk(self, key, data):
        # Recursively walk the nested data lists, building the data dict for
        # easy lookup. Nested lists become submenus: their title is wrapped
        # in a SubmenuLabel and stored under the parent key, and a new dict
        # entry is created for the submenu's own items.
        for i,ent in enumerate(data):
            if isinstance(ent, str):
                self.data[key].append(ent)
            else:
                ent = list(ent)
                new_key = ent[0]
                ent[0] = SubmenuLabel(new_key)
                self.data[key].append(ent[0])
                self.data[new_key] = []
                self._walk(new_key, list(ent))

    def _make_menu(self, data, pos=None):
        # Make a menu from data list and add it to the menu stack.
        if self.menus:
            # position submenu relative to parent (topright of hovered item)
            parent = self.menus[-1]
            rect = parent.menu_item.rect
            pos = rect.right,rect.top
            # unset the parent's menu_item (for appearance)
            parent.menu_item = None
        else:
            # position main menu at mouse
            if pos is None:
                pos = pygame.mouse.get_pos()
        name = data[0]
        items = data[1:]
        self.menus.append(Menu(pos, name, items))

    def _del_menu(self):
        # Remove the topmost menu from the menu stack.
        self.menus.pop()

    def _quit(self, block):
        # Stop the event loop; in blocking mode also put the original screen
        # contents back.
        if block:
            self.screen.blit(self.clear_screen, (0,0))
            pygame.display.flip()
        self.running = False
class NonBlockingPopupMenu(PopupMenu):
    """popup_menu.NonBlockingPopupMenu
    NonBlockingPopupMenu(data, pos=None, show=False) : return menu

    data -> list; the list of strings and nested lists.
    pos -> tuple; topleft screen coordinate of the main menu; the mouse
    position is used when None.
    show -> boolean; make the menu visible immediately.

    A thin visibility wrapper around PopupMenu for use inside an existing
    game loop. While hidden, draw() does nothing and handle_events() hands
    every event straight back to the caller; while visible, both delegate
    to PopupMenu. The `visible` read-write property mirrors the show() and
    hide() methods.

    The constructor keeps a reference to *data* rather than a copy, so
    mutating the list (or replacing menu.init_data outright) changes the
    menu built by the next show().

    Typical usage: construct once, call show() on your trigger condition,
    call draw() every frame and route pygame events through
    handle_events(). When a USEREVENT with code=='MENU' arrives, an
    event.name of None means the menu exited (call hide()); otherwise
    event.name/event.item_id/event.text describe the selection.
    """

    def __init__(self, data, pos=None, show=False):
        # Reference, not copy: later edits to the list are picked up by the
        # next show().
        self.init_data = data
        self._init_pos = pos
        (self.show if show else self.hide)()

    def show(self):
        """Build the menu geometry and graphics, and make the menu visible."""
        PopupMenu.__init__(self, self.init_data, pos=self._init_pos, block=False)
        self._show = True

    def hide(self):
        """Discard the menu geometry and graphics, and hide the menu."""
        try:
            # No menus exist before the first show(); that's fine.
            del self.menus[:]
        except AttributeError:
            pass
        self._show = False

    @property
    def visible(self):
        """Boolean visibility state; assigning to it calls show()/hide()."""
        return self._show

    @visible.setter
    def visible(self, val):
        (self.show if val else self.hide)()

    def handle_events(self, events):
        """Route events through the menu when visible; otherwise return
        *events* unchanged.
        """
        if not self._show:
            return events
        return PopupMenu.handle_events(self, events)

    def draw(self):
        """Draw the menu, but only while it is visible."""
        if self._show:
            PopupMenu.draw(self)
class SubmenuLabel(str):
    """popup_menu.SubmenuLabel
    SubmenuLabel(s) : return label

    s -> str; the label text

    A str subclass that marks a menu entry as a submenu title. The stored
    text is the original string with '...' appended; PopupMenu strips the
    suffix again (text[:-3]) to recover the submenu key. The subclass
    exists purely so submenu labels can be recognized via isinstance().

    Not intended for direct use; see PopupMenu or NonBlockingPopupMenu.
    """

    def __new__(cls, s):
        # str is immutable, so the suffix has to be attached in __new__.
        return super(SubmenuLabel, cls).__new__(cls, s + '...')
class MenuItem(object):
    """popup_menu.MenuItem
    MenuItem(text, item_id) : return menu_item

    text -> str; the display text.
    item_id -> int; the numeric ID; also the item_id attribute returned in
    the pygame event.

    Holds one menu entry: its label, numeric ID, a pre-rendered label
    surface, and a rect that the owning Menu positions during layout.

    Not intended for direct use; see PopupMenu or NonBlockingPopupMenu,
    unless designing your own subclass.
    """

    def __init__(self, text, item_id):
        self.item_id = item_id
        self.text = text
        # Render the label once with the module font/theme; Menu will move
        # and resize self.rect when laying the items out.
        self.image = font.render(text, True, text_color)
        self.rect = self.image.get_rect()
class Menu(object):
    """popup_menu.Menu
    Menu(pos, name, items) : return menu

    pos -> (x,y); topleft coordinates of the menu.
    name -> str; the name of the menu.
    items -> list; a list containing strings for menu items labels.

    This class is not intended to be used directly. Use PopupMenu or
    NonBlockingPopupMenu instead, unless designing your own subclass.
    """

    def __init__(self, pos, name, items):
        # Lay out and pre-render one menu panel: title, item labels, the
        # hover-highlight bar, and the bordered background surface.
        # NOTE(review): assumes `items` is non-empty -- the highlight rect
        # below is sized from the last MenuItem created in the loop; an
        # empty items list would raise NameError on `menu_item`.
        screen = pygame.display.get_surface()
        screen_rect = screen.get_rect()
        self.name = name
        self.items = []
        # menu_item is the currently hovered MenuItem (or None); it is set
        # by check_collision() and read by PopupMenu.handle_events().
        self.menu_item = None
        # Make the frame rect
        x,y = pos
        self.rect = Rect(x,y,0,0)
        self.rect.width += margin * 2
        self.rect.height += margin * 2
        # Make the title image and rect, and grow the frame rect
        self.title_image = font.render(name, True, text_color)
        self.title_rect = self.title_image.get_rect(topleft=(x+margin,y+margin))
        self.rect.width = margin*2 + self.title_rect.width
        self.rect.height = margin + self.title_rect.height
        # Make the item highlight rect
        self.hi_rect = Rect(0,0,0,0)
        # Make menu items; widen the frame to the widest label and grow its
        # height by one row (plus margin) per item.
        n = 0
        for item in items:
            menu_item = MenuItem(item, n)
            self.items.append(menu_item)
            self.rect.width = max(self.rect.width, menu_item.rect.width+margin*2)
            self.rect.height += menu_item.rect.height + margin
            n += 1
        self.rect.height += margin
        # Position menu fully within view
        if not screen_rect.contains(self.rect):
            # NOTE(review): savex,savey is assigned but never read.
            savex,savey = self.rect.topleft
            self.rect.clamp_ip(screen_rect)
            self.title_rect.top = self.rect.top + margin
            self.title_rect.left = self.rect.left + margin
        # Position menu items within menu frame
        y = self.title_rect.bottom + margin
        for item in self.items:
            item.rect.x = self.rect.x + margin
            item.rect.y = y
            y = item.rect.bottom + margin
            item.rect.width = self.rect.width - margin*2
        # Calculate highlight rect's left-alignment and size. `menu_item`
        # here is the last item from the creation loop above; all items
        # share the same left edge and row height.
        self.hi_rect.left = menu_item.rect.left
        self.hi_rect.width = self.rect.width - margin*2
        self.hi_rect.height = menu_item.rect.height
        # Create the menu frame and highlight frame images
        self.bg_image = pygame.surface.Surface(self.rect.size)
        self.hi_image = pygame.surface.Surface(self.hi_rect.size)
        self.bg_image.fill(bg_color)
        self.hi_image.fill(hi_color)
        # Draw menu border: light outline, then dark bottom/right edges for
        # a beveled look.
        rect = self.bg_image.get_rect()
        pygame.draw.rect(self.bg_image, glint_color, rect, 1)
        t,l,b,r = rect.top,rect.left,rect.bottom,rect.right
        pygame.draw.line(self.bg_image, shadow_color, (l,b-1), (r,b-1), 1)
        pygame.draw.line(self.bg_image, shadow_color, (r-1,t), (r-1,b), 1)
        # Draw title divider in menu frame
        left = margin
        right = self.rect.width - margin*2
        y = self.title_rect.height + 1
        pygame.draw.line(self.bg_image, shadow_color, (left,y), (right,y))

    def draw(self):
        # Draw the menu on the main display: background, title, then each
        # item label, with the highlight bar under the hovered item.
        screen = pygame.display.get_surface()
        screen.blit(self.bg_image, self.rect)
        screen.blit(self.title_image, self.title_rect)
        for item in self.items:
            if item is self.menu_item:
                self.hi_rect.top = item.rect.top
                screen.blit(self.hi_image, self.hi_rect)
            screen.blit(item.image, item.rect)

    def check_collision(self, mouse_pos):
        # Set self.menu_item if the mouse is hovering over one; cleared to
        # None when the pointer is outside the menu or between items.
        self.menu_item = None
        if self.rect.collidepoint(mouse_pos):
            for item in self.items:
                if item.rect.collidepoint(mouse_pos):
                    self.menu_item = item
                    break
if __name__ == '__main__':
    # Interactive self-test of the non-blocking menu: click to open the
    # menu, pick "Quit" (under Main) to exit.
    # BUG FIX: RESIZABLE is not in this module's `from pygame import ...`
    # list, so the bare name raised NameError -- qualify it as
    # pygame.RESIZABLE.
    screen = pygame.display.set_mode((600,600), pygame.RESIZABLE)
    clock = pygame.time.Clock()
    menu_data = (
        'Main',
        'Item 0',
        'Item 1',
        (
            'More Things',
            'Item 0',
            'Item 1',
        ),
        'Quit',
    )

    class Cursor(object):
        """Crosshair cursor drawn in place of the (hidden) system cursor."""
        def __init__(self):
            self.image = pygame.surface.Surface((13,13))
            # Layered crosshair: wide yellow, narrower blue, 1px black core.
            pygame.draw.line(self.image, Color('yellow'), (6,0), (6,12), 5)
            pygame.draw.line(self.image, Color('yellow'), (0,6), (12,6), 5)
            pygame.draw.line(self.image, Color(0,0,99), (6,0), (6,12), 3)
            pygame.draw.line(self.image, Color(0,0,99), (0,6), (12,6), 3)
            # BUG FIX: endpoint was (6,120), overshooting the 13x13 surface;
            # pygame clipped it so it looked right, but (6,12) is what every
            # sibling line uses.
            pygame.draw.line(self.image, Color('black'), (6,0), (6,12), 1)
            pygame.draw.line(self.image, Color('black'), (0,6), (12,6), 1)
            self.image.set_colorkey(Color('black'))
            self.rect = self.image.get_rect(center=(0,0))
            pygame.mouse.set_visible(False)
        def draw(self):
            pygame.display.get_surface().blit(self.image, self.rect)
    cursor = Cursor()

    def handle_menu(e):
        # Report the selection; exit the demo on Main -> Quit.
        print('menu event: %s.%d: %s' % (e.name, e.item_id, e.text))
        if e.name == 'Main':
            if e.text == 'Quit':
                quit()

    menu = NonBlockingPopupMenu(menu_data)
    while 1:
        clock.tick(60)
        for e in menu.handle_events(pygame.event.get()):
            if e.type == MOUSEBUTTONUP:
                menu.show()
            elif e.type == MOUSEMOTION:
                cursor.rect.center = e.pos
            elif e.type == USEREVENT:
                # NOTE(review): this branch expects posted MENU events, but
                # PopupMenu.handle_events in this file returns selections
                # directly instead of posting them -- confirm the demo still
                # matches the modified event protocol.
                if e.code == 'MENU':
                    if e.name is None:
                        menu.hide()
                    else:
                        handle_menu(e)
        screen.fill(Color('darkblue'))
        menu.draw()
        cursor.draw()
        pygame.display.flip()
|
gpl-2.0
| 423,739,067,350,770,560
| 37.614094
| 86
| 0.589424
| false
| 3.978907
| false
| false
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.