code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
from __future__ import absolute_import, division, print_function
import numpy as np
from matplotlib.colors import Normalize
from matplotlib.collections import LineCollection
from mpl_scatter_density import ScatterDensityArtist
from astropy.visualization import (ImageNormalize, LinearStretch, SqrtStretch,
AsinhStretch, LogStretch)
from glue.utils import defer_draw, broadcast_to
from glue.viewers.scatter.state import ScatterLayerState
from glue.viewers.matplotlib.layer_artist import MatplotlibLayerArtist
from glue.core.exceptions import IncompatibleAttribute
# Mapping from the stretch name stored in the layer state to the
# corresponding astropy stretch class (instantiated when applied).
STRETCHES = {'linear': LinearStretch,
             'sqrt': SqrtStretch,
             'arcsinh': AsinhStretch,
             'log': LogStretch}

# State properties grouped by the kind of artist update they trigger.
CMAP_PROPERTIES = {'cmap_mode', 'cmap_att', 'cmap_vmin', 'cmap_vmax', 'cmap'}
MARKER_PROPERTIES = {'size_mode', 'size_att', 'size_vmin', 'size_vmax',
                     'size_scaling', 'size'}
LINE_PROPERTIES = {'linewidth', 'linestyle'}
DENSITY_PROPERTIES = {'dpi', 'stretch', 'density_contrast'}

# Properties that only require restyling the existing artists.
VISUAL_PROPERTIES = (CMAP_PROPERTIES | MARKER_PROPERTIES | DENSITY_PROPERTIES |
                     LINE_PROPERTIES | {'color', 'alpha', 'zorder', 'visible'})

# Properties that require the plotted data arrays to be recomputed.
DATA_PROPERTIES = {'layer', 'x_att', 'y_att', 'cmap_mode', 'size_mode',
                   'density_map', 'xerr_att', 'yerr_att', 'xerr_visible',
                   'yerr_visible', 'vector_visible', 'vx_att', 'vy_att',
                   'vector_arrowhead', 'vector_mode', 'vector_origin',
                   'line_visible', 'markers_visible', 'vector_scaling'}
class InvertedNormalize(Normalize):
    """Normalize subclass that flips the mapping, returning ``1 - value``.

    Used when the user supplies vmin > vmax, so that the colormap runs
    in the reverse direction.
    """

    def __call__(self, *args, **kwargs):
        normalized = super(InvertedNormalize, self).__call__(*args, **kwargs)
        return 1 - normalized
class DensityMapLimits(object):
    """Provide vmin/vmax callables for the density artist.

    The minimum is pinned to zero; the maximum is the array peak scaled
    in log space by the ``contrast`` setting.
    """

    # Contrast exponent applied to log10 of the array maximum.
    contrast = 1

    def min(self, array):
        return 0

    def max(self, array):
        log_peak = np.log10(np.nanmax(array))
        return 10. ** (log_peak * self.contrast)
def set_mpl_artist_cmap(artist, values, state):
    """Apply the state's colormap settings and *values* to a Matplotlib artist.

    Density artists take the values via ``set_c``; all other artists use
    ``set_array``. When vmin > vmax the limits are swapped and an inverted
    normalization is installed so the colormap effectively runs backwards.
    """
    lo, hi = state.cmap_vmin, state.cmap_vmax
    if isinstance(artist, ScatterDensityArtist):
        artist.set_c(values)
    else:
        artist.set_array(values)
    artist.set_cmap(state.cmap)
    if lo > hi:
        artist.set_clim(hi, lo)
        artist.set_norm(InvertedNormalize(hi, lo))
    else:
        artist.set_clim(lo, hi)
        artist.set_norm(Normalize(lo, hi))
class ScatterLayerArtist(MatplotlibLayerArtist):
    """Layer artist that renders one glue data layer in the scatter viewer.

    A layer can be shown in several modes (plain markers, density map,
    color/size-mapped markers, connecting line, error bars, vectors).
    One Matplotlib artist is kept per mode; unused artists are emptied
    rather than removed so they can be reused on the next update.
    """

    _layer_state_cls = ScatterLayerState

    def __init__(self, axes, viewer_state, layer_state=None, layer=None):

        super(ScatterLayerArtist, self).__init__(axes, viewer_state,
                                                 layer_state=layer_state,
                                                 layer=layer)

        # Watch for changes in the viewer state which would require the
        # layers to be redrawn
        self._viewer_state.add_global_callback(self._update_scatter)
        self.state.add_global_callback(self._update_scatter)

        # Scatter: scatter_artist handles color/size-mapped markers, while
        # plot_artist is a faster plain-marker fallback (see _update_data).
        self.scatter_artist = self.axes.scatter([], [])
        self.plot_artist = self.axes.plot([], [], 'o', mec='none')[0]
        self.errorbar_artist = self.axes.errorbar([], [], fmt='none')
        self.vector_artist = None
        self.line_collection = LineCollection(np.zeros((0, 2, 2)))
        self.axes.add_collection(self.line_collection)

        # Scatter density map, with auto limits driven by DensityMapLimits
        # (vmin/vmax are callables evaluated on the density array).
        self.density_auto_limits = DensityMapLimits()
        self.density_artist = ScatterDensityArtist(self.axes, [], [], color='white',
                                                   vmin=self.density_auto_limits.min,
                                                   vmax=self.density_auto_limits.max)
        self.axes.add_artist(self.density_artist)

        self.mpl_artists = [self.scatter_artist, self.plot_artist,
                            self.errorbar_artist, self.vector_artist,
                            self.line_collection, self.density_artist]
        # Positions in mpl_artists of the artists that get re-created on
        # every data update (errorbar/quiver cannot be updated in place).
        self.errorbar_index = 2
        self.vector_index = 3

        self.reset_cache()

    def reset_cache(self):
        # Forget the previously seen state so the next update is treated
        # as if everything changed.
        self._last_viewer_state = {}
        self._last_layer_state = {}

    @defer_draw
    def _update_data(self, changed):
        """Recompute the data arrays behind every artist of this layer.

        Fetches x/y (and optional error/vector attributes) from the layer
        and pushes them into whichever artists the current state enables;
        the artists for disabled modes are emptied.
        """

        # Layer artist has been cleared already
        if len(self.mpl_artists) == 0:
            return

        try:
            x = self.layer[self._viewer_state.x_att].ravel()
        except (IncompatibleAttribute, IndexError):
            # The following includes a call to self.clear()
            self.disable_invalid_attributes(self._viewer_state.x_att)
            return
        else:
            self.enable()

        try:
            y = self.layer[self._viewer_state.y_att].ravel()
        except (IncompatibleAttribute, IndexError):
            # The following includes a call to self.clear()
            self.disable_invalid_attributes(self._viewer_state.y_att)
            return
        else:
            self.enable()

        if self.state.markers_visible:
            if self.state.density_map:
                # Density mode: feed the density artist, empty the others.
                self.density_artist.set_xy(x, y)
                self.plot_artist.set_data([], [])
                self.scatter_artist.set_offsets(np.zeros((0, 2)))
            else:
                if self.state.cmap_mode == 'Fixed' and self.state.size_mode == 'Fixed':
                    # In this case we use Matplotlib's plot function because it has much
                    # better performance than scatter.
                    self.plot_artist.set_data(x, y)
                    self.scatter_artist.set_offsets(np.zeros((0, 2)))
                    self.density_artist.set_xy([], [])
                else:
                    self.plot_artist.set_data([], [])
                    offsets = np.vstack((x, y)).transpose()
                    self.scatter_artist.set_offsets(offsets)
                    self.density_artist.set_xy([], [])
        else:
            # Markers disabled entirely: empty all three marker artists.
            self.plot_artist.set_data([], [])
            self.scatter_artist.set_offsets(np.zeros((0, 2)))
            self.density_artist.set_xy([], [])

        if self.state.line_visible:
            if self.state.cmap_mode == 'Fixed':
                points = np.array([x, y]).transpose()
                self.line_collection.set_segments([points])
            else:
                # In the case where we want to color the line, we need to over
                # sample the line by a factor of two so that we can assign the
                # correct colors to segments - if we didn't do this, then
                # segments on one side of a point would be a different color
                # from the other side. With oversampling, we can have half a
                # segment on either side of a point be the same color as a
                # point
                x_fine = np.zeros(len(x) * 2 - 1, dtype=float)
                y_fine = np.zeros(len(y) * 2 - 1, dtype=float)
                x_fine[::2] = x
                x_fine[1::2] = 0.5 * (x[1:] + x[:-1])
                y_fine[::2] = y
                y_fine[1::2] = 0.5 * (y[1:] + y[:-1])
                points = np.array([x_fine, y_fine]).transpose().reshape(-1, 1, 2)
                segments = np.concatenate([points[:-1], points[1:]], axis=1)
                self.line_collection.set_segments(segments)
        else:
            self.line_collection.set_segments(np.zeros((0, 2, 2)))

        # Error bars cannot be updated in place, so remove the old artists
        # (errorbar_artist[2] holds the error line collections).
        for eartist in list(self.errorbar_artist[2]):
            if eartist is not None:
                try:
                    eartist.remove()
                except ValueError:
                    pass
                except AttributeError:  # Matplotlib < 1.5
                    pass

        # Quiver artists cannot be updated in place either.
        if self.vector_artist is not None:
            self.vector_artist.remove()
            self.vector_artist = None

        if self.state.vector_visible:

            if self.state.vx_att is not None and self.state.vy_att is not None:

                vx = self.layer[self.state.vx_att].ravel()
                vy = self.layer[self.state.vy_att].ravel()

                if self.state.vector_mode == 'Polar':
                    ang = vx
                    length = vy
                    # assume ang is anti clockwise from the x axis
                    vx = length * np.cos(np.radians(ang))
                    vy = length * np.sin(np.radians(ang))

            else:
                # NOTE(review): vx/vy stay None here, yet np.hypot below is
                # still evaluated — presumably vx_att/vy_att are always set
                # when vector_visible is True; confirm against the state class.
                vx = None
                vy = None

            if self.state.vector_arrowhead:
                hw = 3
                hl = 5
            else:
                hw = 1
                hl = 0

            # Normalize vectors by the largest magnitude so the scale
            # setting behaves consistently across layers.
            v = np.hypot(vx, vy)
            vmax = np.nanmax(v)
            vx = vx / vmax
            vy = vy / vmax

            self.vector_artist = self.axes.quiver(x, y, vx, vy, units='width',
                                                  pivot=self.state.vector_origin,
                                                  headwidth=hw, headlength=hl,
                                                  scale_units='width',
                                                  scale=10 / self.state.vector_scaling)
            self.mpl_artists[self.vector_index] = self.vector_artist

        if self.state.xerr_visible or self.state.yerr_visible:

            if self.state.xerr_visible and self.state.xerr_att is not None:
                xerr = self.layer[self.state.xerr_att].ravel()
            else:
                xerr = None

            if self.state.yerr_visible and self.state.yerr_att is not None:
                yerr = self.layer[self.state.yerr_att].ravel()
            else:
                yerr = None

            self.errorbar_artist = self.axes.errorbar(x, y, fmt='none',
                                                      xerr=xerr, yerr=yerr)
            self.mpl_artists[self.errorbar_index] = self.errorbar_artist

    @defer_draw
    def _update_visual_attributes(self, changed, force=False):
        """Restyle the existing artists (colors, sizes, alpha, zorder, ...).

        Only the attributes named in *changed* are touched unless *force*
        is set, in which case everything is reapplied.
        """

        if not self.enabled:
            return

        if self.state.markers_visible:

            if self.state.density_map:

                if self.state.cmap_mode == 'Fixed':
                    if force or 'color' in changed or 'cmap_mode' in changed:
                        self.density_artist.set_color(self.state.color)
                        # Clearing c reverts the artist to flat-color mode.
                        self.density_artist.set_c(None)
                        self.density_artist.set_clim(self.density_auto_limits.min,
                                                     self.density_auto_limits.max)
                elif force or any(prop in changed for prop in CMAP_PROPERTIES):
                    c = self.layer[self.state.cmap_att].ravel()
                    set_mpl_artist_cmap(self.density_artist, c, self.state)

                if force or 'stretch' in changed:
                    self.density_artist.set_norm(ImageNormalize(stretch=STRETCHES[self.state.stretch]()))

                if force or 'dpi' in changed:
                    self.density_artist.set_dpi(self._viewer_state.dpi)

                if force or 'density_contrast' in changed:
                    self.density_auto_limits.contrast = self.state.density_contrast
                    # Mark stale so the density image is regenerated.
                    self.density_artist.stale = True

            else:

                if self.state.cmap_mode == 'Fixed' and self.state.size_mode == 'Fixed':
                    # Fast plain-marker path (plot_artist).
                    if force or 'color' in changed:
                        self.plot_artist.set_color(self.state.color)
                    if force or 'size' in changed or 'size_scaling' in changed:
                        self.plot_artist.set_markersize(self.state.size *
                                                        self.state.size_scaling)
                else:
                    # TEMPORARY: Matplotlib has a bug that causes set_alpha to
                    # change the colors back: https://github.com/matplotlib/matplotlib/issues/8953
                    if 'alpha' in changed:
                        force = True

                    if self.state.cmap_mode == 'Fixed':
                        if force or 'color' in changed or 'cmap_mode' in changed:
                            self.scatter_artist.set_facecolors(self.state.color)
                            self.scatter_artist.set_edgecolor('none')
                    elif force or any(prop in changed for prop in CMAP_PROPERTIES):
                        c = self.layer[self.state.cmap_att].ravel()
                        set_mpl_artist_cmap(self.scatter_artist, c, self.state)
                        self.scatter_artist.set_edgecolor('none')

                    if force or any(prop in changed for prop in MARKER_PROPERTIES):
                        if self.state.size_mode == 'Fixed':
                            s = self.state.size * self.state.size_scaling
                            s = broadcast_to(s, self.scatter_artist.get_sizes().shape)
                        else:
                            # Linearly rescale attribute values onto a 0-30
                            # marker-size range before applying the scaling.
                            s = self.layer[self.state.size_att].ravel()
                            s = ((s - self.state.size_vmin) /
                                 (self.state.size_vmax - self.state.size_vmin)) * 30
                            s *= self.state.size_scaling
                        # Note, we need to square here because for scatter, s is actually
                        # proportional to the marker area, not radius.
                        self.scatter_artist.set_sizes(s ** 2)

        if self.state.line_visible:

            if self.state.cmap_mode == 'Fixed':
                if force or 'color' in changed or 'cmap_mode' in changed:
                    self.line_collection.set_array(None)
                    self.line_collection.set_color(self.state.color)
            elif force or any(prop in changed for prop in CMAP_PROPERTIES):
                # Higher up we oversampled the points in the line so that
                # half a segment on either side of each point has the right
                # color, so we need to also oversample the color here.
                c = self.layer[self.state.cmap_att].ravel()
                cnew = np.zeros((len(c) - 1) * 2)
                cnew[::2] = c[:-1]
                cnew[1::2] = c[1:]
                set_mpl_artist_cmap(self.line_collection, cnew, self.state)

            if force or 'linewidth' in changed:
                self.line_collection.set_linewidth(self.state.linewidth)

            if force or 'linestyle' in changed:
                self.line_collection.set_linestyle(self.state.linestyle)

        if self.state.vector_visible and self.vector_artist is not None:

            if self.state.cmap_mode == 'Fixed':
                if force or 'color' in changed or 'cmap_mode' in changed:
                    self.vector_artist.set_array(None)
                    self.vector_artist.set_color(self.state.color)
            elif force or any(prop in changed for prop in CMAP_PROPERTIES):
                c = self.layer[self.state.cmap_att].ravel()
                set_mpl_artist_cmap(self.vector_artist, c, self.state)

        if self.state.xerr_visible or self.state.yerr_visible:

            # errorbar_artist[2] holds the error line collections; style
            # each one individually (they are recreated in _update_data).
            for eartist in list(self.errorbar_artist[2]):

                if eartist is None:
                    continue

                if self.state.cmap_mode == 'Fixed':
                    if force or 'color' in changed or 'cmap_mode' in changed:
                        eartist.set_color(self.state.color)
                elif force or any(prop in changed for prop in CMAP_PROPERTIES):
                    c = self.layer[self.state.cmap_att].ravel()
                    set_mpl_artist_cmap(eartist, c, self.state)

                if force or 'alpha' in changed:
                    eartist.set_alpha(self.state.alpha)

                if force or 'visible' in changed:
                    eartist.set_visible(self.state.visible)

                if force or 'zorder' in changed:
                    eartist.set_zorder(self.state.zorder)

        # Attributes shared by all remaining artists.
        for artist in [self.scatter_artist, self.plot_artist,
                       self.vector_artist, self.line_collection,
                       self.density_artist]:

            if artist is None:
                continue

            if force or 'alpha' in changed:
                artist.set_alpha(self.state.alpha)

            if force or 'zorder' in changed:
                artist.set_zorder(self.state.zorder)

            if force or 'visible' in changed:
                artist.set_visible(self.state.visible)

        self.redraw()

    @defer_draw
    def _update_scatter(self, force=False, **kwargs):
        """Entry point called on any state change; dispatches to the data
        and/or visual update depending on which properties changed."""

        if (self._viewer_state.x_att is None or
                self._viewer_state.y_att is None or
                self.state.layer is None):
            return

        # Figure out which attributes are different from before. Ideally we shouldn't
        # need this but currently this method is called multiple times if an
        # attribute is changed due to x_att changing then hist_x_min, hist_x_max, etc.
        # If we can solve this so that _update_histogram is really only called once
        # then we could consider simplifying this. Until then, we manually keep track
        # of which properties have changed.
        changed = set()

        if not force:

            for key, value in self._viewer_state.as_dict().items():
                if value != self._last_viewer_state.get(key, None):
                    changed.add(key)

            for key, value in self.state.as_dict().items():
                if value != self._last_layer_state.get(key, None):
                    changed.add(key)

        self._last_viewer_state.update(self._viewer_state.as_dict())
        self._last_layer_state.update(self.state.as_dict())

        if force or len(changed & DATA_PROPERTIES) > 0:
            self._update_data(changed)
            # A data update invalidates the styling, so force a full restyle.
            force = True

        if force or len(changed & VISUAL_PROPERTIES) > 0:
            self._update_visual_attributes(changed, force=force)

    def get_layer_color(self):
        # In colormap mode the layer is represented by its colormap rather
        # than a single color.
        if self.state.cmap_mode == 'Fixed':
            return self.state.color
        else:
            return self.state.cmap

    @defer_draw
    def update(self):
        """Force a full recompute and redraw of the layer."""
        self._update_scatter(force=True)
        self.redraw()
|
stscieisenhamer/glue
|
glue/viewers/scatter/layer_artist.py
|
Python
|
bsd-3-clause
| 18,014
|
# This script calculates how many error reports are in each subdirectory
# and how many error reports are in total.
# Edit in_dir and out_file parameters as you need.
import os

# Configuration: directory tree to scan and the statistics output file.
in_dir = "D:/Projects/CrashRpt/valid_reports"
out_file = "stats.txt"

# Output handle is opened up front (and closed at the end of the script)
# so a failure to create the file surfaces before the scan starts.
f = open(out_file, "w")
def get_txt_file_count(dirname):
    """Return the number of ``.txt`` files directly inside *dirname*.

    Only the top level of *dirname* is inspected (not recursive), matching
    how the caller visits every subdirectory itself.
    """
    try:
        # First item yielded by os.walk describes dirname itself;
        # index 2 is its list of plain files.
        filenames = next(os.walk(dirname))[2]
    except StopIteration:
        # Nonexistent/unreadable directory: os.walk yields nothing.
        return 0
    return sum(1 for name in filenames if name.endswith(".txt"))
# Group directories by report count: {report_count: [dir_name, ...]}.
multimap = dict()
for root, dirs, files in os.walk(in_dir):
    for dir in dirs:
        dir_name = os.path.join(root, dir)
        report_count_in_dir = get_txt_file_count(dir_name)
        multimap.setdefault(report_count_in_dir, []).append(dir)

# Largest groups first.
ordered_list = sorted(multimap.keys(), reverse=True)

total_count = 0
total_groups = 0
for count in ordered_list:
    total_groups += len(multimap[count])
    total_count += count * len(multimap[count])

f.write("Total %d reports (100%%) in %d directories\n" % (total_count, total_groups))

n = 1
for key in ordered_list:
    for dir in multimap[key]:
        # float() avoids Python 2 integer division (which truncated every
        # percentage below 100%% to 0); the guard avoids ZeroDivisionError
        # when the scan finds no reports at all.
        percent = (float(key) / total_count * 100) if total_count else 0.0
        f.write("%d. %d reports (%0.1f%%) in '%s'\n" % (n, key, percent, dir))
        n = n + 1
f.close()
|
BeamNG/crashrpt
|
processing/scripts/basic_stats.py
|
Python
|
bsd-3-clause
| 1,335
|
import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
    """DefectDojo parser for Meterian JSON security report files."""

    def get_scan_types(self):
        return ["Meterian Scan"]

    def get_label_for_scan_types(self, scan_type):
        return scan_type

    def get_description_for_scan_types(self, scan_type):
        return "Meterian JSON report output file can be imported."

    def get_findings(self, report, test):
        """Parse *report* (an open JSON file) into a list of Findings.

        Raises ValueError if the report does not contain security reports.
        """
        findings = []
        report_json = json.load(report)
        security_reports = self.get_security_reports(report_json)
        scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
        for single_security_report in security_reports:
            findings += self.do_get_findings(single_security_report, scan_date, test)
        return findings

    def get_security_reports(self, report_json):
        """Return the per-language security reports from the parsed JSON.

        Raises ValueError when the nested structure is missing.
        """
        if "reports" in report_json:
            if "security" in report_json["reports"]:
                if "reports" in report_json["reports"]["security"]:
                    return report_json["reports"]["security"]["reports"]
        raise ValueError("Malformed report: the security reports are missing.")

    def do_get_findings(self, single_security_report, scan_date, test):
        """Build one Finding per advisory in a single language report."""
        findings = []
        language = single_security_report["language"]
        for dependency_report in single_security_report["reports"]:

            lib_name = dependency_report["dependency"]["name"]
            lib_ver = dependency_report["dependency"]["version"]
            finding_title = lib_name + ":" + lib_ver
            for advisory in dependency_report["advices"]:
                severity = self.get_severity(advisory)
                finding = Finding(
                    title=finding_title,
                    date=scan_date,
                    test=test,
                    severity=severity,
                    severity_justification="Issue severity of: **" + severity + "** from a base " +
                    "CVSS score of: **" + str(advisory.get('cvss')) + "**",
                    description=advisory['description'],
                    component_name=lib_name,
                    component_version=lib_ver,
                    false_p=False,
                    duplicate=False,
                    out_of_scope=False,
                    impact=severity,
                    static_finding=True,
                    dynamic_finding=False,
                    file_path="Manifest file",
                    unique_id_from_tool=advisory['id'],
                    tags=[language]
                )

                # "N/A" is Meterian's placeholder for advisories without a CVE.
                if 'cve' in advisory:
                    if "N/A" != advisory["cve"]:
                        finding.cve = advisory["cve"]

                if "cwe" in advisory:
                    finding.cwe = int(advisory["cwe"].replace("CWE-", ""))

                mitigation_msg = "## Remediation\n"
                safe_versions = dependency_report["safeVersions"]
                if "latestPatch" in safe_versions:
                    mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
                elif "latestMinor" in safe_versions:
                    mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
                elif "latestMajor" in safe_versions:
                    mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
                else:
                    mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
                finding.mitigation = mitigation_msg

                references = ""
                for link in advisory["links"]:
                    ref_link = self.get_reference_url(link)
                    if ref_link is not None:
                        references += "- " + ref_link + "\n"
                if references != "":
                    finding.references = references

                findings.append(finding)

        return findings

    def get_severity(self, advisory):
        """Map an advisory to a DefectDojo severity label.

        Follows the CVSS rating scale per
        https://nvd.nist.gov/vuln-metrics/cvss (Low 0.1-3.9, Medium
        4.0-6.9, High 7.0-8.9, Critical 9.0-10.0).
        """
        if 'cvss' in advisory:
            cvss = advisory['cvss']
            # Use open-ended upper bounds: the previous chained range checks
            # (<= 3.9, >= 4.0 and <= 6.9, ...) let scores in the gaps
            # (e.g. 3.95, 6.95, 8.95) fall through to "Critical".
            if cvss < 4.0:
                severity = "Low"
            elif cvss < 7.0:
                severity = "Medium"
            elif cvss < 9.0:
                severity = "High"
            else:
                severity = "Critical"
        else:
            if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
                severity = "Info"
            else:
                severity = advisory["severity"].title()

        return severity

    def get_reference_url(self, link_obj):
        """Expand CVE/NVD link identifiers into full reference URLs."""
        url = link_obj["url"]
        if link_obj["type"] == "CVE":
            url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
        elif link_obj["type"] == "NVD":
            url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]

        return url
|
rackerlabs/django-DefectDojo
|
dojo/tools/meterian/parser.py
|
Python
|
bsd-3-clause
| 5,223
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import time

output = sys.argv[1]
persistoutput = "%s.persist" % sys.argv[1]

# Read the persisted count; default to 0 on the first run (file missing).
# Catch only I/O errors: the previous bare `except:` also swallowed
# SystemExit/KeyboardInterrupt.
count = 0
try:
    with open(persistoutput, 'r') as persist_file:
        count = persist_file.read()
except (IOError, OSError):
    pass
count = int(count) + 1

# Optional second argument caps the counter.
if len(sys.argv) > 2:
    max_count = int(sys.argv[2])
    if count > max_count:
        count = max_count

# Read the previously written count (0 if the output does not exist yet).
oldcount = 0
try:
    with open(output, 'r') as old_file:
        oldcount = old_file.read()
except (IOError, OSError):
    pass

# Save the count in a file that is undeclared, and thus hidden, to gyp. We need
# to do this because, prior to running commands, some build systems deletes
# any declared outputs, so we would lose our count if we just wrote to the
# given output file.
with open(persistoutput, 'w') as persist_file:
    persist_file.write('%d' % (count))

# Only write the given output file if the count has changed.
if int(oldcount) != count:
    with open(output, 'w') as out_file:
        out_file.write('%d' % (count))
    # Sleep so the next run changes the file time sufficiently to make the build
    # detect the file as changed.
    time.sleep(1)

sys.exit(0)
|
Jet-Streaming/gyp
|
test/actions/src/subdir1/counter.py
|
Python
|
bsd-3-clause
| 1,151
|
from setuptools import setup, find_packages
import os
import cms
# Trove classifiers for PyPI. The 'Framework :: Django' entry previously
# appeared twice; each classifier is now listed exactly once, grouped by
# category.
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Environment :: Web Environment',
    'Framework :: Django',
    'Framework :: Django :: 1.8',
    'Framework :: Django :: 1.9',
    'Framework :: Django :: 1.10',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Topic :: Software Development',
    'Topic :: Software Development :: Libraries :: Application Frameworks',
]
# Runtime dependencies. Django is pinned below 1.11 to match the
# supported framework versions declared in the classifiers (1.8-1.10).
INSTALL_REQUIREMENTS = [
    'Django>=1.8,<1.11',
    'django-classy-tags>=0.7.2',
    'django-formtools>=1.0',
    'django-treebeard>=4.0.1',
    'django-sekizai>=0.7',
    'djangocms-admin-style>=1.0',
]
def _read_long_description():
    """Return the README contents for PyPI, closing the file handle
    (previously the handle from open() was never closed)."""
    readme = os.path.join(os.path.dirname(__file__), 'README.rst')
    with open(readme) as f:
        return f.read()


setup(
    author='Patrick Lauber',
    author_email='digi@treepy.com',
    name='django-cms',
    # Local build suffix appended to the upstream CMS version.
    version=cms.__version__ + "+nimbis.1",
    description='An Advanced Django CMS',
    long_description=_read_long_description(),
    url='https://www.django-cms.org/',
    license='BSD License',
    platforms=['OS Independent'],
    classifiers=CLASSIFIERS,
    install_requires=INSTALL_REQUIREMENTS,
    packages=find_packages(exclude=['project', 'project.*']),
    include_package_data=True,
    zip_safe=False,
    test_suite='runtests.main',
)
|
nimbis/django-cms
|
setup.py
|
Python
|
bsd-3-clause
| 1,639
|
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import numpy as np
import hftools.dataset.arrayobj as aobj
import hftools.networks.spar_functions as spfun
from hftools.testing import TestCase
basepath = os.path.split(__file__)[0]
def make_array(a):
    """Wrap *a* in an hfarray with a frequency sweep and 2x2 matrix dims."""
    sweep_dims = (aobj.DimSweep("f", len(a)),
                  aobj.DimMatrix_i("i", 2),
                  aobj.DimMatrix_j("j", 2))
    return aobj.hfarray(a, dims=sweep_dims)
class Test_cascade(TestCase):
    """Tests for spfun.cascadeS on simple 2x2 S-parameter fixtures."""

    def setUp(self):
        self.a = make_array([[[0, 1], [1, 0j]]])
        self.b = make_array([[[0, 2], [2, 0j]]])
        self.c = make_array([[[0.1, 0j], [0, 0.1]]])

    def test_cascade_1(self):
        result = spfun.cascadeS(self.a, self.a)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_2(self):
        result = spfun.cascadeS(self.a, self.b)
        self.assertTrue(np.allclose(result, self.b))

    def test_cascade_3(self):
        result = spfun.cascadeS(self.b, self.b)
        self.assertTrue(np.allclose(result, self.a * 4))

    def test_cascade_4(self):
        result = spfun.cascadeS(self.a, self.c)
        self.assertTrue(np.allclose(result, self.c))

    def test_cascade_5(self):
        result = spfun.cascadeS(self.b, self.c)
        expected = make_array([[[0.4, 0j], [0, 0.1]]])
        self.assertTrue(np.allclose(result, expected))

    def test_cascade_6(self):
        result = spfun.cascadeS(self.c, self.b)
        expected = make_array([[[0.1, 0j], [0, 0.4]]])
        self.assertTrue(np.allclose(result, expected))
class Test_deembedleft(TestCase):
    """Tests for spfun.deembedleft on simple 2x2 S-parameter fixtures."""

    def setUp(self):
        self.a = make_array([[[0, 1], [1, 0j]]])
        self.b = make_array([[[0, 2], [2, 0j]]])
        self.c = make_array([[[0.1, 0j], [0, 0.1]]])

    def test_cascade_1(self):
        result = spfun.deembedleft(self.a, self.a)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_2(self):
        result = spfun.deembedleft(self.b, self.b)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_3(self):
        result = spfun.deembedleft(self.b, self.c)
        expected = make_array([[[0.025, 0j], [0, 0.1]]])
        self.assertTrue(np.allclose(result, expected))
class Test_deembedright(TestCase):
    """Tests for spfun.deembedright on simple 2x2 S-parameter fixtures."""

    def setUp(self):
        self.a = make_array([[[0, 1], [1, 0j]]])
        self.b = make_array([[[0, 2], [2, 0j]]])
        self.c = make_array([[[0.1, 0j], [0, 0.1]]])

    def test_cascade_1(self):
        result = spfun.deembedright(self.a, self.a)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_2(self):
        result = spfun.deembedright(self.b, self.b)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_3(self):
        result = spfun.deembedright(self.c, self.b)
        expected = make_array([[[0.1, 0j], [0, 0.025]]])
        self.assertTrue(np.allclose(result, expected))
class Test_deembed(TestCase):
    """Tests for spfun.deembed (both-sided) on 2x2 S-parameter fixtures."""

    def setUp(self):
        self.a = make_array([[[0, 1], [1, 0j]]])
        self.b = make_array([[[0, 2], [2, 0j]]])
        self.c = make_array([[[0.1, 0j], [0, 0.1]]])

    def test_cascade_1(self):
        result = spfun.deembed(self.a, self.a, self.a)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_2(self):
        result = spfun.deembed(self.b, self.b, self.a)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_3(self):
        result = spfun.deembed(self.a, self.b, self.b)
        self.assertTrue(np.allclose(result, self.a))

    def test_cascade_4(self):
        result = spfun.deembed(self.b, self.c, self.b)
        self.assertAllclose(result, make_array([[[0.025, 0j], [0, 0.025]]]))
|
hftools/hftools
|
hftools/networks/tests/test_spar_functions.py
|
Python
|
bsd-3-clause
| 3,696
|
import os
from setuptools import setup, find_packages
def read(fname):
    """Return the contents of *fname*, resolved relative to this file."""
    path = os.path.join(os.path.dirname(__file__), fname)
    return open(path).read()
setup(
    name = "django-pressroom",
    version = "0.4.2.1",
    url = 'https://github.com/petry/django-pressroom',
    license = 'BSD',
    description = "A pressroom application for django.",
    author = 'Justin Driscoll, Michael Thornhill, Marcos Daniel Petry',
    author_email = 'marcospetry@gmail.com',
    packages = find_packages('src'),
    package_dir = {'': 'src'},
    install_requires = ['setuptools', 'django-photologue'],
    dependency_links = [
        'http://github.com/petry/django-photologue/tarball/master#egg=django-photologue',
    ],
    classifiers = [
        # Fixed: '4.2 - Beta' is not a valid trove classifier (PyPI only
        # accepts 'Development Status :: 4 - Beta'), and invalid
        # classifiers cause package uploads to be rejected.
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP',
    ]
)
|
petry/django-pressroom-googlecode
|
setup.py
|
Python
|
bsd-3-clause
| 1,034
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import getopt
try:
import pyclamd
HAVE_CLAMD = True
except ImportError:
HAVE_CLAMD = False
from viper.common.out import *
from viper.common.abstracts import Module
from viper.core.session import __sessions__
class ClamAV(Module):
    """Viper module that scans the current session file with a local
    ClamAV daemon over a Unix socket."""

    cmd = 'clamav'
    description = 'Scan file from local ClamAV daemon'
    authors = ['neriberto']

    def run(self):

        def usage():
            self.log('', "usage: clamav [-h] [-s]")

        def help():
            usage()
            self.log('', "")
            self.log('', "Options:")
            self.log('', "\t--help (-h)\tShow this help message")
            self.log('', "\t--socket(-s)\tSpecify an unix socket (default: Clamd Unix Socket)")
            self.log('', "")

        if not HAVE_CLAMD:
            # Fixed: the message previously told users to install
            # 'requests', but the missing dependency is pyclamd.
            self.log('error', "Missing dependency, install pyclamd (`pip install pyclamd`)")
            return

        try:
            opts, argv = getopt.getopt(self.args, 'hs:', ['help', 'socket='])
        except getopt.GetoptError as e:
            self.log('', e)
            usage()
            return

        daemon = None
        socket = None

        for opt, value in opts:
            if opt in ('-h', '--help'):
                help()
                return
            elif opt in ('-s', '--socket'):
                self.log('info', "Using socket {0} to connect to ClamAV daemon".format(value))
                socket = value
                try:
                    daemon = pyclamd.ClamdUnixSocket(socket)
                except Exception as e:
                    self.log('error', "Daemon connection failure, {0}".format(e))
                    return

        if not __sessions__.is_set():
            self.log('error', "No session opened")
            return

        # Fall back to the default socket when -s was not given.
        try:
            if not daemon:
                daemon = pyclamd.ClamdUnixSocket()
                socket = 'Clamav'
        except Exception as e:
            self.log('error', "Daemon connection failure, {0}".format(e))
            return

        try:
            if daemon.ping():
                results = daemon.scan_file(__sessions__.current.file.path)
            else:
                self.log('error', "Unable to connect to the daemon")
                # Fixed: previously execution fell through here and hit a
                # NameError on the unbound 'results' below.
                return
        except Exception as e:
            self.log('error', "Unable to scan with antivirus daemon, {0}".format(e))
            return

        found = None
        name = 'not found'

        if results:
            for item in results:
                found = results[item][0]
                name = results[item][1]

        if found == 'ERROR':
            self.log('error', "Check permissions of the binary folder, {0}".format(name))
        else:
            self.log('info', "Daemon {0} returns: {1}".format(socket, name))
|
LMSlay/wiper
|
modules/clamav.py
|
Python
|
bsd-3-clause
| 2,875
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
class Migration(DataMigration):
# Flag to indicate if this migration is too risky
# to run online and needs to be coordinated for offline
is_dangerous = False
def forwards(self, orm):
db.commit_transaction()
try:
self._forwards(orm)
except Exception:
db.start_transaction()
raise
db.start_transaction()
def _forwards(self, orm):
idps = orm.IdentityProvider.objects.filter(external_id=None)
for r in RangeQuerySetWrapperWithProgressBar(idps):
try:
# Limit to one integration. It *is possible* that multiple
# integrations could have been configured and only one identity
# provider would have been setup, but at the time of writing
# this, in getsentry, there are no cases like such.
integration = orm.Integration.objects.filter(organizations=r.organization_id)[0]
except IndexError:
# Identity provider exists without an external_id. Nothing we
# can do to determine the external ID.
continue
orm.IdentityProvider.objects.filter(id=r.id, external_id=None).update(external_id=integration.external_id)
def backwards(self, orm):
"Write your backwards methods here."
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apiapplication': {
'Meta': {'object_name': 'ApiApplication'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "'7659033185d64a589638801d051ec5fa77cdbd017cbe4b6a84bea5d474b31e70'", 'unique': 'True', 'max_length': '64'}),
'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'34ea9b3a3a1f4840a604ed94e65dd7538e9eee68b1f64942a5afad91a8e8a0d1'"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Ideal Monkfish'", 'max_length': '64', 'blank': 'True'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.apiauthorization': {
'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apigrant': {
'Meta': {'object_name': 'ApiGrant'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
'code': ('django.db.models.fields.CharField', [], {'default': "'70dc648487874d2386767832f2c4185a'", 'max_length': '64', 'db_index': 'True'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 4, 25, 0, 0)', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 5, 25, 0, 0)', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'084bec36d9554d97a2fd071c29ff3f996df84065b4084507a692308b55fd5cf6'", 'max_length': '64', 'unique': 'True', 'null': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'0dcfeaf9276d4bc8b0f78e24f6c17dd3fe957f3951e041528c2ac3d1f15c0d59'", 'unique': 'True', 'max_length': '64'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.assistantactivity': {
'Meta': {'unique_together': "(('user', 'guide_id'),)", 'object_name': 'AssistantActivity', 'db_table': "'sentry_assistant_activity'"},
'dismissed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'guide_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'useful': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'viewed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authenticator': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 5, 2, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.commit': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.commitfilechange': {
'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sentry.counter': {
'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deletedorganization': {
'Meta': {'object_name': 'DeletedOrganization'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deletedproject': {
'Meta': {'object_name': 'DeletedProject'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deletedteam': {
'Meta': {'object_name': 'DeletedTeam'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deploy': {
'Meta': {'object_name': 'Deploy'},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.distribution': {
'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.dsymapp': {
'Meta': {'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp'},
'app_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'sync_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
},
'sentry.email': {
'Meta': {'object_name': 'Email'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.environment': {
'Meta': {'unique_together': "(('organization_id', 'name'),)", 'object_name': 'Environment'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
},
'sentry.environmentproject': {
'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_hidden': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
},
'sentry.eventtag': {
'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('group_id', 'key_id', 'value_id'),)"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.featureadoption': {
'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption'},
'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'db_index': 'True'}),
'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.fileblobowner': {
'Meta': {'unique_together': "(('blob', 'organization'),)", 'object_name': 'FileBlobOwner'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.Team']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcommitresolution': {
'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.groupenvironment': {
'Meta': {'unique_together': "[('group_id', 'environment_id')]", 'object_name': 'GroupEnvironment', 'index_together': "[('environment_id', 'first_release_id')]"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'first_release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouplink': {
'Meta': {'unique_together': "(('group_id', 'linked_type', 'linked_id'),)", 'object_name': 'GroupLink'},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'linked_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'linked_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'relationship': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '2'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {'object_name': 'GroupRedirect'},
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
},
'sentry.grouprelease': {
'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupshare': {
'Meta': {'object_name': 'GroupShare'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'f885826653534a0dbb3f15d14cf5ea18'", 'unique': 'True', 'max_length': '32'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'state': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.groupsubscription': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey'},
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptombstone': {
'Meta': {'object_name': 'GroupTombstone'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.identity': {
'Meta': {'unique_together': "(('idp', 'external_id'), ('idp', 'user'))", 'object_name': 'Identity'},
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'idp': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.IdentityProvider']"}),
'scopes': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.identityprovider': {
'Meta': {'unique_together': "(('type', 'organization'), ('type', 'organization', 'external_id'))", 'object_name': 'IdentityProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.integration': {
'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.latestrelease': {
'Meta': {'unique_together': "(('repository_id', 'environment_id'),)", 'object_name': 'LatestRelease'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'deploy_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationavatar': {
'Meta': {'object_name': 'OrganizationAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
},
'sentry.organizationintegration': {
'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationonboardingtask': {
'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'sentry.project': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectTeam']", 'to': "orm['sentry.Team']"})
},
'sentry.projectavatar': {
'Meta': {'object_name': 'ProjectAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Project']"})
},
'sentry.projectbookmark': {
'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.projectdsymfile': {
'Meta': {'unique_together': "(('project', 'debug_id'),)", 'object_name': 'ProjectDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'debug_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_column': "'uuid'"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.projectintegration': {
'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectownership': {
'Meta': {'object_name': 'ProjectOwnership'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'fallthrough': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'raw': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'schema': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'})
},
'sentry.projectplatform': {
'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.projectredirect': {
'Meta': {'unique_together': "(('organization', 'redirect_slug'),)", 'object_name': 'ProjectRedirect'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'redirect_slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'sentry.projectsymcachefile': {
'Meta': {'unique_together': "(('project', 'dsym_file'),)", 'object_name': 'ProjectSymCacheFile'},
'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.projectteam': {
'Meta': {'unique_together': "(('project', 'team'),)", 'object_name': 'ProjectTeam'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.pullrequest': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'PullRequest', 'db_table': "'sentry_pull_request'", 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'merge_commit_sha': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'title': ('django.db.models.fields.TextField', [], {'null': 'True'})
},
'sentry.pullrequestcommit': {
'Meta': {'unique_together': "(('pull_request', 'commit'),)", 'object_name': 'PullRequestCommit', 'db_table': "'sentry_pullrequest_commit'"},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'pull_request': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.PullRequest']"})
},
'sentry.rawevent': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.release': {
'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'sentry.releasecommit': {
'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseenvironment': {
'Meta': {'unique_together': "(('organization_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseheadcommit': {
'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseprojectenvironment': {
'Meta': {'unique_together': "(('project', 'release', 'environment'),)", 'object_name': 'ReleaseProjectEnvironment'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'new_issues_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.repository': {
'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
'config': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.reprocessingreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.scheduleddeletion': {
'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion'},
'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 5, 25, 0, 0)'}),
'guid': ('django.db.models.fields.CharField', [], {'default': "'86ade1aaec63437996db26adf4f1d34d'", 'unique': 'True', 'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.scheduledjob': {
'Meta': {'object_name': 'ScheduledJob'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'payload': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'})
},
'sentry.servicehook': {
'Meta': {'object_name': 'ServiceHook'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'events': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'fd82a667ea0640c88d9e0f9345e7cd8f116b39ef7e674f51a854c113921d7229'"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'}),
'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'", 'index_together': "(('project_id', 'key', 'last_seen'),)"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.teamavatar': {
'Meta': {'object_name': 'TeamAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Team']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useravatar': {
'Meta': {'object_name': 'UserAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
},
'sentry.useremail': {
'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'1dTZ4lJyYtyCetHSmDSNzoU8M5VgoXRJ'", 'max_length': '32'})
},
'sentry.userip': {
'Meta': {'unique_together': "(('user', 'ip_address'),)", 'object_name': 'UserIP'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userpermission': {
'Meta': {'unique_together': "(('user', 'permission'),)", 'object_name': 'UserPermission'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'permission': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.userreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']", 'null': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.versiondsymfile': {
'Meta': {'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile'},
'build': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dsym_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymApp']"}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '32'})
}
}
complete_apps = ['sentry']
symmetrical = True
|
mvaled/sentry
|
src/sentry/south_migrations/0408_identity_provider_external_id.py
|
Python
|
bsd-3-clause
| 105,752
|
# Import the Evernote client
from evernote.api.client import EvernoteClient
# Import the Evernote note storetypes to get note datatypes
# to properly get note/tag counts (note filter)
import evernote.edam.notestore.ttypes as NoteStoreTypes
# Define access token either:
# Developer Tokens (https://dev.evernote.com/doc/articles/dev_tokens.php)
# or OAuth (https://dev.evernote.com/doc/articles/authentication.php)
access_token = "insert dev or oauth token here"
# Setup the client
client = EvernoteClient(token = access_token, sandbox = True)
# Get note store object
note_store = client.get_note_store()
# Create note filter object
note_filter = NoteStoreTypes.NoteFilter()
# Set note filter search grammer to get notes created in the last 2 days
note_filter.words = "created:day-2"
# Uncommend the following line to set note filter tag GUIDs
#note_filter.tagGuids = ["GUID of tag1", "GUID of tag 2", "...."]
# Set note filter order to descending
note_filter.ascending = False
# Set note filter inative attribute to False (will search only active notes)
# setting this value to True will only return search results that are in the trash
note_filter.inactive = False
# Uncomment the following line to set note time zone of the search to 'America/Los_Angeles'
#note_filter.timeZone = "America/Los_Angeles"
# Uncomment the following line to set note filter emphasized attribute to additional
# 'wish list' search grammer to be used in conjunction with the orinigal search query to
# highlight search results
#note_filter.emphasized = "any: tag:cool -tag:uncool"
# Uncomment the following line to set note filter includeAllReadableNotebooks attribute
# to include all readable business notebooks in a search
# search must be performed on a business note store with a business auth token
#note_filter.includeAllReadableNotebooks=True
# (Boolean) Include note/tags that are in the trash in your note counts
include_trash = True
# Returns an object which maps the number of notes captured by the filter to the corresponding
# notebook GUID
note_counts = note_store.findNoteCounts( note_filter, include_trash )
if note_counts.notebookCounts != None:
print "Found results from %s notebooks" % len(note_counts.notebookCounts)
for notebook in note_counts.notebookCounts:
print " Notebook with GUID %s has %s note(s) that match the filter" % (notebook, note_counts.notebookCounts[notebook])
if note_counts.tagCounts != None:
print "Found results from %s tags" % len(note_counts.notebookCounts)
for tag in note_counts.tagCounts:
print " Tag with GUID %s has %s note(s) that match the filter" % (tag, note_counts.tagCounts[tag])
if not note_counts.tagCounts and not note_counts.notebookCounts:
print "No results"
|
matthewayne/evernote-sdk-python
|
sample/all_methods/findNoteCounts.py
|
Python
|
bsd-3-clause
| 2,735
|
import datetime
import re
from django.http import HttpResponse
from django.utils.http import urlencode
import smsgateway
from smsgateway.models import SMS
from smsgateway.backends.base import SMSBackend
from smsgateway.utils import check_cell_phone_number
class MobileWebBackend(SMSBackend):
    """SMS backend for the mobileweb.be HTTP gateway.

    Builds the outgoing send URL for the gateway and parses its
    incoming-message callbacks into ``SMS`` records, optionally sending
    an automatic reply.
    """

    def get_send_url(self, sms_request, account_dict):
        """Return the gateway URL that sends *sms_request* using the
        credentials in *account_dict* (keys: username, password, sid)."""
        # The gateway expects latin-1 encoded content; keep the original
        # text when the message cannot be represented in that charset.
        msg = sms_request.msg
        try:
            msg = msg.encode('latin-1')
        except (UnicodeDecodeError, UnicodeEncodeError):
            pass
        querystring = urlencode({
            'login': account_dict['username'],
            'pass': account_dict['password'],
            # Drops the first character of the recipient number —
            # presumably a '+' or leading-zero prefix; confirm with gateway docs.
            'gsmnr': sms_request.to[0][1:],
            'sid': account_dict['sid'],
            'msgcontent': msg,
        })
        return u'http://gateway.mobileweb.be/smsin.asp?%s' % querystring

    def validate_send_result(self, result):
        """Return True when the gateway response reports the message as accepted."""
        return 'accepted' in result

    def handle_incoming(self, request, reply_using=None):
        """Handle an incoming-SMS callback request from the gateway.

        Deduplicates on the gateway MessageID, stores the message, and —
        when ``process_incoming`` produces a reply — sends it back through
        the account named by *reply_using* (queueing it on failure).
        Returns an ``HttpResponse`` acknowledging the callback.
        """
        request_dict = request.POST if request.method == 'POST' else request.GET
        # Requests without a SendDateTime are not message callbacks; ignore them.
        if 'SendDateTime' not in request_dict:
            return HttpResponse('')
        # Skip messages we have already received (the gateway may retry delivery).
        if SMS.objects.filter(gateway_ref=request_dict['MessageID']).exists():
            return HttpResponse('OK')
        # Parse and process the message; SendDateTime carries seven numeric
        # fields, of which the trailing milliseconds value is unused.
        year, month, day, hour, minute, second, ms = map(int, re.findall(r'(\d+)', request_dict['SendDateTime']))
        sms_dict = {
            'sent': datetime.datetime(year, month, day, hour, minute, second),
            'content': request_dict['MsgeContent'],
            'sender': check_cell_phone_number(request_dict['SenderGSMNR']),
            'to': request_dict['ShortCode'],
            'operator': int(request_dict['Operator']),
            'gateway_ref': request_dict['MessageID'],
            'backend': self.get_slug(),
        }
        sms = SMS(**sms_dict)
        response = self.process_incoming(request, sms)
        # If necessary, send the response SMS back to the sender.
        if response is not None:
            signature = smsgateway.get_account(reply_using)['reply_signature']
            success = smsgateway.send([sms.sender], response, signature, using=reply_using)
            # Sending failed; queue the SMS for a later retry.
            if not success:
                smsgateway.send_queued(sms.sender, response, signature, reply_using)
            return HttpResponse(response)
        return HttpResponse('OK')

    def get_slug(self):
        """Return the identifier under which this backend is registered."""
        return 'mobileweb'

    def get_url_capacity(self):
        """Return how many recipients a single send URL can address."""
        return 1
|
peterayeni/django-smsgateway
|
smsgateway/backends/mobileweb.py
|
Python
|
bsd-3-clause
| 2,600
|
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'ordre Virer."""
from secondaires.navigation.equipage.signaux import *
from ..ordre import *
class Virer(Ordre):
    """Order 'virer' (turn).

    This order asks the sailor to turn the ship to port or starboard as
    needed, until it faces a given heading. The targeted sailor must be
    holding the helm (state 'tenir_gouvernail').
    """
    cle = "virer"
    etats_autorises = ("tenir_gouvernail", )
    def __init__(self, matelot, navire, direction=0):
        # direction: wanted heading in degrees.
        Ordre.__init__(self, matelot, navire, direction)
        self.direction = direction
    def executer(self):
        """Execute the order: steer the ship toward self.direction.

        Generator-based order: yields signals (SignalInutile,
        SignalTermine, SignalRepete) consumed by the crew scheduler.
        """
        navire = self.navire
        matelot = self.matelot
        personnage = matelot.personnage
        salle = personnage.salle
        direction = self.direction
        nav_direction = navire.direction.direction
        # No rudder in this room: nothing to do.
        if not hasattr(salle, "gouvernail") or salle.gouvernail is None:
            return
        gouvernail = salle.gouvernail
        if gouvernail.tenu is not personnage:
            yield SignalInutile("je ne tiens pas ce gouvernail")
        else:
            # Angular distance to the goal turning port vs. starboard.
            par_babord = (nav_direction - direction) % 360
            par_tribord = (direction - nav_direction) % 360
            if par_tribord < par_babord:
                cote = 1
            else:
                cote = -1
            # Adjust the rudder inclination if necessary.
            direction_actuelle = round(nav_direction)
            direction_voulue = round(direction)
            diff = (direction_voulue - direction_actuelle) % 360
            if diff > 180:
                diff = 360 - diff
            if diff == 0:
                # Already on heading: center the rudder and report done.
                # NOTE(review): if the scheduler ever resumed this
                # generator after SignalTermine, 'orientation' below
                # would be unbound — presumably it never does; confirm.
                gouvernail.centrer(personnage)
                yield SignalTermine()
            elif diff < 5:
                orientation = 1
            elif diff < 15:
                orientation = 3
            else:
                orientation = 5
            # Turn the rudder toward the chosen side (cote == -1 is
            # port, cote == 1 is starboard) if not already there.
            if gouvernail.orientation != cote * orientation:
                if cote == -1:
                    gouvernail.virer_babord(personnage, orientation, True)
                else:
                    gouvernail.virer_tribord(personnage, orientation, True)
            yield SignalRepete(1)
|
stormi/tsunami
|
src/secondaires/navigation/equipage/ordres/virer.py
|
Python
|
bsd-3-clause
| 3,807
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Handling of the <message> element.
'''
from __future__ import print_function
import re
import six
from grit.node import base
from grit import clique
from grit import exception
from grit import lazy_re
from grit import tclib
from grit import util
# Matches exactly three dots ending a line or followed by whitespace.
_ELLIPSIS_PATTERN = lazy_re.compile(r'(?<!\.)\.\.\.(?=$|\s)')
_ELLIPSIS_SYMBOL = u'\u2026'  # Ellipsis
# Finds whitespace at the start and end of a string which can be multiline.
_WHITESPACE = lazy_re.compile(r'(?P<start>\s*)(?P<body>.+?)(?P<end>\s*)\Z',
                              re.DOTALL | re.MULTILINE)
# <ph> placeholder elements should contain the special character formatters
# used to format <ph> element content.
# Android format.
_ANDROID_FORMAT = (r'%[1-9]+\$'
                   r'([-#+ 0,(]*)([0-9]+)?(\.[0-9]+)?'
                   r'([bBhHsScCdoxXeEfgGaAtT%n])')
# Chrome l10n format.
_CHROME_FORMAT = r'\$+\d'
# Windows EWT numeric and GRIT %s %d formats.
_OTHER_FORMAT = r'%[0-9sd]'
# Finds formatters that must be in a placeholder (<ph>) element:
# any of the three alternatives above, each in its own capture group.
_FORMATTERS = lazy_re.compile(
    '(%s)|(%s)|(%s)' % (_ANDROID_FORMAT, _CHROME_FORMAT, _OTHER_FORMAT))
# Error messages printed before raising in MessageNode.EndParsing.
_BAD_PLACEHOLDER_MSG = ('ERROR: Placeholder formatter found outside of <ph> '
                        'tag in message "%s" in %s.')
_INVALID_PH_CHAR_MSG = ('ERROR: Invalid format characters found in message '
                        '"%s" <ph> tag in %s.')
# Finds HTML tag tokens.
_HTMLTOKEN = lazy_re.compile(r'<[/]?[a-z][a-z0-9]*[^>]*>', re.I)
# Finds HTML entities.
_HTMLENTITY = lazy_re.compile(r'&[^\s]*;')
class MessageNode(base.ContentNode):
  '''A <message> element.'''
  # For splitting a list of things that can be separated by commas or
  # whitespace
  _SPLIT_RE = lazy_re.compile(r'\s*,\s*|\s+')
  def __init__(self):
    super(MessageNode, self).__init__()
    # Valid after EndParsing, this is the MessageClique that contains the
    # source message and any translations of it that have been loaded.
    self.clique = None
    # We don't send leading and trailing whitespace into the translation
    # console, but rather tack it onto the source message and any
    # translations when formatting them into RC files or what have you.
    self.ws_at_start = ''  # Any whitespace characters at the start of the text
    self.ws_at_end = ''  # --"-- at the end of the text
    # A list of "shortcut groups" this message is in.  We check to make sure
    # that shortcut keys (e.g. &J) within each shortcut group are unique.
    self.shortcut_groups_ = []
    # Formatter-specific data used to control the output of individual strings.
    # formatter_data is a space separated list of C preprocessor-style
    # definitions.  Names without values are given the empty string value.
    # Example: "foo=5 bar baz=100"
    self.formatter_data = {}
    # Whether or not to convert ... -> U+2026 within Translate().
    self._replace_ellipsis = False
  def _IsValidChild(self, child):
    # Only <ph> placeholder elements may appear inside a <message>.
    return isinstance(child, (PhNode))
  def _IsValidAttribute(self, name, value):
    # Restrict attributes to the known set; 'translateable' and
    # 'sub_variable' must be the strings 'true' or 'false'.
    if name not in ['name', 'offset', 'translateable', 'desc', 'meaning',
                    'internal_comment', 'shortcut_groups', 'custom_type',
                    'validation_expr', 'use_name_for_id', 'sub_variable',
                    'formatter_data']:
      return False
    if (name in ('translateable', 'sub_variable') and
        value not in ['true', 'false']):
      return False
    return True
  def SetReplaceEllipsis(self, value):
    r'''Sets whether to replace ... with \u2026.
    '''
    self._replace_ellipsis = value
  def MandatoryAttributes(self):
    # Either a 'name' or an 'offset' attribute is required.
    return ['name|offset']
  def DefaultAttributes(self):
    # Default values for all optional attributes.
    return {
      'custom_type' : '',
      'desc' : '',
      'formatter_data' : '',
      'internal_comment' : '',
      'meaning' : '',
      'shortcut_groups' : '',
      'sub_variable' : 'false',
      'translateable' : 'true',
      'use_name_for_id' : 'false',
      'validation_expr' : '',
    }
  def HandleAttribute(self, attrib, value):
    # Delegate to the base handler, then additionally parse
    # 'formatter_data' into self.formatter_data.
    base.ContentNode.HandleAttribute(self, attrib, value)
    if attrib != 'formatter_data':
      return
    # Parse value, a space-separated list of defines, into a dict.
    # Example: "foo=5 bar" -> {'foo':'5', 'bar':''}
    for item in value.split():
      name, _, val = item.partition('=')
      self.formatter_data[name] = val
  def GetTextualIds(self):
    '''
    Returns the concatenation of the parent's node first_id and
    this node's offset if it has one, otherwise just call the
    superclass' implementation
    '''
    if 'offset' not in self.attrs:
      return super(MessageNode, self).GetTextualIds()
    # we search for the first grouping node in the parents' list
    # to take care of the case where the first parent is an <if> node
    grouping_parent = self.parent
    import grit.node.empty
    while grouping_parent and not isinstance(grouping_parent,
                                             grit.node.empty.GroupingNode):
      grouping_parent = grouping_parent.parent
    assert 'first_id' in grouping_parent.attrs
    return [grouping_parent.attrs['first_id'] + '_' + self.attrs['offset']]
  def IsTranslateable(self):
    # Controlled by the 'translateable' attribute (default 'true').
    return self.attrs['translateable'] == 'true'
  def EndParsing(self):
    '''Builds the tclib.Message for this node and installs its clique.'''
    super(MessageNode, self).EndParsing()
    # Make the text (including placeholder references) and list of placeholders,
    # verify placeholder formats, then strip and store leading and trailing
    # whitespace and create the tclib.Message() and a clique to contain it.
    text = ''
    placeholders = []
    for item in self.mixed_content:
      if isinstance(item, six.string_types):
        # Not a <ph> element: fail if any <ph> formatters are detected.
        if _FORMATTERS.search(item):
          print(_BAD_PLACEHOLDER_MSG % (item, self.source))
          raise exception.PlaceholderNotInsidePhNode
        text += item
      else:
        # Extract the <ph> element components.
        presentation = item.attrs['name'].upper()
        text += presentation
        ex = ' '  # <ex> example element cdata if present.
        if len(item.children):
          ex = item.children[0].GetCdata()
        original = item.GetCdata()
        # Sanity check the <ph> element content.
        cdata = original
        # Replace all HTML tag tokens in cdata.
        match = _HTMLTOKEN.search(cdata)
        while match:
          cdata = cdata.replace(match.group(0), '_')
          match = _HTMLTOKEN.search(cdata)
        # Replace all HTML entities in cdata.
        match = _HTMLENTITY.search(cdata)
        while match:
          cdata = cdata.replace(match.group(0), '_')
          match = _HTMLENTITY.search(cdata)
        # Remove first matching formatter from cdata.
        match = _FORMATTERS.search(cdata)
        if match:
          cdata = cdata.replace(match.group(0), '')
        # Fail if <ph> special chars remain in cdata.
        if re.search(r'[%\$]', cdata):
          message_id = self.attrs['name'] + ' ' + original;
          print(_INVALID_PH_CHAR_MSG % (message_id, self.source))
          raise exception.InvalidCharactersInsidePhNode
        # Otherwise, accept this <ph> placeholder.
        placeholders.append(tclib.Placeholder(presentation, original, ex))
    # Strip leading/trailing whitespace off the assembled text, keeping
    # it in ws_at_start / ws_at_end for re-attachment at output time.
    m = _WHITESPACE.match(text)
    if m:
      self.ws_at_start = m.group('start')
      self.ws_at_end = m.group('end')
      text = m.group('body')
    self.shortcut_groups_ = self._SPLIT_RE.split(self.attrs['shortcut_groups'])
    self.shortcut_groups_ = [i for i in self.shortcut_groups_ if i != '']
    description_or_id = self.attrs['desc']
    if description_or_id == '' and 'name' in self.attrs:
      description_or_id = 'ID: %s' % self.attrs['name']
    assigned_id = None
    if self.attrs['use_name_for_id'] == 'true':
      assigned_id = self.attrs['name']
    message = tclib.Message(text=text, placeholders=placeholders,
                            description=description_or_id,
                            meaning=self.attrs['meaning'],
                            assigned_id=assigned_id)
    self.InstallMessage(message)
  def InstallMessage(self, message):
    '''Sets this node's clique from a tclib.Message instance.
    Args:
      message: A tclib.Message.
    '''
    self.clique = self.UberClique().MakeClique(message, self.IsTranslateable())
    for group in self.shortcut_groups_:
      self.clique.AddToShortcutGroup(group)
    if self.attrs['custom_type'] != '':
      self.clique.SetCustomType(util.NewClassInstance(self.attrs['custom_type'],
                                                      clique.CustomType))
    elif self.attrs['validation_expr'] != '':
      self.clique.SetCustomType(
          clique.OneOffCustomType(self.attrs['validation_expr']))
  def SubstituteMessages(self, substituter):
    '''Applies substitution to this message.
    Args:
      substituter: a grit.util.Substituter object.
    '''
    message = substituter.SubstituteMessage(self.clique.GetMessage())
    if message is not self.clique.GetMessage():
      self.InstallMessage(message)
  def GetCliques(self):
    # The single clique built by EndParsing, if any.
    return [self.clique] if self.clique else []
  def Translate(self, lang):
    '''Returns a translated version of this message.
    '''
    assert self.clique
    msg = self.clique.MessageForLanguage(lang,
                                         self.PseudoIsAllowed(),
                                         self.ShouldFallbackToEnglish()
                                         ).GetRealContent()
    if self._replace_ellipsis:
      msg = _ELLIPSIS_PATTERN.sub(_ELLIPSIS_SYMBOL, msg)
    # Always remove all byte order marks (\uFEFF) https://crbug.com/1033305
    msg = msg.replace(u'\uFEFF','')
    return msg.replace('[GRITLANGCODE]', lang)
  def NameOrOffset(self):
    # The 'name' attribute when present, otherwise 'offset'.
    key = 'name' if 'name' in self.attrs else 'offset'
    return self.attrs[key]
  def ExpandVariables(self):
    '''We always expand variables on Messages.'''
    return True
  def GetDataPackValue(self, lang, encoding):
    '''Returns a str represenation for a data_pack entry.'''
    # Re-attach the whitespace stripped off in EndParsing.
    message = self.ws_at_start + self.Translate(lang) + self.ws_at_end
    return util.Encode(message, encoding)
  def IsResourceMapSource(self):
    return True
  @staticmethod
  def Construct(parent, message, name, desc='', meaning='', translateable=True):
    '''Constructs a new message node that is a child of 'parent', with the
    name, desc, meaning and translateable attributes set using the same-named
    parameters and the text of the message and any placeholders taken from
    'message', which must be a tclib.Message() object.'''
    # Convert type to appropriate string
    translateable = 'true' if translateable else 'false'
    node = MessageNode()
    node.StartParsing('message', parent)
    node.HandleAttribute('name', name)
    node.HandleAttribute('desc', desc)
    node.HandleAttribute('meaning', meaning)
    node.HandleAttribute('translateable', translateable)
    items = message.GetContent()
    for ix, item in enumerate(items):
      if isinstance(item, six.string_types):
        # Ensure whitespace at front and back of message is correctly handled.
        if ix == 0:
          item = "'''" + item
        if ix == len(items) - 1:
          item = item + "'''"
        node.AppendContent(item)
      else:
        # A tclib.Placeholder: rebuild it as a <ph> child (with an
        # optional <ex> example node).
        phnode = PhNode()
        phnode.StartParsing('ph', node)
        phnode.HandleAttribute('name', item.GetPresentation())
        phnode.AppendContent(item.GetOriginal())
        if len(item.GetExample()) and item.GetExample() != ' ':
          exnode = ExNode()
          exnode.StartParsing('ex', phnode)
          exnode.AppendContent(item.GetExample())
          exnode.EndParsing()
          phnode.AddChild(exnode)
        phnode.EndParsing()
        node.AddChild(phnode)
    node.EndParsing()
    return node
class PhNode(base.ContentNode):
  '''A <ph> (placeholder) element inside a <message>.'''
  def _IsValidChild(self, child):
    '''Only <ex> example nodes may nest inside a placeholder.'''
    return isinstance(child, ExNode)
  def MandatoryAttributes(self):
    '''Every placeholder must carry a 'name' attribute.'''
    return ['name']
  def GetTextualIds(self):
    '''The 'name' attribute is not an ID, so contribute none.'''
    return []
  def EndParsing(self):
    '''Finish parsing and enforce the single-example rule.'''
    super(PhNode, self).EndParsing()
    example_count = len(self.children)
    if example_count > 1:
      raise exception.TooManyExamples()
class ExNode(base.ContentNode):
  '''An <ex> element: holds an example value for a <ph> placeholder.'''
|
endlessm/chromium-browser
|
tools/grit/grit/node/message.py
|
Python
|
bsd-3-clause
| 12,578
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'trainergui.ui'
#
# Created: Tue May 24 14:29:31 2016
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shim: use QString.fromUtf8 when the PyQt4 build exposes
# QString; otherwise pass strings through unchanged.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
# Compatibility shim for QApplication.translate: when the API exposes
# UnicodeUTF8, pass it as the extra encoding argument.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """Auto-generated (pyuic4) UI definition for the ROS Servo Trainer window.

    Do not edit by hand — regenerate from trainergui.ui instead (see the
    file header warning).
    """
    def setupUi(self, MainWindow):
        """Create all widgets, set their geometry, and wire the tab order."""
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(401, 686)
        MainWindow.setAutoFillBackground(False)
        MainWindow.setDocumentMode(False)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        # Vertical slider for the servo goal value.
        self.sliderGoal = QtGui.QSlider(self.centralwidget)
        self.sliderGoal.setGeometry(QtCore.QRect(340, 20, 31, 611))
        self.sliderGoal.setMaximum(990000)
        self.sliderGoal.setSingleStep(10000)
        self.sliderGoal.setPageStep(100000)
        self.sliderGoal.setOrientation(QtCore.Qt.Vertical)
        self.sliderGoal.setInvertedAppearance(False)
        self.sliderGoal.setInvertedControls(False)
        self.sliderGoal.setTickPosition(QtGui.QSlider.TicksBothSides)
        self.sliderGoal.setTickInterval(10000)
        self.sliderGoal.setObjectName(_fromUtf8("sliderGoal"))
        # Frame holding all labels, line edits, checkboxes and combos.
        self.frame = QtGui.QFrame(self.centralwidget)
        self.frame.setGeometry(QtCore.QRect(30, 20, 281, 611))
        self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtGui.QFrame.Raised)
        self.frame.setObjectName(_fromUtf8("frame"))
        self.txtMinPulse = QtGui.QLineEdit(self.frame)
        self.txtMinPulse.setGeometry(QtCore.QRect(110, 310, 141, 27))
        self.txtMinPulse.setObjectName(_fromUtf8("txtMinPulse"))
        self.txtMaxSensor = QtGui.QLineEdit(self.frame)
        self.txtMaxSensor.setGeometry(QtCore.QRect(110, 400, 141, 27))
        self.txtMaxSensor.setObjectName(_fromUtf8("txtMaxSensor"))
        self.label_3 = QtGui.QLabel(self.frame)
        self.label_3.setGeometry(QtCore.QRect(16, 310, 91, 21))
        self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.label_4 = QtGui.QLabel(self.frame)
        self.label_4.setGeometry(QtCore.QRect(16, 340, 91, 21))
        self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.label_5 = QtGui.QLabel(self.frame)
        self.label_5.setGeometry(QtCore.QRect(16, 370, 91, 21))
        self.label_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.label_2 = QtGui.QLabel(self.frame)
        self.label_2.setGeometry(QtCore.QRect(40, 280, 67, 21))
        self.label_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.txtMaxPulse = QtGui.QLineEdit(self.frame)
        self.txtMaxPulse.setGeometry(QtCore.QRect(110, 340, 141, 27))
        self.txtMaxPulse.setObjectName(_fromUtf8("txtMaxPulse"))
        self.chkCalibrated = QtGui.QCheckBox(self.frame)
        self.chkCalibrated.setGeometry(QtCore.QRect(30, 430, 97, 22))
        self.chkCalibrated.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.chkCalibrated.setObjectName(_fromUtf8("chkCalibrated"))
        self.txtMaxGoal = QtGui.QLineEdit(self.frame)
        self.txtMaxGoal.setGeometry(QtCore.QRect(110, 280, 141, 27))
        self.txtMaxGoal.setObjectName(_fromUtf8("txtMaxGoal"))
        self.txtMinSensor = QtGui.QLineEdit(self.frame)
        self.txtMinSensor.setGeometry(QtCore.QRect(110, 370, 141, 27))
        self.txtMinSensor.setObjectName(_fromUtf8("txtMinSensor"))
        self.label = QtGui.QLabel(self.frame)
        self.label.setGeometry(QtCore.QRect(40, 250, 67, 21))
        self.label.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label.setObjectName(_fromUtf8("label"))
        self.cmbSmoothing = QtGui.QComboBox(self.frame)
        self.cmbSmoothing.setEnabled(False)
        self.cmbSmoothing.setGeometry(QtCore.QRect(110, 200, 141, 27))
        self.cmbSmoothing.setObjectName(_fromUtf8("cmbSmoothing"))
        self.label_8 = QtGui.QLabel(self.frame)
        self.label_8.setEnabled(False)
        self.label_8.setGeometry(QtCore.QRect(16, 170, 91, 21))
        self.label_8.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.txtMaxSpeed = QtGui.QLineEdit(self.frame)
        self.txtMaxSpeed.setEnabled(False)
        self.txtMaxSpeed.setGeometry(QtCore.QRect(110, 170, 141, 27))
        self.txtMaxSpeed.setObjectName(_fromUtf8("txtMaxSpeed"))
        self.label_6 = QtGui.QLabel(self.frame)
        self.label_6.setGeometry(QtCore.QRect(16, 400, 91, 21))
        self.label_6.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.label_14 = QtGui.QLabel(self.frame)
        self.label_14.setEnabled(False)
        self.label_14.setGeometry(QtCore.QRect(10, 200, 91, 21))
        self.label_14.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_14.setObjectName(_fromUtf8("label_14"))
        self.txtMinGoal = QtGui.QLineEdit(self.frame)
        self.txtMinGoal.setGeometry(QtCore.QRect(110, 250, 141, 27))
        self.txtMinGoal.setObjectName(_fromUtf8("txtMinGoal"))
        self.label_7 = QtGui.QLabel(self.frame)
        self.label_7.setGeometry(QtCore.QRect(18, 560, 91, 21))
        self.label_7.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.txtSpeed = QtGui.QLineEdit(self.frame)
        self.txtSpeed.setEnabled(False)
        self.txtSpeed.setGeometry(QtCore.QRect(132, 520, 121, 27))
        self.txtSpeed.setObjectName(_fromUtf8("txtSpeed"))
        self.chkPower = QtGui.QCheckBox(self.frame)
        self.chkPower.setEnabled(False)
        self.chkPower.setGeometry(QtCore.QRect(30, 500, 97, 22))
        self.chkPower.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.chkPower.setText(_fromUtf8(""))
        self.chkPower.setObjectName(_fromUtf8("chkPower"))
        self.txtPosition = QtGui.QLineEdit(self.frame)
        self.txtPosition.setEnabled(False)
        self.txtPosition.setGeometry(QtCore.QRect(110, 470, 141, 27))
        self.txtPosition.setObjectName(_fromUtf8("txtPosition"))
        self.txtSensorRaw = QtGui.QLineEdit(self.frame)
        self.txtSensorRaw.setEnabled(False)
        self.txtSensorRaw.setGeometry(QtCore.QRect(112, 560, 141, 27))
        self.txtSensorRaw.setObjectName(_fromUtf8("txtSensorRaw"))
        self.label_10 = QtGui.QLabel(self.frame)
        self.label_10.setGeometry(QtCore.QRect(40, 470, 67, 21))
        self.label_10.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_10.setObjectName(_fromUtf8("label_10"))
        self.label_11 = QtGui.QLabel(self.frame)
        self.label_11.setGeometry(QtCore.QRect(10, 500, 91, 17))
        self.label_11.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_11.setObjectName(_fromUtf8("label_11"))
        self.cmbServo = QtGui.QComboBox(self.frame)
        self.cmbServo.setGeometry(QtCore.QRect(110, 50, 141, 27))
        self.cmbServo.setObjectName(_fromUtf8("cmbServo"))
        self.label_12 = QtGui.QLabel(self.frame)
        self.label_12.setGeometry(QtCore.QRect(30, 20, 71, 21))
        self.label_12.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_12.setObjectName(_fromUtf8("label_12"))
        self.chkEnabled = QtGui.QCheckBox(self.frame)
        self.chkEnabled.setGeometry(QtCore.QRect(30, 100, 97, 22))
        self.chkEnabled.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.chkEnabled.setObjectName(_fromUtf8("chkEnabled"))
        self.cmbBus = QtGui.QComboBox(self.frame)
        self.cmbBus.setGeometry(QtCore.QRect(110, 20, 141, 27))
        self.cmbBus.setFrame(True)
        self.cmbBus.setObjectName(_fromUtf8("cmbBus"))
        self.label_13 = QtGui.QLabel(self.frame)
        self.label_13.setGeometry(QtCore.QRect(30, 50, 71, 21))
        self.label_13.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_13.setObjectName(_fromUtf8("label_13"))
        self.label_9 = QtGui.QLabel(self.frame)
        self.label_9.setGeometry(QtCore.QRect(40, 130, 67, 21))
        self.label_9.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_9.setObjectName(_fromUtf8("label_9"))
        self.txtGoal = QtGui.QLineEdit(self.frame)
        self.txtGoal.setGeometry(QtCore.QRect(110, 130, 141, 27))
        self.txtGoal.setObjectName(_fromUtf8("txtGoal"))
        self.chkMoving = QtGui.QCheckBox(self.frame)
        self.chkMoving.setEnabled(False)
        self.chkMoving.setGeometry(QtCore.QRect(30, 520, 97, 22))
        self.chkMoving.setLayoutDirection(QtCore.Qt.RightToLeft)
        self.chkMoving.setText(_fromUtf8(""))
        self.chkMoving.setObjectName(_fromUtf8("chkMoving"))
        self.label_15 = QtGui.QLabel(self.frame)
        self.label_15.setGeometry(QtCore.QRect(10, 520, 91, 17))
        self.label_15.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_15.setObjectName(_fromUtf8("label_15"))
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setSizeGripEnabled(False)
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        # Keyboard tab order, top of the form to the goal slider.
        MainWindow.setTabOrder(self.cmbBus, self.cmbServo)
        MainWindow.setTabOrder(self.cmbServo, self.chkEnabled)
        MainWindow.setTabOrder(self.chkEnabled, self.txtGoal)
        MainWindow.setTabOrder(self.txtGoal, self.txtMaxSpeed)
        MainWindow.setTabOrder(self.txtMaxSpeed, self.cmbSmoothing)
        MainWindow.setTabOrder(self.cmbSmoothing, self.txtMinGoal)
        MainWindow.setTabOrder(self.txtMinGoal, self.txtMaxGoal)
        MainWindow.setTabOrder(self.txtMaxGoal, self.txtMinPulse)
        MainWindow.setTabOrder(self.txtMinPulse, self.txtMaxPulse)
        MainWindow.setTabOrder(self.txtMaxPulse, self.txtMinSensor)
        MainWindow.setTabOrder(self.txtMinSensor, self.txtMaxSensor)
        MainWindow.setTabOrder(self.txtMaxSensor, self.chkCalibrated)
        MainWindow.setTabOrder(self.chkCalibrated, self.txtPosition)
        MainWindow.setTabOrder(self.txtPosition, self.chkPower)
        MainWindow.setTabOrder(self.chkPower, self.chkMoving)
        MainWindow.setTabOrder(self.chkMoving, self.txtSpeed)
        MainWindow.setTabOrder(self.txtSpeed, self.txtSensorRaw)
        MainWindow.setTabOrder(self.txtSensorRaw, self.sliderGoal)
    def retranslateUi(self, MainWindow):
        """Assign translatable text to the window title and all labels."""
        MainWindow.setWindowTitle(_translate("MainWindow", "ROS Servo Trainer", None))
        self.label_3.setText(_translate("MainWindow", "Min Pulse", None))
        self.label_4.setText(_translate("MainWindow", "Max Pulse", None))
        self.label_5.setText(_translate("MainWindow", "MinSensor", None))
        self.label_2.setText(_translate("MainWindow", "Max Goal", None))
        self.chkCalibrated.setText(_translate("MainWindow", "Calibrated", None))
        self.label.setText(_translate("MainWindow", "Min Goal", None))
        self.label_8.setText(_translate("MainWindow", "MaxSpeed", None))
        self.label_6.setText(_translate("MainWindow", "Max Sensor", None))
        self.label_14.setText(_translate("MainWindow", "Smoothing", None))
        self.label_7.setText(_translate("MainWindow", "Sensor Raw", None))
        self.label_10.setText(_translate("MainWindow", "Position", None))
        self.label_11.setText(_translate("MainWindow", "Power", None))
        self.label_12.setText(_translate("MainWindow", "Bus", None))
        self.chkEnabled.setText(_translate("MainWindow", "Enabled", None))
        self.label_13.setText(_translate("MainWindow", "Servo", None))
        self.label_9.setText(_translate("MainWindow", "Goal", None))
        self.label_15.setText(_translate("MainWindow", "Moving", None))
|
MatthewVerbryke/inmoov_ros
|
inmoov_tools/trainer/trainergui.py
|
Python
|
bsd-3-clause
| 13,132
|
#!/usr/bin/env python
# Copyright (c) 2014, Bo Tian <tianbo@gmail.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may
# be used to endorse or promote products derived from this software without specific
# prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
def main():
    """Entry point: extract_symbols(<input_file>, <output_file>) from argv.

    argv[1] is the input listing, argv[2] the output symbol file.
    """
    args = sys.argv
    # print() with a single argument behaves identically on Python 2 and 3.
    print(str(args))
    extract_symbols(args[1], args[2])
def extract_symbols(input_file, output_file):
    """Extract ticker symbols from a pipe-delimited listing file.

    For every line of *input_file* containing '|', takes the second
    '|'-separated column, skips entries containing '$' (preferred
    shares, warrants etc.), rewrites '.' as '-' (e.g. BRK.B -> BRK-B
    for Yahoo Finance) and writes one symbol per line to *output_file*.

    Files are managed with context managers so they are closed even if
    an error occurs mid-way (the original leaked both handles on error).
    """
    with open(input_file, 'r') as fin, open(output_file, 'w') as fout:
        for line in fin:
            if '|' not in line:
                continue
            cols = line.split('|')
            if '$' in cols[1]:  # Skip preferred shares, warrants etc.
                continue
            fout.write(cols[1].replace('.', '-') + '\n')
# Run only when executed as a script (expects two file-path arguments).
if __name__ == "__main__":
    main()
|
btian/market_correlator
|
extract_symbols.py
|
Python
|
bsd-3-clause
| 2,097
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016, Germán Fuentes Capella <development@fuentescapella.com>
# BSD 3-Clause License
#
# Copyright (c) 2017, Germán Fuentes Capella
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from vps.console import get_headers, column_size
class TestDisplay(unittest.TestCase):
    """Unit tests for vps.console.get_headers and column_size."""

    def test_headers(self):
        rows = [{'h0': 0, 'h1': 1}, {'h0': 0, 'h1': 1}]
        self.assertEqual(get_headers(rows), ['h0', 'h1'])

    def test_different_headers(self):
        # Headers present in only some rows are still collected.
        rows = [{'h0': 0, 'h1': 1}, {'h0': 0, 'h1': 1, 'h2': 2}]
        self.assertEqual(get_headers(rows), ['h0', 'h1', 'h2'])

    def test_column_size(self):
        rows = [{'k0': 'text0', 'k1': '1'}, {'k0': 'txt', 'k1': ''}]
        widths = column_size(rows[0].keys(), rows)
        self.assertEqual(widths['k0'], 5)
        self.assertEqual(widths['k1'], 2)

    def test_column_size_with_boolean(self):
        # len('False') == 5, so the boolean is measured by its repr.
        rows = [{'k0': False}]
        widths = column_size(rows[0].keys(), rows)
        self.assertEqual(widths['k0'], 5)
|
germfue/vps-tools
|
tests/console_tests.py
|
Python
|
bsd-3-clause
| 2,784
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from random import choice
from scipy import zeros
from twoplayergame import TwoPlayerGame
# TODO: undo operation
class CaptureGame(TwoPlayerGame):
    """ the capture game is a simplified version of the Go game: the first player to capture a stone wins!
    Pass moves are forbidden."""
    # CHECKME: suicide allowed?

    # Board cell states; every position in self.b holds one of these.
    BLACK = 1
    WHITE = -1
    EMPTY = 0

    # Black moves first, as in Go.
    startcolor = BLACK

    def __init__(self, size, suicideenabled = True):
        """ the size of the board is generally between 3 and 19. """
        self.size = size
        # When True, a suicidal move is legal but immediately loses the game
        # (see doMove).
        self.suicideenabled = suicideenabled
        self.reset()

    def _iterPos(self):
        """ an iterator over all the positions of the board. """
        # Row-major order: (0,0), (0,1), ..., (size-1, size-1).
        for i in range(self.size):
            for j in range(self.size):
                yield (i,j)

    def reset(self):
        """ empty the board. """
        TwoPlayerGame.reset(self)
        self.movesDone = 0
        # self.b maps a position tuple to BLACK / WHITE / EMPTY.
        self.b = {}
        for p in self._iterPos():
            self.b[p] = self.EMPTY
        # which stone belongs to which group
        self.groups = {}
        # how many liberties does each group have
        self.liberties = {}

    @property
    def indim(self):
        # One possible action per board position.
        return self.size**2

    @property
    def outdim(self):
        # Two observation values per position (see getBoardArray).
        return 2*self.size**2

    def getBoardArray(self):
        """ an array with two boolean values per position, indicating
        'white stone present' and 'black stone present' respectively. """
        a = zeros(self.outdim)
        for i, p in enumerate(self._iterPos()):
            if self.b[p] == self.WHITE:
                a[2*i] = 1
            elif self.b[p] == self.BLACK:
                a[2*i+1] = 1
        return a

    def isLegal(self, c, pos):
        # A move is legal on an empty on-board position; when suicide is
        # disabled, suicidal moves are excluded too.
        if pos not in self.b:
            return False
        elif self.b[pos] != self.EMPTY:
            return False
        elif not self.suicideenabled:
            return not self._suicide(c, pos)
        return True

    def doMove(self, c, pos):
        """ the action is a (color, position) tuple, for the next stone to move.
        returns True if the move was legal. """
        self.movesDone += 1
        if pos == 'resign':
            # Resignation: the opponent wins immediately.
            self.winner = -c
            return True
        elif not self.isLegal(c, pos):
            return False
        elif self._suicide(c, pos):
            # Only reachable when suicide is enabled; the mover loses.
            assert self.suicideenabled
            self.b[pos] = 'y'   # marker rendered verbatim by __str__
            self.winner = -c
            return True
        elif self._capture(c, pos):
            # Capturing at least one stone wins the game.
            self.winner = c
            self.b[pos] = 'x'   # marker rendered verbatim by __str__
            return True
        else:
            self._setStone(c, pos)
            return True

    def getSensors(self):
        """ just a list of the board position states. """
        # NOTE(review): relies on Python 2 `map` returning a list; under
        # Python 3 this would return an iterator -- confirm target version.
        return map(lambda x: x[1], sorted(self.b.items()))

    def __str__(self):
        # ASCII rendering: '.' empty, 'X' black, 'O' white; any other value
        # (the 'x'/'y' markers set by doMove) is printed as-is.
        s = ''
        for i in range(self.size):
            for j in range(self.size):
                val = self.b[(i,j)]
                if val == self.EMPTY: s += ' .'
                elif val == self.BLACK: s += ' X'
                elif val == self.WHITE: s += ' O'
                else: s += ' '+str(val)
            s += '\n'
        if self.winner:
            # NOTE(review): the legend symbols ('#'/'*') do not match the
            # board symbols ('X'/'O') used above.
            if self.winner == self.BLACK:
                w = 'Black (#)'
            elif self.winner == self.WHITE:
                w = 'White (*)'
            else:
                w = self.winner
            s += 'Winner: '+w
            s += ' (moves done:'+str(self.movesDone)+')\n'
        return s

    def _neighbors(self, pos):
        """ the 4 neighboring positions """
        # Off-board neighbors are simply omitted (edge/corner positions
        # return fewer than 4).
        res = []
        if pos[1] < self.size -1: res.append((pos[0], pos[1]+1))
        if pos[1] > 0: res.append((pos[0], pos[1]-1))
        if pos[0] < self.size -1: res.append((pos[0]+1, pos[1]))
        if pos[0] > 0: res.append((pos[0]-1, pos[1]))
        return res

    def _setStone(self, c, pos):
        """ set stone, and update liberties and groups. """
        self.b[pos] = c
        merge = False
        # Provisional singleton group id derived from the position.
        self.groups[pos] = self.size*pos[0]+pos[1]
        # NOTE(review): Python 2 semantics -- `filter` must produce a list
        # here, since `freen` is reused below.
        freen = filter(lambda n: self.b[n] == self.EMPTY, self._neighbors(pos))
        self.liberties[self.groups[pos]] = set(freen)
        for n in self._neighbors(pos):
            if self.b[n] == -c:
                # An adjacent enemy group loses this point as a liberty.
                self.liberties[self.groups[n]].difference_update([pos])
            elif self.b[n] == c:
                if merge:
                    # Already joined one friendly group; fold further ones in.
                    newg = self.groups[pos]
                    oldg = self.groups[n]
                    if newg == oldg:
                        self.liberties[newg].difference_update([pos])
                    else:
                        # merging 2 groups
                        for p in self.groups.keys():
                            if self.groups[p] == oldg:
                                self.groups[p] = newg
                        self.liberties[newg].update(self.liberties[oldg])
                        self.liberties[newg].difference_update([pos])
                        del self.liberties[oldg]
                else:
                    # connect to this group
                    del self.liberties[self.groups[pos]]
                    self.groups[pos] = self.groups[n]
                    self.liberties[self.groups[n]].update(freen)
                    self.liberties[self.groups[n]].difference_update([pos])
                    merge = True

    def _suicide(self, c, pos):
        """ would putting a stone here be suicide for c? """
        # any free neighbors?
        for n in self._neighbors(pos):
            if self.b[n] == self.EMPTY:
                return False
        # any friendly neighbor with extra liberties?
        for n in self._neighbors(pos):
            if self.b[n] == c:
                if len(self.liberties[self.groups[n]]) > 1:
                    return False
        # capture all surrounding ennemies?
        if self._capture(c, pos):
            return False
        return True

    def _capture(self, c, pos):
        """ would putting a stone here lead to a capture? """
        # True when some adjacent enemy group is down to its last liberty
        # (which must be `pos` itself, since pos is empty).
        for n in self._neighbors(pos):
            if self.b[n] == -c:
                if len(self.liberties[self.groups[n]]) == 1:
                    return True
        return False

    def getLiberties(self, pos):
        """ how many liberties does the stone at pos have? """
        if self.b[pos] == self.EMPTY:
            return None
        return len(self.liberties[self.groups[pos]])

    def getGroupSize(self, pos):
        """ what size is the worm that this stone is part of? """
        if self.b[pos] == self.EMPTY:
            return None
        g = self.groups[pos]
        # NOTE(review): Python 2 -- len() over a filter result.
        return len(filter(lambda x: x==g, self.groups.values()))

    def getLegals(self, c):
        """ return all the legal positions for a color """
        # NOTE(review): `c` is unused -- empty positions are legal for
        # either color (suicide is handled by getAcceptable).
        return filter(lambda p: self.b[p] == self.EMPTY, self._iterPos())

    def getAcceptable(self, c):
        """ return all legal positions for a color that don't commit suicide. """
        return filter(lambda p: not self._suicide(c, p), self.getLegals(c))

    def getKilling(self, c):
        """ return all legal positions for a color that immediately kill the opponent. """
        return filter(lambda p: self._capture(c, p), self.getAcceptable(c))

    def randomBoard(self, nbmoves):
        """ produce a random, undecided and legal capture-game board, after at most nbmoves.
        @return: the number of moves actually done. """
        c = self.BLACK
        self.reset()
        for i in range(nbmoves):
            # Restrict to moves that neither lose (suicide) nor win
            # (capture), so the board stays undecided.
            l = set(self.getAcceptable(c))
            l.difference_update(self.getKilling(c))
            if len(l) == 0:
                return i
            self._setStone(c, choice(list(l)))
            c = -c
        return nbmoves

    def giveHandicap(self, h, color = BLACK):
        # Place up to `h` handicap stones on the standard spots.
        i = 0
        for pos in self._handicapIterator():
            i += 1
            if i > h:
                return
            if self.isLegal(color, pos):
                self._setStone(color, pos)

    def _handicapIterator(self):
        # Yields handicap positions: center-ish first point, then the four
        # corner points, then the edge points one step in from the border.
        s = self.size
        assert s > 2
        yield (1,1)
        if s > 3:
            # 4 corners
            yield (s-2, s-2)
            yield (1, s-2)
            yield (s-2, 1)
        if s > 4:
            for i in range(2,s-2):
                yield (i, 1)
                yield (i, s-2)
                yield (1, i)
                yield (s-2, i)

    def playToTheEnd(self, p1, p2):
        """ alternate playing moves between players until the game is over. """
        # NOTE(review): gameOver()/performAction() are presumably inherited
        # from TwoPlayerGame -- not visible here.
        assert p1.color == -p2.color
        i = 0
        p1.game = self
        p2.game = self
        players = [p1, p2]
        while not self.gameOver():
            p = players[i]
            self.performAction(p.getAction())
            i = (i+1)%2
|
daanwierstra/pybrain
|
pybrain/rl/environments/twoplayergames/capturegame.py
|
Python
|
bsd-3-clause
| 9,101
|
"""Two different implementations of merge sort. First one is the standard sort
that creates the result to new list on each level. Second one is an in-place
sort that uses two alternating buffers and offsets to limit memory usage
to O(2n).
"""
def sort(lst):
    """Standard (stable) merge sort.

    Args:
        lst: List to sort; it is not modified.

    Returns:
        Sorted copy of the list.
    """
    if len(lst) <= 1:
        return lst
    mid = len(lst) // 2
    low = sort(lst[:mid])
    high = sort(lst[mid:])
    res = []
    i = j = 0
    while i < len(low) and j < len(high):
        # `<=` (not `<`) keeps the sort stable: on ties the element from
        # the left half -- which came first in the input -- wins.
        if low[i] <= high[j]:
            res.append(low[i])
            i += 1
        else:
            res.append(high[j])
            j += 1
    # At most one of the halves has leftovers; append them.
    res.extend(low[i:])
    res.extend(high[j:])
    return res
def helper(lst, buf, start, stop, to_buf):
    """Recursive worker for the two-buffer in-place merge sort.

    Args:
        lst: List holding the data to sort.
        buf: Scratch buffer of the same length.
        start: Start index of the slice to sort.
        stop: Stop index (exclusive) of the slice to sort.
        to_buf: Where the sorted result must land -- True for ``buf``,
            False for ``lst``.
    """
    length = stop - start
    if length <= 1:
        # A lone element only needs copying when the result belongs in buf.
        if to_buf and length == 1:
            buf[start] = lst[start]
        return
    mid = start + length // 2
    # Children deliver their sorted halves into the *opposite* buffer,
    # so this level can merge from there back into its own target.
    helper(lst, buf, start, mid, not to_buf)
    helper(lst, buf, mid, stop, not to_buf)
    # The merge below always reads from buf and writes to lst; swap the
    # local names when the caller wants the result in buf instead.
    if to_buf:
        lst, buf = buf, lst
    left, right, out = start, mid, start
    while left < mid and right < stop:
        if buf[left] < buf[right]:
            lst[out] = buf[left]
            left += 1
        else:
            lst[out] = buf[right]
            right += 1
        out += 1
    # Copy whichever half still has leftovers.
    while left < mid:
        lst[out] = buf[left]
        left += 1
        out += 1
    while right < stop:
        lst[out] = buf[right]
        right += 1
        out += 1
def sort_in_place(lst):
    """In-place merge sort.

    Args:
        lst: List to sort; modified in place.
    """
    scratch = [None] * len(lst)
    helper(lst, scratch, 0, len(lst), False)
|
niemmi/algolib
|
algolib/sort/merge_sort.py
|
Python
|
bsd-3-clause
| 2,137
|
import os
import unittest
import vtk, qt, ctk, slicer
import math
import sys
#
# AstroMomentMapsSelfTest
#
class AstroMomentMapsSelfTest:
    """Slicer scripted-module descriptor for the moment-maps self test."""

    def __init__(self, parent):
        # Module metadata shown in Slicer's module panel.
        parent.title = "Astro MomentMaps SelfTest"
        parent.categories = ["Testing.TestCases"]
        parent.dependencies = ["AstroVolume"]
        parent.contributors = ["""
    Davide Punzo (Kapteyn Astronomical Institute) and
    Thijs van der Hulst (Kapteyn Astronomical Institute)."""]
        parent.helpText = """
    This module was developed as a self test to perform the operations needed for generating moment maps.
    """
        parent.acknowledgementText = """
    """ # replace with organization, grant and thanks.
        self.parent = parent

        # Add this test to the SelfTest module's list for discovery when the module
        # is created. Since this module may be discovered before SelfTests itself,
        # create the list if it doesn't already exist.
        try:
            slicer.selfTests
        except AttributeError:
            slicer.selfTests = {}
        slicer.selfTests['Astro MomentMaps SelfTest'] = self.runTest

    def runTest(self):
        # Delegate to the unittest-based test case below.
        tester = AstroMomentMapsSelfTestTest()
        tester.runTest()
#
# qAstroMomentMapsSelfTestWidget
#
class AstroMomentMapsSelfTestWidget:
    """Minimal GUI for the self test: Reload and Reload-and-Test buttons."""

    def __init__(self, parent = None):
        # Without a parent, build a standalone MRML widget and show it.
        if not parent:
            self.parent = slicer.qMRMLWidget()
            self.parent.setLayout(qt.QVBoxLayout())
            self.parent.setMRMLScene(slicer.mrmlScene)
        else:
            self.parent = parent
        self.layout = self.parent.layout()
        if not parent:
            self.setup()
            self.parent.show()

    def setup(self):
        # Instantiate and connect widgets ...

        # reload button
        # (use this during development, but remove it when delivering
        # your module to users)
        self.reloadButton = qt.QPushButton("Reload")
        self.reloadButton.toolTip = "Reload this module."
        self.reloadButton.name = "AstroMomentMapsSelfTest Reload"
        self.layout.addWidget(self.reloadButton)
        self.reloadButton.connect('clicked()', self.onReload)

        # reload and test button
        # (use this during development, but remove it when delivering
        # your module to users)
        self.reloadAndTestButton = qt.QPushButton("Reload and Test")
        self.reloadAndTestButton.toolTip = "Reload this module and then run the self tests."
        self.layout.addWidget(self.reloadAndTestButton)
        self.reloadAndTestButton.connect('clicked()', self.onReloadAndTest)

        # Add vertical spacer
        self.layout.addStretch(1)

    def cleanup(self):
        pass

    def onReload(self,moduleName="AstroMomentMapsSelfTest"):
        """Generic reload method for any scripted module.
        ModuleWizard will substitute correct default moduleName.
        """
        globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)

    def onReloadAndTest(self,moduleName="AstroMomentMapsSelfTest"):
        # Reload first so the freshly saved code is what gets tested.
        self.onReload()
        evalString = 'globals()["%s"].%sTest()' % (moduleName, moduleName)
        tester = eval(evalString)
        tester.runTest()
#
# AstroMomentMapsSelfTestLogic
#
class AstroMomentMapsSelfTestLogic:
    """Holds the module's actual computation.

    The interface is such that other python code can import this class and
    use the functionality without requiring an instance of the Widget.
    """

    def __init__(self):
        pass

    def hasImageData(self, volumeNode):
        """Dummy logic method: True only when *volumeNode* exists and
        carries valid image data.
        """
        if volumeNode:
            if volumeNode.GetImageData() is not None:
                return True
            print('no image data')
            return False
        print('no volume node')
        return False
class AstroMomentMapsSelfTestTest(unittest.TestCase):
    """
    This is the test case for your scripted module.
    """

    def delayDisplay(self,message,msec=100):
        """This utility method displays a small dialog and waits.
        This does two things: 1) it lets the event loop catch up
        to the state of the test so that rendering and widget updates
        have all taken place before the test continues and 2) it
        shows the user/developer/tester the state of the test
        so that we'll know when it breaks.
        """
        print(message)
        self.info = qt.QDialog()
        self.infoLayout = qt.QVBoxLayout()
        self.info.setLayout(self.infoLayout)
        self.label = qt.QLabel(message,self.info)
        self.infoLayout.addWidget(self.label)
        # Auto-close the dialog after `msec` milliseconds.
        qt.QTimer.singleShot(msec, self.info.close)
        self.info.exec_()

    def setUp(self):
        # Each run starts from an empty MRML scene.
        slicer.mrmlScene.Clear(0)

    def runTest(self):
        self.setUp()
        self.test_AstroMomentMapsSelfTest()

    def test_AstroMomentMapsSelfTest(self):
        print("Running AstroMomentMapsSelfTest Test case:")
        self.downloadWEIN069()
        astroVolume = slicer.util.getNode("WEIN069")
        # Use the sample volume's display threshold as the noise estimate
        # for the intensity cut below.
        rms = astroVolume.GetDisplayThreshold()
        mainWindow = slicer.util.mainWindow()
        mainWindow.moduleSelector().selectModule('AstroVolume')
        mainWindow.moduleSelector().selectModule('AstroMomentMaps')
        astroMomentMapsModule = module = slicer.modules.astromomentmaps
        astroMomentMapsModuleWidget = astroMomentMapsModule.widgetRepresentation()
        AstroMomentMapsParameterNode = slicer.util.getNode("AstroMomentMapsParameters")
        AstroMomentMapsParameterNode.SetIntensityMin(rms * 3)
        # Locate the module's Apply button among its child widgets.
        QPushButtonList = astroMomentMapsModuleWidget.findChildren(qt.QPushButton)
        for QPushButton in (QPushButtonList):
            if QPushButton.name == "ApplyButton":
                ApplyPushButton = QPushButton
        self.delayDisplay('Calculating moment maps', 700)
        ApplyPushButton.click()
        # Compare one voxel of each generated moment map against reference
        # values.
        ZeroMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetZeroMomentVolumeNodeID())
        pixelValue0 = ZeroMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
        FirstMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetFirstMomentVolumeNodeID())
        pixelValue1 = FirstMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
        SecondMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetSecondMomentVolumeNodeID())
        pixelValue2 = SecondMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
        if (math.fabs(pixelValue0 - 0.511788547039) < 1.e-6 and \
            math.fabs(pixelValue1 - 5231.70947266) < 1.e-6 and \
            math.fabs(pixelValue2 - 28.8058509827) < 1.e-6):
            self.delayDisplay('Test passed', 700)
        else:
            self.delayDisplay('Test failed', 700)
        # if run from Slicer interface remove the following exit
        sys.exit()

    def downloadWEIN069(self):
        # Fetch the WEIN069 sample dataset through the AstroSampleData
        # module (imported lazily since it only exists inside Slicer).
        import AstroSampleData
        astroSampleDataLogic = AstroSampleData.AstroSampleDataLogic()
        self.delayDisplay('Getting WEIN069 Astro Volume')
        WEIN069Volume = astroSampleDataLogic.downloadSample("WEIN069")
        return WEIN069Volume
|
Punzo/SlicerAstro
|
AstroMomentMaps/Testing/Python/AstroMomentMapsSelfTest.py
|
Python
|
bsd-3-clause
| 6,817
|
# -*- coding: utf-8 -*-
"""
Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} number of download speed
{up} number of upload speed
{download} number of download usage
{upload} number of upload usage
{total} number of total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
    """
    Get system status.
    """

    def __init__(self, nic):
        self.nic = nic

    def netBytes(self):
        """
        Get bytes directly from /proc.

        Returns a (received_bytes, transmitted_bytes) tuple for the
        configured interface.
        """
        with open('/proc/net/dev') as fh:
            fields = fh.read().split()
        # Locate the "<nic>:" token; the rx-bytes counter is the field
        # right after it and the tx-bytes counter sits 9 fields along.
        anchor = fields.index(self.nic + ':')
        return int(fields[anchor + 1]), int(fields[anchor + 9])
class Py3status:
    """
    """
    # available configuration parameters
    cache_timeout = 2
    format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
        u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
    nic = None
    thresholds = {
        'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
        'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
    }

    class Meta:
        # Translate the legacy speed/traffic options into the newer
        # `thresholds` parameter.
        def deprecate_function(config):
            return {
                'thresholds': {
                    'down': [
                        (0, 'bad'),
                        (config.get('low_speed', 30), 'degraded'),
                        (config.get('med_speed', 60), 'good')
                    ],
                    'total': [
                        (0, 'good'),
                        (config.get('low_traffic', 400), 'degraded'),
                        (config.get('med_traffic', 700), 'bad')
                    ]
                }
            }

        # Obsolete parameters: mapped via deprecate_function, then removed.
        deprecated = {
            'function': [
                {'function': deprecate_function},
            ],
            'remove': [
                {
                    'param': 'low_speed',
                    'msg': 'obsolete, set using thresholds parameter',
                },
                {
                    'param': 'med_speed',
                    'msg': 'obsolete, set using thresholds parameter',
                },
                {
                    'param': 'low_traffic',
                    'msg': 'obsolete, set using thresholds parameter',
                },
                {
                    'param': 'med_traffic',
                    'msg': 'obsolete, set using thresholds parameter',
                },
            ],
        }

        # Default printf-style precision for each placeholder.
        update_config = {
            'update_placeholder_format': [
                {
                    'placeholder_formats': {
                        'down': ':5.1f',
                        'up': ':5.1f',
                        'download': ':3.0f',
                        'upload': ':3.0f',
                        'total': ':3.0f',
                    },
                    'format_strings': ['format']
                },
            ],
        }

    def post_config_hook(self):
        """
        Get network interface.
        """
        self.old_transmitted = 0
        self.old_received = 0
        if self.nic is None:
            # Get default gateway directly from /proc.
            with open('/proc/net/route') as fh:
                for line in fh:
                    fields = line.strip().split()
                    # Destination 0.0.0.0 with the gateway flag (0x2) set.
                    if fields[1] == '00000000' and int(fields[3], 16) & 2:
                        self.nic = fields[0]
                        break
            if self.nic is None:
                # No default route found; fall back to loopback.
                self.nic = 'lo'
        self.py3.log('selected nic: %s' % self.nic)

    def netdata(self):
        """
        Calculate network speed and network traffic.
        """
        data = GetData(self.nic)
        received_bytes, transmitted_bytes = data.netBytes()

        # net_speed (statistic): KiB delta since the previous refresh.
        down = (received_bytes - self.old_received) / 1024.
        up = (transmitted_bytes - self.old_transmitted) / 1024.
        self.old_received = received_bytes
        self.old_transmitted = transmitted_bytes

        # net_traffic (statistic): cumulative MiB since the counters reset.
        download = received_bytes / 1024 / 1024.
        upload = transmitted_bytes / 1024 / 1024.
        total = download + upload

        # color threshold
        self.py3.threshold_get_color(down, 'down')
        self.py3.threshold_get_color(total, 'total')

        netdata = self.py3.safe_format(self.format, {'down': down,
                                                     'up': up,
                                                     'download': download,
                                                     'upload': upload,
                                                     'total': total,
                                                     'nic': self.nic})
        return {
            'cached_until': self.py3.time_in(self.cache_timeout),
            'full_text': netdata
        }
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
alexoneill/py3status
|
py3status/modules/netdata.py
|
Python
|
bsd-3-clause
| 5,963
|
# django imports
from django.db import models
from django.utils.translation import ugettext_lazy as _
# lfs imports
from lfs.catalog.models import Product
from lfs.order.models import Order
class Topseller(models.Model):
    """Selected products are in any case among topsellers.
    """
    # The product that is forced into the topseller list.
    product = models.ForeignKey(Product, verbose_name=_(u"Product"))
    # Display position; lower numbers come first (see Meta.ordering).
    position = models.PositiveSmallIntegerField(_(u"Position"), default=1)

    class Meta:
        ordering = ["position"]

    def __unicode__(self):
        return "%s (%s)" % (self.product.name, self.position)
class ProductSales(models.Model):
    """Stores total sales per product.
    """
    product = models.ForeignKey(Product, verbose_name=_(u"Product"))
    # Aggregated number of units sold.
    sales = models.IntegerField(_(u"sales"), default=0)
class FeaturedProduct(models.Model):
    """Featured products are manually selected by the shop owner.
    """
    product = models.ForeignKey(Product, verbose_name=_(u"Product"))
    # Display position; lower numbers come first (see Meta.ordering).
    position = models.PositiveSmallIntegerField(_(u"Position"), default=1)
    # Inactive entries can be kept without being displayed.
    active = models.BooleanField(_(u"Active"), default=True)

    class Meta:
        ordering = ["position"]

    def __unicode__(self):
        return "%s (%s)" % (self.product.name, self.position)
class OrderRatingMail(models.Model):
    """Saves whether and when a rating mail has been sent for an order.
    """
    order = models.ForeignKey(Order, verbose_name=_(u"Order"))
    # Timestamp of the last send; auto_now refreshes it on every save.
    send_date = models.DateTimeField(auto_now=True)

    def __unicode__(self):
        # BUG FIX: previously referenced the non-existent attribute
        # ``self.rating_mail_sent`` (AttributeError at render time); the
        # model's timestamp field is ``send_date``.
        return "%s (%s)" % (self.order.id, self.send_date)
|
lichong012245/django-lfs-0.7.8
|
lfs/marketing/models.py
|
Python
|
bsd-3-clause
| 1,567
|
from collections import namedtuple
from corehq.apps.change_feed.consumer.feed import KafkaChangeFeed
from corehq.apps.change_feed.document_types import GROUP
from corehq.apps.groups.models import Group
from corehq.elastic import stream_es_query, get_es_new, ES_META
from corehq.pillows.mappings.user_mapping import USER_INDEX, USER_INDEX_INFO
from pillowtop.checkpoints.manager import PillowCheckpointEventHandler, get_checkpoint_for_elasticsearch_pillow
from pillowtop.pillow.interface import ConstructedPillow
from pillowtop.processors import PillowProcessor
from pillowtop.reindexer.change_providers.couch import CouchViewChangeProvider
from pillowtop.reindexer.reindexer import PillowChangeProviderReindexer
class GroupsToUsersProcessor(PillowProcessor):
    """Pillow processor that mirrors group changes onto ES user documents."""

    def __init__(self):
        # One ES client reused for every processed change.
        self._es = get_es_new()

    def process_change(self, pillow_instance, change):
        # A deleted group is stripped from its members' user docs; any
        # other change (re)applies the group's id/name to them.
        if change.deleted:
            remove_group_from_users(change.get_document(), self._es)
        else:
            update_es_user_with_groups(change.get_document(), self._es)
def get_group_to_user_pillow(pillow_id='GroupToUserPillow'):
    """Construct the pillow that applies Kafka group changes to ES users.

    The ES checkpoint is looked up by pillow id, so the id must stay fixed.
    """
    assert pillow_id == 'GroupToUserPillow', 'Pillow ID is not allowed to change'
    checkpoint = get_checkpoint_for_elasticsearch_pillow(pillow_id, USER_INDEX_INFO)
    processor = GroupsToUsersProcessor()
    return ConstructedPillow(
        name=pillow_id,
        checkpoint=checkpoint,
        change_feed=KafkaChangeFeed(topics=[GROUP], group_id='groups-to-users'),
        processor=processor,
        # Persist the checkpoint every 100 processed changes.
        change_processed_event_handler=PillowCheckpointEventHandler(
            checkpoint=checkpoint, checkpoint_frequency=100,
        ),
    )
def remove_group_from_users(group_doc, es_client):
    """Strip a group's name and id from all of its members' ES user docs.

    Only issues an ES update for users that actually referenced the group.
    A ``None`` group document is a no-op.
    """
    if group_doc is None:
        return
    group_name = group_doc["name"]
    group_id = group_doc["_id"]
    for user_source in stream_user_sources(group_doc.get("users", [])):
        changed = False
        if group_name in user_source.group_names:
            user_source.group_names.remove(group_name)
            changed = True
        if group_id in user_source.group_ids:
            user_source.group_ids.remove(group_id)
            changed = True
        if not changed:
            continue
        doc = {"__group_ids": list(user_source.group_ids), "__group_names": list(user_source.group_names)}
        es_client.update(USER_INDEX, ES_META['users'].type, user_source.user_id, body={"doc": doc})
def update_es_user_with_groups(group_doc, es_client=None):
    """Record a group's id and name on every member's ES user document.

    Users that already carry both the id and the name are left untouched.
    A fresh ES client is created when none is supplied.
    """
    es_client = es_client or get_es_new()
    group_name = group_doc["name"]
    group_id = group_doc["_id"]
    for user_source in stream_user_sources(group_doc.get("users", [])):
        already_tracked = (group_name in user_source.group_names and
                           group_id in user_source.group_ids)
        if already_tracked:
            continue
        user_source.group_ids.add(group_id)
        user_source.group_names.add(group_name)
        doc = {"__group_ids": list(user_source.group_ids), "__group_names": list(user_source.group_names)}
        es_client.update(USER_INDEX, ES_META['users'].type, user_source.user_id, body={"doc": doc})
# Lightweight view of one ES user row: its id plus the sets of group ids
# and group names currently stored on the document.
UserSource = namedtuple('UserSource', ['user_id', 'group_ids', 'group_names'])
def stream_user_sources(user_ids):
    """Yield a :class:`UserSource` for each user id found in ES.

    Group ids and names are normalized to sets.
    """
    query = {"filter": {"and": [{"terms": {"_id": user_ids}}]}}
    for hit in stream_es_query(es_index='users', q=query, fields=["__group_ids", "__group_names"]):
        stored = hit.get('fields', {})
        raw_ids = stored.get("__group_ids", [])
        raw_names = stored.get("__group_names", [])
        # Each field may come back as a list or as a single bare value;
        # wrap the bare case so we always hand out sets.
        ids = set(raw_ids) if isinstance(raw_ids, list) else {raw_ids}
        names = set(raw_names) if isinstance(raw_names, list) else {raw_names}
        yield UserSource(hit['_id'], ids, names)
def get_groups_to_user_reindexer():
    """Build a reindexer that replays every couch Group document through
    the group-to-user pillow."""
    return PillowChangeProviderReindexer(
        pillow=get_group_to_user_pillow(),
        change_provider=CouchViewChangeProvider(
            couch_db=Group.get_db(),
            view_name='all_docs/by_doc_type',
            # Select all docs whose doc_type is 'Group', with full bodies.
            view_kwargs={
                'startkey': ['Group'],
                'endkey': ['Group', {}],
                'include_docs': True,
            }
        ),
    )
|
qedsoftware/commcare-hq
|
corehq/pillows/groups_to_user.py
|
Python
|
bsd-3-clause
| 4,115
|
# Created By: Virgil Dupras
# Created On: 2004/12/20
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
class JobCancelled(Exception):
    """The user has cancelled the job."""
class JobInProgressError(Exception):
    """A job is already being performed; only one may run at a time."""
class JobCountError(Exception):
    """More jobs were started than the number of jobs allowed."""
class Job:
    """Tracks a job's progression and reports it through a callback.

    Note that this class is not foolproof: you could call start_subjob and
    then call add_progress from the parent job, or start a second subjob
    and call add_progress on the first -- nothing prevents it, but either
    would corrupt the reported progression, since the most recent sub job
    is supposed to drive it.  Guarding against this would cost the class
    its lightweight design (a Job would need a parent reference instead of
    a bare callback -- a lot of checks for nothing).  Likewise, nothing
    stops you from calling add_progress right after SkipJob.
    """

    #--- Construction
    def __init__(self, job_proportions, callback):
        """Create a Job split into weighted parts.

        job_proportions: an int (that many equally-weighted jobs) or a list
        of relative weights, one per upcoming start_job() call.
        callback: invoked with an int progress (0-100, or -1 when
        indeterminate) and an optional description; returning a falsy value
        cancels the job.
        """
        if not callable(callback):
            raise TypeError("'callback' MUST be set when creating a Job")
        if isinstance(job_proportions, int):
            job_proportions = [1] * job_proportions
        self._job_proportions = list(job_proportions)
        self._jobcount = sum(job_proportions)
        self._callback = callback
        self._current_job = 0
        self._passed_jobs = 0
        self._progress = 0
        self._currmax = 1

    #--- Internal helpers
    def _subjob_callback(self, progress, desc=''):
        """Callback handed to child jobs: relay their progress upward."""
        self.set_progress(progress, desc)
        # Never cancel from here; JobCancelled is raised at the top level.
        return True

    def _do_update(self, desc):
        """Report overall progress (an int in 0-100) through the callback.

        Raises JobCancelled when the callback asks to stop.
        """
        if self._current_job:
            finished = self._passed_jobs * self._currmax
            active = self._current_job * self._progress
            span = self._jobcount * self._currmax
            percent = ((finished + active) * 100) // span
        else:
            percent = -1  # indeterminate
        # The callback may not support a description argument, so only
        # pass one when there is something to say.
        keep_going = self._callback(percent, desc) if desc else self._callback(percent)
        if not keep_going:
            raise JobCancelled()

    #--- Public API
    def add_progress(self, progress=1, desc=''):
        """Advance the current job by *progress* units."""
        self.set_progress(self._progress + progress, desc)

    def check_if_cancelled(self):
        """Ping the callback so a pending cancellation can fire."""
        self._do_update('')

    def iter_with_progress(self, sequence, desc_format=None, every=1):
        """Yield *sequence*'s items while automatically adding progress.

        desc_format, when given, is %-formatted with (done, total);
        progress is reported every *every* items.
        """
        total = len(sequence)
        desc = desc_format % (0, total) if desc_format else ''
        self.start_job(total, desc)
        for done, item in enumerate(sequence, start=1):
            yield item
            if done % every == 0:
                if desc_format:
                    desc = desc_format % (done, total)
                self.add_progress(progress=every, desc=desc)
        if desc_format:
            desc = desc_format % (total, total)
        self.set_progress(100, desc)

    def start_job(self, max_progress=100, desc=''):
        """Begin work on the next job.

        Must not be called more times than the job count given to __init__.
        max_progress: the work units this job consists of.
        desc: description of the job.
        """
        self._passed_jobs += self._current_job
        if not self._job_proportions:
            raise JobCountError()
        self._current_job = self._job_proportions.pop(0)
        self._progress = 0
        self._currmax = max(1, max_progress)
        self._do_update(desc)

    def start_subjob(self, job_proportions, desc=''):
        """Start and return a child Job driving the next slot's progress.

        Handy when a phase's total work unit count is unknown up front but
        its internal proportions are.
        """
        self.start_job(100, desc)
        return Job(job_proportions, self._subjob_callback)

    def set_progress(self, progress, desc=''):
        """Set the current job's progress (clamped to [0, max]) and notify
        the callback."""
        self._progress = min(max(progress, 0), self._currmax)
        self._do_update(desc)
class NullJob:
    """A do-nothing stand-in for :class:`Job`.

    Accepts the same calls as ``Job`` but reports no progress and never
    cancels -- useful when a caller requires a job object but progress
    tracking is unwanted.
    """

    def __init__(self, *args, **kwargs):
        pass

    def start_job(self, *args, **kwargs):
        pass

    def start_subjob(self, *args, **kwargs):
        # Sub-jobs of a null job are null as well.
        return NullJob()

    def add_progress(self, *args, **kwargs):
        pass

    def set_progress(self, *args, **kwargs):
        pass

    def check_if_cancelled(self):
        pass

    def iter_with_progress(self, sequence, *args, **kwargs):
        # Simply expose the sequence, unchanged.
        return iter(sequence)


# Shared, ready-to-use instance.
nulljob = NullJob()
|
hsoft/jobprogress
|
jobprogress/job.py
|
Python
|
bsd-3-clause
| 6,167
|
from sympy import symbols, Symbol, sinh, nan, oo, zoo, pi, asinh, acosh, log, sqrt, \
coth, I, cot, E, tanh, tan, cosh, cos, S, sin, Rational, atanh, acoth, \
Integer, O, exp, sech, sec, csch
from sympy.utilities.pytest import raises
def test_sinh():
    # Special values, symmetry, periodicity and pure-imaginary arguments
    # of sinh.  NOTE(review): `y` is created but unused.
    x, y = symbols('x,y')

    k = Symbol('k', integer=True)

    assert sinh(nan) == nan
    assert sinh(zoo) == nan

    assert sinh(oo) == oo
    assert sinh(-oo) == -oo

    assert sinh(0) == 0

    # sinh is odd: sinh(-z) == -sinh(z).
    assert sinh(1) == sinh(1)
    assert sinh(-1) == -sinh(1)

    assert sinh(x) == sinh(x)
    assert sinh(-x) == -sinh(x)

    assert sinh(pi) == sinh(pi)
    assert sinh(-pi) == -sinh(pi)

    assert sinh(2**1024 * E) == sinh(2**1024 * E)
    assert sinh(-2**1024 * E) == -sinh(2**1024 * E)

    # sinh(i*y) == i*sin(y): zeros at integer multiples of pi*I.
    assert sinh(pi*I) == 0
    assert sinh(-pi*I) == 0
    assert sinh(2*pi*I) == 0
    assert sinh(-2*pi*I) == 0
    assert sinh(-3*10**73*pi*I) == 0
    assert sinh(7*10**103*pi*I) == 0

    assert sinh(pi*I/2) == I
    assert sinh(-pi*I/2) == -I
    assert sinh(5*pi*I/2) == I
    assert sinh(7*pi*I/2) == -I

    assert sinh(pi*I/3) == S.Half*sqrt(3)*I
    assert sinh(-2*pi*I/3) == -S.Half*sqrt(3)*I

    assert sinh(pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-pi*I/4) == -S.Half*sqrt(2)*I
    assert sinh(17*pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-3*pi*I/4) == -S.Half*sqrt(2)*I

    assert sinh(pi*I/6) == S.Half*I
    assert sinh(-pi*I/6) == -S.Half*I
    assert sinh(7*pi*I/6) == -S.Half*I
    assert sinh(-5*pi*I/6) == -S.Half*I

    assert sinh(pi*I/105) == sin(pi/105)*I
    assert sinh(-pi*I/105) == -sin(pi/105)*I

    assert sinh(2 + 3*I) == sinh(2 + 3*I)

    # Symbolic imaginary arguments, including integer multiples of pi.
    assert sinh(x*I) == sin(x)*I

    assert sinh(k*pi*I) == 0
    assert sinh(17*k*pi*I) == 0

    assert sinh(k*pi*I/2) == sin(k*pi/2)*I
def test_sinh_series():
    # Taylor expansion of sinh about 0: odd powers with 1/(2n+1)! factors.
    x = Symbol('x')
    assert sinh(x).series(x, 0, 10) == \
        x + x**3/6 + x**5/120 + x**7/5040 + x**9/362880 + O(x**10)
def test_cosh():
    # Special values, evenness and pure-imaginary arguments of cosh.
    # NOTE(review): `y` is created but unused.
    x, y = symbols('x,y')

    k = Symbol('k', integer=True)

    assert cosh(nan) == nan
    assert cosh(zoo) == nan

    assert cosh(oo) == oo
    assert cosh(-oo) == oo

    assert cosh(0) == 1

    # cosh is even: cosh(-z) == cosh(z).
    assert cosh(1) == cosh(1)
    assert cosh(-1) == cosh(1)

    assert cosh(x) == cosh(x)
    assert cosh(-x) == cosh(x)

    # cosh(i*y) == cos(y).
    # NOTE(review): cosh(pi*I) is asserted both via cos(pi) here and as -1
    # further below -- redundant but consistent.
    assert cosh(pi*I) == cos(pi)
    assert cosh(-pi*I) == cos(pi)

    assert cosh(2**1024 * E) == cosh(2**1024 * E)
    assert cosh(-2**1024 * E) == cosh(2**1024 * E)

    assert cosh(pi*I/2) == 0
    assert cosh(-pi*I/2) == 0
    assert cosh((-3*10**73 + 1)*pi*I/2) == 0
    assert cosh((7*10**103 + 1)*pi*I/2) == 0

    assert cosh(pi*I) == -1
    assert cosh(-pi*I) == -1
    assert cosh(5*pi*I) == -1
    assert cosh(8*pi*I) == 1

    assert cosh(pi*I/3) == S.Half
    assert cosh(-2*pi*I/3) == -S.Half

    assert cosh(pi*I/4) == S.Half*sqrt(2)
    assert cosh(-pi*I/4) == S.Half*sqrt(2)
    assert cosh(11*pi*I/4) == -S.Half*sqrt(2)
    assert cosh(-3*pi*I/4) == -S.Half*sqrt(2)

    assert cosh(pi*I/6) == S.Half*sqrt(3)
    assert cosh(-pi*I/6) == S.Half*sqrt(3)
    assert cosh(7*pi*I/6) == -S.Half*sqrt(3)
    assert cosh(-5*pi*I/6) == -S.Half*sqrt(3)

    assert cosh(pi*I/105) == cos(pi/105)
    assert cosh(-pi*I/105) == cos(pi/105)

    assert cosh(2 + 3*I) == cosh(2 + 3*I)

    # Symbolic imaginary arguments.
    assert cosh(x*I) == cos(x)

    assert cosh(k*pi*I) == cos(k*pi)
    assert cosh(17*k*pi*I) == cos(17*k*pi)

    assert cosh(k*pi) == cosh(k*pi)
def test_cosh_series():
    """Check the Maclaurin expansion of cosh up to O(x**10)."""
    x = Symbol('x')
    expected = 1 + x**2/2 + x**4/24 + x**6/720 + x**8/40320 + O(x**10)
    assert cosh(x).series(x, 0, 10) == expected
def test_tanh():
    """Automatic evaluation of tanh at special values (tanh is odd)."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert tanh(nan) == nan
    assert tanh(zoo) == nan
    assert tanh(oo) == 1
    assert tanh(-oo) == -1
    assert tanh(0) == 0
    assert tanh(1) == tanh(1)
    assert tanh(-1) == -tanh(1)
    assert tanh(x) == tanh(x)
    assert tanh(-x) == -tanh(x)
    assert tanh(pi) == tanh(pi)
    assert tanh(-pi) == -tanh(pi)
    assert tanh(2**1024 * E) == tanh(2**1024 * E)
    assert tanh(-2**1024 * E) == -tanh(2**1024 * E)
    # integer multiples of pi*I are zeros of tanh
    assert tanh(pi*I) == 0
    assert tanh(-pi*I) == 0
    assert tanh(2*pi*I) == 0
    assert tanh(-2*pi*I) == 0
    assert tanh(-3*10**73*pi*I) == 0
    assert tanh(7*10**103*pi*I) == 0
    # NOTE(review): the next four compare an expression to (a sign-flip of)
    # itself; they document that odd multiples of pi*I/2 stay unevaluated.
    assert tanh(pi*I/2) == tanh(pi*I/2)
    assert tanh(-pi*I/2) == -tanh(pi*I/2)
    assert tanh(5*pi*I/2) == tanh(5*pi*I/2)
    assert tanh(7*pi*I/2) == tanh(7*pi*I/2)
    # rational multiples of pi*I reduce to exact tangent values
    assert tanh(pi*I/3) == sqrt(3)*I
    assert tanh(-2*pi*I/3) == sqrt(3)*I
    assert tanh(pi*I/4) == I
    assert tanh(-pi*I/4) == -I
    assert tanh(17*pi*I/4) == I
    assert tanh(-3*pi*I/4) == I
    assert tanh(pi*I/6) == I/sqrt(3)
    assert tanh(-pi*I/6) == -I/sqrt(3)
    assert tanh(7*pi*I/6) == I/sqrt(3)
    assert tanh(-5*pi*I/6) == I/sqrt(3)
    assert tanh(pi*I/105) == tan(pi/105)*I
    assert tanh(-pi*I/105) == -tan(pi/105)*I
    assert tanh(2 + 3*I) == tanh(2 + 3*I)
    assert tanh(x*I) == tan(x)*I
    assert tanh(k*pi*I) == 0
    assert tanh(17*k*pi*I) == 0
    assert tanh(k*pi*I/2) == tan(k*pi/2)*I
def test_tanh_series():
    """Check the Maclaurin expansion of tanh up to O(x**10)."""
    x = Symbol('x')
    expected = x - x**3/3 + 2*x**5/15 - 17*x**7/315 + 62*x**9/2835 + O(x**10)
    assert tanh(x).series(x, 0, 10) == expected
def test_coth():
    """Automatic evaluation of coth at special values (coth is odd)."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert coth(nan) == nan
    assert coth(zoo) == nan
    assert coth(oo) == 1
    assert coth(-oo) == -1
    # NOTE(review): the first comparison below is an expression compared to
    # itself and is vacuous; the second pins the actual value at the pole.
    assert coth(0) == coth(0)
    assert coth(0) == zoo
    assert coth(1) == coth(1)
    assert coth(-1) == -coth(1)
    assert coth(x) == coth(x)
    assert coth(-x) == -coth(x)
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == cot(pi)*I
    assert coth(2**1024 * E) == coth(2**1024 * E)
    assert coth(-2**1024 * E) == -coth(2**1024 * E)
    # coth(I*x) rewrites to -I*cot(x), including huge integer multiples of pi
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == I*cot(pi)
    assert coth(2*pi*I) == -I*cot(2*pi)
    assert coth(-2*pi*I) == I*cot(2*pi)
    assert coth(-3*10**73*pi*I) == I*cot(3*10**73*pi)
    assert coth(7*10**103*pi*I) == -I*cot(7*10**103*pi)
    # odd multiples of pi*I/2 are zeros of coth
    assert coth(pi*I/2) == 0
    assert coth(-pi*I/2) == 0
    assert coth(5*pi*I/2) == 0
    assert coth(7*pi*I/2) == 0
    # rational multiples of pi*I reduce to exact cotangent values
    assert coth(pi*I/3) == -I/sqrt(3)
    assert coth(-2*pi*I/3) == -I/sqrt(3)
    assert coth(pi*I/4) == -I
    assert coth(-pi*I/4) == I
    assert coth(17*pi*I/4) == -I
    assert coth(-3*pi*I/4) == -I
    assert coth(pi*I/6) == -sqrt(3)*I
    assert coth(-pi*I/6) == sqrt(3)*I
    assert coth(7*pi*I/6) == -sqrt(3)*I
    assert coth(-5*pi*I/6) == -sqrt(3)*I
    assert coth(pi*I/105) == -cot(pi/105)*I
    assert coth(-pi*I/105) == cot(pi/105)*I
    assert coth(2 + 3*I) == coth(2 + 3*I)
    assert coth(x*I) == -cot(x)*I
    assert coth(k*pi*I) == -cot(k*pi)*I
    assert coth(17*k*pi*I) == -cot(17*k*pi)*I
    # NOTE(review): duplicate of the k*pi*I assertion two lines above.
    assert coth(k*pi*I) == -cot(k*pi)*I
def test_coth_series():
    """Check the Laurent expansion of coth about 0, led by the 1/x pole."""
    x = Symbol('x')
    expected = 1/x + x/3 - x**3/45 + 2*x**5/945 - x**7/4725 + O(x**8)
    assert coth(x).series(x, 0, 8) == expected
def test_csch():
    """Automatic evaluation of csch at special values (csch is odd)."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert csch(nan) == nan
    assert csch(zoo) == nan
    assert csch(oo) == 0
    assert csch(-oo) == 0
    assert csch(0) == zoo
    assert csch(-1) == -csch(1)
    assert csch(-x) == -csch(x)
    assert csch(-pi) == -csch(pi)
    assert csch(-2**1024 * E) == -csch(2**1024 * E)
    # integer multiples of pi*I are poles of csch
    assert csch(pi*I) == zoo
    assert csch(-pi*I) == zoo
    assert csch(2*pi*I) == zoo
    assert csch(-2*pi*I) == zoo
    assert csch(-3*10**73*pi*I) == zoo
    assert csch(7*10**103*pi*I) == zoo
    # rational multiples of pi*I reduce to exact cosecant values
    assert csch(pi*I/2) == -I
    assert csch(-pi*I/2) == I
    assert csch(5*pi*I/2) == -I
    assert csch(7*pi*I/2) == I
    assert csch(pi*I/3) == -2/sqrt(3)*I
    assert csch(-2*pi*I/3) == 2/sqrt(3)*I
    assert csch(pi*I/4) == -sqrt(2)*I
    assert csch(-pi*I/4) == sqrt(2)*I
    assert csch(7*pi*I/4) == sqrt(2)*I
    assert csch(-3*pi*I/4) == sqrt(2)*I
    assert csch(pi*I/6) == -2*I
    assert csch(-pi*I/6) == 2*I
    assert csch(7*pi*I/6) == 2*I
    assert csch(-7*pi*I/6) == -2*I
    assert csch(-5*pi*I/6) == 2*I
    assert csch(pi*I/105) == -1/sin(pi/105)*I
    assert csch(-pi*I/105) == 1/sin(pi/105)*I
    assert csch(x*I) == -1/sin(x)*I
    assert csch(k*pi*I) == zoo
    assert csch(17*k*pi*I) == zoo
    assert csch(k*pi*I/2) == -1/sin(k*pi/2)*I
def test_csch_series():
    """Check the Laurent expansion of csch about 0, led by the 1/x pole."""
    x = Symbol('x')
    expected = (1/x - x/6 + 7*x**3/360 - 31*x**5/15120 + 127*x**7/604800
                - 73*x**9/3421440 + O(x**10))
    assert csch(x).series(x, 0, 10) == expected
def test_sech():
    """Automatic evaluation of sech at special values (sech is even)."""
    x, y = symbols('x, y')
    k = Symbol('k', integer=True)
    assert sech(nan) == nan
    assert sech(zoo) == nan
    assert sech(oo) == 0
    assert sech(-oo) == 0
    assert sech(0) == 1
    assert sech(-1) == sech(1)
    assert sech(-x) == sech(x)
    assert sech(pi*I) == sec(pi)
    assert sech(-pi*I) == sec(pi)
    assert sech(-2**1024 * E) == sech(2**1024 * E)
    # odd multiples of pi*I/2 are poles of sech
    assert sech(pi*I/2) == zoo
    assert sech(-pi*I/2) == zoo
    assert sech((-3*10**73 + 1)*pi*I/2) == zoo
    assert sech((7*10**103 + 1)*pi*I/2) == zoo
    assert sech(pi*I) == -1
    assert sech(-pi*I) == -1
    assert sech(5*pi*I) == -1
    assert sech(8*pi*I) == 1
    # rational multiples of pi*I reduce to exact secant values
    assert sech(pi*I/3) == 2
    assert sech(-2*pi*I/3) == -2
    assert sech(pi*I/4) == sqrt(2)
    assert sech(-pi*I/4) == sqrt(2)
    assert sech(5*pi*I/4) == -sqrt(2)
    assert sech(-5*pi*I/4) == -sqrt(2)
    assert sech(pi*I/6) == 2/sqrt(3)
    assert sech(-pi*I/6) == 2/sqrt(3)
    assert sech(7*pi*I/6) == -2/sqrt(3)
    assert sech(-5*pi*I/6) == -2/sqrt(3)
    assert sech(pi*I/105) == 1/cos(pi/105)
    assert sech(-pi*I/105) == 1/cos(pi/105)
    assert sech(x*I) == 1/cos(x)
    assert sech(k*pi*I) == 1/cos(k*pi)
    assert sech(17*k*pi*I) == 1/cos(17*k*pi)
def test_sech_series():
    """Check the Maclaurin expansion of sech up to O(x**10)."""
    x = Symbol('x')
    expected = 1 - x**2/2 + 5*x**4/24 - 61*x**6/720 + 277*x**8/8064 + O(x**10)
    assert sech(x).series(x, 0, 10) == expected
def test_asinh():
    """Automatic evaluation of asinh at special values (asinh is odd)."""
    x, y = symbols('x,y')
    assert asinh(x) == asinh(x)
    assert asinh(-x) == -asinh(x)
    assert asinh(nan) == nan
    assert asinh( 0) == 0
    assert asinh(+1) == log(sqrt(2) + 1)
    assert asinh(-1) == log(sqrt(2) - 1)
    # purely imaginary arguments map to rational multiples of pi*I
    assert asinh(I) == pi*I/2
    assert asinh(-I) == -pi*I/2
    assert asinh(I/2) == pi*I/6
    assert asinh(-I/2) == -pi*I/6
    assert asinh(oo) == oo
    assert asinh(-oo) == -oo
    assert asinh(I*oo) == oo
    assert asinh(-I *oo) == -oo
    assert asinh(zoo) == zoo
    # radical-valued imaginary arguments with known exact arcsines
    assert asinh(I *(sqrt(3) - 1)/(2**(S(3)/2))) == pi*I/12
    assert asinh(-I *(sqrt(3) - 1)/(2**(S(3)/2))) == -pi*I/12
    assert asinh(I*(sqrt(5) - 1)/4) == pi*I/10
    assert asinh(-I*(sqrt(5) - 1)/4) == -pi*I/10
    assert asinh(I*(sqrt(5) + 1)/4) == 3*pi*I/10
    assert asinh(-I*(sqrt(5) + 1)/4) == -3*pi*I/10
def test_asinh_series():
    """Series expansion and individual Taylor terms of asinh."""
    x = Symbol('x')
    expected = x - x**3/6 + 3*x**5/40 - 5*x**7/112 + O(x**8)
    assert asinh(x).series(x, 0, 8) == expected
    fifth = asinh(x).taylor_term(5, x)
    assert fifth == 3*x**5/40
    assert asinh(x).taylor_term(7, x, fifth, 0) == -5*x**7/112
def test_acosh():
    """Automatic evaluation of acosh at exactly-known special points."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCosh/03/01/
    # at specific points
    x = Symbol('x')
    # NOTE(review): this compares an expression to itself, so it only
    # documents the intent that acosh(-x) stays unevaluated.
    assert acosh(-x) == acosh(-x)
    assert acosh(1) == 0
    assert acosh(-1) == pi*I
    assert acosh(0) == I*pi/2
    assert acosh(Rational(1, 2)) == I*pi/3
    assert acosh(Rational(-1, 2)) == 2*pi*I/3
    assert acosh(zoo) == oo
    assert acosh(I) == log(I*(1 + sqrt(2)))
    assert acosh(-I) == log(-I*(1 + sqrt(2)))
    # radical-valued arguments with known exact arccosines
    assert acosh((sqrt(3) - 1)/(2*sqrt(2))) == 5*pi*I/12
    assert acosh(-(sqrt(3) - 1)/(2*sqrt(2))) == 7*pi*I/12
    assert acosh(sqrt(2)/2) == I*pi/4
    assert acosh(-sqrt(2)/2) == 3*I*pi/4
    assert acosh(sqrt(3)/2) == I*pi/6
    assert acosh(-sqrt(3)/2) == 5*I*pi/6
    assert acosh(sqrt(2 + sqrt(2))/2) == I*pi/8
    assert acosh(-sqrt(2 + sqrt(2))/2) == 7*I*pi/8
    assert acosh(sqrt(2 - sqrt(2))/2) == 3*I*pi/8
    assert acosh(-sqrt(2 - sqrt(2))/2) == 5*I*pi/8
    assert acosh((1 + sqrt(3))/(2*sqrt(2))) == I*pi/12
    assert acosh(-(1 + sqrt(3))/(2*sqrt(2))) == 11*I*pi/12
    assert acosh((sqrt(5) + 1)/4) == I*pi/5
    assert acosh(-(sqrt(5) + 1)/4) == 4*I*pi/5
def test_acosh_infinities():
    """acosh maps every direction of infinity to +oo."""
    for arg in (oo, -oo, I*oo, -I*oo):
        assert acosh(arg) == oo
def test_acosh_series():
    """Series expansion and individual Taylor terms of acosh about 0."""
    x = Symbol('x')
    expected = -I*x + pi*I/2 - I*x**3/6 - 3*I*x**5/40 - 5*I*x**7/112 + O(x**8)
    assert acosh(x).series(x, 0, 8) == expected
    fifth = acosh(x).taylor_term(5, x)
    assert fifth == -3*I*x**5/40
    assert acosh(x).taylor_term(7, x, fifth, 0) == -5*I*x**7/112
# TODO please write more tests -- see issue 3751
def test_atanh():
    """Automatic evaluation of atanh at special values (atanh is odd)."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcTanh/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert atanh(0) == 0
    assert atanh(I) == I*pi/4
    assert atanh(-I) == -I*pi/4
    # +/-1 are the logarithmic branch points of atanh
    assert atanh(1) == oo
    assert atanh(-1) == -oo
    # at infinites
    assert atanh(I*oo) == I*pi/2
    assert atanh(-I*oo) == -I*pi/2
    assert atanh(zoo) == nan
    #properties
    assert atanh(-x) == -atanh(x)
    # imaginary arguments with known exact arctangents
    assert atanh(I/sqrt(3)) == I*pi/6
    assert atanh(-I/sqrt(3)) == -I*pi/6
    assert atanh(I*sqrt(3)) == I*pi/3
    assert atanh(-I*sqrt(3)) == -I*pi/3
    assert atanh(I*(1 + sqrt(2))) == 3*pi*I/8
    assert atanh(I*(sqrt(2) - 1)) == pi*I/8
    assert atanh(I*(1 - sqrt(2))) == -pi*I/8
    assert atanh(-I*(1 + sqrt(2))) == -3*pi*I/8
    assert atanh(I*sqrt(5 + 2*sqrt(5))) == 2*I*pi/5
    assert atanh(-I*sqrt(5 + 2*sqrt(5))) == -2*I*pi/5
    assert atanh(I*(2 - sqrt(3))) == pi*I/12
    assert atanh(I*(sqrt(3) - 2)) == -pi*I/12
    # also checked in test_atanh_infinities below
    assert atanh(oo) == -I*pi/2
def test_atanh_series():
    """Check the Maclaurin expansion of atanh up to O(x**10)."""
    x = Symbol('x')
    expected = x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
    assert atanh(x).series(x, 0, 10) == expected
def test_atanh_infinities():
    """atanh at real infinities yields +/- I*pi/2."""
    for arg, expected in ((oo, -I*pi/2), (-oo, I*pi/2)):
        assert atanh(arg) == expected
# TODO please write more tests -- see issue 3751
def test_acoth():
    """Automatic evaluation of acoth at special values (acoth is odd)."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCoth/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert acoth(0) == I*pi/2
    assert acoth(I) == -I*pi/4
    assert acoth(-I) == I*pi/4
    # +/-1 are the logarithmic branch points of acoth
    assert acoth(1) == oo
    assert acoth(-1) == -oo
    # at infinites
    assert acoth(oo) == 0
    assert acoth(-oo) == 0
    assert acoth(I*oo) == 0
    assert acoth(-I*oo) == 0
    assert acoth(zoo) == 0
    #properties
    assert acoth(-x) == -acoth(x)
    # imaginary arguments with known exact arccotangents
    assert acoth(I/sqrt(3)) == -I*pi/3
    assert acoth(-I/sqrt(3)) == I*pi/3
    assert acoth(I*sqrt(3)) == -I*pi/6
    assert acoth(-I*sqrt(3)) == I*pi/6
    assert acoth(I*(1 + sqrt(2))) == -pi*I/8
    assert acoth(-I*(sqrt(2) + 1)) == pi*I/8
    assert acoth(I*(1 - sqrt(2))) == 3*pi*I/8
    assert acoth(I*(sqrt(2) - 1)) == -3*pi*I/8
    assert acoth(I*sqrt(5 + 2*sqrt(5))) == -I*pi/10
    assert acoth(-I*sqrt(5 + 2*sqrt(5))) == I*pi/10
    assert acoth(I*(2 + sqrt(3))) == -pi*I/12
    assert acoth(-I*(2 + sqrt(3))) == pi*I/12
    assert acoth(I*(2 - sqrt(3))) == -5*pi*I/12
    assert acoth(I*(sqrt(3) - 2)) == 5*pi*I/12
def test_acoth_series():
    """Expansion of acoth about 0; the branch contributes the I*pi/2 term."""
    x = Symbol('x')
    expected = I*pi/2 + x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
    assert acoth(x).series(x, 0, 10) == expected
def test_inverses():
    """Each hyperbolic function reports its inverse; cosh has none."""
    x = Symbol('x')
    pairs = [(sinh, asinh), (tanh, atanh), (coth, acoth),
             (asinh, sinh), (acosh, cosh), (atanh, tanh), (acoth, coth)]
    for func, inv in pairs:
        assert func(x).inverse() == inv
    # cosh is not one-to-one, so asking for its inverse raises
    raises(AttributeError, lambda: cosh(x).inverse())
def test_leading_term():
    """Leading-order behaviour of hyperbolic functions as x -> 0."""
    x = Symbol('x')
    assert cosh(x).as_leading_term(x) == 1
    assert coth(x).as_leading_term(x) == 1/x
    assert acosh(x).as_leading_term(x) == I*pi/2
    assert acoth(x).as_leading_term(x) == I*pi/2
    # odd functions behave like x near the origin
    for func in [sinh, tanh, asinh, atanh]:
        assert func(x).as_leading_term(x) == x
    # an expression not depending polynomially on x is its own leading term
    for func in [sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth]:
        for arg in (1/x, S.Half):
            eq = func(arg)
            assert eq.as_leading_term(x) == eq
    for func in [csch, sech]:
        eq = func(S.Half)
        assert eq.as_leading_term(x) == eq
def test_complex():
    """Conjugation and real/imaginary expansion of hyperbolics at a + b*I."""
    a, b = symbols('a,b', real=True)
    z = a + b*I
    # conjugating the result equals evaluating at the conjugated argument
    for func in [sinh, cosh, tanh, coth, sech, csch]:
        assert func(z).conjugate() == func(a - b*I)
    for deep in [True, False]:
        assert sinh(z).expand(
            complex=True, deep=deep) == sinh(a)*cos(b) + I*cosh(a)*sin(b)
        assert cosh(z).expand(
            complex=True, deep=deep) == cosh(a)*cos(b) + I*sinh(a)*sin(b)
        assert tanh(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
            a)/(cos(b)**2 + sinh(a)**2) + I*sin(b)*cos(b)/(cos(b)**2 + sinh(a)**2)
        assert coth(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
            a)/(sin(b)**2 + sinh(a)**2) - I*sin(b)*cos(b)/(sin(b)**2 + sinh(a)**2)
        assert csch(z).expand(complex=True, deep=deep) == cos(b) * sinh(a) / (sin(b)**2\
            *cosh(a)**2 + cos(b)**2 * sinh(a)**2) - I*sin(b) * cosh(a) / (sin(b)**2\
            *cosh(a)**2 + cos(b)**2 * sinh(a)**2)
        assert sech(z).expand(complex=True, deep=deep) == cos(b) * cosh(a) / (sin(b)**2\
            *sinh(a)**2 + cos(b)**2 * cosh(a)**2) - I*sin(b) * sinh(a) / (sin(b)**2\
            *sinh(a)**2 + cos(b)**2 * cosh(a)**2)
def test_complex_2899():
    """expand(complex=True) on a real-argument hyperbolic is a no-op."""
    a, b = symbols('a,b', real=True)
    for func in (sinh, cosh, tanh, coth):
        for deep in (True, False):
            assert func(a).expand(complex=True, deep=deep) == func(a)
def test_simplifications():
    """Compositions hyp(ahyp(x)) simplify to the standard algebraic forms."""
    x = Symbol('x')
    assert sinh(asinh(x)) == x
    assert sinh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1)
    assert sinh(atanh(x)) == x/sqrt(1 - x**2)
    assert sinh(acoth(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
    assert cosh(asinh(x)) == sqrt(1 + x**2)
    assert cosh(acosh(x)) == x
    assert cosh(atanh(x)) == 1/sqrt(1 - x**2)
    assert cosh(acoth(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
    assert tanh(asinh(x)) == x/sqrt(1 + x**2)
    assert tanh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1) / x
    assert tanh(atanh(x)) == x
    assert tanh(acoth(x)) == 1/x
    assert coth(asinh(x)) == sqrt(1 + x**2)/x
    assert coth(acosh(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
    assert coth(atanh(x)) == 1/x
    assert coth(acoth(x)) == x
    assert csch(asinh(x)) == 1/x
    assert csch(acosh(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
    assert csch(atanh(x)) == sqrt(1 - x**2)/x
    assert csch(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)
    assert sech(asinh(x)) == 1/sqrt(1 + x**2)
    assert sech(acosh(x)) == 1/x
    assert sech(atanh(x)) == sqrt(1 - x**2)
    assert sech(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)/x
def test_issue_4136():
    """Regression: cosh(asinh(3/2)) evaluates via cosh**2 = 1 + sinh**2."""
    value = cosh(asinh(Integer(3)/2))
    assert value == sqrt(Integer(13)/4)
def test_sinh_rewrite():
    """Rewrite sinh in terms of exp, cosh, tanh and coth."""
    x = Symbol('x')
    exp_form = (exp(x) - exp(-x))/2
    assert sinh(x).rewrite(exp) == exp_form == sinh(x).rewrite('tractable')
    assert sinh(x).rewrite(cosh) == -I*cosh(x + I*pi/2)
    th = tanh(S.Half*x)
    assert sinh(x).rewrite(tanh) == 2*th/(1 - th**2)
    ch = coth(S.Half*x)
    assert sinh(x).rewrite(coth) == 2*ch/(ch**2 - 1)
def test_cosh_rewrite():
    """Rewrite cosh in terms of exp, sinh, tanh and coth."""
    x = Symbol('x')
    exp_form = (exp(x) + exp(-x))/2
    assert cosh(x).rewrite(exp) == exp_form == cosh(x).rewrite('tractable')
    assert cosh(x).rewrite(sinh) == -I*sinh(x + I*pi/2)
    th2 = tanh(S.Half*x)**2
    assert cosh(x).rewrite(tanh) == (1 + th2)/(1 - th2)
    ch2 = coth(S.Half*x)**2
    assert cosh(x).rewrite(coth) == (ch2 + 1)/(ch2 - 1)
def test_tanh_rewrite():
    """Rewrite tanh in terms of exp, sinh, cosh and coth."""
    x = Symbol('x')
    pos, neg = exp(x), exp(-x)
    assert tanh(x).rewrite(exp) == (pos - neg)/(pos + neg) \
        == tanh(x).rewrite('tractable')
    assert tanh(x).rewrite(sinh) == I*sinh(x)/sinh(I*pi/2 - x)
    assert tanh(x).rewrite(cosh) == I*cosh(I*pi/2 - x)/cosh(x)
    assert tanh(x).rewrite(coth) == 1/coth(x)
def test_coth_rewrite():
    """Rewrite coth in terms of exp, sinh, cosh and tanh."""
    x = Symbol('x')
    pos, neg = exp(x), exp(-x)
    assert coth(x).rewrite(exp) == (pos + neg)/(pos - neg) \
        == coth(x).rewrite('tractable')
    assert coth(x).rewrite(sinh) == -I*sinh(I*pi/2 - x)/sinh(x)
    assert coth(x).rewrite(cosh) == -I*cosh(x)/cosh(I*pi/2 - x)
    assert coth(x).rewrite(tanh) == 1/tanh(x)
def test_csch_rewrite():
    """Rewrite csch in terms of exp, cosh, tanh and coth."""
    x = Symbol('x')
    exp_form = 1 / (exp(x)/2 - exp(-x)/2)
    assert csch(x).rewrite(exp) == exp_form == csch(x).rewrite('tractable')
    assert csch(x).rewrite(cosh) == I/cosh(x + I*pi/2)
    th = tanh(S.Half*x)
    assert csch(x).rewrite(tanh) == (1 - th**2)/(2*th)
    ch = coth(S.Half*x)
    assert csch(x).rewrite(coth) == (ch**2 - 1)/(2*ch)
def test_sech_rewrite():
    """Rewrite sech in terms of exp, sinh, tanh and coth."""
    x = Symbol('x')
    exp_form = 1 / (exp(x)/2 + exp(-x)/2)
    assert sech(x).rewrite(exp) == exp_form == sech(x).rewrite('tractable')
    assert sech(x).rewrite(sinh) == I/sinh(x + I*pi/2)
    th2 = tanh(S.Half*x)**2
    assert sech(x).rewrite(tanh) == (1 - th2)/(1 + th2)
    ch2 = coth(S.Half*x)**2
    assert sech(x).rewrite(coth) == (ch2 - 1)/(ch2 + 1)
def test_derivs():
    """First derivatives of the hyperbolic functions and their inverses."""
    x = Symbol('x')
    assert coth(x).diff(x) == -sinh(x)**(-2)
    assert sinh(x).diff(x) == cosh(x)
    assert cosh(x).diff(x) == sinh(x)
    assert tanh(x).diff(x) == -tanh(x)**2 + 1
    assert csch(x).diff(x) == -coth(x)*csch(x)
    assert sech(x).diff(x) == -tanh(x)*sech(x)
    assert acoth(x).diff(x) == 1/(-x**2 + 1)
    assert asinh(x).diff(x) == 1/sqrt(x**2 + 1)
    assert acosh(x).diff(x) == 1/sqrt(x**2 - 1)
    assert atanh(x).diff(x) == 1/(-x**2 + 1)
def test_sinh_expansion():
    """Addition, double- and triple-argument expansions of sinh."""
    x, y = symbols('x,y')
    assert sinh(x + y).expand(trig=True) == sinh(x)*cosh(y) + cosh(x)*sinh(y)
    assert sinh(2*x).expand(trig=True) == 2*sinh(x)*cosh(x)
    triple = sinh(3*x).expand(trig=True).expand()
    assert triple == sinh(x)**3 + 3*sinh(x)*cosh(x)**2
def test_cosh_expansion():
    """Addition, double- and triple-argument expansions of cosh."""
    x, y = symbols('x,y')
    assert cosh(x + y).expand(trig=True) == cosh(x)*cosh(y) + sinh(x)*sinh(y)
    assert cosh(2*x).expand(trig=True) == cosh(x)**2 + sinh(x)**2
    triple = cosh(3*x).expand(trig=True).expand()
    assert triple == 3*sinh(x)**2*cosh(x) + cosh(x)**3
|
Mitchkoens/sympy
|
sympy/functions/elementary/tests/test_hyperbolic.py
|
Python
|
bsd-3-clause
| 22,379
|
# -*- coding: utf-8 -*-
# Test configuration: placeholder credentials and local service endpoints.
DEBUG = True
TESTING = True
# Dummy value; real deployments must override this secret.
SECRET_KEY = 'SECRET_KEY'
DATABASE_URI = 'mysql+pymysql://root:root@127.0.0.1/git_webhook'
# Celery broker, Celery result backend and the socket message queue all
# point at the same local Redis database (db 0).
CELERY_BROKER_URL = 'redis://:@127.0.0.1:6379/0'
CELERY_RESULT_BACKEND = 'redis://:@127.0.0.1:6379/0'
SOCKET_MESSAGE_QUEUE = 'redis://:@127.0.0.1:6379/0'
# Dummy GitHub OAuth application credentials for tests.
GITHUB_CLIENT_ID = '123'
GITHUB_CLIENT_SECRET = 'SECRET'
|
NetEaseGame/git-webhook
|
app/config_test.py
|
Python
|
mit
| 357
|
# References:
#
# https://www.tensorflow.org/guide/low_level_intro
#
# only needed for python 2.7
# from __future__ import absolute_import
# from __future__ import division
# from __future__ import print_function
import numpy as np
from numpy import array
from numpy import float32
# a complete input set on 7 bits
# useful for training various sorts of data
#
# Generated programmatically instead of hand-typing all 128 rows (the
# original literal tables were error-prone to maintain): row i holds the
# 7-bit binary representation of i, most significant bit first.
bin7 = array([[(i >> (6 - j)) & 1 for j in range(7)] for i in range(128)])
'''
Train the network to count to 3
column 0: less than 3
column 1: exactly 3
column 2: more than 3
'''


def _count3_row(i):
    # One-hot label for row i of bin7, keyed on the number of set bits.
    ones = bin(i).count('1')
    if ones < 3:
        return [1, 0, 0]
    if ones == 3:
        return [0, 1, 0]
    return [0, 0, 1]


count3 = array([_count3_row(i) for i in range(128)])
# this takes a looong time to index, and
# python may crash several times before indexing is complete
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
# Two-layer sigmoid network: 7 inputs -> 8 hidden units -> 3 one-hot outputs.
model = Sequential()
model.add(Dense(8,
                activation=keras.activations.sigmoid,
                ))
model.add(Dense(3,
                activation=keras.activations.sigmoid,
                ))
# Mean-squared-error loss with Adam; binary accuracy tracks per-output hits.
model.compile(
    optimizer=tf.train.AdamOptimizer(0.001),
    # loss=keras.losses.categorical_crossentropy,
    loss=keras.losses.mse,
    metrics=[keras.metrics.binary_accuracy]
)
# This is the process I used to train my weights
# model.fit(bin7, count3, epochs=2000)
# myWeights = model.get_weights()
# np.set_printoptions(suppress=True)
# np.set_printoptions(precision=2)
# print('myWeights =', myWeights)
# These are the weights I got, pretty-printed
# Pre-trained weights; do not edit by hand — regenerate via the recipe above.
myWeights = [
    # first layer, 7x8
    array([[ 1.2 , -1.16, -1.97, 2.16, 0.97, 0.86, -1.2 , 1.12],
           [ 1.21, -1.17, -1.97, 2.16, 0.84, 0.76, -1.19, 1.22],
           [ 1.19, -1.2 , -1.98, 2.15, 0.87, 0.84, -1.19, 1.13],
           [ 1.21, -1.2 , -1.97, 2.15, 0.89, 0.8 , -1.2 , 1.16],
           [ 1.21, -1.12, -1.97, 2.16, 0.99, 0.8 , -1.21, 1.18],
           [ 1.23, -1.09, -1.98, 2.15, 1.12, 0.81, -1.24, 1.13],
           [ 1.24, -1.11, -1.99, 2.14, 1. , 0.77, -1.23, 1.17]],
          dtype=float32),
    # biases for 8 intermediate nodes
    array([-4.57, 3.13, 4. , -4.44, -1.08, -3.11, 4.39, -4.35],
          dtype=float32),
    # second layer, 8x3
    array([[-2.37, -1.54, 2.82],
           [ 2.57, -0.09, -3. ],
           [ 3.42, -2.18, -4.26],
           [-3.27, 1.66, 2.1 ],
           [-1.64, 0.12, -0.26],
           [-1.85, -1.73, 2.25],
           [ 2.71, 0.95, -4.85],
           [-2.82, -1.4 , 2.69]], dtype=float32),
    # biases for 3 output nodes
    array([ 0.21, -0.39, -1.22], dtype=float32)
]
# test the model and your weights
# model.fit(bin7, count3, epochs=1)
# model.set_weights(myWeights)
# predict3 = model.predict(bin7)
# np.set_printoptions(suppress=True)
# np.set_printoptions(precision=1)
# print('prediction =', predict3)
# Registry consumed by the surrounding framework: name -> fixtures.
Examples = {
    'count3' : [ bin7, count3, model, myWeights ],
}
|
WmHHooper/aima-python
|
submissions/aardvark/myNN.py
|
Python
|
mit
| 8,182
|
from sqlalchemy.orm import create_session, relationship, mapper, \
contains_eager, joinedload, subqueryload, subqueryload_all,\
Session, aliased, with_polymorphic
from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL, fixtures
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import assert_raises, eq_, is_
class Company(fixtures.ComparableEntity):
    """Fixture entity: a company."""
    pass
class Person(fixtures.ComparableEntity):
    """Fixture entity: polymorphic base of the people hierarchy."""
    pass
class Engineer(Person):
    """Fixture entity: engineer subtype of Person."""
    pass
class Manager(Person):
    """Fixture entity: manager subtype of Person."""
    pass
class Boss(Manager):
    """Fixture entity: boss subtype of Manager."""
    pass
class Machine(fixtures.ComparableEntity):
    """Fixture entity: a machine."""
    pass
class Paperwork(fixtures.ComparableEntity):
    """Fixture entity: paperwork."""
    pass
class SelfReferentialTestJoinedToBase(fixtures.MappedTest):
    """Self-referential relationship from a joined subclass (Engineer)
    back to its polymorphic base (Person) via ``reports_to_id``."""
    run_setup_mappers = 'once'
    @classmethod
    def define_tables(cls, metadata):
        # Base table carries the polymorphic discriminator column 'type'.
        Table('people', metadata,
            Column('person_id', Integer,
                primary_key=True,
                test_needs_autoincrement=True),
            Column('name', String(50)),
            Column('type', String(30)))
        # Joined-inheritance table; reports_to_id points at the BASE table.
        Table('engineers', metadata,
            Column('person_id', Integer,
                ForeignKey('people.person_id'),
                primary_key=True),
            Column('primary_language', String(50)),
            Column('reports_to_id', Integer,
                ForeignKey('people.person_id')))
    @classmethod
    def setup_mappers(cls):
        engineers, people = cls.tables.engineers, cls.tables.people
        mapper(Person, people,
            polymorphic_on=people.c.type,
            polymorphic_identity='person')
        # Explicit inherit_condition is needed because engineers has TWO
        # foreign keys to people (inheritance join vs. reports_to).
        mapper(Engineer, engineers,
            inherits=Person,
            inherit_condition=engineers.c.person_id == people.c.person_id,
            polymorphic_identity='engineer',
            properties={
                'reports_to':relationship(
                    Person,
                    primaryjoin=
                    people.c.person_id == engineers.c.reports_to_id)})
    def test_has(self):
        # has() on the self-referential relationship filters correctly.
        p1 = Person(name='dogbert')
        e1 = Engineer(name='dilbert', primary_language='java', reports_to=p1)
        sess = create_session()
        sess.add(p1)
        sess.add(e1)
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(Engineer)
            .filter(Engineer.reports_to.has(Person.name == 'dogbert'))
            .first(),
            Engineer(name='dilbert'))
    def test_oftype_aliases_in_exists(self):
        # of_type() inside has() aliases the subclass in the EXISTS clause.
        e1 = Engineer(name='dilbert', primary_language='java')
        e2 = Engineer(name='wally', primary_language='c++', reports_to=e1)
        sess = create_session()
        sess.add_all([e1, e2])
        sess.flush()
        eq_(sess.query(Engineer)
            .filter(Engineer.reports_to
                .of_type(Engineer)
                .has(Engineer.name == 'dilbert'))
            .first(),
            e2)
    def test_join(self):
        # aliased join along reports_to, filtered on the base class.
        p1 = Person(name='dogbert')
        e1 = Engineer(name='dilbert', primary_language='java', reports_to=p1)
        sess = create_session()
        sess.add(p1)
        sess.add(e1)
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(Engineer)
            .join('reports_to', aliased=True)
            .filter(Person.name == 'dogbert').first(),
            Engineer(name='dilbert'))
class SelfReferentialJ2JTest(fixtures.MappedTest):
    """Joined-to-joined self-referential relationship: Engineer.reports_to
    targets the sibling joined subclass Manager."""
    run_setup_mappers = 'once'
    @classmethod
    def define_tables(cls, metadata):
        # Base table with the polymorphic discriminator column 'type'.
        people = Table('people', metadata,
            Column('person_id', Integer,
                primary_key=True,
                test_needs_autoincrement=True),
            Column('name', String(50)),
            Column('type', String(30)))
        # reports_to_id points at the MANAGERS table, not the base.
        engineers = Table('engineers', metadata,
            Column('person_id', Integer,
                ForeignKey('people.person_id'),
                primary_key=True),
            Column('primary_language', String(50)),
            Column('reports_to_id', Integer,
                ForeignKey('managers.person_id'))
            )
        managers = Table('managers', metadata,
            Column('person_id', Integer, ForeignKey('people.person_id'),
                primary_key=True),
            )
    @classmethod
    def setup_mappers(cls):
        engineers = cls.tables.engineers
        managers = cls.tables.managers
        people = cls.tables.people
        mapper(Person, people,
            polymorphic_on=people.c.type,
            polymorphic_identity='person')
        mapper(Manager, managers,
            inherits=Person,
            polymorphic_identity='manager')
        mapper(Engineer, engineers,
            inherits=Person,
            polymorphic_identity='engineer',
            properties={
                'reports_to':relationship(
                    Manager,
                    primaryjoin=
                    managers.c.person_id == engineers.c.reports_to_id,
                    backref='engineers')})
    def test_has(self):
        # has() against the Manager-typed relationship.
        m1 = Manager(name='dogbert')
        e1 = Engineer(name='dilbert', primary_language='java', reports_to=m1)
        sess = create_session()
        sess.add(m1)
        sess.add(e1)
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(Engineer)
            .filter(Engineer.reports_to.has(Manager.name == 'dogbert'))
            .first(),
            Engineer(name='dilbert'))
    def test_join(self):
        # aliased join along reports_to, filtered on Manager.
        m1 = Manager(name='dogbert')
        e1 = Engineer(name='dilbert', primary_language='java', reports_to=m1)
        sess = create_session()
        sess.add(m1)
        sess.add(e1)
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(Engineer)
            .join('reports_to', aliased=True)
            .filter(Manager.name == 'dogbert').first(),
            Engineer(name='dilbert'))
    def test_filter_aliasing(self):
        m1 = Manager(name='dogbert')
        m2 = Manager(name='foo')
        e1 = Engineer(name='wally', primary_language='java', reports_to=m1)
        e2 = Engineer(name='dilbert', primary_language='c++', reports_to=m2)
        e3 = Engineer(name='etc', primary_language='c++')
        sess = create_session()
        sess.add_all([m1, m2, e1, e2, e3])
        sess.flush()
        sess.expunge_all()
        # filter aliasing applied to Engineer doesn't whack Manager
        eq_(sess.query(Manager)
            .join(Manager.engineers)
            .filter(Manager.name == 'dogbert').all(),
            [m1])
        eq_(sess.query(Manager)
            .join(Manager.engineers)
            .filter(Engineer.name == 'dilbert').all(),
            [m2])
        eq_(sess.query(Manager, Engineer)
            .join(Manager.engineers)
            .order_by(Manager.name.desc()).all(),
            [(m2, e2), (m1, e1)])
    def test_relationship_compare(self):
        # direct comparisons of the relationship against None / an instance.
        m1 = Manager(name='dogbert')
        m2 = Manager(name='foo')
        e1 = Engineer(name='dilbert', primary_language='java', reports_to=m1)
        e2 = Engineer(name='wally', primary_language='c++', reports_to=m2)
        e3 = Engineer(name='etc', primary_language='c++')
        sess = create_session()
        sess.add(m1)
        sess.add(m2)
        sess.add(e1)
        sess.add(e2)
        sess.add(e3)
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(Manager)
            .join(Manager.engineers)
            .filter(Engineer.reports_to == None).all(),
            [])
        eq_(sess.query(Manager)
            .join(Manager.engineers)
            .filter(Engineer.reports_to == m1).all(),
            [m1])
class SelfReferentialJ2JSelfTest(fixtures.MappedTest):
    """Joined subclass related to ITSELF: Engineer.reports_to -> Engineer,
    disambiguated with ``remote_side``."""
    run_setup_mappers = 'once'
    @classmethod
    def define_tables(cls, metadata):
        people = Table('people', metadata,
            Column('person_id', Integer,
                primary_key=True,
                test_needs_autoincrement=True),
            Column('name', String(50)),
            Column('type', String(30)))
        # reports_to_id references the engineers table itself.
        engineers = Table('engineers', metadata,
            Column('person_id', Integer,
                ForeignKey('people.person_id'),
                primary_key=True),
            Column('reports_to_id', Integer,
                ForeignKey('engineers.person_id')))
    @classmethod
    def setup_mappers(cls):
        engineers = cls.tables.engineers
        people = cls.tables.people
        mapper(Person, people,
            polymorphic_on=people.c.type,
            polymorphic_identity='person')
        # remote_side marks which column is the "one" side of the
        # self-referential many-to-one.
        mapper(Engineer, engineers,
            inherits=Person,
            polymorphic_identity='engineer',
            properties={
                'reports_to':relationship(
                    Engineer,
                    primaryjoin=
                    engineers.c.person_id == engineers.c.reports_to_id,
                    backref='engineers',
                    remote_side=engineers.c.person_id)})
    def _two_obj_fixture(self):
        # dilbert reports to wally.
        e1 = Engineer(name='wally')
        e2 = Engineer(name='dilbert', reports_to=e1)
        sess = Session()
        sess.add_all([e1, e2])
        sess.commit()
        return sess
    def _five_obj_fixture(self):
        # e3 -> e1, e4 -> e2; e5 reports to nobody.
        sess = Session()
        e1, e2, e3, e4, e5 = [
            Engineer(name='e%d' % (i + 1)) for i in range(5)
        ]
        e3.reports_to = e1
        e4.reports_to = e2
        sess.add_all([e1, e2, e3, e4, e5])
        sess.commit()
        return sess
    def test_has(self):
        sess = self._two_obj_fixture()
        eq_(sess.query(Engineer)
            .filter(Engineer.reports_to.has(Engineer.name == 'wally'))
            .first(),
            Engineer(name='dilbert'))
    def test_join_explicit_alias(self):
        sess = self._five_obj_fixture()
        ea = aliased(Engineer)
        eq_(sess.query(Engineer)
            .join(ea, Engineer.engineers)
            .filter(Engineer.name == 'e1').all(),
            [Engineer(name='e1')])
    def test_join_aliased_flag_one(self):
        sess = self._two_obj_fixture()
        eq_(sess.query(Engineer)
            .join('reports_to', aliased=True)
            .filter(Engineer.name == 'wally').first(),
            Engineer(name='dilbert'))
    def test_join_aliased_flag_two(self):
        sess = self._five_obj_fixture()
        eq_(sess.query(Engineer)
            .join(Engineer.engineers, aliased=True)
            .filter(Engineer.name == 'e4').all(),
            [Engineer(name='e2')])
    def test_relationship_compare(self):
        sess = self._five_obj_fixture()
        e1 = sess.query(Engineer).filter_by(name='e1').one()
        eq_(sess.query(Engineer)
            .join(Engineer.engineers, aliased=True)
            .filter(Engineer.reports_to == None).all(),
            [])
        eq_(sess.query(Engineer)
            .join(Engineer.engineers, aliased=True)
            .filter(Engineer.reports_to == e1).all(),
            [e1])
class M2MFilterTest(fixtures.MappedTest):
    """Filtering across a many-to-many relationship whose target is a
    joined-inheritance subclass (Organization.engineers -> Engineer),
    exercising contains()/any() with and without of_type()."""
    run_setup_mappers = 'once'
    run_inserts = 'once'
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        organizations = Table('organizations', metadata,
                              Column('id', Integer,
                                     primary_key=True,
                                     test_needs_autoincrement=True),
                              Column('name', String(50)))
        # association table linking orgs to the engineers subclass table
        engineers_to_org = Table('engineers_to_org', metadata,
                                 Column('org_id', Integer,
                                        ForeignKey('organizations.id')),
                                 Column('engineer_id', Integer,
                                        ForeignKey('engineers.person_id')))
        people = Table('people', metadata,
                       Column('person_id', Integer,
                              primary_key=True,
                              test_needs_autoincrement=True),
                       Column('name', String(50)),
                       Column('type', String(30)))
        engineers = Table('engineers', metadata,
                          Column('person_id', Integer,
                                 ForeignKey('people.person_id'),
                                 primary_key=True),
                          Column('primary_language', String(50)))
    @classmethod
    def setup_mappers(cls):
        organizations = cls.tables.organizations
        people = cls.tables.people
        engineers = cls.tables.engineers
        engineers_to_org = cls.tables.engineers_to_org
        class Organization(cls.Comparable):
            pass
        mapper(Organization, organizations,
               properties={
                   'engineers':relationship(
                       Engineer,
                       secondary=engineers_to_org,
                       backref='organizations')})
        mapper(Person, people,
               polymorphic_on=people.c.type,
               polymorphic_identity='person')
        mapper(Engineer, engineers,
               inherits=Person,
               polymorphic_identity='engineer')
    @classmethod
    def insert_data(cls):
        # org1 owns e1/e2, org2 owns e3/e4
        Organization = cls.classes.Organization
        e1 = Engineer(name='e1')
        e2 = Engineer(name='e2')
        e3 = Engineer(name='e3')
        e4 = Engineer(name='e4')
        org1 = Organization(name='org1', engineers=[e1, e2])
        org2 = Organization(name='org2', engineers=[e3, e4])
        sess = create_session()
        sess.add(org1)
        sess.add(org2)
        sess.flush()
    def test_not_contains(self):
        Organization = self.classes.Organization
        sess = create_session()
        e1 = sess.query(Person).filter(Engineer.name == 'e1').one()
        eq_(sess.query(Organization)
            .filter(~Organization.engineers
                    .of_type(Engineer)
                    .contains(e1))
            .all(),
            [Organization(name='org2')])
        # this had a bug
        eq_(sess.query(Organization)
            .filter(~Organization.engineers
                    .contains(e1))
            .all(),
            [Organization(name='org2')])
    def test_any(self):
        sess = create_session()
        Organization = self.classes.Organization
        eq_(sess.query(Organization)
            .filter(Organization.engineers
                    .of_type(Engineer)
                    .any(Engineer.name == 'e1'))
            .all(),
            [Organization(name='org1')])
        eq_(sess.query(Organization)
            .filter(Organization.engineers
                    .any(Engineer.name == 'e1'))
            .all(),
            [Organization(name='org1')])
class SelfReferentialM2MTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Self-referential many-to-many between two joined-inheritance
    siblings (Child1.left_child2 <-> Child2.right_children) through a
    'secondary' table; checks both results and compiled SQL."""
    __dialect__ = "default"
    @classmethod
    def define_tables(cls, metadata):
        # secondary references parent.id on both sides
        Table('secondary', metadata,
              Column('left_id', Integer,
                     ForeignKey('parent.id'),
                     nullable=False),
              Column('right_id', Integer,
                     ForeignKey('parent.id'),
                     nullable=False))
        Table('parent', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('cls', String(50)))
        Table('child1', metadata,
              Column('id', Integer,
                     ForeignKey('parent.id'),
                     primary_key=True))
        Table('child2', metadata,
              Column('id', Integer,
                     ForeignKey('parent.id'),
                     primary_key=True))
    @classmethod
    def setup_classes(cls):
        class Parent(cls.Basic):
            pass
        class Child1(Parent):
            pass
        class Child2(Parent):
            pass
    @classmethod
    def setup_mappers(cls):
        child1 = cls.tables.child1
        child2 = cls.tables.child2
        Parent = cls.classes.Parent
        parent = cls.tables.parent
        Child1 = cls.classes.Child1
        Child2 = cls.classes.Child2
        secondary = cls.tables.secondary
        mapper(Parent, parent,
               polymorphic_on=parent.c.cls)
        # note primaryjoin goes to right_id and secondaryjoin to left_id,
        # i.e. the relationship is "reversed" through the secondary table
        mapper(Child1, child1,
               inherits=Parent,
               polymorphic_identity='child1',
               properties={
                   'left_child2':relationship(
                       Child2,
                       secondary=secondary,
                       primaryjoin=parent.c.id == secondary.c.right_id,
                       secondaryjoin=parent.c.id == secondary.c.left_id,
                       uselist=False,
                       backref="right_children")})
        mapper(Child2, child2,
               inherits=Parent,
               polymorphic_identity='child2')
    def test_query_crit(self):
        Child1, Child2 = self.classes.Child1, self.classes.Child2
        sess = create_session()
        c11, c12, c13 = Child1(), Child1(), Child1()
        c21, c22, c23 = Child2(), Child2(), Child2()
        c11.left_child2 = c22
        c12.left_child2 = c22
        c13.left_child2 = c23
        sess.add_all([c11, c12, c13, c21, c22, c23])
        sess.flush()
        # test that the join to Child2 doesn't alias Child1 in the select
        eq_(set(sess.query(Child1).join(Child1.left_child2)),
            set([c11, c12, c13]))
        eq_(set(sess.query(Child1, Child2).join(Child1.left_child2)),
            set([(c11, c22), (c12, c22), (c13, c23)]))
        # test __eq__() on property is annotating correctly
        eq_(set(sess.query(Child2)
                .join(Child2.right_children)
                .filter(Child1.left_child2 == c22)),
            set([c22]))
        # test the same again
        self.assert_compile(
            sess.query(Child2)
            .join(Child2.right_children)
            .filter(Child1.left_child2 == c22)
            .with_labels().statement,
            "SELECT child2.id AS child2_id, parent.id AS parent_id, "
            "parent.cls AS parent_cls FROM secondary AS secondary_1, "
            "parent JOIN child2 ON parent.id = child2.id JOIN secondary AS "
            "secondary_2 ON parent.id = secondary_2.left_id JOIN "
            "(parent AS parent_1 JOIN child1 AS child1_1 ON parent_1.id = child1_1.id) "
            "ON parent_1.id = secondary_2.right_id WHERE "
            "parent_1.id = secondary_1.right_id AND :param_1 = "
            "secondary_1.left_id"
        )
    def test_eager_join(self):
        Child1, Child2 = self.classes.Child1, self.classes.Child2
        sess = create_session()
        c1 = Child1()
        c1.left_child2 = Child2()
        sess.add(c1)
        sess.flush()
        # test that the splicing of the join works here, doesn't break in
        # the middle of "parent join child1"
        q = sess.query(Child1).options(joinedload('left_child2'))
        self.assert_compile(q.limit(1).with_labels().statement,
        "SELECT anon_1.child1_id AS anon_1_child1_id, anon_1.parent_id "
        "AS anon_1_parent_id, anon_1.parent_cls AS anon_1_parent_cls, "
        "child2_1.id AS child2_1_id, parent_1.id AS "
        "parent_1_id, parent_1.cls AS parent_1_cls FROM "
        "(SELECT child1.id AS child1_id, parent.id AS parent_id, "
        "parent.cls AS parent_cls "
        "FROM parent JOIN child1 ON parent.id = child1.id "
        "LIMIT :param_1) AS anon_1 LEFT OUTER JOIN "
        "(secondary AS secondary_1 JOIN "
        "(parent AS parent_1 JOIN child2 AS child2_1 "
        "ON parent_1.id = child2_1.id) ON parent_1.id = secondary_1.left_id) "
        "ON anon_1.parent_id = secondary_1.right_id",
        {'param_1':1})
        # another way to check
        assert q.limit(1).with_labels().subquery().count().scalar() == 1
        assert q.first() is c1
    def test_subquery_load(self):
        Child1, Child2 = self.classes.Child1, self.classes.Child2
        sess = create_session()
        c1 = Child1()
        c1.left_child2 = Child2()
        sess.add(c1)
        sess.flush()
        sess.expunge_all()
        query_ = sess.query(Child1).options(subqueryload('left_child2'))
        for row in query_.all():
            assert row.left_child2
class EagerToSubclassTest(fixtures.MappedTest):
    """Test eager loads to subclass mappers.

    Parent.children targets the Sub subclass directly; tests assert the
    expected number of SQL statements for joined/subquery/contains_eager
    strategies.  Fixture objects p1/p2 are shared via module globals.
    """
    run_setup_classes = 'once'
    run_setup_mappers = 'once'
    run_inserts = 'once'
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(10)))
        Table('base', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('type', String(10)),
              Column('related_id', Integer,
                     ForeignKey('related.id')))
        Table('sub', metadata,
              Column('id', Integer,
                     ForeignKey('base.id'),
                     primary_key=True),
              Column('data', String(10)),
              Column('parent_id', Integer,
                     ForeignKey('parent.id'),
                     nullable=False))
        Table('related', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(10)))
    @classmethod
    def setup_classes(cls):
        class Parent(cls.Comparable):
            pass
        class Base(cls.Comparable):
            pass
        class Sub(Base):
            pass
        class Related(cls.Comparable):
            pass
    @classmethod
    def setup_mappers(cls):
        sub = cls.tables.sub
        Sub = cls.classes.Sub
        base = cls.tables.base
        Base = cls.classes.Base
        parent = cls.tables.parent
        Parent = cls.classes.Parent
        related = cls.tables.related
        Related = cls.classes.Related
        mapper(Parent, parent,
               properties={'children':relationship(Sub, order_by=sub.c.data)})
        mapper(Base, base,
               polymorphic_on=base.c.type,
               polymorphic_identity='b',
               properties={'related':relationship(Related)})
        mapper(Sub, sub,
               inherits=Base,
               polymorphic_identity='s')
        mapper(Related, related)
    @classmethod
    def insert_data(cls):
        # NOTE(review): p1/p2 are module globals read by the test methods
        # below; other test classes in this module reuse the same names.
        global p1, p2
        Parent = cls.classes.Parent
        Sub = cls.classes.Sub
        Related = cls.classes.Related
        sess = Session()
        r1, r2 = Related(data='r1'), Related(data='r2')
        s1 = Sub(data='s1', related=r1)
        s2 = Sub(data='s2', related=r2)
        s3 = Sub(data='s3')
        s4 = Sub(data='s4', related=r2)
        s5 = Sub(data='s5')
        p1 = Parent(data='p1', children=[s1, s2, s3])
        p2 = Parent(data='p2', children=[s4, s5])
        sess.add(p1)
        sess.add(p2)
        sess.commit()
    def test_joinedload(self):
        # joined eager load collapses to a single statement
        Parent = self.classes.Parent
        sess = Session()
        def go():
            eq_(sess.query(Parent)
                .options(joinedload(Parent.children)).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
    def test_contains_eager(self):
        Parent = self.classes.Parent
        Sub = self.classes.Sub
        sess = Session()
        def go():
            eq_(sess.query(Parent)
                .join(Parent.children)
                .options(contains_eager(Parent.children))
                .order_by(Parent.data, Sub.data).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
    def test_subq_through_related(self):
        # subqueryload over two hops = three statements total
        Parent = self.classes.Parent
        Base = self.classes.Base
        sess = Session()
        def go():
            eq_(sess.query(Parent)
                .options(subqueryload_all(Parent.children, Base.related))
                .order_by(Parent.data).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 3)
    def test_subq_through_related_aliased(self):
        Parent = self.classes.Parent
        Base = self.classes.Base
        pa = aliased(Parent)
        sess = Session()
        def go():
            eq_(sess.query(pa)
                .options(subqueryload_all(pa.children, Base.related))
                .order_by(pa.data).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 3)
class SubClassEagerToSubClassTest(fixtures.MappedTest):
    """Test joinedloads from subclass to subclass mappers.

    The relationship originates on a joined-inh subclass (Subparent) and
    targets another joined-inh subclass (Sub); each strategy is tested
    with both attribute and string forms of the option.
    """
    run_setup_classes = 'once'
    run_setup_mappers = 'once'
    run_inserts = 'once'
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('type', String(10)),
              )
        Table('subparent', metadata,
              Column('id', Integer,
                     ForeignKey('parent.id'),
                     primary_key=True),
              Column('data', String(10)),
              )
        Table('base', metadata,
              Column('id', Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('type', String(10)),
              )
        Table('sub', metadata,
              Column('id', Integer,
                     ForeignKey('base.id'),
                     primary_key=True),
              Column('data', String(10)),
              Column('subparent_id', Integer,
                     ForeignKey('subparent.id'),
                     nullable=False)
              )
    @classmethod
    def setup_classes(cls):
        class Parent(cls.Comparable):
            pass
        class Subparent(Parent):
            pass
        class Base(cls.Comparable):
            pass
        class Sub(Base):
            pass
    @classmethod
    def setup_mappers(cls):
        sub = cls.tables.sub
        Sub = cls.classes.Sub
        base = cls.tables.base
        Base = cls.classes.Base
        parent = cls.tables.parent
        Parent = cls.classes.Parent
        subparent = cls.tables.subparent
        Subparent = cls.classes.Subparent
        mapper(Parent, parent,
               polymorphic_on=parent.c.type,
               polymorphic_identity='b')
        mapper(Subparent, subparent,
               inherits=Parent,
               polymorphic_identity='s',
               properties={
                   'children':relationship(Sub, order_by=base.c.id)})
        mapper(Base, base,
               polymorphic_on=base.c.type,
               polymorphic_identity='b')
        mapper(Sub, sub,
               inherits=Base,
               polymorphic_identity='s')
    @classmethod
    def insert_data(cls):
        # NOTE(review): p1/p2 are module globals read by the test methods.
        global p1, p2
        Sub, Subparent = cls.classes.Sub, cls.classes.Subparent
        sess = create_session()
        p1 = Subparent(
            data='p1',
            children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')])
        p2 = Subparent(
            data='p2',
            children=[Sub(data='s4'), Sub(data='s5')])
        sess.add(p1)
        sess.add(p2)
        sess.flush()
    def test_joinedload(self):
        Subparent = self.classes.Subparent
        sess = create_session()
        def go():
            eq_(sess.query(Subparent)
                .options(joinedload(Subparent.children)).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()
        # same load via string-name option
        def go():
            eq_(sess.query(Subparent)
                .options(joinedload("children")).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
    def test_contains_eager(self):
        Subparent = self.classes.Subparent
        sess = create_session()
        def go():
            eq_(sess.query(Subparent)
                .join(Subparent.children)
                .options(contains_eager(Subparent.children)).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()
        def go():
            eq_(sess.query(Subparent)
                .join(Subparent.children)
                .options(contains_eager("children")).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 1)
    def test_subqueryload(self):
        # subqueryload emits one extra statement for the collection
        Subparent = self.classes.Subparent
        sess = create_session()
        def go():
            eq_(sess.query(Subparent)
                .options(subqueryload(Subparent.children)).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 2)
        sess.expunge_all()
        def go():
            eq_(sess.query(Subparent)
                .options(subqueryload("children")).all(),
                [p1, p2])
        self.assert_sql_count(testing.db, go, 2)
class SameNamedPropTwoPolymorphicSubClassesTest(fixtures.MappedTest):
    """test pathing when two subclasses contain a different property
    for the same name, and polymorphic loading is used.
    #2614

    B.related and C.related both exist under the name 'related' but go
    through different secondary tables; loader options must not bleed
    from one subclass path into the other.
    """
    run_setup_classes = 'once'
    run_setup_mappers = 'once'
    run_inserts = 'once'
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table('a', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('type', String(10))
              )
        Table('b', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True)
              )
        Table('btod', metadata,
              Column('bid', Integer, ForeignKey('b.id'), nullable=False),
              Column('did', Integer, ForeignKey('d.id'), nullable=False)
              )
        Table('c', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True)
              )
        Table('ctod', metadata,
              Column('cid', Integer, ForeignKey('c.id'), nullable=False),
              Column('did', Integer, ForeignKey('d.id'), nullable=False)
              )
        Table('d', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True)
              )
    @classmethod
    def setup_classes(cls):
        class A(cls.Comparable):
            pass
        class B(A):
            pass
        class C(A):
            pass
        class D(cls.Comparable):
            pass
    @classmethod
    def setup_mappers(cls):
        A = cls.classes.A
        B = cls.classes.B
        C = cls.classes.C
        D = cls.classes.D
        mapper(A, cls.tables.a, polymorphic_on=cls.tables.a.c.type)
        # 'related' is declared separately on each subclass, with a
        # distinct secondary table per subclass
        mapper(B, cls.tables.b, inherits=A, polymorphic_identity='b',
               properties={
                   'related': relationship(D, secondary=cls.tables.btod)
               })
        mapper(C, cls.tables.c, inherits=A, polymorphic_identity='c',
               properties={
                   'related': relationship(D, secondary=cls.tables.ctod)
               })
        mapper(D, cls.tables.d)
    @classmethod
    def insert_data(cls):
        B = cls.classes.B
        C = cls.classes.C
        D = cls.classes.D
        session = Session()
        d = D()
        session.add_all([
            B(related=[d]),
            C(related=[d])
        ])
        session.commit()
    def test_free_w_poly_subquery(self):
        A = self.classes.A
        B = self.classes.B
        C = self.classes.C
        D = self.classes.D
        session = Session()
        d = session.query(D).one()
        a_poly = with_polymorphic(A, [B, C])
        def go():
            for a in session.query(a_poly).\
                    options(
                        subqueryload(a_poly.B.related),
                        subqueryload(a_poly.C.related)):
                eq_(a.related, [d])
        self.assert_sql_count(testing.db, go, 3)
    def test_fixed_w_poly_subquery(self):
        A = self.classes.A
        B = self.classes.B
        C = self.classes.C
        D = self.classes.D
        session = Session()
        d = session.query(D).one()
        def go():
            for a in session.query(A).with_polymorphic([B, C]).\
                    options(subqueryload(B.related), subqueryload(C.related)):
                eq_(a.related, [d])
        self.assert_sql_count(testing.db, go, 3)
    def test_free_w_poly_joined(self):
        A = self.classes.A
        B = self.classes.B
        C = self.classes.C
        D = self.classes.D
        session = Session()
        d = session.query(D).one()
        a_poly = with_polymorphic(A, [B, C])
        def go():
            for a in session.query(a_poly).\
                    options(
                        joinedload(a_poly.B.related),
                        joinedload(a_poly.C.related)):
                eq_(a.related, [d])
        self.assert_sql_count(testing.db, go, 1)
    def test_fixed_w_poly_joined(self):
        A = self.classes.A
        B = self.classes.B
        C = self.classes.C
        D = self.classes.D
        session = Session()
        d = session.query(D).one()
        def go():
            for a in session.query(A).with_polymorphic([B, C]).\
                    options(joinedload(B.related), joinedload(C.related)):
                eq_(a.related, [d])
        self.assert_sql_count(testing.db, go, 1)
class SubClassToSubClassFromParentTest(fixtures.MappedTest):
    """test #2617

    A with_polymorphic('*') base with subquery-eager relationships on
    both the base and a subclass; loading via the base must still emit
    the subclass's subquery load (three statements total).
    """
    run_setup_classes = 'once'
    run_setup_mappers = 'once'
    run_inserts = 'once'
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table('z', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True)
              )
        Table('a', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('type', String(10)),
              Column('z_id', Integer, ForeignKey('z.id'))
              )
        Table('b', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True)
              )
        Table('d', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True),
              Column('b_id', Integer, ForeignKey('b.id'))
              )
    @classmethod
    def setup_classes(cls):
        class Z(cls.Comparable):
            pass
        class A(cls.Comparable):
            pass
        class B(A):
            pass
        class D(A):
            pass
    @classmethod
    def setup_mappers(cls):
        Z = cls.classes.Z
        A = cls.classes.A
        B = cls.classes.B
        D = cls.classes.D
        mapper(Z, cls.tables.z)
        mapper(A, cls.tables.a, polymorphic_on=cls.tables.a.c.type,
               with_polymorphic='*',
               properties={
                   'zs': relationship(Z, lazy="subquery")
               })
        # B.related targets sibling subclass D via an explicit primaryjoin
        mapper(B, cls.tables.b, inherits=A, polymorphic_identity='b',
               properties={
                   'related': relationship(D, lazy="subquery",
                                           primaryjoin=cls.tables.d.c.b_id ==
                                           cls.tables.b.c.id)
               })
        mapper(D, cls.tables.d, inherits=A, polymorphic_identity='d')
    @classmethod
    def insert_data(cls):
        B = cls.classes.B
        session = Session()
        session.add(B())
        session.commit()
    def test_2617(self):
        A = self.classes.A
        session = Session()
        def go():
            a1 = session.query(A).first()
            eq_(a1.related, [])
        self.assert_sql_count(testing.db, go, 3)
class SubClassToSubClassMultiTest(AssertsCompiledSQL, fixtures.MappedTest):
    """
    Two different joined-inh subclasses, led by a
    parent, with two distinct endpoints:
    parent -> subcl1 -> subcl2 -> (ep1, ep2)
    the join to ep2 indicates we need to join
    from the middle of the joinpoint, skipping ep1
    """
    run_create_tables = None
    run_deletes = None
    __dialect__ = 'default'
    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(30))
              )
        Table('base1', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(30))
              )
        Table('sub1', metadata,
              Column('id', Integer, ForeignKey('base1.id'), primary_key=True),
              Column('parent_id', ForeignKey('parent.id')),
              Column('subdata', String(30))
              )
        Table('base2', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('base1_id', ForeignKey('base1.id')),
              Column('data', String(30))
              )
        Table('sub2', metadata,
              Column('id', Integer, ForeignKey('base2.id'), primary_key=True),
              Column('subdata', String(30))
              )
        Table('ep1', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('base2_id', Integer, ForeignKey('base2.id')),
              Column('data', String(30))
              )
        Table('ep2', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('base2_id', Integer, ForeignKey('base2.id')),
              Column('data', String(30))
              )
    @classmethod
    def setup_classes(cls):
        class Parent(cls.Comparable):
            pass
        class Base1(cls.Comparable):
            pass
        class Sub1(Base1):
            pass
        class Base2(cls.Comparable):
            pass
        class Sub2(Base2):
            pass
        class EP1(cls.Comparable):
            pass
        class EP2(cls.Comparable):
            pass
    @classmethod
    def _classes(cls):
        # convenience unpacker used by every test method
        return cls.classes.Parent, cls.classes.Base1,\
            cls.classes.Base2, cls.classes.Sub1,\
            cls.classes.Sub2, cls.classes.EP1,\
            cls.classes.EP2
    @classmethod
    def setup_mappers(cls):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = cls._classes()
        mapper(Parent, cls.tables.parent, properties={
            'sub1': relationship(Sub1)
        })
        mapper(Base1, cls.tables.base1, properties={
            'sub2': relationship(Sub2)
        })
        mapper(Sub1, cls.tables.sub1, inherits=Base1)
        mapper(Base2, cls.tables.base2, properties={
            'ep1': relationship(EP1),
            'ep2': relationship(EP2)
        })
        mapper(Sub2, cls.tables.sub2, inherits=Base2)
        mapper(EP1, cls.tables.ep1)
        mapper(EP2, cls.tables.ep2)
    def test_one(self):
        # full chain: Parent -> Sub1 -> Sub2 -> ep1, ep2
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            s.query(Parent).join(Parent.sub1, Sub1.sub2).
            join(Sub2.ep1).
            join(Sub2.ep2),
            "SELECT parent.id AS parent_id, parent.data AS parent_data "
            "FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
            "ON parent.id = sub1.parent_id JOIN "
            "(base2 JOIN sub2 "
            "ON base2.id = sub2.id) "
            "ON base1.id = base2.base1_id "
            "JOIN ep1 ON base2.id = ep1.base2_id "
            "JOIN ep2 ON base2.id = ep2.base2_id"
        )
    def test_two(self):
        # flat alias of Sub2 renders nested-join aliases, not a subquery
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s2a = aliased(Sub2, flat=True)
        s = Session()
        self.assert_compile(
            s.query(Parent).join(Parent.sub1).
            join(s2a, Sub1.sub2),
            "SELECT parent.id AS parent_id, parent.data AS parent_data "
            "FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
            "ON parent.id = sub1.parent_id JOIN "
            "(base2 AS base2_1 JOIN sub2 AS sub2_1 "
            "ON base2_1.id = sub2_1.id) "
            "ON base1.id = base2_1.base1_id"
        )
    def test_three(self):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            s.query(Base1).join(Base1.sub2).
            join(Sub2.ep1).\
            join(Sub2.ep2),
            "SELECT base1.id AS base1_id, base1.data AS base1_data "
            "FROM base1 JOIN (base2 JOIN sub2 "
            "ON base2.id = sub2.id) ON base1.id = "
            "base2.base1_id "
            "JOIN ep1 ON base2.id = ep1.base2_id "
            "JOIN ep2 ON base2.id = ep2.base2_id"
        )
    def test_four(self):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            s.query(Sub2).join(Base1, Base1.id == Sub2.base1_id).
            join(Sub2.ep1).\
            join(Sub2.ep2),
            "SELECT sub2.id AS sub2_id, base2.id AS base2_id, "
            "base2.base1_id AS base2_base1_id, base2.data AS base2_data, "
            "sub2.subdata AS sub2_subdata "
            "FROM base2 JOIN sub2 ON base2.id = sub2.id "
            "JOIN base1 ON base1.id = base2.base1_id "
            "JOIN ep1 ON base2.id = ep1.base2_id "
            "JOIN ep2 ON base2.id = ep2.base2_id"
        )
    def test_five(self):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            s.query(Sub2).join(Sub1, Sub1.id == Sub2.base1_id).
            join(Sub2.ep1).\
            join(Sub2.ep2),
            "SELECT sub2.id AS sub2_id, base2.id AS base2_id, "
            "base2.base1_id AS base2_base1_id, base2.data AS base2_data, "
            "sub2.subdata AS sub2_subdata "
            "FROM base2 JOIN sub2 ON base2.id = sub2.id "
            "JOIN "
            "(base1 JOIN sub1 ON base1.id = sub1.id) "
            "ON sub1.id = base2.base1_id "
            "JOIN ep1 ON base2.id = ep1.base2_id "
            "JOIN ep2 ON base2.id = ep2.base2_id"
        )
    def test_six(self):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            s.query(Sub2).from_self().\
            join(Sub2.ep1).
            join(Sub2.ep2),
            "SELECT anon_1.sub2_id AS anon_1_sub2_id, "
            "anon_1.base2_id AS anon_1_base2_id, "
            "anon_1.base2_base1_id AS anon_1_base2_base1_id, "
            "anon_1.base2_data AS anon_1_base2_data, "
            "anon_1.sub2_subdata AS anon_1_sub2_subdata "
            "FROM (SELECT sub2.id AS sub2_id, base2.id AS base2_id, "
            "base2.base1_id AS base2_base1_id, base2.data AS base2_data, "
            "sub2.subdata AS sub2_subdata "
            "FROM base2 JOIN sub2 ON base2.id = sub2.id) AS anon_1 "
            "JOIN ep1 ON anon_1.base2_id = ep1.base2_id "
            "JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
        )
    def test_seven(self):
        Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()
        s = Session()
        self.assert_compile(
            # adding Sub2 to the entities list helps it,
            # otherwise the joins for Sub2.ep1/ep2 don't have columns
            # to latch onto.  Can't really make it better than this
            s.query(Parent, Sub2).join(Parent.sub1).\
            join(Sub1.sub2).from_self().\
            join(Sub2.ep1).
            join(Sub2.ep2),
            "SELECT anon_1.parent_id AS anon_1_parent_id, "
            "anon_1.parent_data AS anon_1_parent_data, "
            "anon_1.sub2_id AS anon_1_sub2_id, "
            "anon_1.base2_id AS anon_1_base2_id, "
            "anon_1.base2_base1_id AS anon_1_base2_base1_id, "
            "anon_1.base2_data AS anon_1_base2_data, "
            "anon_1.sub2_subdata AS anon_1_sub2_subdata "
            "FROM (SELECT parent.id AS parent_id, parent.data AS parent_data, "
            "sub2.id AS sub2_id, "
            "base2.id AS base2_id, "
            "base2.base1_id AS base2_base1_id, "
            "base2.data AS base2_data, "
            "sub2.subdata AS sub2_subdata "
            "FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
            "ON parent.id = sub1.parent_id JOIN "
            "(base2 JOIN sub2 ON base2.id = sub2.id) "
            "ON base1.id = base2.base1_id) AS anon_1 "
            "JOIN ep1 ON anon_1.base2_id = ep1.base2_id "
            "JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
        )
class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest,
                                   testing.AssertsCompiledSQL):
    """test long join paths with a joined-inh in the middle, where we go multiple
    times across the same joined-inh to the same target but with other classes
    in the middle.  E.g. test [ticket:2908]
    """
    run_setup_mappers = 'once'
    __dialect__ = 'default'
    @classmethod
    def setup_classes(cls):
        Base = cls.DeclarativeBasic
        # Root and Intermediate both point to Sub1, which is a joined-inh
        # subclass of Parent; Target hangs off Sub1.
        class Root(Base):
            __tablename__ = 'root'
            id = Column(Integer, primary_key=True)
            sub1_id = Column(Integer, ForeignKey('sub1.id'))
            intermediate = relationship("Intermediate")
            sub1 = relationship("Sub1")
        class Intermediate(Base):
            __tablename__ = 'intermediate'
            id = Column(Integer, primary_key=True)
            sub1_id = Column(Integer, ForeignKey('sub1.id'))
            root_id = Column(Integer, ForeignKey('root.id'))
            sub1 = relationship("Sub1")
        class Parent(Base):
            __tablename__ = 'parent'
            id = Column(Integer, primary_key=True)
        class Sub1(Parent):
            __tablename__ = 'sub1'
            id = Column(Integer, ForeignKey('parent.id'),
                        primary_key=True)
            target = relationship("Target")
        class Target(Base):
            __tablename__ = 'target'
            id = Column(Integer, primary_key=True)
            sub1_id = Column(Integer, ForeignKey('sub1.id'))
    def test_join(self):
        # default aliasing of Sub1 renders a subquery per alias
        Root, Intermediate, Sub1, Target = \
            self.classes.Root, self.classes.Intermediate, \
            self.classes.Sub1, self.classes.Target
        s1_alias = aliased(Sub1)
        s2_alias = aliased(Sub1)
        t1_alias = aliased(Target)
        t2_alias = aliased(Target)
        sess = Session()
        q = sess.query(Root).\
            join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
            join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
            join(t2_alias, s2_alias.target)
        self.assert_compile(q,
            "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
            "FROM root "
            "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
            "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_1 "
            "ON anon_1.sub1_id = root.sub1_id "
            "JOIN target AS target_1 ON anon_1.sub1_id = target_1.sub1_id "
            "JOIN intermediate ON root.id = intermediate.root_id "
            "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
            "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_2 "
            "ON anon_2.sub1_id = intermediate.sub1_id "
            "JOIN target AS target_2 ON anon_2.sub1_id = target_2.sub1_id")
    def test_join_flat(self):
        # flat=True renders nested joins with table aliases instead
        Root, Intermediate, Sub1, Target = \
            self.classes.Root, self.classes.Intermediate, \
            self.classes.Sub1, self.classes.Target
        s1_alias = aliased(Sub1, flat=True)
        s2_alias = aliased(Sub1, flat=True)
        t1_alias = aliased(Target)
        t2_alias = aliased(Target)
        sess = Session()
        q = sess.query(Root).\
            join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
            join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
            join(t2_alias, s2_alias.target)
        self.assert_compile(q,
            "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
            "FROM root "
            "JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 ON parent_1.id = sub1_1.id) "
            "ON sub1_1.id = root.sub1_id "
            "JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
            "JOIN intermediate ON root.id = intermediate.root_id "
            "JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 ON parent_2.id = sub1_2.id) "
            "ON sub1_2.id = intermediate.sub1_id "
            "JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id"
        )
    def test_joinedload(self):
        Root, Intermediate, Sub1, Target = \
            self.classes.Root, self.classes.Intermediate, \
            self.classes.Sub1, self.classes.Target
        sess = Session()
        q = sess.query(Root).\
            options(
                joinedload(Root.sub1).joinedload(Sub1.target),
                joinedload(Root.intermediate).joinedload(Intermediate.sub1).\
                joinedload(Sub1.target),
            )
        self.assert_compile(q,
            "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id, "
            "target_1.id AS target_1_id, target_1.sub1_id AS target_1_sub1_id, "
            "sub1_1.id AS sub1_1_id, parent_1.id AS parent_1_id, "
            "intermediate_1.id AS intermediate_1_id, "
            "intermediate_1.sub1_id AS intermediate_1_sub1_id, "
            "intermediate_1.root_id AS intermediate_1_root_id, "
            "target_2.id AS target_2_id, target_2.sub1_id AS target_2_sub1_id, "
            "sub1_2.id AS sub1_2_id, parent_2.id AS parent_2_id "
            "FROM root "
            "LEFT OUTER JOIN intermediate AS intermediate_1 "
            "ON root.id = intermediate_1.root_id "
            "LEFT OUTER JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 "
            "ON parent_1.id = sub1_1.id) ON sub1_1.id = intermediate_1.sub1_id "
            "LEFT OUTER JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
            "LEFT OUTER JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 "
            "ON parent_2.id = sub1_2.id) ON sub1_2.id = root.sub1_id "
            "LEFT OUTER JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id")
class MultipleAdaptUsesEntityOverTableTest(AssertsCompiledSQL, fixtures.MappedTest):
    """Multiple joined-inh subclasses of the same base joined in one query:
    each join must be adapted to its own alias of the shared base table 'a'.
    Inspects Query internals (_from_obj, _entities) to verify adaptation.
    """
    __dialect__ = 'default'
    run_create_tables = None
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table('a', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String)
              )
        Table('b', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True)
              )
        Table('c', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True),
              Column('bid', Integer, ForeignKey('b.id'))
              )
        Table('d', metadata,
              Column('id', Integer, ForeignKey('a.id'), primary_key=True),
              Column('cid', Integer, ForeignKey('c.id'))
              )
    @classmethod
    def setup_classes(cls):
        class A(cls.Comparable):
            pass
        class B(A):
            pass
        class C(A):
            pass
        class D(A):
            pass
    @classmethod
    def setup_mappers(cls):
        A, B, C, D = cls.classes.A, cls.classes.B, cls.classes.C, cls.classes.D
        a, b, c, d = cls.tables.a, cls.tables.b, cls.tables.c, cls.tables.d
        mapper(A, a)
        mapper(B, b, inherits=A)
        mapper(C, c, inherits=A)
        mapper(D, d, inherits=A)
    def _two_join_fixture(self):
        # B -> C -> D, each a subclass of A, joined on explicit criteria
        A, B, C, D = self.classes.A, self.classes.B, self.classes.C, self.classes.D
        s = Session()
        return s.query(B.name, C.name, D.name).select_from(B).\
            join(C, C.bid == B.id).\
            join(D, D.cid == C.id)
    def test_two_joins_adaption(self):
        a, b, c, d = self.tables.a, self.tables.b, self.tables.c, self.tables.d
        q = self._two_join_fixture()
        # dig out the adapted alias constructs from the query's from-clause
        btoc = q._from_obj[0].left
        ac_adapted = btoc.right.element.left
        c_adapted = btoc.right.element.right
        is_(ac_adapted.element, a)
        is_(c_adapted.element, c)
        ctod = q._from_obj[0].right
        ad_adapted = ctod.left
        d_adapted = ctod.right
        is_(ad_adapted.element, a)
        is_(d_adapted.element, d)
        # each column entity must resolve against its own adapted alias
        bname, cname, dname = q._entities
        b_name_adapted = bname._resolve_expr_against_query_aliases(
            q, bname.column, None)
        c_name_adapted = cname._resolve_expr_against_query_aliases(
            q, cname.column, None)
        d_name_adapted = dname._resolve_expr_against_query_aliases(
            q, dname.column, None)
        assert bool(b_name_adapted == a.c.name)
        assert bool(c_name_adapted == ac_adapted.c.name)
        assert bool(d_name_adapted == ad_adapted.c.name)
    def test_two_joins_sql(self):
        q = self._two_join_fixture()
        self.assert_compile(q,
            "SELECT a.name AS a_name, a_1.name AS a_1_name, "
            "a_2.name AS a_2_name "
            "FROM a JOIN b ON a.id = b.id JOIN "
            "(a AS a_1 JOIN c AS c_1 ON a_1.id = c_1.id) ON c_1.bid = b.id "
            "JOIN (a AS a_2 JOIN d AS d_1 ON a_2.id = d_1.id) "
            "ON d_1.cid = c_1.id"
        )
|
michaelBenin/sqlalchemy
|
test/orm/inheritance/test_relationship.py
|
Python
|
mit
| 53,676
|
from flappy.display3d.vertexbuffer3d import VertexBuffer3D, VertexBuffer3DFormat
from flappy.display3d.indexbuffer3d import IndexBuffer3D
from flappy.display3d.program3d import Program3D
from flappy.display3d.texture import Texture
from flappy.display3d.scene3d import Scene3D
|
wannaphongcom/flappy
|
flappy/display3d/__init__.py
|
Python
|
mit
| 279
|
#!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
import socket
import os
import sys
import imp
import md5
import sha
import codecs
import base64
import platform
import shutil
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
# Paths
CONFIG_PATH = '/etc/opt/microsoft/omsagent/conf/'
SERVER_ADDRESS = '/var/opt/microsoft/omsagent/npm_state/npmdagent.sock'
DEST_FILE_NAME = 'npmd_agent_config.xml'
PLUGIN_PATH = '/opt/microsoft/omsagent/plugin/'
PLUGIN_CONF_PATH = '/etc/opt/microsoft/omsagent/conf/omsagent.d/'
RESOURCE_MODULE_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/DSCResources/MSFT_nxOMSAgentNPMConfigResource/NPM/'
DSC_RESOURCE_VERSION_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/VERSION'
AGENT_RESOURCE_VERSION_PATH = '/var/opt/microsoft/omsagent/npm_state/npm_version'
DSC_X64_AGENT_PATH = 'Agent/64/'
DSC_X86_AGENT_PATH = 'Agent/32/'
DSC_PLUGIN_PATH = 'Plugin/plugin/'
DSC_PLUGIN_CONF_PATH = 'Plugin/conf/'
AGENT_BINARY_PATH = '/opt/microsoft/omsagent/plugin/'
AGENT_SCRIPT_PATH = '/opt/microsoft/omsconfig/Scripts/NPMAgentBinaryCap.sh'
# Constants
X64 = '64bit'
AGENT_BINARY_NAME = 'npmd_agent'
def enum(**enums):
    """Create a simple enum-like class whose class attributes are *enums*."""
    return type('Enum', (), dict(enums))

# Command and log-level "enums" used throughout this module.
Commands = enum(LogNPM='ErrorLog', StartNPM='StartNPM', StopNPM='StopNPM', Config='Config', Purge='Purge')
LogType = enum(Error='ERROR', Info='INFO')
class INPMDiagnosticLog:
    """Interface for NPM diagnostic loggers."""

    def log(self):
        """Write a diagnostic entry; concrete loggers override this."""
        return None
class NPMDiagnosticLogUtil(INPMDiagnosticLog):
    """Sends diagnostic log lines to the npmd agent over its UNIX socket."""

    def log(self, logType, logString):
        # Create a UDS socket
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            try:
                # Connect the socket to the port where the server is listening
                sock.connect(SERVER_ADDRESS)
                # Send data.  Wire format: "<Command>:[<LEVEL>]<message>"
                message = Commands.LogNPM + ':' + '[' + logType + ']' + logString
                sock.sendall(message)
            except Exception, msg:
                # Logging must never raise; fall back to the DSC log.
                LG().Log(LogType.Error, str(msg))
        finally:
            sock.close()
LOG_ACTION = NPMDiagnosticLogUtil()
class IOMSAgent:
    """Interface for OMS agent controllers."""

    def restart_oms_agent(self):
        """Restart the OMS agent; concrete implementations override this."""
        return None
class OMSAgentUtil(IOMSAgent):
    """Restarts the OMS agent through its service_control script."""

    def restart_oms_agent(self):
        """Return True when the restart command exits 0; log and return
        False otherwise."""
        exit_code = os.system('sudo /opt/microsoft/omsagent/bin/service_control restart')
        if exit_code != 0:
            LOG_ACTION.log(LogType.Error, 'Error restarting omsagent.')
            return False
        return True
class INPMAgent:
    """Interface for NPM agent helpers."""

    def binary_setcap(self):
        """Apply capabilities to the agent binary; overridden by subclasses."""
        return None
class NPMAgentUtil(INPMAgent):
    """Applies Linux capabilities to the npmd agent binary.

    BUGFIX: this class implements binary_setcap, which is declared on the
    INPMAgent interface, but previously inherited from IOMSAgent (the OMS
    restart interface).  It now derives from INPMAgent.
    """

    def binary_setcap(self, binaryPath):
        """Run the setcap helper script on *binaryPath*.

        Returns True when the script exists and exits 0; otherwise logs an
        error and returns False.
        """
        if os.path.exists(AGENT_SCRIPT_PATH) and os.system('sudo %s %s' % (AGENT_SCRIPT_PATH, binaryPath)) == 0:
            return True
        else:
            LOG_ACTION.log(LogType.Error, 'Error setting capabilities to npmd agent binary.')
            return False
global show_mof
show_mof = False
OMS_ACTION = OMSAgentUtil()
NPM_ACTION = NPMAgentUtil()
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
def init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """Normalize the DSC resource parameters.

    Supplied string values are ascii-encoded (ignoring non-ascii),
    Contents is base64-decoded, and missing values get their defaults
    ('UpdatedAgentConfig' for ConfigType, 'Present' for Ensure, ''
    otherwise).  Returns the normalized 5-tuple.
    """
    if ConfigType is not None and ConfigType != '':
        ConfigType = ConfigType.encode('ascii', 'ignore')
    else:
        ConfigType = 'UpdatedAgentConfig'

    if ConfigID is None:
        ConfigID = ''
    else:
        ConfigID = ConfigID.encode('ascii', 'ignore')

    if Contents is None:
        Contents = ''
    else:
        Contents = base64.b64decode(Contents)

    if Ensure is not None and Ensure != '':
        Ensure = Ensure.encode('ascii', 'ignore')
    else:
        Ensure = 'Present'

    if ContentChecksum is None:
        ContentChecksum = ''
    else:
        ContentChecksum = ContentChecksum.encode('ascii', 'ignore')

    return ConfigType, ConfigID, Contents, Ensure, ContentChecksum
def Set_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """SET entry point called by the DSC engine.

    Validates the still-base64-encoded Contents against ContentChecksum
    (md5 first, sha1 fallback), then normalizes arguments and delegates to
    Set().  Returns [0] on success, [-1] on failure.
    """
    recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
    if recvdContentChecksum != ContentChecksum:
        LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
        # validate with sha1
        recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
        if recvdContentChecksum != ContentChecksum:
            # data is corrupt do not proceed further
            LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
            return [-1]
    (ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    return retval
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """TEST entry point called by the DSC engine.

    Validates the checksum (md5, then sha1 fallback) and delegates to
    Test().  Returns [0] when in the desired state, [-1] otherwise.
    """
    recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
    if recvdContentChecksum != ContentChecksum:
        LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
        # validate with sha1
        recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
        if recvdContentChecksum != ContentChecksum:
            # data is corrupt do not proceed further
            # NOTE(review): unlike Set_Marshall, corrupt data returns [0]
            # ("in desired state"), so Set() is skipped -- confirm intended.
            LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
            return [0]
    (ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    return retval
def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """GET entry point: wrap the current property values in MI types and
    return (exit_code, property_dict) for the DSC engine."""
    # Snapshot the argument names BEFORE any other locals are created, so
    # the loop below picks up exactly the five (rebound) resource properties.
    arg_names = list(locals().keys())
    (ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    retval = 0
    retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    # Rebind each property as an MI_String so the OMI layer can marshal it.
    ConfigType = protocol.MI_String(ConfigType)
    ConfigID = protocol.MI_String(ConfigID)
    Ensure = protocol.MI_String(Ensure)
    Contents = protocol.MI_String(Contents)
    ContentChecksum = protocol.MI_String(ContentChecksum)
    retd = {}
    ld = locals()
    for k in arg_names:
        retd[k] = ld[k]
    return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
    """Toggle debug echoing of MOF text (consumed by ShowMof)."""
    global show_mof
    show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """Debug aid: when show_mof is enabled, render the call as MOF text and
    append it to ./test_mofs.log and the DSC log."""
    if not show_mof:
        return
    mof = ''
    mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
    mof += '{\n'
    mof += ' ConfigType = "' + ConfigType + '"\n'
    mof += ' ConfigID = "' + ConfigID + '"\n'
    mof += ' Contents = "' + Contents + '"\n'
    mof += ' Ensure = "' + Ensure + '"\n'
    mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
    mof += '}\n'
    f = open('./test_mofs.log', 'a')
    Print(mof, file=f)
    LG().Log(LogType.Info, mof)
    f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """Apply the NPM configuration.

    Purges the solution when Ensure is 'Absent'; otherwise pushes the
    config file when it differs and refreshes agent/plugin files when the
    DSC module version differs.  Returns [0] on success, [-1] on failure.
    """
    ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    retval = 0
    if ConfigType != 'UpdatedAgentConfig':
        LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
        return [-1]
    if Ensure == 'Absent':
        if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
            # Resource files are installed but should not be: remove them.
            LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
            success = PurgeSolution()
            if not success:
                retval = -1
        return [retval]
    # Push the config only when it differs from what is already deployed.
    if TestConfigUpdate(Contents) != 0:
        retval = SetConfigUpdate(Contents)
    version = TestResourceVersion()
    if version != 0:
        # The DSC module ships a different version: refresh the binaries.
        retval = SetFilesUpdate(version)
    return [retval]
def Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """Check whether the machine matches the desired state.

    Returns [0] when consistent, [-1] when Set() needs to run.
    """
    ShowMof('TEST', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    retval = 0
    if not os.path.exists(AGENT_SCRIPT_PATH):
        # Without the setcap helper the agent cannot be managed; report
        # "consistent" so the DSC engine does not loop on Set().
        LG().Log(LogType.Error, 'npmd set cap script does not exist, exiting test')
        return [retval]
    if ConfigType != 'UpdatedAgentConfig':
        LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting test')
        return [retval]
    if Ensure == 'Absent':
        if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
            LG().Log(LogType.Info, 'Ensure is absent, resource is present on the agent, set will purge')
            retval = -1
        return [retval]
    if TestResourceVersion() != 0 or TestConfigUpdate(Contents) != 0:
        # Version or config drift detected: Set() must run.
        retval = -1
    return [retval]
def Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
    """GET helper: echo the MOF when enabled; always reports success."""
    ShowMof('GET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
    return [0]
def Print(s, file=sys.stdout):
    """Write *s* followed by a newline to *file* (print-style helper)."""
    line = s + '\n'
    file.write(line)
# Compare resource version in DSC and agent machine
# Returns
#   0 if version is same
#   dsc version number if there is a mismatch or agent config not present
def TestResourceVersion():
    """Return 0 when the installed agent version matches the DSC module's,
    otherwise the DSC module's version string (truthy)."""
    retval = 0
    dscVersion = ReadFile(DSC_RESOURCE_VERSION_PATH)
    if not os.path.exists(AGENT_RESOURCE_VERSION_PATH):
        # npmd agent is not present, copy binaries
        retval = dscVersion
    else:
        agentVersion = ReadFile(AGENT_RESOURCE_VERSION_PATH)
        if agentVersion != dscVersion:
            # version mismatch, copy binaries
            retval = dscVersion
    return retval
def TestConfigUpdate(Contents):
    """Return 0 when the deployed config already equals *Contents* (or the
    config directory is missing entirely), -1 when an update is needed."""
    retval = 0
    destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
    if not os.path.exists(CONFIG_PATH):
        # omsagent is not installed; there is nothing to update.
        LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
        retval = 0
    elif not os.path.exists(destFileFullPath):
        # Configuration does not exist, fail
        retval = -1
    else:
        origConfigData = ReadFile(destFileFullPath)
        # compare deployed config with the desired contents
        if origConfigData is None or origConfigData != Contents:
            retval = -1
    return retval
def SetConfigUpdate(Contents):
    """Write *Contents* to the NPM agent config file and, when the agent is
    installed, notify it of the change.  Returns 0 on success, -1 on error."""
    destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
    # Update config after checking if directory exists
    if not os.path.exists(CONFIG_PATH):
        LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
        retval = -1
    else:
        retval = WriteFile(destFileFullPath, Contents)
        if retval == 0 and os.path.exists(AGENT_RESOURCE_VERSION_PATH):  # notify server only if plugin is present
            LG().Log(LogType.Info, 'Updated the file, going to notify server')
            NotifyServer(Commands.Config)
    return retval
def SetFilesUpdate(newVersion):
    """Refresh both the agent binary and the plugin files.

    Both steps are always attempted (no short-circuit), matching the
    original bitwise accumulation.  Returns 0 when both succeed, else -1.
    """
    agent_ok = UpdateAgentBinary(newVersion)
    plugin_ok = UpdatePluginFiles()
    if agent_ok and plugin_ok:
        return 0
    return -1
def UpdateAgentBinary(newVersion):
    """Replace the npmd agent binary with the one shipped in the DSC module.

    Picks the 64-bit payload when the platform is 64-bit (32-bit is copied
    but logged as unsupported), records *newVersion* in
    AGENT_RESOURCE_VERSION_PATH and re-applies capabilities to the copied
    binary.  Returns True on success.
    """
    retval = True
    arch = platform.architecture()
    src = ''
    if arch is not None and arch[0] == X64:
        src = RESOURCE_MODULE_PATH + DSC_X64_AGENT_PATH
        retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
        retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
    else:
        src = RESOURCE_MODULE_PATH + DSC_X86_AGENT_PATH
        retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
        retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
        LOG_ACTION.log(LogType.Error, 'npmd agent binary do not support 32-bit.')
    # Update version number after deleting and copying new agent files
    if retval == True:
        WriteFile(AGENT_RESOURCE_VERSION_PATH, newVersion)
        # Locate the copied binary and set capabilities on it.
        # BUGFIX: full_file_name used to be referenced without ever being
        # assigned when no file matched AGENT_BINARY_NAME, raising
        # NameError; now that case is logged and reported as failure.
        full_file_name = None
        for file_name in os.listdir(src):
            if AGENT_BINARY_NAME in file_name:
                full_file_name = os.path.join(AGENT_BINARY_PATH, file_name)
                break
        if full_file_name is not None:
            NPM_ACTION.binary_setcap(full_file_name)
        else:
            LOG_ACTION.log(LogType.Error, 'npmd agent binary not found in ' + src)
            retval = False
    # Notify ruby plugin
    #retval &= NotifyServer(Commands.RestartNPM)
    return retval
def UpdatePluginFiles():
    """Refresh the omsagent NPM plugin and its conf from the DSC module,
    then restart omsagent.

    Every step runs regardless of earlier failures (list evaluation does
    not short-circuit); returns True only if all steps succeeded.
    """
    steps = [
        DeleteAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_PATH, PLUGIN_PATH),
        DeleteAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_CONF_PATH, PLUGIN_CONF_PATH),
        CopyAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_PATH, PLUGIN_PATH),
        CopyAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_CONF_PATH, PLUGIN_CONF_PATH),
        OMS_ACTION.restart_oms_agent(),
    ]
    return all(steps)
def CopyAllFiles(src, dest):
    """Copy every regular file from directory *src* into directory *dest*.

    Returns True on success, False (after logging) on any error.
    """
    try:
        src_files = os.listdir(src)
        for file_name in src_files:
            full_file_name = os.path.join(src, file_name)
            if os.path.isfile(full_file_name):
                shutil.copy(full_file_name, dest)
    except Exception:
        # BUGFIX: narrowed from a bare 'except:' so SystemExit /
        # KeyboardInterrupt are no longer swallowed.
        LOG_ACTION.log(LogType.Error, 'copy_all_files failed for src: ' + src + ' dest: ' + dest)
        return False
    return True
# Deletes all files present in both directories
def DeleteAllFiles(src, dest):
    """Delete from *dest* every file whose name also appears in *src*.

    Returns True on success, False (after logging) on any error.
    """
    try:
        src_files = os.listdir(src)
        for file_name in src_files:
            full_file_name = os.path.join(dest, file_name)
            if os.path.isfile(full_file_name):
                os.remove(full_file_name)
    except Exception:
        # BUGFIX: narrowed from a bare 'except:' so SystemExit /
        # KeyboardInterrupt are no longer swallowed.
        LOG_ACTION.log(LogType.Error, 'delete_all_files failed for src: ' + src + ' dest: ' + dest)
        return False
    return True
def PurgeSolution():
    """Remove the NPM plugin configuration and version marker, then ask the
    plugin to purge the agent.  Returns True unless cleanup failed."""
    # remove plugin config file so that plugin does not start again
    retval = DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
    # remove resource version file
    try:
        os.remove(AGENT_RESOURCE_VERSION_PATH)
    except:
        # best effort: the version file may already be gone
        LOG_ACTION.log(LogType.Error, 'failed to remove version file')
        retval = False
    # notify ruby plugin to purge agent
    NotifyServer(Commands.Purge)
    return retval
def NotifyServer(command):
    """Send *command* to the npmd agent over its UNIX domain socket.

    Best effort: failures are only logged, and when the failed command was
    a config update, omsagent is restarted so the plugin re-reads the
    config on startup.
    """
    # Create a UDS socket
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    LG().Log(LogType.Info, 'connecting to ' + SERVER_ADDRESS)
    try:
        try:
            # Connect the socket to the port where the server is listening
            sock.connect(SERVER_ADDRESS)
            # Send data
            message = command
            LG().Log(LogType.Info, 'sending ' + message)
            sock.sendall(message)
        except Exception, msg:
            LG().Log(LogType.Error, str(msg))
            # restart omsagent if command was config update and sock conn failed
            if (command == Commands.Config):
                OMS_ACTION.restart_oms_agent()
    finally:
        LG().Log(LogType.Info, 'closing socket')
        sock.close()
def WriteFile(path, contents):
    """Write *contents* to *path*.  Returns 0 on success, -1 on IOError."""
    retval = 0
    try:
        dFile = open(path, 'w+')
        dFile.write(contents)
        dFile.close()
    except IOError, error:
        LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + error.strerror)
        retval = -1
    return retval
def ReadFile(path):
    """Read *path* as UTF-8 text.  Returns the content, or None on IOError."""
    content = None
    try:
        dFile = codecs.open (path, encoding = 'utf8', mode = "r")
        content = dFile.read()
        dFile.close()
    except IOError, error:
        LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + error.strerror)
    return content
|
MSFTOSSMgmt/WPSDSCLinux
|
Providers/Scripts/2.4x-2.5x/Scripts/nxOMSAgentNPMConfig.py
|
Python
|
mit
| 15,607
|
from bitfinex.client import Client, TradeClient
|
laisee/bitfinex
|
bitfinex/__init__.py
|
Python
|
mit
| 48
|
from . import tests
from . import defaults
from .scripting import (
registeredTests,
testGlyph,
testLayer,
testFont,
formatGlyphReport,
formatLayerReport,
formatFontReport
)
|
typesupply/glyph-nanny
|
source/code/glyphNanny/__init__.py
|
Python
|
mit
| 201
|
import unittest
from frappe.model.base_document import BaseDocument
class TestBaseDocument(unittest.TestCase):
    """Unit tests for BaseDocument.docstatus state helpers."""

    def test_docstatus(self):
        """docstatus values 0/1/2 map to draft/submitted/cancelled."""
        # assertEqual replaces the deprecated assertEquals alias (removed
        # from unittest in newer Python versions).
        doc = BaseDocument({"docstatus": 0})
        self.assertTrue(doc.docstatus.is_draft())
        self.assertEqual(doc.docstatus, 0)

        doc.docstatus = 1
        self.assertTrue(doc.docstatus.is_submitted())
        self.assertEqual(doc.docstatus, 1)

        doc.docstatus = 2
        self.assertTrue(doc.docstatus.is_cancelled())
        self.assertEqual(doc.docstatus, 2)
|
frappe/frappe
|
frappe/tests/test_base_document.py
|
Python
|
mit
| 476
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Tangible template for the eng_moncal_ifs32 ship engine."""
    tangible = Tangible()
    tangible.template = "object/tangible/ship/components/engine/shared_eng_moncal_ifs32.iff"
    tangible.attribute_template_id = 8
    tangible.stfName("space/space_item","eng_moncal_ifs32_n")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return tangible
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/ship/components/engine/shared_eng_moncal_ifs32.py
|
Python
|
mit
| 477
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import struct
import subprocess
import sys
import tempfile
def main(args):
    """Dispatch *args* to a MacTool instance and exit with its code, if any."""
    exit_code = MacTool().Dispatch(args)
    if exit_code is not None:
        sys.exit(exit_code)
class MacTool(object):
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
    """Dispatches a string command (args[0]) to the matching Exec* method,
    passing the remaining args through."""
    if not args:
        raise Exception("Not enough arguments")
    method_name = "Exec%s" % self._CommandifyName(args[0])
    handler = getattr(self, method_name)
    return handler(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace('-', '')
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
        # Copy tree.
        # TODO(thakis): This copies file attributes like mtime, while the
        # single-file branch below doesn't. This should probably be changed to
        # be consistent with the single-file branch.
        if os.path.exists(dest):
            shutil.rmtree(dest)
        shutil.copytree(source, dest)
    elif extension == '.xib':
        return self._CopyXIBFile(source, dest)
    elif extension == '.storyboard':
        # Storyboards compile with the same ibtool invocation as XIBs.
        return self._CopyXIBFile(source, dest)
    elif extension == '.strings':
        self._CopyStringsFile(source, dest)
    else:
        if os.path.exists(dest):
            os.unlink(dest)
        shutil.copy(source, dest)
        # Optionally convert plist/strings output to a binary plist.
        if extension in ('.plist', '.strings') and convert_to_binary == 'True':
            self._ConvertToBinary(dest)
def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
    base = os.path.dirname(os.path.realpath(__file__))
    if os.path.relpath(source):
        source = os.path.join(base, source)
    if os.path.relpath(dest):
        dest = os.path.join(base, dest)
    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
    # NOTE(review): lexicographic string compare of the Xcode version;
    # works for same-width values like '0700' but not across widths.
    if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
        args.extend(['--auto-activate-custom-fonts'])
    if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
        args.extend([
            '--target-device', 'iphone', '--target-device', 'ipad',
            '--minimum-deployment-target',
            os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
        ])
    else:
        args.extend([
            '--target-device', 'mac',
            '--minimum-deployment-target',
            os.environ['MACOSX_DEPLOYMENT_TARGET'],
        ])
    args.extend(['--output-format', 'human-readable-text', '--compile', dest,
                 source])
    # Filter ibtool output: drop "is clipping its content" notes, but emit
    # a section header whenever that section has any other message.
    ibtool_section_re = re.compile(r'/\*.*\*/')
    ibtool_re = re.compile(r'.*note:.*is clipping its content')
    ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
    current_section_header = None
    for line in ibtoolout.stdout:
        if ibtool_section_re.match(line):
            current_section_header = line
        elif not ibtool_re.match(line):
            if current_section_header:
                sys.stdout.write(current_section_header)
                current_section_header = None
            sys.stdout.write(line)
    return ibtoolout.returncode
def _ConvertToBinary(self, dest):
    """Convert the plist at *dest* to binary1 format in place via plutil."""
    command = ['xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]
    subprocess.check_call(command)
def _CopyStringsFile(self, source, dest):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"
    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
    #     semicolon in dictionary.
    # on invalid files. Do the same kind of validation.
    # NOTE(review): CoreFoundation here is the PyObjC bridge, available only
    # on macOS system Python -- confirm for other interpreters.
    import CoreFoundation
    s = open(source, 'rb').read()
    d = CoreFoundation.CFDataCreate(None, s, len(s))
    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
    if error:
        # Invalid plist: silently skip the copy (matches Xcode's behavior).
        return
    fp = open(dest, 'wb')
    fp.write(s.decode(input_code).encode('UTF-16'))
    fp.close()
def _DetectInputEncoding(self, file_name):
    """Reads the first few bytes from file_name and tries to guess the text
    encoding from a BOM. Returns None as a guess if it can't detect it."""
    fp = open(file_name, 'rb')
    try:
        header = fp.read(3)
    except Exception:
        # BUGFIX: this was 'except e:', which is not a handler binding but a
        # lookup of the undefined name 'e' -- any read failure raised
        # NameError instead of returning None.
        fp.close()
        return None
    fp.close()
    # BOM sniffing (byte-string comparisons; this module targets Python 2).
    if header.startswith("\xFE\xFF"):
        return "UTF-16"
    elif header.startswith("\xFF\xFE"):
        return "UTF-16"
    elif header.startswith("\xEF\xBB\xBF"):
        return "UTF-8"
    else:
        return None
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
    """Copies the |source| Info.plist to the destination directory |dest|,
    expanding ${VAR}, ${VAR:identifier} and ${VAR:rfc1034identifier}
    references from the environment, then writes PkgInfo next to it.

    NOTE(review): uses Python-2-only APIs (plistlib.readPlistFromString /
    writePlistToString, string.replace, dict .items() concatenation).
    """
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()
    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
    plist = plistlib.readPlistFromString(lines)
    if keys:
        plist = dict(plist.items() + json.loads(keys[0]).items())
    lines = plistlib.writePlistToString(plist)
    # Go through all the environment variables and replace them as variables in
    # the file.
    IDENT_RE = re.compile(r'[/\s]')
    for key in os.environ:
        if key.startswith('_'):
            continue
        evar = '${%s}' % key
        evalue = os.environ[key]
        lines = string.replace(lines, evar, evalue)
        # Xcode supports various suffices on environment variables, which are
        # all undocumented. :rfc1034identifier is used in the standard project
        # template these days, and :identifier was used earlier. They are used to
        # convert non-url characters into things that look like valid urls --
        # except that the replacement character for :identifier, '_' isn't valid
        # in a URL either -- oops, hence :rfc1034identifier was born.
        evar = '${%s:identifier}' % key
        evalue = IDENT_RE.sub('_', os.environ[key])
        lines = string.replace(lines, evar, evalue)
        evar = '${%s:rfc1034identifier}' % key
        evalue = IDENT_RE.sub('-', os.environ[key])
        lines = string.replace(lines, evar, evalue)
    # Remove any keys with values that haven't been replaced.
    lines = lines.split('\n')
    for i in range(len(lines)):
        if lines[i].strip().startswith("<string>${"):
            # Drop both the unexpanded <string> line and its preceding
            # <key> line.
            lines[i] = None
            lines[i - 1] = None
    lines = '\n'.join(filter(lambda x: x is not None, lines))
    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()
    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)
    if convert_to_binary == 'True':
        self._ConvertToBinary(dest)
def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist.

    Only 'APPL' (executable) bundles get a PkgInfo; the 4-character
    signature falls back to '????' when missing or malformed.
    """
    plist = plistlib.readPlist(info_plist)
    if not plist:
        return
    # Only create PkgInfo for executable types.
    package_type = plist['CFBundlePackageType']
    if package_type != 'APPL':
        return
    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If that is missing, four
    # '?' characters are used instead.
    signature_code = plist.get('CFBundleSignature', '????')
    if len(signature_code) != 4:  # Wrong length resets everything, too.
        signature_code = '?' * 4
    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    # BUGFIX: context manager guarantees the file handle is closed even if
    # the write raises (previous code leaked the handle on error).
    with open(dest, 'w') as fp:
        fp.write('%s%s' % (package_type, signature_code))
def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # O_CREAT ensures the lock file exists; mode 0o666 lets other users
    # share the same lock file.  The fd (and with it the exclusive lock)
    # is held for the duration of the subprocess call.
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
    symbols'."""
    libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
                            r'file: .* has no symbols$')
    libtool_re5 = re.compile(
        r'^.*libtool: warning for library: ' +
        r'.* the table of contents is empty ' +
        r'\(no object file members in the library define global symbols\)$')
    env = os.environ.copy()
    # Ref:
    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
    # The problem with this flag is that it resets the file mtime on the file to
    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
    env['ZERO_AR_DATE'] = '1'
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
    _, err = libtoolout.communicate()
    # Re-emit stderr minus the known-noise lines.
    for line in err.splitlines():
        if not libtool_re.match(line) and not libtool_re5.match(line):
            print >>sys.stderr, line
    # Unconditionally touch the output .a file on the command line if present
    # and the command succeeded. A bit hacky.
    if not libtoolout.returncode:
        for i in range(len(cmd_list) - 1):
            if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
                # Restore a meaningful mtime that ZERO_AR_DATE clobbered.
                os.utime(cmd_list[i+1], None)
                break
    return libtoolout.returncode
def ExecPackageIosFramework(self, framework):
    """Create Modules/module.modulemap inside *framework*, declaring an
    umbrella-header module named after the framework binary."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]
    module_path = os.path.join(framework, 'Modules')
    if not os.path.exists(module_path):
        os.mkdir(module_path)
    module_template = 'framework module %s {\n' \
                      '  umbrella header "%s.h"\n' \
                      '\n' \
                      '  export *\n' \
                      '  module * { export * }\n' \
                      '}\n' % (binary, binary)
    # BUGFIX: context manager guarantees the file is closed on error
    # (previous code leaked the handle if write() raised).
    with open(os.path.join(module_path, 'module.modulemap'), "w") as module_file:
        module_file.write(module_template)
def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]
    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'
    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
        # Binary-less frameworks don't seem to contain symlinks (see e.g.
        # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
        return
    # Move into the framework directory to set the symlinks correctly.
    # NOTE(review): not exception-safe -- an error between the chdir calls
    # leaves the process inside the framework directory.
    pwd = os.getcwd()
    os.chdir(framework)
    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))
    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
    # Back to where we were before!
    os.chdir(pwd)
def _Relink(self, dest, link):
"""Creates a symlink to |dest| named |link|. If |link| already exists,
it is overwritten."""
if os.path.lexists(link):
os.remove(link)
os.symlink(dest, link)
def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
    """Write a header map for *framework* to *out*, mapping both "Foo.h"
    and "FrameworkName/Foo.h" to each header's absolute path.

    NOTE(review): relies on WriteHmap, defined elsewhere in this module.
    """
    framework_name = os.path.basename(framework).split('.')[0]
    all_headers = map(os.path.abspath, all_headers)
    filelist = {}
    for header in all_headers:
        filename = os.path.basename(header)
        filelist[filename] = header
        filelist[os.path.join(framework_name, filename)] = header
    WriteHmap(out, filelist)
def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
    """Copy each of *copy_headers* into the Headers/ dir of *framework*."""
    headers_dir = os.path.join(framework, 'Headers')
    if not os.path.exists(headers_dir):
        os.makedirs(headers_dir)
    for header in copy_headers:
        destination = os.path.join(headers_dir, os.path.basename(header))
        shutil.copy(header, destination)
def ExecCompileXcassets(self, keys, *inputs):
    """Compiles multiple .xcassets files into a single .car file.

    This invokes 'actool' to compile all the inputs .xcassets files. The
    |keys| arguments is a json-encoded dictionary of extra arguments to
    pass to 'actool' when the asset catalogs contains an application icon
    or a launch image.

    Note that 'actool' does not create the Assets.car file if the asset
    catalogs does not contains imageset.
    """
    command_line = [
        'xcrun', 'actool', '--output-format', 'human-readable-text',
        '--compress-pngs', '--notices', '--warnings', '--errors',
    ]
    is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
    if is_iphone_target:
        # CONFIGURATION looks like e.g. "Debug-iphoneos"; fall back to the
        # simulator when the suffix is unrecognized.
        platform = os.environ['CONFIGURATION'].split('-')[-1]
        if platform not in ('iphoneos', 'iphonesimulator'):
            platform = 'iphonesimulator'
        command_line.extend([
            '--platform', platform, '--target-device', 'iphone',
            '--target-device', 'ipad', '--minimum-deployment-target',
            os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
            os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
        ])
    else:
        command_line.extend([
            '--platform', 'macosx', '--target-device', 'mac',
            '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
            '--compile',
            os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
        ])
    if keys:
        # Translate the JSON dict into actool flags: bools become bare
        # flags, lists repeat the flag per value.
        # NOTE(review): iteritems() is Python-2-only.
        keys = json.loads(keys)
        for key, value in keys.iteritems():
            arg_name = '--' + key
            if isinstance(value, bool):
                if value:
                    command_line.append(arg_name)
            elif isinstance(value, list):
                for v in value:
                    command_line.append(arg_name)
                    command_line.append(str(v))
            else:
                command_line.append(arg_name)
                command_line.append(str(value))
    # Note: actool crashes if inputs path are relative, so use os.path.abspath
    # to get absolute path name for inputs.
    command_line.extend(map(os.path.abspath, inputs))
    subprocess.check_call(command_line)
def ExecMergeInfoPlist(self, output, *inputs):
    """Merge multiple .plist files into a single .plist file."""
    merged_plist = {}
    # Later inputs win on conflicts; nested dicts are merged recursively
    # by _MergePlist.
    for path in inputs:
        plist = self._LoadPlistMaybeBinary(path)
        self._MergePlist(merged_plist, plist)
    plistlib.writePlist(merged_plist, output)
def ExecCodeSignBundle(self, key, entitlements, provisioning):
    """Code sign a bundle.

    This function tries to code sign an iOS bundle, following the same
    algorithm as Xcode:
      1. pick the provisioning profile that best match the bundle identifier,
         and copy it into the bundle as embedded.mobileprovision,
      2. copy Entitlements.plist from user or SDK next to the bundle,
      3. code sign the bundle.

    NOTE(review): _GetCFBundleIdentifier and _InstallEntitlements are
    defined elsewhere in this module.
    """
    substitutions, overrides = self._InstallProvisioningProfile(
        provisioning, self._GetCFBundleIdentifier())
    entitlements_path = self._InstallEntitlements(
        entitlements, substitutions, overrides)
    # Sign the built product in place with the resolved entitlements.
    subprocess.check_call([
        'codesign', '--force', '--sign', key, '--entitlements',
        entitlements_path, '--timestamp=none', os.path.join(
            os.environ['TARGET_BUILD_DIR'],
            os.environ['FULL_PRODUCT_NAME'])])
def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple containing two dictionary: variables substitutions and values
      to overrides when generating the entitlements file.
    """
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    # Embed the chosen profile in the bundle under its canonical name.
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']
def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.

    Checks all the installed provisioning profiles (or if the user specified
    the PROVISIONING_PROFILE variable, only consult it) and select the most
    specific that correspond to the bundle identifier.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.

    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
        print >>sys.stderr, (
            'cannot find mobile provisioning for %s' % bundle_identifier)
        sys.exit(1)
    provisioning_profiles = None
    # An explicitly named profile takes precedence when it exists on disk.
    if profile:
        profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
        if os.path.exists(profile_path):
            provisioning_profiles = [profile_path]
    if not provisioning_profiles:
        provisioning_profiles = glob.glob(
            os.path.join(profiles_dir, '*.mobileprovision'))
    # Keep only profiles whose application-identifier pattern (possibly a
    # wildcard) matches "<team>.<bundle_identifier>".
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
        profile_data = self._LoadProvisioningProfile(profile_path)
        app_id_pattern = profile_data.get(
            'Entitlements', {}).get('application-identifier', '')
        for team_identifier in profile_data.get('TeamIdentifier', []):
            app_id = '%s.%s' % (team_identifier, bundle_identifier)
            if fnmatch.fnmatch(app_id, app_id_pattern):
                valid_provisioning_profiles[app_id_pattern] = (
                    profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
        print >>sys.stderr, (
            'cannot find mobile provisioning for %s' % bundle_identifier)
        sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]
def _LoadProvisioningProfile(self, profile_path):
    """Decode a .mobileprovision file and return its embedded plist.

    Args:
      profile_path: string, path to the .mobileprovision file
    Returns:
      Content of the plist embedded in the provisioning profile as a
      dictionary.
    """
    # "security cms -D" strips the CMS signature, leaving the bare plist,
    # which may still be binary and so goes through _LoadPlistMaybeBinary.
    with tempfile.NamedTemporaryFile() as decoded:
        subprocess.check_call(
            ['security', 'cms', '-D', '-i', profile_path, '-o', decoded.name])
        return self._LoadPlistMaybeBinary(decoded.name)
def _MergePlist(self, merged_plist, plist):
"""Merge |plist| into |merged_plist|."""
for key, value in plist.iteritems():
if isinstance(value, dict):
merged_value = merged_plist.get(key, {})
if isinstance(merged_value, dict):
self._MergePlist(merged_value, value)
merged_plist[key] = merged_value
else:
merged_plist[key] = value
else:
merged_plist[key] = value
def _LoadPlistMaybeBinary(self, plist_path):
    """Loads into memory a plist possibly encoded in binary format.

    This is a wrapper around plistlib.readPlist that tries to convert the
    plist to the XML format if it can't be parsed (assuming that it is in
    the binary format).

    Args:
      plist_path: string, path to a plist file, in XML or binary format
    Returns:
      Content of the plist as a dictionary.
    """
    try:
        # First, try to read the file using plistlib, which only supports
        # XML; on failure fall through to the binary-conversion path below.
        return plistlib.readPlist(plist_path)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are
        # no longer silently swallowed here.
        pass
    # Convert a temporary copy to XML with plutil, then parse that copy.
    with tempfile.NamedTemporaryFile() as temp:
        shutil.copy2(plist_path, temp.name)
        subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
        return plistlib.readPlist(temp.name)
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
Args:
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
app_identifier_prefix: string, value for AppIdentifierPrefix
Returns:
Dictionary of substitutions to apply when generating Entitlements.plist.
"""
return {
'CFBundleIdentifier': bundle_identifier,
'AppIdentifierPrefix': app_identifier_prefix,
}
def _GetCFBundleIdentifier(self):
    """Extracts CFBundleIdentifier value from Info.plist in the bundle.

    Returns:
      Value of CFBundleIdentifier in the Info.plist located in the bundle.
    """
    # TARGET_BUILD_DIR and INFOPLIST_PATH are supplied by Xcode's build
    # environment.
    plist_path = os.path.join(os.environ['TARGET_BUILD_DIR'],
                              os.environ['INFOPLIST_PATH'])
    return self._LoadPlistMaybeBinary(plist_path)['CFBundleIdentifier']
def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and installs the ${BundleName}.xcent entitlements file.

    Expands "$(variable)" patterns in the source entitlements file, adds
    extra entitlements defined in the .mobileprovision file, and copies the
    generated plist to "${BundlePath}.xcent".

    Args:
      entitlements: string, optional, path to the Entitlements.plist template
          to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements
    Returns:
      Path to the generated entitlements file.
    """
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    # Fall back to the SDK's default template when no path was given.
    source_path = entitlements or os.path.join(
        os.environ['SDKROOT'], 'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._ExpandVariables(
        self._LoadPlistMaybeBinary(target_path), substitutions)
    # Overrides never clobber keys already present in the template.
    for key, value in (overrides or {}).items():
        data.setdefault(key, value)
    plistlib.writePlist(data, target_path)
    return target_path
def _ExpandVariables(self, data, substitutions):
"""Expands variables "$(variable)" in data.
Args:
data: object, can be either string, list or dictionary
substitutions: dictionary, variable substitutions to perform
Returns:
Copy of data where each references to "$(variable)" has been replaced
by the corresponding value found in substitutions, or left intact if
the key was not found.
"""
if isinstance(data, str):
for key, value in substitutions.iteritems():
data = data.replace('$(%s)' % key, value)
return data
if isinstance(data, list):
return [self._ExpandVariables(v, substitutions) for v in data]
if isinstance(data, dict):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
def NextGreaterPowerOf2(x):
    """Round a positive integer up to the nearest power of two (4 -> 4, 5 -> 8)."""
    # (x - 1).bit_length() is the number of bits needed to represent x - 1,
    # so shifting 1 left by that amount rounds x up to a power of two.
    return 1 << (x - 1).bit_length()
def WriteHmap(output_name, filelist):
    """Generates a header map based on |filelist|.

    Per Mark Mentovai:
      A header map is structured essentially as a hash table, keyed by names
      used in #includes, and providing pathnames to the actual files.

    The implementation below and the comment above come from inspecting:
      http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
    while also looking at the implementation in clang in:
      https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp

    Args:
      output_name: string, path of the .hmap file to write
      filelist: dict mapping include name -> full header path (non-empty)
    """
    magic = 1751998832
    version = 1
    _reserved = 0
    count = len(filelist)
    capacity = NextGreaterPowerOf2(count)
    strings_offset = 24 + (12 * capacity)
    # Length of the longest path value.  The original computed this with a
    # tuple-unpacking lambda (lambda (k, v): ...), which is a SyntaxError on
    # Python 3; max over values() is equivalent.
    max_value_length = len(max(filelist.values(), key=len))

    # "with" guarantees the output file is flushed and closed; the original
    # left the handle open.
    with open(output_name, "wb") as out:
        out.write(struct.pack('<LHHLLLL', magic, version, _reserved,
                              strings_offset, count, capacity,
                              max_value_length))

        # Create empty hashmap buckets.
        buckets = [None] * capacity
        for file, path in filelist.items():
            key = 0
            for c in file:
                key += ord(c.lower()) * 13

            # Fill next empty bucket (open addressing with linear probing).
            while buckets[key & capacity - 1] is not None:
                key = key + 1
            buckets[key & capacity - 1] = (file, path)

        next_offset = 1
        for bucket in buckets:
            if bucket is None:
                out.write(struct.pack('<LLL', 0, 0, 0))
            else:
                (file, path) = bucket
                key_offset = next_offset
                prefix_offset = key_offset + len(file) + 1
                suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
                next_offset = suffix_offset + len(os.path.basename(path)) + 1
                out.write(struct.pack('<LLL', key_offset, prefix_offset,
                                      suffix_offset))

        # Pad byte since next offset starts at 1.
        out.write(struct.pack('<x'))

        for bucket in buckets:
            if bucket is not None:
                (file, path) = bucket
                out.write(struct.pack('<%ds' % len(file), file))
                out.write(struct.pack('<s', '\0'))
                base = os.path.dirname(path) + os.sep
                out.write(struct.pack('<%ds' % len(base), base))
                out.write(struct.pack('<s', '\0'))
                path = os.path.basename(path)
                out.write(struct.pack('<%ds' % len(path), path))
                out.write(struct.pack('<s', '\0'))
# Script entry point: run main() on the CLI arguments (minus argv[0]) and
# propagate its return value as the process exit status.
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
tkelman/utf8rewind
|
tools/gyp/pylib/gyp/mac_tool.py
|
Python
|
mit
| 26,881
|
# Event-type identifiers, used as the keys of _event_handlers.
EXCEPTION_INFO = 'exception_info'
MESSAGE = 'message'
PAYLOAD = 'payload'

# Registry mapping event type -> ordered list of handler callables.
_event_handlers = {
    EXCEPTION_INFO: [],
    MESSAGE: [],
    PAYLOAD: []
}
def _check_type(typ):
    """Raise ValueError unless *typ* is a known event type."""
    if typ in _event_handlers:
        return
    raise ValueError('Unknown type: %s. Must be one of %s'
                     % (typ, _event_handlers.keys()))
def _add_handler(typ, handler_fn, pos):
    """Register *handler_fn* for event type *typ* at list position *pos*.

    Duplicate registrations are ignored.  A *pos* of None defaults to -1.
    """
    _check_type(typ)
    pos = pos if pos is not None else -1
    handlers = _event_handlers[typ]
    try:
        handlers.index(handler_fn)
    except ValueError:
        # NOTE(review): list.insert(-1, x) places x *before* the last
        # element, not at the end -- confirm that appending was not the
        # intended default behavior.
        handlers.insert(pos, handler_fn)
def _remove_handler(typ, handler_fn):
    """Unregister *handler_fn* from *typ*; silently ignore if absent."""
    _check_type(typ)
    try:
        # remove() drops the first occurrence, matching the original
        # index()+pop() pair, and raises ValueError when missing.
        _event_handlers[typ].remove(handler_fn)
    except ValueError:
        pass
def _on_event(typ, target, **kw):
    """Pipe *target* through the handler chain registered for *typ*.

    Each handler receives the previous handler's return value; a handler
    returning False short-circuits the chain and False is returned.
    """
    _check_type(typ)
    current = target
    for handler_fn in _event_handlers[typ]:
        current = handler_fn(current, **kw)
        if current is False:
            return False
    return current
# Add/remove event handlers
def add_exception_info_handler(handler_fn, pos=None):
    # Register a handler for exception_info events.
    _add_handler(EXCEPTION_INFO, handler_fn, pos)

def remove_exception_info_handler(handler_fn):
    # Unregister a previously-added exception_info handler.
    _remove_handler(EXCEPTION_INFO, handler_fn)

def add_message_handler(handler_fn, pos=None):
    # Register a handler for message events.
    _add_handler(MESSAGE, handler_fn, pos)

def remove_message_handler(handler_fn):
    # Unregister a previously-added message handler.
    _remove_handler(MESSAGE, handler_fn)

def add_payload_handler(handler_fn, pos=None):
    # Register a handler for payload events.
    _add_handler(PAYLOAD, handler_fn, pos)

def remove_payload_handler(handler_fn):
    # Unregister a previously-added payload handler.
    _remove_handler(PAYLOAD, handler_fn)
# Event handler processing
def on_exception_info(exc_info, **kw):
    # Run exc_info through the exception_info handler chain.
    return _on_event(EXCEPTION_INFO, exc_info, **kw)

def on_message(message, **kw):
    # Run message through the message handler chain.
    return _on_event(MESSAGE, message, **kw)

def on_payload(payload, **kw):
    # Run payload through the payload handler chain.
    return _on_event(PAYLOAD, payload, **kw)
# Misc
def reset():
    """Clear every registered handler list in place."""
    for handlers in _event_handlers.values():
        # Slice assignment empties the existing list object so any
        # outstanding references observe the reset too.
        handlers[:] = []
|
rollbar/pyrollbar
|
rollbar/lib/events.py
|
Python
|
mit
| 1,970
|
def bigdigits(line_splitted):
    """Print the decimal digits found in *line_splitted* as 6-row banner art.

    Non-digit characters are ignored.  Each digit is rendered as a 5-column
    glyph taken from the row-pattern strings below; the six assembled rows
    are printed to stdout (six empty lines when the input has no digits).
    """
    # Each string holds the glyph row for digits 0-9, 5 columns per digit.
    rows_formatted = [
        '-**----*--***--***---*---****--**--****--**---**--',
        '*--*--**-----*----*-*--*-*----*-------*-*--*-*--*-',
        '*--*---*---**---**--****-***--***----*---**---***-',
        '*--*---*--*-------*----*----*-*--*--*---*--*----*-',
        '-**---***-****-***-----*-***---**---*----**---**--',
        '--------------------------------------------------',
    ]
    # The original filtered non-digits with an index loop that popped items
    # while iterating (and a no-op "x = x-1"), which skips the character
    # following every removed one.  A comprehension keeps exactly the ASCII
    # digits '0'-'9' (same range as the original ord() 48-57 check).
    digits = [int(c) for c in line_splitted if '0' <= c <= '9']
    for row in rows_formatted:
        print(''.join(row[d * 5:d * 5 + 5] for d in digits))
# Render every line of the "test_cases" input file.  A "with" block replaces
# the original bare open() so the file handle is always closed.
with open('test_cases', 'r') as test_cases:
    for line in test_cases:
        bigdigits(line.strip())
|
Lhedzv/Coding_Wednesdays
|
bigdigits.py
|
Python
|
mit
| 1,512
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
faq_en = '''
<p>
<b>Why statistics on the site does not coincide with the statistics in the game?</b>
</p>
<p>
Algorithms collection statistics IL2 stats differs from statistics in-game. As a consequence of these statistics will not coincide with the game.
</p>
<br>
<p>
<b>How is calculated the rating?</b>
</p>
<p>
1) calculate how many scores player earns per one life - score / (dead + capture) = SD<br>
2) calculate how many scores player earns per one hour - score / flight time = SH<br>
3) calculate rating by formula: (SD * SH * score) / 1000
</p>
<br>
<p>
<b>Why my profile is not displayed in the table of players?</b>
</p>
<p>
Statistics exclude inactive players from the overall rating. By default players inactive for more than 7 days - do not participate in the rating.
</p>
<br>
<p>
<b>I landed at the airfield, but sortie status - landing not on airfield. Why?</b>
</p>
<p>
Landing working only on active airfield. Usually active airfield is the one where you can respawn.
</p>
<br>
<p>
<b>What is the Fairplay Index?</b>
</p>
<p>
Fairplay Index is an indicator of the correct behavior of the player, it affects the score.The maximum value - 100% indicates that the player does not violate the rules, a player receives a 100% score and all bonuses. If the index is less than 100%, that player gets a percentage of the score corresponding to the current index. Also, in this case, the player does not receive any bonuses.<br>
Violations of reducing the index:<br>
Disconnection -10%<br>
Shotdown friendly aircraft -10%<br>
Destroyed friendly ground target -5%<br>
The index recovered by 5% per flying hour, if the player did not violate the rules.<br>
The idea was borrowed from the project Bellum War.
</p>
<br>
'''
faq_ru = '''
<p>
<b>Почему статистика на сайте не совпадает со статистикой внутри игры?</b>
</p>
<p>
Алгоритмы сбора статистики IL2 stats отличаются от статистики в игре. Как следствие данная статистика не будет совпадать с игровой.
</p>
<br>
<p>
<b>Как рассчитывается рейтинг?</b>
</p>
<p>
Рейтинг пилота рассчитывается на основе заработанных пилотом очков, среднего количества очков за жизнь и за час. Такой способ расчета рейтинга учитывает не только количественные, но и качественные показатели пилота, а так же сводит в единую систему оценки пилотов разных специализаций.<br>
Как именно рассчитывается рейтинг:<br>
1) вычисляем сколько игрок зарабатывает очков за одну жизнь - очки / (смерти + плен) = ОС<br>
2) вычисляем сколько игрок зарабатывает очков за один час налета - очки / налет часов = ОЧ<br>
3) вычисляем рейтинг по формуле: (ОС * ОЧ * очки) / 1000
</p>
<br>
<p>
<b>Почему мой профиль не отображается в общей таблице игроков?</b>
</p>
<p>
В статистике включена опция которая исключает неактивных игроков из общего рейтинга. По умолчанию игроки неактивные более 7 дней - не участвуют в рейтинге.
</p>
<br>
<p>
<b>Я приземлился на аэродром, но в статусе вылета указана посадка в поле. Почему?</b>
</p>
<p>
Посадка засчитывается только на активный аэродром. Как правило активный аэродром это тот на котором вы можете начать вылет.
</p>
<br>
<p>
<b>Что такое "Индекс честной игры"?</b>
</p>
<p>
Индекс честной игры (Fairplay) это показатель правильного поведения игрока, влияющий на получаемые им очки. Максимальное значение - 100% говорит о том, что игрок не нарушает правила, такой игрок получает 100% очков и все полагающиеся ему бонусы. Если индекс меньше 100%, то игрок получает не всю сумму заработанных очков, а лишь процент от них, соответствующий текущему индексу честной игры. Так же, в этом случае, игрок не получает ни каких бонусов.<br>
Нарушения уменьшающие индекс честной игры:<br>
Дисконнект -10%<br>
Уничтожение союзного самолета -10%<br>
Уничтожение союзной техники -5%<br>
Индекс восстанавливается по 5% за час налета, при условии игры без нарушений.<br>
Идея заимствована из проекта Bellum War.
</p>
<br>
'''
def default_chunks(apps, schema_editor):
    """Data migration: create (or refresh) the 'faq' chunk with EN/RU content."""
    # Use the historical model so this migration keeps working as the
    # Chunk model evolves.
    Chunk = apps.get_model('chunks', 'Chunk')
    faq, _created = Chunk.objects.get_or_create(key='faq')
    faq.title_en = 'FAQ'
    faq.title_ru = 'FAQ'
    faq.content_en = faq_en
    faq.content_ru = faq_ru
    faq.save()
class Migration(migrations.Migration):
    # Requires the previous chunks migration so the Chunk model exists.
    dependencies = [
        ('chunks', '0003_auto_20151107_2007'),
    ]

    # Forward-only data migration (no reverse callable supplied).
    operations = [
        migrations.RunPython(default_chunks),
    ]
|
Flyingfox646/flyingfox
|
src/chunks/migrations/0004_faq_fairplay.py
|
Python
|
mit
| 5,970
|
# AnalogClock's font selector for setup dialog
# E. A. Tacao <e.a.tacao |at| estadao.com.br>
# http://j.domaindlx.com/elements28/wxpython/
# 15 Fev 2006, 22:00 GMT-03:00
# Distributed under the wxWidgets license.
import wx
from wx.lib.newevent import NewEvent
from wx.lib.buttons import GenButton
#----------------------------------------------------------------------------
(FontSelectEvent, EVT_FONTSELECT) = NewEvent()
#----------------------------------------------------------------------------
class FontSelect(GenButton):
    """A "Select..." button that opens a wx.FontDialog and posts a
    FontSelectEvent to its parent when the user picks a font."""

    def __init__(self, parent, size=(75, 21), value=None):
        # value: initial wx.Font; None falls back to the system GUI font.
        GenButton.__init__(self, parent, wx.ID_ANY, label="Select...",
                           size=size)
        self.SetBezelWidth(1)
        self.parent = parent
        self.SetValue(value)
        self.parent.Bind(wx.EVT_BUTTON, self.OnClick, self)

    def GetValue(self):
        # Currently selected wx.Font.
        return self.value

    def SetValue(self, value):
        # None selects the default system GUI font.
        if value is None:
            value = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        self.value = value

    def OnClick(self, event):
        # Open the font dialog seeded with the current font.
        data = wx.FontData()
        data.EnableEffects(False)
        # NOTE(review): this mutates the stored font object (self.value is
        # aliased) by forcing its point size to 10 before seeding the
        # dialog -- confirm that side effect is intended.
        font = self.value; font.SetPointSize(10)
        data.SetInitialFont(font)
        dlg = wx.FontDialog(self, data)
        changed = dlg.ShowModal() == wx.ID_OK
        if changed:
            data = dlg.GetFontData()
            self.value = data.GetChosenFont()
            self.Refresh()
        dlg.Destroy()
        if changed:
            # Notify listeners only after the dialog has been destroyed.
            nevt = FontSelectEvent(id=self.GetId(), obj=self, val=self.value)
            wx.PostEvent(self.parent, nevt)
#
##
### eof
|
ktan2020/legacy-automation
|
win/Lib/site-packages/wx-3.0-msw/wx/lib/analogclock/lib_setup/fontselect.py
|
Python
|
mit
| 1,713
|
from sofi.ui import TableRow
def test_basic():
    # An empty TableRow renders as a bare <tr> element.
    assert(str(TableRow()) == "<tr></tr>")

def test_text():
    # Text content is placed verbatim inside the row element.
    assert(str(TableRow("text")) == "<tr>text</tr>")

def test_custom_class_ident_style_and_attrs():
    # id, class, style and extra attributes are emitted in that order.
    assert(str(TableRow("text", cl='abclass', ident='123', style="font-size:0.9em;", attrs={"data-test": 'abc'}))
           == "<tr id=\"123\" class=\"abclass\" style=\"font-size:0.9em;\" data-test=\"abc\">text</tr>")
|
tryexceptpass/sofi
|
test/tablerow_test.py
|
Python
|
mit
| 429
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# Absolute path of the directory containing this settings module.
SITE_ROOT = os.path.dirname(os.path.abspath(__file__))

DEBUG = True  # Debug mode

TEMPLATE_DIR = os.path.join(SITE_ROOT, 'templates')  # Template directory
BASE_TEMPLATE = 'base'  # Base template

# URL mapping (pattern, handler-class name pairs)
URLS = (
    '/', 'Index',
    '(/j)?/shorten', 'Shorten',
    '/([0-9a-zA-Z]{5,})', 'Expand',
    '/j/expand', 'Expand',
    '/.*', 'Index',
)

# Database configuration
DATABASES = {
    'dbn': 'mysql',
    'db': 'shorturl',
    'user': 'py',
    'pw': 'py_passwd',
    'host': 'localhost',
    'port': 3306,
}
|
mozillazg/ShortURL
|
shorturl/settings.py
|
Python
|
mit
| 569
|
import csv
from meerkat_abacus.config import config
def data_types(param_config=config):
    """Load the country's data-type definitions from its CSV types file.

    Returns a list of dicts, one per CSV row.
    """
    types_path = (param_config.config_directory
                  + param_config.country_config["types_file"])
    with open(types_path, "r", encoding='utf-8', errors="replace") as f:
        # Materialize before the file closes; DictReader is lazy.
        return list(csv.DictReader(f))
def data_types_for_form_name(form_name, param_config=config):
    """Return only the data types whose 'form' field equals *form_name*."""
    matching = []
    for data_type in data_types(param_config=param_config):
        if form_name == data_type['form']:
            matching.append(data_type)
    return matching
# Eagerly-loaded module-level cache; note this reads the CSV at import time.
DATA_TYPES_DICT = data_types()
|
who-emro/meerkat_abacus
|
meerkat_abacus/util/data_types.py
|
Python
|
mit
| 555
|
class Options:
    """Process-wide option store accessed through a class-level singleton."""

    # The singleton Options instance created by set().
    instance = None

    def __init__(self, options):
        # options: dict mapping option name -> (string) value.
        self.options = options

    @classmethod
    def set(cls, options):
        """Create an Options instance with the provided dictionary of
        options"""
        cls.instance = Options(options)

    @classmethod
    def inst(cls):
        """Get the Options instance.

        Raises an OptionsError if set() has not been called yet.
        """
        if cls.instance is None:
            raise OptionsError("No options have been set")
        return cls.instance

    @classmethod
    def get(cls, name, as_type=str):
        """Get an option by name, converted by *as_type*.

        Raises an OptionsError if the option doesn't exist.
        """
        options = cls.inst().options
        try:
            raw_value = options[name]
        except KeyError:
            raise OptionsError("No option with key '%s'" % name)
        return as_type(raw_value)

    @classmethod
    def overwrite(cls, name, value):
        # Set (or replace) an option value without validation.
        cls.inst().options[name] = value

    @classmethod
    def isset(cls, name):
        """Checks whether the option exists and is set.

        By set, it means whether the option has length. All the option
        values are strings.
        """
        options = cls.inst().options
        return name in options and len(options[name]) > 0
class OptionsError(Exception):
    """Raised when no options are set or an unknown option key is requested."""
    pass
|
sumyfly/vdebug
|
plugin/python/vdebug/opts.py
|
Python
|
mit
| 1,365
|
from __future__ import absolute_import
import logging
import warnings
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound
from pip.index import PackageFinder
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions, dist_is_editable
from pip.utils.deprecation import RemovedInPip7Warning
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        # Register all command-line options for "pip list".
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages (excluding editables)')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages (excluding editables)')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        # Shared index-related options (--index-url etc.) plus our own.
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_external=options.allow_external,
            allow_unverified=options.allow_unverified,
            allow_all_external=options.allow_all_external,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        # Dispatch to the listing mode selected by the flags; plain
        # "pip list" falls through to run_listing.
        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        elif options.editable:
            self.run_editables(options)
        else:
            self.run_listing(options)

    def run_outdated(self, options):
        # Print only packages whose latest remote version is newer than the
        # installed one.
        for dist, remote_version_raw, remote_version_parsed in \
                self.find_packages_latests_versions(options):
            if remote_version_parsed > dist.parsed_version:
                logger.info(
                    '%s (Current: %s Latest: %s)',
                    dist.project_name, dist.version, remote_version_raw,
                )

    def find_packages_latests_versions(self, options):
        # Yield (dist, raw_version, parsed_version) for the latest index
        # version of each installed, non-editable distribution.
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            warnings.warn(
                "--use-mirrors has been deprecated and will be removed in the "
                "future. Explicit uses of --index-url and/or --extra-index-url"
                " is suggested.",
                RemovedInPip7Warning,
            )

        if options.mirrors:
            warnings.warn(
                "--mirrors has been deprecated and will be removed in the "
                "future. Explicit uses of --index-url and/or --extra-index-url"
                " is suggested.",
                RemovedInPip7Warning,
            )
            index_urls += options.mirrors

        # Collect dependency links declared by installed distributions so
        # the finder can consider them too.
        dependency_links = []
        for dist in get_installed_distributions(local_only=options.local,
                                                user_only=options.user):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            installed_packages = get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                include_editables=False,
            )
            for dist in installed_packages:
                req = InstallRequirement.from_line(dist.key, None)
                try:
                    link = finder.find_requirement(req, True)

                    # If link is None, means installed version is most
                    # up-to-date
                    if link is None:
                        continue
                except DistributionNotFound:
                    continue
                else:
                    # It might be a good idea that link or finder had a public
                    # method that returned version
                    remote_version = finder._link_package_versions(
                        link, req.name
                    )[0]
                    remote_version_raw = remote_version[2]
                    remote_version_parsed = remote_version[0]
                    yield dist, remote_version_raw, remote_version_parsed

    def run_listing(self, options):
        # Default mode: list everything installed.
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
        )
        self.output_package_listing(installed_packages)

    def run_editables(self, options):
        # List only editable (development-mode) installs.
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=True,
        )
        self.output_package_listing(installed_packages)

    def output_package_listing(self, installed_packages):
        # Case-insensitive sort by project name, one line per package;
        # editables also show their location.
        installed_packages = sorted(
            installed_packages,
            key=lambda dist: dist.project_name.lower(),
        )
        for dist in installed_packages:
            if dist_is_editable(dist):
                line = '%s (%s, %s)' % (
                    dist.project_name,
                    dist.version,
                    dist.location,
                )
            else:
                line = '%s (%s)' % (dist.project_name, dist.version)
            logger.info(line)

    def run_uptodate(self, options):
        # Print only packages already at the newest index version.
        uptodate = []
        for dist, remote_version_raw, remote_version_parsed in \
                self.find_packages_latests_versions(options):
            if dist.parsed_version == remote_version_parsed:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
|
Carreau/pip
|
pip/commands/list.py
|
Python
|
mit
| 7,507
|
"""
homeassistant.components.graphite
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Component that records all events and state changes and feeds the data to
a graphite installation.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/graphite/
"""
import logging
import queue
import socket
import threading
import time
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_STATE_CHANGED)
from homeassistant.helpers import state
DOMAIN = "graphite"
_LOGGER = logging.getLogger(__name__)
def setup(hass, config):
    """Set up the graphite feeder from the 'graphite' config section."""
    conf = config.get('graphite', {})
    raw_port = conf.get('port', 2003)
    try:
        port = int(raw_port)
    except ValueError:
        _LOGGER.error('Invalid port specified')
        return False
    # The feeder registers its own event listeners, so the instance does
    # not need to be kept.
    GraphiteFeeder(hass, conf.get('host', 'localhost'), port,
                   conf.get('prefix', 'ha'))
    return True
class GraphiteFeeder(threading.Thread):
    """ Feeds data to graphite. """
    def __init__(self, hass, host, port, prefix):
        super(GraphiteFeeder, self).__init__(daemon=True)
        self._hass = hass
        self._host = host
        self._port = port
        # rstrip any trailing dots in case they think they
        # need it
        self._prefix = prefix.rstrip('.')
        self._queue = queue.Queue()
        # Sentinel object put on the queue to tell run() to exit.
        self._quit_object = object()
        self._we_started = False

        # Start feeding when HA starts, stop on shutdown, and queue every
        # state change in between.
        hass.bus.listen_once(EVENT_HOMEASSISTANT_START,
                             self.start_listen)
        hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP,
                             self.shutdown)
        hass.bus.listen(EVENT_STATE_CHANGED, self.event_listener)
        _LOGGER.debug('Graphite feeding to %s:%i initialized',
                      self._host, self._port)

    def start_listen(self, event):
        """ Start event-processing thread. """
        _LOGGER.debug('Event processing thread started')
        self._we_started = True
        self.start()

    def shutdown(self, event):
        """ Tell the thread that we are done.

        This does not block because there is nothing to
        clean up (and no penalty for killing in-process
        connections to graphite.
        """
        _LOGGER.debug('Event processing signaled exit')
        self._queue.put(self._quit_object)

    def event_listener(self, event):
        """ Queue an event for processing. """
        # Accept events before the thread starts (they queue up) or while
        # it is alive; complain if the worker died unexpectedly.
        if self.is_alive() or not self._we_started:
            _LOGGER.debug('Received event')
            self._queue.put(event)
        else:
            _LOGGER.error('Graphite feeder thread has died, not '
                          'queuing event!')

    def _send_to_graphite(self, data):
        # One short-lived TCP connection per batch, plaintext protocol,
        # 10-second timeout.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(10)
        sock.connect((self._host, self._port))
        sock.sendall(data.encode('ascii'))
        sock.send('\n'.encode('ascii'))
        sock.close()

    def _report_attributes(self, entity_id, new_state):
        # Build "prefix.entity.key value timestamp" lines from the numeric
        # attributes (plus the state itself when it maps to a number).
        now = time.time()
        things = dict(new_state.attributes)
        try:
            things['state'] = state.state_as_number(new_state)
        except ValueError:
            pass
        lines = ['%s.%s.%s %f %i' % (self._prefix,
                                     entity_id, key.replace(' ', '_'),
                                     value, now)
                 for key, value in things.items()
                 if isinstance(value, (float, int))]
        if not lines:
            return
        _LOGGER.debug('Sending to graphite: %s', lines)
        try:
            self._send_to_graphite('\n'.join(lines))
        except socket.gaierror:
            _LOGGER.error('Unable to connect to host %s', self._host)
        except socket.error:
            _LOGGER.exception('Failed to send data to graphite')

    def run(self):
        # Main loop: consume queued events until the quit sentinel arrives.
        while True:
            event = self._queue.get()
            if event == self._quit_object:
                _LOGGER.debug('Event processing thread stopped')
                self._queue.task_done()
                return
            elif (event.event_type == EVENT_STATE_CHANGED and
                    event.data.get('new_state')):
                _LOGGER.debug('Processing STATE_CHANGED event for %s',
                              event.data['entity_id'])
                try:
                    self._report_attributes(event.data['entity_id'],
                                            event.data['new_state'])
                # pylint: disable=broad-except
                except Exception:
                    # Catch this so we can avoid the thread dying and
                    # make it visible.
                    _LOGGER.exception('Failed to process STATE_CHANGED event')
            else:
                _LOGGER.warning('Processing unexpected event type %s',
                                event.event_type)
            self._queue.task_done()
|
coteyr/home-assistant
|
homeassistant/components/graphite.py
|
Python
|
mit
| 5,000
|
# Time: O(n)
# Space: O(n)
# Given a binary tree, return the tilt of the whole tree.
#
# The tilt of a tree node is defined as the absolute difference
# between the sum of all left subtree node values and
# the sum of all right subtree node values. Null node has tilt 0.
#
# The tilt of the whole tree is defined as the sum of all nodes' tilt.
#
# Example:
# Input:
# 1
# / \
# 2 3
# Output: 1
# Explanation:
# Tilt of node 2 : 0
# Tilt of node 3 : 0
# Tilt of node 1 : |2-3| = 1
# Tilt of binary tree : 0 + 0 + 1 = 1
# Note:
#
# The sum of node values in any subtree won't exceed
# the range of 32-bit integer.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def findTilt(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Post-order traversal: each node contributes the absolute difference
        between its left and right subtree sums to the accumulated tilt.
        """
        # Single-element list acts as a mutable accumulator shared with the
        # nested helper (Python-2 compatible alternative to nonlocal).
        total_tilt = [0]

        def subtree_sum(node):
            # Return the value sum of the subtree rooted at node, adding the
            # node's tilt to the accumulator along the way.
            if node is None:
                return 0
            left_sum = subtree_sum(node.left)
            right_sum = subtree_sum(node.right)
            total_tilt[0] += abs(left_sum - right_sum)
            return left_sum + right_sum + node.val

        subtree_sum(root)
        return total_tilt[0]
|
kamyu104/LeetCode
|
Python/binary-tree-tilt.py
|
Python
|
mit
| 1,289
|
""" discovery and running of std-library "unittest" style tests. """
from __future__ import absolute_import, division, print_function
import sys
import traceback
# for transferring markers
import _pytest._code
from _pytest.config import hookimpl
from _pytest.outcomes import fail, skip, xfail
from _pytest.python import transfer_markers, Class, Module, Function
from _pytest.compat import getimfunc
def pytest_pycollect_makeitem(collector, name, obj):
    """Collect unittest.TestCase subclasses as UnitTestCase items."""
    # has unittest been imported and is obj a subclass of its TestCase?
    # The broad except mirrors the original: issubclass() on arbitrary
    # objects (or a missing "unittest" module entry) must not break
    # collection.
    try:
        testcase_class = sys.modules["unittest"].TestCase
        if not issubclass(obj, testcase_class):
            return
    except Exception:
        return
    # yes, so let's collect it
    return UnitTestCase(name, parent=collector)
class UnitTestCase(Class):
    # marker for fixturemanger.getfixtureinfo()
    # to declare that our children do not support funcargs
    nofuncargs = True

    def setup(self):
        # Run setUpClass once and register tearDownClass as a finalizer,
        # unless the whole class is marked skipped.
        cls = self.obj
        if getattr(cls, "__unittest_skip__", False):
            return  # skipped
        setup = getattr(cls, "setUpClass", None)
        if setup is not None:
            setup()
        teardown = getattr(cls, "tearDownClass", None)
        if teardown is not None:
            self.addfinalizer(teardown)
        super(UnitTestCase, self).setup()

    def collect(self):
        # Discover test methods through unittest's own TestLoader; fall back
        # to a bare runTest method unless it is twisted trial's default one.
        from unittest import TestLoader

        cls = self.obj
        if not getattr(cls, "__test__", True):
            return
        self.session._fixturemanager.parsefactories(self, unittest=True)
        loader = TestLoader()
        module = self.getparent(Module).obj
        foundsomething = False
        for name in loader.getTestCaseNames(self.obj):
            x = getattr(self.obj, name)
            if not getattr(x, "__test__", True):
                continue
            funcobj = getimfunc(x)
            # Copy pytest markers from class/module onto the function.
            transfer_markers(funcobj, cls, module)
            yield TestCaseFunction(name, parent=self, callobj=funcobj)
            foundsomething = True
        if not foundsomething:
            runtest = getattr(self.obj, "runTest", None)
            if runtest is not None:
                ut = sys.modules.get("twisted.trial.unittest", None)
                if ut is None or runtest != ut.TestCase.runTest:
                    yield TestCaseFunction("runTest", parent=self)
class TestCaseFunction(Function):
nofuncargs = True
_excinfo = None
_testcase = None
def setup(self):
    # Instantiate the TestCase for this single test method, bind the test
    # method, and honor pytest-style setup_method plus fixture requests.
    self._testcase = self.parent.obj(self.name)
    self._fix_unittest_skip_decorator()
    self._obj = getattr(self._testcase, self.name)
    if hasattr(self._testcase, "setup_method"):
        self._testcase.setup_method(self._obj)
    if hasattr(self, "_request"):
        self._request._fillfixtures()
def _fix_unittest_skip_decorator(self):
    """
    The @unittest.skip decorator calls functools.wraps(self._testcase)
    The call to functools.wraps() fails unless self._testcase
    has a __name__ attribute. This is usually automatically supplied
    if the test is a function or method, but we need to add manually
    here.

    See issue #1169
    """
    # Only Python 2 instances lack __name__; Python 3 needs no fix-up.
    if sys.version_info[0] == 2:
        setattr(self._testcase, "__name__", self.name)
def teardown(self):
    # Mirror setup_method with teardown_method, then drop references so the
    # TestCase instance can be garbage collected.
    if hasattr(self._testcase, "teardown_method"):
        self._testcase.teardown_method(self._obj)
    # Allow garbage collection on TestCase instance attributes.
    self._testcase = None
    self._obj = None
def startTest(self, testcase):
    # Part of the unittest TestResult protocol; nothing to do here.
    pass
def _addexcinfo(self, rawexcinfo):
# unwrap potential exception info (see twisted trial support below)
rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
try:
excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
except TypeError:
try:
try:
values = traceback.format_exception(*rawexcinfo)
values.insert(
0,
"NOTE: Incompatible Exception Representation, "
"displaying natively:\n\n",
)
fail("".join(values), pytrace=False)
except (fail.Exception, KeyboardInterrupt):
raise
except: # noqa
fail(
"ERROR: Unknown Incompatible Exception "
"representation:\n%r" % (rawexcinfo,),
pytrace=False,
)
except KeyboardInterrupt:
raise
except fail.Exception:
excinfo = _pytest._code.ExceptionInfo()
self.__dict__.setdefault("_excinfo", []).append(excinfo)
def addError(self, testcase, rawexcinfo):
self._addexcinfo(rawexcinfo)
def addFailure(self, testcase, rawexcinfo):
self._addexcinfo(rawexcinfo)
def addSkip(self, testcase, reason):
try:
skip(reason)
except skip.Exception:
self._skipped_by_mark = True
self._addexcinfo(sys.exc_info())
def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
try:
xfail(str(reason))
except xfail.Exception:
self._addexcinfo(sys.exc_info())
def addUnexpectedSuccess(self, testcase, reason=""):
self._unexpectedsuccess = reason
def addSuccess(self, testcase):
pass
def stopTest(self, testcase):
pass
def _handle_skip(self):
# implements the skipping machinery (see #2137)
# analog to pythons Lib/unittest/case.py:run
testMethod = getattr(self._testcase, self._testcase._testMethodName)
if getattr(self._testcase.__class__, "__unittest_skip__", False) or getattr(
testMethod, "__unittest_skip__", False
):
# If the class or method was skipped.
skip_why = getattr(
self._testcase.__class__, "__unittest_skip_why__", ""
) or getattr(testMethod, "__unittest_skip_why__", "")
try: # PY3, unittest2 on PY2
self._testcase._addSkip(self, self._testcase, skip_why)
except TypeError: # PY2
if sys.version_info[0] != 2:
raise
self._testcase._addSkip(self, skip_why)
return True
return False
def runtest(self):
if self.config.pluginmanager.get_plugin("pdbinvoke") is None:
self._testcase(result=self)
else:
# disables tearDown and cleanups for post mortem debugging (see #1890)
if self._handle_skip():
return
self._testcase.debug()
def _prunetraceback(self, excinfo):
Function._prunetraceback(self, excinfo)
traceback = excinfo.traceback.filter(
lambda x: not x.frame.f_globals.get("__unittest")
)
if traceback:
excinfo.traceback = traceback
@hookimpl(tryfirst=True)
def pytest_runtest_makereport(item, call):
    """Attach exception info collected through the unittest TestResult
    API (``item._excinfo``) to the pytest CallInfo so reporting sees it."""
    if isinstance(item, TestCaseFunction):
        if item._excinfo:
            call.excinfo = item._excinfo.pop(0)
            # An excinfo and a result are mutually exclusive on CallInfo.
            try:
                del call.result
            except AttributeError:
                pass
# twisted trial support
@hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item):
    """While a trial TestCase item runs, monkeypatch
    ``twisted.python.failure.Failure.__init__`` so that the raw
    ``sys.exc_info()`` triple is preserved on the Failure (as
    ``_rawexcinfo``) for later unwrapping in ``_addexcinfo``.
    The original ``__init__`` is restored after the item finishes.
    """
    if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
        ut = sys.modules["twisted.python.failure"]
        Failure__init__ = ut.Failure.__init__
        check_testcase_implements_trial_reporter()

        def excstore(
            self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None
        ):
            # Capture the active exception when none is passed explicitly.
            if exc_value is None:
                self._rawexcinfo = sys.exc_info()
            else:
                if exc_type is None:
                    exc_type = type(exc_value)
                self._rawexcinfo = (exc_type, exc_value, exc_tb)
            try:
                Failure__init__(
                    self, exc_value, exc_type, exc_tb, captureVars=captureVars
                )
            except TypeError:
                # Older twisted versions have no captureVars parameter.
                Failure__init__(self, exc_value, exc_type, exc_tb)

        ut.Failure.__init__ = excstore
        yield
        ut.Failure.__init__ = Failure__init__
    else:
        yield
def check_testcase_implements_trial_reporter(done=[]):
    """Declare, once per process, that TestCaseFunction implements
    twisted's IReporter interface so trial accepts it as a reporter.

    NOTE: the mutable default ``done`` is intentional here — it serves
    as a module-level run-once guard, not as per-call state.
    """
    if done:
        return
    from zope.interface import classImplements
    from twisted.trial.itrial import IReporter
    classImplements(TestCaseFunction, IReporter)
    done.append(1)
|
ddboline/pytest
|
src/_pytest/unittest.py
|
Python
|
mit
| 8,619
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Tatooine hanging-planter static object template.

    Autogenerated: hand edits belong only between the BEGIN/END
    MODIFICATIONS markers or they will be lost on regeneration.
    """
    result = Static()
    result.template = "object/static/structure/tatooine/shared_planter_hanging_style_01.iff"
    result.attribute_template_id = -1
    result.stfName("obj_n","unknown_object")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
anhstudios/swganh
|
data/scripts/templates/object/static/structure/tatooine/shared_planter_hanging_style_01.py
|
Python
|
mit
| 463
|
# -*- coding: utf-8 -*-
"""
Project Bluebox
Copyright (C) <2015> <University of Stuttgart>
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE file for details.
"""
from mcm.Bluebox import app
from mcm.Bluebox import configuration
# socketio.run(
#     app,
# Development launcher: serve the Flask app directly (no SocketIO, no
# debugger), bound to the host/port taken from the service configuration.
app.run(
    host=configuration.my_bind_host,
    port=int(configuration.my_endpoint_port),
    debug=False,
    threaded=True
)
|
timwaizenegger/swift-bluebox
|
_runApp_Development_nodebug.py
|
Python
|
mit
| 442
|
import subprocess
from genes.mac.traits import only_osx
@only_osx
def systemsetup(*args):
    """Run the macOS ``systemsetup`` utility with the given arguments."""
    command = ['systemsetup']
    command.extend(args)
    subprocess.call(command)
|
hatchery/Genepool2
|
genes/systemsetup/commands.py
|
Python
|
mit
| 143
|
import os
import sys
import yaml
import argparse
from code import interact
def parse_options(argv):
    """Parse macumba-shell command-line options.

    Args:
        argv: argument list (normally ``sys.argv[1:]``).

    Returns:
        argparse.Namespace with ``v1``, ``v2`` and ``model`` attributes.
    """
    parser = argparse.ArgumentParser(prog='macumba-shell',
                                     description='Macumba Shell')
    parser.add_argument('--v1', dest='v1', action='store_true',
                        help='Use Juju 1.x API')
    parser.add_argument('--v2', dest='v2', action='store_true',
                        help='Use Juju 2.x API')
    parser.add_argument('-m', '--model', dest='model',
                        help='The Environment(v1)/Model(v2) to connect to.')
    return parser.parse_args(argv)
def main():
    """Entry point: connect to a Juju API endpoint and start a REPL.

    Reads the connection details (endpoint, model UUID, credentials)
    from the Juju 1.x jenv file or the Juju 2.x models cache depending
    on the --v1/--v2 flag, logs in a ``JujuClient`` and drops into an
    interactive session with it bound to the name ``j``.

    Raises:
        Exception: if no model was given, the environment/cache file is
            missing, or neither API version flag was supplied.
    """
    opts = parse_options(sys.argv[1:])
    if not opts.model:
        raise Exception("Must choose a Environment/Model.")
    if opts.v1:
        juju_home = os.getenv("JUJU_HOME", "~/.juju")
        from .v1 import JujuClient  # noqa
        env = os.path.expanduser(
            os.path.join(
                juju_home,
                "environments/{}.jenv".format(opts.model)))
        if not os.path.isfile(env):
            raise Exception("Unable to locate: {}".format(env))
        # safe_load avoids executing arbitrary YAML tags; the context
        # manager closes the file handle (previously leaked by
        # yaml.load(open(env))).
        with open(env) as env_file:
            env_yaml = yaml.safe_load(env_file)
        uuid = env_yaml['environ-uuid']
        server = env_yaml['state-servers'][0]
        password = env_yaml['password']
        # Kept although unused directly: it is exposed to the REPL via
        # locals() below.
        user = env_yaml['user']
        url = os.path.join('wss://', server, 'environment', uuid, 'api')
    elif opts.v2:
        xdg_home = os.getenv("XDG_DATA_HOME", "~/.local/share")
        juju_home = os.path.join(xdg_home, 'juju')
        from .v2 import JujuClient  # noqa
        env = os.path.expanduser(
            os.path.join(
                juju_home,
                "models/cache.yaml"))
        if not os.path.isfile(env):
            raise Exception("Unable to locate: {}".format(env))
        with open(env) as cache_file:
            env = yaml.safe_load(cache_file)
        uuid = env['server-user'][opts.model]['server-uuid']
        server = env['server-data'][uuid]['api-endpoints'][0]
        password = env['server-data'][uuid]['identities']['admin']
        url = os.path.join('wss://', server, 'model', uuid, 'api')
    else:
        raise Exception("Could not determine Juju API Version to use.")
    print('Connecting to {}'.format(url))
    j = JujuClient(url=url, password=password)
    j.login()
    interact(banner="juju client logged in. Object is named 'j',"
             " so j.status() will fetch current status as a dict.",
             local=locals())
|
battlemidget/conjure-up
|
macumba/cli.py
|
Python
|
mit
| 2,432
|
''' TODO: Init package docs
'''
import logging
import uuid
from grader.models import Grader
from grader.utils.config import is_grader_dir
logger = logging.getLogger(__name__)
help = 'Initialize grader by creating grader.yml'
def setup_parser(parser):
    """Register the ``init`` command's arguments and run callback.

    Args:
        parser: the argparse (sub)parser for the ``init`` command.
    """
    parser.add_argument('--course-id', default=str(uuid.uuid4()),
                        help='Unique course ID (for docker\'s sake)')
    parser.add_argument('--force', action='store_true',
                        help='Overwrite an existing grader.yml')
    # Fixed unbalanced parenthesis in the help text.
    parser.add_argument('--canvas-host', default=None,
                        help='Canvas server to use (will prompt for canvas token)')
    parser.add_argument('name', help='Name of the course')
    parser.set_defaults(run=run)
def run(args):
    """Create a new grader configuration for a course.

    Aborts with ``SystemExit(1)`` if a grader.yml already exists and
    --force was not given.  When --canvas-host is set, prompts
    interactively for a Canvas API token.

    NOTE(review): ``args.path`` is not registered by setup_parser above;
    presumably a shared parent parser supplies it — verify.
    """
    logger.debug("Setting up grader in {}".format(args.path))
    # Check for existing config
    if is_grader_dir(args.path) and not args.force:
        logger.critical(
            "grader already configured in {}. Abort!".format(args.path)
        )
        raise SystemExit(1)
    if args.canvas_host:
        canvas_token = input("Canvas access token (from {}/profile/settings): ".format(args.canvas_host))
    else:
        canvas_token = None
    # Create the new grader
    g = Grader.new(args.path, args.name, args.course_id, args.canvas_host, canvas_token)
    logger.info("Wrote {}".format(g.config.file_path))
|
grade-it/grader
|
redkyn-grader/grader/commands/init.py
|
Python
|
mit
| 1,386
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the melee-speed quest weapon powerup template.

    Autogenerated: hand edits belong only between the BEGIN/END
    MODIFICATIONS markers or they will be lost on regeneration.
    """
    result = Tangible()
    result.template = "object/tangible/powerup/weapon/fs_quest_sad/shared_melee_speed_quest.iff"
    result.attribute_template_id = -1
    result.stfName("powerup_n","weapon_melee_speed_quest")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/powerup/weapon/fs_quest_sad/shared_melee_speed_quest.py
|
Python
|
mit
| 483
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Sean Trenwell NPC creature template.

    Autogenerated: hand edits belong only between the BEGIN/END
    MODIFICATIONS markers or they will be lost on regeneration.
    """
    result = Creature()
    result.template = "object/mobile/shared_dressed_sean_trenwell.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name","sean_trenwell")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
anhstudios/swganh
|
data/scripts/templates/object/mobile/shared_dressed_sean_trenwell.py
|
Python
|
mit
| 444
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Hutt medium ship-weapon attachment template.

    Autogenerated: hand edits belong only between the BEGIN/END
    MODIFICATIONS markers or they will be lost on regeneration.
    """
    result = Tangible()
    result.template = "object/tangible/ship/attachment/weapon/shared_hutt_medium_weapon1_s02.iff"
    result.attribute_template_id = 8
    result.stfName("item_n","ship_attachment")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
obi-two/Rebelion
|
data/scripts/templates/object/tangible/ship/attachment/weapon/shared_hutt_medium_weapon1_s02.py
|
Python
|
mit
| 471
|
# -*- coding: utf-8 -*-
import pytest
from bottle import Bottle, debug, request, response
from webtest import TestApp
from webargs import ValidationError, fields
from webargs.bottleparser import BottleParser
# Argument schemas exercised by the echo endpoints below: a single
# string with a default and a min-length validator, and a string list.
hello_args = {
    'name': fields.Str(missing='World', validate=lambda n: len(n) >= 3),
}
hello_multiple = {
    'name': fields.List(fields.Str())
}
# Shared parser instance under test.
parser = BottleParser()
@pytest.fixture
def app():
    """Bottle app fixture with /echo and /echomulti/ parsing endpoints."""
    app = Bottle()

    @app.route('/echo', method=['GET', 'POST'])
    def index():
        return parser.parse(hello_args, request)

    @app.route('/echomulti/', method=['GET', 'POST'])
    def multi():
        return parser.parse(hello_multiple, request)

    debug(True)
    return app
@pytest.fixture
def testapp(app):
    # WebTest wrapper around the Bottle app for issuing test requests.
    return TestApp(app)
def test_parse_querystring_args(testapp):
    # Query-string values are parsed into the schema.
    assert testapp.get('/echo?name=Fred').json == {'name': 'Fred'}


def test_parse_querystring_multiple(testapp):
    # Repeated query params collect into a list field.
    expected = {'name': ['steve', 'Loria']}
    assert testapp.get('/echomulti/?name=steve&name=Loria').json == expected


def test_parse_form_multiple(testapp):
    # Repeated form values collect into a list field.
    expected = {'name': ['steve', 'Loria']}
    assert testapp.post('/echomulti/', {'name': ['steve', 'Loria']}).json == expected


def test_parse_form(testapp):
    assert testapp.post('/echo', {'name': 'Joe'}).json == {'name': 'Joe'}


def test_parse_json(testapp):
    assert testapp.post_json('/echo', {'name': 'Fred'}).json == {'name': 'Fred'}


def test_parse_json_default(testapp):
    # Missing JSON key falls back to the schema default.
    assert testapp.post_json('/echo', {}).json == {'name': 'World'}


def test_parsing_form_default(testapp):
    # Missing form key falls back to the schema default.
    assert testapp.post('/echo', {}).json == {'name': 'World'}


def test_abort_called_on_validation_error(testapp):
    # 'b' fails the len >= 3 validator, yielding 422 Unprocessable Entity.
    res = testapp.post('/echo', {'name': 'b'}, expect_errors=True)
    assert res.status_code == 422
def test_validator_that_raises_validation_error(app):
    """A validator raising ValidationError must produce a 422 response."""
    def always_fail(value):
        raise ValidationError('something went wrong')
    args = {'text': fields.Str(validate=always_fail)}

    @app.route('/validated', method=['POST'])
    def validated_route():
        parser.parse(args)
    vtestapp = TestApp(app)
    res = vtestapp.post_json('/validated', {'text': 'bar'}, expect_errors=True)
    assert res.status_code == 422
def test_use_args_decorator(app, testapp):
    # use_args injects the parsed dict as the first positional argument.
    @app.route('/foo/', method=['GET', 'POST'])
    @parser.use_args({'myvalue': fields.Int()})
    def echo2(args):
        return args
    assert testapp.post('/foo/', {'myvalue': 23}).json == {'myvalue': 23}


def test_use_args_with_validation(app, testapp):
    # Whole-args validator: 43 passes (> 42), 41 is rejected with 422.
    @app.route('/foo/', method=['GET', 'POST'])
    @parser.use_args({'myvalue': fields.Int()}, validate=lambda args: args['myvalue'] > 42)
    def echo(args):
        return args
    result = testapp.post('/foo/', {'myvalue': 43}, expect_errors=True)
    assert result.status_code == 200
    result = testapp.post('/foo/', {'myvalue': 41}, expect_errors=True)
    assert result.status_code == 422


def test_use_args_with_url_params(app, testapp):
    # Parsed args precede bottle's URL parameters in the signature.
    @app.route('/foo/<name>')
    @parser.use_args({'myvalue': fields.Int()})
    def foo(args, name):
        return args
    assert testapp.get('/foo/Fred?myvalue=42').json == {'myvalue': 42}


def test_use_kwargs_decorator(app, testapp):
    # use_kwargs injects parsed values as keyword arguments.
    @app.route('/foo/', method=['GET', 'POST'])
    @parser.use_kwargs({'myvalue': fields.Int()})
    def echo2(myvalue):
        return {'myvalue': myvalue}
    assert testapp.post('/foo/', {'myvalue': 23}).json == {'myvalue': 23}


def test_use_kwargs_with_url_params(app, testapp):
    @app.route('/foo/<name>')
    @parser.use_kwargs({'myvalue': fields.Int()})
    def foo(myvalue, name):
        return {'myvalue': myvalue}
    assert testapp.get('/foo/Fred?myvalue=42').json == {'myvalue': 42}
def test_parsing_headers(app, testapp):
    # Explicit locations=('headers',) reads values from request headers.
    @app.route('/echo2')
    def echo2():
        args = parser.parse(hello_args, request, locations=('headers',))
        return args
    res = testapp.get('/echo2', headers={'name': 'Fred'}).json
    assert res == {'name': 'Fred'}


def test_parsing_cookies(app, testapp):
    # Set a cookie on one request, then parse it from the next.
    @app.route('/setcookie')
    def setcookie():
        response.set_cookie('name', 'Fred')
        return {}

    @app.route('/echocookie')
    def echocookie():
        args = parser.parse(hello_args, request, locations=('cookies',))
        return args
    testapp.get('/setcookie')
    assert testapp.get('/echocookie').json == {'name': 'Fred'}


def test_arg_specific_locations(app, testapp):
    # Each field may declare its own source location.
    testargs = {
        'name': fields.Str(location='json'),
        'age': fields.Int(location='querystring'),
    }

    @app.route('/echo', method=['POST'])
    def echo():
        args = parser.parse(testargs, request)
        return args
    resp = testapp.post_json('/echo?age=42', {'name': 'Fred'})
    assert resp.json['age'] == 42
    assert resp.json['name'] == 'Fred'
|
hyunchel/webargs
|
tests/test_bottleparser.py
|
Python
|
mit
| 4,813
|
#!/usr/bin/env python
# Ben Jones
# Georgia Tech
# Spring 2014
#
# htpt-socks.py: this file builds upon the work of Zavier Lagraula's
# PySocks code to create a SOCKS server for our HTTP circumvention tool
import socks
"""SOCKS 4 proxy server class.
Copyright (C) 2001 Xavier Lagraula
See COPYRIGHT.txt and GPL.txt for copyrights information.
Build upon the TCPServer class of the SocketServer module, the Socks4Proxy
class is an implementation of the SOCKS protocol, version 4.
This server uses one thread per connection request.
Known bugs:
- Some CONNECT request closed by the client are not detected and finish in an
infinite loop of select always returning the "request" socket as ready to
read even if there is nothing more to read on it. This situation is now
detected and lead to a Closed_Connection exception.
Implementation choices:
- Protocol errors are handled by exceptions
- The function that creates a socket is responsible for its closing -> never
close a socket passed in as a parameter, and always use a try/finally block
after creating a socket to ensure correct closing of sockets.
"""
import SocketServer2
import time
import select
import thread
import IDENT_Client
import IPv4_Tools
import getopt
import os
import sys
import socket
import ConfigFile
# Public API of this module.
__all__ = [
    'DEFAULT_OPTIONS',
    'SocksError',
    'Connection_Closed',
    'Bind_TimeOut_Expired',
    'Request_Error',
    'Client_Connection_Closed',
    'Remote_Connection_Closed',
    'Remote_Connection_Failed',
    'Remote_Connection_Failed_Invalid_Host',
    'Request_Failed',
    'Request_Failed_No_Identd',
    'Request_Failed_Ident_failed',
    'Request_Refused',
    'Request_Bad_Version',
    'Request_Unknown_Command',
    'Request_Unauthorized_Client',
    'Request_Invalid_Port',
    'Request_Invalid_Format',
    'ThreadingSocks4Proxy'
]
# Default server options.
# Options are stored in a dictionary.
DEFAULT_OPTIONS = {}
OPTION_TYPE = {}
# The interface on which the server listens for incoming SOCKS requests.
DEFAULT_OPTIONS['bind_address'] = '127.0.0.1'
# The port on which the server listens for incoming SOCKS requests.
DEFAULT_OPTIONS['bind_port'] = 10000
# Will the server use IDENT request to authenticate the user making a request?
DEFAULT_OPTIONS['use_ident'] = 0
# Maximum request size taken in consideration.
DEFAULT_OPTIONS['req_buf_size'] = 1024
# Data is forwarded between the client and the remote peer by blocks of max
# 'data_buf_size' bytes.
DEFAULT_OPTIONS['data_buf_size'] = 1500
# After this delay n seconds without any activity on a connection between the
# client and the remote peer, the connection is closed.
DEFAULT_OPTIONS['inactivity_timeout'] = 360
# The SOCKS proxy waits no more than this number of seconds for an incoming
# connection (BIND requests). It then rejects the client request.
DEFAULT_OPTIONS['bind_timeout'] = 120
# Port the upstream/next-hop proxy listens on (used by handle_connect).
DEFAULT_OPTIONS['send_port'] = 8000
# getopt specifications matching the options above.
SHORT_OPTIONS = 'a:p:i:r:d:t:b:'
# The map trick is useful here as all options
LONG_OPTIONS = [
    'bind_address=',
    'bind_port=',
    'use_ident',
    'req_buf_size=',
    'data_buf_size=',
    'inactivity_timeout=',
    'bind_timeout='
]
DEFAULT_OPTIONS['configfile'] = ''
OPTION_TYPE['configfile'] = ['string']
# SOCKS 4 protocol constant values.
SOCKS_VERSION = 4
COMMAND_CONNECT = 1
COMMAND_BIND = 2
COMMANDS = [
    COMMAND_CONNECT,
    COMMAND_BIND
]
# SOCKS 4 reply codes.
REQUEST_GRANTED = 90
REQUEST_REJECTED_FAILED = 91
REQUEST_REJECTED_NO_IDENTD = 92
REQUEST_REJECTED_IDENT_FAILED = 93
# Sockets protocol constant values (WinSock error numbers).
ERR_CONNECTION_RESET_BY_PEER = 10054
ERR_CONNECTION_REFUSED = 10061
# For debugging only.
def now():
    """Return the current local time as a human-readable string (debug aid)."""
    timestamp = time.time()
    return time.ctime(timestamp)
# Exception classes for the server
class SocksError(Exception):
    """Root of the SOCKS proxy exception hierarchy."""


class Connection_Closed(SocksError):
    """A peer connection was closed (or detected as closed)."""


class Bind_TimeOut_Expired(SocksError):
    """No incoming connection arrived in time for a BIND request."""


class Request_Error(SocksError):
    """A client request could not be honoured."""


class Client_Connection_Closed(Connection_Closed):
    """The client side of the proxied connection closed."""


class Remote_Connection_Closed(Connection_Closed):
    """The remote side of the proxied connection closed."""


class Remote_Connection_Failed(Connection_Closed):
    """Connecting to the remote peer failed."""


class Remote_Connection_Failed_Invalid_Host(Remote_Connection_Failed):
    """Connecting failed because the remote host was invalid."""


class Request_Failed(Request_Error):
    """The request failed during processing."""


class Request_Failed_No_Identd(Request_Failed):
    """No identd server answered on the client host."""


class Request_Failed_Ident_failed(Request_Failed):
    """The identd check did not confirm the claimed user."""


class Request_Refused(Request_Error):
    """The request was refused by policy or validation."""


class Request_Bad_Version(Request_Refused):
    """The request did not use SOCKS version 4."""


class Request_Unknown_Command(Request_Refused):
    """The request command was neither CONNECT nor BIND."""


class Request_Unauthorized_Client(Request_Refused):
    """The client address is not allowed to use this proxy."""


class Request_Invalid_Port(Request_Refused):
    """The requested port is outside the valid range."""


class Request_Invalid_Format(Request_Refused):
    """The request packet was malformed."""
# Server class
class ThreadingSocks4Proxy(SocketServer2.ThreadingTCPServer):
"""Threading SOCKS4 proxy class.
Note: this server maintains two lists of all CONNECTION and BIND requests being
handled. This is not really useful for now but may become in the future.
Moreover, it has been a good way to learn about the semaphores of the threading
module :)"""
def __Decode_Command_Line(self, argv = [], definitions = {}, defaults = {}):
result = {}
line_opts, rest = getopt.getopt(argv, SHORT_OPTIONS, LONG_OPTIONS)
for item in line_opts:
opt, value = item
# First trying "opt" value against options that use an argument.
if opt in ['-a', '--bind_adress']:
opt = 'bind_adress'
elif opt in ['-p', '--bind_port']:
opt = 'bind_port'
elif opt in ['-i', '--use_ident']:
opt = 'use_ident'
value = 1
elif opt in ['-r', '--req_buf_size']:
opt = 'req_buf_size'
elif opt in ['-d', '--data_buf_size']:
opt = 'data_buf_size'
elif opt in ['-d', '--inactivity_timeout']:
opt = 'inactivity_timeout'
elif opt in ['-b', '--bind_timeout']:
opt = 'bind_timeout'
result[opt] = value
return ConfigFile.evaluate(definitions, result, defaults)
def __init__(self, RequestHandlerClass, *args):
"""Constructor of the server."""
self.Options = DEFAULT_OPTIONS
listenPort = args[0]
if len(args) > 1:
sendPort = args[1]
self.Options['send_port'] = sendPort
self.Options['bind_port'] = listenPort
print "Server starting with following options:"
for i in self.Options.keys(): print i, ':', self.Options[i]
print 'The choosen ip adress is', DEFAULT_OPTIONS['bind_address']
SocketServer2.ThreadingTCPServer.__init__(
self,
(self.Options['bind_address'], self.Options['bind_port']),
RequestHandlerClass)
class ForwardSocksReq(SocketServer2.BaseRequestHandler):
    """This request handler class handles sOCKS 4 requests."""

    def handle(self):
        """This function is the main request handler function.

        It delegates each step of the request processing to a different function and
        handles raised exceptions in order to warn the client that its request has
        been rejected (if needed).
        The steps are:
        - decode_request: reads the request and splits it into a dictionary. it checks
        if the request is well-formed (correct socks version, correct command number,
        well-formed port number.
        - validate_request: checks if the current configuration accepts to handle the
        request (client identification, authorization rules)
        - handle_connect: handles CONNECT requests
        - handle_bind: handles BIND requests
        """
        print thread.get_ident(), '-'*40
        print thread.get_ident(), 'Request from ', self.client_address
        try:
            # Read and decode the request from the client and verify that it
            # is well-formed.
            req = self.decode_request()
            print thread.get_ident(), 'Decoded request:', req
            # We have some well-formed request to handle.
            # Let's validate it.
            self.validate_request(req)
            # We are here so the request is valid.
            # We must decide of the action to take according to the "command"
            # part of the request.
            if req['command'] == COMMAND_CONNECT:
                self.handle_connect(req)
            elif req['command'] == COMMAND_BIND:
                # NOTE(review): handle_bind is not defined on this class, so a
                # BIND request would raise AttributeError here — verify.
                self.handle_bind(req)
        # Global SOCKS errors handling.
        except Request_Failed_No_Identd:
            self.answer_rejected(REQUEST_REJECTED_NO_IDENTD)
        except Request_Failed_Ident_failed:
            self.answer_rejected(REQUEST_REJECTED_IDENT_FAILED)
        except Request_Error:
            self.answer_rejected()
        except Remote_Connection_Failed:
            self.answer_rejected()
        except Bind_TimeOut_Expired:
            self.answer_rejected()
        # Once established, if the remote or the client connection is closed
        # we must exit silently. This exception is in fact the way the function
        # used to forward data between the client and the remote server tells
        # us it has finished working.
        except Connection_Closed:
            pass

    def validate_request(self, req):
        """This function validates the request against any validating rule.

        Two things are taken in consideration:
        - where does the request come from? (address check)
        - who is requesting? (identity check)

        Note: in fact, identity verification is disabled for now because ICQ does
        stupid things such as always providing a "a" user that doesn't exists in bind
        requests."""
        # Address check. As for now, only requests from non routable addresses
        # are accepted. This is because of security issues and will later be
        # configurable with a system of validating rules.
        if IPv4_Tools.is_routable(self.client_address[0]):
            raise Request_Unauthorized_Client(req)
        # If a user ID is provided, we must make an identd control. As for now,
        # we accept request without userid without control but this behaviour
        # will be changed when configurations options will be provided.
        if req['userid'] and self.server.Options['use_ident']:
            # We must get some information about the request socket to build
            # the identd request.
            local_ip, local_port = self.request.getsockname()
            ident_srv_ip, ident_srv_port = self.request.getpeername()
            if (not IDENT_Client.check_IDENT(
                    ident_srv_ip, ident_srv_port, local_port, req['userid'])):
                raise Request_Failed_Ident_failed(req)
        # If we managed to get here, then the request is valid.

    def decode_request(self):
        """This function reads the request socket for the request data, decodes
        it and checks that it is well formed."""
        # reading the data from the socket.
        data = self.request.recv(self.server.Options['req_buf_size'])
        # It is useless to process too short a request.
        if len(data) < 9: raise Request_Invalid_Format(data)
        # Extracting components of the request. Checks are made at each step.
        req = {}
        # SOCKS version of the request.
        req['version'] = ord(data[0])
        if req['version'] != SOCKS_VERSION:
            raise Request_Bad_Version(req)
        # Command used.
        req['command'] = ord(data[1])
        if not req['command'] in COMMANDS:
            raise Request_Unknown_Command(req)
        # Address of the remote peer: (dotted-quad ip, port) tuple.
        req['address'] = (
            socket.inet_ntoa(data[4:8]),
            self.string2port(data[2:4]))
        if not IPv4_Tools.is_port(req['address'][1]):
            raise Request_Invalid_Port(req)
        # Note: only the fact that the port is in [1, 65535] is checked here.
        # Address and port legitimity are later checked in validate_request.
        # Requester user ID. May not be provided.
        req['userid'] = self.get_string(data[8:])
        req['data'] = data
        # If we are here, then the request is well-formed. Let us return it to
        # the caller.
        return req

    def handle_connect(self, req):
        """This function handles a CONNECT request.

        The actions taken are:
        - create a new socket,
        - register the connection into the server,
        - connect to the remote host,
        - tell the client the connection is established,
        - forward data between the client and the remote peer."""
        # Create a socket to connect to the remote server
        print "Here- address: {}".format(req['address'])
        # remote = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # The outgoing connection is itself chained through another SOCKS4
        # proxy on localhost:send_port.
        remote = socks.socksocket()
        remote.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        remote.setproxy(socks.PROXY_TYPE_SOCKS4, "localhost", self.server.Options['send_port'])
        # From now on, we must not forget to close this socket before leaving.
        try:
            try:
                # Connection to the remote server
                print thread.get_ident(), 'Connecting to', req['address']
                # Possible way to handle the timeout defined in the protocol!
                # Make the connect non-blocking, then do a select and keep
                # an eye on the writable socket, just as I did with the
                # accept() from BIND requests.
                # Do this tomorrow... Geez... 00:47... Do this this evening.
                # remote.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                # print "Trying to connect to server: {}".format(self.server.Options['send_port'])
                # remote.connect(("127.0.0.1",self.server.Options['send_port']))
                remote.connect(req['address'])
                print "Success!"
                # remote.send(req['data'])
            # The only connection that can be reset here is the one of the
            # client, so we don't need to answer. Any other socket
            # exception forces us to try to answer to the client.
            except socket.error as e:
                print e
                exception, value, traceback = sys.exc_info()
                if value[0] == ERR_CONNECTION_RESET_BY_PEER:
                    raise Client_Connection_Closed((ERR_CONNECTION_RESET_BY_PEER, socket.errorTab[ERR_CONNECTION_RESET_BY_PEER]))
                else:
                    raise Remote_Connection_Failed
            except:
                raise Remote_Connection_Failed
            # From now on we will already have answered to the client.
            # Any exception occuring now must make us exit silently.
            try:
                # Telling the client that the connection it asked for is
                # granted.
                self.answer_granted()
                # Starting to relay information between the two peers.
                self.forward(self.request, remote)
            # We don't have the right to "speak" to the client anymore.
            # So any socket failure means a "connection closed" and silent
            # exit.
            except socket.error:
                raise Connection_Closed
        # Mandatory closing of the remote socket.
        finally:
            remote.close()

    def answer_granted(self, dst_ip = '0.0.0.0', dst_port = 0):
        """This function sends a REQUEST_GRANTED answer to the client."""
        self.answer(REQUEST_GRANTED, dst_ip, dst_port)

    def answer_rejected(self, reason = REQUEST_REJECTED_FAILED, dst_ip = '0.0.0.0', dst_port = 0):
        """This function send a REQUEST_REJECTED answer to the client."""
        self.answer(reason, dst_ip, dst_port)

    def answer(self, code = REQUEST_GRANTED, ip_str = '0.0.0.0', port_int = 0):
        """This function sends an answer to the client. This has been
        factorised because all answers follow the same format."""
        # Any problem occuring here means that we are unable to "speak" to
        # the client -> we must act as if the connection to it had already
        # been closed.
        try:
            ip = socket.inet_aton(ip_str)
            port = self.port2string(port_int)
            packet = chr(0)       # Version number is 0 in answer
            packet += chr(code)   # Error code
            packet += port
            packet += ip
            print thread.get_ident(), 'Sending back:', code, self.string2port(port), socket.inet_ntoa(ip)
            self.request.send(packet)
        except:
            # Trying to keep a trace of the original exception.
            raise Client_Connection_Closed(sys.exc_info())

    def forward(self, client_sock, server_sock):
        """This function makes the forwarding of data by listening to two
        sockets, and writing to one everything it reads on the other.

        This is done using select(), in order to be able to listen on both sockets
        simultaneously and to implement an inactivity timeout."""
        # Once we're here, we are not supposed to "speak" with the client
        # anymore. So any error means for us to close the connection.
        print thread.get_ident(), 'Forwarding.'
        # These are not used to anything significant now, but I keep them in
        # case I would want to do some statistics/logging.
        octets_in, octets_out = 0, 0
        try:
            try:
                # Here are the sockets we will be listening.
                sockslist = [client_sock, server_sock]
                while 1:
                    # Let us listen...
                    readables, writeables, exceptions = select.select(
                        sockslist, [], [],
                        self.server.Options['inactivity_timeout'])
                    # If the "exceptions" list is not empty or if we are here
                    # because of the timer (i.e. all lists are empty), then
                    # we must must bail out, we have finished our work.
                    if (exceptions
                            or (readables, writeables, exceptions) == ([], [], [])):
                        raise Connection_Closed
                    # Only a precaution.
                    data = ''
                    # Just in case we would be in the improbable case of data
                    # awaiting to be read on both sockets, we treat the
                    # "readables" list as if it oculd contain more than one
                    # element. Thus the "for" loop...
                    for readable_sock in readables:
                        # We know the socket we want to read of, but we still
                        # must find what is the other socket. This method
                        # builds a list containing one element.
                        writeableslist = [client_sock, server_sock]
                        writeableslist.remove(readable_sock)
                        # We read one chunk of data and then send it to the
                        # other socket
                        data = readable_sock.recv(
                            self.server.Options['data_buf_size'])
                        # We must handle the case where data=='' because of a
                        # bug: we sometimes end with an half-closed socket,
                        # i.e. a socket closed by the peer, on which one can
                        # always read, but where there is no data to read.
                        # This must be detected or it would lead to an infinite
                        # loop.
                        if data:
                            writeableslist[0].send(data)
                            # This is only for future logging/stats.
                            if readable_sock == client_sock:
                                octets_out += len(data)
                            else:
                                octets_in += len(data)
                        else:
                            # The sock is readable but nothing can be read.
                            # This means a poorly detected connection close.
                            raise Connection_Closed
            # If one peer closes its conenction, we have finished our work.
            except socket.error:
                exception, value, traceback = sys.exc_info()
                if value[0] == ERR_CONNECTION_RESET_BY_PEER:
                    raise Connection_Closed
                raise
        finally:
            print thread.get_ident(), octets_in, 'octets in and', octets_out, 'octets out. Connection closed.'

    def string2port(self, port_str):
        """This function converts between a packed (16 bits) port number to an
        integer."""
        return (ord(port_str[0]) << 8) + ord(port_str[1])

    def port2string(self, port):
        """This function converts a port number (16 bits integer) into a packed
        string (2 chars)."""
        return chr((port & 0xff00) >> 8)+ chr(port & 0x00ff)

    def get_string(self, nullterminated):
        """This function converts a null terminated string stored in a Python
        string to a "normal Python string."""
        return nullterminated[0: nullterminated.index(chr(0))]
class ReceiveSocksReq(SocketServer2.BaseRequestHandler):
    """This request handler class handles SOCKS 4 requests.

    NOTE: this module is Python 2 code (print statements, socket data
    handled as byte strings via ord()); it will not run unmodified under
    Python 3.
    """

    def handle(self):
        """This function is the main request handler function.

        It delegates each step of the request processing to a different function and
        handles raised exceptions in order to warn the client that its request has
        been rejected (if needed).

        The steps are:
        - decode_request: reads the request and splits it into a dictionary. it checks
        if the request is well-formed (correct socks version, correct command number,
        well-formed port number.
        - validate_request: checks if the current configuration accepts to handle the
        request (client identification, authorization rules)
        - handle_connect: handles CONNECT requests
        - handle_bind: handles BIND requests
        """
        print thread.get_ident(), '-'*40
        print thread.get_ident(), 'Request from ', self.client_address
        try:
            # Read and decode the request from the client and verify that it
            # is well-formed.
            req = self.decode_request()
            print thread.get_ident(), 'Decoded request:', req
            # We have some well-formed request to handle.
            # Let's validate it.
            self.validate_request(req)
            # We are here so the request is valid.
            # We must decide of the action to take according to the "command"
            # part of the request.
            if req['command'] == COMMAND_CONNECT:
                self.handle_connect(req)
            elif req['command'] == COMMAND_BIND:
                self.handle_bind(req)
        # Global SOCKS errors handling: each rejection reason maps to the
        # answer code sent back to the client.
        except Request_Failed_No_Identd:
            self.answer_rejected(REQUEST_REJECTED_NO_IDENTD)
        except Request_Failed_Ident_failed:
            self.answer_rejected(REQUEST_REJECTED_IDENT_FAILED)
        except Request_Error:
            self.answer_rejected()
        except Remote_Connection_Failed:
            self.answer_rejected()
        except Bind_TimeOut_Expired:
            self.answer_rejected()
        # Once established, if the remote or the client connection is closed
        # we must exit silently. This exception is in fact the way the function
        # used to forward data between the client and the remote server tells
        # us it has finished working.
        except Connection_Closed:
            pass

    def validate_request(self, req):
        """This function validates the request against any validating rule.

        Two things are taken in consideration:
        - where does the request come from? (address check)
        - who is requesting? (identity check)

        Note: in fact, identity verification is disabled for now because ICQ does
        stupid things such as always providing a "a" user that doesn't exist in bind
        requests."""
        # Address check. As for now, only requests from non routable addresses
        # are accepted. This is because of security issues and will later be
        # configurable with a system of validating rules.
        if IPv4_Tools.is_routable(self.client_address[0]):
            raise Request_Unauthorized_Client(req)
        # If a user ID is provided, we must make an identd control. As for now,
        # we accept requests without userid without control but this behaviour
        # will be changed when configuration options will be provided.
        if req['userid'] and self.server.Options['use_ident']:
            # We must get some information about the request socket to build
            # the identd request.
            local_ip, local_port = self.request.getsockname()
            ident_srv_ip, ident_srv_port = self.request.getpeername()
            if (not IDENT_Client.check_IDENT(
                    ident_srv_ip, ident_srv_port, local_port, req['userid'])):
                raise Request_Failed_Ident_failed(req)
        # If we managed to get here, then the request is valid.

    def decode_request(self):
        """This function reads the request socket for the request data, decodes
        it and checks that it is well formed."""
        # reading the data from the socket.
        data = self.request.recv(self.server.Options['req_buf_size'])
        # It is useless to process too short a request: a minimal SOCKS 4
        # request is version + command + port (2) + address (4) + NUL = 9 bytes.
        if len(data) < 9: raise Request_Invalid_Format(data)
        # Extracting components of the request. Checks are made at each step.
        req = {}
        # SOCKS version of the request.
        req['version'] = ord(data[0])
        if req['version'] != SOCKS_VERSION:
            raise Request_Bad_Version(req)
        # Command used.
        req['command'] = ord(data[1])
        if not req['command'] in COMMANDS:
            raise Request_Unknown_Command(req)
        # Address of the remote peer.
        req['address'] = (
            socket.inet_ntoa(data[4:8]),
            self.string2port(data[2:4]))
        if not IPv4_Tools.is_port(req['address'][1]):
            raise Request_Invalid_Port(req)
        # Note: only the fact that the port is in [1, 65535] is checked here.
        # Address and port legitimity are later checked in validate_request.
        # Requester user ID. May not be provided.
        req['userid'] = self.get_string(data[8:])
        # If we are here, then the request is well-formed. Let us return it to
        # the caller.
        return req

    def handle_bind(self, req):
        """This function handles a BIND request.

        The actions taken are:
        - create a new socket,
        - bind it to the external ip chosen on init of the server,
        - listen for a connection on this socket,
        - register the bind into the server,
        - tell the client the bind is ready,
        - accept an incoming connection,
        - tell the client the connection is established,
        - forward data between the client and the remote peer."""
        # Create a socket to receive incoming connection.
        remote = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # From now on, whatever we do, we must close the "remote" socket before
        # leaving. I love try/finally blocks.
        try:
            # In this block, the only open connection is the client one, so a
            # ERR_CONNECTION_RESET_BY_PEER exception means "exit silently
            # because you won't be able to send me anything anyway".
            # Any other exception must interrupt processing and exit from here.
            try:
                # Binding the new socket to the chosen external ip; port 0
                # lets the OS pick a free ephemeral port.
                remote.bind((self.server.external_ip, 0))
                remote.listen(1)
                # Collecting information about the socket to store it in the
                # "waiting binds" list.
                socket_ip, socket_port = remote.getsockname()
            except socket.error:
                # A "connection reset by peer" here means the client has closed
                # the connection.
                exception, value, traceback = sys.exc_info()
                if value[0] == ERR_CONNECTION_RESET_BY_PEER:
                    raise Client_Connection_Closed((ERR_CONNECTION_RESET_BY_PEER, socket.errorTab[ERR_CONNECTION_RESET_BY_PEER]))
                else:
                    # We may be able to make a more precise diagnostic, but
                    # in fact, it doesn't seem useful here for now.
                    raise Remote_Connection_Failed
            # Sending first answer meaning request is accepted and socket
            # is waiting for incoming connection.
            self.answer_granted(socket_ip, socket_port)
            try:
                # Waiting for incoming connection. I use a select here to
                # implement the timeout stuff.
                read_sock, junk, exception_sock = select.select(
                    [remote], [], [remote],
                    self.server.Options['bind_timeout'])
                # If all lists are empty, then the select has ended because
                # of the timer.
                if (read_sock, junk, exception_sock) == ([], [], []):
                    raise Bind_TimeOut_Expired
                # We also drop the connection if an exception condition is
                # detected on the socket. We must also warn the client that
                # its request is rejected (remember that for a bind, the client
                # expects TWO answers from the proxy).
                if exception_sock:
                    raise Remote_Connection_Failed
                # An incoming connection is pending. Let us accept it
                incoming, peer = remote.accept()
            except:
                # We try to keep a trace of the previous exception
                # for debugging purpose.
                # NOTE(review): this bare except also converts the
                # Bind_TimeOut_Expired raised just above into
                # Remote_Connection_Failed; both lead to answer_rejected()
                # in handle(), but confirm this is intended.
                raise Remote_Connection_Failed(sys.exc_info())
            # From now on, we must not forget to close this connection.
            try:
                # We must now check that the incoming connection is from
                # the expected server.
                if peer[0] != req['address'][0]:
                    raise Remote_Connection_Failed_Invalid_Host
                # We can now tell the client the connection is OK, and
                # start the forwarding process.
                self.answer_granted()
                self.forward(self.request, incoming)
            # Mandatory closing of the socket with the remote peer.
            finally:
                incoming.close()
        # Mandatory closing of the listening socket
        finally:
            remote.close()

    def handle_connect(self, req):
        """This function handles a CONNECT request.

        The actions taken are:
        - create a new socket,
        - register the connection into the server,
        - connect to the remote host,
        - tell the client the connection is established,
        - forward data between the client and the remote peer."""
        # Create a socket to connect to the remote server
        remote = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # From now on, we must not forget to close this socket before leaving.
        try:
            try:
                # Connection to the remote server
                print thread.get_ident(), 'Connecting to', req['address']
                # Possible way to handle the timeout defined in the protocol!
                # Make the connect non-blocking, then do a select and keep
                # an eye on the writable socket, just as I did with the
                # accept() from BIND requests.
                # Do this tomorrow... Geez... 00:47... Do this this evening.
                remote.connect(req['address'])
            # The only connection that can be reset here is the one of the
            # client, so we don't need to answer. Any other socket
            # exception forces us to try to answer to the client.
            except socket.error:
                exception, value, traceback = sys.exc_info()
                if value[0] == ERR_CONNECTION_RESET_BY_PEER:
                    raise Client_Connection_Closed((ERR_CONNECTION_RESET_BY_PEER, socket.errorTab[ERR_CONNECTION_RESET_BY_PEER]))
                else:
                    raise Remote_Connection_Failed
            except:
                raise Remote_Connection_Failed
            # From now on we will already have answered to the client.
            # Any exception occurring now must make us exit silently.
            try:
                # Telling the client that the connection it asked for is
                # granted.
                self.answer_granted()
                # Starting to relay information between the two peers.
                self.forward(self.request, remote)
            # We don't have the right to "speak" to the client anymore.
            # So any socket failure means a "connection closed" and silent
            # exit.
            except socket.error:
                raise Connection_Closed
        # Mandatory closing of the remote socket.
        finally:
            remote.close()

    def answer_granted(self, dst_ip = '0.0.0.0', dst_port = 0):
        """This function sends a REQUEST_GRANTED answer to the client."""
        self.answer(REQUEST_GRANTED, dst_ip, dst_port)

    def answer_rejected(self, reason = REQUEST_REJECTED_FAILED, dst_ip = '0.0.0.0', dst_port = 0):
        """This function sends a REQUEST_REJECTED answer to the client."""
        self.answer(reason, dst_ip, dst_port)

    def answer(self, code = REQUEST_GRANTED, ip_str = '0.0.0.0', port_int = 0):
        """This function sends an answer to the client. This has been
        factorised because all answers follow the same format."""
        # Any problem occurring here means that we are unable to "speak" to
        # the client -> we must act as if the connection to it had already
        # been closed.
        try:
            ip = socket.inet_aton(ip_str)
            port = self.port2string(port_int)
            packet = chr(0) # Version number is 0 in answer
            packet += chr(code) # Error code
            packet += port # Destination port (2 bytes, network order)
            packet += ip # Destination address (4 bytes)
            print thread.get_ident(), 'Sending back:', code, self.string2port(port), socket.inet_ntoa(ip)
            self.request.send(packet)
        except:
            # Trying to keep a trace of the original exception.
            raise Client_Connection_Closed(sys.exc_info())

    def forward(self, client_sock, server_sock):
        """This function makes the forwarding of data by listening to two
        sockets, and writing to one everything it reads on the other.

        This is done using select(), in order to be able to listen on both sockets
        simultaneously and to implement an inactivity timeout."""
        # Once we're here, we are not supposed to "speak" with the client
        # anymore. So any error means for us to close the connection.
        print thread.get_ident(), 'Forwarding.'
        # These are not used for anything significant now, but I keep them in
        # case I would want to do some statistics/logging.
        octets_in, octets_out = 0, 0
        try:
            try:
                # Here are the sockets we will be listening.
                sockslist = [client_sock, server_sock]
                while 1:
                    # Let us listen...
                    readables, writeables, exceptions = select.select(
                        sockslist, [], [],
                        self.server.Options['inactivity_timeout'])
                    # If the "exceptions" list is not empty or if we are here
                    # because of the timer (i.e. all lists are empty), then
                    # we must bail out, we have finished our work.
                    if (exceptions
                            or (readables, writeables, exceptions) == ([], [], [])):
                        raise Connection_Closed
                    # Only a precaution.
                    data = ''
                    # Just in case we would be in the improbable case of data
                    # awaiting to be read on both sockets, we treat the
                    # "readables" list as if it could contain more than one
                    # element. Thus the "for" loop...
                    for readable_sock in readables:
                        # We know the socket we want to read of, but we still
                        # must find what is the other socket. This method
                        # builds a list containing one element.
                        writeableslist = [client_sock, server_sock]
                        writeableslist.remove(readable_sock)
                        # We read one chunk of data and then send it to the
                        # other socket
                        data = readable_sock.recv(
                            self.server.Options['data_buf_size'])
                        # We must handle the case where data=='' because of a
                        # bug: we sometimes end with an half-closed socket,
                        # i.e. a socket closed by the peer, on which one can
                        # always read, but where there is no data to read.
                        # This must be detected or it would lead to an infinite
                        # loop.
                        if data:
                            writeableslist[0].send(data)
                            # This is only for future logging/stats.
                            if readable_sock == client_sock:
                                octets_out += len(data)
                            else:
                                octets_in += len(data)
                        else:
                            # The sock is readable but nothing can be read.
                            # This means a poorly detected connection close.
                            raise Connection_Closed
            # If one peer closes its connection, we have finished our work.
            except socket.error:
                exception, value, traceback = sys.exc_info()
                if value[0] == ERR_CONNECTION_RESET_BY_PEER:
                    raise Connection_Closed
                raise
        finally:
            print thread.get_ident(), octets_in, 'octets in and', octets_out, 'octets out. Connection closed.'

    def string2port(self, port_str):
        """This function converts a packed (16 bits) port number to an
        integer."""
        return (ord(port_str[0]) << 8) + ord(port_str[1])

    def port2string(self, port):
        """This function converts a port number (16 bits integer) into a packed
        string (2 chars)."""
        return chr((port & 0xff00) >> 8)+ chr(port & 0x00ff)

    def get_string(self, nullterminated):
        """This function converts a null terminated string stored in a Python
        string to a "normal Python string."

        NOTE(review): str.index raises ValueError when no NUL byte is
        present — callers must guarantee a terminator, or this should be
        hardened with split()."""
        return nullterminated[0: nullterminated.index(chr(0))]
# Script entry point: build the threading SOCKS 4 proxy with this module's
# request handler and the command-line options, then serve until interrupted.
if __name__ == "__main__":
    server = ThreadingSocks4Proxy(ReceiveSocksReq, sys.argv[1:])
    server.serve_forever()
|
gsathya/htpt
|
htpt/socks4a/htptProxy.py
|
Python
|
mit
| 40,287
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the template for the generic style-1 fountain static object.

    :param kernel: engine kernel (unused directly; required by the
                   template-loader calling convention)
    :return: a configured ``Static`` object template
    """
    obj = Static()
    obj.template = "object/static/structure/general/shared_fountain_generic_style_1.iff"
    obj.attribute_template_id = -1
    obj.stfName("obj_n", "unknown_object")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return obj
|
anhstudios/swganh
|
data/scripts/templates/object/static/structure/general/shared_fountain_generic_style_1.py
|
Python
|
mit
| 462
|
#!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import ptp
# Name of the device under test and of the matching testbench wrapper.
module = 'ptp_clock_cdc'
testbench = 'test_%s_64' % module

# Verilog sources compiled for cosimulation: the DUT RTL plus the testbench
# wrapper module.
srcs = []

srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)

src = ' '.join(srcs)

# Icarus Verilog command used by bench() to build the .vvp simulation image.
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
    """MyHDL/Icarus cosimulation testbench for ptp_clock_cdc (64-bit mode).

    Drives a PTP timestamp through the clock-domain-crossing DUT and checks
    that the output timestamp tracks the input timestamp for several
    input/output clock-frequency ratios (same, slightly/significantly
    faster and slower).
    """

    # Parameters (must match the parameters of the Verilog module).
    TS_WIDTH = 64
    NS_WIDTH = 4
    FNS_WIDTH = 16
    INPUT_PERIOD_NS = 0x6
    INPUT_PERIOD_FNS = 0x6666
    OUTPUT_PERIOD_NS = 0x6
    OUTPUT_PERIOD_FNS = 0x6666
    USE_SAMPLE_CLOCK = 1
    LOG_FIFO_DEPTH = 3
    LOG_RATE = 3

    # Inputs
    clk = Signal(bool(0))
    rst = Signal(bool(0))
    current_test = Signal(intbv(0)[8:])

    input_clk = Signal(bool(0))
    input_rst = Signal(bool(0))
    output_clk = Signal(bool(0))
    output_rst = Signal(bool(0))
    sample_clk = Signal(bool(0))
    input_ts = Signal(intbv(0)[96:])

    # Outputs
    output_ts = Signal(intbv(0)[96:])
    output_ts_step = Signal(bool(0))
    output_pps = Signal(bool(0))

    # PTP clock model: generates input_ts in the input clock domain.
    ptp_clock = ptp.PtpClock(period_ns=INPUT_PERIOD_NS, period_fns=INPUT_PERIOD_FNS)

    ptp_logic = ptp_clock.create_logic(
        input_clk,
        input_rst,
        ts_64=input_ts
    )

    # DUT: compile the Verilog sources, then attach the simulation through
    # MyHDL cosimulation.
    if os.system(build_cmd):
        raise Exception("Error running build command")

    dut = Cosimulation(
        "vvp -m myhdl %s.vvp -lxt2" % testbench,
        clk=clk,
        rst=rst,
        current_test=current_test,
        input_clk=input_clk,
        input_rst=input_rst,
        output_clk=output_clk,
        output_rst=output_rst,
        sample_clk=sample_clk,
        input_ts=input_ts,
        output_ts=output_ts,
        output_ts_step=output_ts_step,
        output_pps=output_pps
    )

    @always(delay(3200))
    def clkgen():
        # The core clock and the input-domain clock share a fixed period.
        clk.next = not clk
        input_clk.next = not input_clk

    # Output-domain clock half-period. It is mutated by check() below to
    # emulate faster or slower output clocks.
    output_clk_hp = Signal(int(3200))

    @instance
    def clkgen_output():
        while True:
            yield delay(int(output_clk_hp))
            output_clk.next = not output_clk

    @always(delay(5000))
    def clkgen_sample():
        sample_clk.next = not sample_clk

    @instance
    def check():
        # Reset sequence.
        yield delay(100000)
        yield clk.posedge
        rst.next = 1
        input_rst.next = 1
        output_rst.next = 1
        yield clk.posedge
        yield clk.posedge
        yield clk.posedge
        # NOTE(review): rst is asserted above but never deasserted in this
        # generator — confirm this is intended.
        input_rst.next = 0
        output_rst.next = 0
        yield clk.posedge
        yield delay(100000)
        yield clk.posedge

        # testbench stimulus

        yield clk.posedge
        print("test 1: Same clock speed")
        current_test.next = 1

        yield clk.posedge

        for i in range(20000):
            yield clk.posedge

        # Timestamps carry 16 fractional (sub-ns) bits; convert to seconds
        # and require input/output agreement within 1e-8 s (10 ns).
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9

        print(input_stop_ts-output_stop_ts)

        assert abs(input_stop_ts-output_stop_ts) < 1e-8

        yield delay(100000)

        yield clk.posedge
        print("test 2: Slightly faster")
        current_test.next = 2

        # Shorter half-period -> output clock runs slightly fast.
        output_clk_hp.next = 3100

        yield clk.posedge

        for i in range(20000):
            yield clk.posedge

        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9

        print(input_stop_ts-output_stop_ts)

        assert abs(input_stop_ts-output_stop_ts) < 1e-8

        yield delay(100000)

        yield clk.posedge
        print("test 3: Slightly slower")
        current_test.next = 3

        output_clk_hp.next = 3300

        yield clk.posedge

        for i in range(20000):
            yield clk.posedge

        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9

        print(input_stop_ts-output_stop_ts)

        assert abs(input_stop_ts-output_stop_ts) < 1e-8

        yield delay(100000)

        yield clk.posedge
        print("test 4: Significantly faster")
        current_test.next = 4

        output_clk_hp.next = 2000

        yield clk.posedge

        for i in range(20000):
            yield clk.posedge

        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9

        print(input_stop_ts-output_stop_ts)

        assert abs(input_stop_ts-output_stop_ts) < 1e-8

        yield delay(100000)

        yield clk.posedge
        print("test 5: Significantly slower")
        current_test.next = 5

        output_clk_hp.next = 5000

        yield clk.posedge

        # Slower output clock needs more cycles to settle; run longer.
        for i in range(30000):
            yield clk.posedge

        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9

        print(input_stop_ts-output_stop_ts)

        assert abs(input_stop_ts-output_stop_ts) < 1e-8

        yield delay(100000)

        raise StopSimulation

    return instances()
def test_bench():
    """Run the bench() testbench to completion under the MyHDL simulator."""
    simulation = Simulation(bench())
    simulation.run()
# Allow running this testbench directly from the command line.
if __name__ == '__main__':
    print("Running test...")
    test_bench()
|
alexforencich/verilog-ethernet
|
tb/test_ptp_clock_cdc_64.py
|
Python
|
mit
| 5,957
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module before importing any
    # framework code that needs configuration.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "collegeassist.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)
|
Avinash-Yadav/collegeassist
|
manage.py
|
Python
|
mit
| 811
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""Support for programmatically generating markup streams from Python code using
a very simple syntax. The main entry point to this module is the `tag` object
(which is actually an instance of the ``ElementFactory`` class). You should
rarely (if ever) need to directly import and use any of the other classes in
this module.
Elements can be created using the `tag` object using attribute access. For
example:
>>> doc = tag.p('Some text and ', tag.a('a link', href='http://example.org/'), '.')
>>> doc
<Element "p">
This produces an `Element` instance which can be further modified to add child
nodes and attributes. This is done by "calling" the element: positional
arguments are added as child nodes (alternatively, the `Element.append` method
can be used for that purpose), whereas keyword arguments are added as
attributes:
>>> doc(tag.br)
<Element "p">
>>> print(doc)
<p>Some text and <a href="http://example.org/">a link</a>.<br/></p>
If an attribute name collides with a Python keyword, simply append an underscore
to the name:
>>> doc(class_='intro')
<Element "p">
>>> print(doc)
<p class="intro">Some text and <a href="http://example.org/">a link</a>.<br/></p>
As shown above, an `Element` can easily be directly rendered to XML text by
printing it or using the Python ``str()`` function. This is basically a
shortcut for converting the `Element` to a stream and serializing that
stream:
>>> stream = doc.generate()
>>> stream #doctest: +ELLIPSIS
<genshi.core.Stream object at ...>
>>> print(stream)
<p class="intro">Some text and <a href="http://example.org/">a link</a>.<br/></p>
The `tag` object also allows creating "fragments", which are basically lists
of nodes (elements or text) that don't have a parent element. This can be useful
for creating snippets of markup that are attached to a parent element later (for
example in a template). Fragments are created by calling the `tag` object, which
returns an object of type `Fragment`:
>>> fragment = tag('Hello, ', tag.em('world'), '!')
>>> fragment
<Fragment>
>>> print(fragment)
Hello, <em>world</em>!
"""
from genshi.core import Attrs, Markup, Namespace, QName, Stream, \
START, END, TEXT
__all__ = ['Fragment', 'Element', 'ElementFactory', 'tag']
__docformat__ = 'restructuredtext en'
class Fragment(object):
    """A parentless sequence of markup nodes (elements and/or text).

    Fragments collect child nodes that can later be attached to an element
    or serialized directly into a markup event stream.
    """
    __slots__ = ['children']

    def __init__(self):
        """Create a new, empty fragment."""
        self.children = []

    def __add__(self, other):
        # Combining two nodes yields a fresh fragment holding both operands.
        return Fragment()(self, other)

    def __call__(self, *args):
        """Append every positional argument as a child node.

        :see: `append`
        """
        for node in args:
            self.append(node)
        return self

    def __iter__(self):
        return self._generate()

    def __repr__(self):
        return '<%s>' % type(self).__name__

    def __str__(self):
        return str(self.generate())

    def __unicode__(self):
        return str(self.generate())

    def __html__(self):
        return Markup(self.generate())

    def append(self, node):
        """Append an element or string as child node.

        :param node: the node to append; can be an `Element`, `Fragment`, or
                     a `Stream`, or a Python string or number; `None` is
                     silently ignored
        """
        if isinstance(node, (Stream, Element, str, int, float)):
            # Fast path for known leaf types: skip the iterability probe
            # below. This check must come before the Fragment check because
            # Element subclasses Fragment yet must be stored as-is.
            self.children.append(node)
            return
        if isinstance(node, Fragment):
            # Plain fragments are flattened rather than nested.
            self.children.extend(node.children)
            return
        if node is None:
            return
        try:
            for child in node:
                self.append(child)
        except TypeError:
            # Not iterable after all: store it as a single child.
            self.children.append(node)

    def _generate(self):
        # Walk the children in order and emit markup events for each one.
        for child in self.children:
            if isinstance(child, Fragment):
                yield from child._generate()
            elif isinstance(child, Stream):
                yield from child
            else:
                text = child if isinstance(child, str) else str(child)
                yield TEXT, text, (None, -1, -1)

    def generate(self):
        """Return a markup event stream for the fragment.

        :rtype: `Stream`
        """
        return Stream(self._generate())
def _kwargs_to_attrs(kwargs):
    """Normalize keyword arguments into an `Attrs` instance.

    Trailing underscores are stripped (so ``class_`` maps to ``class``),
    remaining underscores become dashes, `None` values are dropped, and only
    the first occurrence of each resulting name is kept.
    """
    pairs = []
    seen = set()
    for key, value in list(kwargs.items()):
        key = key.rstrip('_').replace('_', '-')
        if value is None or key in seen:
            continue
        pairs.append((QName(key), str(value)))
        seen.add(key)
    return Attrs(pairs)
class Element(Fragment):
    """Simple XML output generator based on the builder pattern.

    Construct XML elements by passing the tag name to the constructor:

    >>> print((Element('strong')))
    <strong/>

    Attributes can be specified using keyword arguments. The values of the
    arguments will be converted to strings and any special XML characters
    escaped:

    >>> print((Element('textarea', rows=10)))
    <textarea rows="10"/>
    >>> print((Element('span', title='1 < 2')))
    <span title="1 &lt; 2"/>
    >>> print((Element('span', title='"baz"')))
    <span title="&#34;baz&#34;"/>

    The " character is escaped using a numerical entity.
    The order in which attributes are rendered is undefined.

    If an attribute value evaluates to `None`, that attribute is not included
    in the output:

    >>> print((Element('a', name=None)))
    <a/>

    Attribute names that conflict with Python keywords can be specified by
    appending an underscore:

    >>> print((Element('div', class_='warning')))
    <div class="warning"/>

    Nested elements can be added to an element using item access notation.
    The call notation can also be used for this and for adding attributes
    using keyword arguments, as one would do in the constructor.

    >>> print((Element('ul')(Element('li'), Element('li'))))
    <ul><li/><li/></ul>
    >>> print((Element('a')('Label')))
    <a>Label</a>
    >>> print((Element('a')('Label', href="target")))
    <a href="target">Label</a>

    Text nodes can be nested in an element by adding strings instead of
    elements. Any special characters in the strings are escaped automatically:

    >>> print((Element('em')('Hello world')))
    <em>Hello world</em>
    >>> print((Element('em')(42)))
    <em>42</em>
    >>> print((Element('em')('1 < 2')))
    <em>1 &lt; 2</em>

    This technique also allows mixed content:

    >>> print((Element('p')('Hello ', Element('b')('world'))))
    <p>Hello <b>world</b></p>

    Quotes are not escaped inside text nodes:

    >>> print((Element('p')('"Hello"')))
    <p>"Hello"</p>

    Elements can also be combined with other elements or strings using the
    addition operator, which results in a `Fragment` object that contains the
    operands:

    >>> print((Element('br') + 'some text' + Element('br')))
    <br/>some text<br/>

    Elements with a namespace can be generated using the `Namespace` and/or
    `QName` classes:

    >>> from genshi.core import Namespace
    >>> xhtml = Namespace('http://www.w3.org/1999/xhtml')
    >>> print((Element(xhtml.html, lang='en')))
    <html xmlns="http://www.w3.org/1999/xhtml" lang="en"/>
    """
    __slots__ = ['tag', 'attrib']

    def __init__(self, tag_, **attrib):
        # The trailing underscore in ``tag_`` avoids shadowing the
        # module-level ``tag`` factory name.
        Fragment.__init__(self)
        self.tag = QName(tag_)
        self.attrib = _kwargs_to_attrs(attrib)

    def __call__(self, *args, **kwargs):
        """Append any positional arguments as child nodes, and keyword arguments
        as attributes.

        :return: the element itself so that calls can be chained
        :rtype: `Element`
        :see: `Fragment.append`
        """
        self.attrib |= _kwargs_to_attrs(kwargs)
        Fragment.__call__(self, *args)
        return self

    def __repr__(self):
        return '<%s "%s">' % (type(self).__name__, self.tag)

    def _generate(self):
        # Wrap the children's events in this element's START/END events.
        yield START, (self.tag, self.attrib), (None, -1, -1)
        for kind, data, pos in Fragment._generate(self):
            yield kind, data, pos
        yield END, self.tag, (None, -1, -1)

    def generate(self):
        """Return a markup event stream for the fragment.

        :rtype: `Stream`
        """
        return Stream(self._generate())
class ElementFactory(object):
    """Factory for `Element` objects.

    A new element is created simply by accessing a correspondingly named
    attribute of the factory object:

    >>> factory = ElementFactory()
    >>> print((factory.foo))
    <foo/>
    >>> print((factory.foo(id=2)))
    <foo id="2"/>

    Markup fragments (lists of nodes without a parent element) can be created
    by calling the factory:

    >>> print((factory('Hello, ', factory.em('world'), '!')))
    Hello, <em>world</em>!

    A factory can also be bound to a specific namespace:

    >>> factory = ElementFactory('http://www.w3.org/1999/xhtml')
    >>> print((factory.html(lang="en")))
    <html xmlns="http://www.w3.org/1999/xhtml" lang="en"/>

    The namespace for a specific element can be altered on an existing factory
    by specifying the new namespace using item access:

    >>> factory = ElementFactory()
    >>> print((factory.html(factory['http://www.w3.org/2000/svg'].g(id=3))))
    <html><g xmlns="http://www.w3.org/2000/svg" id="3"/></html>

    Usually, the `ElementFactory` class is not used directly. Rather, the
    `tag` instance should be used to create elements.
    """

    def __init__(self, namespace=None):
        """Create the factory, optionally bound to the given namespace.

        :param namespace: the namespace URI for any created elements, or
                          `None` for no namespace
        """
        # Coerce a plain URI string into a Namespace object exactly once.
        needs_wrapping = namespace and not isinstance(namespace, Namespace)
        self.namespace = Namespace(namespace) if needs_wrapping else namespace

    def __call__(self, *args):
        """Create a fragment that has the given positional arguments as child
        nodes.

        :return: the created `Fragment`
        :rtype: `Fragment`
        """
        fragment = Fragment()
        return fragment(*args)

    def __getitem__(self, namespace):
        """Return a new factory that is bound to the specified namespace.

        :param namespace: the namespace URI or `Namespace` object
        :return: an `ElementFactory` that produces elements bound to the
                 given namespace
        :rtype: `ElementFactory`
        """
        return ElementFactory(namespace)

    def __getattr__(self, name):
        """Create an `Element` with the given name.

        :param name: the tag name of the element to create
        :return: an `Element` with the specified name, qualified by this
                 factory's namespace when one is bound
        :rtype: `Element`
        """
        if self.namespace:
            return Element(self.namespace[name])
        return Element(name)
tag = ElementFactory()
"""Global `ElementFactory` bound to the default namespace.
:type: `ElementFactory`
"""
|
Lyleo/OmniMarkupPreviewer
|
OmniMarkupLib/Renderers/libs/python3/genshi/builder.py
|
Python
|
mit
| 11,729
|
from .utils import PyKEArgumentHelpFormatter
import numpy as np
from astropy.io import fits as pyfits
from matplotlib import pyplot as plt
from tqdm import tqdm
from . import kepio, kepmsg, kepkey, kepfit, kepstat, kepfunc
__all__ = ['kepoutlier']
def kepoutlier(infile, outfile=None, datacol='SAP_FLUX', nsig=3.0, stepsize=1.0,
               npoly=3, niter=1, operation='remove', ranges='0,0', plot=False,
               plotfit=False, overwrite=False, verbose=False,
               logfile='kepoutlier.log'):
    """
    kepoutlier -- Remove or replace statistical outliers from time series data
    kepoutlier identifies data outliers relative to piecemeal best-fit
    polynomials. Outliers are either removed from the output time series or
    replaced by a noise-treated value defined by the polynomial fit. Identified
    outliers and the best fit functions are optionally plotted for inspection
    purposes.
    Parameters
    ----------
    infile : str
        The name of a MAST standard format FITS file containing a Kepler light
        curve within the first data extension.
    outfile : str
        The name of the output FITS file. ``outfile`` will be direct copy of
        infile with either data outliers removed (i.e. the table will have
        fewer rows) or the outliers will be corrected according to a best-fit
        function and a noise model.
    datacol : str
        The column name containing data stored within extension 1 of infile.
        This data will be searched for outliers. Typically this name is
        SAP_FLUX (Simple Aperture Photometry fluxes) or PDCSAP_FLUX (Pre-search
        Data Conditioning fluxes).
    nsig : float
        The sigma clipping threshold. Data deviating from a best fit function
        by more than the threshold will be either removed or corrected
        according to the user selection of operation.
    stepsize : float
        The data within datacol is unlikely to be well represented by a single
        polynomial function. stepsize splits the data up into a series of time
        blocks, each is fit independently by a separate function. The user can
        provide an informed choice of stepsize after inspecting the data with
        the kepdraw tool. Units are days.
    npoly : int
        The polynomial order of each best-fit function.
    niter : int
        If outliers are found in a particular data section, that data will be
        removed temporarily and the time series fit again. This will be
        iterated niter times before freezing upon the best available fit.
    operation : str
        * ``remove`` throws away outliers. The output data table will smaller
        or equal in size to the input table.
        * ``replace`` replaces outliers with a value that is consistent with
        the best-fit polynomial function and a random component defined by the
        rms of the data relative to the fit and calculated using the inverse
        normal cumulative function and a random number generator.
    ranges : str
        The user can choose specific time ranges of data on which to work. This
        could, for example, avoid removing known stellar flares from a dataset.
        Time ranges are supplied as comma-separated pairs of Barycentric Julian
        Dates (BJDs). Multiple ranges are separated by a semi-colon. An example
        containing two time ranges is::
            '2455012.48517,2455014.50072;2455022.63487,2455025.08231'
        If the user wants to correct the entire time series then providing
        ``ranges = '0,0'`` will tell the task to operate on the whole time series.
    plot : bool
        Plot the data and outliers?
    plotfit : bool
        Overlay the polynomial fits upon the plot?
    overwrite : bool
        Overwrite the output file?
    verbose : bool
        Print informative messages and warnings to the shell and logfile?
    logfile : str
        Name of the logfile containing error and warning messages.
    Examples
    --------
    .. code-block:: bash
        $ kepoutlier kplr002437329-2010355172524_llc.fits --datacol SAP_FLUX
        --nsig 4 --stepsize 5 --npoly 2 --niter 10 --operation replace
        --verbose --plot --plotfit
    .. image:: ../_static/images/api/kepoutlier.png
        :align: center
    """
    # default output name: "<stem>-kepoutlier.fits" derived from the task name
    if outfile is None:
        outfile = infile.split('.')[0] + "-{}.fits".format(__all__[0])
    # log the call
    hashline = '--------------------------------------------------------------'
    kepmsg.log(logfile, hashline, verbose)
    call = ('KEPOUTLIER -- '
            + ' infile={}'.format(infile)
            + ' outfile={}'.format(outfile)
            + ' datacol={}'.format(datacol)
            + ' nsig={}'.format(nsig)
            + ' stepsize={}'.format(stepsize)
            + ' npoly={}'.format(npoly)
            + ' niter={}'.format(niter)
            + ' operation={}'.format(operation)
            + ' ranges={}'.format(ranges)
            + ' plot={}'.format(plot)
            + ' plotfit={}'.format(plotfit)
            + ' overwrite={}'.format(overwrite)
            + ' verbose={}'.format(verbose)
            + ' logfile={}'.format(logfile))
    kepmsg.log(logfile, call+'\n', verbose)
    # start time
    kepmsg.clock('KEPOUTLIER started at', logfile, verbose)
    # overwrite output file
    if overwrite:
        kepio.overwrite(outfile, logfile, verbose)
    if kepio.fileexists(outfile):
        errmsg = ('ERROR -- KEPOUTLIER: {} exists. Use overwrite=True'
                  .format(outfile))
        # NOTE(review): no return/raise after this call -- presumably
        # kepmsg.err aborts; confirm, otherwise execution continues and
        # writeto() below will fail on the existing file.
        kepmsg.err(logfile, errmsg, verbose)
    # open input file
    instr = pyfits.open(infile)
    tstart, tstop, bjdref, cadence = kepio.timekeys(instr, infile, logfile,
                                                    verbose)
    try:
        work = instr[0].header['FILEVER']
        cadenom = 1.0
    except:
        # no FILEVER keyword: normalize the data column by the cadence instead
        cadenom = cadence
    # fudge non-compliant FITS keywords with no values
    instr = kepkey.emptykeys(instr, infile, logfile, verbose)
    # read table structure
    table = kepio.readfitstab(infile, instr[1], logfile, verbose)
    # filter input data table
    try:
        nanclean = instr[1].header['NANCLEAN']
    except:
        # NANCLEAN keyword absent: drop non-finite / zero-flux cadences now
        # and record the cleanup in the header
        time = kepio.readtimecol(infile, table, logfile, verbose)
        flux = kepio.readfitscol(infile, table, datacol, logfile, verbose)
        finite_data_mask = np.isfinite(time) & np.isfinite(flux) & (flux != 0)
        table = table[finite_data_mask]
        instr[1].data = table
        comment = 'NaN cadences removed from data'
        kepkey.new('NANCLEAN', True, comment, instr[1], outfile, logfile,
                   verbose)
    # read table columns
    try:
        intime = instr[1].data.field('barytime') + 2.4e6
    except:
        # no 'barytime' column: fall back to the standard 'time' column
        intime = kepio.readfitscol(infile, instr[1].data, 'time', logfile,
                                   verbose)
    indata = kepio.readfitscol(infile, instr[1].data, datacol, logfile,
                               verbose)
    intime = intime + bjdref
    indata = indata / cadenom
    # time ranges for region to be corrected
    t1, t2 = kepio.timeranges(ranges, logfile, verbose)
    cadencelis = kepstat.filterOnRange(intime, t1, t2)
    # find limits of each time step
    tstep1, tstep2 = [], []
    work = intime[0]
    while work < intime[-1]:
        tstep1.append(work)
        tstep2.append(np.array([work + stepsize, intime[-1]],
                               dtype='float64').min())
        work += stepsize
    # find cadence limits of each time step
    cstep1, cstep2 = [], []
    work1 = 0
    work2 = 0
    for i in range(len(intime)):
        if intime[i] >= intime[work1] and intime[i] < intime[work1] + stepsize:
            work2 = i
        else:
            cstep1.append(work1)
            cstep2.append(work2)
            work1 = i
            work2 = i
    # flush the final (possibly partial) window
    cstep1.append(work1)
    cstep2.append(work2)
    outdata = indata * 1.0
    # comment keyword in output file
    kepkey.history(call, instr[0], outfile, logfile, verbose)
    # clean up x-axis unit
    intime0 = (tstart // 100) * 100.0
    ptime = intime - intime0
    xlab = 'BJD $-$ {}'.format(intime0)
    # clean up y-axis units
    pout = indata * 1.0
    nrm = len(str(int(pout.max())))-1
    pout = pout / 10**nrm
    ylab = '10$^%d$ e$^-$ s$^{-1}$' % nrm
    # data limits
    xmin = ptime.min()
    xmax = ptime.max()
    ymin = pout.min()
    ymax = pout.max()
    xr = xmax - xmin
    yr = ymax - ymin
    # pad with duplicated end times and zero flux for the plt.fill call below
    ptime = np.insert(ptime, [0], [ptime[0]])
    ptime = np.append(ptime, [ptime[-1]])
    pout = np.insert(pout, [0], [0.0])
    pout = np.append(pout, 0.0)
    # plot light curve
    if plot:
        plt.figure()
        plt.clf()
        # plot data
        ax = plt.axes([0.06, 0.1, 0.93, 0.87])
        # force tick labels to be absolute rather than relative
        plt.gca().xaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
        plt.gca().yaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
        plt.plot(ptime, pout, color='#0000ff', linestyle='-', linewidth=1.0)
        plt.fill(ptime, pout, color='#ffff00', linewidth=0.0, alpha=0.2)
        plt.xlabel(xlab, {'color' : 'k'})
        plt.ylabel(ylab, {'color' : 'k'})
        plt.grid()
    # loop over each time step, fit data, determine rms
    masterfit = indata * 0.0
    mastersigma = np.zeros(len(masterfit))
    functype = getattr(kepfunc, 'poly' + str(npoly))
    for i in range(len(cstep1)):
        # initial guess: window mean plus zeroed higher-order coefficients
        pinit = [indata[cstep1[i]:cstep2[i]+1].mean()]
        if npoly > 0:
            for j in range(npoly):
                pinit.append(0.0)
        pinit = np.array(pinit, dtype='float32')
        try:
            coeffs, errors, covar, iiter, sigma, chi2, dof, fit, plotx, ploty = \
                kepfit.lsqclip(functype, pinit,
                               intime[cstep1[i]:cstep2[i]+1] - intime[cstep1[i]],
                               indata[cstep1[i]:cstep2[i]+1], None, nsig,
                               nsig, niter, logfile, verbose)
            # evaluate the fitted polynomial over this window
            for j in range(len(coeffs)):
                masterfit[cstep1[i]: cstep2[i] + 1] += (coeffs[j]
                    * (intime[cstep1[i]:cstep2[i]+1] - intime[cstep1[i]]) ** j)
            for j in range(cstep1[i], cstep2[i] + 1):
                mastersigma[j] = sigma
            if plotfit:
                plt.plot(plotx + intime[cstep1[i]] - intime0, ploty / 10 ** nrm,
                         'g', lw=3)
        except:
            # fit failed: use the data as its own "fit" and effectively
            # disable clipping in this window with a huge sigma
            for j in range(cstep1[i], cstep2[i] + 1):
                masterfit[j] = indata[j]
                mastersigma[j] = 1.0e10
            message = ('WARNING -- KEPOUTLIER: could not fit range '
                       + str(intime[cstep1[i]]) + '-' + str(intime[cstep2[i]]))
            kepmsg.warn(logfile, message, verbose)
    # reject outliers
    rejtime, rejdata = [], []
    naxis2 = 0
    # compact the table in place: naxis2 counts rows kept so far
    for i in tqdm(range(len(masterfit))):
        if (abs(indata[i] - masterfit[i]) > nsig * mastersigma[i]
            and i in cadencelis):
            rejtime.append(intime[i])
            rejdata.append(indata[i])
            if operation == 'replace':
                # keep the row but overwrite its flux with fit + noise
                [rnd] = kepstat.randarray([masterfit[i]], [mastersigma[i]])
                table[naxis2] = table[i]
                table.field(datacol)[naxis2] = rnd
                naxis2 += 1
            # 'remove': the row is simply not copied forward
        else:
            table[naxis2] = table[i]
            naxis2 += 1
    instr[1].data = table[:naxis2]
    if plot:
        rejtime = np.array(rejtime, dtype='float64')
        rejdata = np.array(rejdata, dtype='float32')
        plt.plot(rejtime - intime0, rejdata / 10 ** nrm, 'ro')
        # plot ranges
        plt.xlim(xmin - xr * 0.01, xmax + xr * 0.01)
        if ymin >= 0.0:
            plt.ylim(ymin - yr * 0.01, ymax + yr * 0.01)
        else:
            plt.ylim(1.0e-10, ymax + yr * 0.01)
        # render plot
        plt.show()
    # write output file
    print("Writing output file {}...".format(outfile))
    instr.writeto(outfile)
    # close input file
    instr.close()
    kepmsg.clock('KEPOUTLIER completed at', logfile, verbose)
def kepoutlier_main():
    """Command-line entry point: parse arguments and run kepoutlier."""
    import argparse
    parser = argparse.ArgumentParser(
        description='Remove or replace data outliers from a time series',
        formatter_class=PyKEArgumentHelpFormatter)
    parser.add_argument('infile', help='Name of input file', type=str)
    parser.add_argument('--outfile',
                        help=('Name of FITS file to output.'
                              ' If None, outfile is infile-kepoutlier.'),
                        default=None)
    parser.add_argument('--datacol', default='SAP_FLUX',
                        help='Name of data column to plot', type=str)
    parser.add_argument('--nsig', default=3.,
                        help='Sigma clipping threshold for outliers',
                        type=float)
    parser.add_argument('--stepsize', default=1.0,
                        help='Stepsize on which to fit data [days]',
                        type=float)
    parser.add_argument('--npoly', default=3,
                        help='Polynomial order for each fit', type=int)
    parser.add_argument('--niter', default=1,
                        help='Maximum number of clipping iterations', type=int)
    parser.add_argument('--operation', default='remove',
                        help='Remove or replace outliers?', type=str,
                        choices=['replace','remove'])
    parser.add_argument('--ranges', default='0,0',
                        help='Time ranges of regions to filter', type=str)
    parser.add_argument('--plot', action='store_true', help='Plot result?')
    parser.add_argument('--plotfit', action='store_true',
                        help='Plot fit over results?')
    parser.add_argument('--overwrite', action='store_true',
                        help='Overwrite output file?')
    parser.add_argument('--verbose', action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile', '-l', help='Name of ascii log file',
                        default='kepoutlier.log', dest='logfile', type=str)
    args = parser.parse_args()
    # forward all parsed options positionally to the task function
    kepoutlier(args.infile, args.outfile, args.datacol, args.nsig,
               args.stepsize, args.npoly,args.niter, args.operation,
               args.ranges, args.plot, args.plotfit, args.overwrite,
               args.verbose, args.logfile)
|
gully/PyKE
|
pyke/kepoutlier.py
|
Python
|
mit
| 14,393
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# @author : beaengine@gmail.com
from headers.BeaEnginePython import *
from nose.tools import *
class TestSuite:
    """Disassembly regression checks for opcode 0F 38 3D (pmaxsd / vpmaxsd)."""

    def _decode(self, hexstring):
        """Disassemble one hex-encoded instruction and return the engine."""
        engine = Disasm(bytes.fromhex(hexstring))
        engine.read()
        return engine

    def test(self):
        # 66 0F 38 3d /r
        # pmaxsd mm1, mm2/m64
        engine = self._decode('660f383d9011223344')
        assert_equal(hex(engine.infos.Instruction.Opcode), '0xf383d')
        assert_equal(engine.infos.Instruction.Mnemonic, b'pmaxsd')
        assert_equal(engine.repr(), 'pmaxsd xmm2, xmmword ptr [rax+44332211h]')
        # VEX.NDS.128.66.0F38.WIG 3d /r
        # vpmaxsd xmm1, xmm2, xmm3/m128
        engine = self._decode('c402013d0e')
        assert_equal(engine.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(engine.repr(), 'vpmaxsd xmm9, xmm15, xmmword ptr [r14]')
        # VEX.NDS.256.66.0F38.WIG 3d /r
        # vpmaxsd ymm1, ymm2, ymm3/m256
        engine = self._decode('c402053d0e')
        assert_equal(engine.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(engine.repr(), 'vpmaxsd ymm9, ymm15, ymmword ptr [r14]')
        # EVEX.NDS.128.66.0F38.WIG 3d /r
        # vpmaxsd xmm1 {k1}{z}, xmm2, xmm3/m128
        engine = self._decode('620205063d0e')
        assert_equal(engine.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(engine.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(engine.infos.Reserved_.EVEX.P2, 0x6)
        assert_equal(engine.infos.Reserved_.EVEX.pp, 0x1)
        assert_equal(engine.infos.Reserved_.EVEX.mm, 0x2)
        assert_equal(hex(engine.infos.Instruction.Opcode), '0x3d')
        assert_equal(engine.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(engine.repr(), 'vpmaxsd xmm25, xmm31, xmmword ptr [r14]')
        # EVEX.NDS.256.66.0F38.WIG 3d /r
        # vpmaxsd ymm1 {k1}{z}, ymm2, ymm3/m256
        engine = self._decode('620205203d0e')
        assert_equal(engine.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(engine.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(engine.infos.Reserved_.EVEX.P2, 0x20)
        assert_equal(engine.infos.Reserved_.EVEX.pp, 0x1)
        assert_equal(engine.infos.Reserved_.EVEX.mm, 0x2)
        assert_equal(hex(engine.infos.Instruction.Opcode), '0x3d')
        assert_equal(engine.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(engine.repr(), 'vpmaxsd ymm25, ymm31, ymmword ptr [r14]')
        # EVEX.NDS.512.66.0F38.WIG 3d /r
        # vpmaxsd zmm1 {k1}{z}, zmm2, zmm3/m512
        engine = self._decode('620205403d0e')
        assert_equal(engine.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(engine.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(engine.infos.Reserved_.EVEX.P2, 0x40)
        assert_equal(engine.infos.Reserved_.EVEX.pp, 0x1)
        assert_equal(engine.infos.Reserved_.EVEX.mm, 0x2)
        assert_equal(hex(engine.infos.Instruction.Opcode), '0x3d')
        assert_equal(engine.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(engine.repr(), 'vpmaxsd zmm25, zmm31, zmmword ptr [r14]')
|
0vercl0k/rp
|
src/third_party/beaengine/tests/0f383d.py
|
Python
|
mit
| 4,045
|
# -*- coding: utf-8 -*-
import json
from django import template
from django.conf import settings
register = template.Library()
from django_iceberg.auth_utils import init_iceberg
@register.inclusion_tag('django_iceberg/javascript_sdk.html', takes_context=True)
def iceberg_javascript_sdk(context):
    """Return the template context for including the Iceberg JS SDK.

    Uses the local development script URL when ``ICEBERG_USE_LOCAL`` is set
    (truthy) in the Django settings, otherwise the production script URL.
    """
    # fix: local variable was misspelled 'livrary_path'
    if getattr(settings, 'ICEBERG_USE_LOCAL', False):
        library_path = 'http://connect.local.iceberg-marketplace.com:9000/script.js'
    else:
        library_path = 'http://connect.iceberg-marketplace.com/script.js'
    return {
        'LIBRARY_URL': library_path
    }
@register.inclusion_tag('django_iceberg/sso.html', takes_context=True)
def iceberg_sso(context):
    """Template context for Iceberg single sign-on, when SSO data is available."""
    handler = init_iceberg(context['request'])
    # No SSO response on the handler means nothing to render.
    if not hasattr(handler, '_sso_response'):
        return {}
    return {
        'appNamespace': handler.conf.ICEBERG_APPLICATION_NAMESPACE,
        "sso_data": json.dumps(handler._sso_response)
    }
@register.inclusion_tag('django_iceberg/sso.html', takes_context=True)
def iceberg_sso_with_seller(context, seller_id):
    """Like iceberg_sso, but also enables the seller module for *seller_id*."""
    handler = init_iceberg(context['request'])
    # No SSO response on the handler means nothing to render.
    if not hasattr(handler, '_sso_response'):
        return {}
    return {
        "modules": json.dumps(['client', 'seller']),
        'appNamespace': handler.conf.ICEBERG_APPLICATION_NAMESPACE,
        "sso_data": json.dumps(handler._sso_response),
        "seller": json.dumps({"id": seller_id}),
    }
|
izberg-marketplace/django-izberg
|
django_iceberg/templatetags/iceberg.py
|
Python
|
mit
| 1,539
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import jsonify, session
from marshmallow import INCLUDE, fields
from marshmallow_enum import EnumField
from indico.modules.categories.controllers.base import RHDisplayCategoryBase
from indico.modules.events.controllers.base import RHDisplayEventBase
from indico.modules.search.base import SearchOptions, SearchTarget, get_search_provider
from indico.modules.search.internal import InternalSearch
from indico.modules.search.result_schemas import ResultSchema
from indico.modules.search.views import WPCategorySearch, WPEventSearch, WPSearch
from indico.util.marshmallow import validate_with_message
from indico.web.args import use_kwargs
from indico.web.rh import RH
class RHSearchDisplay(RH):
    # Renders the site-wide search page.
    def _process(self):
        return WPSearch.render_template('search.html')
class RHCategorySearchDisplay(RHDisplayCategoryBase):
    # Renders the search page scoped to a single category.
    def _process(self):
        return WPCategorySearch.render_template('category_search.html', self.category)
class RHEventSearchDisplay(RHDisplayEventBase):
    # Renders the search page scoped to a single event.
    def _process(self):
        return WPEventSearch.render_template('event_search.html', self.event)
class RHAPISearch(RH):
    """API for searching across all records with the current search provider.
    Besides pagination, filters or placeholders may be passed as query parameters.
    Since `type` may be a list, the results from the search provider are not mixed with
    the InternalSearch.
    """
    @use_kwargs({
        'page': fields.Int(missing=None),
        'q': fields.String(required=True),
        'type': fields.List(EnumField(SearchTarget), missing=None),
        'admin_override_enabled': fields.Bool(
            missing=False,
            # only admins may pass admin_override_enabled=true
            validate=validate_with_message(lambda value: session.user and session.user.is_admin,
                                           'Restricted to admins')
        ),
    }, location='query', unknown=INCLUDE)
    def _process(self, page, q, type, **params):
        # `type` intentionally shadows the builtin: it is the query parameter name.
        search_provider = get_search_provider()
        # Pure category searches are always served by the internal search.
        if type == [SearchTarget.category]:
            search_provider = InternalSearch
        result = search_provider().search(q, session.user, page, type, **params)
        return ResultSchema().dump(result)
class RHAPISearchOptions(RH):
    """Expose the active search provider's placeholders and sort options."""
    def _process(self):
        provider = get_search_provider()()
        options = SearchOptions(provider.get_placeholders(),
                                provider.get_sort_options())
        return jsonify(options.dump())
|
pferreir/indico
|
indico/modules/search/controllers.py
|
Python
|
mit
| 2,694
|
"""Tests for documenteer.sphinext.mockcoderefs."""
from shutil import rmtree
from tempfile import mkdtemp
import pytest
from sphinx.application import Sphinx
import documenteer.sphinxext.mockcoderefs as mockcoderefs
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
@pytest.fixture()
def app(request):
    """Sphinx application fixture built from empty temporary directories."""
    src = mkdtemp()
    doctree = mkdtemp()
    confdir = mkdtemp()
    outdir = mkdtemp()
    # Silence Sphinx's status logging for the duration of the test run.
    Sphinx._log = lambda self, message, wfile, nonl=False: None
    # NOTE(review): a confdir tempdir is created (and cleaned up below) but
    # Sphinx is given confdir=None -- confirm that is intentional.
    app = Sphinx(
        srcdir=src,
        confdir=None,
        outdir=outdir,
        doctreedir=doctree,
        buildername="html",
    )
    mockcoderefs.setup(app)
    # Stitch together as the sphinx app init() usually does w/ real conf files
    try:
        app.config.init_values()
    except TypeError:
        # Sphinx < 1.6.0
        app.config.init_values(Sphinx._log)
    def fin():
        # remove every tempdir created above once the test is done
        for dirname in (src, doctree, confdir, outdir):
            rmtree(dirname)
    request.addfinalizer(fin)
    return app
@pytest.fixture()
def inliner(app):
    """Minimal docutils-inliner stub whose settings.env.app is the Sphinx app."""
    env = Mock(app=app)
    settings = Mock(env=env)
    return Mock(document=Mock(settings=settings))
@pytest.mark.parametrize(
    "test_input,expected",
    [
        (("lmod", "lsst.afw"), "lsst.afw"),
        (("lmod", "~lsst.afw"), "afw"),
        (("lmod", "~lsst"), "lsst"),
    ],
)
def test_mock_code_ref_role(inliner, test_input, expected):
    """The mock role renders the expected (possibly abbreviated) text."""
    role, content = test_input
    nodes = mockcoderefs.mock_code_ref_role(
        name=role,
        rawtext=content,
        text=content,
        inliner=inliner,
        lineno=None,
    )
    assert nodes[0][0].astext() == expected
|
lsst-sqre/sphinxkit
|
tests/test_sphinxext_mockcoderefs.py
|
Python
|
mit
| 1,652
|
#!/usr/bin/env python
from utils import load_data, save_data
def run():
    """Remove out-of-office legislators from the committee-membership and
    social-media data files.

    Builds the set of in-office bioguide IDs from legislators-current.yaml,
    drops every committee member and social-media entry whose bioguide ID is
    not in that set, and writes both files back in place.
    """
    # load in members, orient by bioguide ID
    print("Loading current legislators...")
    current = load_data("legislators-current.yaml")
    current_bioguide = { }
    for m in current:
        if "bioguide" in m["id"]:
            current_bioguide[m["id"]["bioguide"]] = m
    # remove out-of-office people from current committee membership
    print("Sweeping committee membership...")
    membership_current = load_data("committee-membership-current.yaml")
    for committee_id in list(membership_current.keys()):
        # Iterate over a copy: removing from the list being iterated skips
        # the element immediately following each removed member, so two
        # consecutive out-of-office members would leave the second in place.
        for member in list(membership_current[committee_id]):
            if member["bioguide"] not in current_bioguide:
                print("\t[%s] Ding ding ding! (%s)" % (member["bioguide"], member["name"]))
                membership_current[committee_id].remove(member)
    save_data(membership_current, "committee-membership-current.yaml")
    # remove out-of-office people from social media info
    print("Sweeping social media accounts...")
    socialmedia_current = load_data("legislators-social-media.yaml")
    # (already iterates a copy, so in-place removal is safe here)
    for member in list(socialmedia_current):
        if member["id"]["bioguide"] not in current_bioguide:
            print("\t[%s] Ding ding ding! (%s)" % (member["id"]["bioguide"], member["social"]))
            socialmedia_current.remove(member)
    save_data(socialmedia_current, "legislators-social-media.yaml")
# Script entry point.
if __name__ == '__main__':
    run()
|
hugovk/congress-legislators
|
scripts/sweep.py
|
Python
|
cc0-1.0
| 1,432
|
###############################################################################
# Copyright (C) 2008 Johann Haarhoff <johann.haarhoff@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of Version 2 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
###############################################################################
#
# Originally written:
# 2008 Johann Haarhoff, <johann.haarhoff@gmail.com>
# Modifications:
#
###############################################################################
#global modules
import shapelibc
import dbflibc
import sys
#my modules
from xmlwriter import * #AUTO_REMOVED by make
from vec import * #AUTO_REMOVED by make
def castSpecific(shpobj):
	"""
	if given a SHPObject, this will return a more
	specific version like SHPPointObject depending
	on the SHPType of the given object
	(returns None for any other SHPType)
	"""
	dispatch = {
		shapelibc.SHPT_POINT: SHPPointObject,
		shapelibc.SHPT_ARCZ: SHPArcZObject,
		shapelibc.SHPT_ARC: SHPArcObject,
		shapelibc.SHPT_POLYGONZ: SHPPolygonZObject,
		shapelibc.SHPT_POLYGON: SHPPolygonObject,
	}
	cls = dispatch.get(shpobj._SHPType)
	if cls is None:
		# unsupported shape type: same implicit-None result as before
		return None
	obj = cls()
	obj.createFromObject(shpobj)
	return obj
class WrongShapeObjectError(Exception):
	"""
	Raised when a shape read from file, or cast from
	another object, does not have the expected SHPType.
	"""
	pass
class SHPObject():
	"""
	Base wrapper for a shapelib shape: type, id, vertex lists and the
	KML label/description strings used by the toKML() serializers.
	"""
	def __init__(self,SHPType = shapelibc.SHPT_NULL,SHPId = -1,Verts = None,Label="",Desc = ""):
		# Verts uses a None sentinel instead of a [[]] default literal:
		# a mutable default would be one shared list across every instance
		# constructed with defaults.
		if Verts is None:
			Verts = [[]]
		self._SHPType = SHPType
		self._SHPId = SHPId
		self._Verts = Verts
		self._Label = Label
		self._Desc = Desc
	def createFromFile(self,filestream,shapenum):
		"""
		The filestream should already be opened
		with shapelibc.open() before calling this
		"""
		shp = shapelibc.ShapeFile_read_object(filestream,shapenum)
		SHPObject.__init__(self,shapelibc.SHPObject_type_get(shp),
				shapelibc.SHPObject_id_get(shp),
				shapelibc.SHPObject_vertices(shp))
	def makeDescriptionFromFile(self,filestream,shapenum):
		"""
		The filestream should already be opened
		with dbflibc.open() before calling this
		"""
		# Append every DBF attribute as "<b>NAME: </b>value<br>" to _Desc.
		numfields = dbflibc.DBFFile_field_count(filestream)
		for i in range(0,numfields):
			field_name = str(dbflibc.DBFFile_field_info(filestream,i)[1]).upper()
			field_data = str(dbflibc.DBFFile_read_attribute(filestream,shapenum,i)).lower()
			self._Desc = self._Desc + "<b>" + field_name + ": </b>" + field_data + "<br>"
class SHPPointObject(SHPObject):
	"""A SHPT_POINT shape that serializes itself as a KML <Point> Placemark."""
	def __init__(self,SHPId = -1,Verts = None,Label="",Desc=""):
		# None sentinel instead of a shared mutable [[]] default.
		if Verts is None:
			Verts = [[]]
		SHPObject.__init__(self,shapelibc.SHPT_POINT,SHPId,Verts,Label,Desc)
	def createFromFile(self,filestream,shapenum):
		"""Read shape *shapenum* from an open shapelibc filestream."""
		SHPObject.createFromFile(self,filestream,shapenum)
		if self._SHPType != shapelibc.SHPT_POINT:
			raise WrongShapeObjectError()
	def createFromObject(self,shpobject):
		"""Copy-construct from a generic SHPObject of point type."""
		if shpobject._SHPType != shapelibc.SHPT_POINT:
			raise WrongShapeObjectError()
		SHPPointObject.__init__(self,shpobject._SHPId,shpobject._Verts,shpobject._Label,shpobject._Desc)
	def toKML(self,out,styleUrl="",indentstr = '\t'):
		"""Write this point as a KML <Placemark> to *out*."""
		kmlwriter = BetterXMLWriter(out,indentstr)
		kmlwriter.openElement("Placemark")
		kmlwriter.openElement("name")
		# fall back to the shape id when no label was provided
		if self._Label == "":
			kmlwriter.addData(str(self._SHPId))
		else:
			kmlwriter.addData(str(self._Label))
		kmlwriter.closeLast()
		kmlwriter.openElement("styleUrl")
		kmlwriter.addData(str(styleUrl))
		kmlwriter.closeLast()
		kmlwriter.openElement("description")
		kmlwriter.addCData(self._Desc)
		kmlwriter.closeLast()
		kmlwriter.openElement("Point")
		kmlwriter.openElement("coordinates")
		for i,j in self._Verts:
			kmlwriter.addData(str(i)+","+str(j)+",0 ")
		# endDocument closes the still-open elements
		kmlwriter.endDocument()
class SHPArcZObject(SHPObject):
	"""A SHPT_ARCZ shape that serializes itself as a KML <LineString> Placemark."""
	def __init__(self,SHPId = -1,Verts = None,Label="",Desc=""):
		# None sentinel instead of a shared mutable [[]] default.
		if Verts is None:
			Verts = [[]]
		SHPObject.__init__(self,shapelibc.SHPT_ARCZ,SHPId,Verts,Label,Desc)
	def createFromFile(self,filestream,shapenum):
		"""Read shape *shapenum* from an open shapelibc filestream."""
		SHPObject.createFromFile(self,filestream,shapenum)
		if self._SHPType != shapelibc.SHPT_ARCZ:
			raise WrongShapeObjectError()
	def createFromObject(self,shpobject):
		"""Copy-construct from a generic SHPObject of arc-Z type."""
		if shpobject._SHPType != shapelibc.SHPT_ARCZ:
			raise WrongShapeObjectError()
		SHPArcZObject.__init__(self,shpobject._SHPId,shpobject._Verts,shpobject._Label,shpobject._Desc)
	def toKML(self,out,styleUrl="",indentstr = '\t'):
		"""Write this arc as a KML <Placemark>/<LineString> to *out*."""
		kmlwriter = BetterXMLWriter(out,indentstr)
		kmlwriter.openElement("Placemark")
		kmlwriter.openElement("name")
		# fall back to the shape id when no label was provided
		if self._Label == "":
			kmlwriter.addData(str(self._SHPId))
		else:
			kmlwriter.addData(str(self._Label))
		kmlwriter.closeLast()
		kmlwriter.openElement("styleUrl")
		kmlwriter.addData(str(styleUrl))
		kmlwriter.closeLast()
		kmlwriter.openElement("description")
		kmlwriter.addCData(self._Desc)
		kmlwriter.closeLast()
		kmlwriter.openElement("LineString")
		kmlwriter.openElement("tessellate")
		kmlwriter.addData("1")
		kmlwriter.closeLast()
		kmlwriter.openElement("coordinates")
		#shapelibc does not populate _Verts properly,
		#so we need to check for the Z coordinate
		#even if this is an ArcZ
		if len(self._Verts[0][0]) == 2:
			#we only have x and y
			for i,j in self._Verts[0]:
				kmlwriter.addData(str(i)+","+str(j)+",0 ")
		elif len(self._Verts[0][0]) == 3:
			#we have x, y and z
			for i,j,k in self._Verts[0]:
				kmlwriter.addData(str(i)+","+str(j)+","+str(k)+" ")
		elif len(self._Verts[0][0]) == 4:
			#we have x,y,z and m
			#I don't know what to do with m at this stage
			for i,j,k,l in self._Verts[0]:
				kmlwriter.addData(str(i)+","+str(j)+","+str(k)+" ")
		kmlwriter.endDocument()
class SHPArcObject(SHPArcZObject):
	"""A SHPT_ARC shape; KML serialization is inherited from SHPArcZObject."""
	def __init__(self,SHPId = -1,Verts = None,Label="",Desc=""):
		# None sentinel instead of a shared mutable [[]] default.
		if Verts is None:
			Verts = [[]]
		SHPObject.__init__(self,shapelibc.SHPT_ARC,SHPId,Verts,Label,Desc)
	def createFromFile(self,filestream,shapenum):
		"""Read shape *shapenum* from an open shapelibc filestream."""
		SHPObject.createFromFile(self,filestream,shapenum)
		if self._SHPType != shapelibc.SHPT_ARC:
			raise WrongShapeObjectError()
	def createFromObject(self,shpobject):
		"""Copy-construct from a generic SHPObject of arc type."""
		if shpobject._SHPType != shapelibc.SHPT_ARC:
			raise WrongShapeObjectError()
		SHPArcObject.__init__(self,shpobject._SHPId,shpobject._Verts,shpobject._Label,shpobject._Desc)
class SHPPolygonZObject(SHPObject):
	"""A SHPT_POLYGONZ shape that serializes itself as a KML <Polygon> Placemark."""
	def __init__(self,SHPId = -1,Verts = None,Label="",Desc=""):
		# None sentinel instead of a shared mutable [[]] default.
		if Verts is None:
			Verts = [[]]
		SHPObject.__init__(self,shapelibc.SHPT_POLYGONZ,SHPId,Verts,Label,Desc)
	def createFromFile(self,filestream,shapenum):
		"""Read shape *shapenum* from an open shapelibc filestream."""
		SHPObject.createFromFile(self,filestream,shapenum)
		if self._SHPType != shapelibc.SHPT_POLYGONZ:
			raise WrongShapeObjectError()
	def createFromObject(self,shpobject):
		"""Copy-construct from a generic SHPObject of polygon-Z type."""
		if shpobject._SHPType != shapelibc.SHPT_POLYGONZ:
			raise WrongShapeObjectError()
		SHPPolygonZObject.__init__(self,shpobject._SHPId,shpobject._Verts,shpobject._Label,shpobject._Desc)
	def toKML(self,out,styleUrl="",indentstr = '\t'):
		"""Write this polygon (all its rings) as a KML <Placemark> to *out*."""
		kmlwriter = BetterXMLWriter(out,indentstr)
		kmlwriter.openElement("Placemark")
		kmlwriter.openElement("name")
		# fall back to the shape id when no label was provided
		if self._Label == "":
			kmlwriter.addData(str(self._SHPId))
		else:
			kmlwriter.addData(str(self._Label))
		kmlwriter.closeLast()
		kmlwriter.openElement("styleUrl")
		kmlwriter.addData(str(styleUrl))
		kmlwriter.closeLast()
		kmlwriter.openElement("description")
		kmlwriter.addCData(self._Desc)
		kmlwriter.closeLast()
		kmlwriter.openElement("Polygon")
		kmlwriter.openElement("extrude")
		kmlwriter.addData("0")
		kmlwriter.closeLast()
		kmlwriter.openElement("tessellate")
		kmlwriter.addData("1")
		kmlwriter.closeLast()
		#polygons may have multiple parts
		#in the shapefile, a part is an outer boundary if the
		#poly is wound clockwise, and an inner boundary if it
		#is wound anticlockwise.
		#we use winding_number in vec.py to figure this out
		for part,coords in enumerate(self._Verts):
			dir = winding_number(coords) #winding_number is from vec.py
			if dir > 0:
				kmlwriter.openElement("outerBoundaryIs")
			elif dir < 0:
				kmlwriter.openElement("innerBoundaryIs")
			# NOTE(review): dir == 0 opens no boundary element but the
			# closeLast() below still runs -- confirm degenerate rings
			# cannot occur here.
			kmlwriter.openElement("LinearRing")
			kmlwriter.openElement("coordinates")
			#shapelibc does not populate _Verts properly,
			#so we need to check for the Z coordinate
			#even if this is a PolygonZ
			if len(self._Verts[part][0]) == 2:
				#we only have x and y
				for i,j in self._Verts[part]:
					kmlwriter.addData(str(i)+","+str(j)+",0 ")
			elif len(self._Verts[part][0]) == 3:
				#we have x, y and z
				for i,j,k in self._Verts[part]:
					kmlwriter.addData(str(i)+","+str(j)+","+str(k)+" ")
			elif len(self._Verts[part][0]) == 4:
				#we have x,y,z and m
				#I don't know what to do with m at this stage
				for i,j,k,l in self._Verts[part]:
					kmlwriter.addData(str(i)+","+str(j)+","+str(k)+" ")
			kmlwriter.closeLast() #coordinates
			kmlwriter.closeLast() #LinearRing
			kmlwriter.closeLast() #outer/innerBoudary
		kmlwriter.endDocument()
class SHPPolygonObject(SHPPolygonZObject):
	"""A SHPT_POLYGON shape; KML serialization is inherited from SHPPolygonZObject."""
	def __init__(self,SHPId = -1,Verts = None,Label="",Desc=""):
		# None sentinel instead of a shared mutable [[]] default.
		if Verts is None:
			Verts = [[]]
		SHPObject.__init__(self,shapelibc.SHPT_POLYGON,SHPId,Verts,Label,Desc)
	def createFromFile(self,filestream,shapenum):
		"""Read shape *shapenum* from an open shapelibc filestream."""
		SHPObject.createFromFile(self,filestream,shapenum)
		if self._SHPType != shapelibc.SHPT_POLYGON:
			raise WrongShapeObjectError()
	def createFromObject(self,shpobject):
		"""Copy-construct from a generic SHPObject of polygon type."""
		if shpobject._SHPType != shapelibc.SHPT_POLYGON:
			raise WrongShapeObjectError()
		SHPPolygonObject.__init__(self,shpobject._SHPId,shpobject._Verts,shpobject._Label,shpobject._Desc)
|
Jaden-J/shape2ge
|
src/shapeobjects.py
|
Python
|
gpl-2.0
| 10,069
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
u"""
Основной скрипт запуска ДЗ.
Данный скрипт призван запускать на выполнение домашнее задание #6.
"""
__author__ = "Elena Sharovar"
__date__ = "2014-11-23"
from hw6_solution1 import modifier
def runner():
    u"""Runs all the tasks: applies the modifier to data.csv in place."""
    print "Modifying file..."
    modifier("data.csv")
    print "Modified successfully!"
# Script entry point.
if __name__ == '__main__':
    runner()
|
pybursa/homeworks
|
e_tverdokhleboff/hw6/hw6_starter.py
|
Python
|
gpl-2.0
| 540
|
from PyQt5.QtCore import pyqtSlot, QThread, pyqtSignal
import os
from PyQt5.QtWidgets import QFileDialog, QProgressDialog, QMessageBox
from PyQt5.QtCore import pyqtSlot, QObject
from books.soldiers import processData
import route_gui
from lxml import etree
import multiprocessing
import math
class XmlImport(QObject):
    """Loads a person-data XML file and runs extraction on a worker QThread."""
    # Signals carrying progress, errors and results back out of the worker.
    threadUpdateSignal = pyqtSignal(int, int, name="progressUpdate")
    threadExceptionSignal = pyqtSignal(object, name="exceptionInProcess")
    threadResultsSignal = pyqtSignal(dict, name="results")
    finishedSignal = pyqtSignal(dict, str, name="processFinished")
    def __init__(self, parent):
        """Create the worker thread and wire the signals to their slots."""
        super(XmlImport, self).__init__(parent)
        self.parent = parent
        self.processCount = 0
        self.result = {}
        self.thread = QThread(parent = self.parent)
        # NOTE(review): presumably auto-connections so these slots run on the
        # GUI thread when emitted from the worker -- confirm.
        self.threadUpdateSignal.connect(self._updateProgressBarInMainThread)
        self.threadExceptionSignal.connect(self._loadingFailed)
        self.threadResultsSignal.connect(self._processFinished)
        self.filepath = ""
def importOne(self, xmlEntry):
if self.processor is not None:
result = self.processor.extractOne(xmlEntry)
return result
else:
return None
    @pyqtSlot()
    def openXMLFile(self):
        """Show a file-open dialog and start analyzing the chosen XML file."""
        filename = QFileDialog.getOpenFileName(self.parent, "Open xml-file containing the data to be analyzed.",
                                               ".", "Person data files (*.xml);;All files (*)")
        # getOpenFileName returns (path, selected_filter); empty path = cancel
        if filename[0] != "":
            self.filepath = filename[0]
            self.parent.setWindowTitle("Kaira " + filename[0])
            self._analyzeOpenedXml(filename)
def _analyzeOpenedXml(self, file):
self.progressDialog = QProgressDialog(self.parent)
self.progressDialog.setCancelButton(None)
self.progressDialog.setLabelText("Extracting provided datafile...")
self.progressDialog.open()
self.progressDialog.setValue(0)
self.file = file
self.thread.run = self._runProcess
self.thread.start()
def _runProcess(self):
try:
xmlDataDocument = self._getXMLroot(self.file[0])
#TODO: Lue xml:n metadata
try:
#TODO: Moniprosarituki?
self.processor = route_gui.Router.get_processdata_class(xmlDataDocument.attrib["bookseries"])(self._processUpdateCallback)
result = self.processor.startExtractionProcess(xmlDataDocument, self.file[0])
self.threadResultsSignal.emit(result)
except KeyError:
raise MetadataException()
except Exception as e:
if "DEV" in os.environ and os.environ["DEV"]:
raise e
else:
print(e)
self.threadExceptionSignal.emit(e)
@pyqtSlot(int, int)
def _updateProgressBarInMainThread(self, i, max):
self.progressDialog.setRange(0, max)
self.progressDialog.setValue(i)
@pyqtSlot(object)
def _loadingFailed(self, e):
self.progressDialog.cancel()
import pymongo
errMessage = "Error in data-file. Extraction failed. Is the xml valid and in utf-8 format? More info: "
if isinstance(e, pymongo.errors.ServerSelectionTimeoutError):
errMessage = "Couldn't connect to database. Try going to '/mongodb/data/db' in application directory and deleting 'mongod.lock' file and restart application. More info: "
msgbox = QMessageBox()
msgbox.information(self.parent, "Extraction failed", errMessage + str(e))
msgbox.show()
@pyqtSlot(dict)
def _processFinished(self, result):
self.result = result
self.finishedSignal.emit(self.result, self.filepath)
def _processUpdateCallback(self, i, max):
self.threadUpdateSignal.emit(i, max)
def _getXMLroot(self, filepath):
#read the data in XML-format to be processed
parser = etree.XMLParser(encoding="utf-8")
tree = etree.parse(filepath, parser=parser) #ET.parse(filepath)
return tree.getroot()
class MetadataException(Exception):
    """Raised when the XML document root lacks the 'bookseries' attribute."""

    # Human-readable explanation shown to the user when the import fails.
    _MESSAGE = (
        "ERROR: The document doesn't contain bookseries attribute in the beginning of the file. Couldn't import. Try "
        "to generate new xml-file from the source ocr-text or add the missing attribute to the file manually."
    )

    def __init__(self):
        self.msg = self._MESSAGE

    def __str__(self):
        return repr(self.msg)
|
Learning-from-our-past/Kaira
|
qtgui/xmlImport.py
|
Python
|
gpl-2.0
| 4,469
|
"""GIFImage by Matthew Roe"""
import Image
import pygame
from pygame.locals import *
import time
class GIFImage(object):
    """Animated GIF player for pygame: decodes frames with PIL up front,
    then blits the current frame on each render() call."""

    def __init__(self, filename):
        """Load *filename* and pre-decode every frame."""
        self.filename = filename
        self.image = Image.open(filename)
        self.frames = []  # list of [pygame.Surface, duration-in-seconds]
        self.get_frames()
        self.cur = 0                # index of the frame currently shown
        self.ptime = time.time()    # timestamp of the last frame switch
        self.running = True
        self.breakpoint = len(self.frames)-1  # last frame of the play range
        self.startpoint = 0                   # first frame of the play range
        self.reversed = False

    def get_rect(self):
        """Return a Rect of the image size anchored at (0, 0)."""
        return pygame.rect.Rect((0,0), self.image.size)

    def get_frames(self):
        """Decode all GIF frames into pygame surfaces (fills self.frames).

        Relies on old-PIL internals: image.tile entries describe each
        frame's dirty rectangle and decoder parameters.
        """
        image = self.image
        # Palette of the first frame, as a list of [r, g, b] triples.
        pal = image.getpalette()
        base_palette = []
        for i in range(0, len(pal), 3):
            rgb = pal[i:i+3]
            base_palette.append(rgb)
        # First pass: collect the decoder "bits" values of every frame to
        # guess how per-frame palettes/composition should be handled.
        all_tiles = []
        try:
            while 1:
                if not image.tile:
                    image.seek(0)
                if image.tile:
                    all_tiles.append(image.tile[0][3][0])
                image.seek(image.tell()+1)
        except EOFError:
            image.seek(0)
        all_tiles = tuple(set(all_tiles))
        # Second pass: actually render each frame.
        try:
            while 1:
                try:
                    duration = image.info["duration"]
                except:
                    # Frames with no duration default to 100 ms.
                    duration = 100
                duration *= .001  # convert from milliseconds to seconds
                cons = False  # whether this frame must be composed on the previous ones
                x0, y0, x1, y1 = (0, 0) + image.size
                if image.tile:
                    tile = image.tile
                else:
                    image.seek(0)
                    tile = image.tile
                if len(tile) > 0:
                    # Dirty rectangle of this frame.
                    x0, y0, x1, y1 = tile[0][1]
                if all_tiles:
                    if all_tiles in ((6,), (7,)):
                        # Frames are deltas: compose on top of earlier frames
                        # using the current per-frame palette.
                        cons = True
                        pal = image.getpalette()
                        palette = []
                        for i in range(0, len(pal), 3):
                            rgb = pal[i:i+3]
                            palette.append(rgb)
                    elif all_tiles in ((7, 8), (8, 7)):
                        # Full frames with per-frame palettes.
                        pal = image.getpalette()
                        palette = []
                        for i in range(0, len(pal), 3):
                            rgb = pal[i:i+3]
                            palette.append(rgb)
                    else:
                        palette = base_palette
                else:
                    palette = base_palette
                pi = pygame.image.fromstring(image.tostring(), image.size, image.mode)
                pi.set_palette(palette)
                if "transparency" in image.info:
                    pi.set_colorkey(image.info["transparency"])
                pi2 = pygame.Surface(image.size, SRCALPHA)
                if cons:
                    # Re-blit all previous frames, then the new dirty rect.
                    for i in self.frames:
                        pi2.blit(i[0], (0,0))
                pi2.blit(pi, (x0, y0), (x0, y0, x1-x0, y1-y0))
                self.frames.append([pi2, duration])
                image.seek(image.tell()+1)
        except EOFError:
            # Seeking past the last frame ends decoding.
            pass

    def render(self, screen, pos):
        """Blit the current frame at *pos*, advancing when its duration elapsed."""
        if self.running:
            if time.time() - self.ptime > self.frames[self.cur][1]:
                if self.reversed:
                    self.cur -= 1
                    # Wrap around within [startpoint, breakpoint].
                    if self.cur < self.startpoint:
                        self.cur = self.breakpoint
                else:
                    self.cur += 1
                    if self.cur > self.breakpoint:
                        self.cur = self.startpoint
                self.ptime = time.time()
        screen.blit(self.frames[self.cur][0], pos)

    def seek(self, num):
        """Jump to frame *num*, clamped to the valid frame range."""
        self.cur = num
        if self.cur < 0:
            self.cur = 0
        if self.cur >= len(self.frames):
            self.cur = len(self.frames)-1

    def set_bounds(self, start, end):
        """Restrict playback to frames [start, end], clamping both ends."""
        if start < 0:
            start = 0
        if start >= len(self.frames):
            start = len(self.frames) - 1
        if end < 0:
            end = 0
        if end >= len(self.frames):
            end = len(self.frames) - 1
        if end < start:
            end = start
        self.startpoint = start
        self.breakpoint = end

    def pause(self):
        """Stop advancing frames (render keeps drawing the current one)."""
        self.running = False

    def play(self):
        """Resume advancing frames."""
        self.running = True

    def rewind(self):
        """Jump to the first frame."""
        self.seek(0)

    def fastforward(self):
        """Jump to the last frame."""
        self.seek(self.length()-1)

    def get_height(self):
        return self.image.size[1]

    def get_width(self):
        return self.image.size[0]

    def get_size(self):
        return self.image.size

    def length(self):
        """Return the number of decoded frames."""
        return len(self.frames)

    def reverse(self):
        """Toggle playback direction."""
        self.reversed = not self.reversed

    def reset(self):
        """Restart playback from frame 0, forwards."""
        self.cur = 0
        self.ptime = time.time()
        self.reversed = False

    def copy(self):
        """Return an independent GIFImage with the same playback state.

        NOTE(review): re-opens and re-decodes the file rather than sharing
        frames -- cheap state copy, expensive decode.
        """
        new = GIFImage(self.filename)
        new.running = self.running
        new.breakpoint = self.breakpoint
        new.startpoint = self.startpoint
        new.cur = self.cur
        new.ptime = self.ptime
        new.reversed = self.reversed
        return new
##def main():
## pygame.init()
## screen = pygame.display.set_mode((640, 480))
##
## hulk = GIFImage("hulk.gif")
## football = GIFImage("football.gif")
## hulk2 = hulk.copy()
## hulk2.reverse()
## hulk3 = hulk.copy()
## hulk3.set_bounds(0, 2)
## spiderman = GIFImage("spiderman7.gif")
##
## while 1:
## for event in pygame.event.get():
## if event.type == QUIT:
## pygame.quit()
## return
##
## screen.fill((255,255,255))
## hulk.render(screen, (50, 0))
## hulk2.render(screen, (50, 150))
## hulk3.render(screen, (50, 300))
## football.render(screen, (200, 50))
## spiderman.render(screen, (200, 150))
## pygame.display.flip()
##
##if __name__ == "__main__":
## main()
|
drfreemayn/ml-testing
|
sex-dice/GIFImage.py
|
Python
|
gpl-2.0
| 5,891
|
import string

# CVS keyword expansion parsed with the legacy Python 2 string-module
# functions (string.split/string.join were removed in Python 3).
__version__ = string.split('$Revision: 1.6 $')[1]
__date__ = string.join(string.split('$Date: 2001/11/17 14:12:34 $')[1:3], ' ')
__author__ = 'Tarn Weisner Burton <twburton@users.sourceforge.net>'
__doc__ = 'http://oss.sgi.com/projects/ogl-sample/registry/SUN/convolution_border_modes.txt'
__api_version__ = 0x103

# Token defined by the GL_SUN_convolution_border_modes extension.
GL_WRAP_BORDER_SUN = 0x81D4

def glInitConvolutionBorderModesSUN():
    """Return True if the GL_SUN_convolution_border_modes extension is available."""
    from OpenGL.GL import __has_extension
    return __has_extension("GL_SUN_convolution_border_modes")

def __info():
    # Returns [] when the extension is present (no extra queryable state);
    # implicitly returns None otherwise.
    if glInitConvolutionBorderModesSUN():
        return []
|
fxia22/ASM_xf
|
PythonD/site_python/OpenGL/GL/SUN/convolution_border_modes.py
|
Python
|
gpl-2.0
| 585
|
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Mock HTTPretty.
HTTPretty fix related to SSL bug:
https://github.com/gabrielfalcao/HTTPretty/issues/242
"""
import httpretty
import httpretty.core
from httpretty import HTTPretty as OriginalHTTPretty
# Optional dependency: requests' pyopenssl urllib3 integration. A bare
# "except:" here also swallowed SystemExit/KeyboardInterrupt; only a
# missing module should be tolerated.
try:
    from requests.packages.urllib3.contrib.pyopenssl import \
        inject_into_urllib3, extract_from_urllib3
    pyopenssl_override = True
except ImportError:
    # requests without pyopenssl support: nothing to patch around.
    pyopenssl_override = False
class MyHTTPretty(OriginalHTTPretty):
    """HTTPretty variant that cooperates with pyopenssl.

    pyopenssl monkey-patches the default ssl_wrap_socket() function in the
    'requests' library, which can defeat HTTPretty's socket monkey-patching
    for HTTPS requests. This subclass extends enable()/disable() to undo
    and redo the pyopenssl patching, respectively.
    """

    @classmethod
    def enable(cls):
        """Activate the HTTP mocks, removing the pyopenssl patch if present."""
        OriginalHTTPretty.enable()
        if not pyopenssl_override:
            return
        # Fall back to urllib3's stock SSL implementation while mocking.
        extract_from_urllib3()

    @classmethod
    def disable(cls):
        """Deactivate the HTTP mocks, restoring the pyopenssl patch if present."""
        OriginalHTTPretty.disable()
        if not pyopenssl_override:
            return
        # Re-install the pyopenssl-based SSL implementation.
        inject_into_urllib3()
# Substitute in our version: replace the class both in httpretty.core and at
# the package top level so all lookups resolve to the patched implementation.
HTTPretty = MyHTTPretty
httpretty.core.httpretty = MyHTTPretty
# May need to set other module-level attributes here, e.g. enable, reset etc,
# depending on your needs
httpretty.httpretty = MyHTTPretty
|
slint/zenodo
|
zenodo/modules/records/httpretty_mock.py
|
Python
|
gpl-2.0
| 2,498
|
import struct
import os
from sys import maxint
from enigma import eTimer, eHdmiCEC, eActionMap
from config import config, ConfigSelection, ConfigYesNo, ConfigSubsection, ConfigText
from Tools.StbHardware import getFPWasTimerWakeup
from Tools.Directories import fileExists
# User-configurable HDMI-CEC options, persisted via the enigma2 config system.
config.hdmicec = ConfigSubsection()
config.hdmicec.enabled = ConfigYesNo(default = False)
config.hdmicec.control_tv_standby = ConfigYesNo(default = True)
config.hdmicec.control_tv_wakeup = ConfigYesNo(default = True)
config.hdmicec.report_active_source = ConfigYesNo(default = True)
config.hdmicec.report_active_menu = ConfigYesNo(default = True)
config.hdmicec.handle_tv_standby = ConfigYesNo(default = True)
config.hdmicec.handle_tv_wakeup = ConfigYesNo(default = True)
# Which incoming CEC event counts as "the TV woke up".
config.hdmicec.tv_wakeup_detection = ConfigSelection(
    choices = {
        "wakeup": _("Wakeup"),
        "tvreportphysicaladdress": _("TV physical address report"),
        "sourcerequest": _("Source request"),
        "streamrequest": _("Stream request"),
        "osdnamerequest": _("OSD name request"),
        "activity": _("Any activity"),
    },
    default = "streamrequest")
config.hdmicec.fixed_physical_address = ConfigText(default = "0.0.0.0")
config.hdmicec.volume_forwarding = ConfigYesNo(default = False)
config.hdmicec.control_receiver_wakeup = ConfigYesNo(default = False)
config.hdmicec.control_receiver_standby = ConfigYesNo(default = False)
config.hdmicec.handle_deepstandby_events = ConfigYesNo(default = False)
config.hdmicec.preemphasis = ConfigYesNo(default = False)
# Minimum delay between queued outgoing CEC messages, in milliseconds.
choicelist = []
for i in (10, 50, 100, 150, 250, 500, 750, 1000, 1500, 2000):
    choicelist.append(("%d" % i, "%d ms" % i))
config.hdmicec.minimum_send_interval = ConfigSelection(default = "0", choices = [("0", _("Disabled"))] + choicelist)
class HdmiCec:
    """HDMI-CEC bridge for enigma2 (Python 2 code).

    Translates box state changes (standby, deep standby, wakeup) into CEC
    messages to the TV/AV receiver, reacts to incoming CEC requests, and
    optionally forwards volume keys to an audio receiver.
    """

    # Singleton instance; enforced in __init__.
    instance = None

    def __init__(self):
        if config.hdmicec.enabled.value:
            assert not HdmiCec.instance, "only one HdmiCec instance is allowed!"
            HdmiCec.instance = self
            # Timer that drains the outgoing message queue at the configured
            # minimum send interval.
            self.wait = eTimer()
            self.wait.timeout.get().append(self.sendCmd)
            self.queue = []
            eHdmiCEC.getInstance().messageReceived.get().append(self.messageReceived)
            config.misc.standbyCounter.addNotifier(self.onEnterStandby, initial_call = False)
            config.misc.DeepStandby.addNotifier(self.onEnterDeepStandby, initial_call = False)
            self.setFixedPhysicalAddress(config.hdmicec.fixed_physical_address.value)
            self.volumeForwardingEnabled = False
            self.volumeForwardingDestination = 0
            # Highest-priority key hook so volume keys can be intercepted.
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keyEvent)
            config.hdmicec.volume_forwarding.addNotifier(self.configVolumeForwarding)
            config.hdmicec.enabled.addNotifier(self.configVolumeForwarding)
            if config.hdmicec.handle_deepstandby_events.value:
                # Only greet the TV when this was a real power-on, not a
                # timer-scheduled wakeup.
                if not getFPWasTimerWakeup():
                    self.wakeupMessages()
            # if fileExists("/proc/stb/hdmi/preemphasis"):
            #     self.sethdmipreemphasis()

    def getPhysicalAddress(self):
        """Return our CEC physical address formatted as 'a.b.c.d'."""
        physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
        hexstring = '%04x' % physicaladdress
        return hexstring[0] + '.' + hexstring[1] + '.' + hexstring[2] + '.' + hexstring[3]

    def setFixedPhysicalAddress(self, address):
        """Persist and apply a fixed physical address ('0.0.0.0' = automatic)."""
        if address != config.hdmicec.fixed_physical_address.value:
            config.hdmicec.fixed_physical_address.value = address
            config.hdmicec.fixed_physical_address.save()
        # Strip the dots: 'a.b.c.d' -> hex digits 'abcd'.
        hexstring = address[0] + address[2] + address[4] + address[6]
        eHdmiCEC.getInstance().setFixedPhysicalAddress(int(float.fromhex(hexstring)))

    def sendMessage(self, address, message):
        """Translate a symbolic *message* name into a CEC opcode + payload
        and send it (queued if a minimum send interval is configured)."""
        if config.hdmicec.enabled.value:
            cmd = 0
            data = ''
            if message == "wakeup":
                cmd = 0x04
            elif message == "sourceactive":
                address = 0x0f # use broadcast for active source command
                cmd = 0x82
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "standby":
                cmd = 0x36
            elif message == "sourceinactive":
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                cmd = 0x9d
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "menuactive":
                cmd = 0x8e
                data = str(struct.pack('B', 0x00))
            elif message == "menuinactive":
                cmd = 0x8e
                data = str(struct.pack('B', 0x01))
            elif message == "givesystemaudiostatus":
                cmd = 0x7d
                address = 0x05
            elif message == "setsystemaudiomode":
                cmd = 0x70
                address = 0x05
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "osdname":
                # OSD name is the hostname, truncated to CEC's 14-byte limit.
                cmd = 0x47
                data = os.uname()[1]
                data = data[:14]
            elif message == "poweractive":
                cmd = 0x90
                data = str(struct.pack('B', 0x00))
            elif message == "powerinactive":
                cmd = 0x90
                data = str(struct.pack('B', 0x01))
            elif message == "reportaddress":
                address = 0x0f # use broadcast address
                cmd = 0x84
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                devicetype = eHdmiCEC.getInstance().getDeviceType()
                data = str(struct.pack('BBB', int(physicaladdress/256), int(physicaladdress%256), devicetype))
            elif message == "vendorid":
                cmd = 0x87
                data = '\x00\x00\x00'
            elif message == "keypoweron":
                cmd = 0x44
                data = str(struct.pack('B', 0x6d))
            elif message == "keypoweroff":
                cmd = 0x44
                data = str(struct.pack('B', 0x6c))
            if cmd:
                if config.hdmicec.minimum_send_interval.value != "0" and message != "standby": # Use no interval time when message is standby. usefull for Panasonic TV
                    self.queue.append((address, cmd, data))
                    if not self.wait.isActive():
                        self.wait.start(int(config.hdmicec.minimum_send_interval.value), True)
                else:
                    eHdmiCEC.getInstance().sendMessage(address, cmd, data, len(data))

    def sendCmd(self):
        """Timer callback: send the next queued message, then re-arm."""
        if len(self.queue):
            (address, cmd, data) = self.queue.pop(0)
            eHdmiCEC.getInstance().sendMessage(address, cmd, data, len(data))
            self.wait.start(int(config.hdmicec.minimum_send_interval.value), True)

    def sendMessages(self, address, messages):
        """Send each symbolic message in *messages* to *address* in order."""
        for message in messages:
            self.sendMessage(address, message)

    def wakeupMessages(self):
        """Announce box wakeup to the TV (address 0) and, optionally, power
        on the audio receiver (address 5)."""
        if config.hdmicec.enabled.value:
            messages = []
            if config.hdmicec.control_tv_wakeup.value:
                messages.append("wakeup")
            if config.hdmicec.report_active_source.value:
                messages.append("sourceactive")
            if config.hdmicec.report_active_menu.value:
                messages.append("menuactive")
            if messages:
                self.sendMessages(0, messages)
            if config.hdmicec.control_receiver_wakeup.value:
                self.sendMessage(5, "keypoweron")
                self.sendMessage(5, "setsystemaudiomode")

    def standbyMessages(self):
        """Announce box standby to the TV and, optionally, power off the
        audio receiver."""
        if config.hdmicec.enabled.value:
            messages = []
            if config.hdmicec.control_tv_standby.value:
                messages.append("standby")
            else:
                # Not allowed to switch the TV off: just withdraw our source
                # and menu claims.
                if config.hdmicec.report_active_source.value:
                    messages.append("sourceinactive")
                if config.hdmicec.report_active_menu.value:
                    messages.append("menuinactive")
            if messages:
                self.sendMessages(0, messages)
            if config.hdmicec.control_receiver_standby.value:
                self.sendMessage(5, "keypoweroff")
                self.sendMessage(5, "standby")

    def onLeaveStandby(self):
        self.wakeupMessages()

    def onEnterStandby(self, configElement):
        from Screens.Standby import inStandby
        inStandby.onClose.append(self.onLeaveStandby)
        self.standbyMessages()

    def onEnterDeepStandby(self, configElement):
        if config.hdmicec.handle_deepstandby_events.value:
            self.standbyMessages()

    def standby(self):
        """Put the box into standby (triggered by a TV standby request)."""
        from Screens.Standby import Standby, inStandby
        if not inStandby:
            from Tools import Notifications
            Notifications.AddNotification(Standby)

    def wakeup(self):
        """Wake the box from standby (triggered by a TV wakeup request)."""
        from Screens.Standby import Standby, inStandby
        if inStandby:
            inStandby.Power()

    def messageReceived(self, message):
        """Dispatch an incoming CEC message: answer status/name/address
        requests and optionally follow TV standby/wakeup."""
        if config.hdmicec.enabled.value:
            from Screens.Standby import inStandby
            cmd = message.getCommand()
            # NOTE(review): getData appears to fill this buffer via the SWIG
            # binding despite Python strings being immutable -- confirm.
            data = 16 * '\x00'
            length = message.getData(data, len(data))
            if cmd == 0x00: # feature abort
                if data[0] == '\x44':
                    print 'eHdmiCec: volume forwarding not supported by device %02x'%(message.getAddress())
                    self.volumeForwardingEnabled = False
            elif cmd == 0x46: # request name
                self.sendMessage(message.getAddress(), 'osdname')
            elif cmd == 0x7e or cmd == 0x72: # system audio mode status
                if data[0] == '\x01':
                    self.volumeForwardingDestination = 5 # on: send volume keys to receiver
                else:
                    self.volumeForwardingDestination = 0 # off: send volume keys to tv
                if config.hdmicec.volume_forwarding.value:
                    print 'eHdmiCec: volume forwarding to device %02x enabled'% self.volumeForwardingDestination
                    self.volumeForwardingEnabled = True
            elif cmd == 0x8f: # request power status
                if inStandby:
                    self.sendMessage(message.getAddress(), 'powerinactive')
                else:
                    self.sendMessage(message.getAddress(), 'poweractive')
            elif cmd == 0x83: # request address
                self.sendMessage(message.getAddress(), 'reportaddress')
            elif cmd == 0x86: # request streaming path
                physicaladdress = ord(data[0]) * 256 + ord(data[1])
                ouraddress = eHdmiCEC.getInstance().getPhysicalAddress()
                if physicaladdress == ouraddress:
                    if not inStandby:
                        if config.hdmicec.report_active_source.value:
                            self.sendMessage(message.getAddress(), 'sourceactive')
            elif cmd == 0x85: # request active source
                if not inStandby:
                    if config.hdmicec.report_active_source.value:
                        self.sendMessage(message.getAddress(), 'sourceactive')
            elif cmd == 0x8c: # request vendor id
                self.sendMessage(message.getAddress(), 'vendorid')
            elif cmd == 0x8d: # menu request
                requesttype = ord(data[0])
                if requesttype == 2: # query
                    if inStandby:
                        self.sendMessage(message.getAddress(), 'menuinactive')
                    else:
                        self.sendMessage(message.getAddress(), 'menuactive')
            # handle standby request from the tv
            if cmd == 0x36 and config.hdmicec.handle_tv_standby.value:
                self.standby()
            # handle wakeup requests from the tv
            if config.hdmicec.handle_tv_wakeup.value:
                if cmd == 0x04 and config.hdmicec.tv_wakeup_detection.value == "wakeup":
                    self.wakeup()
                elif cmd == 0x84 and config.hdmicec.tv_wakeup_detection.value == "tvreportphysicaladdress":
                    # Only the TV itself reports physical address 0.0.0.0.
                    if (ord(data[0]) * 256 + ord(data[1])) == 0 and ord(data[2]) == 0:
                        self.wakeup()
                elif cmd == 0x85 and config.hdmicec.tv_wakeup_detection.value == "sourcerequest":
                    self.wakeup()
                elif cmd == 0x86 and config.hdmicec.tv_wakeup_detection.value == "streamrequest":
                    physicaladdress = ord(data[0]) * 256 + ord(data[1])
                    ouraddress = eHdmiCEC.getInstance().getPhysicalAddress()
                    if physicaladdress == ouraddress:
                        self.wakeup()
                elif cmd == 0x46 and config.hdmicec.tv_wakeup_detection.value == "osdnamerequest":
                    self.wakeup()
                elif cmd != 0x36 and config.hdmicec.tv_wakeup_detection.value == "activity":
                    self.wakeup()

    def configVolumeForwarding(self, configElement):
        """Notifier: (de)activate volume-key forwarding and query the
        receiver's current audio mode when enabling."""
        if config.hdmicec.enabled.value and config.hdmicec.volume_forwarding.value:
            self.volumeForwardingEnabled = True
            self.sendMessage(0x05, 'givesystemaudiostatus')
        else:
            self.volumeForwardingEnabled = False

    def keyEvent(self, keyCode, keyEvent):
        """Key hook: translate volume keys (113-115) into CEC user-control
        press (0x44) / release (0x45) messages. Returns 1 when consumed."""
        if not self.volumeForwardingEnabled: return
        cmd = 0
        data = ''
        if keyEvent == 0:
            # Key press.
            if keyCode == 115:
                cmd = 0x44
                data = str(struct.pack('B', 0x41))
            if keyCode == 114:
                cmd = 0x44
                data = str(struct.pack('B', 0x42))
            if keyCode == 113:
                cmd = 0x44
                data = str(struct.pack('B', 0x43))
        if keyEvent == 2:
            # Key repeat: same message as a press.
            if keyCode == 115:
                cmd = 0x44
                data = str(struct.pack('B', 0x41))
            if keyCode == 114:
                cmd = 0x44
                data = str(struct.pack('B', 0x42))
            if keyCode == 113:
                cmd = 0x44
                data = str(struct.pack('B', 0x43))
        if keyEvent == 1:
            # Key release.
            if keyCode == 115 or keyCode == 114 or keyCode == 113:
                cmd = 0x45
        if cmd:
            eHdmiCEC.getInstance().sendMessage(self.volumeForwardingDestination, cmd, data, len(data))
            return 1
        else:
            return 0

    def sethdmipreemphasis(self):
        """Best-effort write of the HDMI pre-emphasis setting to /proc."""
        try:
            if config.hdmicec.preemphasis.value == True:
                file = open("/proc/stb/hdmi/preemphasis", "w")
                file.write('on')
                file.close()
            else:
                file = open("/proc/stb/hdmi/preemphasis", "w")
                file.write('off')
                file.close()
        except:
            return

# Module-level singleton; created on import.
hdmi_cec = HdmiCec()
|
popazerty/enigma2
|
lib/python/Components/HdmiCec.py
|
Python
|
gpl-2.0
| 12,328
|
from wrappers import *
from inspect import getdoc
@plugin
class Help:
    """Bot plugin exposing introspection commands: list loaded plugins,
    available commands, saved aliases, expand an alias, and show
    per-command help."""

    @command("list")
    def list(self, message):
        """list the loaded plugins"""
        return message.reply(list(self.bot.plugins.keys()), "loaded plugins: " + ", ".join(self.bot.plugins.keys()))

    @command("commands")
    def listcoms(self, message):
        """list the available commands"""
        return message.reply(list(self.bot.commands.keys()), "available commands: " + ", ".join(self.bot.commands.keys()))

    @command("aliases")
    def listaliases(self, message):
        """list the saved aliases"""
        return message.reply(list(self.bot.aliases.keys()), "saved aliases: " + ", ".join(self.bot.aliases.keys()))

    @command("expand")
    def expand(self, message):
        """show what an alias does"""
        if message.text:
            command = message.text.split()[0].strip()
            if command in self.bot.aliases:
                x = self.bot.aliases[command]
                # Rebuild the alias definition in the same syntax used to create it.
                x = self.bot.command_char + "alias %s = " % command + " || ".join(["%s%s" % (cmd, (" " + arg) if arg else "") for cmd, arg in x])
                return message.reply(x)

    @command("help", simple=True)
    def help_(self, message):
        """help <command> => returns the help for the specified command"""
        if not isinstance(message.data, str):
            # A non-string payload means "describe this object": use its docstring.
            doc = getdoc(message.data)
            if not doc:
                return message.reply("No help found for passed object '%s'" % message.data.__class__.__name__)
            else:
                firstline = "%s: %s" % (message.data.__class__.__name__, doc.split("\n")[0])
                return message.reply(doc, firstline)
        elif message.data:
            try:
                com = message.data.split()[0]
                func = self.bot.commands[com][0]
            except (KeyError, IndexError):
                # KeyError: unknown command name; IndexError: blank argument.
                # (Was a bare except, which also hid programming errors.)
                raise Exception("specified command not found")
            doc = func.__doc__
            if not doc:
                return message.reply("No help found for specified command")
            else:
                firstline = "%s: %s" % (com, doc.split("\n")[0])
                return message.reply(doc, firstline)
        else:
            return message.reply("Help can be found at https://github.com/ellxc/piperbot/blob/master/README.md or by joining #piperbot on freenode")
|
ellxc/piperbot
|
plugins/help.py
|
Python
|
gpl-2.0
| 2,390
|
# Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
import imp
import inspect
import logging
import os
import shutil
import sys
import tarfile
from xl.nls import gettext as _
from xl import (
common,
event,
settings,
xdg
)
logger = logging.getLogger(__name__)
class InvalidPluginError(Exception):
    """Raised when a plugin archive cannot be installed or parsed."""

    def __str__(self):
        # The human-readable message is the first constructor argument.
        message = self.args[0]
        return str(message)
class PluginsManager(object):
    """Discovers, installs, enables and disables Exaile plugins located in
    the per-user and system 'plugins' directories."""

    def __init__(self, exaile, load=True):
        # Candidate plugin directories, user dir first (index 0 is where
        # new plugins get installed).
        self.plugindirs = [ os.path.join(p, 'plugins') \
                for p in xdg.get_data_dirs() ]
        if xdg.local_hack:
            self.plugindirs.insert(1, os.path.join(xdg.exaile_dir, 'plugins'))
        # NOTE(review): bare except -- deliberately best-effort creation of
        # the user plugin dir (e.g. it may already exist).
        try:
            os.makedirs(self.plugindirs[0])
        except:
            pass
        self.plugindirs = [ x for x in self.plugindirs if os.path.exists(x) ]
        self.loaded_plugins = {}    # pluginname -> module or plugin_class instance
        self.exaile = exaile
        self.enabled_plugins = {}   # pluginname -> enabled plugin object
        self.load = load            # whether save_enabled persists settings

    def __findplugin(self, pluginname):
        """Return the on-disk path of *pluginname*, or None if not found."""
        for dir in self.plugindirs:
            path = os.path.join(dir, pluginname)
            if os.path.exists(path):
                return path
        return None

    def load_plugin(self, pluginname, reload=False):
        """Import a plugin's __init__.py; returns the cached object unless
        *reload* is set. Returns False when the plugin is not found."""
        if not reload and pluginname in self.loaded_plugins:
            return self.loaded_plugins[pluginname]
        path = self.__findplugin(pluginname)
        if path is None:
            return False
        # Temporarily prepend the plugin dir so its relative imports resolve.
        sys.path.insert(0, path)
        plugin = imp.load_source(pluginname, os.path.join(path,'__init__.py'))
        if hasattr(plugin, 'plugin_class'):
            # New-style plugin: instantiate its declared class.
            plugin = plugin.plugin_class()
        sys.path = sys.path[1:]
        self.loaded_plugins[pluginname] = plugin
        return plugin

    def install_plugin(self, path):
        """Extract a plugin tarball into the user plugin directory.

        Raises:
            InvalidPluginError: bad archive format, name collision, or an
                archive member escaping the plugin directory.
        """
        try:
            tar = tarfile.open(path, "r:*") #transparently supports gz, bz2
        except (tarfile.ReadError, OSError):
            raise InvalidPluginError(
                _('Plugin archive is not in the correct format.'))

        #ensure the paths in the archive are sane
        mems = tar.getmembers()
        base = os.path.basename(path).split('.')[0]
        if os.path.isdir(os.path.join(self.plugindirs[0], base)):
            raise InvalidPluginError(
                _('A plugin with the name "%s" is already installed.') % base)
        # NOTE(review): startswith() is a weak traversal guard -- it does not
        # reject absolute paths or '..' that still begin with the base name.
        for m in mems:
            if not m.name.startswith(base):
                raise InvalidPluginError(
                    _('Plugin archive contains an unsafe path.'))

        tar.extractall(self.plugindirs[0])

    def __on_new_plugin_loaded(self, eventname, exaile, maybe_name, fn):
        # One-shot event trampoline: unhook ourselves, then run the callback.
        event.remove_callback(self.__on_new_plugin_loaded, eventname)
        fn()

    def __enable_new_plugin(self, plugin):
        '''Sets up a new-style plugin. See helloworld plugin for details'''
        if hasattr(plugin, 'on_gui_loaded'):
            # Defer until the GUI exists if exaile is still starting up.
            if self.exaile.loading:
                event.add_ui_callback(self.__on_new_plugin_loaded, 'gui_loaded',
                                      None, plugin.on_gui_loaded)
            else:
                plugin.on_gui_loaded()
        if hasattr(plugin, 'on_exaile_loaded'):
            if self.exaile.loading:
                event.add_ui_callback(self.__on_new_plugin_loaded, 'exaile_loaded',
                                      None, plugin.on_exaile_loaded)
            else:
                plugin.on_exaile_loaded()

    def uninstall_plugin(self, pluginname):
        """Disable and delete a plugin; returns True when a copy was removed."""
        self.disable_plugin(pluginname)
        for dir in self.plugindirs:
            # NOTE(review): bare except -- tolerates the plugin being absent
            # from this particular directory.
            try:
                shutil.rmtree(self.__findplugin(pluginname))
                return True
            except:
                pass
        return False

    def enable_plugin(self, pluginname):
        """Load and enable a plugin, persisting the enabled set.

        Re-raises any error from loading or from the plugin's enable().
        """
        try:
            plugin = self.load_plugin(pluginname)
            if not plugin: raise Exception("Error loading plugin")
            plugin.enable(self.exaile)
            if not inspect.ismodule(plugin):
                self.__enable_new_plugin(plugin)
            self.enabled_plugins[pluginname] = plugin
            logger.debug("Loaded plugin %s" % pluginname)
            self.save_enabled()
        except Exception as e:
            logger.exception("Unable to enable plugin %s", pluginname)
            raise e

    def disable_plugin(self, pluginname):
        """Disable an enabled plugin; returns False if it was not enabled."""
        try:
            plugin = self.enabled_plugins[pluginname]
            del self.enabled_plugins[pluginname]
        except KeyError:
            logger.exception("Plugin not found, possibly already disabled")
            return False
        try:
            plugin.disable(self.exaile)
            logger.debug("Unloaded plugin %s" % pluginname)
            self.save_enabled()
        except Exception as e:
            logger.exception("Unable to fully disable plugin %s", pluginname)
            raise e
        return True

    def list_installed_plugins(self):
        """Return the names of all plugin directories found on disk."""
        pluginlist = []
        for dir in self.plugindirs:
            if os.path.exists(dir):
                for file in os.listdir(dir):
                    if file not in pluginlist and \
                            os.path.isdir(os.path.join(dir, file)) and \
                            file != '__pycache__':
                        pluginlist.append(file)
        return pluginlist

    def list_available_plugins(self):
        # Not implemented.
        pass

    def list_updateable_plugins(self):
        # Not implemented.
        pass

    def get_plugin_info(self, pluginname):
        """Parse a plugin's PLUGININFO file into a dict."""
        path = os.path.join(self.__findplugin(pluginname), 'PLUGININFO')
        f = open(path)
        infodict = {}
        for line in f:
            try:
                key, val = line.split("=",1)
                # restricted eval - no bult-in funcs. marginally more secure.
                # NOTE(review): still eval on file content -- only as safe as
                # the plugin source itself.
                infodict[key] = eval(val, {'__builtins__': None, '_': _}, {})
            except ValueError:
                pass # this happens on blank lines
        return infodict

    def is_compatible(self, info):
        '''
        Returns True if the plugin claims to be compatible with the
        current platform.

        :param info: The data returned from get_plugin_info()
        '''
        platforms = info.get('Platforms', [])
        if len(platforms) == 0:
            # No platform restriction declared: assume compatible.
            platforms = [sys.platform]
        for platform in platforms:
            if sys.platform.startswith(platform):
                return True
        return False

    def is_potentially_broken(self, info):
        '''
        Returns True if one of the modules that the plugin requires is
        not detected as available.

        :param info: The data returned from get_plugin_info()
        '''
        from gi.repository import GIRepository
        gir = GIRepository.Repository.get_default()
        modules = info.get('RequiredModules', [])
        for module in modules:
            # Entries may be prefixed, e.g. "gi:Gtk" for GObject introspection.
            pair = module.split(':', 1)
            if len(pair) > 1:
                prefix, module = pair
                if prefix == 'gi':
                    if not gir.enumerate_versions(module):
                        return True
            else:
                try:
                    mdata = imp.find_module(module)
                    if mdata[0] is not None:
                        mdata[0].close()
                except Exception:
                    return True
        return False

    def get_plugin_default_preferences(self, pluginname):
        """
        Returns the default preferences for a plugin
        """
        preflist = {}
        path = self.__findplugin(pluginname)
        plugin = imp.load_source(pluginname, os.path.join(path,'__init__.py'))
        try:
            preferences_pane = plugin.get_preferences_pane()
            # Collect every class on the pane exposing .name and .default.
            for c in dir(preferences_pane):
                attr = getattr(preferences_pane, c)
                if inspect.isclass(attr):
                    try:
                        preflist[attr.name] = attr.default
                    except AttributeError:
                        pass
        except AttributeError:
            pass
        return preflist

    def save_enabled(self):
        """Persist the set of enabled plugin names (no-op when load=False)."""
        if self.load:
            settings.set_option("plugins/enabled", self.enabled_plugins.keys())

    def load_enabled(self):
        """Enable every plugin recorded in settings, ignoring failures.

        NOTE(review): bare except silently skips plugins that fail to
        enable; enable_plugin already logged the exception.
        """
        to_enable = settings.get_option("plugins/enabled", [])
        for plugin in to_enable:
            try:
                self.enable_plugin(plugin)
            except:
                pass
# vim: et sts=4 sw=4
|
virtuald/exaile
|
xl/plugins.py
|
Python
|
gpl-2.0
| 9,679
|
import code
import unittest
import os
import pcbnew
import pdb
import tempfile
from pcbnew import *
class TestBoardClass(unittest.TestCase):
    """Smoke tests for the pcbnew BOARD class: footprint lookup, track
    counting, bounding-box computation, pad hit-testing and save/load
    round-tripping."""

    def setUp(self):
        # Fixture board shipped with the test data, plus metadata used
        # by the save/load round-trip test below.
        self.pcb = LoadBoard("data/complex_hierarchy.kicad_pcb")
        self.TITLE="Test Board"
        self.COMMENT1="For load/save test"
        # NOTE(review): tempfile.mktemp() is deprecated and race-prone;
        # tempfile.mkstemp() would be safer -- confirm before changing.
        self.FILENAME=tempfile.mktemp()+".kicad_pcb"

    def test_pcb_find_module(self):
        # FindModule() looks a footprint up by its reference designator.
        module = self.pcb.FindModule('P1')
        self.assertEqual(module.GetReference(),'P1')

    def test_pcb_get_track_count(self):
        # GetNumSegmTrack() must reflect the number of TRACK segments
        # added to an initially empty board.
        pcb = BOARD()
        self.assertEqual(pcb.GetNumSegmTrack(),0)
        track0 = TRACK(pcb)
        pcb.Add(track0)
        self.assertEqual(pcb.GetNumSegmTrack(),1)
        track1 = TRACK(pcb)
        pcb.Add(track1)
        self.assertEqual(pcb.GetNumSegmTrack(),2)

    def test_pcb_bounding_box(self):
        pcb = BOARD()
        track = TRACK(pcb)
        pcb.Add(track)
        #track.SetStartEnd(wxPointMM(10.0, 10.0),
        #                  wxPointMM(20.0, 30.0))
        track.SetStart(wxPointMM(10.0, 10.0))
        track.SetEnd(wxPointMM(20.0, 30.0))
        track.SetWidth(FromMM(0.5))
        #!!! THIS FAILS? == 0.0 x 0.0 ??
        #height, width = ToMM(pcb.ComputeBoundingBox().GetSize())
        bounding_box = pcb.ComputeBoundingBox()
        # NOTE(review): the unpack order (height, width) looks swapped
        # relative to wxSize's (width, height); the assertions below
        # compensate -- verify against the pcbnew bindings.
        height, width = ToMM(bounding_box.GetSize())
        # The track spans 10mm in x and 20mm in y; the box grows by half
        # the 0.5mm track width on each side.
        self.assertAlmostEqual(width, (30-10) + 0.5, 2)
        self.assertAlmostEqual(height, (20-10) + 0.5, 2)

    def test_pcb_get_pad(self):
        # GetPad() hit-tests a board position against pad geometry.
        pcb = BOARD()
        module = MODULE(pcb)
        pcb.Add(module)
        pad = D_PAD(module)
        module.Add(pad)
        # 2mm x 3mm oval pad centred on the origin.
        pad.SetShape(PAD_OVAL)
        pad.SetSize(wxSizeMM(2.0, 3.0))
        pad.SetPosition(wxPointMM(0,0))
        # easy case
        p1 = pcb.GetPad(wxPointMM(0,0))
        # top side
        p2 = pcb.GetPad(wxPointMM(0.9,0.0))
        # bottom side
        p3 = pcb.GetPad(wxPointMM(0,1.4))
        # TODO: get pad == p1 evaluated as true instead
        # of relying in the internal C++ object pointer
        self.assertEqual(pad.this, p1.this)
        self.assertEqual(pad.this, p2.this)
        self.assertEqual(pad.this, p3.this)

    def test_pcb_save_and_load(self):
        # Round trip: title-block data written by SaveBoard() must
        # survive LoadBoard().
        pcb = BOARD()
        pcb.GetTitleBlock().SetTitle(self.TITLE)
        pcb.GetTitleBlock().SetComment1(self.COMMENT1)
        result = SaveBoard(self.FILENAME,pcb)
        self.assertTrue(result)
        pcb2 = LoadBoard(self.FILENAME)
        self.assertNotEqual(pcb2,None)
        tb = pcb2.GetTitleBlock()
        self.assertEqual(tb.GetTitle(),self.TITLE)
        self.assertEqual(tb.GetComment1(),self.COMMENT1)
        os.remove(self.FILENAME)

    #def test_interactive(self):
    #    code.interact(local=locals())
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
johnbeard/kicad-git
|
qa/testcases/test_002_board_class.py
|
Python
|
gpl-2.0
| 2,834
|
# -*- coding: utf-8 -*-
# Copyright 2008-2014 Jaap Karssenberg <jaap.karssenberg@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
'''
This is the development documentation of zim.
B{NOTE:} There is also some generic development documentation in the
"HACKING" folder in the source distribution. Please also have a look
at that if you want to help with zim development.
In this API documentation many of the methods with names starting with
C{do_} and C{on_} are not documented. The reason is that these are
signal handlers that are not part of the external API. They act upon
a signal but should never be called directly by other objects.
Overview
========
The script C{zim.py} is a thin wrapper around the C{main()} function
defined in L{zim.main}. This main function constructs a C{Command}
object that implements a specific commandline command. The C{Command}
object then either connects to a running instance of zim, or executes
the application.
To execute the application, the command typically constructs a
C{Notebook} object, a C{PluginManager} and a C{ConfigManager}. Then
depending on the command the graphical interface is constructed, a
webserver is started or some other action is executed on the notebook.
The C{Notebook} object is found in L{zim.notebook} and implements the
API for accessing and storing pages, attachments and other data in
the notebook folder.
The notebook works together with an C{Index} object which keeps a
SQLite database of all the pages to speed up notebook access and allows
to e.g. show a list of pages in the side pane of the user interface.
Another aspect of the notebook is the parsing of the wiki text in the
pages such that it can be shown in the interface or exported to another
format. See L{zim.formats} for implementations of different parsers.
All classes related to configuration are located in L{zim.config}.
The C{ConfigManager} handles looking up config files and provides them
for all components.
Plugins are defined as sub-modules of L{zim.plugins}. The
C{PluginManager} manages the plugins that are loaded and objects that
can be extended by plugins.
The graphical user interface is implemented in the L{zim.gui} module
and it's sub-modules. The webinterface is implemented in L{zim.www}.
The graphical interface uses a background process to coordinate
between running instances, this is implemented in L{zim.ipc}.
Functionality for exporting content is implemented in L{zim.exporter}.
And search functionality can be found in L{zim.search}.
Many classes in zim have signals which allow other objects to connect
to and listen for specific events. This allows for an event-driven chain
of control, which is mainly used in the graphical interface, but is
also used elsewhere. If you are not familiar with event driven programs
please refer to a Gtk manual.
Infrastructure classes
----------------------
All functions and objects to interact with the file system can be
found in L{zim.fs}.
For executing external applications see L{zim.applications} or
L{zim.gui.applications}.
Some generic base classes and functions can be found in L{zim.utils}
@newfield signal: Signal, Signals
@newfield emits: Emits, Emits
@newfield implementation: Implementation
'''
# New epydoc fields defined above are intended as follows:
# @signal: signal-name (param1, param2): description
# @emits: signal
# @implementation: must implement / optional for sub-classes
# Bunch of meta data, used at least in the about dialog
__version__ = '0.62'
__url__='http://www.zim-wiki.org'
__author__ = 'Jaap Karssenberg <jaap.karssenberg@gmail.com>'
__copyright__ = 'Copyright 2008 - 2014 Jaap Karssenberg <jaap.karssenberg@gmail.com>'
__license__='''\
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
'''
import os
import sys
import gettext
import logging
import locale
logger = logging.getLogger('zim')
#: This parameter can be set by ./setup.py, can be e.g. "maemo"
PLATFORM = None

########################################################################
## Note: all init here must happen before importing any other zim
## modules, so can not use zim.fs utilities etc.
## therefore ZIM_EXECUTABLE is a string, not an object

## Check executable and relative data dir
## (sys.argv[0] should always be correct, even for compiled exe)
if os.name == "nt":
    # See notes in zim/fs.py about encoding expected by abspath
    # (Windows: decode argv to unicode *before* abspath)
    ZIM_EXECUTABLE = os.path.abspath(
        unicode(sys.argv[0], sys.getfilesystemencoding())
    )
else:
    # POSIX: abspath on the byte string first, then decode
    # (Python 2 byte-string paths)
    ZIM_EXECUTABLE = unicode(
        os.path.abspath(sys.argv[0]),
        sys.getfilesystemencoding()
    )
## Initialize locale (needed e.g. for natural_sort)
locale.setlocale(locale.LC_ALL, '')

## Initialize gettext (maybe make this optional later for module use ?)
if os.name == "nt" and not os.environ.get('LANG'):
    # Set locale config for gettext (other platforms have this by default)
    # Using LANG because it is lowest prio - do not override other params
    lang, enc = locale.getlocale()
    os.environ['LANG'] = lang + '.' + enc
    logging.info('Locale set to: %s', os.environ['LANG'])

# Prefer locale data shipped next to the executable (source checkout)
_localedir = os.path.join(os.path.dirname(ZIM_EXECUTABLE), 'locale')
if not os.name == "nt":
    # Encode to a byte-string path (Python 2 file system API)
    _localedir = _localedir.encode(sys.getfilesystemencoding())

if os.path.isdir(_localedir):
    # We are running from a source dir - use the locale data included there
    gettext.install('zim', _localedir, unicode=True, names=('_', 'gettext', 'ngettext'))
else:
    # Hope the system knows where to find the data
    gettext.install('zim', None, unicode=True, names=('_', 'gettext', 'ngettext'))
########################################################################
## Now we are allowed to import sub modules
import zim.environ # initializes environment parameters
import zim.config
# Check if we can find our own data files
_file = zim.config.data_file('zim.png')
if not (_file and _file.exists()): #pragma: no cover
    # Fail fast at import time with a hint about XDG_DATA_DIRS rather
    # than crashing later with an obscure missing-file error.
    raise AssertionError(
        'ERROR: Could not find data files in path: \n'
        '%s\n'
        'Try setting XDG_DATA_DIRS'
        % map(str, zim.config.data_dirs())
    )
def get_zim_revision():
    '''Returns multiline string with bazaar revision info, if any.
    Otherwise a string saying no info was found. Intended for debug
    logging.

    @returns: a (multiline) string, never raises
    '''
    try:
        # zim._version is generated at build time from the bzr branch;
        # it is absent when running from a plain source export.
        from zim._version import version_info
        return '''\
Zim revision is:
branch: %(branch_nick)s
revision: %(revno)s %(revision_id)s
date: %(date)s''' % version_info
    except ImportError:
        return 'No bzr version-info found'
|
fabricehong/zim-desktop
|
zim/__init__.py
|
Python
|
gpl-2.0
| 7,256
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
# PyQt4 includes for python bindings to QT
from PyQt4.QtCore import Qt, QString
from PyQt4.QtGui import QFont, QFontMetrics, QColor, QIcon, QLabel, QWidget, QVBoxLayout
from PyQt4.Qsci import QsciScintilla, QsciLexerPython
# Main
class EditorBase(QsciScintilla):
    """Scintilla editor widget preconfigured for editing Python source:
    fixed-pitch font, line-number margin, column-80 guide, code folding,
    brace matching and a Python lexer."""

    def __init__(self, mainwindow):
        QsciScintilla.__init__(self, mainwindow)
        self.mainwindow = mainwindow

        # Fixed-pitch editor font; its metrics size the number margin.
        editor_font = QFont()
        editor_font.setFamily("Consolas")
        editor_font.setFixedPitch(True)
        editor_font.setPointSize(10)
        metrics = QFontMetrics(editor_font)

        # Use the same font for the text area and the margins.
        self.setFont(editor_font)
        self.setMarginsFont(editor_font)

        # Margin 0 conventionally holds line numbers; make it wide
        # enough for five digits plus a little padding.
        self.setMarginWidth(0, metrics.width( "00000" ) + 5)
        self.setMarginLineNumbers(0, True)

        # Vertical guide line at column 80.
        self.setEdgeMode(QsciScintilla.EdgeLine)
        self.setEdgeColumn(80)
        self.setEdgeColor(QColor("#CCCCCC"))

        # Boxed fold markers and forgiving brace matching.
        self.setFolding(QsciScintilla.BoxedTreeFoldStyle)
        self.setBraceMatching(QsciScintilla.SloppyBraceMatch)

        ## Editing line color
        #self.setCaretLineVisible(True)
        #self.setCaretLineBackgroundColor(QColor("#CDA869"))

        # Dark line-number margin with light text.
        self.setMarginsBackgroundColor(QColor("#333333"))
        self.setMarginsForegroundColor(QColor("#CCCCCC"))
        # folding margin colors (foreground,background)
        #self.setFoldMarginColors(QColor("#99CC66"),QColor("#333300"))
        self.setFoldMarginColors(QColor("#CCCCCC"),QColor("#CCCCCC"))

        # Python syntax highlighting using the editor font.
        python_lexer = QsciLexerPython()
        python_lexer.setDefaultFont(editor_font)
        self.setLexer(python_lexer)
class EditorTab(object):
    """A dynamically created tab hosting an EditorBase with a file's
    contents.

    The tab is only inserted into the main window's tab widget when the
    file at ``filePath`` could be read; otherwise construction returns
    silently (original best-effort behavior preserved).
    """

    def __init__(self, mainwindow, filePath):
        self.mainwindow = mainwindow
        self.tabIcon = QIcon(":/Images/Images/cog.png")
        self.tabLabel = "Editor Dyn Tab"
        self.tab = QWidget(self.mainwindow)
        self.widgetLayout = QVBoxLayout(self.tab)
        self.widgetLayout.setAlignment(Qt.AlignTop)
        self.editorStatusLabel = QLabel(self.tab)
        self.editorStatusLabel.setAlignment(Qt.AlignCenter)
        self.editorStatusLabel.setObjectName("editorStatusLabel")
        self.editorStatusLabel.setText(QString("No files currently loaded..."))
        self.widgetLayout.addWidget(self.editorStatusLabel)
        self.editorStuff = EditorBase(self.mainwindow)
        self.widgetLayout.addWidget(self.editorStuff)
        try:
            # Context manager guarantees the handle is closed even when
            # reading fails (the original leaked the handle on a read
            # error and used a bare `except:` around open()).
            with open(filePath, 'r') as source_file:
                for line in source_file:
                    self.editorStuff.append(line)
        except (IOError, OSError):
            # File missing/unreadable: skip inserting the tab, as before.
            return
        self.editorStatusLabel.setText(QString(filePath))
        self.mainwindow.tabWidget.insertTab(0,self.tab,self.tabIcon,self.tabLabel)
        self.mainwindow.tabWidget.setCurrentIndex(0)
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_gui/util/editorbase.py
|
Python
|
gpl-2.0
| 3,269
|
# Wire-protocol versions (client<->sheep and sheep<->sheep).
SD_PROTO_VER = 0x02
SD_SHEEP_PROTO_VER = 0x0a

# Erasure-coding / replication limits.
SD_EC_MAX_STRIP = 16
SD_MAX_COPIES = SD_EC_MAX_STRIP * 2 - 1

# Request opcodes.
SD_OP_CREATE_AND_WRITE_OBJ = 0x01
SD_OP_READ_OBJ = 0x02
SD_OP_WRITE_OBJ = 0x03
SD_OP_REMOVE_OBJ = 0x04
SD_OP_DISCARD_OBJ = 0x05

SD_OP_NEW_VDI = 0x11
SD_OP_LOCK_VDI = 0x12
SD_OP_RELEASE_VDI = 0x13
SD_OP_GET_VDI_INFO = 0x14
SD_OP_READ_VDIS = 0x15
SD_OP_FLUSH_VDI = 0x16
SD_OP_DEL_VDI = 0x17
SD_OP_GET_CLUSTER_DEFAULT = 0x18

SD_OP_GET_OBJ_LIST = 0xA1
SD_OP_GET_EPOCH = 0xA2
SD_OP_CREATE_AND_WRITE_PEER = 0xA3
SD_OP_READ_PEER = 0xA4
SD_OP_WRITE_PEER = 0xA5
SD_OP_REMOVE_PEER = 0xA6
SD_OP_GET_VDI_COPIES = 0xAB
SD_OP_READ_DEL_VDIS = 0xC9

# Request flags.
# macros in the SD_FLAG_CMD_XXX group are mutually exclusive
SD_FLAG_CMD_WRITE = 0x01
SD_FLAG_CMD_COW = 0x02
SD_FLAG_CMD_CACHE = 0x04
SD_FLAG_CMD_DIRECT = 0x08 # don't use object cache
# return something back while sending something to sheep
SD_FLAG_CMD_PIGGYBACK = 0x10
SD_FLAG_CMD_TGT = 0x20

# Response/result codes.
SD_RES_SUCCESS = 0x00 # Success
SD_RES_UNKNOWN = 0x01 # Unknown error
SD_RES_NO_OBJ = 0x02 # No object found
SD_RES_EIO = 0x03 # I/O error
SD_RES_VDI_EXIST = 0x04 # VDI exists already
SD_RES_INVALID_PARMS = 0x05 # Invalid parameters
SD_RES_SYSTEM_ERROR = 0x06 # System error
SD_RES_VDI_LOCKED = 0x07 # VDI is locked
SD_RES_NO_VDI = 0x08 # No VDI found
SD_RES_NO_BASE_VDI = 0x09 # No base VDI found
SD_RES_VDI_READ = 0x0A # Cannot read requested VDI
SD_RES_VDI_WRITE = 0x0B # Cannot write requested VDI
SD_RES_BASE_VDI_READ = 0x0C # Cannot read base VDI
SD_RES_BASE_VDI_WRITE = 0x0D # Cannot write base VDI
SD_RES_NO_TAG = 0x0E # Requested tag is not found
SD_RES_STARTUP = 0x0F # Sheepdog is on starting up
SD_RES_VDI_NOT_LOCKED = 0x10 # VDI is not locked
SD_RES_SHUTDOWN = 0x11 # Sheepdog is shutting down
SD_RES_NO_MEM = 0x12 # Cannot allocate memory
SD_RES_FULL_VDI = 0x13 # we already have the maximum VDIs
SD_RES_VER_MISMATCH = 0x14 # Protocol version mismatch
SD_RES_NO_SPACE = 0x15 # Server has no room for new objects
SD_RES_WAIT_FOR_FORMAT = 0x16 # Sheepdog is waiting for a format operation
SD_RES_WAIT_FOR_JOIN = 0x17 # Sheepdog is waiting for other nodes joining
SD_RES_JOIN_FAILED = 0x18 # Target node had failed to join sheepdog
SD_RES_HALT = 0x19 # Sheepdog is stopped doing IO
SD_RES_READONLY = 0x1A # Object is read-only
# inode object in client is invalidated, refreshing is required
SD_RES_INODE_INVALIDATED = 0x1D

# Object ID rules
#
#  0 - 31 (32 bits): data object space
# 32 - 55 (24 bits): VDI object space
# 56 - 59 ( 4 bits): reserved VDI object space
# 60 - 63 ( 4 bits): object type identifier space
VDI_SPACE_SHIFT = 32
SD_VDI_MASK = 0x00FFFFFF00000000
VDI_BIT = 1 << 63
VMSTATE_BIT = 1 << 62
VDI_ATTR_BIT = 1 << 61
VDI_BTREE_BIT = 1 << 60
LEDGER_BIT = 1 << 59

# Size and naming limits.
OLD_MAX_DATA_OBJS = 1 << 20
MAX_DATA_OBJS = 1 << 32
SD_MAX_VDI_LEN = 256
SD_MAX_VDI_TAG_LEN = 256
SD_MAX_VDI_ATTR_KEY_LEN = 256
SD_MAX_VDI_ATTR_VALUE_LEN = 65536
SD_MAX_SNAPSHOT_TAG_LEN = 256
SD_NR_VDIS = 1 << 24
SD_DATA_OBJ_SIZE = 1 << 22
SD_OLD_MAX_VDI_SIZE = (SD_DATA_OBJ_SIZE * OLD_MAX_DATA_OBJS)
SD_MAX_VDI_SIZE = (SD_DATA_OBJ_SIZE * MAX_DATA_OBJS)
SD_DEFAULT_BLOCK_SIZE_SHIFT = 22
SD_LEDGER_OBJ_SIZE = 1 << 22
CURRENT_VDI_ID = 0
STORE_LEN = 16

# On-wire header sizes (bytes).
SD_REQ_SIZE = 48
SD_RSP_SIZE = 48

# VDI lock types.
LOCK_TYPE_NORMAL = 0
LOCK_TYPE_SHARED = 1 # for iSCSI multipath
def vid_to_vdi_oid(vid):
    """Map a VDI id to the object id of its VDI (inode) object.

    The id is placed in the VDI object space (bits 32-55) and the VDI
    type bit (bit 63) is set.
    """
    shifted = vid << VDI_SPACE_SHIFT
    return VDI_BIT | shifted
|
matsu777/sheepdog
|
tests/operation/proto.py
|
Python
|
gpl-2.0
| 3,562
|
##
# Copyright 2009-2018 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for SNPhylo, implemented as an easyblock
@authors: Ewan Higgs (HPC-UGent)
@authors: Kenneth Hoste (HPC-UGent)
"""
import os
import re
import shutil
import stat
from easybuild.framework.easyblock import EasyBlock
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import adjust_permissions, mkdir
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.run import run_cmd
class EB_SNPhylo(EasyBlock):
    """Support for building and installing SNPhylo."""

    def configure_step(self):
        """No configure step for SNPhylo."""
        pass

    def build_step(self):
        """No build step for SNPhylo; verify deps and run setup.sh instead."""
        # check for required dependencies
        for dep in ['MUSCLE', 'PHYLIP', 'Python', 'R']:
            if not get_software_root(dep):
                raise EasyBuildError("Required dependency '%s' not loaded", dep)
        # check for required R libraries
        rver = get_software_version('R')
        r_libs, _ = run_cmd("R --vanilla --no-save --slave -e 'print(installed.packages())'", simple=False)
        for rpkg in ['gdsfmt', 'getopt', 'SNPRelate', 'phangorn']:
            # installed.packages() output lines start with the package
            # name and also mention the R version they were built for
            if not re.search(r'^%s\s.*%s' % (rpkg, rver), r_libs, re.M):
                raise EasyBuildError("Required R package '%s' not installed", rpkg)
        # run setup.sh, and send a bunch of newlines as stdin to 'answer' the Q&A;
        # all questions can be answered with the default answer (if the dependencies are specified correctly);
        # using run_cmd_qa does not work because of buffering issues (questions are not coming through)
        adjust_permissions('setup.sh', stat.S_IXUSR, add=True)
        (out, _) = run_cmd('bash ./setup.sh', inp='\n' * 10, simple=False)
        success_msg = "SNPHYLO is successfully installed!!!"
        if success_msg not in out:
            raise EasyBuildError("Success message '%s' not found in setup.sh output: %s", success_msg, out)

    def install_step(self):
        """Install by copying files/directories."""
        bindir = os.path.join(self.installdir, 'bin')
        binfiles = ['snphylo.sh', 'snphylo.cfg', 'snphylo.template']
        try:
            mkdir(bindir, parents=True)
            for binfile in binfiles:
                shutil.copy2(os.path.join(self.builddir, binfile), bindir)
            shutil.copytree(os.path.join(self.builddir, 'scripts'), os.path.join(self.installdir, 'scripts'))
        except OSError as err:
            raise EasyBuildError("Failed to copy SNPhylo files/dirs: %s", err)

    def sanity_check_step(self):
        """Custom sanity check for SNPhylo: installed scripts must exist."""
        custom_paths = {
            'files': ['bin/snphylo.sh', 'bin/snphylo.cfg', 'bin/snphylo.template'],
            'dirs': ['scripts'],
        }
        super(EB_SNPhylo, self).sanity_check_step(custom_paths=custom_paths)
|
bartoldeman/easybuild-easyblocks
|
easybuild/easyblocks/s/snphylo.py
|
Python
|
gpl-2.0
| 3,939
|
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Cursor classes
"""
import sys
import weakref
import re
import itertools
from mysql.connector import constants
from mysql.connector import errors
from mysql.connector import utils
# Pre-compiled SQL helper patterns.
# RE_SQL_COMMENT previously used a non-raw string with "\/" escapes,
# which are invalid escape sequences (a warning on modern Python);
# a raw string with plain '/' matches identically.
RE_SQL_COMMENT = re.compile(r"/\*.*\*/")
# Captures the "(...)" group after VALUES, with %s / %(name)s markers.
RE_SQL_INSERT_VALUES = re.compile(
    r'VALUES\s*(\(\s*(?:%(?:\(.*\)|)s\s*(?:,|)\s*)+\))',
    re.I | re.M)
RE_SQL_INSERT_STMT = re.compile(r'INSERT\s+INTO', re.I)
# Splits on ';' only when it is outside quoted strings/identifiers.
RE_SQL_SPLIT_STMTS = re.compile(
    r''';(?=(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$)''')
class CursorBase(object):
    """Skeleton cursor defining the PEP-249 (DB API v2.0) surface.

    All operation methods are no-ops here; real behaviour is supplied
    by subclasses. Inherit from MySQLCursor rather than this class.
    """

    def __init__(self):
        # PEP-249 state: column description, produced/affected row
        # count, last AUTO_INCREMENT value and fetchmany() batch size.
        self._description = None
        self._rowcount = -1
        self._last_insert_id = None
        self.arraysize = 1

    def callproc(self, procname, args=()):
        pass

    def close(self):
        pass

    def execute(self, operation, params=()):
        pass

    def executemany(self, operation, seqparams):
        pass

    def fetchone(self):
        pass

    def fetchmany(self, size=1):
        pass

    def fetchall(self):
        pass

    def nextset(self):
        pass

    def setinputsizes(self, sizes):
        pass

    def setoutputsize(self, size, column=None):
        pass

    def reset(self):
        pass

    @property
    def description(self):
        """List of 8-tuples describing the result columns.

        Each tuple is (column_name, type, None, None, None, None,
        null_ok, column_flags); the last field is an addition to the
        PEP-249 specification.
        """
        return self._description

    @property
    def rowcount(self):
        """Number of rows produced (SELECT) or affected (DML).

        For non-buffered cursors this is -1 right after execution and
        is incremented as rows are fetched.
        """
        return self._rowcount

    @property
    def lastrowid(self):
        """Value generated for an AUTO_INCREMENT column, or None."""
        return self._last_insert_id
class MySQLCursor(CursorBase):
"""Default cursor for interacting with MySQL
This cursor will execute statements and handle the result. It will
not automatically fetch all rows.
MySQLCursor should be inherited whenever other functionallity is
required. An example would to change the fetch* member functions
to return dictionaries instead of lists of values.
Implements the Python Database API Specification v2.0 (PEP-249)
"""
def __init__(self, connection=None):
    """Initialize cursor state and optionally bind it to a connection.

    Raises errors.InterfaceError (errno 2048) via _set_connection()
    when the given connection is not a valid MySQLConnection.
    """
    CursorBase.__init__(self)
    self._connection = None
    self._stored_results = []     # MySQLCursorBuffered objects from callproc()
    self._nextrow = (None, None)  # one-row look-ahead used by _fetch_row()
    self._warnings = None
    self._warning_count = 0
    self._executed = None         # text of the last executed statement
    self._executed_list = []      # individual statements when multi=True
    if connection is not None:
        self._set_connection(connection)
def __iter__(self):
    """
    Iteration over the result set which calls self.fetchone()
    and returns the next row.
    """
    # iter(callable, sentinel): stops when fetchone() returns None
    return iter(self.fetchone, None)
def _set_connection(self, connection):
    """Store a weak proxy to the given connection.

    Touching _protocol validates that the object behaves like a
    MySQLConnection; otherwise InterfaceError(errno=2048) is raised.
    """
    try:
        self._connection = weakref.proxy(connection)
        self._connection._protocol
    except (AttributeError, TypeError):
        raise errors.InterfaceError(errno=2048)
def _reset_result(self):
    """Reset all per-statement cursor state before a new statement.

    Bug fix: the original assigned self._lastrowid, but the lastrowid
    property (defined on CursorBase) reads self._last_insert_id, so
    the previous statement's insert id leaked across executions. Both
    names are reset to stay compatible with any code that reads the
    old _lastrowid attribute.
    """
    self._rowcount = -1
    self._lastrowid = None
    self._last_insert_id = None
    self._nextrow = (None, None)
    self._stored_results = []
    self._warnings = None
    self._warning_count = 0
    self._description = None
    self._executed = None
    self._executed_list = []
    self.reset()
def _have_unread_result(self):
    """Check whether there is an unread result"""
    try:
        return self._connection.unread_result
    except AttributeError:
        # No (valid) connection bound to this cursor
        return False
def next(self):
    """
    Used for iterating over the result set. Calls self.fetchone()
    to get the next row.

    (Python 2 iterator protocol; Python 3 would require __next__.)
    """
    try:
        row = self.fetchone()
    except errors.InterfaceError:
        # No result set available: treat the iterator as exhausted
        raise StopIteration
    if not row:
        raise StopIteration
    return row
def close(self):
    """Close the cursor

    Returns True when successful, otherwise False (already closed).
    """
    if self._connection is None:
        return False
    self._reset_result()
    self._connection = None
    return True
def _process_params_dict(self, params):
    """Convert, escape and quote a dict of parameters (pyformat style).

    Returns a dict mapping the original keys to MySQL-ready values.
    Raises errors.ProgrammingError when any value fails to convert.
    """
    try:
        to_mysql = self._connection.converter.to_mysql
        escape = self._connection.converter.escape
        quote = self._connection.converter.quote
        res = {}
        for k,v in params.items():
            # convert -> escape -> quote, in that order
            c = v
            c = to_mysql(c)
            c = escape(c)
            c = quote(c)
            res[k] = c
    except StandardError, e:
        raise errors.ProgrammingError(
            "Failed processing pyformat-parameters; %s" % e)
    else:
        return res
    return None  # unreachable; kept as in the original
def _process_params(self, params):
    """
    Process the parameters which were given when self.execute() was
    called. It does following using the MySQLConnection converter:
    * Convert Python types to MySQL types
    * Escapes characters required for MySQL.
    * Quote values when needed.

    Returns a tuple, or a dict for pyformat-style parameters.
    """
    if isinstance(params,dict):
        # pyformat style (%(name)s): delegate to the dict variant
        return self._process_params_dict(params)
    try:
        # convert -> escape -> quote each positional parameter
        res = params
        res = map(self._connection.converter.to_mysql,res)
        res = map(self._connection.converter.escape,res)
        res = map(self._connection.converter.quote,res)
    except StandardError, e:
        raise errors.ProgrammingError(
            "Failed processing format-parameters; %s" % e)
    else:
        return tuple(res)
    return None  # unreachable; kept as in the original
def _row_to_python(self, rowdata, desc=None):
    """Convert a raw MySQL row to a tuple of Python values.

    Uses the connection's converter together with the column
    description (self.description when desc is not given).
    Raises errors.InterfaceError on conversion failure.
    """
    res = ()
    try:
        if not desc:
            desc = self.description
        for idx,v in enumerate(rowdata):
            flddsc = desc[idx]
            res += (self._connection.converter.to_python(flddsc, v),)
    except StandardError, e:
        raise errors.InterfaceError(
            "Failed converting row to Python types; %s" % e)
    else:
        return res
    return None  # unreachable; kept as in the original
def _handle_noresultset(self, res):
    """Handles result of execute() when there is no result set

    res is an OK-packet dict; its affected_rows, insert_id and
    warning_count are copied into the cursor state. Warnings are
    fetched eagerly when connection.get_warnings is enabled.
    """
    try:
        self._rowcount = res['affected_rows']
        self._last_insert_id = res['insert_id']
        self._warning_count = res['warning_count']
    except (KeyError, TypeError), err:
        raise errors.ProgrammingError(
            "Failed handling non-resultset; %s" % err)
    if self._connection.get_warnings is True and self._warning_count:
        self._warnings = self._fetch_warnings()
def _handle_resultset(self):
    """Hook for subclasses (e.g. buffered cursors); no-op here."""
    pass
def _handle_result(self, result):
    """
    Handle the result after a command was send. The result can be either
    an OK-packet or a dictionary containing column/eof information.

    Raises InterfaceError when result is not a dict() or result is
    invalid.
    """
    if not isinstance(result, dict):
        raise errors.InterfaceError('Result was not a dict()')
    if 'columns' in result:
        # Weak test, must be column/eof information
        self._description = result['columns']
        self._connection.unread_result = True
        self._handle_resultset()
    elif 'affected_rows' in result:
        # Weak test, must be an OK-packet
        self._connection.unread_result = False
        self._handle_noresultset(result)
    else:
        raise errors.InterfaceError('Invalid result')
def _execute_iter(self, query_iter):
    """Generator returns MySQLCursor objects for multiple statements

    This method is only used when multiple statements are executed
    by the execute() method. It uses itertools.izip to iterate over the
    given query_iter (result of MySQLConnection.cmd_query_iter()) and
    the list of statements that were executed.

    Yields a MySQLCursor instance (this cursor, re-initialized per
    statement).
    """
    if not self._executed_list:
        # Split the combined statement text back into individual ones
        self._executed_list = RE_SQL_SPLIT_STMTS.split(self._executed)
    for result, stmt in itertools.izip(query_iter,
                                       iter(self._executed_list)):
        self._reset_result()
        self._handle_result(result)
        # statement() property then reports the current statement
        self._executed = stmt
        yield self
def execute(self, operation, params=None, multi=False):
    """Executes the given operation

    Executes the given operation substituting any markers with
    the given parameters.

    For example, getting all rows where id is 5:
      cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))

    The multi argument should be set to True when executing multiple
    statements in one operation. If not set and multiple results are
    found, an InterfaceError will be raised.

    If warnings where generated, and connection.get_warnings is True, then
    self._warnings will be a list containing these warnings.

    Returns an iterator when multi is True, otherwise None.
    """
    if not operation:
        return
    if self._have_unread_result():
        raise errors.InternalError("Unread result found.")
    self._reset_result()
    stmt = ''
    try:
        # Encode unicode statements using the connection's charset
        if isinstance(operation, unicode):
            operation = operation.encode(self._connection.charset)
    except (UnicodeDecodeError, UnicodeEncodeError), e:
        raise errors.ProgrammingError(str(e))
    if params is not None:
        try:
            # Interpolate converted/escaped/quoted parameter values
            stmt = operation % self._process_params(params)
        except TypeError:
            raise errors.ProgrammingError(
                "Wrong number of arguments during string formatting")
    else:
        stmt = operation
    if multi:
        self._executed = stmt
        self._executed_list = []
        # Caller must consume the returned generator of cursors
        return self._execute_iter(self._connection.cmd_query_iter(stmt))
    else:
        self._executed = stmt
        try:
            self._handle_result(self._connection.cmd_query(stmt))
        except errors.InterfaceError, err:
            # Give a clearer message when the statement actually
            # produced multiple results
            if self._connection._have_next_result:
                raise errors.InterfaceError(
                    "Use multi=True when executing multiple statements")
            raise
        return None
def executemany(self, operation, seq_params):
    """Execute the given operation multiple times

    The executemany() method will execute the operation iterating
    over the list of parameters in seq_params.

    Example: Inserting 3 new employees and their phone number
      data = [
          ('Jane','555-001'),
          ('Joe', '555-001'),
          ('John', '555-003')
      ]
      stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')"
      cursor.executemany(stmt, data)

    INSERT statements are optimized by batching the data, that is
    using the MySQL multiple rows syntax.

    Results are discarded. If they are needed, consider looping over
    data using the execute() method.
    """
    if not operation:
        return
    if self._have_unread_result():
        raise errors.InternalError("Unread result found.")
    elif len(RE_SQL_SPLIT_STMTS.split(operation)) > 1:
        raise errors.InternalError(
            "executemany() does not support multiple statements")
    # Optimize INSERTs by batching them
    if re.match(RE_SQL_INSERT_STMT,operation):
        # Strip comments before locating the VALUES(...) clause
        opnocom = re.sub(RE_SQL_COMMENT, '', operation)
        m = re.search(RE_SQL_INSERT_VALUES, opnocom)
        fmt = m.group(1)
        values = []
        for params in seq_params:
            values.append(fmt % self._process_params(params))
        # Replace the single VALUES(...) group with all row tuples
        operation = operation.replace(m.group(1), ','.join(values), 1)
        return self.execute(operation)
    rowcnt = 0
    try:
        for params in seq_params:
            self.execute(operation, params)
            if self.with_rows and self._have_unread_result():
                # Discard produced rows, as documented above
                self.fetchall()
            rowcnt += self._rowcount
    except (ValueError, TypeError), err:
        raise errors.InterfaceError(
            "Failed executing the operation; %s" % err)
    except:
        # Raise whatever execute() raises
        raise
    self._rowcount = rowcnt
def stored_results(self):
    """Returns an iterator for stored results

    This method returns an iterator over results which are stored when
    callproc() is called. The iterator will provide MySQLCursorBuffered
    instances.

    Returns an iterator.
    """
    return iter(self._stored_results)
def callproc(self, procname, args=()):
    """Calls a stored procedue with the given arguments

    The arguments will be set during this session, meaning
    they will be called like _<procname>__arg<nr> where
    <nr> is an enumeration (+1) of the arguments.

    Coding Example:
      1) Definining the Stored Routine in MySQL:
      CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT)
      BEGIN
        SET pProd := pFac1 * pFac2;
      END

      2) Executing in Python:
      args = (5,5,0) # 0 is to hold pprod
      cursor.callproc('multiply', args)
      print cursor.fetchone()

    The last print should output ('5', '5', 25L)

    Does not return a value, but a result set will be
    available when the CALL-statement execute successfully.
    Raises exceptions when something is wrong.
    """
    argfmt = "@_%s_arg%d"
    self._stored_results = []
    results = []
    try:
        procargs = self._process_params(args)
        argnames = []
        # Bind each argument to a session variable @_<proc>_arg<n>
        for idx,arg in enumerate(procargs):
            argname = argfmt % (procname, idx+1)
            argnames.append(argname)
            setquery = "SET %s=%%s" % argname
            self.execute(setquery, (arg,))
        call = "CALL %s(%s)" % (procname,','.join(argnames))
        # Collect every produced result set into buffered cursors
        for result in self._connection.cmd_query_iter(call):
            if 'columns' in result:
                tmp = MySQLCursorBuffered(self._connection._get_self())
                tmp._handle_result(result)
                results.append(tmp)
        if argnames:
            # Read the (possibly OUT-modified) session variables back
            select = "SELECT %s" % ','.join(argnames)
            self.execute(select)
            self._stored_results = results
            return self.fetchone()
        else:
            self._stored_results = results
            return ()
    except errors.Error:
        raise
    except StandardError, e:
        raise errors.InterfaceError(
            "Failed calling stored routine; %s" % e)
def getlastrowid(self):
    """Returns the value generated for an AUTO_INCREMENT column

    This method is kept for backward compatibility. Please use the
    property lastrowid instead.

    Returns a long value or None.
    """
    return self.lastrowid
def _fetch_warnings(self):
    """
    Fetch warnings doing a SHOW WARNINGS. Can be called after getting
    the result.

    Returns a result set or None when there were no warnings.
    Raises the exception mapped from the first warning when
    connection.raise_on_warnings is enabled.
    """
    res = []
    try:
        c = self._connection.cursor()
        cnt = c.execute("SHOW WARNINGS")
        res = c.fetchall()
        c.close()
    except StandardError, e:
        # Python 2 three-arg raise: keep the original traceback
        raise errors.InterfaceError, errors.InterfaceError(
            "Failed getting warnings; %s" % e), sys.exc_info()[2]
    if self._connection.raise_on_warnings is True:
        # NOTE(review): msg is built but never used; the exception is
        # derived from the first warning's code/text only.
        msg = '; '.join([ "(%s) %s" % (r[1],r[2]) for r in res])
        raise errors.get_mysql_exception(res[0][1],res[0][2])
    else:
        if len(res):
            return res
    return None
def _handle_eof(self, eof):
self._connection.unread_result = False
self._nextrow = (None, None)
self._warning_count = eof['warning_count']
if self._connection.get_warnings is True and eof['warning_count']:
self._warnings = self._fetch_warnings()
def _fetch_row(self):
if not self._have_unread_result():
return None
row = None
try:
if self._nextrow == (None, None):
(row, eof) = self._connection.get_row()
else:
(row, eof) = self._nextrow
if row:
(foo, eof) = self._nextrow = self._connection.get_row()
if eof is not None:
self._handle_eof(eof)
if self._rowcount == -1:
self._rowcount = 1
else:
self._rowcount += 1
if eof:
self._handle_eof(eof)
except:
raise
else:
return row
return None
def fetchwarnings(self):
return self._warnings
def fetchone(self):
row = self._fetch_row()
if row:
return self._row_to_python(row)
return None
def fetchmany(self,size=None):
res = []
cnt = (size or self.arraysize)
while cnt > 0 and self._have_unread_result():
cnt -= 1
row = self.fetchone()
if row:
res.append(row)
return res
def fetchall(self):
if not self._have_unread_result():
raise errors.InterfaceError("No result set to fetch from.")
res = []
(rows, eof) = self._connection.get_rows()
self._rowcount = len(rows)
for i in xrange(0,self.rowcount):
res.append(self._row_to_python(rows[i]))
self._handle_eof(eof)
return res
@property
def column_names(self):
"""Returns column names
This property returns the columns names as a tuple.
Returns a tuple.
"""
if not self.description:
return ()
return tuple( [d[0].decode('utf8') for d in self.description] )
@property
def statement(self):
"""Returns the executed statement
This property returns the executed statement. When multiple
statements were executed, the current statement in the iterator
will be returned.
"""
return self._executed.strip()
@property
def with_rows(self):
"""Returns whether the cursor could have rows returned
This property returns True when column descriptions are available
and possibly also rows, which will need to be fetched.
Returns True or False.
"""
if not self.description:
return False
return True
def __unicode__(self):
fmt = "MySQLCursor: %s"
if self._executed:
if len(self._executed) > 30:
res = fmt % (self._executed[:30] + '..')
else:
res = fmt % (self._executed)
else:
res = fmt % '(Nothing executed yet)'
return res
def __str__(self):
return repr(self.__unicode__())
class MySQLCursorBuffered(MySQLCursor):
    """Cursor which fetches rows within execute()"""

    def __init__(self, connection=None):
        MySQLCursor.__init__(self, connection)
        self._rows = None       # buffered rows of the current result set
        self._next_row = 0      # index of the next row fetchone() returns

    def _handle_resultset(self):
        """Read and buffer all rows of the pending result set."""
        (self._rows, eof) = self._connection.get_rows()
        self._rowcount = len(self._rows)
        self._handle_eof(eof)
        self._next_row = 0
        try:
            self._connection.unread_result = False
        except Exception:
            # NOTE(fix): was a bare "except:"; the connection may not expose
            # the attribute, and buffering has already completed either way.
            pass

    def reset(self):
        """Discard the buffered rows."""
        self._rows = None

    def _fetch_row(self):
        """Return the next buffered row, or None when the buffer is
        exhausted or no result set has been buffered."""
        try:
            row = self._rows[self._next_row]
        except (IndexError, TypeError):
            # NOTE(fix): was a bare "except:".  IndexError: past the last
            # row; TypeError: self._rows is None (no buffered result).
            return None
        self._next_row += 1
        return row

    def fetchall(self):
        """Return all buffered rows converted to Python types.

        Raises InterfaceError when there is no buffered result set.
        """
        if self._rows is None:
            raise errors.InterfaceError("No result set to fetch from.")
        res = [self._row_to_python(row) for row in self._rows]
        self._next_row = len(self._rows)
        return res

    def fetchmany(self, size=None):
        """Fetch up to *size* rows (default: cursor.arraysize)."""
        res = []
        cnt = (size or self.arraysize)
        while cnt > 0:
            cnt -= 1
            row = self.fetchone()
            if row:
                res.append(row)
        return res

    @property
    def with_rows(self):
        # A result set has been buffered, even if it is empty.
        return self._rows is not None
class MySQLCursorRaw(MySQLCursor):
    """Cursor returning rows exactly as received, skipping the conversion
    to Python types."""

    def fetchone(self):
        """Return the next raw row, or None when exhausted."""
        row = self._fetch_row()
        return row if row else None

    def fetchall(self):
        """Return all remaining raw rows.

        Raises InterfaceError when there is no result set to fetch from.
        """
        if not self._have_unread_result():
            raise errors.InterfaceError("No result set to fetch from.")
        (rows, eof) = self._connection.get_rows()
        self._rowcount = len(rows)
        self._handle_eof(eof)
        return rows
class MySQLCursorBufferedRaw(MySQLCursorBuffered):
    """Buffered cursor returning rows exactly as received, skipping the
    conversion to Python types."""

    def fetchone(self):
        """Return the next buffered raw row, or None when exhausted."""
        row = self._fetch_row()
        if row:
            return row
        return None

    def fetchall(self):
        """Return a copy of all buffered raw rows.

        Raises InterfaceError when there is no buffered result set.

        NOTE(fix): replaces the identity comprehension
        ``[r for r in self._rows]`` with ``list(...)`` — same copy,
        idiomatic and faster.
        """
        if self._rows is None:
            raise errors.InterfaceError("No result set to fetch from.")
        return list(self._rows)
|
mitchcapper/mythbox
|
resources/lib/mysql-connector-python/python2/mysql/connector/cursor.py
|
Python
|
gpl-2.0
| 24,598
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2011 Carlos Abalde <carlos.abalde@gmail.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os.path
import string
import urllib
import duplicity.backend
from duplicity.errors import BackendException
class GDocsBackend(duplicity.backend.Backend):
    """Connect to remote store using Google Google Documents List API"""

    # Resource id of the Docs root collection ('folder:root', URL-encoded).
    ROOT_FOLDER_ID = 'folder%3Aroot'
    # MIME type used for uploaded backup volumes.
    BACKUP_DOCUMENT_TYPE = 'application/binary'

    def __init__(self, parsed_url):
        """Authenticate against Google Docs and resolve — creating it when
        necessary — the destination folder hierarchy named by the URL path."""
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Google Data APIs libraries.
        try:
            global atom
            global gdata
            import atom.data
            import gdata.client
            import gdata.docs.client
            import gdata.docs.data
        except ImportError as e:
            raise BackendException("""\
Google Docs backend requires Google Data APIs Python Client Library (see http://code.google.com/p/gdata-python-client/).
Exception: %s""" % str(e))

        # Setup client instance.
        self.client = gdata.docs.client.DocsClient(source='duplicity $version')
        self.client.ssl = True
        self.client.http_client.debug = False
        self._authorize(parsed_url.username + '@' + parsed_url.hostname, self.get_password())

        # Fetch destination folder entry (and create hierarchy if required).
        # NOTE(fix): string.split(x, sep) is a long-deprecated alias of the
        # str method; use the method directly.
        folder_names = parsed_url.path[1:].split('/')
        parent_folder = None
        parent_folder_id = GDocsBackend.ROOT_FOLDER_ID
        for folder_name in folder_names:
            entries = self._fetch_entries(parent_folder_id, 'folder', folder_name)
            if entries is not None:
                if len(entries) == 1:
                    parent_folder = entries[0]
                elif len(entries) == 0:
                    folder = gdata.docs.data.Resource(type='folder', title=folder_name)
                    parent_folder = self.client.create_resource(folder, collection=parent_folder)
                else:
                    # Ambiguous: more than one folder with this name.
                    parent_folder = None
                if parent_folder:
                    parent_folder_id = parent_folder.resource_id.text
                else:
                    raise BackendException("Error while creating destination folder '%s'." % folder_name)
            else:
                raise BackendException("Error while fetching destination folder '%s'." % folder_name)
        self.folder = parent_folder

    def _put(self, source_path, remote_filename):
        """Upload source_path into the destination folder as
        remote_filename, replacing any existing document of that name."""
        self._delete(remote_filename)

        # Set uploader instance. Note that resumable uploads are required in order to
        # enable uploads for all file types.
        # (see http://googleappsdeveloper.blogspot.com/2011/05/upload-all-file-types-to-any-google.html)
        # NOTE(fix): renamed the local from "file" (shadowed the builtin).
        source_file = source_path.open()
        try:
            uploader = gdata.client.ResumableUploader(
                self.client, source_file,
                GDocsBackend.BACKUP_DOCUMENT_TYPE,
                os.path.getsize(source_file.name),
                chunk_size=gdata.client.ResumableUploader.DEFAULT_CHUNK_SIZE,
                desired_class=gdata.docs.data.Resource)
            if uploader:
                # Chunked upload.
                entry = gdata.docs.data.Resource(title=atom.data.Title(text=remote_filename))
                uri = self.folder.get_resumable_create_media_link().href + '?convert=false'
                entry = uploader.UploadFile(uri, entry=entry)
                if not entry:
                    raise BackendException("Failed to upload file '%s' to remote folder '%s'"
                                           % (source_path.get_filename(), self.folder.title.text))
            else:
                raise BackendException("Failed to initialize upload of file '%s' to remote folder '%s'"
                                       % (source_path.get_filename(), self.folder.title.text))
        finally:
            # NOTE(fix): the original used "assert not file.close()", which
            # is stripped under "python -O" (leaking the handle) and did not
            # close the file when the upload raised.
            source_file.close()

    def _get(self, remote_filename, local_path):
        """Download remote_filename from the destination folder into
        local_path.  Raises BackendException when the file is not found."""
        entries = self._fetch_entries(self.folder.resource_id.text,
                                      GDocsBackend.BACKUP_DOCUMENT_TYPE,
                                      remote_filename)
        if len(entries) == 1:
            entry = entries[0]
            self.client.DownloadResource(entry, local_path.name)
        else:
            raise BackendException("Failed to find file '%s' in remote folder '%s'"
                                   % (remote_filename, self.folder.title.text))

    def _list(self):
        """Return the titles of all backup documents in the folder."""
        entries = self._fetch_entries(self.folder.resource_id.text,
                                      GDocsBackend.BACKUP_DOCUMENT_TYPE)
        return [entry.title.text for entry in entries]

    def _delete(self, filename):
        """Permanently delete every document named filename in the folder."""
        entries = self._fetch_entries(self.folder.resource_id.text,
                                      GDocsBackend.BACKUP_DOCUMENT_TYPE,
                                      filename)
        for entry in entries:
            self.client.delete(entry.get_edit_link().href + '?delete=true', force=True)

    def _authorize(self, email, password, captcha_token=None, captcha_response=None):
        """Perform ClientLogin, retrying interactively on captcha
        challenges; raises BackendException on bad credentials."""
        try:
            self.client.client_login(email,
                                     password,
                                     source='duplicity $version',
                                     service='writely',
                                     captcha_token=captcha_token,
                                     captcha_response=captcha_response)
        except gdata.client.CaptchaChallenge as challenge:
            print('A captcha challenge in required. Please visit ' + challenge.captcha_url)
            answer = None
            while not answer:
                answer = raw_input('Answer to the challenge? ')
            self._authorize(email, password, challenge.captcha_token, answer)
        except gdata.client.BadAuthentication:
            raise BackendException(
                'Invalid user credentials given. Be aware that accounts '
                'that use 2-step verification require creating an application specific '
                'access code for using this Duplicity backend. Follow the instruction in '
                'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
                'and create your application-specific password to run duplicity backups.')

    def _fetch_entries(self, folder_id, type, title=None):
        """Return the resources of the given type (and optional exact
        title) directly contained in folder_id.

        The parameter name "type" shadows the builtin but is kept for
        interface compatibility.
        """
        # Build URI.
        uri = '/feeds/default/private/full/%s/contents' % folder_id
        if type == 'folder':
            uri += '/-/folder?showfolders=true'
        elif type == GDocsBackend.BACKUP_DOCUMENT_TYPE:
            uri += '?showfolders=false'
        else:
            uri += '?showfolders=true'
        if title:
            uri += '&title=' + urllib.quote(title) + '&title-exact=true'

        # Fetch entries.
        entries = self.client.get_all_resources(uri=uri)

        # When filtering by entry title, API is returning (don't know why) documents in other
        # folders (apart from folder_id) matching the title, so some extra filtering is required.
        if title:
            result = []
            for entry in entries:
                resource_type = entry.get_resource_type()
                if (not type) \
                   or (type == 'folder' and resource_type == 'folder') \
                   or (type == GDocsBackend.BACKUP_DOCUMENT_TYPE and resource_type != 'folder'):
                    if folder_id != GDocsBackend.ROOT_FOLDER_ID:
                        # Keep only entries actually contained in folder_id.
                        for link in entry.in_collections():
                            folder_entry = self.client.get_entry(link.href, None, None,
                                                                 desired_class=gdata.docs.data.Resource)
                            if folder_entry and (folder_entry.resource_id.text == folder_id):
                                result.append(entry)
                    elif len(entry.in_collections()) == 0:
                        result.append(entry)
        else:
            result = entries

        # Done!
        return result
""" gdata is an alternate way to access gdocs, currently 05/2015 lacking OAuth support """
duplicity.backend.register_backend('gdata+gdocs', GDocsBackend)
duplicity.backend.uses_netloc.extend(['gdata+gdocs'])
|
nils-tekampe/duplicity
|
duplicity/backends/gdocsbackend.py
|
Python
|
gpl-2.0
| 8,939
|
import pytest
from cfme.infrastructure import repositories
from utils.update import update
from utils.wait import TimedOutError, wait_for
@pytest.mark.tier(2)
@pytest.mark.meta(blockers=[1188427])
def test_repository_crud(soft_assert, random_string, request):
    """CRUD lifecycle test for an infrastructure repository.

    Creates a uniquely-named repository, verifies it exists, renames it
    (and back, only if the rename succeeded), then deletes it and waits
    for the deletion to take effect in the UI.
    """
    repo_name = 'Test Repo {}'.format(random_string)
    repo = repositories.Repository(repo_name, '//testhost/share/path')
    # Ensure cleanup even when an assertion fails mid-test.
    request.addfinalizer(repo.delete)
    # create
    repo.create()
    # read
    assert repo.exists
    # update
    with update(repo):
        repo.name = 'Updated {}'.format(repo_name)
    with soft_assert.catch_assert():
        assert repo.exists, 'Repository rename failed'
        # Only change the name back if renaming succeeded
        with update(repo):
            repo.name = repo_name
    # delete
    repo.delete()
    try:
        # Deletion is asynchronous; poll until the repository disappears.
        wait_for(lambda: not repo.exists)
    except TimedOutError:
        raise AssertionError('failed to delete repository')
|
lehinevych/cfme_tests
|
cfme/tests/infrastructure/test_repositories.py
|
Python
|
gpl-2.0
| 962
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import json
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum_xvg import transaction
from electrum_xvg.bitcoin import base_encode
from electrum_xvg.i18n import _
from electrum_xvg.plugins import run_hook
from util import *
dialogs = [] # Otherwise python randomly garbage collects the dialogs...
def show_transaction(tx, parent, desc=None, prompt_if_unsaved=False):
    """Create and show a TxDialog for *tx*, keeping a module-level
    reference so the dialog is not garbage collected while visible."""
    dialog = TxDialog(tx, parent, desc, prompt_if_unsaved)
    dialogs.append(dialog)
    dialog.show()
class TxDialog(QDialog):
    # Dialog showing the details of one transaction, with sign / broadcast /
    # save / QR actions enabled according to the transaction's state.

    def __init__(self, tx, parent, desc, prompt_if_unsaved):
        '''Transactions in the wallet will show their description.
        Pass desc to give a description for txs not yet in the wallet.
        '''
        self.tx = tx
        self.tx.deserialize()
        self.parent = parent
        self.wallet = parent.wallet
        self.prompt_if_unsaved = prompt_if_unsaved
        self.saved = False
        self.broadcast = False
        self.desc = desc
        QDialog.__init__(self)
        self.setMinimumWidth(600)
        self.setWindowTitle(_("Transaction"))
        vbox = QVBoxLayout()
        self.setLayout(vbox)
        vbox.addWidget(QLabel(_("Transaction ID:")))
        self.tx_hash_e = ButtonsLineEdit()
        qr_show = lambda: self.parent.show_qrcode(str(self.tx_hash_e.text()), 'Transaction ID')
        self.tx_hash_e.addButton(":icons/qrcode.png", qr_show, _("Show as QR code"))
        self.tx_hash_e.setReadOnly(True)
        vbox.addWidget(self.tx_hash_e)
        self.status_label = QLabel()
        vbox.addWidget(self.status_label)
        self.tx_desc = QLabel()
        vbox.addWidget(self.tx_desc)
        self.date_label = QLabel()
        vbox.addWidget(self.date_label)
        self.amount_label = QLabel()
        vbox.addWidget(self.amount_label)
        self.fee_label = QLabel()
        vbox.addWidget(self.fee_label)
        self.add_io(vbox)
        vbox.addStretch(1)
        self.sign_button = b = QPushButton(_("Sign"))
        b.clicked.connect(self.sign)
        self.broadcast_button = b = QPushButton(_("Broadcast"))
        b.clicked.connect(self.do_broadcast)
        self.save_button = b = QPushButton(_("Save"))
        b.clicked.connect(self.save)
        self.cancel_button = b = QPushButton(_("Close"))
        b.clicked.connect(self.close)
        b.setDefault(True)
        self.qr_button = b = QPushButton()
        b.setIcon(QIcon(":icons/qrcode.png"))
        b.clicked.connect(self.show_qr)
        self.copy_button = CopyButton(lambda: str(self.tx), self.parent.app)
        # Action buttons
        self.buttons = [self.sign_button, self.broadcast_button, self.cancel_button]
        # Transaction sharing buttons
        self.sharing_buttons = [self.copy_button, self.qr_button, self.save_button]
        # Let plugins add their own widgets/buttons before layout.
        run_hook('transaction_dialog', self)
        hbox = QHBoxLayout()
        hbox.addLayout(Buttons(*self.sharing_buttons))
        hbox.addStretch(1)
        hbox.addLayout(Buttons(*self.buttons))
        vbox.addLayout(hbox)
        self.update()

    def do_broadcast(self):
        # Broadcast via the main window; remember it so closeEvent() does
        # not warn about an unsaved transaction afterwards.
        self.parent.broadcast_transaction(self.tx, self.desc, parent=self)
        self.broadcast = True
        self.update()

    def closeEvent(self, event):
        # Warn before discarding a transaction that was neither saved nor
        # broadcast (only when opened with prompt_if_unsaved).
        if (self.prompt_if_unsaved and not self.saved and not self.broadcast
            and QMessageBox.question(
                self, _('Warning'),
                _('This transaction is not saved. Close anyway?'),
                QMessageBox.Yes | QMessageBox.No) == QMessageBox.No):
            event.ignore()
        else:
            event.accept()
            dialogs.remove(self)

    def show_qr(self):
        # Base-43 keeps the QR payload compatible with alphanumeric mode.
        text = self.tx.raw.decode('hex')
        text = base_encode(text, base=43)
        try:
            self.parent.show_qrcode(text, 'Transaction')
        except Exception as e:
            self.show_message(str(e))

    def sign(self):
        def sign_done(success):
            # Re-enable the button and require a fresh save after signing.
            self.sign_button.setDisabled(False)
            self.prompt_if_unsaved = True
            self.saved = False
            self.update()
        self.sign_button.setDisabled(True)
        cancelled, ret = self.parent.sign_tx(self.tx, sign_done, parent=self)
        if cancelled:
            self.sign_button.setDisabled(False)

    def save(self):
        # Serialize the transaction to a .txn JSON file chosen by the user.
        name = 'signed_%s.txn' % (self.tx.hash()[0:8]) if self.tx.is_complete() else 'unsigned.txn'
        fileName = self.parent.getSaveFileName(_("Select where to save your signed transaction"), name, "*.txn")
        if fileName:
            with open(fileName, "w+") as f:
                f.write(json.dumps(self.tx.as_dict(), indent=4) + '\n')
            self.show_message(_("Transaction saved successfully"))
            self.saved = True

    def update(self):
        # Refresh status/amount/fee labels from the wallet's view of this
        # transaction and show/hide the sign and broadcast buttons.
        is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(self.tx)
        tx_hash = self.tx.hash()
        desc = self.desc
        time_str = None
        self.broadcast_button.hide()
        if self.tx.is_complete():
            status = _("Signed")
            if tx_hash in self.wallet.transactions.keys():
                desc, is_default = self.wallet.get_label(tx_hash)
                conf, timestamp = self.wallet.get_confirmations(tx_hash)
                if timestamp:
                    time_str = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
                else:
                    time_str = _('Pending')
                status = _("%d confirmations")%conf
            else:
                self.broadcast_button.show()
                # cannot broadcast when offline
                if self.parent.network is None:
                    self.broadcast_button.setEnabled(False)
        else:
            s, r = self.tx.signature_count()
            status = _("Unsigned") if s == 0 else _('Partially signed') + ' (%d/%d)'%(s,r)
            tx_hash = _('Unknown');
        if self.wallet.can_sign(self.tx):
            self.sign_button.show()
        else:
            self.sign_button.hide()
        self.tx_hash_e.setText(tx_hash)
        if desc is None:
            self.tx_desc.hide()
        else:
            self.tx_desc.setText(_("Description") + ': ' + desc)
            self.tx_desc.show()
        self.status_label.setText(_('Status:') + ' ' + status)
        if time_str is not None:
            self.date_label.setText(_("Date: %s")%time_str)
            self.date_label.show()
        else:
            self.date_label.hide()
        # if we are not synchronized, we cannot tell
        if not self.wallet.up_to_date:
            return
        if is_relevant:
            if is_mine:
                if fee is not None:
                    self.amount_label.setText(_("Amount sent:")+' %s'% self.parent.format_amount(-v+fee) + ' ' + self.parent.base_unit())
                    self.fee_label.setText(_("Transaction fee")+': %s'% self.parent.format_amount(-fee) + ' ' + self.parent.base_unit())
                else:
                    self.amount_label.setText(_("Amount sent:")+' %s'% self.parent.format_amount(-v) + ' ' + self.parent.base_unit())
                    self.fee_label.setText(_("Transaction fee")+': '+ _("unknown"))
            else:
                self.amount_label.setText(_("Amount received:")+' %s'% self.parent.format_amount(v) + ' ' + self.parent.base_unit())
        else:
            self.amount_label.setText(_("Transaction unrelated to your wallet"))
        run_hook('transaction_dialog_update', self)

    def add_io(self, vbox):
        # Render inputs and outputs; wallet receive addresses are shown
        # green, change addresses yellow, external addresses unstyled.
        if self.tx.locktime > 0:
            vbox.addWidget(QLabel("LockTime: %d\n" % self.tx.locktime))
        vbox.addWidget(QLabel(_("Inputs")))
        ext = QTextCharFormat()
        rec = QTextCharFormat()
        rec.setBackground(QBrush(QColor("lightgreen")))
        rec.setToolTip(_("Wallet receive address"))
        chg = QTextCharFormat()
        chg.setBackground(QBrush(QColor("yellow")))
        chg.setToolTip(_("Wallet change address"))
        def text_format(addr):
            if self.wallet.is_mine(addr):
                return chg if self.wallet.is_change(addr) else rec
            return ext
        i_text = QTextEdit()
        i_text.setFont(QFont(MONOSPACE_FONT))
        i_text.setReadOnly(True)
        i_text.setMaximumHeight(100)
        cursor = i_text.textCursor()
        for x in self.tx.inputs:
            if x.get('is_coinbase'):
                cursor.insertText('coinbase')
            else:
                prevout_hash = x.get('prevout_hash')
                prevout_n = x.get('prevout_n')
                cursor.insertText(prevout_hash[0:8] + '...', ext)
                cursor.insertText(prevout_hash[-8:] + ":%-4d " % prevout_n, ext)
                addr = x.get('address')
                if addr == "(pubkey)":
                    # Resolve pay-to-pubkey inputs to a wallet address.
                    _addr = self.wallet.find_pay_to_pubkey_address(prevout_hash, prevout_n)
                    if _addr:
                        addr = _addr
                if addr is None:
                    addr = _('unknown')
                cursor.insertText(addr, text_format(addr))
            cursor.insertBlock()
        vbox.addWidget(i_text)
        vbox.addWidget(QLabel(_("Outputs")))
        o_text = QTextEdit()
        o_text.setFont(QFont(MONOSPACE_FONT))
        o_text.setReadOnly(True)
        o_text.setMaximumHeight(100)
        cursor = o_text.textCursor()
        for addr, v in self.tx.get_outputs():
            cursor.insertText(addr, text_format(addr))
            if v is not None:
                cursor.insertText('\t', ext)
                cursor.insertText(self.parent.format_amount(v, whitespaces = True), ext)
            cursor.insertBlock()
        vbox.addWidget(o_text)

    def show_message(self, msg):
        QMessageBox.information(self, _('Message'), msg, _('OK'))
|
harwee/electrum-xvg-tor
|
gui/qt/transaction_dialog.py
|
Python
|
gpl-3.0
| 10,558
|
"""
Support for plotting vector fields.
Presently this contains Quiver and Barb. Quiver plots an arrow in the
direction of the vector, with the size of the arrow related to the
magnitude of the vector.
Barbs are like quiver in that they point along a vector, but
the magnitude of the vector is given schematically by the presence of barbs
or flags on the barb.
This will also become a home for things such as standard
deviation ellipses, which can and will be derived very easily from
the Quiver code.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import weakref
import numpy as np
from numpy import ma
import matplotlib.collections as mcollections
import matplotlib.transforms as transforms
import matplotlib.text as mtext
import matplotlib.artist as martist
from matplotlib.artist import allow_rasterization
from matplotlib import docstring
import matplotlib.font_manager as font_manager
import matplotlib.cbook as cbook
from matplotlib.cbook import delete_masked_points
from matplotlib.patches import CirclePolygon
import math
_quiver_doc = """
Plot a 2-D field of arrows.
Call signatures::
quiver(U, V, **kw)
quiver(U, V, C, **kw)
quiver(X, Y, U, V, **kw)
quiver(X, Y, U, V, C, **kw)
*U* and *V* are the arrow data, *X* and *Y* set the location of the
arrows, and *C* sets the color of the arrows. These arguments may be 1-D or
2-D arrays or sequences.
If *X* and *Y* are absent, they will be generated as a uniform grid.
If *U* and *V* are 2-D arrays and *X* and *Y* are 1-D, and if ``len(X)`` and
``len(Y)`` match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
The default settings auto-scales the length of the arrows to a reasonable size.
To change this behavior see the *scale* and *scale_units* kwargs.
The defaults give a slightly swept-back arrow; to make the head a
triangle, make *headaxislength* the same as *headlength*. To make the
arrow more pointed, reduce *headwidth* or increase *headlength* and
*headaxislength*. To make the head smaller relative to the shaft,
scale down all the head parameters. You will probably do best to leave
minshaft alone.
*linewidths* and *edgecolors* can be used to customize the arrow
outlines.
Parameters
----------
X : 1D or 2D array, sequence, optional
The x coordinates of the arrow locations
Y : 1D or 2D array, sequence, optional
The y coordinates of the arrow locations
U : 1D or 2D array or masked array, sequence
The x components of the arrow vectors
V : 1D or 2D array or masked array, sequence
The y components of the arrow vectors
C : 1D or 2D array, sequence, optional
The arrow colors
units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ]
The arrow dimensions (except for *length*) are measured in multiples of
this unit.
'width' or 'height': the width or height of the axis
'dots' or 'inches': pixels or inches, based on the figure dpi
'x', 'y', or 'xy': respectively *X*, *Y*, or :math:`\\sqrt{X^2 + Y^2}`
in data units
The arrows scale differently depending on the units. For
'x' or 'y', the arrows get larger as one zooms in; for other
units, the arrow size is independent of the zoom state. For
'width or 'height', the arrow size increases with the width and
height of the axes, respectively, when the window is resized;
for 'dots' or 'inches', resizing does not change the arrows.
angles : [ 'uv' | 'xy' ], array, optional
Method for determining the angle of the arrows. Default is 'uv'.
'uv': the arrow axis aspect ratio is 1 so that
if *U*==*V* the orientation of the arrow on the plot is 45 degrees
counter-clockwise from the horizontal axis (positive to the right).
'xy': arrows point from (x,y) to (x+u, y+v).
Use this for plotting a gradient field, for example.
Alternatively, arbitrary angles may be specified as an array
of values in degrees, counter-clockwise from the horizontal axis.
Note: inverting a data axis will correspondingly invert the
arrows only with ``angles='xy'``.
scale : None, float, optional
Number of data units per arrow length unit, e.g., m/s per plot width; a
smaller scale parameter makes the arrow longer. Default is *None*.
If *None*, a simple autoscaling algorithm is used, based on the average
vector length and the number of vectors. The arrow length unit is given by
the *scale_units* parameter
scale_units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ], \
None, optional
If the *scale* kwarg is *None*, the arrow length unit. Default is *None*.
e.g. *scale_units* is 'inches', *scale* is 2.0, and
``(u,v) = (1,0)``, then the vector will be 0.5 inches long.
If *scale_units* is 'width'/'height', then the vector will be half the
width/height of the axes.
If *scale_units* is 'x' then the vector will be 0.5 x-axis
units. To plot vectors in the x-y plane, with u and v having
the same units as x and y, use
``angles='xy', scale_units='xy', scale=1``.
width : scalar, optional
Shaft width in arrow units; default depends on choice of units,
above, and number of vectors; a typical starting value is about
0.005 times the width of the plot.
headwidth : scalar, optional
Head width as multiple of shaft width, default is 3
headlength : scalar, optional
Head length as multiple of shaft width, default is 5
headaxislength : scalar, optional
Head length at shaft intersection, default is 4.5
minshaft : scalar, optional
Length below which arrow scales, in units of head length. Do not
set this to less than 1, or small arrows will look terrible!
Default is 1
minlength : scalar, optional
Minimum length as a multiple of shaft width; if an arrow length
is less than this, plot a dot (hexagon) of this diameter instead.
Default is 1.
pivot : [ 'tail' | 'mid' | 'middle' | 'tip' ], optional
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*.
color : [ color | color sequence ], optional
This is a synonym for the
:class:`~matplotlib.collections.PolyCollection` facecolor kwarg.
If *C* has been set, *color* has no effect.
Notes
-----
Additional :class:`~matplotlib.collections.PolyCollection`
keyword arguments:
%(PolyCollection)s
See Also
--------
quiverkey : Add a key to a quiver plot
""" % docstring.interpd.params
_quiverkey_doc = """
Add a key to a quiver plot.
Call signature::
quiverkey(Q, X, Y, U, label, **kw)
Arguments:
*Q*:
The Quiver instance returned by a call to quiver.
*X*, *Y*:
The location of the key; additional explanation follows.
*U*:
The length of the key
*label*:
A string with the length and units of the key
Keyword arguments:
*angle* = 0
The angle of the key arrow. Measured in degrees anti-clockwise from the
x-axis.
*coordinates* = [ 'axes' | 'figure' | 'data' | 'inches' ]
Coordinate system and units for *X*, *Y*: 'axes' and 'figure' are
normalized coordinate systems with 0,0 in the lower left and 1,1
in the upper right; 'data' are the axes data coordinates (used for
the locations of the vectors in the quiver plot itself); 'inches'
is position in the figure in inches, with 0,0 at the lower left
corner.
*color*:
overrides face and edge colors from *Q*.
*labelpos* = [ 'N' | 'S' | 'E' | 'W' ]
Position the label above, below, to the right, to the left of the
arrow, respectively.
*labelsep*:
Distance in inches between the arrow and the label. Default is
0.1
*labelcolor*:
defaults to default :class:`~matplotlib.text.Text` color.
*fontproperties*:
A dictionary with keyword arguments accepted by the
:class:`~matplotlib.font_manager.FontProperties` initializer:
*family*, *style*, *variant*, *size*, *weight*
Any additional keyword arguments are used to override vector
properties taken from *Q*.
The positioning of the key depends on *X*, *Y*, *coordinates*, and
*labelpos*. If *labelpos* is 'N' or 'S', *X*, *Y* give the position
of the middle of the key arrow. If *labelpos* is 'E', *X*, *Y*
positions the head, and if *labelpos* is 'W', *X*, *Y* positions the
tail; in either of these two cases, *X*, *Y* is somewhere in the
middle of the arrow+label key object.
"""
class QuiverKey(martist.Artist):
    """ Labelled arrow for use as a quiver plot scale key."""
    # Lookup tables mapping the labelpos kwarg ('N'/'S'/'E'/'W') to the
    # label alignment and the arrow pivot used when building the key.
    halign = {'N': 'center', 'S': 'center', 'E': 'left', 'W': 'right'}
    valign = {'N': 'bottom', 'S': 'top', 'E': 'center', 'W': 'center'}
    pivot = {'N': 'middle', 'S': 'middle', 'E': 'tip', 'W': 'tail'}

    def __init__(self, Q, X, Y, U, label, **kw):
        martist.Artist.__init__(self)
        self.Q = Q
        self.X = X
        self.Y = Y
        self.U = U
        self.angle = kw.pop('angle', 0)
        self.coord = kw.pop('coordinates', 'axes')
        self.color = kw.pop('color', None)
        self.label = label
        # labelsep is stored in inches and converted to pixels at the
        # current figure dpi; it is recomputed on dpi changes below.
        self._labelsep_inches = kw.pop('labelsep', 0.1)
        self.labelsep = (self._labelsep_inches * Q.ax.figure.dpi)
        # try to prevent closure over the real self
        weak_self = weakref.ref(self)
        def on_dpi_change(fig):
            self_weakref = weak_self()
            if self_weakref is not None:
                self_weakref.labelsep = (self_weakref._labelsep_inches*fig.dpi)
                self_weakref._initialized = False  # simple brute force update
                                                   # works because _init is
                                                   # called at the start of
                                                   # draw.
        self._cid = Q.ax.figure.callbacks.connect('dpi_changed',
                                                  on_dpi_change)
        self.labelpos = kw.pop('labelpos', 'N')
        self.labelcolor = kw.pop('labelcolor', None)
        self.fontproperties = kw.pop('fontproperties', dict())
        # Remaining kwargs are forwarded to the key's PolyCollection.
        self.kw = kw
        _fp = self.fontproperties
        # boxprops = dict(facecolor='red')
        self.text = mtext.Text(
            text=label,  # bbox=boxprops,
            horizontalalignment=self.halign[self.labelpos],
            verticalalignment=self.valign[self.labelpos],
            fontproperties=font_manager.FontProperties(**_fp))
        if self.labelcolor is not None:
            self.text.set_color(self.labelcolor)
        self._initialized = False
        # Draw just above the quiver itself.
        self.zorder = Q.zorder + 0.1

    def remove(self):
        """
        Overload the remove method
        """
        # Disconnect the dpi-change callback before the artist goes away.
        self.Q.ax.figure.callbacks.disconnect(self._cid)
        self._cid = None
        # pass the remove call up the stack
        martist.Artist.remove(self)

    __init__.__doc__ = _quiverkey_doc

    def _init(self):
        if True:  # not self._initialized:
            if not self.Q._initialized:
                self.Q._init()
            self._set_transform()
            # Temporarily override the quiver's pivot and Umask so the key
            # arrow is built exactly like the plot arrows, then restore.
            _pivot = self.Q.pivot
            self.Q.pivot = self.pivot[self.labelpos]
            # Hack: save and restore the Umask
            _mask = self.Q.Umask
            self.Q.Umask = ma.nomask
            self.verts = self.Q._make_verts(np.array([self.U]),
                                            np.zeros((1,)),
                                            self.angle)
            self.Q.Umask = _mask
            self.Q.pivot = _pivot
            kw = self.Q.polykw
            kw.update(self.kw)
            self.vector = mcollections.PolyCollection(
                self.verts,
                offsets=[(self.X, self.Y)],
                transOffset=self.get_transform(),
                **kw)
            if self.color is not None:
                self.vector.set_color(self.color)
            self.vector.set_transform(self.Q.get_transform())
            self.vector.set_figure(self.get_figure())
            self._initialized = True

    def _text_x(self, x):
        # Offset the label horizontally by labelsep for E/W placement.
        if self.labelpos == 'E':
            return x + self.labelsep
        elif self.labelpos == 'W':
            return x - self.labelsep
        else:
            return x

    def _text_y(self, y):
        # Offset the label vertically by labelsep for N/S placement.
        if self.labelpos == 'N':
            return y + self.labelsep
        elif self.labelpos == 'S':
            return y - self.labelsep
        else:
            return y

    @allow_rasterization
    def draw(self, renderer):
        # _init() is called on every draw (see the brute-force note above).
        self._init()
        self.vector.draw(renderer)
        x, y = self.get_transform().transform_point((self.X, self.Y))
        self.text.set_x(self._text_x(x))
        self.text.set_y(self._text_y(y))
        self.text.draw(renderer)
        self.stale = False

    def _set_transform(self):
        # Map the 'coordinates' kwarg to the corresponding transform.
        if self.coord == 'data':
            self.set_transform(self.Q.ax.transData)
        elif self.coord == 'axes':
            self.set_transform(self.Q.ax.transAxes)
        elif self.coord == 'figure':
            self.set_transform(self.Q.ax.figure.transFigure)
        elif self.coord == 'inches':
            self.set_transform(self.Q.ax.figure.dpi_scale_trans)
        else:
            raise ValueError('unrecognized coordinates')

    def set_figure(self, fig):
        martist.Artist.set_figure(self, fig)
        self.text.set_figure(fig)

    def contains(self, mouseevent):
        # Maybe the dictionary should allow one to
        # distinguish between a text hit and a vector hit.
        if (self.text.contains(mouseevent)[0] or
                self.vector.contains(mouseevent)[0]):
            return True, {}
        return False, {}

    quiverkey_doc = _quiverkey_doc
# This is a helper function that parses out the various combination of
# arguments for doing colored vector plots. Pulling it out here
# allows both Quiver and Barbs to use it
def _parse_args(*args):
X, Y, U, V, C = [None] * 5
args = list(args)
# The use of atleast_1d allows for handling scalar arguments while also
# keeping masked arrays
if len(args) == 3 or len(args) == 5:
C = np.atleast_1d(args.pop(-1))
V = np.atleast_1d(args.pop(-1))
U = np.atleast_1d(args.pop(-1))
if U.ndim == 1:
nr, nc = 1, U.shape[0]
else:
nr, nc = U.shape
if len(args) == 2: # remaining after removing U,V,C
X, Y = [np.array(a).ravel() for a in args]
if len(X) == nc and len(Y) == nr:
X, Y = [a.ravel() for a in np.meshgrid(X, Y)]
else:
indexgrid = np.meshgrid(np.arange(nc), np.arange(nr))
X, Y = [np.ravel(a) for a in indexgrid]
return X, Y, U, V, C
def _check_consistent_shapes(*arrays):
all_shapes = set(a.shape for a in arrays)
if len(all_shapes) != 1:
raise ValueError('The shapes of the passed in arrays do not match.')
class Quiver(mcollections.PolyCollection):
    """
    Specialized PolyCollection for arrows.
    The only API method is set_UVC(), which can be used
    to change the size, orientation, and color of the
    arrows; their locations are fixed when the class is
    instantiated. Possibly this method will be useful
    in animations.
    Much of the work in this class is done in the draw()
    method so that as much information as possible is available
    about the plot. In subsequent draw() calls, recalculation
    is limited to things that might have changed, so there
    should be no performance penalty from putting the calculations
    in the draw() method.
    """
    # Accepted values for the *pivot* keyword ('mid' is an alias of 'middle').
    _PIVOT_VALS = ('tail', 'mid', 'middle', 'tip')
    @docstring.Substitution(_quiver_doc)
    def __init__(self, ax, *args, **kw):
        """
        The constructor takes one required argument, an Axes
        instance, followed by the args and kwargs described
        by the following pylab interface documentation:
        %s
        """
        self.ax = ax
        X, Y, U, V, C = _parse_args(*args)
        self.X = X
        self.Y = Y
        # N x 2 array of arrow anchor points, also used as the offsets.
        self.XY = np.hstack((X[:, np.newaxis], Y[:, np.newaxis]))
        self.N = len(X)
        self.scale = kw.pop('scale', None)
        self.headwidth = kw.pop('headwidth', 3)
        self.headlength = float(kw.pop('headlength', 5))
        self.headaxislength = kw.pop('headaxislength', 4.5)
        self.minshaft = kw.pop('minshaft', 1)
        self.minlength = kw.pop('minlength', 1)
        self.units = kw.pop('units', 'width')
        self.scale_units = kw.pop('scale_units', None)
        self.angles = kw.pop('angles', 'uv')
        self.width = kw.pop('width', None)
        self.color = kw.pop('color', 'k')
        pivot = kw.pop('pivot', 'tail').lower()
        # validate pivot
        if pivot not in self._PIVOT_VALS:
            raise ValueError(
                'pivot must be one of {keys}, you passed {inp}'.format(
                    keys=self._PIVOT_VALS, inp=pivot))
        # normalize to 'middle'
        if pivot == 'mid':
            pivot = 'middle'
        self.pivot = pivot
        self.transform = kw.pop('transform', ax.transData)
        kw.setdefault('facecolors', self.color)
        kw.setdefault('linewidths', (0,))
        mcollections.PolyCollection.__init__(self, [], offsets=self.XY,
                                             transOffset=self.transform,
                                             closed=False,
                                             **kw)
        # Remember the remaining kwargs so QuiverKey can build a matching
        # PolyCollection for the key arrow.
        self.polykw = kw
        self.set_UVC(U, V, C)
        self._initialized = False
        self.keyvec = None
        self.keytext = None
        # try to prevent closure over the real self
        weak_self = weakref.ref(self)
        def on_dpi_change(fig):
            self_weakref = weak_self()
            if self_weakref is not None:
                self_weakref._new_UV = True # vertices depend on width, span
                # which in turn depend on dpi
                self_weakref._initialized = False # simple brute force update
                # works because _init is
                # called at the start of
                # draw.
        self._cid = self.ax.figure.callbacks.connect('dpi_changed',
                                                     on_dpi_change)
    def remove(self):
        """
        Overload the remove method
        """
        # disconnect the call back
        self.ax.figure.callbacks.disconnect(self._cid)
        self._cid = None
        # pass the remove call up the stack
        mcollections.PolyCollection.remove(self)
    def _init(self):
        """
        Initialization delayed until first draw;
        allow time for axes setup.
        """
        # It seems that there are not enough event notifications
        # available to have this work on an as-needed basis at present.
        if True: # not self._initialized:
            trans = self._set_transform()
            ax = self.ax
            # Axes extent expressed in arrow-width units; used for the
            # crude auto-scaling in _make_verts.
            sx, sy = trans.inverted().transform_point(
                (ax.bbox.width, ax.bbox.height))
            self.span = sx
            if self.width is None:
                sn = np.clip(math.sqrt(self.N), 8, 25)
                self.width = 0.06 * self.span / sn
            # _make_verts sets self.scale if not already specified
            if not self._initialized and self.scale is None:
                self._make_verts(self.U, self.V, self.angles)
            self._initialized = True
    def get_datalim(self, transData):
        """Return the bounding box of the arrow anchor points in data space."""
        trans = self.get_transform()
        transOffset = self.get_offset_transform()
        full_transform = (trans - transData) + (transOffset - transData)
        XY = full_transform.transform(self.XY)
        bbox = transforms.Bbox.null()
        bbox.update_from_data_xy(XY, ignore=True)
        return bbox
    @allow_rasterization
    def draw(self, renderer):
        """Recompute the arrow polygons, then draw the collection."""
        self._init()
        verts = self._make_verts(self.U, self.V, self.angles)
        self.set_verts(verts, closed=False)
        self._new_UV = False
        mcollections.PolyCollection.draw(self, renderer)
        self.stale = False
    def set_UVC(self, U, V, C=None):
        """Set the arrow components U, V and the optional color array C."""
        # We need to ensure we have a copy, not a reference
        # to an array that might change before draw().
        U = ma.masked_invalid(U, copy=True).ravel()
        V = ma.masked_invalid(V, copy=True).ravel()
        mask = ma.mask_or(U.mask, V.mask, copy=False, shrink=True)
        if C is not None:
            C = ma.masked_invalid(C, copy=True).ravel()
            mask = ma.mask_or(mask, C.mask, copy=False, shrink=True)
            if mask is ma.nomask:
                C = C.filled()
            else:
                C = ma.array(C, mask=mask, copy=False)
        # Fill masked entries with a harmless placeholder; the combined
        # mask is kept separately in self.Umask.
        self.U = U.filled(1)
        self.V = V.filled(1)
        self.Umask = mask
        if C is not None:
            self.set_array(C)
        self._new_UV = True
        self.stale = True
    def _dots_per_unit(self, units):
        """
        Return a scale factor for converting from units to pixels
        """
        ax = self.ax
        if units in ('x', 'y', 'xy'):
            if units == 'x':
                dx0 = ax.viewLim.width
                dx1 = ax.bbox.width
            elif units == 'y':
                dx0 = ax.viewLim.height
                dx1 = ax.bbox.height
            else: # 'xy' is assumed
                dxx0 = ax.viewLim.width
                dxx1 = ax.bbox.width
                dyy0 = ax.viewLim.height
                dyy1 = ax.bbox.height
                dx1 = np.hypot(dxx1, dyy1)
                dx0 = np.hypot(dxx0, dyy0)
            dx = dx1 / dx0
        else:
            if units == 'width':
                dx = ax.bbox.width
            elif units == 'height':
                dx = ax.bbox.height
            elif units == 'dots':
                dx = 1.0
            elif units == 'inches':
                dx = ax.figure.dpi
            else:
                raise ValueError('unrecognized units')
        return dx
    def _set_transform(self):
        """
        Sets the PolygonCollection transform to go
        from arrow width units to pixels.
        """
        dx = self._dots_per_unit(self.units)
        self._trans_scale = dx # pixels per arrow width unit
        trans = transforms.Affine2D().scale(dx)
        self.set_transform(trans)
        return trans
    def _angles_lengths(self, U, V, eps=1):
        """Return on-screen angles and lengths of the (U, V) vectors."""
        # Differencing transformed endpoints captures the effect of any
        # nonlinear axis scaling (e.g. log axes).
        xy = self.ax.transData.transform(self.XY)
        uv = np.hstack((U[:, np.newaxis], V[:, np.newaxis]))
        xyp = self.ax.transData.transform(self.XY + eps * uv)
        dxy = xyp - xy
        angles = np.arctan2(dxy[:, 1], dxy[:, 0])
        lengths = np.hypot(*dxy.T) / eps
        return angles, lengths
    def _make_verts(self, U, V, angles):
        """Compute per-arrow polygon vertices for components (U, V)."""
        uv = (U + V * 1j)
        str_angles = angles if isinstance(angles, six.string_types) else ''
        if str_angles == 'xy' and self.scale_units == 'xy':
            # Here eps is 1 so that if we get U, V by diffing
            # the X, Y arrays, the vectors will connect the
            # points, regardless of the axis scaling (including log).
            angles, lengths = self._angles_lengths(U, V, eps=1)
        elif str_angles == 'xy' or self.scale_units == 'xy':
            # Calculate eps based on the extents of the plot
            # so that we don't end up with roundoff error from
            # adding a small number to a large.
            eps = np.abs(self.ax.dataLim.extents).max() * 0.001
            angles, lengths = self._angles_lengths(U, V, eps=eps)
        if str_angles and self.scale_units == 'xy':
            a = lengths
        else:
            a = np.abs(uv)
        if self.scale is None:
            sn = max(10, math.sqrt(self.N))
            if self.Umask is not ma.nomask:
                amean = a[~self.Umask].mean()
            else:
                amean = a.mean()
            # crude auto-scaling
            # scale is typical arrow length as a multiple of the arrow width
            scale = 1.8 * amean * sn / self.span
        if self.scale_units is None:
            if self.scale is None:
                self.scale = scale
            widthu_per_lenu = 1.0
        else:
            if self.scale_units == 'xy':
                dx = 1
            else:
                dx = self._dots_per_unit(self.scale_units)
            widthu_per_lenu = dx / self._trans_scale
            if self.scale is None:
                self.scale = scale * widthu_per_lenu
        length = a * (widthu_per_lenu / (self.scale * self.width))
        X, Y = self._h_arrows(length)
        if str_angles == 'xy':
            theta = angles
        elif str_angles == 'uv':
            theta = np.angle(uv)
        else:
            theta = ma.masked_invalid(np.deg2rad(angles)).filled(0)
        theta = theta.reshape((-1, 1)) # for broadcasting
        # Rotate the horizontal template arrows by theta (complex
        # multiplication) and scale from width units to data units.
        xy = (X + Y * 1j) * np.exp(1j * theta) * self.width
        xy = xy[:, :, np.newaxis]
        XY = np.concatenate((xy.real, xy.imag), axis=2)
        if self.Umask is not ma.nomask:
            XY = ma.array(XY)
            XY[self.Umask] = ma.masked
            # This might be handled more efficiently with nans, given
            # that nans will end up in the paths anyway.
        return XY
    def _h_arrows(self, length):
        """ length is in arrow width units """
        # It might be possible to streamline the code
        # and speed it up a bit by using complex (x,y)
        # instead of separate arrays; but any gain would be slight.
        minsh = self.minshaft * self.headlength
        N = len(length)
        length = length.reshape(N, 1)
        # This number is chosen based on when pixel values overflow in Agg
        # causing rendering errors
        # length = np.minimum(length, 2 ** 16)
        np.clip(length, 0, 2 ** 16, out=length)
        # x, y: normal horizontal arrow
        x = np.array([0, -self.headaxislength,
                      -self.headlength, 0],
                     np.float64)
        x = x + np.array([0, 1, 1, 1]) * length
        y = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
        y = np.repeat(y[np.newaxis, :], N, axis=0)
        # x0, y0: arrow without shaft, for short vectors
        x0 = np.array([0, minsh - self.headaxislength,
                       minsh - self.headlength, minsh], np.float64)
        y0 = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
        # Index pattern traces the arrow outline (lower half is the
        # mirror image of the upper half).
        ii = [0, 1, 2, 3, 2, 1, 0, 0]
        X = x.take(ii, 1)
        Y = y.take(ii, 1)
        Y[:, 3:-1] *= -1
        X0 = x0.take(ii)
        Y0 = y0.take(ii)
        Y0[3:-1] *= -1
        shrink = length / minsh if minsh != 0. else 0.
        X0 = shrink * X0[np.newaxis, :]
        Y0 = shrink * Y0[np.newaxis, :]
        short = np.repeat(length < minsh, 8, axis=1)
        # Now select X0, Y0 if short, otherwise X, Y
        np.copyto(X, X0, where=short)
        np.copyto(Y, Y0, where=short)
        if self.pivot == 'middle':
            X -= 0.5 * X[:, 3, np.newaxis]
        elif self.pivot == 'tip':
            X = X - X[:, 3, np.newaxis] # numpy bug? using -= does not
            # work here unless we multiply
            # by a float first, as with 'mid'.
        elif self.pivot != 'tail':
            raise ValueError(("Quiver.pivot must have value in {{'middle', "
                              "'tip', 'tail'}} not {0}").format(self.pivot))
        tooshort = length < self.minlength
        if tooshort.any():
            # Use a heptagonal dot:
            th = np.arange(0, 8, 1, np.float64) * (np.pi / 3.0)
            x1 = np.cos(th) * self.minlength * 0.5
            y1 = np.sin(th) * self.minlength * 0.5
            X1 = np.repeat(x1[np.newaxis, :], N, axis=0)
            Y1 = np.repeat(y1[np.newaxis, :], N, axis=0)
            tooshort = np.repeat(tooshort, 8, 1)
            np.copyto(X, X1, where=tooshort)
            np.copyto(Y, Y1, where=tooshort)
        # Mask handling is deferred to the caller, _make_verts.
        return X, Y
    quiver_doc = _quiver_doc
# Shared docstring for Barbs and the pyplot barbs() wrapper; registered with
# docstring.interpd below so %(barbs_doc)s substitutions resolve.
_barbs_doc = r"""
Plot a 2-D field of barbs.
Call signatures::
barb(U, V, **kw)
barb(U, V, C, **kw)
barb(X, Y, U, V, **kw)
barb(X, Y, U, V, C, **kw)
Arguments:
*X*, *Y*:
The x and y coordinates of the barb locations
(default is head of barb; see *pivot* kwarg)
*U*, *V*:
Give the x and y components of the barb shaft
*C*:
An optional array used to map colors to the barbs
All arguments may be 1-D or 2-D arrays or sequences. If *X* and *Y*
are absent, they will be generated as a uniform grid. If *U* and *V*
are 2-D arrays but *X* and *Y* are 1-D, and if ``len(X)`` and ``len(Y)``
match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
*U*, *V*, *C* may be masked arrays, but masked *X*, *Y* are not
supported at present.
Keyword arguments:
*length*:
Length of the barb in points; the other parts of the barb
are scaled against this.
Default is 7.
*pivot*: [ 'tip' | 'middle' | float ]
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*. Default is 'tip'. Can
also be a number, which shifts the start of the barb that many
points from the origin.
*barbcolor*: [ color | color sequence ]
Specifies the color all parts of the barb except any flags. This
parameter is analagous to the *edgecolor* parameter for polygons,
which can be used instead. However this parameter will override
facecolor.
*flagcolor*: [ color | color sequence ]
Specifies the color of any flags on the barb. This parameter is
analagous to the *facecolor* parameter for polygons, which can be
used instead. However this parameter will override facecolor. If
this is not set (and *C* has not either) then *flagcolor* will be
set to match *barbcolor* so that the barb has a uniform color. If
*C* has been set, *flagcolor* has no effect.
*sizes*:
A dictionary of coefficients specifying the ratio of a given
feature to the length of the barb. Only those values one wishes to
override need to be included. These features include:
- 'spacing' - space between features (flags, full/half barbs)
- 'height' - height (distance from shaft to top) of a flag or
full barb
- 'width' - width of a flag, twice the width of a full barb
- 'emptybarb' - radius of the circle used for low magnitudes
*fill_empty*:
A flag on whether the empty barbs (circles) that are drawn should
be filled with the flag color. If they are not filled, they will
be drawn such that no color is applied to the center. Default is
False
*rounding*:
A flag to indicate whether the vector magnitude should be rounded
when allocating barb components. If True, the magnitude is
rounded to the nearest multiple of the half-barb increment. If
False, the magnitude is simply truncated to the next lowest
multiple. Default is True
*barb_increments*:
A dictionary of increments specifying values to associate with
different parts of the barb. Only those values one wishes to
override need to be included.
- 'half' - half barbs (Default is 5)
- 'full' - full barbs (Default is 10)
- 'flag' - flags (default is 50)
*flip_barb*:
Either a single boolean flag or an array of booleans. Single
boolean indicates whether the lines and flags should point
opposite to normal for all barbs. An array (which should be the
same size as the other data arrays) indicates whether to flip for
each individual barb. Normal behavior is for the barbs and lines
to point right (comes from wind barbs having these features point
towards low pressure in the Northern Hemisphere.) Default is
False
Barbs are traditionally used in meteorology as a way to plot the speed
and direction of wind observations, but can technically be used to
plot any two dimensional vector quantity. As opposed to arrows, which
give vector magnitude by the length of the arrow, the barbs give more
quantitative information about the vector magnitude by putting slanted
lines or a triangle for various increments in magnitude, as show
schematically below::
: /\ \\
: / \ \\
: / \ \ \\
: / \ \ \\
: ------------------------------
.. note the double \\ at the end of each line to make the figure
.. render correctly
The largest increment is given by a triangle (or "flag"). After those
come full lines (barbs). The smallest increment is a half line. There
is only, of course, ever at most 1 half line. If the magnitude is
small and only needs a single half-line and no full lines or
triangles, the half-line is offset from the end of the barb so that it
can be easily distinguished from barbs with a single full line. The
magnitude for the barb shown above would nominally be 65, using the
standard increments of 50, 10, and 5.
linewidths and edgecolors can be used to customize the barb.
Additional :class:`~matplotlib.collections.PolyCollection` keyword
arguments:
%(PolyCollection)s
""" % docstring.interpd.params
docstring.interpd.update(barbs_doc=_barbs_doc)
class Barbs(mcollections.PolyCollection):
    '''
    Specialized PolyCollection for barbs.
    The only API method is :meth:`set_UVC`, which can be used to
    change the size, orientation, and color of the arrows. Locations
    are changed using the :meth:`set_offsets` collection method.
    Possibly this method will be useful in animations.
    There is one internal function :meth:`_find_tails` which finds
    exactly what should be put on the barb given the vector magnitude.
    From there :meth:`_make_barbs` is used to find the vertices of the
    polygon to represent the barb based on this information.
    '''
    # This may be an abuse of polygons here to render what is essentially maybe
    # 1 triangle and a series of lines. It works fine as far as I can tell
    # however.
    @docstring.interpd
    def __init__(self, ax, *args, **kw):
        """
        The constructor takes one required argument, an Axes
        instance, followed by the args and kwargs described
        by the following pylab interface documentation:
        %(barbs_doc)s
        """
        self._pivot = kw.pop('pivot', 'tip')
        self._length = kw.pop('length', 7)
        barbcolor = kw.pop('barbcolor', None)
        flagcolor = kw.pop('flagcolor', None)
        self.sizes = kw.pop('sizes', dict())
        self.fill_empty = kw.pop('fill_empty', False)
        self.barb_increments = kw.pop('barb_increments', dict())
        self.rounding = kw.pop('rounding', True)
        self.flip = kw.pop('flip_barb', False)
        transform = kw.pop('transform', ax.transData)
        # Flagcolor and barbcolor provide convenience parameters for
        # setting the facecolor and edgecolor, respectively, of the barb
        # polygon. We also work here to make the flag the same color as the
        # rest of the barb by default
        if None in (barbcolor, flagcolor):
            kw['edgecolors'] = 'face'
            if flagcolor:
                kw['facecolors'] = flagcolor
            elif barbcolor:
                kw['facecolors'] = barbcolor
            else:
                # Set to facecolor passed in or default to black
                kw.setdefault('facecolors', 'k')
        else:
            kw['edgecolors'] = barbcolor
            kw['facecolors'] = flagcolor
        # Explicitly set a line width if we're not given one, otherwise
        # polygons are not outlined and we get no barbs
        if 'linewidth' not in kw and 'lw' not in kw:
            kw['linewidth'] = 1
        # Parse out the data arrays from the various configurations supported
        x, y, u, v, c = _parse_args(*args)
        self.x = x
        self.y = y
        xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
        # Make a collection
        barb_size = self._length ** 2 / 4 # Empirically determined
        mcollections.PolyCollection.__init__(self, [], (barb_size,),
                                             offsets=xy,
                                             transOffset=transform, **kw)
        self.set_transform(transforms.IdentityTransform())
        self.set_UVC(u, v, c)
    def _find_tails(self, mag, rounding=True, half=5, full=10, flag=50):
        '''
        Find how many of each of the tail pieces is necessary. Flag
        specifies the increment for a flag, barb for a full barb, and half for
        half a barb. Mag should be the magnitude of a vector (i.e., >= 0).
        This returns a tuple of:
        (*number of flags*, *number of barbs*, *half_flag*, *empty_flag*)
        *half_flag* is a boolean whether half of a barb is needed,
        since there should only ever be one half on a given
        barb. *empty_flag* flag is an array of flags to easily tell if
        a barb is empty (too low to plot any barbs/flags.
        '''
        # If rounding, round to the nearest multiple of half, the smallest
        # increment
        if rounding:
            # NOTE(review): relies on .astype(int) truncation of (mag/half +
            # 0.5) to implement round-half-up -- confirm for negative inputs.
            mag = half * (mag / half + 0.5).astype(int)
        num_flags = np.floor(mag / flag).astype(int)
        mag = np.mod(mag, flag)
        num_barb = np.floor(mag / full).astype(int)
        mag = np.mod(mag, full)
        half_flag = mag >= half
        empty_flag = ~(half_flag | (num_flags > 0) | (num_barb > 0))
        return num_flags, num_barb, half_flag, empty_flag
    def _make_barbs(self, u, v, nflags, nbarbs, half_barb, empty_flag, length,
                    pivot, sizes, fill_empty, flip):
        '''
        This function actually creates the wind barbs. *u* and *v*
        are components of the vector in the *x* and *y* directions,
        respectively.
        *nflags*, *nbarbs*, and *half_barb*, empty_flag* are,
        *respectively, the number of flags, number of barbs, flag for
        *half a barb, and flag for empty barb, ostensibly obtained
        *from :meth:`_find_tails`.
        *length* is the length of the barb staff in points.
        *pivot* specifies the point on the barb around which the
        entire barb should be rotated. Right now, valid options are
        'tip' and 'middle'. Can also be a number, which shifts the start
        of the barb that many points from the origin.
        *sizes* is a dictionary of coefficients specifying the ratio
        of a given feature to the length of the barb. These features
        include:
        - *spacing*: space between features (flags, full/half
        barbs)
        - *height*: distance from shaft of top of a flag or full
        barb
        - *width* - width of a flag, twice the width of a full barb
        - *emptybarb* - radius of the circle used for low
        magnitudes
        *fill_empty* specifies whether the circle representing an
        empty barb should be filled or not (this changes the drawing
        of the polygon).
        *flip* is a flag indicating whether the features should be flipped to
        the other side of the barb (useful for winds in the southern
        hemisphere).
        This function returns list of arrays of vertices, defining a polygon
        for each of the wind barbs. These polygons have been rotated to
        properly align with the vector direction.
        '''
        # These control the spacing and size of barb elements relative to the
        # length of the shaft
        spacing = length * sizes.get('spacing', 0.125)
        full_height = length * sizes.get('height', 0.4)
        full_width = length * sizes.get('width', 0.25)
        empty_rad = length * sizes.get('emptybarb', 0.15)
        # Controls y point where to pivot the barb.
        pivot_points = dict(tip=0.0, middle=-length / 2.)
        # Check for flip
        if flip:
            full_height = -full_height
        endx = 0.0
        # A numeric pivot shifts the staff start; otherwise look up the
        # named pivot ('tip' or 'middle').
        try:
            endy = float(pivot)
        except ValueError:
            endy = pivot_points[pivot.lower()]
        # Get the appropriate angle for the vector components. The offset is
        # due to the way the barb is initially drawn, going down the y-axis.
        # This makes sense in a meteorological mode of thinking since there 0
        # degrees corresponds to north (the y-axis traditionally)
        angles = -(ma.arctan2(v, u) + np.pi / 2)
        # Used for low magnitude. We just get the vertices, so if we make it
        # out here, it can be reused. The center set here should put the
        # center of the circle at the location(offset), rather than at the
        # same point as the barb pivot; this seems more sensible.
        circ = CirclePolygon((0, 0), radius=empty_rad).get_verts()
        if fill_empty:
            empty_barb = circ
        else:
            # If we don't want the empty one filled, we make a degenerate
            # polygon that wraps back over itself
            empty_barb = np.concatenate((circ, circ[::-1]))
        barb_list = []
        for index, angle in np.ndenumerate(angles):
            # If the vector magnitude is too weak to draw anything, plot an
            # empty circle instead
            if empty_flag[index]:
                # We can skip the transform since the circle has no preferred
                # orientation
                barb_list.append(empty_barb)
                continue
            poly_verts = [(endx, endy)]
            offset = length
            # Add vertices for each flag
            for i in range(nflags[index]):
                # The spacing that works for the barbs is a little to much for
                # the flags, but this only occurs when we have more than 1
                # flag.
                if offset != length:
                    offset += spacing / 2.
                poly_verts.extend(
                    [[endx, endy + offset],
                     [endx + full_height, endy - full_width / 2 + offset],
                     [endx, endy - full_width + offset]])
                offset -= full_width + spacing
            # Add vertices for each barb. These really are lines, but works
            # great adding 3 vertices that basically pull the polygon out and
            # back down the line
            for i in range(nbarbs[index]):
                poly_verts.extend(
                    [(endx, endy + offset),
                     (endx + full_height, endy + offset + full_width / 2),
                     (endx, endy + offset)])
                offset -= spacing
            # Add the vertices for half a barb, if needed
            if half_barb[index]:
                # If the half barb is the first on the staff, traditionally it
                # is offset from the end to make it easy to distinguish from a
                # barb with a full one
                if offset == length:
                    poly_verts.append((endx, endy + offset))
                    offset -= 1.5 * spacing
                poly_verts.extend(
                    [(endx, endy + offset),
                     (endx + full_height / 2, endy + offset + full_width / 4),
                     (endx, endy + offset)])
            # Rotate the barb according the angle. Making the barb first and
            # then rotating it made the math for drawing the barb really easy.
            # Also, the transform framework makes doing the rotation simple.
            poly_verts = transforms.Affine2D().rotate(-angle).transform(
                poly_verts)
            barb_list.append(poly_verts)
        return barb_list
    def set_UVC(self, U, V, C=None):
        """Set the barb components (and optional color array), rebuilding
        the barb polygons and offsets from the unmasked points."""
        self.u = ma.masked_invalid(U, copy=False).ravel()
        self.v = ma.masked_invalid(V, copy=False).ravel()
        if C is not None:
            c = ma.masked_invalid(C, copy=False).ravel()
            x, y, u, v, c = delete_masked_points(self.x.ravel(),
                                                 self.y.ravel(),
                                                 self.u, self.v, c)
            _check_consistent_shapes(x, y, u, v, c)
        else:
            x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                                              self.u, self.v)
            _check_consistent_shapes(x, y, u, v)
        magnitude = np.hypot(u, v)
        flags, barbs, halves, empty = self._find_tails(magnitude,
                                                       self.rounding,
                                                       **self.barb_increments)
        # Get the vertices for each of the barbs
        plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
                                      self._length, self._pivot, self.sizes,
                                      self.fill_empty, self.flip)
        self.set_verts(plot_barbs)
        # Set the color array
        if C is not None:
            self.set_array(c)
        # Update the offsets in case the masked data changed
        xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
        self._offsets = xy
        self.stale = True
    def set_offsets(self, xy):
        """
        Set the offsets for the barb polygons. This saves the offsets passed
        in and actually sets version masked as appropriate for the existing
        U/V data. *offsets* should be a sequence.
        ACCEPTS: sequence of pairs of floats
        """
        self.x = xy[:, 0]
        self.y = xy[:, 1]
        x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                                          self.u, self.v)
        _check_consistent_shapes(x, y, u, v)
        xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
        mcollections.PolyCollection.set_offsets(self, xy)
        self.stale = True
    # NOTE(review): this replaces the custom docstring written in
    # set_offsets above with the base-class one.
    set_offsets.__doc__ = mcollections.PolyCollection.set_offsets.__doc__
    barbs_doc = _barbs_doc
|
jonyroda97/redbot-amigosprovaveis
|
lib/matplotlib/quiver.py
|
Python
|
gpl-3.0
| 46,115
|
#!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
"""
Application ...
"""
# Copyright (C) 2017 by
# Emmanuel Desmontils <emmanuel.desmontils@univ-nantes.fr>
# Patricia Serrano-Alvarado <patricia.serrano-alvarado@univ-nantes.fr>
# All rights reserved.
# GPL v 2.0 license.
from queue import Empty
import multiprocessing as mp
import datetime as dt
import time
import csv
from tools.tools import now, fromISO, existFile
from tools.ssa import *
from rdflib import Variable, URIRef, Literal
from lxml import etree # http://lxml.de/index.html#documentation
from lib.bgp import serialize2string, egal, calcPrecisionRecall, canonicalize_sparql_bgp, serializeBGP
from collections import OrderedDict
from functools import reduce
#==================================================
# Integer tags identifying the kind of message exchanged between the
# SWEEP worker processes (entries, data, end-of-data, queries, BGPs).
SWEEP_IN_ENTRY = 1
SWEEP_IN_DATA = 2
SWEEP_IN_END = 3
SWEEP_IN_QUERY = 4
SWEEP_OUT_QUERY = 5
SWEEP_IN_BGP = 6
# NOTE(review): presumably toggles keeping every candidate BGP -- confirm
# against the consumers of this flag.
SWEEP_ALL_BGP = False
# Negative values are session/queue control messages, not data.
SWEEP_START_SESSION = -1
SWEEP_END_SESSION = -2
SWEEP_PURGE = -3
SWEEP_ENTRY_TIMEOUT = 0.8 # percentage of the gap
SWEEP_PURGE_TIMEOUT = 0.1 # percentage of the gap
# Debug switches (SWEEP_DEBUB_PR is misspelled but referenced as-is).
SWEEP_DEBUG_BGP_BUILD = False
SWEEP_DEBUB_PR = False
#==================================================
def toStr(s, p, o):
    """Serialize the three terms of a triple into one space-separated string."""
    return ' '.join(serialize2string(term) for term in (s, p, o))
def listToStr(l):
    """Serialize every term of *l*; returns a list of strings."""
    return [serialize2string(term) for term in l]
def mapValues(di, i, j):
    """Return *i* when the injected value *di* differs from *i* and *j* is a
    Variable; otherwise return None."""
    return i if (di != i and isinstance(j, Variable)) else None
def hashBGP(bgp):
    """Hash a BGP by concatenating its serialized triples (order-sensitive)."""
    parts = [toStr(s, p, o) + ' . ' for (s, p, o) in bgp]
    return hash(''.join(parts))
#==================================================
class TripplePattern(object):
    """A subject/predicate/object triple pattern.

    (The historical 'Tripple' spelling is kept: subclasses and callers
    reference the class under this name.)
    """
    def __init__(self, s, p, o):
        super(TripplePattern, self).__init__()
        (self.s, self.p, self.o) = (s, p, o)
    def equal(self, tp):
        """Compare this pattern with *tp* via the bgp.egal helper."""
        return egal(self.spo(), tp.spo())
    def toStr(self):
        """Serialize the triple as a single space-separated string."""
        return ' '.join(serialize2string(term) for term in self.spo())
    def toString(self):
        return self.toStr()
    def spo(self):
        """Return the (s, p, o) tuple."""
        return (self.s, self.p, self.o)
class TriplePatternQuery(TripplePattern):
    """A triple pattern enriched with query metadata: arrival time, client,
    and the sets of values observed for each position."""
    def __init__(self, s, p, o, time, client, sm, pm, om):
        # sm/pm/om: mapping sets (values already seen) for the subject,
        # predicate and object positions respectively.
        super(TriplePatternQuery, self).__init__(s, p, o)
        (self.time, self.client, self.sm, self.pm,
         self.om) = (time, client, sm, pm, om)
        # su/pu/ou: values of this TPQ already used for injections.
        # NOTE(review): populated by callers outside this class -- confirm.
        self.su = set()
        self.pu = set()
        self.ou = set()
    def isDump(self): # tp is <?s ?p ?o> ?
        return isinstance(self.s, Variable) and isinstance(self.p, Variable) and isinstance(self.o, Variable)
    def renameVars(self, i):
        """Rename each variable position to a canonical name derived from *i*."""
        if isinstance(self.s, Variable):
            self.s = Variable("s"+str(i).replace("-", "_"))
        if isinstance(self.p, Variable):
            self.p = Variable("p"+str(i).replace("-", "_"))
        if isinstance(self.o, Variable):
            self.o = Variable("o"+str(i).replace("-", "_"))
    def sign(self):
        """Return a signature hash that abstracts concrete variable names away."""
        if isinstance(self.s, Variable):
            s = Variable("s")
        else:
            s = self.s
        if isinstance(self.p, Variable):
            p = Variable("p")
        else:
            p = self.p
        if isinstance(self.o, Variable):
            o = Variable("o")
        else:
            o = self.o
        return hash(toStr(s, p, o))
    def nestedLoopOf2(self,baseTPQ):
        """Test whether this TPQ could be a nested-loop continuation of
        *baseTPQ*: returns (coverage, candidate {s,p,o} dict) where coverage
        is the number of positions explained by injections from baseTPQ."""
        # 1:sp->sp 2:so->so 3:s->s 4:so->os 5:s->o 6:o->s 8:po->po 9:p->p 10:sp->po
        # 0/1/2/3 ->(0/1/2/3,0/1/2/3,0/1/2/3)
        base = (None,baseTPQ.s,baseTPQ.p,baseTPQ.o)
        if self.s in baseTPQ.sm:
            if self.p in baseTPQ.pm: nl = ( 2 , (1,2,0) ) # sp->sp
            elif self.o in baseTPQ.om: nl = ( 2 , (1,0,3) ) # so->so
            else: nl = ( 1 , (1,0,0) ) # s->s
        elif self.s in baseTPQ.om:
            if self.o in baseTPQ.sm: nl = ( 2 , (3,0,1) ) # so->os
            elif self.p in baseTPQ.pm: nl = ( 2 , (3,2,0) ) # sp->po
            else: nl = ( 1 , (3,0,0) ) # s->o
        elif self.o in baseTPQ.sm: nl = ( 1 , (0,0,1) ) # o->s
        elif self.p in baseTPQ.pm:
            if self.o in baseTPQ.om: nl = ( 2 , (0,2,3) ) # po->po
            else: nl = ( 1 , (0,2,0) ) # p->p
        else: nl = ( 0 , (0,0,0) )
        (couv,injection) = nl
        # print('injection:',injection)
        # Replace injected constants by the corresponding base-TPQ terms;
        # doInjection is the module-level helper defined just below.
        return ( couv ,
                 { 's':doInjection(base,injection[0],self.s),
                   'p':doInjection(base,injection[1],self.p),
                   'o':doInjection(base,injection[2],self.o) } )
def doInjection(base, injection, val):
    """Pick base[injection] when *injection* designates a position (> 0);
    otherwise keep *val* unchanged."""
    return base[injection] if injection > 0 else val
#==================================================
class BasicGraphPattern:
    """A BGP under construction: TPQs from one client within a time gap."""
    def __init__(self, gap=None, tpq=None):
        self.tp_set = []
        self.input_set = set() # set of entry hashes, so the same entry is never added twice
        if gap is None:
            self.gap = dt.timedelta(minutes=1)
        else:
            self.gap = gap
        if tpq is None:
            self.birthTime = now()
            self.time = now()
            self.client = 'Unknown'
        else:
            self.birthTime = tpq.time
            self.time = tpq.time
            self.client = tpq.client
            self.add(tpq)
    def add(self, tpq, sgn=None):
        """Append *tpq* to the BGP and record its signature (or *sgn*)."""
        assert isinstance(
            tpq, TriplePatternQuery), "BasicGraphPattern.Add : Pb type TPQ"
        assert (self.client == 'Unknown') or (self.client ==
                                              tpq.client), "BasicGraphPattern.Add : client différent"
        assert tpq.time - self.time <= self.gap, "BasicGraphPattern.Add : TPQ pas dans le gap"
        # An 'Unknown' client means the BGP was created empty; adopt the
        # first TPQ's client and birth time.
        if self.client == 'Unknown':
            self.client = tpq.client
            self.birthTime = tpq.time
        self.time = tpq.time
        self.tp_set.append(tpq)
        if sgn is None:
            self.input_set.add(tpq.sign())
        else:
            self.input_set.add(sgn)
    def update(self, tp, ntpq):
        """Merge the mapping sets of *ntpq* into the existing pattern *tp*."""
        if SWEEP_DEBUG_BGP_BUILD:
            print('\t Déjà présent avec ', toStr(tp.s, tp.p, tp.o))
            print('\t MàJ des mappings')
            print( '\t\t ', listToStr(tp.sm), '+', listToStr(ntpq.sm),
                   '\n\t\t ', listToStr(tp.pm), '+', listToStr(ntpq.pm),
                   '\n\t\t ', listToStr(tp.om), '+', listToStr(ntpq.om) )
        tp.sm.update(ntpq.sm)
        tp.pm.update(ntpq.pm)
        tp.om.update(ntpq.om)
        self.input_set.add(ntpq.sign()) # NOTE: is this actually useful?
    def age(self):
        """Elapsed time since the last TPQ was added."""
        return now() - self.time
    def isOld(self):
        """True when the BGP has been idle for longer than the gap."""
        return self.age() > self.gap
    def toString(self):
        rep = ''
        for tpq in self.tp_set:
            rep += tpq.toStr() + " .\n "
        return rep
    def print(self, tab=''):
        print(tab, 'BGP:', self.client, ' at ', self.time)
        print(tab, self.toString())
    def canBeCandidate(self, tpq):
        """True when *tpq* comes from the same client, falls within the gap,
        and has not already been added to this BGP."""
        assert isinstance(
            tpq, TriplePatternQuery), "BasicGraphPattern.canBeCandidate : Pb type TPQ"
        return (tpq.client == self.client) and (tpq.time - self.time <= self.gap) and (tpq.sign() not in self.input_set)
    def findNestedLoop(self, ntpq):
        """Look for an existing TPQ whose results could have injected the
        constants of *ntpq* (nested-loop join detection)."""
        assert isinstance(
            ntpq, TriplePatternQuery), "BasicGraphPattern.findTP : Pb type TPQ"
        ref_couv = 0
        trouve = False
        fromTP = None
        candTP = None
        mapVal = (None, None, None)
        # Check whether a constant in the subject and/or object position is
        # an injection coming from an existing TPQ (via its results).
        for (_, tpq) in enumerate(self.tp_set):
            if SWEEP_DEBUG_BGP_BUILD:
                print('_____', '\n\t\t Comparaison de :',
                      ntpq.toStr(), '\n\t\t avec le TP :', tpq.toStr())
                print('\t\tbsm:', listToStr(tpq.sm), '\n\t\t\tbsu:', listToStr(tpq.su),
                      '\n\t\tbpm:', listToStr(tpq.pm), '\n\t\t\tbpu:', listToStr(tpq.pu),
                      '\n\t\tbom:', listToStr(tpq.om), '\n\t\t\tbou:', listToStr(tpq.ou),)
            (couv, d) = ntpq.nestedLoopOf2(tpq)
            # couv: number of mappings found (double-injection hypothesis)
            # d: flags which "constants" of ntpq are injections from tpq
            #    (replaced by the corresponding variable of tpq)
            if (couv > ref_couv):
                # Keep the case with the largest number of injections;
                # unclear whether several are actually possible...
                # Build the candidate TPQ.
                ctp = TriplePatternQuery(d['s'], d['p'], d['o'], ntpq.time, ntpq.client, ntpq.sm, ntpq.pm, ntpq.om)
                (inTP, _) = ctp.equal(tpq)
                if not(inTP):
                    trouve = True
                    ref_couv = couv
                    fromTP = tpq
                    candTP = ctp
                    mapVal = mapValues(d['s'],ntpq.s,tpq.s), mapValues(d['p'],ntpq.p,tpq.p), mapValues(d['o'],ntpq.o,tpq.o)
                    break
        # end for tpq
        return (trouve, candTP,fromTP, mapVal)
    def existTP(self, candtp,fromTP):
        """Check whether a pattern equal to *candtp* (other than *fromTP*)
        already exists in the BGP; returns (found, matching_tp)."""
        inTP = False
        tp = None
        # Maybe a similar TP has already been used for another value...
        # in that case there is no point in duplicating it.
        for tp in self.tp_set:
            (inTP, _) = candtp.equal(tp)
            if inTP:
                (inTP2, _) = fromTP.equal(tp)
                if not(inTP2):
                    break
                else:
                    inTP = False
        return (inTP, tp)
#==================================================
def processAgregator(in_queue, out_queue, val_queue, ctx):
    """Aggregate per-entry messages into complete TPQ tuples.

    Consumes (id, kind, payload) messages from *in_queue*: ENTRY creates an
    accumulator, DATA adds result mappings to it, END flushes it to
    *out_queue*.  Entries idle for longer than ``ctx.gap*SWEEP_ENTRY_TIMEOUT``
    are flushed by the periodic purge.  Terminates on a None message; on
    shutdown all pending entries are flushed and a None sentinel is forwarded
    to both downstream queues.
    """
    entry_timeout = ctx.gap * SWEEP_ENTRY_TIMEOUT
    purge_timeout = (ctx.gap * SWEEP_PURGE_TIMEOUT).total_seconds()
    currentTime = now()
    elist = dict()  # id -> (s, p, o, t, client, sm, pm, om)
    try:
        inq = in_queue.get()
        while inq is not None:
            (id, x, val) = inq
            if x == SWEEP_IN_ENTRY:
                (s, p, o, t, cl) = val
                currentTime = now()
                elist[id] = (s, p, o, currentTime, cl, set(), set(), set())
            elif x == SWEEP_IN_DATA:
                if id in elist:  # may be absent because it was purged
                    (s, p, o, t, _, sm, pm, om) = elist[id]
                    (xs, xp, xo) = val
                    currentTime = max(currentTime, t) + \
                        dt.timedelta(microseconds=1)
                    # record result bindings only for variable positions
                    if isinstance(s, Variable):
                        sm.add(xs)
                    if isinstance(p, Variable):
                        pm.add(xp)
                    if isinstance(o, Variable):
                        om.add(xo)
            elif x == SWEEP_IN_END:
                mss = elist.pop(id, None)
                if mss is not None:  # may be absent because it was purged
                    out_queue.put((id, mss))
            elif x == SWEEP_START_SESSION:
                currentTime = now()
                elist.clear()
                out_queue.put((id, SWEEP_START_SESSION))
            elif x == SWEEP_END_SESSION:
                currentTime = now()
                # BUGFIX: iterate over a snapshot — the original popped from
                # 'elist' while iterating it, which raises RuntimeError
                # ("dictionary changed size during iteration") in Python 3.
                for (eid, entry) in list(elist.items()):
                    out_queue.put((eid, entry))
                elist.clear()
                out_queue.put((id, SWEEP_END_SESSION))
            else:  # SWEEP_PURGE...
                out_queue.put((id, SWEEP_PURGE))
                # flush entries that are too old
                old = []
                for eid in elist:
                    (s, p, o, t, _, sm, pm, om) = elist[eid]
                    if (currentTime - t) > entry_timeout:
                        old.append(eid)
                for eid in old:
                    v = elist.pop(eid)
                    out_queue.put((eid, v))
            try:
                inq = in_queue.get(timeout=purge_timeout)
            except Empty:
                # no traffic: synthesize a purge tick
                currentTime = now()
                inq = (0, SWEEP_PURGE, None)
    except KeyboardInterrupt:
        # remaining entries are flushed in the finally clause, as at the end
        # of a session
        pass
    finally:
        # BUGFIX: same snapshot fix as above for the final flush
        for (eid, entry) in list(elist.items()):
            out_queue.put((eid, entry))
        elist.clear()
        out_queue.put(None)
        val_queue.put(None)
#==================================================
def processBGPDiscover(in_queue, out_queue, val_queue, ctx):
    """Group incoming TPQs into candidate BGPs via nested-loop detection.

    Each data message becomes a TriplePatternQuery that is matched against the
    open BGPs of the same client; a TPQ whose constants look like injections
    from an existing TPQ's results joins that BGP, otherwise it seeds a new
    one.  BGPs idle for longer than the gap are emitted on *out_queue* and
    *val_queue*.  Terminates on a None message.
    """
    gap = ctx.gap
    BGP_list = []  # currently open BGPs
    try:
        entry = in_queue.get()
        while entry != None:
            (id, val) = entry
            if val == SWEEP_PURGE:
                val_queue.put((SWEEP_PURGE, 0, None))
            elif val == SWEEP_START_SESSION:
                BGP_list.clear()
                out_queue.put(SWEEP_START_SESSION)
            elif val == SWEEP_END_SESSION:
                # flush every open BGP before acknowledging the end of session
                for bgp in BGP_list:
                    out_queue.put(bgp)
                    val_queue.put((SWEEP_IN_BGP, -1, bgp))
                BGP_list.clear()
                out_queue.put(SWEEP_END_SESSION)
            else:
                (s, p, o, time, client, sm, pm, om) = val
                new_tpq = TriplePatternQuery(s, p, o, time, client, sm, pm, om)
                # make variable names unique to this entry id
                new_tpq.renameVars(id)
                if SWEEP_DEBUG_BGP_BUILD:
                    print(
                        '============================================== ', id, ' ==============================================')
                    print('Etude de :', new_tpq.toStr())
                    print('|sm:', listToStr(new_tpq.sm), '\n|pm:',
                          listToStr(new_tpq.pm), '\n|om:', listToStr(new_tpq.om))
                if not(new_tpq.isDump()):
                    trouve = False
                    for (i, bgp) in enumerate(BGP_list):
                        if SWEEP_DEBUG_BGP_BUILD:
                            print(
                                '-----------------------------------\n\t Etude avec BGP ', i)
                            bgp.print('\t\t\t')
                        if bgp.canBeCandidate(new_tpq):
                            # same client, within the gap, and this exact TP
                            # has not already been used for this BGP
                            (trouve, candTP, fromTP, mapVal) = bgp.findNestedLoop(new_tpq)
                            if trouve:
                                # the new TPQ could have been produced by a
                                # nested loop... so test its "original form"
                                # 'candTP'
                                if SWEEP_DEBUG_BGP_BUILD:
                                    print('\t\t ok avec :', new_tpq.toStr(), ' sur ', mapVal,
                                          '\n\t\t |-> ', candTP.toStr())
                                (ok, tp) = bgp.existTP(candTP, fromTP)
                                if ok:
                                    # the form already exists: merge mappings —
                                    # but not into the TP that injected!
                                    bgp.update(tp, new_tpq)
                                else:  # it is a new TPQ of the BGP!
                                    if SWEEP_DEBUG_BGP_BUILD:
                                        print('\t\t Ajout de ', new_tpq.toStr(
                                        ), '\n\t\t avec ', candTP.toStr())
                                    bgp.add(candTP, new_tpq.sign())
                                    # record which values were injected
                                    (vs, vp, vo) = mapVal
                                    if vs is not None: fromTP.su.add(vs)
                                    if vp is not None: fromTP.pu.add(vp)
                                    if vo is not None: fromTP.ou.add(vo)
                                if ctx.optimistic:
                                    # keep the BGP alive on each matched TP
                                    bgp.time = time
                                break  # found a good one... stop searching!
                        else:
                            if (new_tpq.client == bgp.client) and (new_tpq.time - bgp.time <= gap):
                                if SWEEP_DEBUG_BGP_BUILD:
                                    print('\t\t Déjà ajouté')
                                pass
                    # end "for BGP"
                    # not found => new BGP
                    if not(trouve):
                        if SWEEP_DEBUG_BGP_BUILD:
                            print('\t Création de ', new_tpq.toStr(),
                                  '-> BGP ', len(BGP_list))
                        BGP_list.append(BasicGraphPattern(gap, new_tpq))
                # emit the BGPs that are too old!
                old = []
                recent = []
                for bgp in BGP_list:
                    if bgp.isOld():
                        old.append(bgp)
                    else:
                        recent.append(bgp)
                for bgp in old:
                    out_queue.put(bgp)
                    val_queue.put((SWEEP_IN_BGP, -1, bgp))
                BGP_list = recent
                ctx.nbBGP.value = len(BGP_list)
            entry = in_queue.get()
    except KeyboardInterrupt:
        # remaining BGPs are flushed in the finally clause
        pass
    finally:
        for bgp in BGP_list:
            out_queue.put(bgp)
            val_queue.put((SWEEP_IN_BGP, -1, bgp))
        BGP_list.clear()
        ctx.nbBGP.value = 0
        out_queue.put(None)
#==================================================
def testPrecisionRecallBGP(queryList, bgp, gap):
    """Try to assign *bgp* to the query in *queryList* it explains best.

    For every query of the same client whose arrival time is compatible with
    the BGP's birth time, computes precision/recall of the BGP against the
    query's reference BGP and keeps the best strictly-improving match.  When
    the chosen query already had a BGP, that displaced BGP is recursively
    re-assigned.  Returns the BGP left without a query (None when *bgp* or
    every displaced BGP found a home).

    Mutates *queryList* in place (replaces the matched entry).
    """
    best = 0  # 0 is never a real query id (ids start at 1)
    test = [(tp.s, tp.p, tp.o) for tp in bgp.tp_set]
    best_precision = 0
    best_recall = 0
    for i in queryList:
        ((time, ip, query, qbgp, queryID),
         old_bgp, precision, recall) = queryList[i]
        if SWEEP_DEBUB_PR:
            rep = ''
            for (s, p, o) in qbgp:
                rep += toStr(s, p, o) + ' . \n'
            print('comparing with query (%s) : ' % queryID, rep)
        if (ip == bgp.client) and (bgp.birthTime >= time) and (bgp.birthTime - time <= gap):
            (precision2, recall2, _, _) = calcPrecisionRecall(qbgp, test)
            # must beat the query's current association...
            if precision2 * recall2 > precision * recall:
                # ...and be the best candidate seen so far
                if precision2 * recall2 > best_precision * best_recall:
                    best = i
                    best_precision = precision2
                    best_recall = recall2
    if best > 0:
        ((time, ip, query, qbgp, queryID), old_bgp,
         precision, recall) = queryList[best]
        queryList[best] = ((time, ip, query, qbgp, queryID),
                           bgp, best_precision, best_recall)
        if SWEEP_DEBUB_PR:
            print('association:', queryID, best_precision, best_recall)
            bgp.print()
        # try to re-place the displaced BGP...
        if old_bgp is not None:
            return testPrecisionRecallBGP(queryList, old_bgp, gap)
        else:
            return None
    else:
        return bgp
def _archiveQuery(memoryQueue, ctx, id, entry, currentTime):
    """Send one finished query record to the memory process and update the
    shared statistics.  Shared by the purge path and the shutdown path of
    processValidation."""
    ((time, ip, query, qbgp, queryID), bgp, precision, recall) = entry
    if SWEEP_DEBUB_PR:
        print('--- purge ', queryID, '(', time, ') ---',
              precision, '/', recall, '---', ' @ ', currentTime, '---')
        print(query)
        print('---')
    #---
    # BUGFIX: guard the client check — 'bgp' is None for unassigned queries
    # and 'bgp.client' would raise AttributeError before the assert fired.
    assert bgp is None or ip == bgp.client, 'Client Query différent de client BGP'
    #---
    memoryQueue.put((4, (id, queryID, time, ip, query, qbgp, bgp, precision, recall)))
    ctx.stat['sumRecall'] += recall
    ctx.stat['sumPrecision'] += precision
    ctx.stat['sumQuality'] += (recall + precision) / 2
    if bgp is not None:
        if SWEEP_DEBUB_PR:
            # BUGFIX: tp_set holds TriplePatternQuery objects — the old purge
            # branch unpacked them as 5-tuples, which raised TypeError.
            print(".\n".join([tp.toStr() for tp in bgp.tp_set]))
        ctx.stat['sumSelectedBGP'] += 1
    else:
        if SWEEP_DEBUB_PR:
            print('Query not assigned')
    if SWEEP_DEBUB_PR:
        print('--- --- @' + ip + ' --- ---')
        print(' ')


def processValidation(in_queue, memoryQueue, ctx):
    """Pair discovered BGPs with client queries and archive the results.

    Consumes (mode, id, payload) messages: IN_QUERY registers a query,
    IN_BGP tries to associate a BGP with the best-matching open query,
    OUT_QUERY withdraws a query (re-assigning its BGP if any), and PURGE
    archives queries older than twice the gap.  Finished records are
    forwarded to *memoryQueue*.  Terminates on a None message.
    """
    valGap = ctx.gap * 2  # a query stays open for twice the gap
    gap = ctx.gap
    currentTime = now()
    # id -> ((time, ip, query, qbgp, queryID), bgp, precision, recall)
    queryList = OrderedDict()
    try:
        inq = in_queue.get()
        while inq is not None:
            (mode, id, val) = inq
            if mode == SWEEP_IN_QUERY:
                with ctx.lck:
                    ctx.stat['nbQueries'] += 1
                (time, ip, query, qbgp, queryID) = val
                currentTime = now()
                if SWEEP_DEBUB_PR:
                    print('+++')
                    print(currentTime, ' New query', val)
                (precision, recall, bgp) = (0, 0, None)
                queryList[id] = ((time, ip, query, qbgp, queryID), bgp, precision, recall)
            elif mode == SWEEP_IN_BGP:
                ctx.stat['nbBGP'] += 1
                bgp = val
                currentTime = now()
                if SWEEP_DEBUB_PR:
                    print('+++')
                    print(currentTime, ' New BGP')
                    val.print()
                old_bgp = testPrecisionRecallBGP(queryList, bgp, gap)
                if SWEEP_DEBUB_PR:
                    if old_bgp is not None:
                        print('BGP not associated and archieved :')
                        old_bgp.print()
                if old_bgp is not None:
                    # archive the BGP that could not be associated
                    memoryQueue.put((4, (0, 'none', old_bgp.birthTime, old_bgp.client, None, None, old_bgp, 0, 0)))
            # the case where the TPF client could not execute the query...
            elif mode == SWEEP_OUT_QUERY:
                # suppress query 'queryID'
                for i in queryList:
                    ((time, ip, query, qbgp, queryID),
                     bgp, precision, recall) = queryList[i]
                    if queryID == val:
                        if SWEEP_DEBUB_PR:
                            print('---')
                            print(currentTime, ' Deleting query', queryID)
                        queryList.pop(i)
                        with ctx.lck:
                            ctx.stat['nbQueries'] -= 1
                        if bgp is not None:
                            if SWEEP_DEBUB_PR:
                                print('-')
                                print('extract its BGP')
                                bgp.print()
                            # give the orphaned BGP a chance elsewhere
                            old_bgp = testPrecisionRecallBGP(queryList, bgp, gap)
                            if old_bgp is not None:
                                memoryQueue.put((4, (0, 'none', old_bgp.birthTime, old_bgp.client, None, None, old_bgp, 0, 0)))
                        else:
                            if SWEEP_DEBUB_PR:
                                print('-')
                                print('No BGP to extract')
                        break
            else:  # mode == SWEEP_PURGE
                currentTime = now()
                # archive queries that are too old
                old = []
                for qid in queryList:
                    ((time, ip, query, qbgp, queryID), bgp,
                     precision, recall) = queryList[qid]
                    if currentTime - time > valGap:
                        old.append(qid)
                for qid in old:
                    _archiveQuery(memoryQueue, ctx, qid, queryList.pop(qid), currentTime)
            ctx.nbREQ.value = len(queryList)
            inq = in_queue.get()
    except KeyboardInterrupt:
        # remaining queries are flushed in the finally clause
        pass
    finally:
        # BUGFIX: iterate over a snapshot — the original popped from
        # 'queryList' while iterating it, which raises RuntimeError
        # ("dictionary changed size during iteration") in Python 3.
        for qid in list(queryList):
            _archiveQuery(memoryQueue, ctx, qid, queryList.pop(qid), currentTime)
        ctx.nbREQ.value = 0
#==================================================
def addBGP2Rank(bgp, nquery, line, precision, recall, ranking):
    """Merge one (bgp, query, log line) observation into *ranking*.

    *ranking* is a list of tuples
    (last_change, bgp, count, query, line_set, sum_precision, sum_recall).
    If *bgp* is already ranked, its counters are updated in place (and its
    query filled in if still unknown); otherwise a new entry is appended.
    """
    for (i, (t, d, n, query, ll, p, r)) in enumerate(ranking):
        if bgp == d:
            ll.add(line)
            # keep the first known query text for this BGP
            if query is None:  # BUGFIX: identity test, not '==', for None
                query = nquery
            ranking[i] = (now(), d, n + 1, query, ll, p + precision, r + recall)
            return
    ranking.append((now(), bgp, 1, nquery, {line}, precision, recall))
# todo :
# - faire deux mémoires pour les BGPs, une court terme sur 10*gap par exemple avec la méthode sûre et une long terme avec algo ssc.
# - faire une sauvegarde incrémentale pour permettre de ne concervé que ce qui est nécessaire pour la mémoire à court terme.
# - faire aussi les deux mémoires pour les requêtes
# todo:
# - keep two BGP memories: a short-term one (e.g. 10*gap) using the exact
#   method, and a long-term one using the SpaceSaving counter algorithm.
# - save incrementally so only what the short-term memory needs is kept.
# - do the same two-level memory for queries.
def processMemory(ctx, duration, inQueue, outQueue):
    """Memory process: archives finished (query, BGP) records, maintains the
    short-term rankings and the long-term SpaceSaving top-k counters, and
    periodically persists the memory to CSV.

    Messages on *inQueue* are (mode, payload): 1/2 = top-k request for
    queries/BGPs (answer on *outQueue*), 4 = archive a record; a timeout on
    the queue produces a (0, True) tick that may trigger a save.
    """
    sscBGP = SpaceSavingCounter(ctx.memSize)
    sscQueries = SpaceSavingCounter(ctx.memSize)
    lastTimeMemorySaved = ctx.startTime
    nbMemoryChanges = 0  # pending, not-yet-persisted changes
    try:
        while True:
            try:
                inq = inQueue.get(timeout=duration.total_seconds())
            except Empty:
                inq = (0, True)  # idle tick: maybe save
            (mode, mess) = inq
            # persist on an idle tick with pending changes, or after 10 changes
            if ((mode == 0) and (nbMemoryChanges > 0)) or (nbMemoryChanges > 10):
                with ctx.lck:
                    ref = lastTimeMemorySaved
                    saveMemory(ctx.memory, ref)
                    lastTimeMemorySaved = now()
                    nbMemoryChanges = 0
            if mode == 1:
                # top-k queries
                (g, o, tpk) = sscQueries.queryTopK(mess)
                outQueue.put([sscQueries.monitored[e] for e in tpk])
            elif mode == 2:
                # top-k BGPs
                (g, o, tpk) = sscBGP.queryTopK(mess)
                outQueue.put([sscBGP.monitored[e] for e in tpk])
            elif mode == 4:
                # archive one finished record
                (id, queryID, time, ip, query, qbgp, bgp, precision, recall) = mess
                with ctx.lck:
                    ctx.memory.append((id, queryID, time, ip, query, bgp, precision, recall))
                nbMemoryChanges += 1
                if query is not None:
                    sbgp = canonicalize_sparql_bgp(qbgp)
                    with ctx.lck:
                        addBGP2Rank(sbgp, query, id, precision, recall, ctx.rankingQueries)
                    sscQueries.add(hashBGP(sbgp), sbgp)
                if bgp is not None:
                    sbgp = canonicalize_sparql_bgp([(tp.s, tp.p, tp.o) for tp in bgp.tp_set])
                    # single-triple BGPs are only ranked when SWEEP_ALL_BGP
                    if SWEEP_ALL_BGP or (len(sbgp) > 1):
                        with ctx.lck:
                            addBGP2Rank(sbgp, query, id, 0, 0, ctx.rankingBGPs)
                        sscBGP.add(hashBGP(sbgp), sbgp)
            else:
                pass
            # oldest elements in the short-term memory are deleted
            threshold = now() - ctx.memDuration
            with ctx.lck:
                while len(ctx.memory) > 0:
                    (id, queryID, time, ip, query, bgp, precision, recall) = ctx.memory[0]
                    if time < threshold: ctx.memory.pop(0)
                    else: break
                i = 0
                while i < len(ctx.rankingBGPs):
                    (chgDate, d, n, query, ll, p, r) = ctx.rankingBGPs[i]
                    if chgDate < threshold: ctx.rankingBGPs.pop(i)
                    else: i += 1
                i = 0
                while i < len(ctx.rankingQueries):
                    (chgDate, d, n, query, ll, p, r) = ctx.rankingQueries[i]
                    if chgDate < threshold: ctx.rankingQueries.pop(i)
                    else: i += 1
    except KeyboardInterrupt:
        # final save of anything not yet persisted
        if nbMemoryChanges > 0:
            with ctx.lck:
                saveMemory(ctx.memory, lastTimeMemorySaved)
def saveMemory(memory, lastTimeMemorySaved):
    """Append to 'sweep.csv' every memory row newer than *lastTimeMemorySaved*.

    The header is written once, when the file is first created.  Rows are
    (id, queryID, time, ip, query, bgp, precision, recall); a missing BGP or
    query text is serialized as '...'.
    """
    file = 'sweep.csv'  # (id, time, ip, query, bgp, precision, recall)
    sep = '\t'
    exists = existFile(file)
    mode = "a" if exists else "w"
    print('Saving memory ', mode)
    with open(file, mode, encoding='utf-8') as f:
        fieldnames = ['id', 'qID', 'time', 'ip', 'query', 'bgp', 'precision', 'recall']
        writer = csv.DictWriter(f, fieldnames=fieldnames, delimiter=sep)
        if not exists:
            writer.writeheader()
        for (id, queryID, t, ip, query, bgp, precision, recall) in memory:
            if t <= lastTimeMemorySaved:
                continue  # already persisted by a previous save
            if bgp is not None:
                bgp_txt = ".\n".join([tp.toStr() for tp in bgp.tp_set])
            else:
                bgp_txt = "..."
            if query is None:
                query = '...'
            writer.writerow({'id': id, 'qID': queryID, 'time': t, 'ip': ip,
                             'query': query, 'bgp': bgp_txt,
                             'precision': precision, 'recall': recall})
    print('Memory saved')
#==================================================
class SWEEP:  # Abstract Class
    """Pipeline coordinator: spawns the aggregation, BGP-discovery,
    validation and memory processes and exposes the queues that feed them.

    Data flow: putEntry/putData/putEnd -> processAgregator ->
    processBGPDiscover -> (get) and processValidation -> processMemory.
    """

    def __init__(self, gap, to, opt, mem=100):
        #---
        assert isinstance(gap, dt.timedelta)
        #---
        self.gap = gap
        self.timeout = to
        # update the BGP date with the last received TP?
        self.optimistic = opt
        self.lck = mp.Lock()
        manager = mp.Manager()
        self.memory = manager.list()
        self.rankingBGPs = manager.list()
        self.rankingQueries = manager.list()
        self.startTime = now()
        self.memSize = mem  # for long term memory (ssc)
        self.memDuration = 10 * gap  # for short term memory
        # self.avgPrecision = mp.Value('f',0.0)
        # self.avgRecall = mp.Value('f',0.0)
        # self.avgQual = mp.Value('f',0.0)
        # self.Acuteness = mp.Value('f',0.0)
        self.nbBGP = mp.Value('i', 0)
        self.nbREQ = mp.Value('i', 0)
        self.qId = mp.Value('i', 0)  # query id generator
        self.stat = manager.dict({'sumRecall': 0, 'sumPrecision': 0,
                                  'sumQuality': 0, 'nbQueries': 0, 'nbBGP': 0, 'sumSelectedBGP': 0})
        self.dataQueue = mp.Queue()
        self.entryQueue = mp.Queue()
        self.validationQueue = mp.Queue()
        self.resQueue = mp.Queue()
        self.memoryInQueue = mp.Queue()
        self.memoryOutQueue = mp.Queue()
        self.dataProcess = mp.Process(target=processAgregator, args=(
            self.dataQueue, self.entryQueue, self.validationQueue, self))
        self.entryProcess = mp.Process(target=processBGPDiscover, args=(
            self.entryQueue, self.resQueue, self.validationQueue, self))
        self.validationProcess = mp.Process(
            target=processValidation, args=(self.validationQueue, self.memoryInQueue, self))
        self.memoryProcess = mp.Process(target=processMemory, args=(self, gap * 3, self.memoryInQueue, self.memoryOutQueue))
        self.dataProcess.start()
        self.entryProcess.start()
        self.validationProcess.start()
        self.memoryProcess.start()

    def setTimeout(self, to):
        """Change the client timeout (a timedelta)."""
        print('chg to:', to.total_seconds())
        self.timeout = to

    def swapOptimistic(self):
        """Toggle the optimistic BGP-dating mode."""
        self.optimistic = not(self.optimistic)

    def startSession(self):
        """Signal the start of a capture session to the pipeline."""
        self.dataQueue.put((0, SWEEP_START_SESSION, ()))

    def endSession(self):
        """Signal the end of the capture session (flushes open BGPs)."""
        self.dataQueue.put((0, SWEEP_END_SESSION, ()))

    def put(self, v):
        """Raw access to the input queue."""
        self.dataQueue.put(v)

    # To implement
    def putQuery(self, time, ip, query, bgp, queryID):
        """Register a client query for validation; generates an id when
        *queryID* is None."""
        with self.qId.get_lock():
            self.qId.value += 1
            qId = self.qId.value
        if queryID is None:
            queryID = 'id' + str(qId)
        self.validationQueue.put(
            (SWEEP_IN_QUERY, qId, (time, ip, query, bgp, queryID)))

    def putEnd(self, i):
        """Mark entry *i* as finished."""
        self.dataQueue.put((i, SWEEP_IN_END, ()))

    def putEntry(self, i, s, p, o, time, client):
        """Open entry *i* with its triple pattern, timestamp and client."""
        self.dataQueue.put((i, SWEEP_IN_ENTRY, (s, p, o, time, client)))

    def putData(self, i, xs, xp, xo):
        """Add one result binding to entry *i*."""
        self.dataQueue.put((i, SWEEP_IN_DATA, (xs, xp, xo)))

    def putLog(self, entry_id, entry):
        """Inject a pre-aggregated entry, bypassing the aggregator.
        entry = (s, p, o, t, c, sm, pm, om)."""
        self.entryQueue.put((entry_id, entry))

    def delQuery(self, x):
        """Withdraw query *x* (the TPF client could not execute it)."""
        self.validationQueue.put((SWEEP_OUT_QUERY, 0, x))

    def get(self):
        """Blocking read of the next discovered BGP; None at end of session.
        Session-start markers are skipped (recursively)."""
        try:
            r = self.resQueue.get()
            if r == SWEEP_START_SESSION:
                return self.get()
            if r == SWEEP_END_SESSION:
                return None
            else:
                return r
        except KeyboardInterrupt:
            return None

    def stop(self):
        """Shut the pipeline down and join all worker processes."""
        self.dataQueue.put(None)
        self.dataProcess.join()
        self.entryProcess.join()
        self.validationProcess.join()
        self.memoryProcess.join()

    def getTopKBGP(self, n):
        """Top-*n* BGPs from the long-term (SpaceSaving) memory."""
        with self.lck:
            self.memoryInQueue.put((2, n))
            tpk = self.memoryOutQueue.get()
        return tpk

    def getTopKQueries(self, n):
        """Top-*n* queries from the long-term (SpaceSaving) memory."""
        with self.lck:
            self.memoryInQueue.put((1, n))
            tpk = self.memoryOutQueue.get()
        return tpk

    def getRankingBGPs(self):
        """Snapshot of the short-term BGP ranking."""
        return self.cloneRanking(self.rankingBGPs)

    def getRankingQueries(self):
        """Snapshot of the short-term query ranking."""
        return self.cloneRanking(self.rankingQueries)

    def cloneRanking(self, ranking):
        """Copy a shared ranking list under the lock."""
        res = []
        with self.lck:
            for (t, d, n, query, ll, p, r) in ranking:
                res.append((t, d, n, query, ll, p, r))
        return res
#==================================================
def makeLog(ip):
    """Create and return the root <log> XML element for client *ip*."""
    root = etree.Element('log')
    root.set('ip', ip)
    return root
def addBGP(n, bgp, node_log):
    """Append an <entry> element describing *bgp* (log line *n*) to
    *node_log* and return *node_log*.

    The entry carries the BGP serialized as XML plus an equivalent
    'select * where { ... }' query text; serialization failures are caught
    and reported without aborting.
    """
    entry_node = etree.SubElement(node_log, 'entry')
    entry_node.set('datetime', '%s' % bgp.time)
    entry_node.set('logline', '%s' % n)
    request_node = etree.SubElement(entry_node, 'request')
    # BUGFIX: define 'query' before the try — if serializeBGP raises, the
    # except handler referenced an unbound 'query' (UnboundLocalError).
    query = ''
    try:
        bgp_node = serializeBGP([(tp.s, tp.p, tp.o) for tp in bgp.tp_set])
        entry_node.insert(1, bgp_node)
        query = 'select * where{ \n'
        for tp in bgp.tp_set:
            query += serialize2string(tp.s) + ' ' + serialize2string(tp.p) + \
                ' ' + serialize2string(tp.o) + ' .\n'
        query += ' }'
        request_node.text = query
    except Exception as e:
        # BUGFIX: actually interpolate the message — print() is not
        # logging-style, the original printed the raw format string.
        print('PB serialize BGP : %s\n%s\n%s' % (e.__str__(), query, bgp))
    return node_log
def save(node_log, lift2):
    """Serialize the XML tree *node_log* and write it to file *lift2*.

    Validation problems (etree.DocumentInvalid) and write errors are caught
    and reported; nothing is raised to the caller.
    """
    try:
        print('Ecriture de "%s"' % lift2)
        tosave = etree.tostring(
            node_log,
            encoding="UTF-8",
            xml_declaration=True,
            pretty_print=True,
            doctype='<!DOCTYPE log SYSTEM "http://documents.ls2n.fr/be4dbp/log.dtd">')
        try:
            # BUGFIX: use 'with' — the original's finally clause referenced
            # 'f' even when open() itself failed (UnboundLocalError), and
            # 'with' guarantees the close on every path.
            with open(lift2, 'w') as f:
                f.write(tosave.decode('utf-8'))
        except Exception as e:
            # BUGFIX: interpolate the message (print is not logging-style)
            print('PB Test Analysis saving %s : %s' % (lift2, e.__str__()))
    except etree.DocumentInvalid as e:
        print('PB Test Analysis, %s not validated : %s' % (lift2, e))
#==================================================
#==================================================
#==================================================
if __name__ == "__main__":
    # Small manual smoke test of the nested-loop detection machinery.
    print("main sweep")
    gap = dt.timedelta(minutes=1)
    # tpq1: variable subject whose results will serve as injections below
    tpq1 = TriplePatternQuery(Variable('s'), URIRef('http://exemple.org/p1'), Literal('2'), now(
    ), 'Client2', [URIRef('http://exemple.org/test1'), URIRef('http://exemple.org/test2')], [], [])
    tpq1.renameVars(1)
    print(tpq1.toString())
    # tpq2/tpq3: constant subjects taken from tpq1's result mappings
    tpq2 = TriplePatternQuery(URIRef('http://exemple.org/test1'), URIRef(
        'http://exemple.org/p2'), Literal('3'), now(), '2', ['a', 'b'], [], [])
    tpq2.renameVars(2)
    print(tpq2.toString())
    tpq3 = TriplePatternQuery(URIRef('http://exemple.org/test2'), URIRef(
        'http://exemple.org/p2'), Literal('4'), now(), '2', ['a', 'b'], [], [])
    tpq3.renameVars(3)
    print(tpq3.toString())
    print('\n Début')
    bgp = BasicGraphPattern(gap, tpq1)
    bgp.print()
    print(tpq2.nestedLoopOf2(tpq1))
    # does tpq2 look like a nested-loop instantiation of a pattern in bgp?
    (t, tp, fTp, mapVal) = bgp.findNestedLoop(tpq2)
    if t:
        print(tp.toString())
    print('Fin')
|
edesmontils/Linked-Data-Query-Profiler
|
archive/sweep copie.py
|
Python
|
gpl-3.0
| 37,290
|
#!/usr/bin/env python
#
# euclid graphics maths module
#
# Copyright (c) 2006 Alex Holkner
# Alex.Holkner@mail.google.com
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''euclid graphics maths module
Documentation and tests are included in the file "euclid.txt", or online
at http://code.google.com/p/pyeuclid
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
__revision__ = '$Revision$'
import math
import operator
import types
# Some magic here. If _use_slots is True, the classes will derive from
# object and will define a __slots__ class variable. If _use_slots is
# False, classes will be old-style and will not define __slots__.
#
# _use_slots = True: Memory efficient, probably faster in future versions
# of Python, "better".
# _use_slots = False: Ordinary classes, much faster than slots in current
# versions of Python (2.4 and 2.5).
# See the "magic" note above: True -> new-style classes with __slots__.
_use_slots = True

# If True, allows components of Vector2 and Vector3 to be set via swizzling;
# e.g. v.xyz = (1, 2, 3). This is much, much slower than the more verbose
# v.x = 1; v.y = 2; v.z = 3, and slows down ordinary element setting as
# well. Recommended setting is False.
_enable_swizzle_set = False

# Swizzle-set requires new-style classes (deriving from object).
if _enable_swizzle_set:
    _use_slots = True
# Implement _use_slots magic.
# Implement _use_slots magic.
class _EuclidMetaclass(type):
    """Metaclass implementing the _use_slots switch.

    When _use_slots is True, classes are created as new-style classes
    (object is appended to the bases, __slots__ is kept); otherwise the
    class is rebuilt as a Python 2 old-style class without __slots__.
    Pickling support (__getstate__/__setstate__) is injected for slotted
    classes, since slotted instances have no __dict__ to pickle.
    """
    def __new__(cls, name, bases, dct):
        if '__slots__' in dct:
            dct['__getstate__'] = cls._create_getstate(dct['__slots__'])
            dct['__setstate__'] = cls._create_setstate(dct['__slots__'])
        if _use_slots:
            return type.__new__(cls, name, bases + (object,), dct)
        else:
            if '__slots__' in dct:
                del dct['__slots__']
            # NOTE(review): types.ClassType exists only in Python 2; this
            # branch cannot run under Python 3.
            return types.ClassType.__new__(types.ClassType, name, bases, dct)

    @classmethod
    def _create_getstate(cls, slots):
        # Build a __getstate__ that snapshots every slot into a dict.
        def __getstate__(self):
            d = {}
            for slot in slots:
                d[slot] = getattr(self, slot)
            return d
        return __getstate__

    @classmethod
    def _create_setstate(cls, slots):
        # Build a __setstate__ that restores the slots from that dict.
        def __setstate__(self, state):
            for name, value in state.items():
                setattr(self, name, value)
        return __setstate__

# Python 2 module-level metaclass hook: applies to every class defined below.
__metaclass__ = _EuclidMetaclass
class Vector2:
    """Mutable 2-D vector with components x and y (Python 2 code).

    Supports sequence protocol (len/index/iter), read-only swizzling via
    __getattr__ (e.g. v.yx), arithmetic with vectors, points and 2-element
    sequences, and the usual geometric operations.  Instances are mutable,
    hence unhashable (__hash__ = None).
    """
    __slots__ = ['x', 'y']
    __hash__ = None  # mutable -> not hashable

    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

    def __copy__(self):
        return self.__class__(self.x, self.y)

    copy = __copy__

    def __repr__(self):
        return 'Vector2(%.2f, %.2f)' % (self.x, self.y)

    def __eq__(self, other):
        # compares against another Vector2 or any 2-element sequence
        if isinstance(other, Vector2):
            return self.x == other.x and \
                   self.y == other.y
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return self.x == other[0] and \
                   self.y == other[1]

    def __ne__(self, other):
        return not self.__eq__(other)

    def __nonzero__(self):
        # Python 2 truth test: non-zero vector is truthy
        return self.x != 0 or self.y != 0

    def __len__(self):
        return 2

    def __getitem__(self, key):
        return (self.x, self.y)[key]

    def __setitem__(self, key, value):
        l = [self.x, self.y]
        l[key] = value
        self.x, self.y = l

    def __iter__(self):
        return iter((self.x, self.y))

    def __getattr__(self, name):
        # read-only swizzle: v.yx -> (y, x), v.xx -> (x, x), ...
        try:
            return tuple([(self.x, self.y)['xy'.index(c)] \
                          for c in name])
        except ValueError:
            raise AttributeError, name  # NOTE: Python 2 raise syntax

    if _enable_swizzle_set:
        # This has detrimental performance on ordinary setattr as well
        # if enabled
        def __setattr__(self, name, value):
            if len(name) == 1:
                object.__setattr__(self, name, value)
            else:
                try:
                    l = [self.x, self.y]
                    for c, v in map(None, name, value):  # Py2 map(None, ...)
                        l['xy'.index(c)] = v
                    self.x, self.y = l
                except ValueError:
                    raise AttributeError, name

    def __add__(self, other):
        if isinstance(other, Vector2):
            # Vector + Vector -> Vector
            # Vector + Point -> Point
            # Point + Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector2
            else:
                _class = Point2
            return _class(self.x + other.x,
                          self.y + other.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return Vector2(self.x + other[0],
                           self.y + other[1])

    __radd__ = __add__

    def __iadd__(self, other):
        # in-place addition with a vector or a 2-element sequence
        if isinstance(other, Vector2):
            self.x += other.x
            self.y += other.y
        else:
            self.x += other[0]
            self.y += other[1]
        return self

    def __sub__(self, other):
        if isinstance(other, Vector2):
            # Vector - Vector -> Vector
            # Vector - Point -> Point
            # Point - Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector2
            else:
                _class = Point2
            return _class(self.x - other.x,
                          self.y - other.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return Vector2(self.x - other[0],
                           self.y - other[1])

    def __rsub__(self, other):
        if isinstance(other, Vector2):
            return Vector2(other.x - self.x,
                           other.y - self.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            # NOTE(review): 'other.x' on a plain sequence raises
            # AttributeError — this branch looks like it should read
            # other[0] - self.x; confirm before relying on it.
            return Vector2(other.x - self[0],
                           other.y - self[1])

    def __mul__(self, other):
        # scalar multiplication only
        assert type(other) in (int, long, float)  # Py2 'long'
        return Vector2(self.x * other,
                       self.y * other)

    __rmul__ = __mul__

    def __imul__(self, other):
        assert type(other) in (int, long, float)
        self.x *= other
        self.y *= other
        return self

    def __div__(self, other):
        # Python 2 classic division (operator.div)
        assert type(other) in (int, long, float)
        return Vector2(operator.div(self.x, other),
                       operator.div(self.y, other))

    def __rdiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.div(other, self.x),
                       operator.div(other, self.y))

    def __floordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.floordiv(self.x, other),
                       operator.floordiv(self.y, other))

    def __rfloordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.floordiv(other, self.x),
                       operator.floordiv(other, self.y))

    def __truediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.truediv(self.x, other),
                       operator.truediv(self.y, other))

    def __rtruediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.truediv(other, self.x),
                       operator.truediv(other, self.y))

    def __neg__(self):
        return Vector2(-self.x,
                       -self.y)

    __pos__ = __copy__

    def __abs__(self):
        # Euclidean length
        return math.sqrt(self.x ** 2 + \
                         self.y ** 2)

    magnitude = __abs__

    def magnitude_squared(self):
        # squared length (avoids the sqrt)
        return self.x ** 2 + \
               self.y ** 2

    def normalize(self):
        """Scale this vector to unit length in place (no-op if zero)."""
        d = self.magnitude()
        if d:
            self.x /= d
            self.y /= d
        return self

    def normalized(self):
        """Return a unit-length copy (plain copy if zero)."""
        d = self.magnitude()
        if d:
            return Vector2(self.x / d,
                           self.y / d)
        return self.copy()

    def dot(self, other):
        """Dot product with another Vector2."""
        assert isinstance(other, Vector2)
        return self.x * other.x + \
               self.y * other.y

    def cross(self):
        """Return the perpendicular vector (clockwise 90-degree rotation)."""
        return Vector2(self.y, -self.x)

    def reflect(self, normal):
        """Reflect about *normal* (assumed to be unit length)."""
        # assume normal is normalized
        assert isinstance(normal, Vector2)
        d = 2 * (self.x * normal.x + self.y * normal.y)
        return Vector2(self.x - d * normal.x,
                       self.y - d * normal.y)
class Vector3:
__slots__ = ['x', 'y', 'z']
__hash__ = None
def __init__(self, x=0, y=0, z=0):
self.x = x
self.y = y
self.z = z
def __copy__(self):
return self.__class__(self.x, self.y, self.z)
copy = __copy__
def __repr__(self):
return 'Vector3(%.2f, %.2f, %.2f)' % (self.x,
self.y,
self.z)
def __eq__(self, other):
if isinstance(other, Vector3):
return self.x == other.x and \
self.y == other.y and \
self.z == other.z
else:
assert hasattr(other, '__len__') and len(other) == 3
return self.x == other[0] and \
self.y == other[1] and \
self.z == other[2]
def __ne__(self, other):
return not self.__eq__(other)
def __nonzero__(self):
return self.x != 0 or self.y != 0 or self.z != 0
def __len__(self):
return 3
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __iter__(self):
return iter((self.x, self.y, self.z))
def __getattr__(self, name):
try:
return tuple([(self.x, self.y, self.z)['xyz'.index(c)] \
for c in name])
except ValueError:
raise AttributeError, name
if _enable_swizzle_set:
# This has detrimental performance on ordinary setattr as well
# if enabled
def __setattr__(self, name, value):
if len(name) == 1:
object.__setattr__(self, name, value)
else:
try:
l = [self.x, self.y, self.z]
for c, v in map(None, name, value):
l['xyz'.index(c)] = v
self.x, self.y, self.z = l
except ValueError:
raise AttributeError, name
def __add__(self, other):
if isinstance(other, Vector3):
# Vector + Vector -> Vector
# Vector + Point -> Point
# Point + Point -> Vector
if self.__class__ is other.__class__:
_class = Vector3
else:
_class = Point3
return _class(self.x + other.x,
self.y + other.y,
self.z + other.z)
else:
assert hasattr(other, '__len__') and len(other) == 3
return Vector3(self.x + other[0],
self.y + other[1],
self.z + other[2])
__radd__ = __add__
def __iadd__(self, other):
if isinstance(other, Vector3):
self.x += other.x
self.y += other.y
self.z += other.z
else:
self.x += other[0]
self.y += other[1]
self.z += other[2]
return self
def __sub__(self, other):
if isinstance(other, Vector3):
# Vector - Vector -> Vector
# Vector - Point -> Point
# Point - Point -> Vector
if self.__class__ is other.__class__:
_class = Vector3
else:
_class = Point3
return Vector3(self.x - other.x,
self.y - other.y,
self.z - other.z)
else:
assert hasattr(other, '__len__') and len(other) == 3
return Vector3(self.x - other[0],
self.y - other[1],
self.z - other[2])
def __rsub__(self, other):
if isinstance(other, Vector3):
return Vector3(other.x - self.x,
other.y - self.y,
other.z - self.z)
else:
assert hasattr(other, '__len__') and len(other) == 3
return Vector3(other.x - self[0],
other.y - self[1],
other.z - self[2])
def __mul__(self, other):
if isinstance(other, Vector3):
# TODO component-wise mul/div in-place and on Vector2; docs.
if self.__class__ is Point3 or other.__class__ is Point3:
_class = Point3
else:
_class = Vector3
return _class(self.x * other.x,
self.y * other.y,
self.z * other.z)
else:
assert type(other) in (int, long, float)
return Vector3(self.x * other,
self.y * other,
self.z * other)
__rmul__ = __mul__
def __imul__(self, other):
assert type(other) in (int, long, float)
self.x *= other
self.y *= other
self.z *= other
return self
def __div__(self, other):
assert type(other) in (int, long, float)
return Vector3(operator.div(self.x, other),
operator.div(self.y, other),
operator.div(self.z, other))
def __rdiv__(self, other):
assert type(other) in (int, long, float)
return Vector3(operator.div(other, self.x),
operator.div(other, self.y),
operator.div(other, self.z))
def __floordiv__(self, other):
assert type(other) in (int, long, float)
return Vector3(operator.floordiv(self.x, other),
operator.floordiv(self.y, other),
operator.floordiv(self.z, other))
    def __rfloordiv__(self, other):
        """Reflected floor division: scalar // vector, component-wise."""
        assert type(other) in (int, long, float)
        return Vector3(operator.floordiv(other, self.x),
                       operator.floordiv(other, self.y),
                       operator.floordiv(other, self.z))
    def __truediv__(self, other):
        """Component-wise true division by a scalar."""
        assert type(other) in (int, long, float)
        return Vector3(operator.truediv(self.x, other),
                       operator.truediv(self.y, other),
                       operator.truediv(self.z, other))
    def __rtruediv__(self, other):
        """Reflected true division: scalar / vector, component-wise."""
        assert type(other) in (int, long, float)
        return Vector3(operator.truediv(other, self.x),
                       operator.truediv(other, self.y),
                       operator.truediv(other, self.z))
def __neg__(self):
return Vector3(-self.x,
-self.y,
-self.z)
__pos__ = __copy__
def __abs__(self):
return math.sqrt(self.x ** 2 + \
self.y ** 2 + \
self.z ** 2)
magnitude = __abs__
def magnitude_squared(self):
return self.x ** 2 + \
self.y ** 2 + \
self.z ** 2
def normalize(self):
d = self.magnitude()
if d:
self.x /= d
self.y /= d
self.z /= d
return self
def normalized(self):
d = self.magnitude()
if d:
return Vector3(self.x / d,
self.y / d,
self.z / d)
return self.copy()
def dot(self, other):
assert isinstance(other, Vector3)
return self.x * other.x + \
self.y * other.y + \
self.z * other.z
def cross(self, other):
assert isinstance(other, Vector3)
return Vector3(self.y * other.z - self.z * other.y,
-self.x * other.z + self.z * other.x,
self.x * other.y - self.y * other.x)
def reflect(self, normal):
# assume normal is normalized
assert isinstance(normal, Vector3)
d = 2 * (self.x * normal.x + self.y * normal.y + self.z * normal.z)
return Vector3(self.x - d * normal.x,
self.y - d * normal.y,
self.z - d * normal.z)
# a b c
# e f g
# i j k
class Matrix3:
    """A 3x3 matrix used as a 2D affine transform.

    Elements are named row-major:

        a b c
        e f g
        i j k

    so the 2D translation components live in (c, g).  Flattened element
    access via __getitem__/__setitem__ uses column-major (OpenGL-style)
    ordering.
    """
    __slots__ = list('abcefgijk')
    def __init__(self):
        # A freshly constructed matrix is the identity transform.
        self.identity()
    def __copy__(self):
        M = Matrix3()
        M.a = self.a
        M.b = self.b
        M.c = self.c
        M.e = self.e
        M.f = self.f
        M.g = self.g
        M.i = self.i
        M.j = self.j
        M.k = self.k
        return M
    copy = __copy__
    def __repr__(self):
        return ('Matrix3([% 8.2f % 8.2f % 8.2f\n' \
                '         % 8.2f % 8.2f % 8.2f\n' \
                '         % 8.2f % 8.2f % 8.2f])') \
                % (self.a, self.b, self.c,
                   self.e, self.f, self.g,
                   self.i, self.j, self.k)
    def __getitem__(self, key):
        # Column-major flattening: a e i | b f j | c g k.
        return [self.a, self.e, self.i,
                self.b, self.f, self.j,
                self.c, self.g, self.k][key]
    def __setitem__(self, key, value):
        # Round-trip through the column-major list form so both scalar and
        # slice assignment work.
        L = self[:]
        L[key] = value
        (self.a, self.e, self.i,
         self.b, self.f, self.j,
         self.c, self.g, self.k) = L
    def __mul__(self, other):
        """Matrix * Matrix/Point2/Vector2/other -> new object.

        Vector2 multiplication ignores the translation column (c, g);
        Point2 multiplication applies it.  Any other operand is copied and
        asked to transform itself via its _apply_transform hook.
        """
        if isinstance(other, Matrix3):
            # Caching repeatedly accessed attributes in local variables
            # apparently increases performance by 20%. Attrib: Will McGugan.
            Aa = self.a
            Ab = self.b
            Ac = self.c
            Ae = self.e
            Af = self.f
            Ag = self.g
            Ai = self.i
            Aj = self.j
            Ak = self.k
            Ba = other.a
            Bb = other.b
            Bc = other.c
            Be = other.e
            Bf = other.f
            Bg = other.g
            Bi = other.i
            Bj = other.j
            Bk = other.k
            C = Matrix3()
            C.a = Aa * Ba + Ab * Be + Ac * Bi
            C.b = Aa * Bb + Ab * Bf + Ac * Bj
            C.c = Aa * Bc + Ab * Bg + Ac * Bk
            C.e = Ae * Ba + Af * Be + Ag * Bi
            C.f = Ae * Bb + Af * Bf + Ag * Bj
            C.g = Ae * Bc + Af * Bg + Ag * Bk
            C.i = Ai * Ba + Aj * Be + Ak * Bi
            C.j = Ai * Bb + Aj * Bf + Ak * Bj
            C.k = Ai * Bc + Aj * Bg + Ak * Bk
            return C
        elif isinstance(other, Point2):
            A = self
            B = other
            P = Point2(0, 0)
            P.x = A.a * B.x + A.b * B.y + A.c
            P.y = A.e * B.x + A.f * B.y + A.g
            return P
        elif isinstance(other, Vector2):
            A = self
            B = other
            V = Vector2(0, 0)
            V.x = A.a * B.x + A.b * B.y
            V.y = A.e * B.x + A.f * B.y
            return V
        else:
            # Geometry primitives etc.: transform a copy, leave `other` intact.
            other = other.copy()
            other._apply_transform(self)
            return other
    def __imul__(self, other):
        """In-place matrix product: self = self * other."""
        assert isinstance(other, Matrix3)
        # Cache attributes in local vars (see Matrix3.__mul__).
        Aa = self.a
        Ab = self.b
        Ac = self.c
        Ae = self.e
        Af = self.f
        Ag = self.g
        Ai = self.i
        Aj = self.j
        Ak = self.k
        Ba = other.a
        Bb = other.b
        Bc = other.c
        Be = other.e
        Bf = other.f
        Bg = other.g
        Bi = other.i
        Bj = other.j
        Bk = other.k
        self.a = Aa * Ba + Ab * Be + Ac * Bi
        self.b = Aa * Bb + Ab * Bf + Ac * Bj
        self.c = Aa * Bc + Ab * Bg + Ac * Bk
        self.e = Ae * Ba + Af * Be + Ag * Bi
        self.f = Ae * Bb + Af * Bf + Ag * Bj
        self.g = Ae * Bc + Af * Bg + Ag * Bk
        self.i = Ai * Ba + Aj * Be + Ak * Bi
        self.j = Ai * Bb + Aj * Bf + Ak * Bj
        self.k = Ai * Bc + Aj * Bg + Ak * Bk
        return self
    def identity(self):
        """Reset to the identity transform; returns self for chaining."""
        self.a = self.f = self.k = 1.
        self.b = self.c = self.e = self.g = self.i = self.j = 0
        return self
    def scale(self, x, y):
        # Post-multiply (self = self * S); returns self for chaining.
        self *= Matrix3.new_scale(x, y)
        return self
    def translate(self, x, y):
        # Post-multiply by a translation; returns self for chaining.
        self *= Matrix3.new_translate(x, y)
        return self
    def rotate(self, angle):
        # Post-multiply by a rotation (angle in radians); returns self.
        self *= Matrix3.new_rotate(angle)
        return self
    # Static constructors
    def new_identity(cls):
        """Return a new identity matrix."""
        self = cls()
        return self
    new_identity = classmethod(new_identity)
    def new_scale(cls, x, y):
        """Return a matrix scaling by (x, y)."""
        self = cls()
        self.a = x
        self.f = y
        return self
    new_scale = classmethod(new_scale)
    def new_translate(cls, x, y):
        """Return a matrix translating by (x, y)."""
        self = cls()
        self.c = x
        self.g = y
        return self
    new_translate = classmethod(new_translate)
    def new_rotate(cls, angle):
        """Return a 2D rotation by `angle` radians."""
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        self.a = self.f = c
        self.b = -s
        self.e = s
        return self
    new_rotate = classmethod(new_rotate)
# a b c d
# e f g h
# i j k l
# m n o p
class Matrix4:
    """A 4x4 matrix for 3D affine and projective transforms.

    Elements are named row-major:

        a b c d
        e f g h
        i j k l
        m n o p

    so the 3D translation components live in (d, h, l).  Flattened element
    access via __getitem__/__setitem__ uses column-major (OpenGL-style)
    ordering.
    """
    __slots__ = list('abcdefghijklmnop')
    def __init__(self):
        # A freshly constructed matrix is the identity transform.
        self.identity()
    def __copy__(self):
        M = Matrix4()
        M.a = self.a
        M.b = self.b
        M.c = self.c
        M.d = self.d
        M.e = self.e
        M.f = self.f
        M.g = self.g
        M.h = self.h
        M.i = self.i
        M.j = self.j
        M.k = self.k
        M.l = self.l
        M.m = self.m
        M.n = self.n
        M.o = self.o
        M.p = self.p
        return M
    copy = __copy__
    def __repr__(self):
        return ('Matrix4([% 8.2f % 8.2f % 8.2f % 8.2f\n' \
                '         % 8.2f % 8.2f % 8.2f % 8.2f\n' \
                '         % 8.2f % 8.2f % 8.2f % 8.2f\n' \
                '         % 8.2f % 8.2f % 8.2f % 8.2f])') \
                % (self.a, self.b, self.c, self.d,
                   self.e, self.f, self.g, self.h,
                   self.i, self.j, self.k, self.l,
                   self.m, self.n, self.o, self.p)
    def __getitem__(self, key):
        # Column-major flattening (matches OpenGL element order).
        return [self.a, self.e, self.i, self.m,
                self.b, self.f, self.j, self.n,
                self.c, self.g, self.k, self.o,
                self.d, self.h, self.l, self.p][key]
    def __setitem__(self, key, value):
        # Round-trip through the column-major list form so both scalar and
        # slice assignment work.
        L = self[:]
        L[key] = value
        (self.a, self.e, self.i, self.m,
         self.b, self.f, self.j, self.n,
         self.c, self.g, self.k, self.o,
         self.d, self.h, self.l, self.p) = L
    def __mul__(self, other):
        """Matrix * Matrix/Point3/Vector3/other -> new object.

        Vector3 multiplication ignores the translation column (d, h, l);
        Point3 multiplication applies it.  Neither performs the homogeneous
        (perspective) divide -- use transform() for that.  Any other operand
        is copied and asked to transform itself via _apply_transform.
        """
        if isinstance(other, Matrix4):
            # Cache attributes in local vars (see Matrix3.__mul__).
            Aa = self.a
            Ab = self.b
            Ac = self.c
            Ad = self.d
            Ae = self.e
            Af = self.f
            Ag = self.g
            Ah = self.h
            Ai = self.i
            Aj = self.j
            Ak = self.k
            Al = self.l
            Am = self.m
            An = self.n
            Ao = self.o
            Ap = self.p
            Ba = other.a
            Bb = other.b
            Bc = other.c
            Bd = other.d
            Be = other.e
            Bf = other.f
            Bg = other.g
            Bh = other.h
            Bi = other.i
            Bj = other.j
            Bk = other.k
            Bl = other.l
            Bm = other.m
            Bn = other.n
            Bo = other.o
            Bp = other.p
            C = Matrix4()
            C.a = Aa * Ba + Ab * Be + Ac * Bi + Ad * Bm
            C.b = Aa * Bb + Ab * Bf + Ac * Bj + Ad * Bn
            C.c = Aa * Bc + Ab * Bg + Ac * Bk + Ad * Bo
            C.d = Aa * Bd + Ab * Bh + Ac * Bl + Ad * Bp
            C.e = Ae * Ba + Af * Be + Ag * Bi + Ah * Bm
            C.f = Ae * Bb + Af * Bf + Ag * Bj + Ah * Bn
            C.g = Ae * Bc + Af * Bg + Ag * Bk + Ah * Bo
            C.h = Ae * Bd + Af * Bh + Ag * Bl + Ah * Bp
            C.i = Ai * Ba + Aj * Be + Ak * Bi + Al * Bm
            C.j = Ai * Bb + Aj * Bf + Ak * Bj + Al * Bn
            C.k = Ai * Bc + Aj * Bg + Ak * Bk + Al * Bo
            C.l = Ai * Bd + Aj * Bh + Ak * Bl + Al * Bp
            C.m = Am * Ba + An * Be + Ao * Bi + Ap * Bm
            C.n = Am * Bb + An * Bf + Ao * Bj + Ap * Bn
            C.o = Am * Bc + An * Bg + Ao * Bk + Ap * Bo
            C.p = Am * Bd + An * Bh + Ao * Bl + Ap * Bp
            return C
        elif isinstance(other, Point3):
            A = self
            B = other
            P = Point3(0, 0, 0)
            P.x = A.a * B.x + A.b * B.y + A.c * B.z + A.d
            P.y = A.e * B.x + A.f * B.y + A.g * B.z + A.h
            P.z = A.i * B.x + A.j * B.y + A.k * B.z + A.l
            return P
        elif isinstance(other, Vector3):
            A = self
            B = other
            V = Vector3(0, 0, 0)
            V.x = A.a * B.x + A.b * B.y + A.c * B.z
            V.y = A.e * B.x + A.f * B.y + A.g * B.z
            V.z = A.i * B.x + A.j * B.y + A.k * B.z
            return V
        else:
            # Geometry primitives etc.: transform a copy, leave `other` intact.
            other = other.copy()
            other._apply_transform(self)
            return other
    def __imul__(self, other):
        """In-place matrix product: self = self * other."""
        assert isinstance(other, Matrix4)
        # Cache attributes in local vars (see Matrix3.__mul__).
        Aa = self.a
        Ab = self.b
        Ac = self.c
        Ad = self.d
        Ae = self.e
        Af = self.f
        Ag = self.g
        Ah = self.h
        Ai = self.i
        Aj = self.j
        Ak = self.k
        Al = self.l
        Am = self.m
        An = self.n
        Ao = self.o
        Ap = self.p
        Ba = other.a
        Bb = other.b
        Bc = other.c
        Bd = other.d
        Be = other.e
        Bf = other.f
        Bg = other.g
        Bh = other.h
        Bi = other.i
        Bj = other.j
        Bk = other.k
        Bl = other.l
        Bm = other.m
        Bn = other.n
        Bo = other.o
        Bp = other.p
        self.a = Aa * Ba + Ab * Be + Ac * Bi + Ad * Bm
        self.b = Aa * Bb + Ab * Bf + Ac * Bj + Ad * Bn
        self.c = Aa * Bc + Ab * Bg + Ac * Bk + Ad * Bo
        self.d = Aa * Bd + Ab * Bh + Ac * Bl + Ad * Bp
        self.e = Ae * Ba + Af * Be + Ag * Bi + Ah * Bm
        self.f = Ae * Bb + Af * Bf + Ag * Bj + Ah * Bn
        self.g = Ae * Bc + Af * Bg + Ag * Bk + Ah * Bo
        self.h = Ae * Bd + Af * Bh + Ag * Bl + Ah * Bp
        self.i = Ai * Ba + Aj * Be + Ak * Bi + Al * Bm
        self.j = Ai * Bb + Aj * Bf + Ak * Bj + Al * Bn
        self.k = Ai * Bc + Aj * Bg + Ak * Bk + Al * Bo
        self.l = Ai * Bd + Aj * Bh + Ak * Bl + Al * Bp
        self.m = Am * Ba + An * Be + Ao * Bi + Ap * Bm
        self.n = Am * Bb + An * Bf + Ao * Bj + Ap * Bn
        self.o = Am * Bc + An * Bg + Ao * Bk + Ap * Bo
        self.p = Am * Bd + An * Bh + Ao * Bl + Ap * Bp
        return self
    def transform(self, other):
        """Apply the full projective transform to a point-like object.

        Unlike __mul__, this performs the homogeneous (perspective) divide
        when the resulting w component is non-zero.
        """
        A = self
        B = other
        P = Point3(0, 0, 0)
        P.x = A.a * B.x + A.b * B.y + A.c * B.z + A.d
        P.y = A.e * B.x + A.f * B.y + A.g * B.z + A.h
        P.z = A.i * B.x + A.j * B.y + A.k * B.z + A.l
        w = A.m * B.x + A.n * B.y + A.o * B.z + A.p
        if w != 0:
            P.x /= w
            P.y /= w
            P.z /= w
        return P
    def identity(self):
        """Reset to the identity transform; returns self for chaining."""
        self.a = self.f = self.k = self.p = 1.
        self.b = self.c = self.d = self.e = self.g = self.h = \
            self.i = self.j = self.l = self.m = self.n = self.o = 0
        return self
    def scale(self, x, y, z):
        # Post-multiply (self = self * S); returns self for chaining.
        self *= Matrix4.new_scale(x, y, z)
        return self
    def translate(self, x, y, z):
        # Post-multiply by a translation; returns self for chaining.
        self *= Matrix4.new_translate(x, y, z)
        return self
    def rotatex(self, angle):
        # Post-multiply by a rotation about the x axis (radians).
        self *= Matrix4.new_rotatex(angle)
        return self
    def rotatey(self, angle):
        # Post-multiply by a rotation about the y axis (radians).
        self *= Matrix4.new_rotatey(angle)
        return self
    def rotatez(self, angle):
        # Post-multiply by a rotation about the z axis (radians).
        self *= Matrix4.new_rotatez(angle)
        return self
    def rotate_axis(self, angle, axis):
        # Post-multiply by a rotation about an arbitrary axis (Vector3).
        self *= Matrix4.new_rotate_axis(angle, axis)
        return self
    def rotate_euler(self, heading, attitude, bank):
        # Post-multiply by a heading/attitude/bank Euler rotation.
        self *= Matrix4.new_rotate_euler(heading, attitude, bank)
        return self
    def rotate_triple_axis(self, x, y, z):
        # Post-multiply by the basis rotation built from vectors x, y, z.
        self *= Matrix4.new_rotate_triple_axis(x, y, z)
        return self
    def transpose(self):
        """Transpose in place via a simultaneous tuple assignment."""
        (self.a, self.e, self.i, self.m,
         self.b, self.f, self.j, self.n,
         self.c, self.g, self.k, self.o,
         self.d, self.h, self.l, self.p) = \
        (self.a, self.b, self.c, self.d,
         self.e, self.f, self.g, self.h,
         self.i, self.j, self.k, self.l,
         self.m, self.n, self.o, self.p)
    def transposed(self):
        """Return a transposed copy, leaving self unchanged."""
        M = self.copy()
        M.transpose()
        return M
    # Static constructors
    def new(cls, *values):
        """Build a matrix from 16 values given in column-major order."""
        M = cls()
        M[:] = values
        return M
    new = classmethod(new)
    def new_identity(cls):
        """Return a new identity matrix."""
        self = cls()
        return self
    new_identity = classmethod(new_identity)
    def new_scale(cls, x, y, z):
        """Return a matrix scaling by (x, y, z)."""
        self = cls()
        self.a = x
        self.f = y
        self.k = z
        return self
    new_scale = classmethod(new_scale)
    def new_translate(cls, x, y, z):
        """Return a matrix translating by (x, y, z)."""
        self = cls()
        self.d = x
        self.h = y
        self.l = z
        return self
    new_translate = classmethod(new_translate)
    def new_rotatex(cls, angle):
        """Return a rotation about the x axis by `angle` radians."""
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        self.f = self.k = c
        self.g = -s
        self.j = s
        return self
    new_rotatex = classmethod(new_rotatex)
    def new_rotatey(cls, angle):
        """Return a rotation about the y axis by `angle` radians."""
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        self.a = self.k = c
        self.c = s
        self.i = -s
        return self
    new_rotatey = classmethod(new_rotatey)
    def new_rotatez(cls, angle):
        """Return a rotation about the z axis by `angle` radians."""
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        self.a = self.f = c
        self.b = -s
        self.e = s
        return self
    new_rotatez = classmethod(new_rotatez)
    def new_rotate_axis(cls, angle, axis):
        """Return a rotation of `angle` radians about the given axis.

        The axis is normalized internally.
        """
        assert(isinstance(axis, Vector3))
        vector = axis.normalized()
        x = vector.x
        y = vector.y
        z = vector.z
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        c1 = 1. - c
        # from the glRotate man page
        self.a = x * x * c1 + c
        self.b = x * y * c1 - z * s
        self.c = x * z * c1 + y * s
        self.e = y * x * c1 + z * s
        self.f = y * y * c1 + c
        self.g = y * z * c1 - x * s
        self.i = x * z * c1 - y * s
        self.j = y * z * c1 + x * s
        self.k = z * z * c1 + c
        return self
    new_rotate_axis = classmethod(new_rotate_axis)
    def new_rotate_euler(cls, heading, attitude, bank):
        """Return a rotation from heading/attitude/bank Euler angles (radians)."""
        # from http://www.euclideanspace.com/
        ch = math.cos(heading)
        sh = math.sin(heading)
        ca = math.cos(attitude)
        sa = math.sin(attitude)
        cb = math.cos(bank)
        sb = math.sin(bank)
        self = cls()
        self.a = ch * ca
        self.b = sh * sb - ch * sa * cb
        self.c = ch * sa * sb + sh * cb
        self.e = sa
        self.f = ca * cb
        self.g = -ca * sb
        self.i = -sh * ca
        self.j = sh * sa * cb + ch * sb
        self.k = -sh * sa * sb + ch * cb
        return self
    new_rotate_euler = classmethod(new_rotate_euler)
    def new_rotate_triple_axis(cls, x, y, z):
        """Return the rotation whose columns are the basis vectors x, y, z."""
        m = cls()
        m.a, m.b, m.c = x.x, y.x, z.x
        m.e, m.f, m.g = x.y, y.y, z.y
        m.i, m.j, m.k = x.z, y.z, z.z
        return m
    new_rotate_triple_axis = classmethod(new_rotate_triple_axis)
    def new_look_at(cls, eye, at, up):
        """Return a transform positioned at `eye` looking toward `at`.

        NOTE(review): the rotation columns plus the eye translation make
        this a camera-to-world transform (the inverse of a gluLookAt-style
        view matrix) -- confirm callers expect that orientation.
        """
        z = (eye - at).normalized()
        x = up.cross(z).normalized()
        y = z.cross(x)
        m = cls.new_rotate_triple_axis(x, y, z)
        m.d, m.h, m.l = eye.x, eye.y, eye.z
        return m
    new_look_at = classmethod(new_look_at)
    def new_perspective(cls, fov_y, aspect, near, far):
        """Return a perspective projection (fov_y in radians), per gluPerspective."""
        # from the gluPerspective man page
        f = 1 / math.tan(fov_y / 2)
        self = cls()
        assert near != 0.0 and near != far
        self.a = f / aspect
        self.f = f
        self.k = (far + near) / (near - far)
        self.l = 2 * far * near / (near - far)
        self.o = -1
        self.p = 0
        return self
    new_perspective = classmethod(new_perspective)
    def determinant(self):
        """Return the determinant (Laplace expansion by complementary 2x2 minors)."""
        return ((self.a * self.f - self.e * self.b)
              * (self.k * self.p - self.o * self.l)
              - (self.a * self.j - self.i * self.b)
              * (self.g * self.p - self.o * self.h)
              + (self.a * self.n - self.m * self.b)
              * (self.g * self.l - self.k * self.h)
              + (self.e * self.j - self.i * self.f)
              * (self.c * self.p - self.o * self.d)
              - (self.e * self.n - self.m * self.f)
              * (self.c * self.l - self.k * self.d)
              + (self.i * self.n - self.m * self.j)
              * (self.c * self.h - self.g * self.d))
    def inverse(self):
        """Return the inverse as a new Matrix4 (classical adjoint method).

        Near-singular matrices (|det| < 0.001) yield the identity instead
        of raising.
        """
        tmp = Matrix4()
        d = self.determinant();
        if abs(d) < 0.001:
            # No inverse, return identity
            return tmp
        else:
            d = 1.0 / d;
            # Cofactor (adjugate) expansion, scaled by 1/det.
            tmp.a = d * (self.f * (self.k * self.p - self.o * self.l) + self.j * (self.o * self.h - self.g * self.p) + self.n * (self.g * self.l - self.k * self.h));
            tmp.e = d * (self.g * (self.i * self.p - self.m * self.l) + self.k * (self.m * self.h - self.e * self.p) + self.o * (self.e * self.l - self.i * self.h));
            tmp.i = d * (self.h * (self.i * self.n - self.m * self.j) + self.l * (self.m * self.f - self.e * self.n) + self.p * (self.e * self.j - self.i * self.f));
            tmp.m = d * (self.e * (self.n * self.k - self.j * self.o) + self.i * (self.f * self.o - self.n * self.g) + self.m * (self.j * self.g - self.f * self.k));
            tmp.b = d * (self.j * (self.c * self.p - self.o * self.d) + self.n * (self.k * self.d - self.c * self.l) + self.b * (self.o * self.l - self.k * self.p));
            tmp.f = d * (self.k * (self.a * self.p - self.m * self.d) + self.o * (self.i * self.d - self.a * self.l) + self.c * (self.m * self.l - self.i * self.p));
            tmp.j = d * (self.l * (self.a * self.n - self.m * self.b) + self.p * (self.i * self.b - self.a * self.j) + self.d * (self.m * self.j - self.i * self.n));
            tmp.n = d * (self.i * (self.n * self.c - self.b * self.o) + self.m * (self.b * self.k - self.j * self.c) + self.a * (self.j * self.o - self.n * self.k));
            tmp.c = d * (self.n * (self.c * self.h - self.g * self.d) + self.b * (self.g * self.p - self.o * self.h) + self.f * (self.o * self.d - self.c * self.p));
            tmp.g = d * (self.o * (self.a * self.h - self.e * self.d) + self.c * (self.e * self.p - self.m * self.h) + self.g * (self.m * self.d - self.a * self.p));
            tmp.k = d * (self.p * (self.a * self.f - self.e * self.b) + self.d * (self.e * self.n - self.m * self.f) + self.h * (self.m * self.b - self.a * self.n));
            tmp.o = d * (self.m * (self.f * self.c - self.b * self.g) + self.a * (self.n * self.g - self.f * self.o) + self.e * (self.b * self.o - self.n * self.c));
            tmp.d = d * (self.b * (self.k * self.h - self.g * self.l) + self.f * (self.c * self.l - self.k * self.d) + self.j * (self.g * self.d - self.c * self.h));
            tmp.h = d * (self.c * (self.i * self.h - self.e * self.l) + self.g * (self.a * self.l - self.i * self.d) + self.k * (self.e * self.d - self.a * self.h));
            tmp.l = d * (self.d * (self.i * self.f - self.e * self.j) + self.h * (self.a * self.j - self.i * self.b) + self.l * (self.e * self.b - self.a * self.f));
            tmp.p = d * (self.a * (self.f * self.k - self.j * self.g) + self.e * (self.j * self.c - self.b * self.k) + self.i * (self.b * self.g - self.f * self.c));
        return tmp;
class Quaternion:
    # All methods and naming conventions based off
    # http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
    # w is the real part, (x, y, z) are the imaginary parts
    """A rotation quaternion: w is the real part, (x, y, z) the imaginary."""
    __slots__ = ['w', 'x', 'y', 'z']
    def __init__(self, w=1, x=0, y=0, z=0):
        # Defaults to the identity rotation.
        self.w = w
        self.x = x
        self.y = y
        self.z = z
    def __copy__(self):
        Q = Quaternion()
        Q.w = self.w
        Q.x = self.x
        Q.y = self.y
        Q.z = self.z
        return Q
    copy = __copy__
    def __repr__(self):
        return 'Quaternion(real=%.2f, imag=<%.2f, %.2f, %.2f>)' % \
            (self.w, self.x, self.y, self.z)
    def __mul__(self, other):
        """Quaternion * Quaternion composes rotations; Quaternion * Vector3
        rotates the vector; any other operand is copied and transformed via
        its _apply_transform hook."""
        if isinstance(other, Quaternion):
            Ax = self.x
            Ay = self.y
            Az = self.z
            Aw = self.w
            Bx = other.x
            By = other.y
            Bz = other.z
            Bw = other.w
            Q = Quaternion()
            Q.x = Ax * Bw + Ay * Bz - Az * By + Aw * Bx
            Q.y = -Ax * Bz + Ay * Bw + Az * Bx + Aw * By
            Q.z = Ax * By - Ay * Bx + Az * Bw + Aw * Bz
            Q.w = -Ax * Bx - Ay * By - Az * Bz + Aw * Bw
            return Q
        elif isinstance(other, Vector3):
            # Expanded form of the rotation q * v * q'; result has the same
            # class as `other` (Vector3 or Point3).
            # NOTE(review): the expansion is exact for unit quaternions --
            # confirm callers normalize before rotating vectors.
            w = self.w
            x = self.x
            y = self.y
            z = self.z
            Vx = other.x
            Vy = other.y
            Vz = other.z
            ww = w * w
            w2 = w * 2
            wx2 = w2 * x
            wy2 = w2 * y
            wz2 = w2 * z
            xx = x * x
            x2 = x * 2
            xy2 = x2 * y
            xz2 = x2 * z
            yy = y * y
            yz2 = 2 * y * z
            zz = z * z
            return other.__class__(\
               ww * Vx + wy2 * Vz - wz2 * Vy + \
               xx * Vx + xy2 * Vy + xz2 * Vz - \
               zz * Vx - yy * Vx,
               xy2 * Vx + yy * Vy + yz2 * Vz + \
               wz2 * Vx - zz * Vy + ww * Vy - \
               wx2 * Vz - xx * Vy,
               xz2 * Vx + yz2 * Vy + \
               zz * Vz - wy2 * Vx - yy * Vz + \
               wx2 * Vy - xx * Vz + ww * Vz)
        else:
            other = other.copy()
            other._apply_transform(self)
            return other
    def __imul__(self, other):
        """In-place quaternion product: self = self * other."""
        assert isinstance(other, Quaternion)
        Ax = self.x
        Ay = self.y
        Az = self.z
        Aw = self.w
        Bx = other.x
        By = other.y
        Bz = other.z
        Bw = other.w
        self.x = Ax * Bw + Ay * Bz - Az * By + Aw * Bx
        self.y = -Ax * Bz + Ay * Bw + Az * Bx + Aw * By
        self.z = Ax * By - Ay * Bx + Az * Bw + Aw * Bz
        self.w = -Ax * Bx - Ay * By - Az * Bz + Aw * Bw
        return self
    def __abs__(self):
        """Return the quaternion's Euclidean norm."""
        return math.sqrt(self.w ** 2 + \
                         self.x ** 2 + \
                         self.y ** 2 + \
                         self.z ** 2)
    magnitude = __abs__
    def magnitude_squared(self):
        """Return the squared norm (avoids the sqrt of magnitude())."""
        return self.w ** 2 + \
               self.x ** 2 + \
               self.y ** 2 + \
               self.z ** 2
    def identity(self):
        """Reset to the identity rotation; returns self for chaining."""
        self.w = 1
        self.x = 0
        self.y = 0
        self.z = 0
        return self
    def rotate_axis(self, angle, axis):
        # Post-multiply by an axis/angle rotation; returns self.
        self *= Quaternion.new_rotate_axis(angle, axis)
        return self
    def rotate_euler(self, heading, attitude, bank):
        # Post-multiply by an Euler rotation; returns self.
        self *= Quaternion.new_rotate_euler(heading, attitude, bank)
        return self
    def rotate_matrix(self, m):
        # Post-multiply by the rotation extracted from matrix m; returns self.
        self *= Quaternion.new_rotate_matrix(m)
        return self
    #def inverse(self):
    #don't do this as calling conjugated on the normalized quaternion is the same thing
    def conjugated(self):
        """Return the conjugate (imaginary part negated).

        For unit quaternions this equals the inverse rotation.
        """
        Q = Quaternion()
        Q.w = self.w
        Q.x = -self.x
        Q.y = -self.y
        Q.z = -self.z
        return Q
    def normalize(self):
        """Scale to unit norm in place (no-op at zero); returns self."""
        d = self.magnitude()
        if d != 0:
            self.w /= d
            self.x /= d
            self.y /= d
            self.z /= d
        return self
    def normalized(self):
        """Return a unit-norm copy (plain copy for the zero quaternion)."""
        d = self.magnitude()
        if d != 0:
            Q = Quaternion()
            Q.w = self.w / d
            Q.x = self.x / d
            Q.y = self.y / d
            Q.z = self.z / d
            return Q
        else:
            return self.copy()
    def get_angle_axis(self):
        """Return (angle, axis) of the rotation.

        Normalizes first when w > 1; returns an arbitrary x axis when the
        rotation angle is (near) zero.
        """
        if self.w > 1:
            self = self.normalized()
        angle = 2 * math.acos(self.w)
        s = math.sqrt(1 - self.w ** 2)
        if s < 0.001:
            return angle, Vector3(1, 0, 0)
        else:
            return angle, Vector3(self.x / s, self.y / s, self.z / s)
    def get_euler(self):
        """Return (heading, attitude, bank) Euler angles in radians.

        The t tests guard the gimbal-lock singularities at attitude
        = +/- pi/2 (per euclideanspace.com).
        """
        t = self.x * self.y + self.z * self.w
        if t > 0.4999:
            heading = 2 * math.atan2(self.x, self.w)
            attitude = math.pi / 2
            bank = 0
        elif t < -0.4999:
            heading = -2 * math.atan2(self.x, self.w)
            attitude = -math.pi / 2
            bank = 0
        else:
            sqx = self.x ** 2
            sqy = self.y ** 2
            sqz = self.z ** 2
            heading = math.atan2(2 * self.y * self.w - 2 * self.x * self.z,
                                 1 - 2 * sqy - 2 * sqz)
            attitude = math.asin(2 * t)
            bank = math.atan2(2 * self.x * self.w - 2 * self.y * self.z,
                              1 - 2 * sqx - 2 * sqz)
        return heading, attitude, bank
    def get_bank(self):
        """Return only the bank Euler angle (0 at the singularities)."""
        t = self.x * self.y + self.z * self.w
        if t > 0.4999:
            bank = 0
        elif t < -0.4999:
            bank = 0
        else:
            sqx = self.x ** 2
            sqy = self.y ** 2
            sqz = self.z ** 2
            bank = math.atan2(2 * self.x * self.w - 2 * self.y * self.z,
                              1 - 2 * sqx - 2 * sqz)
        return bank
    def get_matrix(self):
        """Return the equivalent rotation as a Matrix4.

        NOTE(review): the standard conversion assumes a unit quaternion --
        confirm callers normalize first.
        """
        xx = self.x ** 2
        xy = self.x * self.y
        xz = self.x * self.z
        xw = self.x * self.w
        yy = self.y ** 2
        yz = self.y * self.z
        yw = self.y * self.w
        zz = self.z ** 2
        zw = self.z * self.w
        M = Matrix4()
        M.a = 1 - 2 * (yy + zz)
        M.b = 2 * (xy - zw)
        M.c = 2 * (xz + yw)
        M.e = 2 * (xy + zw)
        M.f = 1 - 2 * (xx + zz)
        M.g = 2 * (yz - xw)
        M.i = 2 * (xz - yw)
        M.j = 2 * (yz + xw)
        M.k = 1 - 2 * (xx + yy)
        return M
    # Static constructors
    def new_identity(cls):
        """Return a new identity quaternion."""
        return cls()
    new_identity = classmethod(new_identity)
    def new_rotate_axis(cls, angle, axis):
        """Return the rotation of `angle` radians about unit `axis`."""
        assert(isinstance(axis, Vector3))
        assert(abs(axis.normalized().magnitude_squared()-1)<0.001 or axis.normalized().magnitude_squared()==0)
        #axis = axis.normalized()
        s = math.sin(angle / 2)
        Q = cls()
        Q.w = math.cos(angle / 2)
        Q.x = axis.x * s
        Q.y = axis.y * s
        Q.z = axis.z * s
        return Q
    new_rotate_axis = classmethod(new_rotate_axis)
    def new_rotate_euler(cls, heading, attitude, bank):
        """Return the rotation for heading/attitude/bank Euler angles (radians)."""
        Q = cls()
        c1 = math.cos(heading / 2)
        s1 = math.sin(heading / 2)
        c2 = math.cos(attitude / 2)
        s2 = math.sin(attitude / 2)
        c3 = math.cos(bank / 2)
        s3 = math.sin(bank / 2)
        Q.w = c1 * c2 * c3 - s1 * s2 * s3
        Q.x = s1 * s2 * c3 + c1 * c2 * s3
        Q.y = s1 * c2 * c3 + c1 * s2 * s3
        Q.z = c1 * s2 * c3 - s1 * c2 * s3
        return Q
    new_rotate_euler = classmethod(new_rotate_euler)
    def new_rotate_matrix(cls, m):
        """Extract a quaternion from rotation matrix `m`.

        Branches on the trace / dominant diagonal element for numerical
        stability (Shepperd-style).  `m` is indexed through
        Matrix4.__getitem__, which flattens column-major.
        """
        if m[0*4 + 0] + m[1*4 + 1] + m[2*4 + 2] > 0.00000001:
            t = m[0*4 + 0] + m[1*4 + 1] + m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                s*t,
                (m[1*4 + 2] - m[2*4 + 1])*s,
                (m[2*4 + 0] - m[0*4 + 2])*s,
                (m[0*4 + 1] - m[1*4 + 0])*s
                )
        elif m[0*4 + 0] > m[1*4 + 1] and m[0*4 + 0] > m[2*4 + 2]:
            t = m[0*4 + 0] - m[1*4 + 1] - m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[1*4 + 2] - m[2*4 + 1])*s,
                s*t,
                (m[0*4 + 1] + m[1*4 + 0])*s,
                (m[2*4 + 0] + m[0*4 + 2])*s
                )
        elif m[1*4 + 1] > m[2*4 + 2]:
            t = -m[0*4 + 0] + m[1*4 + 1] - m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[2*4 + 0] - m[0*4 + 2])*s,
                (m[0*4 + 1] + m[1*4 + 0])*s,
                s*t,
                (m[1*4 + 2] + m[2*4 + 1])*s
                )
        else:
            t = -m[0*4 + 0] - m[1*4 + 1] + m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[0*4 + 1] - m[1*4 + 0])*s,
                (m[2*4 + 0] + m[0*4 + 2])*s,
                (m[1*4 + 2] + m[2*4 + 1])*s,
                s*t
                )
    new_rotate_matrix = classmethod(new_rotate_matrix)
    def new_interpolate(cls, q1, q2, t):
        """Spherical linear interpolation (slerp) between q1 and q2 at t.

        Falls back to copying q2 for tiny angles and to simple averaging
        when sin(theta) is near zero.
        NOTE(review): for shortest-path slerp the usual correction when
        costheta < 0 is to negate all four components of q1; this code
        calls conjugated(), which negates only the imaginary part --
        confirm that is intended.
        """
        assert isinstance(q1, Quaternion) and isinstance(q2, Quaternion)
        Q = cls()
        costheta = q1.w * q2.w + q1.x * q2.x + q1.y * q2.y + q1.z * q2.z
        if costheta < 0.:
            costheta = -costheta
            q1 = q1.conjugated()
        elif costheta > 1:
            costheta = 1
        theta = math.acos(costheta)
        if abs(theta) < 0.01:
            Q.w = q2.w
            Q.x = q2.x
            Q.y = q2.y
            Q.z = q2.z
            return Q
        sintheta = math.sqrt(1.0 - costheta * costheta)
        if abs(sintheta) < 0.01:
            Q.w = (q1.w + q2.w) * 0.5
            Q.x = (q1.x + q2.x) * 0.5
            Q.y = (q1.y + q2.y) * 0.5
            Q.z = (q1.z + q2.z) * 0.5
            return Q
        ratio1 = math.sin((1 - t) * theta) / sintheta
        ratio2 = math.sin(t * theta) / sintheta
        Q.w = q1.w * ratio1 + q2.w * ratio2
        Q.x = q1.x * ratio1 + q2.x * ratio2
        Q.y = q1.y * ratio1 + q2.y * ratio2
        Q.z = q1.z * ratio1 + q2.z * ratio2
        return Q
    new_interpolate = classmethod(new_interpolate)
# Geometry
# Much maths thanks to Paul Bourke, http://astronomy.swin.edu.au/~pbourke
# ---------------------------------------------------------------------------
class Geometry:
    """Abstract base providing double-dispatch intersect/connect/distance.

    Subclasses override the _intersect_*/_connect_* hooks they support;
    unsupported pairings fall through to the *_unimplemented handlers
    below and raise AttributeError.  (Python 2 raise syntax.)
    """
    def _connect_unimplemented(self, other):
        raise AttributeError, 'Cannot connect %s to %s' % \
            (self.__class__, other.__class__)
    def _intersect_unimplemented(self, other):
        raise AttributeError, 'Cannot intersect %s and %s' % \
            (self.__class__, other.__class__)
    # 2D hooks
    _intersect_point2 = _intersect_unimplemented
    _intersect_line2 = _intersect_unimplemented
    _intersect_circle = _intersect_unimplemented
    _connect_point2 = _connect_unimplemented
    _connect_line2 = _connect_unimplemented
    _connect_circle = _connect_unimplemented
    # 3D hooks
    _intersect_point3 = _intersect_unimplemented
    _intersect_line3 = _intersect_unimplemented
    _intersect_sphere = _intersect_unimplemented
    _intersect_plane = _intersect_unimplemented
    _connect_point3 = _connect_unimplemented
    _connect_line3 = _connect_unimplemented
    _connect_sphere = _connect_unimplemented
    _connect_plane = _connect_unimplemented
    def intersect(self, other):
        raise NotImplementedError
    def connect(self, other):
        raise NotImplementedError
    def distance(self, other):
        """Return the length of the shortest connection to `other`."""
        c = self.connect(other)
        if c:
            return c.length
        # Falsy connection: treat as touching/containing.
        return 0.0
def _intersect_point2_circle(P, C):
    """Return True when point P lies inside or on circle C."""
    distance = abs(P - C.c)
    return distance <= C.r
def _intersect_line2_line2(A, B):
    """Intersect two 2D lines/rays/segments.

    Returns the intersection Point2, or None when the primitives are
    parallel or the intersection parameter falls outside either primitive
    (as judged by its _u_in test).
    """
    denom = B.v.y * A.v.x - B.v.x * A.v.y
    if denom == 0:
        # Parallel (or degenerate) directions: no unique intersection.
        return None
    dy = A.p.y - B.p.y
    dx = A.p.x - B.p.x
    ua = (B.v.x * dy - B.v.y * dx) / denom
    if not A._u_in(ua):
        return None
    ub = (A.v.x * dy - A.v.y * dx) / denom
    if not B._u_in(ub):
        return None
    return Point2(A.p.x + ua * A.v.x,
                  A.p.y + ua * A.v.y)
def _intersect_line2_circle(L, C):
    """Intersect line/ray/segment L with circle C.

    Returns a Point2 (tangent contact), a LineSegment2 (chord), or None
    (no intersection).
    NOTE(review): out-of-range chord parameters are clamped into [0, 1]
    rather than rejected, so for rays/segments the result is the part of
    the chord overlapping the primitive -- confirm clamping (rather than
    returning None) is the intended semantics.
    """
    # Solve |L.p + u*L.v - C.c|^2 = C.r^2 as a quadratic a*u^2 + b*u + c.
    a = L.v.magnitude_squared()
    b = 2 * (L.v.x * (L.p.x - C.c.x) + \
             L.v.y * (L.p.y - C.c.y))
    c = C.c.magnitude_squared() + \
        L.p.magnitude_squared() - \
        2 * C.c.dot(L.p) - \
        C.r ** 2
    det = b ** 2 - 4 * a * c
    if det < 0:
        return None
    sq = math.sqrt(det)
    u1 = (-b + sq) / (2 * a)
    u2 = (-b - sq) / (2 * a)
    if not L._u_in(u1):
        u1 = max(min(u1, 1.0), 0.0)
    if not L._u_in(u2):
        u2 = max(min(u2, 1.0), 0.0)
    # Tangent
    if u1 == u2:
        return Point2(L.p.x + u1 * L.v.x,
                      L.p.y + u1 * L.v.y)
    return LineSegment2(Point2(L.p.x + u1 * L.v.x,
                               L.p.y + u1 * L.v.y),
                        Point2(L.p.x + u2 * L.v.x,
                               L.p.y + u2 * L.v.y))
def _connect_point2_line2(P, L):
    """Return the shortest LineSegment2 from point P to line/ray/segment L."""
    vlen2 = L.v.magnitude_squared()
    assert vlen2 != 0
    # Project P onto the line, then clamp the parameter for rays/segments.
    u = ((P.x - L.p.x) * L.v.x
         + (P.y - L.p.y) * L.v.y) / vlen2
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    foot = Point2(L.p.x + u * L.v.x,
                  L.p.y + u * L.v.y)
    return LineSegment2(P, foot)
def _connect_point2_circle(P, C):
    """Return the LineSegment2 from point P to the nearest point on circle C."""
    radial = P - C.c
    radial.normalize()
    radial *= C.r
    return LineSegment2(P, Point2(C.c.x + radial.x, C.c.y + radial.y))
def _connect_line2_line2(A, B):
    """Return the shortest LineSegment2 connecting lines/rays/segments A and B.

    The returned segment runs from a point on A to a point on B.
    """
    d = B.v.y * A.v.x - B.v.x * A.v.y
    if d == 0:
        # Parallel, connect an endpoint with a line
        if isinstance(B, Ray2) or isinstance(B, LineSegment2):
            # Bug fix: the original unpacked the LineSegment2 returned by
            # _connect_point2_line2 into two points (Line2 is not iterable,
            # so that raised TypeError) and would then have returned a tuple
            # instead of a segment.  Reverse the segment with _swap() so the
            # endpoints are ordered (on A, on B), matching the 3D analogue
            # _connect_line3_line3.
            return _connect_point2_line2(B.p, A)._swap()
        # No endpoint (or endpoint is on A), possibly choose arbitrary point
        # on line.
        return _connect_point2_line2(A.p, B)
    dy = A.p.y - B.p.y
    dx = A.p.x - B.p.x
    ua = (B.v.x * dy - B.v.y * dx) / d
    if not A._u_in(ua):
        ua = max(min(ua, 1.0), 0.0)
    ub = (A.v.x * dy - A.v.y * dx) / d
    if not B._u_in(ub):
        ub = max(min(ub, 1.0), 0.0)
    return LineSegment2(Point2(A.p.x + ua * A.v.x, A.p.y + ua * A.v.y),
                        Point2(B.p.x + ub * B.v.x, B.p.y + ub * B.v.y))
def _connect_circle_line2(C, L):
    """Return the shortest LineSegment2 from circle C's boundary to line L."""
    vlen2 = L.v.magnitude_squared()
    assert vlen2 != 0
    # Project the circle centre onto the line, clamping for rays/segments.
    u = ((C.c.x - L.p.x) * L.v.x + (C.c.y - L.p.y) * L.v.y) / vlen2
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    nearest = Point2(L.p.x + u * L.v.x, L.p.y + u * L.v.y)
    radial = (nearest - C.c)
    radial.normalize()
    radial *= C.r
    return LineSegment2(Point2(C.c.x + radial.x, C.c.y + radial.y), nearest)
def _connect_circle_circle(A, B):
    """Return the LineSegment2 joining the facing boundary points of A and B."""
    direction = B.c - A.c
    direction.normalize()
    start = Point2(A.c.x + direction.x * A.r, A.c.y + direction.y * A.r)
    end = Point2(B.c.x - direction.x * B.r, B.c.y - direction.y * B.r)
    return LineSegment2(start, end)
class Point2(Vector2, Geometry):
    """A 2D point; inherits all vector arithmetic from Vector2."""
    def __repr__(self):
        return 'Point2(%.2f, %.2f)' % (self.x, self.y)
    def intersect(self, other):
        """Double-dispatch intersection test."""
        return other._intersect_point2(self)
    def _intersect_circle(self, other):
        return _intersect_point2_circle(self, other)
    def connect(self, other):
        """Double-dispatch shortest-connection query."""
        return other._connect_point2(self)
    def _connect_point2(self, other):
        return LineSegment2(other, self)
    def _connect_line2(self, other):
        seg = _connect_point2_line2(self, other)
        # Reverse so the segment runs (other, self), per dispatch convention.
        return seg._swap() if seg else None
    def _connect_circle(self, other):
        seg = _connect_point2_circle(self, other)
        return seg._swap() if seg else None
class Line2(Geometry):
    """An infinite 2D line: the points p + u * v for all real u.

    Accepted constructor signatures:
      Line2(Point2, Vector2, float)  -- support point, direction, length
      Line2(Point2, Point2)          -- through two points
      Line2(Point2, Vector2)         -- support point and direction
      Line2(Line2)                   -- copy
    Raises AttributeError for anything else or for a zero direction.
    (Python 2 raise syntax.)
    """
    __slots__ = ['p', 'v']
    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point2) and \
                   isinstance(args[1], Vector2) and \
                   type(args[2]) == float
            self.p = args[0].copy()
            # Rescale the direction to the requested length.
            self.v = args[1] * args[2] / abs(args[1])
        elif len(args) == 2:
            if isinstance(args[0], Point2) and isinstance(args[1], Point2):
                self.p = args[0].copy()
                self.v = args[1] - args[0]
            elif isinstance(args[0], Point2) and isinstance(args[1], Vector2):
                self.p = args[0].copy()
                self.v = args[1].copy()
            else:
                raise AttributeError, '%r' % (args,)
        elif len(args) == 1:
            if isinstance(args[0], Line2):
                self.p = args[0].p.copy()
                self.v = args[0].v.copy()
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        if not self.v:
            raise AttributeError, 'Line has zero-length vector'
    def __copy__(self):
        return self.__class__(self.p, self.v)
    copy = __copy__
    def __repr__(self):
        return 'Line2(<%.2f, %.2f> + u<%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.v.x, self.v.y)
    # Endpoints of the representative segment [p, p + v].
    p1 = property(lambda self: self.p)
    p2 = property(lambda self: Point2(self.p.x + self.v.x,
                                      self.p.y + self.v.y))
    def _apply_transform(self, t):
        self.p = t * self.p
        self.v = t * self.v
    def _u_in(self, u):
        # An infinite line contains every parameter; Ray2 and LineSegment2
        # override this to restrict the range.
        return True
    def intersect(self, other):
        """Double-dispatch intersection test."""
        return other._intersect_line2(self)
    def _intersect_line2(self, other):
        return _intersect_line2_line2(self, other)
    def _intersect_circle(self, other):
        return _intersect_line2_circle(self, other)
    def connect(self, other):
        """Double-dispatch shortest-connection query."""
        return other._connect_line2(self)
    def _connect_point2(self, other):
        return _connect_point2_line2(other, self)
    def _connect_line2(self, other):
        return _connect_line2_line2(other, self)
    def _connect_circle(self, other):
        return _connect_circle_line2(other, self)
class Ray2(Line2):
    """A 2D ray: the points p + u * v with u >= 0."""
    def __repr__(self):
        return 'Ray2(<%.2f, %.2f> + u<%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.v.x, self.v.y)
    def _u_in(self, u):
        # Only non-negative parameters lie on the ray.
        return 0.0 <= u
class LineSegment2(Line2):
    """A finite 2D segment: the points p + u * v with 0 <= u <= 1."""
    def __repr__(self):
        return 'LineSegment2(<%.2f, %.2f> to <%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.x + self.v.x, self.p.y + self.v.y)
    def _u_in(self, u):
        return 0.0 <= u <= 1.0
    def __abs__(self):
        return abs(self.v)
    def magnitude_squared(self):
        return self.v.magnitude_squared()
    def _swap(self):
        # used by connect methods to switch order of points
        self.p = self.p2
        self.v *= -1
        return self
    length = property(lambda self: abs(self.v))
class Circle(Geometry):
    """A circle defined by a centre (Vector2/Point2) and a float radius."""
    __slots__ = ['c', 'r']
    def __init__(self, center, radius):
        assert isinstance(center, Vector2) and type(radius) == float
        self.c = center.copy()
        self.r = radius
    def __copy__(self):
        return self.__class__(self.c, self.r)
    copy = __copy__
    def __repr__(self):
        return 'Circle(<%.2f, %.2f>, radius=%.2f)' % \
            (self.c.x, self.c.y, self.r)
    def _apply_transform(self, t):
        # Only the centre is transformed; the radius is left unchanged.
        self.c = t * self.c
    def intersect(self, other):
        """Double-dispatch intersection test."""
        return other._intersect_circle(self)
    def _intersect_point2(self, other):
        return _intersect_point2_circle(other, self)
    def _intersect_line2(self, other):
        return _intersect_line2_circle(other, self)
    def connect(self, other):
        """Double-dispatch shortest-connection query."""
        return other._connect_circle(self)
    def _connect_point2(self, other):
        return _connect_point2_circle(other, self)
    def _connect_line2(self, other):
        seg = _connect_circle_line2(self, other)
        # Reverse so the segment runs (other, self), per dispatch convention.
        return seg._swap() if seg else None
    def _connect_circle(self, other):
        return _connect_circle_circle(other, self)
# 3D Geometry
# -------------------------------------------------------------------------
def _connect_point3_line3(P, L):
    """Return the shortest LineSegment3 from point P to line/ray/segment L."""
    vlen2 = L.v.magnitude_squared()
    assert vlen2 != 0
    # Project P onto the line, then clamp the parameter for rays/segments.
    u = ((P.x - L.p.x) * L.v.x
         + (P.y - L.p.y) * L.v.y
         + (P.z - L.p.z) * L.v.z) / vlen2
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    foot = Point3(L.p.x + u * L.v.x,
                  L.p.y + u * L.v.y,
                  L.p.z + u * L.v.z)
    return LineSegment3(P, foot)
def _connect_point3_sphere(P, S):
    """Return the LineSegment3 from point P to the nearest point on sphere S."""
    radial = P - S.c
    radial.normalize()
    radial *= S.r
    surface = Point3(S.c.x + radial.x, S.c.y + radial.y, S.c.z + radial.z)
    return LineSegment3(P, surface)
def _connect_point3_plane(p, plane):
    """Return the LineSegment3 from p to its orthogonal projection on plane.

    NOTE(review): the signed distance d is computed with the raw plane.n
    while the offset uses the normalized copy, so the result is only exact
    when plane.n is already unit length -- confirm Plane guarantees that.
    """
    n = plane.n.normalized()
    d = p.dot(plane.n) - plane.k
    foot = Point3(p.x - n.x * d, p.y - n.y * d, p.z - n.z * d)
    return LineSegment3(p, foot)
def _connect_line3_line3(A, B):
    # Shortest connecting segment between two lines/rays/segments, using the
    # classic closest-points-of-two-lines derivation; parameters are clamped
    # into each primitive's valid range afterwards.
    assert A.v and B.v
    p13 = A.p - B.p
    d1343 = p13.dot(B.v)
    d4321 = B.v.dot(A.v)
    d1321 = p13.dot(A.v)
    d4343 = B.v.magnitude_squared()
    denom = A.v.magnitude_squared() * d4343 - d4321 ** 2
    if denom == 0:
        # Parallel, connect an endpoint with a line
        if isinstance(B, Ray3) or isinstance(B, LineSegment3):
            return _connect_point3_line3(B.p, A)._swap()
        # No endpoint (or endpoint is on A), possibly choose arbitrary
        # point on line.
        return _connect_point3_line3(A.p, B)
    ua = (d1343 * d4321 - d1321 * d4343) / denom
    if not A._u_in(ua):
        ua = max(min(ua, 1.0), 0.0)
    # NOTE(review): ub is derived from the already-clamped ua and then clamped
    # itself, so for rays/segments the result is an approximation.
    ub = (d1343 + d4321 * ua) / d4343
    if not B._u_in(ub):
        ub = max(min(ub, 1.0), 0.0)
    return LineSegment3(Point3(A.p.x + ua * A.v.x,
                               A.p.y + ua * A.v.y,
                               A.p.z + ua * A.v.z),
                        Point3(B.p.x + ub * B.v.x,
                               B.p.y + ub * B.v.y,
                               B.p.z + ub * B.v.z))
def _connect_line3_plane(L, P):
    # Shortest connection from line/ray/segment L to plane P.
    # Returns None when they intersect inside L's range (nothing to connect).
    d = P.n.dot(L.v)
    if not d:
        # Parallel, choose an endpoint
        return _connect_point3_plane(L.p, P)
    u = (P.k - P.n.dot(L.p)) / d
    if not L._u_in(u):
        # intersects out of range, choose nearest endpoint
        u = max(min(u, 1.0), 0.0)
        return _connect_point3_plane(Point3(L.p.x + u * L.v.x,
                                            L.p.y + u * L.v.y,
                                            L.p.z + u * L.v.z), P)
    # Intersection
    return None
def _connect_sphere_line3(S, L):
    """Shortest connection from sphere S to line/ray/segment L.

    Returns a LineSegment3 from the sphere's surface to the closest
    (range-clamped) point on L.
    """
    denom = L.v.magnitude_squared()
    assert denom != 0
    # Line parameter of the point on L closest to the sphere's centre.
    u = ((S.c.x - L.p.x) * L.v.x +
         (S.c.y - L.p.y) * L.v.y +
         (S.c.z - L.p.z) * L.v.z) / denom
    if not L._u_in(u):
        u = min(max(u, 0.0), 1.0)
    nearest = Point3(L.p.x + u * L.v.x, L.p.y + u * L.v.y, L.p.z + u * L.v.z)
    radial = nearest - S.c
    radial.normalize()
    radial *= S.r
    surface = Point3(S.c.x + radial.x, S.c.y + radial.y, S.c.z + radial.z)
    return LineSegment3(surface, nearest)
def _connect_sphere_sphere(A, B):
    """Shortest connection between the surfaces of spheres A and B.

    Returns a LineSegment3 along the centre-to-centre axis, from A's
    surface to B's surface.
    """
    v = B.c - A.c
    v.normalize()
    # BUG FIX: the z components previously read A.c.x / B.c.x (copy-paste
    # typo), which placed both endpoints at the wrong z coordinate.
    return LineSegment3(Point3(A.c.x + v.x * A.r,
                               A.c.y + v.y * A.r,
                               A.c.z + v.z * A.r),
                        Point3(B.c.x + v.x * B.r,
                               B.c.y + v.y * B.r,
                               B.c.z + v.z * B.r))
def _connect_sphere_plane(S, P):
    """Connect sphere S's surface to plane P; None when no connection exists."""
    centre_link = _connect_point3_plane(S.c, P)
    if not centre_link:
        return None
    foot = centre_link.p2
    radial = foot - S.c
    radial.normalize()
    radial *= S.r
    surface = Point3(S.c.x + radial.x, S.c.y + radial.y, S.c.z + radial.z)
    return LineSegment3(surface, foot)
def _connect_plane_plane(A, B):
    """Connect two parallel planes; None when they intersect."""
    if not A.n.cross(B.n):
        # Parallel normals: drop a perpendicular from an arbitrary point of A.
        return _connect_point3_plane(A._get_point(), B)
    # Non-parallel planes meet, so there is nothing to connect.
    return None
def _intersect_point3_sphere(P, S):
return abs(P - S.c) <= S.r
def _intersect_line3_sphere(L, S):
    # Solve |L.p + u*L.v - S.c|^2 = S.r^2 as a quadratic a*u^2 + b*u + c = 0.
    a = L.v.magnitude_squared()
    b = 2 * (L.v.x * (L.p.x - S.c.x) + \
             L.v.y * (L.p.y - S.c.y) + \
             L.v.z * (L.p.z - S.c.z))
    c = S.c.magnitude_squared() + \
        L.p.magnitude_squared() - \
        2 * S.c.dot(L.p) - \
        S.r ** 2
    det = b ** 2 - 4 * a * c
    if det < 0:
        # No real roots: the line misses the sphere.
        return None
    sq = math.sqrt(det)
    u1 = (-b + sq) / (2 * a)
    u2 = (-b - sq) / (2 * a)
    # NOTE(review): out-of-range roots are clamped rather than rejected, so
    # for rays/segments the returned chord is truncated to the primitive.
    if not L._u_in(u1):
        u1 = max(min(u1, 1.0), 0.0)
    if not L._u_in(u2):
        u2 = max(min(u2, 1.0), 0.0)
    return LineSegment3(Point3(L.p.x + u1 * L.v.x,
                               L.p.y + u1 * L.v.y,
                               L.p.z + u1 * L.v.z),
                        Point3(L.p.x + u2 * L.v.x,
                               L.p.y + u2 * L.v.y,
                               L.p.z + u2 * L.v.z))
def _intersect_line3_plane(L, P):
    """Intersection point of line L with plane P, or None.

    None means L is parallel to P, or the hit falls outside L's valid
    parameter range (ray/segment).
    """
    denom = P.n.dot(L.v)
    if not denom:
        # The direction lies in the plane: parallel, no single hit point.
        return None
    u = (P.k - P.n.dot(L.p)) / denom
    if not L._u_in(u):
        return None
    return Point3(L.p.x + u * L.v.x,
                  L.p.y + u * L.v.y,
                  L.p.z + u * L.v.z)
def _intersect_plane_plane(A, B):
    """Line of intersection of planes A and B, or None when parallel."""
    nn_a = A.n.magnitude_squared()
    nn_b = B.n.magnitude_squared()
    dot_ab = A.n.dot(B.n)
    det = nn_a * nn_b - dot_ab ** 2
    if det == 0:
        # Parallel normals: no intersection line.
        return None
    # A point on both planes, expressed as a combination of the two normals.
    c1 = (A.k * nn_b - B.k * dot_ab) / det
    c2 = (B.k * nn_a - A.k * dot_ab) / det
    anchor = Point3(c1 * A.n.x + c2 * B.n.x,
                    c1 * A.n.y + c2 * B.n.y,
                    c1 * A.n.z + c2 * B.n.z)
    return Line3(anchor, A.n.cross(B.n))
class Point3(Vector3, Geometry):
    """A 3D point; double-dispatches intersect/connect to the other shape."""
    def __repr__(self):
        return 'Point3(%.2f, %.2f, %.2f)' % (self.x, self.y, self.z)
    def intersect(self, other):
        return other._intersect_point3(self)
    def _intersect_sphere(self, other):
        return _intersect_point3_sphere(self, other)
    def connect(self, other):
        return other._connect_point3(self)
    def _connect_point3(self, other):
        # Identical points cannot be connected: return None.
        if self != other:
            return LineSegment3(other, self)
        return None
    def _connect_line3(self, other):
        # The helpers return a segment ending at this point, so reverse it;
        # implicitly returns None when there is no connection.
        c = _connect_point3_line3(self, other)
        if c:
            return c._swap()
    def _connect_sphere(self, other):
        c = _connect_point3_sphere(self, other)
        if c:
            return c._swap()
    def _connect_plane(self, other):
        c = _connect_point3_plane(self, other)
        if c:
            return c._swap()
class Line3:
    """An infinite 3D line: anchor point `p` plus direction vector `v`.

    Construction forms:
      Line3(Point3, Vector3, float) -- point, direction, explicit length
      Line3(Point3, Point3)         -- through two points
      Line3(Point3, Vector3)        -- point plus direction
      Line3(Line3)                  -- copy
    """
    __slots__ = ['p', 'v']
    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point3) and \
                   isinstance(args[1], Vector3) and \
                   type(args[2]) == float
            self.p = args[0].copy()
            # Rescale the direction vector to the requested length.
            self.v = args[1] * args[2] / abs(args[1])
        elif len(args) == 2:
            if isinstance(args[0], Point3) and isinstance(args[1], Point3):
                self.p = args[0].copy()
                self.v = args[1] - args[0]
            elif isinstance(args[0], Point3) and isinstance(args[1], Vector3):
                self.p = args[0].copy()
                self.v = args[1].copy()
            else:
                raise AttributeError, '%r' % (args,)
        elif len(args) == 1:
            if isinstance(args[0], Line3):
                self.p = args[0].p.copy()
                self.v = args[0].v.copy()
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        # XXX This is annoying.
        #if not self.v:
        #    raise AttributeError, 'Line has zero-length vector'
    def __copy__(self):
        return self.__class__(self.p, self.v)
    copy = __copy__
    def __repr__(self):
        return 'Line3(<%.2f, %.2f, %.2f> + u<%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z, self.v.x, self.v.y, self.v.z)
    # Derived endpoints (p2 = p + v; meaningful mainly for segments).
    p1 = property(lambda self: self.p)
    p2 = property(lambda self: Point3(self.p.x + self.v.x,
                                      self.p.y + self.v.y,
                                      self.p.z + self.v.z))
    def _apply_transform(self, t):
        self.p = t * self.p
        self.v = t * self.v
    def _u_in(self, u):
        # An infinite line accepts any parameter value; overridden by
        # Ray3 and LineSegment3.
        return True
    def intersect(self, other):
        return other._intersect_line3(self)
    def _intersect_sphere(self, other):
        return _intersect_line3_sphere(self, other)
    def _intersect_plane(self, other):
        return _intersect_line3_plane(self, other)
    def connect(self, other):
        return other._connect_line3(self)
    def _connect_point3(self, other):
        return _connect_point3_line3(other, self)
    def _connect_line3(self, other):
        return _connect_line3_line3(other, self)
    def _connect_sphere(self, other):
        return _connect_sphere_line3(other, self)
    def _connect_plane(self, other):
        c = _connect_line3_plane(self, other)
        if c:
            return c
class Ray3(Line3):
    """A half-line: valid parameter range is u >= 0."""
    def __repr__(self):
        return 'Ray3(<%.2f, %.2f, %.2f> + u<%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z, self.v.x, self.v.y, self.v.z)
    def _u_in(self, u):
        return u >= 0.0
class LineSegment3(Line3):
    """A finite segment from p to p + v: valid parameter range is [0, 1]."""
    def __repr__(self):
        return 'LineSegment3(<%.2f, %.2f, %.2f> to <%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z,
             self.p.x + self.v.x, self.p.y + self.v.y, self.p.z + self.v.z)
    def _u_in(self, u):
        return u >= 0.0 and u <= 1.0
    def __abs__(self):
        # Segment length.
        return abs(self.v)
    def magnitude_squared(self):
        return self.v.magnitude_squared()
    def _swap(self):
        # used by connect methods to switch order of points
        self.p = self.p2
        self.v *= -1
        return self
    # Euclidean length of the segment (read-only).
    length = property(lambda self: abs(self.v))
class Sphere:
    """A 3D sphere with centre `c` and radius `r`; dispatches
    intersect/connect via double dispatch."""
    __slots__ = ['c', 'r']
    def __init__(self, center, radius):
        # Module convention: scalar arguments must be floats.
        assert isinstance(center, Vector3) and type(radius) == float
        self.c = center.copy()
        self.r = radius
    def __copy__(self):
        return self.__class__(self.c, self.r)
    copy = __copy__
    def __repr__(self):
        return 'Sphere(<%.2f, %.2f, %.2f>, radius=%.2f)' % \
            (self.c.x, self.c.y, self.c.z, self.r)
    def _apply_transform(self, t):
        # NOTE(review): only the centre is transformed; a scaling transform
        # will not change the radius — confirm this is intended.
        self.c = t * self.c
    def intersect(self, other):
        return other._intersect_sphere(self)
    def _intersect_point3(self, other):
        return _intersect_point3_sphere(other, self)
    def _intersect_line3(self, other):
        return _intersect_line3_sphere(other, self)
    def connect(self, other):
        return other._connect_sphere(self)
    def _connect_point3(self, other):
        return _connect_point3_sphere(other, self)
    def _connect_line3(self, other):
        # Helper returns sphere->line; swap so the segment starts at `other`.
        c = _connect_sphere_line3(self, other)
        if c:
            return c._swap()
    def _connect_sphere(self, other):
        return _connect_sphere_sphere(other, self)
    def _connect_plane(self, other):
        c = _connect_sphere_plane(self, other)
        if c:
            return c
class Plane:
    """A 3D plane in normal form; `n` is normalized during construction.

    Construction forms:
      Plane(Point3, Point3, Point3) -- through three points
      Plane(Point3, Vector3)        -- point plus normal
      Plane(Vector3, float)         -- normal plus constant k
    """
    # n.p = k, where n is normal, p is point on plane, k is constant scalar
    __slots__ = ['n', 'k']
    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point3) and \
                   isinstance(args[1], Point3) and \
                   isinstance(args[2], Point3)
            self.n = (args[1] - args[0]).cross(args[2] - args[0])
            self.n.normalize()
            self.k = self.n.dot(args[0])
        elif len(args) == 2:
            if isinstance(args[0], Point3) and isinstance(args[1], Vector3):
                self.n = args[1].normalized()
                self.k = self.n.dot(args[0])
            elif isinstance(args[0], Vector3) and type(args[1]) == float:
                self.n = args[0].normalized()
                self.k = args[1]
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        # A zero normal means the three points were colinear.
        if not self.n:
            raise AttributeError, 'Points on plane are colinear'
    def __copy__(self):
        return self.__class__(self.n, self.k)
    copy = __copy__
    def __repr__(self):
        return 'Plane(<%.2f, %.2f, %.2f>.p = %.2f)' % \
            (self.n.x, self.n.y, self.n.z, self.k)
    def _get_point(self):
        # Return an arbitrary point on the plane
        if self.n.z:
            return Point3(0., 0., self.k / self.n.z)
        elif self.n.y:
            return Point3(0., self.k / self.n.y, 0.)
        else:
            return Point3(self.k / self.n.x, 0., 0.)
    def _apply_transform(self, t):
        # Transform an arbitrary plane point plus the normal, then rebuild k.
        p = t * self._get_point()
        self.n = t * self.n
        self.k = self.n.dot(p)
    def intersect(self, other):
        return other._intersect_plane(self)
    def _intersect_line3(self, other):
        return _intersect_line3_plane(other, self)
    def _intersect_plane(self, other):
        return _intersect_plane_plane(self, other)
    def connect(self, other):
        return other._connect_plane(self)
    def _connect_point3(self, other):
        return _connect_point3_plane(other, self)
    def _connect_line3(self, other):
        return _connect_line3_plane(other, self)
    def _connect_sphere(self, other):
        return _connect_sphere_plane(other, self)
    def _connect_plane(self, other):
        return _connect_plane_plane(other, self)
|
pokemon4ik2008/py-airfoil
|
euclid.py
|
Python
|
gpl-3.0
| 70,054
|
# Dict'O'nator - A dictation plugin for gedit.
# Copyright (C) <2016> <Abhinav Singh>
#
# This file is part of Dict'O'nator.
#
# Dict'O'nator is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dict'O'nator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Dict'O'nator. If not, see <http://www.gnu.org/licenses/>.
"""Sets up the logger."""
import logging
import os
# Module-wide logger shared by the plugin.
logger = logging.getLogger('dictonator')
# Directory of this plugin file; logs live in a hidden subfolder next to it.
GEDIT_PLUGIN_PATH = os.path.dirname(os.path.abspath(__file__))
# os.path.join with a trailing '' keeps the trailing separator the rest of
# the module expects (LOG_DIR_PATH + 'log.txt').
LOG_DIR_PATH = os.path.join(GEDIT_PLUGIN_PATH, ".logs", "")
# exist_ok avoids the check-then-create race of the previous
# `if not os.path.exists(...): os.makedirs(...)` pattern.
os.makedirs(LOG_DIR_PATH, exist_ok=True)
def setup_logger():
    """Attach console and file handlers (sharing one format) to the logger."""
    log_format = logging.Formatter('%(threadName)s - %(levelname)s - %(message)s')
    logger.setLevel(logging.DEBUG)
    # Console output.
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    # Persistent log file inside the plugin's .logs directory.
    debug_log = LOG_DIR_PATH + 'log.txt'
    file_handler = logging.FileHandler(debug_log)
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)
setup_logger()
|
theawless/Dict-O-nator
|
dictonator/setlog.py
|
Python
|
gpl-3.0
| 1,573
|
# Copyright (C) 2008-2009 Mark A. Matienzo
#
# This file is part of worldcat, the Python WorldCat API module.
#
# worldcat is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# worldcat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with worldcat. If not, see <http://www.gnu.org/licenses/>.
""" worldcat/request/xid.py -- Request objects for xID APIs
xID APIs as of this writing include xISBN, xISSN, and xOCLCNUM.
'Alternate request formats' (such as OpenURL and unAPI) have not been
implemented.
"""
from worldcat.exceptions import EmptyRecordNumberError
from worldcat.request import WorldCatRequest
from worldcat.response.xid import xIDResponse
class xIDRequest(WorldCatRequest):
    """request.xid.xIDRequest: Base class for requests from xID APIs.
    All xIDRequests require a record number ('rec_num') to be passed when
    a class is instantiated. Depending on the request, this will either be
    an ISBN, an ISSN, or an OCLC record number.
    xIDRequests by default have their 'method' set as 'getEditions' and their
    response format set as 'python'.
    """
    def __init__(self, rec_num=None, **kwargs):
        """Constructor for xIDRequests."""
        # Fill in API defaults without clobbering caller-supplied values.
        kwargs.setdefault('method', 'getEditions')
        kwargs.setdefault('format', 'python')
        kwargs.setdefault('fl', '*')
        WorldCatRequest.__init__(self, **kwargs)
        self.rec_num = rec_num
    def get_response(self):
        """Perform the HTTP GET and wrap the result in an xIDResponse."""
        self.http_get()
        return xIDResponse(self)
    def subclass_validator(self, quiet=False):
        """Validator method for xIDRequests.
        Does not validate ISSN or ISBN values; this should be handled
        by the xID APIs.
        Returns True when rec_num is set; otherwise returns False when
        quiet, or raises EmptyRecordNumberError.
        """
        # Idiom fixes: 'is None' instead of '== None', truthiness instead
        # of '== True'.
        if self.rec_num is None:
            if quiet:
                return False
            raise EmptyRecordNumberError
        return True
class xISSNRequest(xIDRequest):
    """request.xid.xISSNRequest: Class for xISSN requests
    For more information on the xISSN API, see
    <http://xissn.worldcat.org/xissnadmin/doc/api.htm>.
    Example of an xISSNRequest:
        >>> from worldcat.request.xid import xISSNRequest
        >>> x = xISSNRequest(rec_num='1895-104X')
        >>> x.validate()
        >>> r = x.get_response()
    """
    def __init__(self, rec_num=None, **kwargs):
        """Constructor method for xISSNRequests."""
        xIDRequest.__init__(self, rec_num, **kwargs)
        # Allowed values for the 'method' and 'format' request parameters.
        self._validators = {
            'method': ('getForms', 'getHistory', 'fixChecksum',
                       'getMetadata', 'getEditions'),
            'format': ('xml', 'html', 'json', 'python',
                       'ruby', 'text', 'csv', 'php')}
    def api_url(self):
        # Build the request URL from the ISSN record number.
        self.url = 'http://xissn.worldcat.org/webservices/xid/issn/%s' \
            % self.rec_num
class xISBNRequest(xIDRequest):
    """request.xid.xISBNRequest: Class for xISBN requests
    """
    def __init__(self, rec_num=None, **kwargs):
        """Constructor method for xISBNRequests."""
        xIDRequest.__init__(self, rec_num, **kwargs)
        # Allowed values for the 'method' and 'format' request parameters.
        self._validators = {
            'method': ('to10', 'to13', 'fixChecksum',
                       'getMetadata', 'getEditions', 'hyphen'),
            'format': ('xml', 'html', 'json', 'python',
                       'ruby', 'txt', 'csv', 'php')}
    def api_url(self):
        # Build the request URL from the ISBN record number.
        self.url = 'http://xisbn.worldcat.org/webservices/xid/isbn/%s' \
            % self.rec_num
class xOCLCNUMRequest(xIDRequest):
    """request.xid.xOCLCNUMRequest: Class for xOCLCNUM requests
    This now replaces the old xOCLCNUMRequest class in worldcat >= 0.3.1.
    xOCLCNUMRequest now takes a 'type' argument; one of "oclcnum", "lccn",
    or "owi", for OCLC record numbers, Library of Congress Catalog Numbers, or
    OCLC Work Identifiers.
    """
    def __init__(self, rec_num=None, numtype='oclcnum', **kwargs):
        """Constructor method for xOCLCNUMRequests."""
        xIDRequest.__init__(self, rec_num, **kwargs)
        # Which identifier namespace rec_num belongs to: oclcnum/lccn/owi.
        self.numtype = numtype
        # Allowed values for the 'method' and 'format' request parameters.
        self._validators = {
            'method': ('getVariants', 'getMetadata', 'getEditions'),
            'format': ('xml', 'html', 'json', 'python',
                       'ruby', 'txt', 'csv', 'php')}
    def api_url(self):
        # Build the request URL from the number type and record number.
        self.url = 'http://xisbn.worldcat.org/webservices/xid/%s/%s' \
            % (self.numtype, self.rec_num)
|
anarchivist/worldcat
|
worldcat/request/xid.py
|
Python
|
gpl-3.0
| 4,907
|
__author__ = "Steffen Vogel"
__copyright__ = "Copyright 2015, Steffen Vogel"
__license__ = "GPLv3"
__maintainer__ = "Steffen Vogel"
__email__ = "post@steffenvogel.de"
"""
This file is part of transWhat
transWhat is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
transwhat is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with transWhat. If not, see <http://www.gnu.org/licenses/>.
"""
from Spectrum2 import protocol_pb2
import logging
import time
import utils
import base64
import deferred
from deferred import call
class Buddy():
    """A single WhatsApp contact held in the user's roster."""
    def __init__(self, owner, number, nick, statusMsg, groups, image_hash):
        self.nick = nick
        self.owner = owner
        self.number = number
        self.groups = groups
        self.image_hash = image_hash if image_hash is not None else ""
        # NOTE(review): the statusMsg argument is ignored and the status is
        # reset to empty here; it is filled in later by BuddyList.onStatus.
        # Confirm this is intentional before "fixing" it.
        self.statusMsg = u""
        self.lastseen = 0
        self.presence = 0
    def update(self, nick, groups, image_hash):
        # Refresh mutable roster fields; keep the old icon hash when the
        # new one is unknown (None).
        self.nick = nick
        self.groups = groups
        if image_hash is not None:
            self.image_hash = image_hash
    def __str__(self):
        return "%s (nick=%s)" % (self.number, self.nick)
class BuddyList(dict):
    """Roster of Buddy objects keyed by phone number.

    Mirrors contact state between the WhatsApp session and the Spectrum2
    backend (Python 2 code: uses dict.keys() as a list and iteritems()).
    """
    def __init__(self, owner, backend, user, session):
        self.owner = owner
        self.backend = backend
        self.session = session
        self.user = user
        self.logger = logging.getLogger(self.__class__.__name__)
        # True once the initial contact list has been synced to WhatsApp.
        self.synced = False
    def _load(self, buddies):
        # Populate the roster from backend buddy records, then push presence
        # subscriptions and status requests for every contact.
        for buddy in buddies:
            number = buddy.buddyName
            nick = buddy.alias
            statusMsg = buddy.statusMessage.decode('utf-8')
            groups = [g for g in buddy.group]
            image_hash = buddy.iconHash
            self[number] = Buddy(self.owner, number, nick, statusMsg,
                    groups, image_hash)
        self.logger.debug("Update roster")
        contacts = self.keys()
        # The transport's own 'bot' pseudo-contact is not a real WhatsApp user.
        contacts.remove('bot')
        if self.synced == False:
            self.session.sendSync(contacts, delta = False, interactive = True)
            self.synced = True
        self.logger.debug("Roster add: %s", str(list(contacts)))
        for number in contacts:
            buddy = self[number]
            self.backend.handleBuddyChanged(self.user, number, buddy.nick,
                    buddy.groups, protocol_pb2.STATUS_NONE,
                    iconHash = buddy.image_hash if buddy.image_hash is not None else "")
            self.session.subscribePresence(number)
        self.logger.debug("%s is requesting statuses of: %s", self.user, contacts)
        self.session.requestStatuses(contacts, success = self.onStatus)
    def onStatus(self, contacts):
        # Callback for requestStatuses: record each contact's status message
        # and propagate the change to Spectrum.
        self.logger.debug("%s received statuses of: %s", self.user, contacts)
        for number, (status, time) in contacts.iteritems():
            buddy = self[number]
            if status is None:
                buddy.statusMsg = ""
            else:
                buddy.statusMsg = utils.softToUni(status)
            self.updateSpectrum(buddy)
    def load(self, buddies):
        # Defer roster loading until the session has actually logged in.
        if self.session.loggedIn:
            self._load(buddies)
        else:
            self.session.loginQueue.append(lambda: self._load(buddies))
    def update(self, number, nick, groups, image_hash):
        # Add a new contact or refresh an existing one, then mirror the
        # change to Spectrum. Returns the Buddy object.
        if number in self:
            buddy = self[number]
            buddy.update(nick, groups, image_hash)
        else:
            buddy = Buddy(self.owner, number, nick, "", groups, image_hash)
            self[number] = buddy
            self.logger.debug("Roster add: %s", buddy)
            self.session.sendSync([number], delta = True, interactive = True)
            self.session.subscribePresence(number)
            self.session.requestStatuses([number], success = self.onStatus)
            if image_hash == "" or image_hash is None:
                self.requestVCard(number)
        self.updateSpectrum(buddy)
        return buddy
    def updateSpectrum(self, buddy):
        # Map WhatsApp presence onto Spectrum status constants and push the
        # combined status/last-seen text to the backend.
        if buddy.presence == 0:
            status = protocol_pb2.STATUS_NONE
        elif buddy.presence == 'unavailable':
            status = protocol_pb2.STATUS_AWAY
        else:
            status = protocol_pb2.STATUS_ONLINE
        statusmsg = buddy.statusMsg
        if buddy.lastseen != 0:
            timestamp = time.localtime(buddy.lastseen)
            statusmsg += time.strftime("\n Last seen: %a, %d %b %Y %H:%M:%S", timestamp)
        iconHash = buddy.image_hash if buddy.image_hash is not None else ""
        self.logger.debug("Updating buddy %s (%s) in %s, image_hash = %s",
                buddy.nick, buddy.number, buddy.groups, iconHash)
        self.logger.debug("Status Message: %s", statusmsg)
        self.backend.handleBuddyChanged(self.user, buddy.number, buddy.nick,
                buddy.groups, status, statusMessage=statusmsg, iconHash=iconHash)
    def remove(self, number):
        # Delete a contact from the roster and Spectrum; returns the removed
        # Buddy, or None when the number was unknown.
        try:
            buddy = self[number]
            del self[number]
            self.backend.handleBuddyChanged(self.user, number, "", [],
                    protocol_pb2.STATUS_NONE)
            self.backend.handleBuddyRemoved(self.user, number)
            self.session.unsubscribePresence(number)
            # TODO Sync remove
            return buddy
        except KeyError:
            return None
    def requestVCard(self, buddy, ID=None):
        # Asynchronously fetch a contact's profile picture; when ID is given,
        # answer the pending VCard request with it, and in any case update
        # the roster entry's image hash.
        if buddy == self.user or buddy == self.user.split('@')[0]:
            buddy = self.session.legacyName
        # Get profile picture
        self.logger.debug('Requesting profile picture of %s', buddy)
        response = deferred.Deferred()
        self.session.requestProfilePicture(buddy, onSuccess = response.run)
        response = response.arg(0)
        pictureData = response.pictureData()
        # Send VCard
        if ID != None:
            call(self.logger.debug, 'Sending VCard (%s) with image id %s: %s',
                    ID, response.pictureId(), pictureData.then(base64.b64encode))
            call(self.backend.handleVCard, self.user, ID, buddy, "", "",
                    pictureData)
        # Send image hash
        if not buddy == self.session.legacyName:
            try:
                obuddy = self[buddy]
                nick = obuddy.nick
                groups = obuddy.groups
            except KeyError:
                nick = ""
                groups = []
            image_hash = pictureData.then(utils.sha1hash)
            call(self.logger.debug, 'Image hash is %s', image_hash)
            call(self.update, buddy, nick, groups, image_hash)
|
NeoBelerophon/transwhat
|
buddy.py
|
Python
|
gpl-3.0
| 5,903
|
class Solution(object):
    """LeetCode 5: longest palindromic substring (centre expansion, O(n^2))."""
    def longestPalindrome(self, s):
        """Return the longest palindromic substring of s.

        Expands around every odd centre (a character) and even centre (the
        gap between two characters) with integer two-pointer expansion.
        Fixes two defects of the previous version: the `len(s) <= 2` shortcut
        wrongly returned non-palindromes such as 'ab' unchanged, and the
        even-centre loop used fragile float indices (i + 0.5 with int()
        truncation).
        """
        # Empty string and single characters are trivially palindromic.
        if len(s) <= 1:
            return s
        max_len = 0
        max_str = ''
        for i in range(len(s)):
            # First the odd-length palindrome centred on s[i], then the
            # even-length palindrome centred between s[i] and s[i+1]
            # (same scan order as before, so ties resolve identically).
            for lo, hi in ((i - 1, i + 1), (i, i + 1)):
                while lo >= 0 and hi < len(s) and s[lo] == s[hi]:
                    lo -= 1
                    hi += 1
                # Here (lo, hi) sit one position outside the palindrome.
                if hi - lo - 1 > max_len:
                    max_len = hi - lo - 1
                    max_str = s[lo + 1:hi]
        return max_str
    def test(self):
        assert self.longestPalindrome('a') == 'a'
        assert self.longestPalindrome('ab') in ('a', 'b')
        assert self.longestPalindrome('abcba') == 'abcba'
        assert self.longestPalindrome('eabcbae') == 'eabcbae'
        assert self.longestPalindrome('abba') == 'abba'
        assert self.longestPalindrome('abbc') == 'bb'
        assert self.longestPalindrome('dbabba') == 'abba'
        assert self.longestPalindrome('decababace') == 'ecababace'
        assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace'
if __name__ == '__main__':
    # Run the built-in assertion suite when executed directly.
    s = Solution()
    s.test()
|
cosven/pat_play
|
leetcode/005_tle_error.py
|
Python
|
gpl-3.0
| 1,580
|
import logging
import os
import math
from collections import defaultdict
from gensim import corpora
# 引入斷詞與停用詞的配置
from .Matcher.matcher import Matcher
class Evaluator(Matcher):
    """
    Reads a list of candidate replies and scores them, returning the
    most reliable ones.
    """
    def __init__(self,segLib="Taiba"):
        #FIXME If the hosted deployment is memory constrained, consider
        # switching the segmentation library to jieba!
        super().__init__(segLib)
        self.responses = []
        self.segResponses = []
        self.totalWords = 0
        self.path = os.path.dirname(__file__)
        # NOTE(review): this debug-log handle stays open for the object's
        # lifetime and is never closed.
        self.debugLog = open(self.path + "/data/EvaluateLog.txt",'w',encoding="utf-8")
        self.filteredWords = set() # replies containing these words are dropped
        self.counterDictionary = defaultdict(int) # word-frequency counts
        self.tokenDictionary = None # assigns word ids / builds bag-of-words
        # Load Chinese stopwords and special symbols.
        self.loadStopWords(path=self.path + "/data/stopwords/chinese_sw.txt")
        self.loadStopWords(path=self.path + "/data/stopwords/specialMarks.txt")
        self.loadFilterdWord(path=self.path + "/data/stopwords/ptt_words.txt")
    def cleanFormerResult(self):
        """
        Clear state left over from the previous evaluation run.
        """
        self.responses = []
        self.segResponses = []
        self.totalWords = 0
    def getBestResponse(self, responses, topk, debugMode=False):
        """
        Pick the top-k most reliable replies.
        Return : List of (reply,grade)
        """
        self.cleanFormerResult()
        self.buildResponses(responses)
        self.segmentResponse()
        self.buildCounterDictionary()
        candiateList = self.evaluateByGrade(topk, debug=debugMode)
        return candiateList
    def loadFilterdWord(self,path):
        # One filtered word per line.
        with open(path, 'r', encoding='utf-8') as sw:
            for word in sw:
                self.filteredWords.add(word.strip('\n'))
    def buildResponses(self, responses):
        """
        Keep only each record's "Content" field (user/vote are currently
        unused) and drop replies containing any filtered word.
        """
        self.responses = []
        for response in responses:
            clean = True
            r = response["Content"]
            for word in self.filteredWords:
                if word in r:
                    clean = False
            if clean:
                self.responses.append(response["Content"])
    def segmentResponse(self):
        """
        Segment every reply in self.responses and strip Chinese stopwords;
        results are stored in self.segResponses.
        """
        self.segResponses = []
        for response in self.responses:
            keywordResponse = [keyword for keyword in self.wordSegmentation(response)
                               if keyword not in self.stopwords
                               and keyword != ' ']
            self.totalWords += len(keywordResponse)
            self.segResponses.append(keywordResponse)
        #logging.info("Response segmentation finished")
    def buildCounterDictionary(self):
        """
        Count occurrences of every word in self.segResponses.
        """
        for reply in self.segResponses:
            for word in reply:
                self.counterDictionary[word] += 1
        #logging.info("Counter dictionary built")
    def buildTokenDictionary(self):
        """
        Assign an id to every word in self.segResponses (bag-of-words).
        """
        self.tokenDictionary = corpora.Dictionary(self.segResponses)
        logging.info("詞袋字典建置完成,%s" % str(self.tokenDictionary))
    def evaluateByGrade(self,topk,debug=False):
        """
        Score every reply by the document frequency of its words: replies
        containing more high-frequency words score higher.
        Args:
            - when debug is True, log each reply's score and segmentation
        Return: (BestResponse,Grade)
            - BestResponse: the highest-scoring reply
            - Grade: that reply's score
        """
        bestResponse = ""
        candiates = []
        avgWords = self.totalWords/len(self.segResponses)
        for i in range(0, len(self.segResponses)):
            wordCount = len(self.segResponses[i])
            sourceCount = len(self.responses[i])
            meanful = 0
            # Skip empty segmentations and overly long raw replies.
            if wordCount == 0 or sourceCount > 24:
                continue
            cur_grade = 0.
            for word in self.segResponses[i]:
                wordWeight = self.counterDictionary[word]
                # Only words seen more than once contribute to the score.
                if wordWeight > 1:
                    meanful += math.log(wordWeight,10)
                    cur_grade += wordWeight
            # Normalise by reply length; NOTE(review): the log base is the
            # average word count — a base of 1 would divide by zero.
            cur_grade = cur_grade * meanful / (math.log(len(self.segResponses[i])+1,avgWords) + 1)
            candiates.append([self.responses[i],cur_grade])
            if debug:
                result = self.responses[i] + '\t' + str(self.segResponses[i]) + '\t' + str(cur_grade)
                self.debugLog.write(result+'\n')
                print(result)
        candiates = sorted(candiates,key=lambda candiate:candiate[1],reverse=True)
        return candiates[:topk]
class ClusteringEvaluator(Evaluator):
    """
    Ranks reply reliability via clustering (placeholder, not implemented).
    """
    pass
|
zake7749/Chatbot
|
Chatbot/QuestionAnswering/responsesEvaluate.py
|
Python
|
gpl-3.0
| 5,244
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South migration: drops the Customer and
    CustomerCallLog tables (backwards() recreates them)."""
    def forwards(self, orm):
        # Removing unique constraint on 'Customer', fields ['company_name', 'contact_name', 'contact_mail']
        db.delete_unique('readings_customer', ['company_name', 'contact_name', 'contact_mail'])
        # Deleting model 'Customer'
        db.delete_table('readings_customer')
        # Deleting model 'CustomerCallLog'
        db.delete_table('readings_customercalllog')
    def backwards(self, orm):
        # Adding model 'Customer'
        db.create_table('readings_customer', (
            ('payment_status', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
            ('contact_mail', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('contact_address', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('contact_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('payment_confirmation', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('customer_type', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
            ('company_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('api_key', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('contact_phone', self.gf('django.db.models.fields.CharField')(max_length=25, null=True, blank=True)),
        ))
        db.send_create_signal('readings', ['Customer'])
        # Adding unique constraint on 'Customer', fields ['company_name', 'contact_name', 'contact_mail']
        db.create_unique('readings_customer', ['company_name', 'contact_name', 'contact_mail'])
        # Adding model 'CustomerCallLog'
        db.create_table('readings_customercalllog', (
            ('customer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['readings.Customer'])),
            ('max_longitude', self.gf('django.db.models.fields.FloatField')()),
            ('min_longitude', self.gf('django.db.models.fields.FloatField')()),
            ('start_time', self.gf('django.db.models.fields.BigIntegerField')()),
            ('results_limit', self.gf('django.db.models.fields.IntegerField')()),
            ('processing_time', self.gf('django.db.models.fields.FloatField')()),
            ('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('min_latitude', self.gf('django.db.models.fields.FloatField')()),
            ('data_format', self.gf('django.db.models.fields.CharField')(max_length=10)),
            ('max_latitude', self.gf('django.db.models.fields.FloatField')()),
            ('since_last_call', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('use_utc', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('end_time', self.gf('django.db.models.fields.BigIntegerField')()),
            ('results_returned', self.gf('django.db.models.fields.IntegerField')()),
            ('global_data', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal('readings', ['CustomerCallLog'])
    # Frozen ORM state of the remaining models at this migration point.
    models = {
        'readings.reading': {
            'Meta': {'unique_together': "(('latitude', 'longitude', 'daterecorded', 'user_id'),)", 'object_name': 'Reading'},
            'altitude': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
            'client_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'daterecorded': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'latitude': ('django.db.models.fields.FloatField', [], {'db_index': 'True'}),
            'location_accuracy': ('django.db.models.fields.FloatField', [], {}),
            'longitude': ('django.db.models.fields.FloatField', [], {'db_index': 'True'}),
            'observation_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'observation_unit': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'provider': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'reading': ('django.db.models.fields.FloatField', [], {}),
            'reading_accuracy': ('django.db.models.fields.FloatField', [], {}),
            'sharing': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'tzoffset': ('django.db.models.fields.BigIntegerField', [], {}),
            'user_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'readings.readingsync': {
            'Meta': {'object_name': 'ReadingSync'},
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'processing_time': ('django.db.models.fields.FloatField', [], {}),
            'readings': ('django.db.models.fields.IntegerField', [], {})
        }
    }
    complete_apps = ['readings']
|
sibsibsib/pressureNET-server
|
readings/migrations/0021_auto__del_customer__del_unique_customer_company_name_contact_name_cont.py
|
Python
|
gpl-3.0
| 5,747
|
# This file is part of the Minecraft Overviewer.
#
# Minecraft Overviewer is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# Minecraft Overviewer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the Overviewer. If not, see <http://www.gnu.org/licenses/>.
import sys
import imp
import os
import os.path
import zipfile
from cStringIO import StringIO
import math
from random import randint
import numpy
from PIL import Image, ImageEnhance, ImageOps, ImageDraw
import logging
import functools
import util
from c_overviewer import alpha_over
class TextureException(Exception):
    """Raised when a requested texture file cannot be located in any of
    the searched locations (see Textures.find_file)."""
    pass
# Minecraft dye/wool color names, indexed by their 4-bit block data value
# (0-15); used to build per-color texture filenames such as glass_<color>.png.
color_map = ["white", "orange", "magenta", "light_blue", "yellow", "lime", "pink", "gray",
        "silver", "cyan", "purple", "blue", "brown", "green", "red", "black"]
##
## Textures object
##
class Textures(object):
"""An object that generates a set of block sprites to use while
rendering. It accepts a background color, north direction, and
local textures path.
"""
def __init__(self, texturepath=None, bgcolor=(26, 26, 26, 0), northdirection=0):
self.bgcolor = bgcolor
self.rotation = northdirection
self.find_file_local_path = texturepath
# not yet configurable
self.texture_size = 24
self.texture_dimensions = (self.texture_size, self.texture_size)
# this is set in in generate()
self.generated = False
# see load_image_texture()
self.texture_cache = {}
# once we find a jarfile that contains a texture, we cache the ZipFile object here
self.jar = None
self.jarpath = ""
##
## pickle support
##
def __getstate__(self):
# we must get rid of the huge image lists, and other images
attributes = self.__dict__.copy()
for attr in ['blockmap', 'biome_grass_texture', 'watertexture', 'lavatexture', 'firetexture', 'portaltexture', 'lightcolor', 'grasscolor', 'foliagecolor', 'watercolor', 'texture_cache']:
try:
del attributes[attr]
except KeyError:
pass
return attributes
def __setstate__(self, attrs):
# regenerate textures, if needed
for attr, val in attrs.iteritems():
setattr(self, attr, val)
self.texture_cache = {}
if self.generated:
self.generate()
##
## The big one: generate()
##
def generate(self):
# generate biome grass mask
self.biome_grass_texture = self.build_block(self.load_image_texture("assets/minecraft/textures/blocks/grass_top.png"), self.load_image_texture("assets/minecraft/textures/blocks/grass_side_overlay.png"))
# generate the blocks
global blockmap_generators
global known_blocks, used_datas
self.blockmap = [None] * max_blockid * max_data
for (blockid, data), texgen in blockmap_generators.iteritems():
tex = texgen(self, blockid, data)
self.blockmap[blockid * max_data + data] = self.generate_texture_tuple(tex)
if self.texture_size != 24:
# rescale biome grass
self.biome_grass_texture = self.biome_grass_texture.resize(self.texture_dimensions, Image.ANTIALIAS)
# rescale the rest
for i, tex in enumerate(blockmap):
if tex is None:
continue
block = tex[0]
scaled_block = block.resize(self.texture_dimensions, Image.ANTIALIAS)
blockmap[i] = self.generate_texture_tuple(scaled_block)
self.generated = True
##
## Helpers for opening textures
##
def find_file(self, filename, mode="rb", verbose=False):
"""Searches for the given file and returns an open handle to it.
This searches the following locations in this order:
* In the directory textures_path given in the initializer
* In the resource pack given by textures_path
* The program dir (same dir as overviewer.py) for extracted textures
* On Darwin, in /Applications/Minecraft for extracted textures
* Inside a minecraft client jar. Client jars are searched for in the
following location depending on platform:
* On Windows, at %APPDATA%/.minecraft/versions/
* On Darwin, at
$HOME/Library/Application Support/minecraft/versions
* at $HOME/.minecraft/versions/
Only the latest non-snapshot version >1.6 is used
* The overviewer_core/data/textures dir
In all of these, files are searched for in '.', 'anim', 'misc/', and
'environment/'.
"""
if verbose: logging.info("Starting search for {0}".format(filename))
# a list of subdirectories to search for a given file,
# after the obvious '.'
search_dirs = ['anim', 'misc', 'environment', 'item', 'item/chests', 'entity', 'entity/chest']
search_zip_paths = [filename,] + [d + '/' + filename for d in search_dirs]
def search_dir(base):
"""Search the given base dir for filename, in search_dirs."""
for path in [os.path.join(base, d, filename) for d in ['',] + search_dirs]:
if verbose: logging.info('filename: ' + filename + ' ; path: ' + path)
if os.path.isfile(path):
return path
return None
if verbose: logging.info('search_zip_paths: ' + ', '.join(search_zip_paths))
# A texture path was given on the command line. Search this location
# for the file first.
if self.find_file_local_path:
if os.path.isdir(self.find_file_local_path):
path = search_dir(self.find_file_local_path)
if path:
if verbose: logging.info("Found %s in '%s'", filename, path)
return open(path, mode)
elif os.path.isfile(self.find_file_local_path):
# Must be a resource pack. Look for the requested file within
# it.
try:
pack = zipfile.ZipFile(self.find_file_local_path)
for packfilename in search_zip_paths:
try:
# pack.getinfo() will raise KeyError if the file is
# not found.
pack.getinfo(packfilename)
if verbose: logging.info("Found %s in '%s'", packfilename, self.find_file_local_path)
return pack.open(packfilename)
except (KeyError, IOError):
pass
try:
# 2nd try with completed path.
packfilename = 'assets/minecraft/textures/' + packfilename
pack.getinfo(packfilename)
if verbose: logging.info("Found %s in '%s'", packfilename, self.find_file_local_path)
return pack.open(packfilename)
except (KeyError, IOError):
pass
except (zipfile.BadZipfile, IOError):
pass
# If we haven't returned at this point, then the requested file was NOT
# found in the user-specified texture path or resource pack.
if verbose: logging.info("Did not find the file in specified texture path")
# Look in the location of the overviewer executable for the given path
programdir = util.get_program_path()
path = search_dir(programdir)
if path:
if verbose: logging.info("Found %s in '%s'", filename, path)
return open(path, mode)
if sys.platform.startswith("darwin"):
path = search_dir("/Applications/Minecraft")
if path:
if verbose: logging.info("Found %s in '%s'", filename, path)
return open(path, mode)
if verbose: logging.info("Did not find the file in overviewer executable directory")
if verbose: logging.info("Looking for installed minecraft jar files...")
# we've sucessfully loaded something from here before, so let's quickly try
# this before searching again
if self.jar is not None:
for jarfilename in search_zip_paths:
try:
self.jar.getinfo(jarfilename)
if verbose: logging.info("Found (cached) %s in '%s'", jarfilename, self.jarpath)
return self.jar.open(jarfilename)
except (KeyError, IOError), e:
pass
# Find an installed minecraft client jar and look in it for the texture
# file we need.
versiondir = ""
if "APPDATA" in os.environ and sys.platform.startswith("win"):
versiondir = os.path.join(os.environ['APPDATA'], ".minecraft", "versions")
elif "HOME" in os.environ:
# For linux:
versiondir = os.path.join(os.environ['HOME'], ".minecraft", "versions")
if not os.path.exists(versiondir) and sys.platform.startswith("darwin"):
# For Mac:
versiondir = os.path.join(os.environ['HOME'], "Library",
"Application Support", "minecraft", "versions")
try:
if verbose: logging.info("Looking in the following directory: \"%s\"" % versiondir)
versions = os.listdir(versiondir)
if verbose: logging.info("Found these versions: {0}".format(versions))
except OSError:
# Directory doesn't exist? Ignore it. It will find no versions and
# fall through the checks below to the error at the bottom of the
# method.
versions = []
most_recent_version = [0,0,0]
for version in versions:
# Look for the latest non-snapshot that is at least 1.8. This
# version is only compatible with >=1.8, and we cannot in general
# tell if a snapshot is more or less recent than a release.
# Allow two component names such as "1.8" and three component names
# such as "1.8.1"
if version.count(".") not in (1,2):
continue
try:
versionparts = [int(x) for x in version.split(".")]
except ValueError:
continue
if versionparts < [1,8]:
continue
if versionparts > most_recent_version:
most_recent_version = versionparts
if most_recent_version != [0,0,0]:
if verbose: logging.info("Most recent version >=1.8.0: {0}. Searching it for the file...".format(most_recent_version))
jarname = ".".join(str(x) for x in most_recent_version)
jarpath = os.path.join(versiondir, jarname, jarname + ".jar")
if os.path.isfile(jarpath):
jar = zipfile.ZipFile(jarpath)
for jarfilename in search_zip_paths:
try:
jar.getinfo(jarfilename)
if verbose: logging.info("Found %s in '%s'", jarfilename, jarpath)
self.jar, self.jarpath = jar, jarpath
return jar.open(jarfilename)
except (KeyError, IOError), e:
pass
if verbose: logging.info("Did not find file {0} in jar {1}".format(filename, jarpath))
else:
if verbose: logging.info("Did not find any non-snapshot minecraft jars >=1.8.0")
# Last ditch effort: look for the file is stored in with the overviewer
# installation. We include a few files that aren't included with Minecraft
# textures. This used to be for things such as water and lava, since
# they were generated by the game and not stored as images. Nowdays I
# believe that's not true, but we still have a few files distributed
# with overviewer.
if verbose: logging.info("Looking for texture in overviewer_core/data/textures")
path = search_dir(os.path.join(programdir, "overviewer_core", "data", "textures"))
if path:
if verbose: logging.info("Found %s in '%s'", filename, path)
return open(path, mode)
elif hasattr(sys, "frozen") or imp.is_frozen("__main__"):
# windows special case, when the package dir doesn't exist
path = search_dir(os.path.join(programdir, "textures"))
if path:
if verbose: logging.info("Found %s in '%s'", filename, path)
return open(path, mode)
raise TextureException("Could not find the textures while searching for '{0}'. Try specifying the 'texturepath' option in your config file.\nSet it to the path to a Minecraft Resource pack.\nAlternately, install the Minecraft client (which includes textures)\nAlso see <http://docs.overviewer.org/en/latest/running/#installing-the-textures>\n(Remember, this version of Overviewer requires a 1.10-compatible resource pack)\n(Also note that I won't automatically use snapshots; you'll have to use the texturepath option to use a snapshot jar)".format(filename))
def load_image_texture(self, filename):
# Textures may be animated or in a different resolution than 16x16.
# This method will always return a 16x16 image
img = self.load_image(filename)
w,h = img.size
if w != h:
img = img.crop((0,0,w,w))
if w != 16:
img = img.resize((16, 16), Image.ANTIALIAS)
self.texture_cache[filename] = img
return img
def load_image(self, filename):
"""Returns an image object"""
if filename in self.texture_cache:
return self.texture_cache[filename]
fileobj = self.find_file(filename)
buffer = StringIO(fileobj.read())
img = Image.open(buffer).convert("RGBA")
self.texture_cache[filename] = img
return img
def load_water(self):
"""Special-case function for loading water, handles
MCPatcher-compliant custom animated water."""
watertexture = getattr(self, "watertexture", None)
if watertexture:
return watertexture
try:
# try the MCPatcher case first
watertexture = self.load_image("custom_water_still.png")
watertexture = watertexture.crop((0, 0, watertexture.size[0], watertexture.size[0]))
except TextureException:
watertexture = self.load_image_texture("assets/minecraft/textures/blocks/water_still.png")
self.watertexture = watertexture
return watertexture
def load_lava(self):
"""Special-case function for loading lava, handles
MCPatcher-compliant custom animated lava."""
lavatexture = getattr(self, "lavatexture", None)
if lavatexture:
return lavatexture
try:
# try the MCPatcher lava first, in case it's present
lavatexture = self.load_image("custom_lava_still.png")
lavatexture = lavatexture.crop((0, 0, lavatexture.size[0], lavatexture.size[0]))
except TextureException:
lavatexture = self.load_image_texture("assets/minecraft/textures/blocks/lava_still.png")
self.lavatexture = lavatexture
return lavatexture
def load_fire(self):
"""Special-case function for loading fire, handles
MCPatcher-compliant custom animated fire."""
firetexture = getattr(self, "firetexture", None)
if firetexture:
return firetexture
try:
# try the MCPatcher case first
firetextureNS = self.load_image("custom_fire_n_s.png")
firetextureNS = firetextureNS.crop((0, 0, firetextureNS.size[0], firetextureNS.size[0]))
firetextureEW = self.load_image("custom_fire_e_w.png")
firetextureEW = firetextureEW.crop((0, 0, firetextureEW.size[0], firetextureEW.size[0]))
firetexture = (firetextureNS,firetextureEW)
except TextureException:
fireNS = self.load_image_texture("assets/minecraft/textures/blocks/fire_layer_0.png")
fireEW = self.load_image_texture("assets/minecraft/textures/blocks/fire_layer_1.png")
firetexture = (fireNS, fireEW)
self.firetexture = firetexture
return firetexture
def load_portal(self):
"""Special-case function for loading portal, handles
MCPatcher-compliant custom animated portal."""
portaltexture = getattr(self, "portaltexture", None)
if portaltexture:
return portaltexture
try:
# try the MCPatcher case first
portaltexture = self.load_image("custom_portal.png")
portaltexture = portaltexture.crop((0, 0, portaltexture.size[0], portaltexture.size[1]))
except TextureException:
portaltexture = self.load_image_texture("assets/minecraft/textures/blocks/portal.png")
self.portaltexture = portaltexture
return portaltexture
def load_light_color(self):
"""Helper function to load the light color texture."""
if hasattr(self, "lightcolor"):
return self.lightcolor
try:
lightcolor = list(self.load_image("light_normal.png").getdata())
except Exception:
logging.warning("Light color image could not be found.")
lightcolor = None
self.lightcolor = lightcolor
return lightcolor
def load_grass_color(self):
"""Helper function to load the grass color texture."""
if not hasattr(self, "grasscolor"):
self.grasscolor = list(self.load_image("grass.png").getdata())
return self.grasscolor
def load_foliage_color(self):
"""Helper function to load the foliage color texture."""
if not hasattr(self, "foliagecolor"):
self.foliagecolor = list(self.load_image("foliage.png").getdata())
return self.foliagecolor
#I guess "watercolor" is wrong. But I can't correct as my texture pack don't define water color.
def load_water_color(self):
"""Helper function to load the water color texture."""
if not hasattr(self, "watercolor"):
self.watercolor = list(self.load_image("watercolor.png").getdata())
return self.watercolor
def _split_terrain(self, terrain):
"""Builds and returns a length 256 array of each 16x16 chunk
of texture.
"""
textures = []
(terrain_width, terrain_height) = terrain.size
texture_resolution = terrain_width / 16
for y in xrange(16):
for x in xrange(16):
left = x*texture_resolution
upper = y*texture_resolution
right = left+texture_resolution
lower = upper+texture_resolution
region = terrain.transform(
(16, 16),
Image.EXTENT,
(left,upper,right,lower),
Image.BICUBIC)
textures.append(region)
return textures
##
## Image Transformation Functions
##
@staticmethod
def transform_image_top(img):
"""Takes a PIL image and rotates it left 45 degrees and shrinks the y axis
by a factor of 2. Returns the resulting image, which will be 24x12 pixels
"""
# Resize to 17x17, since the diagonal is approximately 24 pixels, a nice
# even number that can be split in half twice
img = img.resize((17, 17), Image.ANTIALIAS)
# Build the Affine transformation matrix for this perspective
transform = numpy.matrix(numpy.identity(3))
# Translate up and left, since rotations are about the origin
transform *= numpy.matrix([[1,0,8.5],[0,1,8.5],[0,0,1]])
# Rotate 45 degrees
ratio = math.cos(math.pi/4)
#transform *= numpy.matrix("[0.707,-0.707,0;0.707,0.707,0;0,0,1]")
transform *= numpy.matrix([[ratio,-ratio,0],[ratio,ratio,0],[0,0,1]])
# Translate back down and right
transform *= numpy.matrix([[1,0,-12],[0,1,-12],[0,0,1]])
# scale the image down by a factor of 2
transform *= numpy.matrix("[1,0,0;0,2,0;0,0,1]")
transform = numpy.array(transform)[:2,:].ravel().tolist()
newimg = img.transform((24,12), Image.AFFINE, transform)
return newimg
@staticmethod
def transform_image_side(img):
"""Takes an image and shears it for the left side of the cube (reflect for
the right side)"""
# Size of the cube side before shear
img = img.resize((12,12), Image.ANTIALIAS)
# Apply shear
transform = numpy.matrix(numpy.identity(3))
transform *= numpy.matrix("[1,0,0;-0.5,1,0;0,0,1]")
transform = numpy.array(transform)[:2,:].ravel().tolist()
newimg = img.transform((12,18), Image.AFFINE, transform)
return newimg
@staticmethod
def transform_image_slope(img):
"""Takes an image and shears it in the shape of a slope going up
in the -y direction (reflect for +x direction). Used for minetracks"""
# Take the same size as trasform_image_side
img = img.resize((12,12), Image.ANTIALIAS)
# Apply shear
transform = numpy.matrix(numpy.identity(3))
transform *= numpy.matrix("[0.75,-0.5,3;0.25,0.5,-3;0,0,1]")
transform = numpy.array(transform)[:2,:].ravel().tolist()
newimg = img.transform((24,24), Image.AFFINE, transform)
return newimg
@staticmethod
def transform_image_angle(img, angle):
"""Takes an image an shears it in arbitrary angle with the axis of
rotation being vertical.
WARNING! Don't use angle = pi/2 (or multiplies), it will return
a blank image (or maybe garbage).
NOTE: angle is in the image not in game, so for the left side of a
block angle = 30 degree.
"""
# Take the same size as trasform_image_side
img = img.resize((12,12), Image.ANTIALIAS)
# some values
cos_angle = math.cos(angle)
sin_angle = math.sin(angle)
# function_x and function_y are used to keep the result image in the
# same position, and constant_x and constant_y are the coordinates
# for the center for angle = 0.
constant_x = 6.
constant_y = 6.
function_x = 6.*(1-cos_angle)
function_y = -6*sin_angle
big_term = ( (sin_angle * (function_x + constant_x)) - cos_angle* (function_y + constant_y))/cos_angle
# The numpy array is not really used, but is helpful to
# see the matrix used for the transformation.
transform = numpy.array([[1./cos_angle, 0, -(function_x + constant_x)/cos_angle],
[-sin_angle/(cos_angle), 1., big_term ],
[0, 0, 1.]])
transform = tuple(transform[0]) + tuple(transform[1])
newimg = img.transform((24,24), Image.AFFINE, transform)
return newimg
def build_block(self, top, side):
"""From a top texture and a side texture, build a block image.
top and side should be 16x16 image objects. Returns a 24x24 image
"""
img = Image.new("RGBA", (24,24), self.bgcolor)
original_texture = top.copy()
top = self.transform_image_top(top)
if not side:
alpha_over(img, top, (0,0), top)
return img
side = self.transform_image_side(side)
otherside = side.transpose(Image.FLIP_LEFT_RIGHT)
# Darken the sides slightly. These methods also affect the alpha layer,
# so save them first (we don't want to "darken" the alpha layer making
# the block transparent)
sidealpha = side.split()[3]
side = ImageEnhance.Brightness(side).enhance(0.9)
side.putalpha(sidealpha)
othersidealpha = otherside.split()[3]
otherside = ImageEnhance.Brightness(otherside).enhance(0.8)
otherside.putalpha(othersidealpha)
alpha_over(img, top, (0,0), top)
alpha_over(img, side, (0,6), side)
alpha_over(img, otherside, (12,6), otherside)
# Manually touch up 6 pixels that leave a gap because of how the
# shearing works out. This makes the blocks perfectly tessellate-able
for x,y in [(13,23), (17,21), (21,19)]:
# Copy a pixel to x,y from x-1,y
img.putpixel((x,y), img.getpixel((x-1,y)))
for x,y in [(3,4), (7,2), (11,0)]:
# Copy a pixel to x,y from x+1,y
img.putpixel((x,y), img.getpixel((x+1,y)))
return img
def build_slab_block(self, top, side, upper):
"""From a top texture and a side texture, build a slab block image.
top and side should be 16x16 image objects. Returns a 24x24 image
"""
# cut the side texture in half
mask = side.crop((0,8,16,16))
side = Image.new(side.mode, side.size, self.bgcolor)
alpha_over(side, mask,(0,0,16,8), mask)
# plain slab
top = self.transform_image_top(top)
side = self.transform_image_side(side)
otherside = side.transpose(Image.FLIP_LEFT_RIGHT)
sidealpha = side.split()[3]
side = ImageEnhance.Brightness(side).enhance(0.9)
side.putalpha(sidealpha)
othersidealpha = otherside.split()[3]
otherside = ImageEnhance.Brightness(otherside).enhance(0.8)
otherside.putalpha(othersidealpha)
# upside down slab
delta = 0
if upper:
delta = 6
img = Image.new("RGBA", (24,24), self.bgcolor)
alpha_over(img, side, (0,12 - delta), side)
alpha_over(img, otherside, (12,12 - delta), otherside)
alpha_over(img, top, (0,6 - delta), top)
# Manually touch up 6 pixels that leave a gap because of how the
# shearing works out. This makes the blocks perfectly tessellate-able
if upper:
for x,y in [(3,4), (7,2), (11,0)]:
# Copy a pixel to x,y from x+1,y
img.putpixel((x,y), img.getpixel((x+1,y)))
for x,y in [(13,17), (17,15), (21,13)]:
# Copy a pixel to x,y from x-1,y
img.putpixel((x,y), img.getpixel((x-1,y)))
else:
for x,y in [(3,10), (7,8), (11,6)]:
# Copy a pixel to x,y from x+1,y
img.putpixel((x,y), img.getpixel((x+1,y)))
for x,y in [(13,23), (17,21), (21,19)]:
# Copy a pixel to x,y from x-1,y
img.putpixel((x,y), img.getpixel((x-1,y)))
return img
def build_full_block(self, top, side1, side2, side3, side4, bottom=None):
"""From a top texture, a bottom texture and 4 different side textures,
build a full block with four differnts faces. All images should be 16x16
image objects. Returns a 24x24 image. Can be used to render any block.
side1 is in the -y face of the cube (top left, east)
side2 is in the +x (top right, south)
side3 is in the -x (bottom left, north)
side4 is in the +y (bottom right, west)
A non transparent block uses top, side 3 and side 4.
If top is a tuple then first item is the top image and the second
item is an increment (integer) from 0 to 16 (pixels in the
original minecraft texture). This increment will be used to crop the
side images and to paste the top image increment pixels lower, so if
you use an increment of 8, it will draw a half-block.
NOTE: this method uses the bottom of the texture image (as done in
minecraft with beds and cackes)
"""
increment = 0
if isinstance(top, tuple):
increment = int(round((top[1] / 16.)*12.)) # range increment in the block height in pixels (half texture size)
crop_height = increment
top = top[0]
if side1 is not None:
side1 = side1.copy()
ImageDraw.Draw(side1).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side2 is not None:
side2 = side2.copy()
ImageDraw.Draw(side2).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side3 is not None:
side3 = side3.copy()
ImageDraw.Draw(side3).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side4 is not None:
side4 = side4.copy()
ImageDraw.Draw(side4).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
img = Image.new("RGBA", (24,24), self.bgcolor)
# first back sides
if side1 is not None :
side1 = self.transform_image_side(side1)
side1 = side1.transpose(Image.FLIP_LEFT_RIGHT)
# Darken this side.
sidealpha = side1.split()[3]
side1 = ImageEnhance.Brightness(side1).enhance(0.9)
side1.putalpha(sidealpha)
alpha_over(img, side1, (0,0), side1)
if side2 is not None :
side2 = self.transform_image_side(side2)
# Darken this side.
sidealpha2 = side2.split()[3]
side2 = ImageEnhance.Brightness(side2).enhance(0.8)
side2.putalpha(sidealpha2)
alpha_over(img, side2, (12,0), side2)
if bottom is not None :
bottom = self.transform_image_top(bottom)
alpha_over(img, bottom, (0,12), bottom)
# front sides
if side3 is not None :
side3 = self.transform_image_side(side3)
# Darken this side
sidealpha = side3.split()[3]
side3 = ImageEnhance.Brightness(side3).enhance(0.9)
side3.putalpha(sidealpha)
alpha_over(img, side3, (0,6), side3)
if side4 is not None :
side4 = self.transform_image_side(side4)
side4 = side4.transpose(Image.FLIP_LEFT_RIGHT)
# Darken this side
sidealpha = side4.split()[3]
side4 = ImageEnhance.Brightness(side4).enhance(0.8)
side4.putalpha(sidealpha)
alpha_over(img, side4, (12,6), side4)
if top is not None :
top = self.transform_image_top(top)
alpha_over(img, top, (0, increment), top)
# Manually touch up 6 pixels that leave a gap because of how the
# shearing works out. This makes the blocks perfectly tessellate-able
for x,y in [(13,23), (17,21), (21,19)]:
# Copy a pixel to x,y from x-1,y
img.putpixel((x,y), img.getpixel((x-1,y)))
for x,y in [(3,4), (7,2), (11,0)]:
# Copy a pixel to x,y from x+1,y
img.putpixel((x,y), img.getpixel((x+1,y)))
return img
def build_sprite(self, side):
"""From a side texture, create a sprite-like texture such as those used
for spiderwebs or flowers."""
img = Image.new("RGBA", (24,24), self.bgcolor)
side = self.transform_image_side(side)
otherside = side.transpose(Image.FLIP_LEFT_RIGHT)
alpha_over(img, side, (6,3), side)
alpha_over(img, otherside, (6,3), otherside)
return img
def build_billboard(self, tex):
"""From a texture, create a billboard-like texture such as
those used for tall grass or melon stems.
"""
img = Image.new("RGBA", (24,24), self.bgcolor)
front = tex.resize((14, 12), Image.ANTIALIAS)
alpha_over(img, front, (5,9))
return img
def generate_opaque_mask(self, img):
""" Takes the alpha channel of the image and generates a mask
(used for lighting the block) that deprecates values of alpha
smallers than 50, and sets every other value to 255. """
alpha = img.split()[3]
return alpha.point(lambda a: int(min(a, 25.5) * 10))
def tint_texture(self, im, c):
# apparently converting to grayscale drops the alpha channel?
i = ImageOps.colorize(ImageOps.grayscale(im), (0,0,0), c)
i.putalpha(im.split()[3]); # copy the alpha band back in. assuming RGBA
return i
def generate_texture_tuple(self, img):
""" This takes an image and returns the needed tuple for the
blockmap array."""
if img is None:
return None
return (img, self.generate_opaque_mask(img))
##
## The other big one: @material and associated framework
##

# global variables to collate information in @material decorators
blockmap_generators = {}  # maps (blockid, data) -> texture generator function

known_blocks = set()      # every blockid registered via @material
used_datas = set()        # every data value registered via @material
max_blockid = 0           # one past the largest registered blockid
max_data = 0              # one past the largest registered data value

# per-property blockid sets, filled from @material keyword flags
transparent_blocks = set()
solid_blocks = set()
fluid_blocks = set()
nospawn_blocks = set()
nodata_blocks = set()
# the material registration decorator
def material(blockid=[], data=[0], **kwargs):
    """Decorator factory that registers a texture-generating function
    for the given blockid(s) and data value(s).

    blockid and data may each be a single value or an iterable. The
    recognized keyword flags -- transparent, solid, fluid, nospawn,
    nodata -- may each be a boolean (applies to all blockids) or an
    iterable of blockids. Registration updates the module-level
    registries: blockmap_generators, known_blocks, used_datas,
    max_blockid, max_data and the per-property block sets."""
    # mapping from property name to the set to store them in
    properties = {"transparent" : transparent_blocks, "solid" : solid_blocks, "fluid" : fluid_blocks, "nospawn" : nospawn_blocks, "nodata" : nodata_blocks}

    # make sure blockid and data are iterable
    # (iter() raises TypeError for non-iterables; catch that specifically
    # instead of using a bare except, which would also swallow things like
    # KeyboardInterrupt)
    try:
        iter(blockid)
    except TypeError:
        blockid = [blockid,]
    try:
        iter(data)
    except TypeError:
        data = [data,]

    def inner_material(func):
        global blockmap_generators
        global max_data, max_blockid

        # create a wrapper function with a known signature
        @functools.wraps(func)
        def func_wrapper(texobj, blockid, data):
            return func(texobj, blockid, data)

        used_datas.update(data)
        if max(data) >= max_data:
            max_data = max(data) + 1

        for block in blockid:
            # set the property sets appropriately
            known_blocks.update([block])
            if block >= max_blockid:
                max_blockid = block + 1
            for prop in properties:
                try:
                    if block in kwargs.get(prop, []):
                        properties[prop].update([block])
                except TypeError:
                    # the property was given as a plain boolean, not a list
                    if kwargs.get(prop, False):
                        properties[prop].update([block])

            # populate blockmap_generators with our function
            for d in data:
                blockmap_generators[(block, d)] = func_wrapper

        return func_wrapper
    return inner_material
# shortcut function for pure blocks, default to solid, nodata
def block(blockid=[], top_image=None, side_image=None, **kwargs):
    """Register a plain cube block built from a top texture and an
    (optionally different) side texture. Defaults to solid, nodata."""
    if top_image is None:
        raise ValueError("top_image was not provided")

    settings = {'solid' : True, 'nodata' : True}
    settings.update(kwargs)
    top_path = top_image
    side_path = top_image if side_image is None else side_image

    @material(blockid=blockid, **settings)
    def inner_block(self, unused_id, unused_data):
        return self.build_block(self.load_image_texture(top_path), self.load_image_texture(side_path))
    return inner_block
# shortcut function for sprite blocks, defaults to transparent, nodata
def sprite(blockid=[], imagename=None, **kwargs):
    """Register an X-shaped sprite block (flowers, cobwebs, ...) from a
    single texture. Defaults to transparent, nodata."""
    if imagename is None:
        raise ValueError("imagename was not provided")

    settings = {'transparent' : True, 'nodata' : True}
    settings.update(kwargs)

    @material(blockid=blockid, **settings)
    def inner_sprite(self, unused_id, unused_data):
        return self.build_sprite(self.load_image_texture(imagename))
    return inner_sprite
# shortcut function for billboard blocks, defaults to transparent, nodata
def billboard(blockid=[], imagename=None, **kwargs):
    """Register a flat billboard block (tall grass, melon stems, ...)
    from a single texture. Defaults to transparent, nodata."""
    if imagename is None:
        raise ValueError("imagename was not provided")

    settings = {'transparent' : True, 'nodata' : True}
    settings.update(kwargs)

    @material(blockid=blockid, **settings)
    def inner_billboard(self, unused_id, unused_data):
        return self.build_billboard(self.load_image_texture(imagename))
    return inner_billboard
##
## and finally: actual texture definitions
##
# stone
@material(blockid=1, data=range(7), solid=True)
def stone(self, blockid, data):
    """Stone and its granite/diorite/andesite variants."""
    variants = {
        0: "stone",                   # regular old-school stone
        1: "stone_granite",           # granite
        2: "stone_granite_smooth",    # polished granite
        3: "stone_diorite",           # diorite
        4: "stone_diorite_smooth",    # polished diorite
        5: "stone_andesite",          # andesite
        6: "stone_andesite_smooth",   # polished andesite
    }
    img = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % variants[data])
    return self.build_block(img, img)
@material(blockid=2, data=range(11)+[0x10,], solid=True)
def grass(self, blockid, data):
    """Grass block; the 0x10 pseudo-data bit means snow on top."""
    snowy = bool(data & 0x10)
    side_img = self.load_image_texture("assets/minecraft/textures/blocks/grass_side.png")
    if snowy:
        side_img = self.load_image_texture("assets/minecraft/textures/blocks/grass_side_snowed.png")
    top_img = self.load_image_texture("assets/minecraft/textures/blocks/grass_top.png")
    img = self.build_block(top_img, side_img)
    if not snowy:
        # overlay the (biome-tintable) grass mask on non-snowy grass
        alpha_over(img, self.biome_grass_texture, (0, 0), self.biome_grass_texture)
    return img
# dirt
@material(blockid=3, data=range(3), solid=True)
def dirt_blocks(self, blockid, data):
    """Dirt (0), grassless/coarse dirt (1) and podzol (2)."""
    plain = self.load_image_texture("assets/minecraft/textures/blocks/dirt.png")
    if data == 2: # podzol
        pod_side = self.load_image_texture("assets/minecraft/textures/blocks/dirt_podzol_side.png")
        pod_top = self.load_image_texture("assets/minecraft/textures/blocks/dirt_podzol_top.png")
        return self.build_block(pod_top, pod_side)
    # normal (0) and grassless (1) use the plain dirt texture on all faces
    return self.build_block(plain, plain)
# cobblestone (blockid 4): plain cube, same texture on all faces
block(blockid=4, top_image="assets/minecraft/textures/blocks/cobblestone.png")
# wooden planks
@material(blockid=5, data=range(6), solid=True)
def wooden_planks(self, blockid, data):
    """Planks; data selects the species."""
    # index == data value: oak, spruce (pine), birch, jungle, acacia, dark oak
    species = ("oak", "spruce", "birch", "jungle", "acacia", "big_oak")
    tex = self.load_image_texture("assets/minecraft/textures/blocks/planks_%s.png" % species[data])
    return self.build_block(tex, tex)
@material(blockid=6, data=range(16), transparent=True)
def saplings(self, blockid, data):
    """Tree saplings rendered as sprites.

    The low three bits select the species; bit 0x8 is the growth counter
    and is ignored for rendering.

    Fixes: the species test previously masked with ``0x3``, which can never
    equal 4 or 5, so acacia and dark-oak saplings always fell back to the
    oak texture. The mask is now ``0x7``.
    """
    species = {
        1: "sapling_spruce",
        2: "sapling_birch",
        3: "sapling_jungle",
        4: "sapling_acacia",
        5: "sapling_roofed_oak",  # dark oak/roofed oak/big oak
    }
    name = species.get(data & 0x7, "sapling_oak")
    tex = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % name)
    return self.build_sprite(tex)
# bedrock -- plain full block, same texture on all faces
block(blockid=7, top_image="assets/minecraft/textures/blocks/bedrock.png")
@material(blockid=8, data=range(16), fluid=True, transparent=True, nospawn=True)
def water(self, blockid, data):
    """Flowing water (blockid 8): the water texture on every face."""
    tex = self.load_water()
    return self.build_block(tex, tex)
# other water, glass, and ice (no inner surfaces)
# uses pseudo-ancildata found in iterate.c
@material(blockid=[9, 20, 79, 95], data=range(512), fluid=(9,), transparent=True, nospawn=True, solid=(79, 20, 95))
def no_inner_surfaces(self, blockid, data):
    """Still water (9), glass (20), ice (79) and stained glass (95), hiding
    faces that touch a block of the same material.

    For the non-water ids the low nibble carries the stained-glass color and
    the next 5 bits (filled in by iterate.c) say which faces are exposed;
    water stores the face bits unshifted.
    """
    if blockid == 9:
        texture = self.load_water()
    elif blockid == 20:
        texture = self.load_image_texture("assets/minecraft/textures/blocks/glass.png")
    elif blockid == 95:
        # low nibble selects the stained-glass color via the shared color_map
        texture = self.load_image_texture("assets/minecraft/textures/blocks/glass_%s.png" % color_map[data & 0x0f])
    else:
        texture = self.load_image_texture("assets/minecraft/textures/blocks/ice.png")

    # now that we've used the lower 4 bits to get color, shift down to get the 5 bits that encode face hiding
    if blockid != 9: # water doesn't have a shifted pseudodata
        data = data >> 4

    # one bit per face: a face is drawn only when its bit is set
    if (data & 0b10000) == 16:
        top = texture
    else:
        top = None

    if (data & 0b0001) == 1:
        side1 = texture    # top left
    else:
        side1 = None

    if (data & 0b1000) == 8:
        side2 = texture    # top right
    else:
        side2 = None

    if (data & 0b0010) == 2:
        side3 = texture    # bottom left
    else:
        side3 = None

    if (data & 0b0100) == 4:
        side4 = texture    # bottom right
    else:
        side4 = None

    # if nothing shown do not draw at all
    if top is None and side3 is None and side4 is None:
        return None

    # NOTE: side1/side2 (the back faces) are computed but never passed to
    # build_full_block -- only the visible top and front faces are drawn.
    img = self.build_full_block(top,None,None,side3,side4)
    return img
@material(blockid=[10, 11], data=range(16), fluid=True, transparent=False, nospawn=True)
def lava(self, blockid, data):
    """Flowing (10) and still (11) lava: the lava texture on every face."""
    tex = self.load_lava()
    return self.build_block(tex, tex)
# sand
@material(blockid=12, data=range(2), solid=True)
def sand_blocks(self, blockid, data):
    """Sand block: data 0 = normal sand, data 1 = red sand.

    Fixes: the original loaded the same texture twice per branch and left
    ``img`` unbound for unexpected data values; unknown data now falls back
    to normal sand.
    """
    name = "red_sand" if data == 1 else "sand"
    tex = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % name)
    return self.build_block(tex, tex)
# plain full blocks, same texture on all faces
# gravel
block(blockid=13, top_image="assets/minecraft/textures/blocks/gravel.png")
# gold ore
block(blockid=14, top_image="assets/minecraft/textures/blocks/gold_ore.png")
# iron ore
block(blockid=15, top_image="assets/minecraft/textures/blocks/iron_ore.png")
# coal ore
block(blockid=16, top_image="assets/minecraft/textures/blocks/coal_ore.png")
@material(blockid=[17,162], data=range(12), solid=True)
def wood(self, blockid, data):
    """Log blocks (17 = oak/spruce/birch/jungle, 162 = acacia/dark oak).

    Bits 0-1 of data give the species; bits 2-3 give the log axis
    (0 = vertical, 4 = east-west, 8 = north-south).
    """
    # extract orientation and wood type frorm data bits
    wood_type = data & 3
    wood_orientation = data & 12
    # any odd quarter-turn of the map (rotation 1 or 3) swaps the two
    # horizontal axes, so 4 and 8 trade places in both cases
    if self.rotation == 1:
        if wood_orientation == 4: wood_orientation = 8
        elif wood_orientation == 8: wood_orientation = 4
    elif self.rotation == 3:
        if wood_orientation == 4: wood_orientation = 8
        elif wood_orientation == 8: wood_orientation = 4

    # choose textures
    if blockid == 17: # regular wood:
        if wood_type == 0: # normal
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_oak_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_oak.png")
        if wood_type == 1: # spruce
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_spruce_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_spruce.png")
        if wood_type == 2: # birch
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_birch_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_birch.png")
        if wood_type == 3: # jungle wood
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_jungle_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_jungle.png")
    elif blockid == 162: # acacia/dark wood:
        if wood_type == 0: # acacia
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_acacia_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_acacia.png")
        elif wood_type == 1: # dark oak
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_big_oak_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_big_oak.png")
        else:
            # unknown 162 subtype: fall back to acacia
            top = self.load_image_texture("assets/minecraft/textures/blocks/log_acacia_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/log_acacia.png")

    # choose orientation and paste textures
    if wood_orientation == 0:
        return self.build_block(top, side)
    elif wood_orientation == 4: # east-west orientation
        return self.build_full_block(side.rotate(90), None, None, top, side.rotate(90))
    elif wood_orientation == 8: # north-south orientation
        return self.build_full_block(side, None, None, side.rotate(270), top)
@material(blockid=[18, 161], data=range(16), transparent=True, solid=True)
def leaves(self, blockid, data):
    """Leaf blocks (18 and 161). Bits 0x8 and anything above 0x7 are used
    for player-placed / check-for-decay flags, so only the low three bits
    plus the block id pick the species texture."""
    species = {
        (18, 1): "leaves_spruce",    # pine
        (18, 2): "leaves_birch",
        (18, 3): "leaves_jungle",
        (161, 4): "leaves_acacia",
        (161, 5): "leaves_big_oak",  # dark oak
    }
    name = species.get((blockid, data & 0x7), "leaves_oak")
    t = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % name)
    return self.build_block(t, t)
# plain full blocks, same texture on all faces
# sponge
block(blockid=19, top_image="assets/minecraft/textures/blocks/sponge.png")
# lapis lazuli ore
block(blockid=21, top_image="assets/minecraft/textures/blocks/lapis_ore.png")
# lapis lazuli block
block(blockid=22, top_image="assets/minecraft/textures/blocks/lapis_block.png")
# dispensers, dropper, furnaces, and burning furnaces
@material(blockid=[23, 61, 62, 158], data=range(6), solid=True)
def furnaces(self, blockid, data):
    """Dispenser (23), furnace (61), lit furnace (62) and dropper (158).

    data 2-5 is the horizontal facing (remapped below for the render
    rotation); dispensers and droppers can additionally face down (0)
    or up (1), which returns early with a vertical-face build.
    """
    # first, do the rotation if needed
    if self.rotation == 1:
        if data == 2: data = 5
        elif data == 3: data = 4
        elif data == 4: data = 2
        elif data == 5: data = 3
    elif self.rotation == 2:
        if data == 2: data = 3
        elif data == 3: data = 2
        elif data == 4: data = 5
        elif data == 5: data = 4
    elif self.rotation == 3:
        if data == 2: data = 4
        elif data == 3: data = 5
        elif data == 4: data = 3
        elif data == 5: data = 2

    top = self.load_image_texture("assets/minecraft/textures/blocks/furnace_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/furnace_side.png")

    if blockid == 61:
        front = self.load_image_texture("assets/minecraft/textures/blocks/furnace_front_off.png")
    elif blockid == 62:
        front = self.load_image_texture("assets/minecraft/textures/blocks/furnace_front_on.png")
    elif blockid == 23:
        front = self.load_image_texture("assets/minecraft/textures/blocks/dispenser_front_horizontal.png")
        if data == 0: # dispenser pointing down
            return self.build_block(top, top)
        elif data == 1: # dispenser pointing up
            dispenser_top = self.load_image_texture("assets/minecraft/textures/blocks/dispenser_front_vertical.png")
            return self.build_block(dispenser_top, top)
    elif blockid == 158:
        front = self.load_image_texture("assets/minecraft/textures/blocks/dropper_front_horizontal.png")
        if data == 0: # dropper pointing down
            return self.build_block(top, top)
        elif data == 1: # dispenser pointing up
            dropper_top = self.load_image_texture("assets/minecraft/textures/blocks/dropper_front_vertical.png")
            return self.build_block(dropper_top, top)

    # only west- and north-facing fronts are visible in the isometric view
    if data == 3: # pointing west
        return self.build_full_block(top, None, None, side, front)
    elif data == 4: # pointing north
        return self.build_full_block(top, None, None, front, side)
    else: # in any other direction the front can't be seen
        return self.build_full_block(top, None, None, side, side)
# sandstone
@material(blockid=24, data=range(3), solid=True)
def sandstone(self, blockid, data):
    """Sandstone: normal (0), chiseled/hieroglyphic (1) and smooth (2)
    variants share the same top texture and differ only on the sides."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_top.png")
    side_names = ("sandstone_normal", "sandstone_carved", "sandstone_smooth")
    side = self.load_image_texture(
        "assets/minecraft/textures/blocks/%s.png" % side_names[data])
    return self.build_block(top, side)
# red sandstone
# NOTE(review): this redefines the module-level name `sandstone` (see
# blockid 24 above). Rendering is unaffected because @material registers
# each function at decoration time, but a distinct name would be clearer.
@material(blockid=179, data=range(3), solid=True)
def sandstone(self, blockid, data):
    """Red sandstone: normal (0), chiseled (1) and smooth (2) variants."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_top.png")
    if data == 0: # normal
        side = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_normal.png")
        # red sandstone has a distinct bottom texture, so the normal variant
        # uses build_full_block to supply it
        return self.build_full_block(top, None, None, side, side, self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_bottom.png") )
    if data == 1: # hieroglyphic
        return self.build_block(top, self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_carved.png"))
    if data == 2: # soft
        return self.build_block(top, self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_smooth.png"))
# note block -- plain full block, same texture on all faces
block(blockid=25, top_image="assets/minecraft/textures/blocks/noteblock.png")
@material(blockid=26, data=range(12), transparent=True, nospawn=True)
def bed(self, blockid, data):
    """Bed halves. Bits 0-1 of data give the facing, bit 0x8 marks the head
    half (else foot). The block is rendered 8/16 shorter than a full block.

    NOTE(review): the facing tests below are NOT elif and the first one
    (``data & 0x00 == 0x00``) is always true, so West acts as a default
    that later true branches overwrite in cascade; facings 1-3 trigger
    multiple branches and the last matching one wins. The final textures
    come out right, but the ``top`` rotations compound across branches --
    confirm against rendered output before touching this chain.
    """
    # first get rotation done
    # Masked to not clobber block head/foot info
    if self.rotation == 1:
        if (data & 0b0011) == 0: data = data & 0b1100 | 1
        elif (data & 0b0011) == 1: data = data & 0b1100 | 2
        elif (data & 0b0011) == 2: data = data & 0b1100 | 3
        elif (data & 0b0011) == 3: data = data & 0b1100 | 0
    elif self.rotation == 2:
        if (data & 0b0011) == 0: data = data & 0b1100 | 2
        elif (data & 0b0011) == 1: data = data & 0b1100 | 3
        elif (data & 0b0011) == 2: data = data & 0b1100 | 0
        elif (data & 0b0011) == 3: data = data & 0b1100 | 1
    elif self.rotation == 3:
        if (data & 0b0011) == 0: data = data & 0b1100 | 3
        elif (data & 0b0011) == 1: data = data & 0b1100 | 0
        elif (data & 0b0011) == 2: data = data & 0b1100 | 1
        elif (data & 0b0011) == 3: data = data & 0b1100 | 2

    # height offset passed with the top texture: bed is half a block tall
    increment = 8
    left_face = None
    right_face = None
    if data & 0x8 == 0x8: # head of the bed
        top = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_top.png")
        if data & 0x00 == 0x00: # head pointing to West
            top = top.copy().rotate(270)
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_side.png")
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_end.png")
        if data & 0x01 == 0x01: # ... North
            top = top.rotate(270)
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_end.png")
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_side.png")
        if data & 0x02 == 0x02: # East
            top = top.rotate(180)
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_side.png").transpose(Image.FLIP_LEFT_RIGHT)
            right_face = None
        if data & 0x03 == 0x03: # South
            # NOTE(review): dead assignment -- overwritten on the next line
            right_face = None
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_head_side.png").transpose(Image.FLIP_LEFT_RIGHT)
    else: # foot of the bed
        top = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_top.png")
        if data & 0x00 == 0x00: # head pointing to West
            top = top.rotate(270)
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_side.png")
            right_face = None
        if data & 0x01 == 0x01: # ... North
            top = top.rotate(270)
            left_face = None
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_side.png")
        if data & 0x02 == 0x02: # East
            top = top.rotate(180)
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_side.png").transpose(Image.FLIP_LEFT_RIGHT)
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_end.png").transpose(Image.FLIP_LEFT_RIGHT)
        if data & 0x03 == 0x03: # South
            left_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_end.png")
            right_face = self.load_image_texture("assets/minecraft/textures/blocks/bed_feet_side.png").transpose(Image.FLIP_LEFT_RIGHT)

    top = (top, increment)
    return self.build_full_block(top, None, None, left_face, right_face)
# powered, detector, activator and normal rails
@material(blockid=[27, 28, 66, 157], data=range(14), transparent=True)
def rails(self, blockid, data):
    """Rails: powered (27), detector (28), normal (66), activator (157).

    data 0-1 are flat straight tracks, 2-5 are slopes, 6-9 are corners
    (normal rail only); bit 0x8 is the on/off state for powered and
    activator rails and is masked off after choosing the texture.
    """
    # first, do rotation
    # Masked to not clobber powered rail on/off info
    # Ascending and flat straight
    if self.rotation == 1:
        if (data & 0b0111) == 0: data = data & 0b1000 | 1
        elif (data & 0b0111) == 1: data = data & 0b1000 | 0
        elif (data & 0b0111) == 2: data = data & 0b1000 | 5
        elif (data & 0b0111) == 3: data = data & 0b1000 | 4
        elif (data & 0b0111) == 4: data = data & 0b1000 | 2
        elif (data & 0b0111) == 5: data = data & 0b1000 | 3
    elif self.rotation == 2:
        if (data & 0b0111) == 2: data = data & 0b1000 | 3
        elif (data & 0b0111) == 3: data = data & 0b1000 | 2
        elif (data & 0b0111) == 4: data = data & 0b1000 | 5
        elif (data & 0b0111) == 5: data = data & 0b1000 | 4
    elif self.rotation == 3:
        if (data & 0b0111) == 0: data = data & 0b1000 | 1
        elif (data & 0b0111) == 1: data = data & 0b1000 | 0
        elif (data & 0b0111) == 2: data = data & 0b1000 | 4
        elif (data & 0b0111) == 3: data = data & 0b1000 | 5
        elif (data & 0b0111) == 4: data = data & 0b1000 | 3
        elif (data & 0b0111) == 5: data = data & 0b1000 | 2

    if blockid == 66: # normal minetrack only
        # Corners (6-9) only exist for normal rails
        # NOTE(review): these corner tables look asymmetric between
        # rotations (e.g. rotation 1 maps 8->6 and leaves 9 fixed, while
        # rotation 2 is a clean 2-cycle) -- verify corner rendering on
        # rotated maps before relying on them.
        if self.rotation == 1:
            if data == 6: data = 7
            elif data == 7: data = 8
            elif data == 8: data = 6
            elif data == 9: data = 9
        elif self.rotation == 2:
            if data == 6: data = 8
            elif data == 7: data = 9
            elif data == 8: data = 6
            elif data == 9: data = 7
        elif self.rotation == 3:
            if data == 6: data = 9
            elif data == 7: data = 6
            elif data == 8: data = 8
            elif data == 9: data = 7
    img = Image.new("RGBA", (24,24), self.bgcolor)

    if blockid == 27: # powered rail
        if data & 0x8 == 0: # unpowered
            raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_golden.png")
            raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")    # they don't exist but make the code
                                                                                                               # much simplier
        elif data & 0x8 == 0x8: # powered
            raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_golden_powered.png")
            raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")    # leave corners for code simplicity
        # filter the 'powered' bit
        data = data & 0x7

    elif blockid == 28: # detector rail
        raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_detector.png")
        raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")    # leave corners for code simplicity

    elif blockid == 66: # normal rail
        raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal.png")
        raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")

    elif blockid == 157: # activator rail
        if data & 0x8 == 0: # unpowered
            raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_activator.png")
            raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")    # they don't exist but make the code
                                                                                                               # much simplier
        elif data & 0x8 == 0x8: # powered
            raw_straight = self.load_image_texture("assets/minecraft/textures/blocks/rail_activator_powered.png")
            raw_corner = self.load_image_texture("assets/minecraft/textures/blocks/rail_normal_turned.png")    # leave corners for code simplicity
        # filter the 'powered' bit
        data = data & 0x7

    ## use transform_image to scale and shear
    if data == 0:
        track = self.transform_image_top(raw_straight)
        alpha_over(img, track, (0,12), track)
    elif data == 6:
        track = self.transform_image_top(raw_corner)
        alpha_over(img, track, (0,12), track)
    elif data == 7:
        track = self.transform_image_top(raw_corner.rotate(270))
        alpha_over(img, track, (0,12), track)
    elif data == 8:
        # flip
        track = self.transform_image_top(raw_corner.transpose(Image.FLIP_TOP_BOTTOM).rotate(90))
        alpha_over(img, track, (0,12), track)
    elif data == 9:
        track = self.transform_image_top(raw_corner.transpose(Image.FLIP_TOP_BOTTOM))
        alpha_over(img, track, (0,12), track)
    elif data == 1:
        track = self.transform_image_top(raw_straight.rotate(90))
        alpha_over(img, track, (0,12), track)

    #slopes
    elif data == 2: # slope going up in +x direction
        track = self.transform_image_slope(raw_straight)
        track = track.transpose(Image.FLIP_LEFT_RIGHT)
        alpha_over(img, track, (2,0), track)
        # the 2 pixels move is needed to fit with the adjacent tracks

    elif data == 3: # slope going up in -x direction
        # tracks are sprites, in this case we are seeing the "side" of
        # the sprite, so draw a line to make it beautiful.
        ImageDraw.Draw(img).line([(11,11),(23,17)],fill=(164,164,164))
        # grey from track texture (exterior grey).
        # the track doesn't start from image corners, be carefull drawing the line!

    elif data == 4: # slope going up in -y direction
        track = self.transform_image_slope(raw_straight)
        alpha_over(img, track, (0,0), track)

    elif data == 5: # slope going up in +y direction
        # same as "data == 3"
        ImageDraw.Draw(img).line([(1,17),(12,11)],fill=(164,164,164))

    return img
# sticky and normal piston body
@material(blockid=[29, 33], data=[0,1,2,3,4,5,8,9,10,11,12,13], transparent=True, solid=True, nospawn=True)
def piston(self, blockid, data):
    """Piston body: sticky (29) or normal (33).

    The low 3 bits of data are the facing (0 down, 1 up, 2-5 horizontal);
    bit 0x8 means the piston is extended, in which case the face where the
    head would be is cut away.
    """
    # first, rotation
    # Masked to not clobber block head/foot info
    if self.rotation == 1:
        if (data & 0b0111) == 2: data = data & 0b1000 | 5
        elif (data & 0b0111) == 3: data = data & 0b1000 | 4
        elif (data & 0b0111) == 4: data = data & 0b1000 | 2
        elif (data & 0b0111) == 5: data = data & 0b1000 | 3
    elif self.rotation == 2:
        if (data & 0b0111) == 2: data = data & 0b1000 | 3
        elif (data & 0b0111) == 3: data = data & 0b1000 | 2
        elif (data & 0b0111) == 4: data = data & 0b1000 | 5
        elif (data & 0b0111) == 5: data = data & 0b1000 | 4
    elif self.rotation == 3:
        if (data & 0b0111) == 2: data = data & 0b1000 | 4
        elif (data & 0b0111) == 3: data = data & 0b1000 | 5
        elif (data & 0b0111) == 4: data = data & 0b1000 | 3
        elif (data & 0b0111) == 5: data = data & 0b1000 | 2

    if blockid == 29: # sticky
        piston_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_top_sticky.png").copy()
    else: # normal
        piston_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_top_normal.png").copy()

    # other textures
    side_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_side.png").copy()
    back_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_bottom.png").copy()
    interior_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_inner.png").copy()

    if data & 0x08 == 0x08: # pushed out, non full blocks, tricky stuff
        # remove piston texture from piston body
        ImageDraw.Draw(side_t).rectangle((0, 0,16,3),outline=(0,0,0,0),fill=(0,0,0,0))

        if data & 0x07 == 0x0: # down
            side_t = side_t.rotate(180)
            img = self.build_full_block(back_t ,None ,None ,side_t, side_t)
        elif data & 0x07 == 0x1: # up
            # top face sits 4 pixels down: the interior is visible
            img = self.build_full_block((interior_t, 4) ,None ,None ,side_t, side_t)
        elif data & 0x07 == 0x2: # east
            img = self.build_full_block(side_t , None, None ,side_t.rotate(90), back_t)
        elif data & 0x07 == 0x3: # west
            img = self.build_full_block(side_t.rotate(180) ,None ,None ,side_t.rotate(270), None)
            # paste the interior where the head was pushed out of
            temp = self.transform_image_side(interior_t)
            temp = temp.transpose(Image.FLIP_LEFT_RIGHT)
            alpha_over(img, temp, (9,5), temp)
        elif data & 0x07 == 0x4: # north
            img = self.build_full_block(side_t.rotate(90) ,None ,None , None, side_t.rotate(270))
            temp = self.transform_image_side(interior_t)
            alpha_over(img, temp, (3,5), temp)
        elif data & 0x07 == 0x5: # south
            img = self.build_full_block(side_t.rotate(270) ,None , None ,back_t, side_t.rotate(90))

    else: # pushed in, normal full blocks, easy stuff
        if data & 0x07 == 0x0: # down
            side_t = side_t.rotate(180)
            img = self.build_full_block(back_t ,None ,None ,side_t, side_t)
        elif data & 0x07 == 0x1: # up
            img = self.build_full_block(piston_t ,None ,None ,side_t, side_t)
        elif data & 0x07 == 0x2: # east
            img = self.build_full_block(side_t ,None ,None ,side_t.rotate(90), back_t)
        elif data & 0x07 == 0x3: # west
            img = self.build_full_block(side_t.rotate(180) ,None ,None ,side_t.rotate(270), piston_t)
        elif data & 0x07 == 0x4: # north
            img = self.build_full_block(side_t.rotate(90) ,None ,None ,piston_t, side_t.rotate(270))
        elif data & 0x07 == 0x5: # south
            img = self.build_full_block(side_t.rotate(270) ,None ,None ,back_t, side_t.rotate(90))

    return img
# sticky and normal piston shaft
@material(blockid=34, data=[0,1,2,3,4,5,8,9,10,11,12,13], transparent=True, nospawn=True)
def piston_extension(self, blockid, data):
    """Piston head/shaft (34). Low 3 bits are facing; bit 0x8 selects the
    sticky head texture instead of the normal one. A pre-built shaft
    sprite (horizontal or vertical) is composited behind/through the head
    plate depending on facing."""
    # first, rotation
    # Masked to not clobber block head/foot info
    if self.rotation == 1:
        if (data & 0b0111) == 2: data = data & 0b1000 | 5
        elif (data & 0b0111) == 3: data = data & 0b1000 | 4
        elif (data & 0b0111) == 4: data = data & 0b1000 | 2
        elif (data & 0b0111) == 5: data = data & 0b1000 | 3
    elif self.rotation == 2:
        if (data & 0b0111) == 2: data = data & 0b1000 | 3
        elif (data & 0b0111) == 3: data = data & 0b1000 | 2
        elif (data & 0b0111) == 4: data = data & 0b1000 | 5
        elif (data & 0b0111) == 5: data = data & 0b1000 | 4
    elif self.rotation == 3:
        if (data & 0b0111) == 2: data = data & 0b1000 | 4
        elif (data & 0b0111) == 3: data = data & 0b1000 | 5
        elif (data & 0b0111) == 4: data = data & 0b1000 | 3
        elif (data & 0b0111) == 5: data = data & 0b1000 | 2

    if (data & 0x8) == 0x8: # sticky
        piston_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_top_sticky.png").copy()
    else: # normal
        piston_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_top_normal.png").copy()

    # other textures
    side_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_side.png").copy()
    back_t = self.load_image_texture("assets/minecraft/textures/blocks/piston_top_normal.png").copy()
    # crop piston body: keep only the 4-pixel head strip of the side texture
    ImageDraw.Draw(side_t).rectangle((0, 4,16,16),outline=(0,0,0,0),fill=(0,0,0,0))

    # generate the horizontal piston extension stick
    h_stick = Image.new("RGBA", (24,24), self.bgcolor)
    temp = self.transform_image_side(side_t)
    alpha_over(h_stick, temp, (1,7), temp)
    temp = self.transform_image_top(side_t.rotate(90))
    alpha_over(h_stick, temp, (1,1), temp)
    # Darken it
    sidealpha = h_stick.split()[3]
    h_stick = ImageEnhance.Brightness(h_stick).enhance(0.85)
    h_stick.putalpha(sidealpha)

    # generate the vertical piston extension stick
    v_stick = Image.new("RGBA", (24,24), self.bgcolor)
    temp = self.transform_image_side(side_t.rotate(90))
    alpha_over(v_stick, temp, (12,6), temp)
    temp = temp.transpose(Image.FLIP_LEFT_RIGHT)
    alpha_over(v_stick, temp, (1,6), temp)
    # Darken it
    sidealpha = v_stick.split()[3]
    v_stick = ImageEnhance.Brightness(v_stick).enhance(0.85)
    v_stick.putalpha(sidealpha)

    # Piston orientation is stored in the 3 first bits
    if data & 0x07 == 0x0: # down
        side_t = side_t.rotate(180)
        img = self.build_full_block((back_t, 12) ,None ,None ,side_t, side_t)
        alpha_over(img, v_stick, (0,-3), v_stick)
    elif data & 0x07 == 0x1: # up
        img = Image.new("RGBA", (24,24), self.bgcolor)
        img2 = self.build_full_block(piston_t ,None ,None ,side_t, side_t)
        # stick first, head plate on top of it
        alpha_over(img, v_stick, (0,4), v_stick)
        alpha_over(img, img2, (0,0), img2)
    elif data & 0x07 == 0x2: # east
        img = self.build_full_block(side_t ,None ,None ,side_t.rotate(90), None)
        temp = self.transform_image_side(back_t).transpose(Image.FLIP_LEFT_RIGHT)
        alpha_over(img, temp, (2,2), temp)
        alpha_over(img, h_stick, (6,3), h_stick)
    elif data & 0x07 == 0x3: # west
        img = Image.new("RGBA", (24,24), self.bgcolor)
        img2 = self.build_full_block(side_t.rotate(180) ,None ,None ,side_t.rotate(270), piston_t)
        alpha_over(img, h_stick, (0,0), h_stick)
        alpha_over(img, img2, (0,0), img2)
    elif data & 0x07 == 0x4: # north
        img = self.build_full_block(side_t.rotate(90) ,None ,None , piston_t, side_t.rotate(270))
        alpha_over(img, h_stick.transpose(Image.FLIP_LEFT_RIGHT), (0,0), h_stick.transpose(Image.FLIP_LEFT_RIGHT))
    elif data & 0x07 == 0x5: # south
        img = Image.new("RGBA", (24,24), self.bgcolor)
        img2 = self.build_full_block(side_t.rotate(270) ,None ,None ,None, side_t.rotate(90))
        temp = self.transform_image_side(back_t)
        alpha_over(img2, temp, (10,2), temp)
        alpha_over(img, img2, (0,0), img2)
        alpha_over(img, h_stick.transpose(Image.FLIP_LEFT_RIGHT), (-3,2), h_stick.transpose(Image.FLIP_LEFT_RIGHT))

    return img
# cobweb -- rendered as a flat sprite; mobs cannot spawn on it
sprite(blockid=30, imagename="assets/minecraft/textures/blocks/web.png", nospawn=True)
@material(blockid=31, data=range(3), transparent=True)
def tall_grass(self, blockid, data):
    """Shrub-type plants: dead shrub (0), tall grass (1), fern (2)."""
    names = ("deadbush", "tallgrass", "fern")
    texture = self.load_image_texture(
        "assets/minecraft/textures/blocks/%s.png" % names[data])
    return self.build_billboard(texture)
# dead bush -- rendered as a billboard (two crossed sprites)
billboard(blockid=32, imagename="assets/minecraft/textures/blocks/deadbush.png")
@material(blockid=35, data=range(16), solid=True)
def wool(self, blockid, data):
    """Wool block: one of 16 colors, named via the shared color_map table."""
    path = "assets/minecraft/textures/blocks/wool_colored_%s.png" % color_map[data]
    tex = self.load_image_texture(path)
    return self.build_block(tex, tex)
# dandelion -- rendered as a flat sprite
sprite(blockid=37, imagename="assets/minecraft/textures/blocks/flower_dandelion.png")
# flowers
@material(blockid=38, data=range(10), transparent=True)
def flower(self, blockid, data):
    """Small flowers (blockid 38): the data value indexes the variant."""
    variants = ("rose", "blue_orchid", "allium", "houstonia", "tulip_red",
                "tulip_orange", "tulip_white", "tulip_pink", "oxeye_daisy",
                "dandelion")
    tex = self.load_image_texture(
        "assets/minecraft/textures/blocks/flower_%s.png" % variants[data])
    return self.build_billboard(tex)
# brown mushroom -- flat sprite
sprite(blockid=39, imagename="assets/minecraft/textures/blocks/mushroom_brown.png")
# red mushroom -- flat sprite
sprite(blockid=40, imagename="assets/minecraft/textures/blocks/mushroom_red.png")
# block of gold -- plain full block
block(blockid=41, top_image="assets/minecraft/textures/blocks/gold_block.png")
# block of iron -- plain full block
block(blockid=42, top_image="assets/minecraft/textures/blocks/iron_block.png")
# double slabs and slabs
# these wooden slabs are unobtainable without cheating, they are still
# here because lots of pre-1.3 worlds use this blocks
@material(blockid=[43, 44, 181, 182, 204, 205], data=range(16), transparent=(44,182,205), solid=True)
def slabs(self, blockid, data):
    """Slab blocks: 43/44 stone double/single, 181/182 red sandstone
    double/single, 204/205 purpur double/single.

    For single slabs the low 3 bits pick the material and bit 0x8 means
    the slab sits in the top half of the block.
    """
    # single slabs store top/bottom in bit 8, so mask it off for material
    if blockid == 44 or blockid == 182:
        texture = data & 7
    else: # data > 8 are special double slabs
        texture = data

    if blockid == 44 or blockid == 43:
        if texture== 0: # stone slab
            top = self.load_image_texture("assets/minecraft/textures/blocks/stone_slab_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/stone_slab_side.png")
        elif texture== 1: # sandstone slab
            top = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_normal.png")
        elif texture== 2: # wooden slab
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png")
        elif texture== 3: # cobblestone slab
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/cobblestone.png")
        elif texture== 4: # brick
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/brick.png")
        elif texture== 5: # stone brick
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/stonebrick.png")
        elif texture== 6: # nether brick slab
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/nether_brick.png")
        elif texture== 7: #quartz
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/quartz_block_side.png")
        elif texture== 8: # special stone double slab with top texture only
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/stone_slab_top.png")
        elif texture== 9: # special sandstone double slab with top texture only
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_top.png")
        else:
            return None

    elif blockid == 182: # single red sandstone slab
        if texture == 0:
            top = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_normal.png")
        else:
            return None

    elif blockid == 181: # double red sandstone slab
        if texture == 0: # red sandstone
            top = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_top.png")
            side = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_normal.png")
        elif texture == 8: # 'full' red sandstone (smooth)
            top = side = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_top.png");
        else:
            return None

    elif blockid == 204 or blockid == 205: # purpur slab (single=205 double=204)
        top = side = self.load_image_texture("assets/minecraft/textures/blocks/purpur_block.png");

    if blockid == 43 or blockid == 181 or blockid == 204: # double slab
        return self.build_block(top, side)

    # single slab: bit 8 set means the slab sits in the top half
    return self.build_slab_block(top, side, data & 8 == 8);
# plain full blocks
# brick block
block(blockid=45, top_image="assets/minecraft/textures/blocks/brick.png")
# TNT -- distinct top and side textures; mobs cannot spawn on it
block(blockid=46, top_image="assets/minecraft/textures/blocks/tnt_top.png", side_image="assets/minecraft/textures/blocks/tnt_side.png", nospawn=True)
# bookshelf -- plank top with bookshelf sides
block(blockid=47, top_image="assets/minecraft/textures/blocks/planks_oak.png", side_image="assets/minecraft/textures/blocks/bookshelf.png")
# moss stone
block(blockid=48, top_image="assets/minecraft/textures/blocks/cobblestone_mossy.png")
# obsidian
block(blockid=49, top_image="assets/minecraft/textures/blocks/obsidian.png")
# torch, redstone torch (off), redstone torch(on)
@material(blockid=[50, 75, 76], data=[1, 2, 3, 4, 5], transparent=True)
def torches(self, blockid, data):
    """Torches: regular (50), redstone off (75), redstone on (76).

    data 1-4 are wall attachments (tilted sprite), data 5 stands on the
    floor (composited as a small 3D cross).
    """
    # first, rotations
    if self.rotation == 1:
        if data == 1: data = 3
        elif data == 2: data = 4
        elif data == 3: data = 2
        elif data == 4: data = 1
    elif self.rotation == 2:
        if data == 1: data = 2
        elif data == 2: data = 1
        elif data == 3: data = 4
        elif data == 4: data = 3
    elif self.rotation == 3:
        if data == 1: data = 4
        elif data == 2: data = 3
        elif data == 3: data = 1
        elif data == 4: data = 2

    # choose the proper texture
    if blockid == 50: # torch
        small = self.load_image_texture("assets/minecraft/textures/blocks/torch_on.png")
    elif blockid == 75: # off redstone torch
        small = self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_off.png")
    else: # on redstone torch
        small = self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_on.png")

    # compose a torch bigger than the normal
    # (better for doing transformations)
    torch = Image.new("RGBA", (16,16), self.bgcolor)
    alpha_over(torch,small,(-4,-3))
    alpha_over(torch,small,(-5,-2))
    alpha_over(torch,small,(-3,-2))

    # angle of inclination of the texture
    rotation = 15

    if data == 1: # pointing south
        torch = torch.rotate(-rotation, Image.NEAREST) # nearest filter is more nitid.
        img = self.build_full_block(None, None, None, torch, None, None)

    elif data == 2: # pointing north
        torch = torch.rotate(rotation, Image.NEAREST)
        img = self.build_full_block(None, None, torch, None, None, None)

    elif data == 3: # pointing west
        torch = torch.rotate(rotation, Image.NEAREST)
        img = self.build_full_block(None, torch, None, None, None, None)

    elif data == 4: # pointing east
        torch = torch.rotate(-rotation, Image.NEAREST)
        img = self.build_full_block(None, None, None, None, torch, None)

    elif data == 5: # standing on the floor
        # compose a "3d torch".
        img = Image.new("RGBA", (24,24), self.bgcolor)

        small_crop = small.crop((2,2,14,14))
        slice = small_crop.copy()
        # blank out everything but the central column of the sprite
        ImageDraw.Draw(slice).rectangle((6,0,12,12),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(slice).rectangle((0,0,4,12),outline=(0,0,0,0),fill=(0,0,0,0))

        alpha_over(img, slice, (7,5))
        alpha_over(img, small_crop, (6,6))
        alpha_over(img, small_crop, (7,6))
        alpha_over(img, slice, (7,7))

    return img
# fire
@material(blockid=51, data=range(16), transparent=True)
def fire(self, blockid, data):
    """Fire: cross two flame textures over both diagonals of the block."""
    flames = self.load_fire()
    left = self.transform_image_side(flames[0])
    right = self.transform_image_side(flames[1]).transpose(Image.FLIP_LEFT_RIGHT)

    img = Image.new("RGBA", (24, 24), self.bgcolor)
    # paste order matters: back pair first, then the front pair
    for face, pos in ((left, (12, 0)), (right, (0, 0)),
                      (left, (0, 6)), (right, (12, 6))):
        alpha_over(img, face, pos, face)
    return img
# monster spawner
block(blockid=52, top_image="assets/minecraft/textures/blocks/mob_spawner.png", transparent=True)
# wooden, cobblestone, red brick, stone brick, netherbrick, sandstone, spruce, birch, jungle, quartz, and red sandstone stairs.
@material(blockid=[53,67,108,109,114,128,134,135,136,156,163,164,180,203], data=range(128), transparent=True, solid=True, nospawn=True)
def stairs(self, blockid, data):
# preserve the upside-down bit
upside_down = data & 0x4
# find solid quarters within the top or bottom half of the block
# NW NE SE SW
quarters = [data & 0x8, data & 0x10, data & 0x20, data & 0x40]
# rotate the quarters so we can pretend northdirection is always upper-left
numpy.roll(quarters, [0,1,3,2][self.rotation])
nw,ne,se,sw = quarters
if blockid == 53: # wooden
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()
elif blockid == 67: # cobblestone
texture = self.load_image_texture("assets/minecraft/textures/blocks/cobblestone.png").copy()
elif blockid == 108: # red brick stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/brick.png").copy()
elif blockid == 109: # stone brick stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/stonebrick.png").copy()
elif blockid == 114: # netherbrick stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/nether_brick.png").copy()
elif blockid == 128: # sandstone stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_normal.png").copy()
elif blockid == 134: # spruce wood stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_spruce.png").copy()
elif blockid == 135: # birch wood stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_birch.png").copy()
elif blockid == 136: # jungle good stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_jungle.png").copy()
elif blockid == 156: # quartz block stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/quartz_block_side.png").copy()
elif blockid == 163: # acacia wood stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_acacia.png").copy()
elif blockid == 164: # dark oak stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_big_oak.png").copy()
elif blockid == 180: # red sandstone stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_normal.png").copy()
elif blockid == 203: # purpur stairs
texture = self.load_image_texture("assets/minecraft/textures/blocks/purpur_block.png").copy()
outside_l = texture.copy()
outside_r = texture.copy()
inside_l = texture.copy()
inside_r = texture.copy()
# sandstone, red sandstone, and quartz stairs have special top texture
if blockid == 128:
texture = self.load_image_texture("assets/minecraft/textures/blocks/sandstone_top.png").copy()
elif blockid == 156:
texture = self.load_image_texture("assets/minecraft/textures/blocks/quartz_block_top.png").copy()
elif blockid == 180:
texture = self.load_image_texture("assets/minecraft/textures/blocks/red_sandstone_top.png").copy()
slab_top = texture.copy()
push = 8 if upside_down else 0
def rect(tex,coords):
ImageDraw.Draw(tex).rectangle(coords,outline=(0,0,0,0),fill=(0,0,0,0))
# cut out top or bottom half from inner surfaces
rect(inside_l, (0,8-push,15,15-push))
rect(inside_r, (0,8-push,15,15-push))
# cut out missing or obstructed quarters from each surface
if not nw:
rect(outside_l, (0,push,7,7+push))
rect(texture, (0,0,7,7))
if not nw or sw:
rect(inside_r, (8,push,15,7+push)) # will be flipped
if not ne:
rect(texture, (8,0,15,7))
if not ne or nw:
rect(inside_l, (0,push,7,7+push))
if not ne or se:
rect(inside_r, (0,push,7,7+push)) # will be flipped
if not se:
rect(outside_r, (0,push,7,7+push)) # will be flipped
rect(texture, (8,8,15,15))
if not se or sw:
rect(inside_l, (8,push,15,7+push))
if not sw:
rect(outside_l, (8,push,15,7+push))
rect(outside_r, (8,push,15,7+push)) # will be flipped
rect(texture, (0,8,7,15))
img = Image.new("RGBA", (24,24), self.bgcolor)
if upside_down:
# top should have no cut-outs after all
texture = slab_top
else:
# render the slab-level surface
slab_top = self.transform_image_top(slab_top)
alpha_over(img, slab_top, (0,6))
# render inner left surface
inside_l = self.transform_image_side(inside_l)
# Darken the vertical part of the second step
sidealpha = inside_l.split()[3]
# darken it a bit more than usual, looks better
inside_l = ImageEnhance.Brightness(inside_l).enhance(0.8)
inside_l.putalpha(sidealpha)
alpha_over(img, inside_l, (6,3))
# render inner right surface
inside_r = self.transform_image_side(inside_r).transpose(Image.FLIP_LEFT_RIGHT)
# Darken the vertical part of the second step
sidealpha = inside_r.split()[3]
# darken it a bit more than usual, looks better
inside_r = ImageEnhance.Brightness(inside_r).enhance(0.7)
inside_r.putalpha(sidealpha)
alpha_over(img, inside_r, (6,3))
# render outer surfaces
alpha_over(img, self.build_full_block(texture, None, None, outside_l, outside_r))
return img
    # normal, locked (used in april's fool day), ender and trapped chest
    # NOTE: locked chest used to be id95 (which is now stained glass)
    @material(blockid=[54,130,146], data=range(30), transparent = True)
    def chests(self, blockid, data):
        """Render a normal (54), ender (130) or trapped (146) chest.

        The first 3 bits of data are the orientation as stored in
        minecraft; bits 0x8 and 0x10 are pseudo data marking which half of
        a double chest this is (0 means a single chest).
        """
        # first, do the rotation if needed
        orientation_data = data & 7
        if self.rotation == 1:
            if orientation_data == 2: data = 5 | (data & 24)
            elif orientation_data == 3: data = 4 | (data & 24)
            elif orientation_data == 4: data = 2 | (data & 24)
            elif orientation_data == 5: data = 3 | (data & 24)
        elif self.rotation == 2:
            if orientation_data == 2: data = 3 | (data & 24)
            elif orientation_data == 3: data = 2 | (data & 24)
            elif orientation_data == 4: data = 5 | (data & 24)
            elif orientation_data == 5: data = 4 | (data & 24)
        elif self.rotation == 3:
            if orientation_data == 2: data = 4 | (data & 24)
            elif orientation_data == 3: data = 5 | (data & 24)
            elif orientation_data == 4: data = 3 | (data & 24)
            elif orientation_data == 5: data = 2 | (data & 24)
        if blockid == 130 and not data in [2,3,4,5]: return None
        # iterate.c will only return the ancil data (without pseudo
        # ancil data) for locked and ender chests, so only
        # ancilData = 2,3,4,5 are used for this blockids
        if data & 24 == 0:
            # single chest
            if blockid == 130: t = self.load_image("ender.png")
            else:
                try:
                    t = self.load_image("normal.png")
                except (TextureException, IOError):
                    t = self.load_image("chest.png")
            # the texture is no longer in terrain.png; get it from the
            # chest texture file and crop out all the needed pieces
            if t.size != (64,64): t = t.resize((64,64), Image.ANTIALIAS)
            # top
            top = t.crop((14,0,28,14))
            top.load() # every crop need a load, crop is a lazy operation
                       # see PIL manual
            img = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(img,top,(1,1))
            top = img
            # front
            front_top = t.crop((14,14,28,19))
            front_top.load()
            front_bottom = t.crop((14,34,28,43))
            front_bottom.load()
            front_lock = t.crop((1,0,3,4))
            front_lock.load()
            front = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(front,front_top, (1,1))
            alpha_over(front,front_bottom, (1,6))
            alpha_over(front,front_lock, (7,3))
            # left side
            # left side, right side, and back are essentially the same for
            # the default texture, we take it anyway just in case other
            # textures make use of it.
            side_l_top = t.crop((0,14,14,19))
            side_l_top.load()
            side_l_bottom = t.crop((0,34,14,43))
            side_l_bottom.load()
            side_l = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(side_l,side_l_top, (1,1))
            alpha_over(side_l,side_l_bottom, (1,6))
            # right side
            # NOTE(review): the right-side and back crops below are computed
            # but the composites paste the *left*-side crops instead, so
            # custom texture packs with distinct faces won't show them —
            # presumably intentional per the comment above, but verify.
            side_r_top = t.crop((28,14,43,20))
            side_r_top.load()
            side_r_bottom = t.crop((28,33,42,43))
            side_r_bottom.load()
            side_r = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(side_r,side_l_top, (1,1))
            alpha_over(side_r,side_l_bottom, (1,6))
            # back
            back_top = t.crop((42,14,56,18))
            back_top.load()
            back_bottom = t.crop((42,33,56,43))
            back_bottom.load()
            back = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(back,side_l_top, (1,1))
            alpha_over(back,side_l_bottom, (1,6))
        else:
            # large chest
            # the textures is no longer in terrain.png, get it from
            # item/chest.png and get all the needed stuff
            t = self.load_image("normal_double.png")
            if t.size != (128,64): t = t.resize((128,64), Image.ANTIALIAS)
            # top
            top = t.crop((14,0,44,14))
            top.load()
            img = Image.new("RGBA", (32,16), self.bgcolor)
            alpha_over(img,top,(1,1))
            top = img
            # front
            front_top = t.crop((14,14,44,18))
            front_top.load()
            front_bottom = t.crop((14,33,44,43))
            front_bottom.load()
            front_lock = t.crop((1,0,3,5))
            front_lock.load()
            front = Image.new("RGBA", (32,16), self.bgcolor)
            alpha_over(front,front_top,(1,1))
            alpha_over(front,front_bottom,(1,5))
            alpha_over(front,front_lock,(15,3))
            # left side
            side_l_top = t.crop((0,14,14,18))
            side_l_top.load()
            side_l_bottom = t.crop((0,33,14,43))
            side_l_bottom.load()
            side_l = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(side_l,side_l_top, (1,1))
            alpha_over(side_l,side_l_bottom,(1,5))
            # right side
            side_r_top = t.crop((44,14,58,18))
            side_r_top.load()
            side_r_bottom = t.crop((44,33,58,43))
            side_r_bottom.load()
            side_r = Image.new("RGBA", (16,16), self.bgcolor)
            alpha_over(side_r,side_r_top, (1,1))
            alpha_over(side_r,side_r_bottom,(1,5))
            # back
            back_top = t.crop((58,14,88,18))
            back_top.load()
            back_bottom = t.crop((58,33,88,43))
            back_bottom.load()
            back = Image.new("RGBA", (32,16), self.bgcolor)
            alpha_over(back,back_top,(1,1))
            alpha_over(back,back_bottom,(1,5))
            # keep only the half of the double-wide faces that belongs to
            # this block
            if data & 24 == 8: # double chest, first half
                top = top.crop((0,0,16,16))
                top.load()
                front = front.crop((0,0,16,16))
                front.load()
                back = back.crop((0,0,16,16))
                back.load()
                #~ side = side_l
            elif data & 24 == 16: # double, second half
                top = top.crop((16,0,32,16))
                top.load()
                front = front.crop((16,0,32,16))
                front.load()
                back = back.crop((16,0,32,16))
                back.load()
                #~ side = side_r
            else: # just in case
                return None
        # compose the final block from the faces built above
        img = Image.new("RGBA", (24,24), self.bgcolor)
        if data & 7 == 2: # north
            side = self.transform_image_side(side_r)
            alpha_over(img, side, (1,7))
            back = self.transform_image_side(back)
            alpha_over(img, back.transpose(Image.FLIP_LEFT_RIGHT), (11,7))
            front = self.transform_image_side(front)
            top = self.transform_image_top(top.rotate(180))
            alpha_over(img, top, (0,2))
        elif data & 7 == 3: # south
            side = self.transform_image_side(side_l)
            alpha_over(img, side, (1,7))
            front = self.transform_image_side(front).transpose(Image.FLIP_LEFT_RIGHT)
            top = self.transform_image_top(top.rotate(180))
            alpha_over(img, top, (0,2))
            alpha_over(img, front,(11,7))
        elif data & 7 == 4: # west
            side = self.transform_image_side(side_r)
            alpha_over(img, side.transpose(Image.FLIP_LEFT_RIGHT), (11,7))
            front = self.transform_image_side(front)
            alpha_over(img, front,(1,7))
            top = self.transform_image_top(top.rotate(270))
            alpha_over(img, top, (0,2))
        elif data & 7 == 5: # east
            back = self.transform_image_side(back)
            side = self.transform_image_side(side_l).transpose(Image.FLIP_LEFT_RIGHT)
            alpha_over(img, side, (11,7))
            alpha_over(img, back, (1,7))
            top = self.transform_image_top(top.rotate(270))
            alpha_over(img, top, (0,2))
        else: # just in case
            img = None
        return img
# redstone wire
# uses pseudo-ancildata found in iterate.c
@material(blockid=55, data=range(128), transparent=True)
def wire(self, blockid, data):
if data & 0b1000000 == 64: # powered redstone wire
redstone_wire_t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_dust_line0.png").rotate(90)
redstone_wire_t = self.tint_texture(redstone_wire_t,(255,0,0))
redstone_cross_t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_dust_dot.png")
redstone_cross_t = self.tint_texture(redstone_cross_t,(255,0,0))
else: # unpowered redstone wire
redstone_wire_t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_dust_line0.png").rotate(90)
redstone_wire_t = self.tint_texture(redstone_wire_t,(48,0,0))
redstone_cross_t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_dust_dot.png")
redstone_cross_t = self.tint_texture(redstone_cross_t,(48,0,0))
# generate an image per redstone direction
branch_top_left = redstone_cross_t.copy()
ImageDraw.Draw(branch_top_left).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_top_left).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_top_left).rectangle((0,11,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
branch_top_right = redstone_cross_t.copy()
ImageDraw.Draw(branch_top_right).rectangle((0,0,15,4),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_top_right).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_top_right).rectangle((0,11,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
branch_bottom_right = redstone_cross_t.copy()
ImageDraw.Draw(branch_bottom_right).rectangle((0,0,15,4),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_bottom_right).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_bottom_right).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
branch_bottom_left = redstone_cross_t.copy()
ImageDraw.Draw(branch_bottom_left).rectangle((0,0,15,4),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_bottom_left).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
ImageDraw.Draw(branch_bottom_left).rectangle((0,11,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
# generate the bottom texture
if data & 0b111111 == 0:
bottom = redstone_cross_t.copy()
# see iterate.c for where these masks come from
has_x = (data & 0b1010) > 0
has_z = (data & 0b0101) > 0
if has_x and has_z:
bottom = redstone_cross_t.copy()
if has_x:
alpha_over(bottom, redstone_wire_t.copy())
if has_z:
alpha_over(bottom, redstone_wire_t.copy().rotate(90))
else:
if has_x:
bottom = redstone_wire_t.copy()
elif has_z:
bottom = redstone_wire_t.copy().rotate(90)
elif data & 0b1111 == 0:
bottom = redstone_cross_t.copy()
# check for going up redstone wire
if data & 0b100000 == 32:
side1 = redstone_wire_t.rotate(90)
else:
side1 = None
if data & 0b010000 == 16:
side2 = redstone_wire_t.rotate(90)
else:
side2 = None
img = self.build_full_block(None,side1,side2,None,None,bottom)
return img
# diamond ore
block(blockid=56, top_image="assets/minecraft/textures/blocks/diamond_ore.png")
# diamond block
block(blockid=57, top_image="assets/minecraft/textures/blocks/diamond_block.png")
# crafting table
# needs two different sides
@material(blockid=58, solid=True, nodata=True)
def crafting_table(self, blockid, data):
top = self.load_image_texture("assets/minecraft/textures/blocks/crafting_table_top.png")
side3 = self.load_image_texture("assets/minecraft/textures/blocks/crafting_table_side.png")
side4 = self.load_image_texture("assets/minecraft/textures/blocks/crafting_table_front.png")
img = self.build_full_block(top, None, None, side3, side4, None)
return img
# crops with 8 data values (like wheat)
@material(blockid=59, data=range(8), transparent=True, nospawn=True)
def crops8(self, blockid, data):
raw_crop = self.load_image_texture("assets/minecraft/textures/blocks/wheat_stage_%d.png" % data)
crop1 = self.transform_image_top(raw_crop)
crop2 = self.transform_image_side(raw_crop)
crop3 = crop2.transpose(Image.FLIP_LEFT_RIGHT)
img = Image.new("RGBA", (24,24), self.bgcolor)
alpha_over(img, crop1, (0,12), crop1)
alpha_over(img, crop2, (6,3), crop2)
alpha_over(img, crop3, (6,3), crop3)
return img
# farmland and grass path (15/16 blocks)
@material(blockid=[60,208], data=range(9), solid=True)
def farmland(self, blockid, data):
if blockid == 60:
side = self.load_image_texture("assets/minecraft/textures/blocks/dirt.png")
top = self.load_image_texture("assets/minecraft/textures/blocks/farmland_wet.png")
if data == 0:
top = self.load_image_texture("assets/minecraft/textures/blocks/farmland_dry.png")
# dirt.png is 16 pixels tall, so we need to crop it before building full block
side = side.crop((0, 1, 16, 16))
return self.build_full_block((top, 1), side, side, side, side)
else:
top = self.load_image_texture("assets/minecraft/textures/blocks/grass_path_top.png")
side = self.load_image_texture("assets/minecraft/textures/blocks/grass_path_side.png")
# side already has 1 transparent pixel at the top, so it doesn't need to be modified
# just shift the top image down 1 pixel
return self.build_full_block((top, 1), side, side, side, side)
# signposts
@material(blockid=63, data=range(16), transparent=True)
def signpost(self, blockid, data):
# first rotations
if self.rotation == 1:
data = (data + 4) % 16
elif self.rotation == 2:
data = (data + 8) % 16
elif self.rotation == 3:
data = (data + 12) % 16
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()
# cut the planks to the size of a signpost
ImageDraw.Draw(texture).rectangle((0,12,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
# If the signpost is looking directly to the image, draw some
# random dots, they will look as text.
if data in (0,1,2,3,4,5,15):
for i in range(15):
x = randint(4,11)
y = randint(3,7)
texture.putpixel((x,y),(0,0,0,255))
# Minecraft uses wood texture for the signpost stick
texture_stick = self.load_image_texture("assets/minecraft/textures/blocks/log_oak.png")
texture_stick = texture_stick.resize((12,12), Image.ANTIALIAS)
ImageDraw.Draw(texture_stick).rectangle((2,0,12,12),outline=(0,0,0,0),fill=(0,0,0,0))
img = Image.new("RGBA", (24,24), self.bgcolor)
# W N ~90 E S ~270
angles = (330.,345.,0.,15.,30.,55.,95.,120.,150.,165.,180.,195.,210.,230.,265.,310.)
angle = math.radians(angles[data])
post = self.transform_image_angle(texture, angle)
# choose the position of the "3D effect"
incrementx = 0
if data in (1,6,7,8,9,14):
incrementx = -1
elif data in (3,4,5,11,12,13):
incrementx = +1
alpha_over(img, texture_stick,(11, 8),texture_stick)
# post2 is a brighter signpost pasted with a small shift,
# gives to the signpost some 3D effect.
post2 = ImageEnhance.Brightness(post).enhance(1.2)
alpha_over(img, post2,(incrementx, -3),post2)
alpha_over(img, post, (0,-2), post)
return img
    # wooden and iron door
    # uses pseudo-ancildata found in iterate.c
    @material(blockid=[64,71,193,194,195,196,197], data=range(32), transparent=True)
    def door(self, blockid, data):
        """Render the top or bottom half of a door.

        Pseudo data layout (see iterate.c): bits 0x1-0x2 = facing when
        closed, bit 0x4 = door swung open, bit 0x8 = this is the top half,
        bit 0x10 = hinge on the left.
        """
        # Masked to not clobber block top/bottom & swung info
        if self.rotation == 1:
            if (data & 0b00011) == 0: data = data & 0b11100 | 1
            elif (data & 0b00011) == 1: data = data & 0b11100 | 2
            elif (data & 0b00011) == 2: data = data & 0b11100 | 3
            elif (data & 0b00011) == 3: data = data & 0b11100 | 0
        elif self.rotation == 2:
            if (data & 0b00011) == 0: data = data & 0b11100 | 2
            elif (data & 0b00011) == 1: data = data & 0b11100 | 3
            elif (data & 0b00011) == 2: data = data & 0b11100 | 0
            elif (data & 0b00011) == 3: data = data & 0b11100 | 1
        elif self.rotation == 3:
            if (data & 0b00011) == 0: data = data & 0b11100 | 3
            elif (data & 0b00011) == 1: data = data & 0b11100 | 0
            elif (data & 0b00011) == 2: data = data & 0b11100 | 1
            elif (data & 0b00011) == 3: data = data & 0b11100 | 2
        if data & 0x8 == 0x8: # top of the door
            if blockid == 64: # classic wood door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_wood_upper.png")
            elif blockid == 71: # iron door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_iron_upper.png")
            elif blockid == 193: # spruce door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_spruce_upper.png")
            elif blockid == 194: # birch door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_birch_upper.png")
            elif blockid == 195: # jungle door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_jungle_upper.png")
            elif blockid == 196: # acacia door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_acacia_upper.png")
            elif blockid == 197: # dark_oak door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_dark_oak_upper.png")
        else: # bottom of the door
            if blockid == 64: # classic wood door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_wood_lower.png")
            elif blockid == 71: # iron door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_iron_lower.png")
            elif blockid == 193: # spruce door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_spruce_lower.png")
            elif blockid == 194: # birch door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_birch_lower.png")
            elif blockid == 195: # jungle door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_jungle_lower.png")
            elif blockid == 196: # acacia door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_acacia_lower.png")
            elif blockid == 197: # dark_oak door
                raw_door = self.load_image_texture("assets/minecraft/textures/blocks/door_dark_oak_lower.png")
        # bit 0x4 means the door is swung open; to render all doors as
        # closed, force closed = True here
        if data & 0x4 == 0x4:
            closed = False
        else:
            closed = True
        if data & 0x10 == 0x10:
            # hinge on the left (facing same door direction)
            hinge_on_left = True
        else:
            # hinge on the right (default single door)
            hinge_on_left = False
        # mask out the high bits to figure out the orientation
        img = Image.new("RGBA", (24,24), self.bgcolor)
        if (data & 0x03) == 0: # facing west when closed
            if hinge_on_left:
                if closed:
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    alpha_over(img, tex, (0,6), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    tex = tex.transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (12,6), tex)
            else:
                if closed:
                    tex = self.transform_image_side(raw_door)
                    alpha_over(img, tex, (0,6), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    tex = tex.transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (0,0), tex)
        if (data & 0x03) == 1: # facing north when closed
            if hinge_on_left:
                if closed:
                    tex = self.transform_image_side(raw_door).transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (0,0), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door)
                    alpha_over(img, tex, (0,6), tex)
            else:
                if closed:
                    tex = self.transform_image_side(raw_door).transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (0,0), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door)
                    alpha_over(img, tex, (12,0), tex)
        if (data & 0x03) == 2: # facing east when closed
            if hinge_on_left:
                if closed:
                    tex = self.transform_image_side(raw_door)
                    alpha_over(img, tex, (12,0), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door)
                    tex = tex.transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (0,0), tex)
            else:
                if closed:
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    alpha_over(img, tex, (12,0), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door).transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (12,6), tex)
        if (data & 0x03) == 3: # facing south when closed
            if hinge_on_left:
                if closed:
                    tex = self.transform_image_side(raw_door).transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (12,6), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    alpha_over(img, tex, (12,0), tex)
            else:
                if closed:
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    tex = tex.transpose(Image.FLIP_LEFT_RIGHT)
                    alpha_over(img, tex, (12,6), tex)
                else:
                    # flip first to set the doornob on the correct side
                    tex = self.transform_image_side(raw_door.transpose(Image.FLIP_LEFT_RIGHT))
                    alpha_over(img, tex, (0,6), tex)
        return img
# ladder
@material(blockid=65, data=[2, 3, 4, 5], transparent=True)
def ladder(self, blockid, data):
# first rotations
if self.rotation == 1:
if data == 2: data = 5
elif data == 3: data = 4
elif data == 4: data = 2
elif data == 5: data = 3
elif self.rotation == 2:
if data == 2: data = 3
elif data == 3: data = 2
elif data == 4: data = 5
elif data == 5: data = 4
elif self.rotation == 3:
if data == 2: data = 4
elif data == 3: data = 5
elif data == 4: data = 3
elif data == 5: data = 2
img = Image.new("RGBA", (24,24), self.bgcolor)
raw_texture = self.load_image_texture("assets/minecraft/textures/blocks/ladder.png")
if data == 5:
# normally this ladder would be obsured by the block it's attached to
# but since ladders can apparently be placed on transparent blocks, we
# have to render this thing anyway. same for data == 2
tex = self.transform_image_side(raw_texture)
alpha_over(img, tex, (0,6), tex)
return img
if data == 2:
tex = self.transform_image_side(raw_texture).transpose(Image.FLIP_LEFT_RIGHT)
alpha_over(img, tex, (12,6), tex)
return img
if data == 3:
tex = self.transform_image_side(raw_texture).transpose(Image.FLIP_LEFT_RIGHT)
alpha_over(img, tex, (0,0), tex)
return img
if data == 4:
tex = self.transform_image_side(raw_texture)
alpha_over(img, tex, (12,0), tex)
return img
# wall signs
@material(blockid=68, data=[2, 3, 4, 5], transparent=True)
def wall_sign(self, blockid, data): # wall sign
# first rotations
if self.rotation == 1:
if data == 2: data = 5
elif data == 3: data = 4
elif data == 4: data = 2
elif data == 5: data = 3
elif self.rotation == 2:
if data == 2: data = 3
elif data == 3: data = 2
elif data == 4: data = 5
elif data == 5: data = 4
elif self.rotation == 3:
if data == 2: data = 4
elif data == 3: data = 5
elif data == 4: data = 3
elif data == 5: data = 2
texture = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()
# cut the planks to the size of a signpost
ImageDraw.Draw(texture).rectangle((0,12,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
# draw some random black dots, they will look as text
""" don't draw text at the moment, they are used in blank for decoration
if data in (3,4):
for i in range(15):
x = randint(4,11)
y = randint(3,7)
texture.putpixel((x,y),(0,0,0,255))
"""
img = Image.new("RGBA", (24,24), self.bgcolor)
incrementx = 0
if data == 2: # east
incrementx = +1
sign = self.build_full_block(None, None, None, None, texture)
elif data == 3: # west
incrementx = -1
sign = self.build_full_block(None, texture, None, None, None)
elif data == 4: # north
incrementx = +1
sign = self.build_full_block(None, None, texture, None, None)
elif data == 5: # south
incrementx = -1
sign = self.build_full_block(None, None, None, texture, None)
sign2 = ImageEnhance.Brightness(sign).enhance(1.2)
alpha_over(img, sign2,(incrementx, 2),sign2)
alpha_over(img, sign, (0,3), sign)
return img
    # levers
    @material(blockid=69, data=range(16), transparent=True)
    def levers(self, blockid, data):
        """Render a lever, wall-mounted (data 1-4) or floor-mounted (5-6).

        Bit 0x8 marks a powered lever (the stick is flipped); the low 3
        bits give the mounting face / direction.
        """
        if data & 8 == 8: powered = True
        else: powered = False
        data = data & 7
        # first rotations
        if self.rotation == 1:
            # on wall levers
            if data == 1: data = 3
            elif data == 2: data = 4
            elif data == 3: data = 2
            elif data == 4: data = 1
            # on floor levers
            elif data == 5: data = 6
            elif data == 6: data = 5
        elif self.rotation == 2:
            if data == 1: data = 2
            elif data == 2: data = 1
            elif data == 3: data = 4
            elif data == 4: data = 3
            elif data == 5: data = 5
            elif data == 6: data = 6
        elif self.rotation == 3:
            if data == 1: data = 4
            elif data == 2: data = 3
            elif data == 3: data = 1
            elif data == 4: data = 2
            elif data == 5: data = 6
            elif data == 6: data = 5
        # generate the texture for the base of the lever: a small stone
        # rectangle cut out of the full stone texture
        t_base = self.load_image_texture("assets/minecraft/textures/blocks/stone.png").copy()
        ImageDraw.Draw(t_base).rectangle((0,0,15,3),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(t_base).rectangle((0,12,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(t_base).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(t_base).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
        # generate the texture for the stick (darker copy behind for depth)
        stick = self.load_image_texture("assets/minecraft/textures/blocks/lever.png").copy()
        c_stick = Image.new("RGBA", (16,16), self.bgcolor)
        tmp = ImageEnhance.Brightness(stick).enhance(0.8)
        alpha_over(c_stick, tmp, (1,0), tmp)
        alpha_over(c_stick, stick, (0,0), stick)
        t_stick = self.transform_image_side(c_stick.rotate(45, Image.NEAREST))
        # where the lever will be composed
        img = Image.new("RGBA", (24,24), self.bgcolor)
        # wall levers
        if data == 1: # facing SOUTH
            # levers can't be placed in transparent blocks, so this
            # direction is almost invisible
            return None
        elif data == 2: # facing NORTH
            base = self.transform_image_side(t_base)
            # paste it twice with different brightness to make a fake 3D effect
            alpha_over(img, base, (12,-1), base)
            alpha = base.split()[3]
            base = ImageEnhance.Brightness(base).enhance(0.9)
            base.putalpha(alpha)
            alpha_over(img, base, (11,0), base)
            # paste the lever stick (flipped when powered)
            pos = (7,-7)
            if powered:
                t_stick = t_stick.transpose(Image.FLIP_TOP_BOTTOM)
                pos = (7,6)
            alpha_over(img, t_stick, pos, t_stick)
        elif data == 3: # facing WEST
            base = self.transform_image_side(t_base)
            # paste it twice with different brightness to make a fake 3D effect
            base = base.transpose(Image.FLIP_LEFT_RIGHT)
            alpha_over(img, base, (0,-1), base)
            alpha = base.split()[3]
            base = ImageEnhance.Brightness(base).enhance(0.9)
            base.putalpha(alpha)
            alpha_over(img, base, (1,0), base)
            # paste the lever stick (flipped when powered)
            t_stick = t_stick.transpose(Image.FLIP_LEFT_RIGHT)
            pos = (5,-7)
            if powered:
                t_stick = t_stick.transpose(Image.FLIP_TOP_BOTTOM)
                pos = (6,6)
            alpha_over(img, t_stick, pos, t_stick)
        elif data == 4: # facing EAST
            # levers can't be placed in transparent blocks, so this
            # direction is almost invisible
            return None
        # floor levers
        elif data == 5: # pointing south when off
            # lever base, fake 3d again
            base = self.transform_image_top(t_base)
            alpha = base.split()[3]
            tmp = ImageEnhance.Brightness(base).enhance(0.8)
            tmp.putalpha(alpha)
            alpha_over(img, tmp, (0,12), tmp)
            alpha_over(img, base, (0,11), base)
            # lever stick (mirrored when unpowered)
            pos = (3,2)
            if not powered:
                t_stick = t_stick.transpose(Image.FLIP_LEFT_RIGHT)
                pos = (11,2)
            alpha_over(img, t_stick, pos, t_stick)
        elif data == 6: # pointing east when off
            # lever base, fake 3d again
            base = self.transform_image_top(t_base.rotate(90))
            alpha = base.split()[3]
            tmp = ImageEnhance.Brightness(base).enhance(0.8)
            tmp.putalpha(alpha)
            alpha_over(img, tmp, (0,12), tmp)
            alpha_over(img, base, (0,11), base)
            # lever stick (mirrored when unpowered)
            pos = (2,3)
            if not powered:
                t_stick = t_stick.transpose(Image.FLIP_LEFT_RIGHT)
                pos = (10,2)
            alpha_over(img, t_stick, pos, t_stick)
        return img
# wooden and stone pressure plates, and weighted pressure plates
@material(blockid=[70, 72,147,148], data=[0,1], transparent=True)
def pressure_plate(self, blockid, data):
    """Render pressure plates (stone, oak, light gold, heavy iron).

    data 0 = unpressed: drawn as two layers offset by one pixel for a
    thin 3D look.  data 1 = pressed: drawn flat, one pixel lower.
    """
    if blockid == 70: # stone
        t = self.load_image_texture("assets/minecraft/textures/blocks/stone.png").copy()
    elif blockid == 72: # wooden
        t = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()
    elif blockid == 147: # light golden
        t = self.load_image_texture("assets/minecraft/textures/blocks/gold_block.png").copy()
    else: # blockid == 148: # heavy iron
        t = self.load_image_texture("assets/minecraft/textures/blocks/iron_block.png").copy()

    # cut out the outside border, pressure plates are smaller
    # than a normal block
    ImageDraw.Draw(t).rectangle((0,0,15,15),outline=(0,0,0,0))

    # create the textures and a darker version to make a 3d by
    # pasting them with an offset of 1 pixel
    img = Image.new("RGBA", (24,24), self.bgcolor)
    top = self.transform_image_top(t)
    # darken a copy for the lower layer, preserving the alpha channel
    alpha = top.split()[3]
    topd = ImageEnhance.Brightness(top).enhance(0.8)
    topd.putalpha(alpha)

    # show it 3d or 2d if unpressed or pressed
    if data == 0:
        alpha_over(img,topd, (0,12),topd)
        alpha_over(img,top, (0,11),top)
    elif data == 1:
        alpha_over(img,top, (0,12),top)

    return img
# normal (73) and glowing (74) redstone ore -- both render with the same texture
block(blockid=[73, 74], top_image="assets/minecraft/textures/blocks/redstone_ore.png")
# stone and wood buttons
@material(blockid=(77,143), data=range(16), transparent=True)
def buttons(self, blockid, data):
    """Render stone (77) and wooden (143) buttons attached to a wall.

    Only the NORTH- and WEST-facing orientations are drawn; the other
    two faces point into the block behind and cannot be seen.
    """

    # 0x8 is set if the button is pressed mask this info and render
    # it as unpressed
    data = data & 0x7

    # remap the facing value for the current world rotation
    if self.rotation == 1:
        if data == 1: data = 3
        elif data == 2: data = 4
        elif data == 3: data = 2
        elif data == 4: data = 1
    elif self.rotation == 2:
        if data == 1: data = 2
        elif data == 2: data = 1
        elif data == 3: data = 4
        elif data == 4: data = 3
    elif self.rotation == 3:
        if data == 1: data = 4
        elif data == 2: data = 3
        elif data == 3: data = 1
        elif data == 4: data = 2

    if blockid == 77:
        t = self.load_image_texture("assets/minecraft/textures/blocks/stone.png").copy()
    else:
        t = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()

    # generate the texture for the button: blank out everything except
    # the small central rectangle
    ImageDraw.Draw(t).rectangle((0,0,15,5),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(t).rectangle((0,10,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(t).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(t).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))

    img = Image.new("RGBA", (24,24), self.bgcolor)

    button = self.transform_image_side(t)

    if data == 1: # facing SOUTH
        # buttons can't be placed in transparent blocks, so this
        # direction can't be seen
        return None

    elif data == 2: # facing NORTH
        # paste it twice with different brightness to make a 3D effect
        alpha_over(img, button, (12,-1), button)

        alpha = button.split()[3]
        button = ImageEnhance.Brightness(button).enhance(0.9)
        button.putalpha(alpha)

        alpha_over(img, button, (11,0), button)

    elif data == 3: # facing WEST
        # paste it twice with different brightness to make a 3D effect
        button = button.transpose(Image.FLIP_LEFT_RIGHT)
        alpha_over(img, button, (0,-1), button)

        alpha = button.split()[3]
        button = ImageEnhance.Brightness(button).enhance(0.9)
        button.putalpha(alpha)

        alpha_over(img, button, (1,0), button)

    elif data == 4: # facing EAST
        # buttons can't be placed in transparent blocks, so this
        # direction can't be seen
        return None

    return img
# snow
@material(blockid=78, data=range(16), transparent=True, solid=True)
def snow(self, blockid, data):
    """Render a thin snow layer (always one layer thick).

    NOTE(review): still not rendered correctly for data other than 0
    (taller snow layers all draw as the single-layer height).
    """
    tex = self.load_image_texture("assets/minecraft/textures/blocks/snow.png")

    # make the side image, top 3/4 transparent: keep only the bottom
    # 4 pixel rows of the texture
    mask = tex.crop((0,12,16,16))
    sidetex = Image.new(tex.mode, tex.size, self.bgcolor)
    alpha_over(sidetex, mask, (0,12,16,16), mask)

    img = Image.new("RGBA", (24,24), self.bgcolor)

    top = self.transform_image_top(tex)
    side = self.transform_image_side(sidetex)
    otherside = side.transpose(Image.FLIP_LEFT_RIGHT)

    alpha_over(img, side, (0,6), side)
    alpha_over(img, otherside, (12,6), otherside)
    alpha_over(img, top, (0,9), top)

    return img
# snow block (full cube of snow)
block(blockid=80, top_image="assets/minecraft/textures/blocks/snow.png")
# cactus
@material(blockid=81, data=range(15), transparent=True, solid=True, nospawn=True)
def cactus(self, blockid, data):
    """Render a cactus: a slightly inset full-height block with
    darkened sides for a 3D effect."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/cactus_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/cactus_side.png")

    img = Image.new("RGBA", (24,24), self.bgcolor)

    top = self.transform_image_top(top)
    side = self.transform_image_side(side)
    otherside = side.transpose(Image.FLIP_LEFT_RIGHT)

    # darken the sides slightly, preserving the alpha channel
    sidealpha = side.split()[3]
    side = ImageEnhance.Brightness(side).enhance(0.9)
    side.putalpha(sidealpha)
    othersidealpha = otherside.split()[3]
    otherside = ImageEnhance.Brightness(otherside).enhance(0.8)
    otherside.putalpha(othersidealpha)

    alpha_over(img, side, (1,6), side)
    alpha_over(img, otherside, (11,6), otherside)
    alpha_over(img, top, (0,0), top)

    return img
# clay block
block(blockid=82, top_image="assets/minecraft/textures/blocks/clay.png")
# sugar cane
@material(blockid=83, data=range(16), transparent=True)
def sugar_cane(self, blockid, data):
    """Render sugar cane as a simple crossed sprite."""
    return self.build_sprite(
        self.load_image_texture("assets/minecraft/textures/blocks/reeds.png"))
# jukebox
@material(blockid=84, data=range(16), solid=True)
def jukebox(self, blockid, data):
    """Render the jukebox: jukebox top over noteblock sides."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/jukebox_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/noteblock.png")
    return self.build_block(top, side)
# nether and normal fences
# uses pseudo-ancildata found in iterate.c
@material(blockid=[85, 188, 189, 190, 191, 192, 113], data=range(16), transparent=True, nospawn=True)
def fence(self, blockid, data):
    """Render a fence post plus up to four connecting side sticks.

    The lower four bits of ``data`` are pseudo ancillary data produced
    by iterate.c describing which neighbours the fence connects to
    (bit 0 = top-left, bit 1 = bottom-left, bit 2 = bottom-right,
    bit 3 = top-right in the rendered image).  No rotation handling is
    needed: the pseudo data is already rotation-aware.
    """
    # one texture per fence type; previously each branch loaded the
    # same file three times through a long elif chain
    fence_texture = {
        85:  "planks_oak.png",      # normal (oak) fence
        188: "planks_spruce.png",   # spruce fence
        189: "planks_birch.png",    # birch fence
        190: "planks_jungle.png",   # jungle fence
        191: "planks_big_oak.png",  # big/dark oak fence
        192: "planks_acacia.png",   # acacia fence
        113: "nether_brick.png",    # netherbrick fence
    }
    texture_path = "assets/minecraft/textures/blocks/" + fence_texture[blockid]
    # independent copies: each one is masked differently below
    fence_top = self.load_image_texture(texture_path).copy()
    fence_side = self.load_image_texture(texture_path).copy()
    fence_small_side = self.load_image_texture(texture_path).copy()

    # generate the textures of the fence: carve the big central post
    # out of the full 16x16 texture
    ImageDraw.Draw(fence_top).rectangle((0,0,5,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_top).rectangle((10,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_top).rectangle((0,0,15,5),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_top).rectangle((0,10,15,15),outline=(0,0,0,0),fill=(0,0,0,0))

    ImageDraw.Draw(fence_side).rectangle((0,0,5,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_side).rectangle((10,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))

    # Create the sides and the top of the big stick
    fence_side = self.transform_image_side(fence_side)
    fence_other_side = fence_side.transpose(Image.FLIP_LEFT_RIGHT)
    fence_top = self.transform_image_top(fence_top)

    # Darken the sides slightly. These methods also affect the alpha layer,
    # so save them first (we don't want to "darken" the alpha layer making
    # the block transparent)
    sidealpha = fence_side.split()[3]
    fence_side = ImageEnhance.Brightness(fence_side).enhance(0.9)
    fence_side.putalpha(sidealpha)
    othersidealpha = fence_other_side.split()[3]
    fence_other_side = ImageEnhance.Brightness(fence_other_side).enhance(0.8)
    fence_other_side.putalpha(othersidealpha)

    # Compose the fence big stick
    fence_big = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(fence_big,fence_side, (5,4),fence_side)
    alpha_over(fence_big,fence_other_side, (7,4),fence_other_side)
    alpha_over(fence_big,fence_top, (0,0),fence_top)

    # Now render the small sticks.
    # Create needed images: carve the two horizontal rails out of the texture
    ImageDraw.Draw(fence_small_side).rectangle((0,0,15,0),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_small_side).rectangle((0,4,15,6),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_small_side).rectangle((0,10,15,16),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_small_side).rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(fence_small_side).rectangle((11,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))

    # Create the sides and the top of the small sticks
    fence_small_side = self.transform_image_side(fence_small_side)
    fence_small_other_side = fence_small_side.transpose(Image.FLIP_LEFT_RIGHT)

    # Darken the sides slightly. These methods also affect the alpha layer,
    # so save them first (we don't want to "darken" the alpha layer making
    # the block transparent)
    sidealpha = fence_small_other_side.split()[3]
    fence_small_other_side = ImageEnhance.Brightness(fence_small_other_side).enhance(0.9)
    fence_small_other_side.putalpha(sidealpha)
    sidealpha = fence_small_side.split()[3]
    fence_small_side = ImageEnhance.Brightness(fence_small_side).enhance(0.9)
    fence_small_side.putalpha(sidealpha)

    # Create img to compose the fence
    img = Image.new("RGBA", (24,24), self.bgcolor)

    # Position of fence small sticks in img.
    # These positions are strange because the small sticks of the
    # fence are at the very left and at the very right of the 16x16 images
    pos_top_left = (2,3)
    pos_top_right = (10,3)
    pos_bottom_right = (10,7)
    pos_bottom_left = (2,7)

    # +x axis points top right direction
    # +y axis points bottom right direction
    # First compose small sticks in the back of the image,
    # then big stick and then small sticks in the front.
    if (data & 0b0001) == 1:
        alpha_over(img,fence_small_side, pos_top_left,fence_small_side)                # top left
    if (data & 0b1000) == 8:
        alpha_over(img,fence_small_other_side, pos_top_right,fence_small_other_side)    # top right

    alpha_over(img,fence_big,(0,0),fence_big)

    if (data & 0b0010) == 2:
        alpha_over(img,fence_small_other_side, pos_bottom_left,fence_small_other_side)  # bottom left
    if (data & 0b0100) == 4:
        alpha_over(img,fence_small_side, pos_bottom_right,fence_small_side)             # bottom right

    return img
# pumpkin
@material(blockid=[86, 91], data=range(4), solid=True)
def pumpkin(self, blockid, data): # pumpkins, jack-o-lantern
    """Render pumpkins (86) and jack-o-lanterns (91).

    data 0-3 is the facing direction; the face texture is drawn only
    when it points toward a visible side (west or north).
    """
    # rotation: remap the facing for the current world rotation
    if self.rotation == 1:
        if data == 0: data = 1
        elif data == 1: data = 2
        elif data == 2: data = 3
        elif data == 3: data = 0
    elif self.rotation == 2:
        if data == 0: data = 2
        elif data == 1: data = 3
        elif data == 2: data = 0
        elif data == 3: data = 1
    elif self.rotation == 3:
        if data == 0: data = 3
        elif data == 1: data = 0
        elif data == 2: data = 1
        elif data == 3: data = 2

    # texture generation
    top = self.load_image_texture("assets/minecraft/textures/blocks/pumpkin_top.png")
    frontName = "assets/minecraft/textures/blocks/pumpkin_face_off.png" if blockid == 86 else "assets/minecraft/textures/blocks/pumpkin_face_on.png"
    front = self.load_image_texture(frontName)
    side = self.load_image_texture("assets/minecraft/textures/blocks/pumpkin_side.png")

    if data == 0: # pointing west
        img = self.build_full_block(top, None, None, side, front)
    elif data == 1: # pointing north
        img = self.build_full_block(top, None, None, front, side)
    else: # in any other direction the front can't be seen
        img = self.build_full_block(top, None, None, side, side)

    return img
# netherrack
block(blockid=87, top_image="assets/minecraft/textures/blocks/netherrack.png")

# soul sand
block(blockid=88, top_image="assets/minecraft/textures/blocks/soul_sand.png")

# glowstone
block(blockid=89, top_image="assets/minecraft/textures/blocks/glowstone.png")
# portal
@material(blockid=90, data=[1, 2, 4, 5, 8, 10], transparent=True)
def portal(self, blockid, data):
    """Render a nether portal pane.

    Uses pseudo data (no rotation handling): the data value encodes
    which diagonal the portal plane lies on.
    """
    portaltexture = self.load_portal()
    img = Image.new("RGBA", (24,24), self.bgcolor)

    side = self.transform_image_side(portaltexture)
    otherside = side.transpose(Image.FLIP_TOP_BOTTOM)

    if data in (1,4,5):
        alpha_over(img, side, (5,4), side)

    if data in (2,8,10):
        alpha_over(img, otherside, (5,4), otherside)

    return img
# cake!
@material(blockid=92, data=range(6), transparent=True, nospawn=True)
def cake(self, blockid, data):
    """Render a cake with 0 (whole) to 5 bites taken out of it.

    The bitten part always points to the west in world coordinates, so
    each of the four north-orientations needs its own composition; the
    exposed "inside" texture is only visible in rotations 0 and 3.
    """
    # cake textures
    top = self.load_image_texture("assets/minecraft/textures/blocks/cake_top.png").copy()
    side = self.load_image_texture("assets/minecraft/textures/blocks/cake_side.png").copy()
    fullside = side.copy()
    inside = self.load_image_texture("assets/minecraft/textures/blocks/cake_inner.png")

    img = Image.new("RGBA", (24,24), self.bgcolor)
    if data == 0: # unbitten cake
        top = self.transform_image_top(top)
        side = self.transform_image_side(side)
        otherside = side.transpose(Image.FLIP_LEFT_RIGHT)

        # darken sides slightly
        sidealpha = side.split()[3]
        side = ImageEnhance.Brightness(side).enhance(0.9)
        side.putalpha(sidealpha)
        othersidealpha = otherside.split()[3]
        otherside = ImageEnhance.Brightness(otherside).enhance(0.8)
        otherside.putalpha(othersidealpha)

        # composite the cake
        alpha_over(img, side, (1,6), side)
        alpha_over(img, otherside, (11,7), otherside) # workaround, fixes a hole
        alpha_over(img, otherside, (12,6), otherside)
        alpha_over(img, top, (0,6), top)

    else:
        # cut the textures for a bitten cake
        # coord is how many pixels of the texture the bites have eaten
        coord = int(16./6.*data)
        ImageDraw.Draw(side).rectangle((16 - coord,0,16,16),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(top).rectangle((0,0,coord,16),outline=(0,0,0,0),fill=(0,0,0,0))

        # the bitten part of the cake always points to the west
        # composite the cake for every north orientation
        if self.rotation == 0: # north top-left
            # create right side
            rs = self.transform_image_side(side).transpose(Image.FLIP_LEFT_RIGHT)
            # create bitten side and its coords; the bitten face slides
            # toward the centre as more bites are taken
            deltax = 2*data
            deltay = -1*data
            if data == 3: deltax += 1 # special case fixing pixel holes
            ls = self.transform_image_side(inside)
            # create top side
            t = self.transform_image_top(top)
            # darken sides slightly
            sidealpha = ls.split()[3]
            ls = ImageEnhance.Brightness(ls).enhance(0.9)
            ls.putalpha(sidealpha)
            othersidealpha = rs.split()[3]
            rs = ImageEnhance.Brightness(rs).enhance(0.8)
            rs.putalpha(othersidealpha)
            # compose the cake
            alpha_over(img, rs, (12,6), rs)
            alpha_over(img, ls, (1 + deltax,6 + deltay), ls)
            alpha_over(img, t, (0,6), t)

        elif self.rotation == 1: # north top-right
            # bitten side not shown
            # create left side
            ls = self.transform_image_side(side.transpose(Image.FLIP_LEFT_RIGHT))
            # create top
            t = self.transform_image_top(top.rotate(-90))
            # create right side
            rs = self.transform_image_side(fullside).transpose(Image.FLIP_LEFT_RIGHT)
            # darken sides slightly
            sidealpha = ls.split()[3]
            ls = ImageEnhance.Brightness(ls).enhance(0.9)
            ls.putalpha(sidealpha)
            othersidealpha = rs.split()[3]
            rs = ImageEnhance.Brightness(rs).enhance(0.8)
            rs.putalpha(othersidealpha)
            # compose the cake
            alpha_over(img, ls, (2,6), ls)
            alpha_over(img, t, (0,6), t)
            alpha_over(img, rs, (12,6), rs)

        elif self.rotation == 2: # north bottom-right
            # bitten side not shown
            # left side
            ls = self.transform_image_side(fullside)
            # top
            t = self.transform_image_top(top.rotate(180))
            # right side
            rs = self.transform_image_side(side.transpose(Image.FLIP_LEFT_RIGHT)).transpose(Image.FLIP_LEFT_RIGHT)
            # darken sides slightly
            sidealpha = ls.split()[3]
            ls = ImageEnhance.Brightness(ls).enhance(0.9)
            ls.putalpha(sidealpha)
            othersidealpha = rs.split()[3]
            rs = ImageEnhance.Brightness(rs).enhance(0.8)
            rs.putalpha(othersidealpha)
            # compose the cake
            alpha_over(img, ls, (2,6), ls)
            alpha_over(img, t, (1,6), t)
            alpha_over(img, rs, (12,6), rs)

        elif self.rotation == 3: # north bottom-left
            # create left side
            ls = self.transform_image_side(side)
            # create top
            t = self.transform_image_top(top.rotate(90))
            # create right side and its coords (bitten face visible here)
            deltax = 12-2*data
            deltay = -1*data
            if data == 3: deltax += -1 # special case fixing pixel holes
            rs = self.transform_image_side(inside).transpose(Image.FLIP_LEFT_RIGHT)
            # darken sides slightly
            sidealpha = ls.split()[3]
            ls = ImageEnhance.Brightness(ls).enhance(0.9)
            ls.putalpha(sidealpha)
            othersidealpha = rs.split()[3]
            rs = ImageEnhance.Brightness(rs).enhance(0.8)
            rs.putalpha(othersidealpha)
            # compose the cake
            alpha_over(img, ls, (2,6), ls)
            alpha_over(img, t, (1,6), t)
            alpha_over(img, rs, (1 + deltax,6 + deltay), rs)

    return img
# redstone repeaters ON and OFF
@material(blockid=[93,94], data=range(16), transparent=True, nospawn=True)
def repeater(self, blockid, data):
    """Render a redstone repeater (93 = off, 94 = on).

    Low two bits of ``data`` are the facing direction; high two bits
    are the delay setting (1-4 ticks), which moves the front torch.
    """
    # rotation
    # Masked to not clobber delay info
    if self.rotation == 1:
        if (data & 0b0011) == 0: data = data & 0b1100 | 1
        elif (data & 0b0011) == 1: data = data & 0b1100 | 2
        elif (data & 0b0011) == 2: data = data & 0b1100 | 3
        elif (data & 0b0011) == 3: data = data & 0b1100 | 0
    elif self.rotation == 2:
        if (data & 0b0011) == 0: data = data & 0b1100 | 2
        elif (data & 0b0011) == 1: data = data & 0b1100 | 3
        elif (data & 0b0011) == 2: data = data & 0b1100 | 0
        elif (data & 0b0011) == 3: data = data & 0b1100 | 1
    elif self.rotation == 3:
        if (data & 0b0011) == 0: data = data & 0b1100 | 3
        elif (data & 0b0011) == 1: data = data & 0b1100 | 0
        elif (data & 0b0011) == 2: data = data & 0b1100 | 1
        elif (data & 0b0011) == 3: data = data & 0b1100 | 2

    # generate the diode: a slab-like base with the repeater top texture
    top = self.load_image_texture("assets/minecraft/textures/blocks/repeater_off.png") if blockid == 93 else self.load_image_texture("assets/minecraft/textures/blocks/repeater_on.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/stone_slab_side.png")
    increment = 13

    if (data & 0x3) == 0: # pointing east
        pass

    if (data & 0x3) == 1: # pointing south
        top = top.rotate(270)

    if (data & 0x3) == 2: # pointing west
        top = top.rotate(180)

    if (data & 0x3) == 3: # pointing north
        top = top.rotate(90)

    img = self.build_full_block( (top, increment), None, None, side, side)

    # compose a "3d" redstone torch
    t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_off.png").copy() if blockid == 93 else self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_on.png").copy()
    torch = Image.new("RGBA", (24,24), self.bgcolor)

    t_crop = t.crop((2,2,14,14))
    slice = t_crop.copy()
    ImageDraw.Draw(slice).rectangle((6,0,12,12),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(slice).rectangle((0,0,4,12),outline=(0,0,0,0),fill=(0,0,0,0))

    alpha_over(torch, slice, (6,4))
    alpha_over(torch, t_crop, (5,5))
    alpha_over(torch, t_crop, (6,5))
    alpha_over(torch, slice, (6,6))

    # paste redstone torches everywhere!
    # the torch is too tall for the repeater, crop the bottom.
    ImageDraw.Draw(torch).rectangle((0,16,24,24),outline=(0,0,0,0),fill=(0,0,0,0))

    # touch up the 3d effect with big rectangles, just in case, for other texture packs
    ImageDraw.Draw(torch).rectangle((0,24,10,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(torch).rectangle((12,15,24,24),outline=(0,0,0,0),fill=(0,0,0,0))

    # torch positions for every redstone torch orientation.
    #
    # This is a horrible list of torch orientations. I tried to
    # obtain these orientations by rotating the positions for one
    # orientation, but pixel rounding is horrible and messes the
    # torches.

    if (data & 0x3) == 0: # pointing east
        if (data & 0xC) == 0: # one tick delay
            moving_torch = (1,1)
            static_torch = (-3,-1)

        elif (data & 0xC) == 4: # two ticks delay
            moving_torch = (2,2)
            static_torch = (-3,-1)

        elif (data & 0xC) == 8: # three ticks delay
            moving_torch = (3,2)
            static_torch = (-3,-1)

        elif (data & 0xC) == 12: # four ticks delay
            moving_torch = (4,3)
            static_torch = (-3,-1)

    elif (data & 0x3) == 1: # pointing south
        if (data & 0xC) == 0: # one tick delay
            moving_torch = (1,1)
            static_torch = (5,-1)

        elif (data & 0xC) == 4: # two ticks delay
            moving_torch = (0,2)
            static_torch = (5,-1)

        elif (data & 0xC) == 8: # three ticks delay
            moving_torch = (-1,2)
            static_torch = (5,-1)

        elif (data & 0xC) == 12: # four ticks delay
            moving_torch = (-2,3)
            static_torch = (5,-1)

    elif (data & 0x3) == 2: # pointing west
        if (data & 0xC) == 0: # one tick delay
            moving_torch = (1,1)
            static_torch = (5,3)

        elif (data & 0xC) == 4: # two ticks delay
            moving_torch = (0,0)
            static_torch = (5,3)

        elif (data & 0xC) == 8: # three ticks delay
            moving_torch = (-1,0)
            static_torch = (5,3)

        elif (data & 0xC) == 12: # four ticks delay
            moving_torch = (-2,-1)
            static_torch = (5,3)

    elif (data & 0x3) == 3: # pointing north
        if (data & 0xC) == 0: # one tick delay
            moving_torch = (1,1)
            static_torch = (-3,3)

        elif (data & 0xC) == 4: # two ticks delay
            moving_torch = (2,0)
            static_torch = (-3,3)

        elif (data & 0xC) == 8: # three ticks delay
            moving_torch = (3,0)
            static_torch = (-3,3)

        elif (data & 0xC) == 12: # four ticks delay
            moving_torch = (4,-1)
            static_torch = (-3,3)

    # this paste order it's ok for east and south orientation
    # but it's wrong for north and west orientations. But using the
    # default texture pack the torches are small enough to no overlap.
    alpha_over(img, torch, static_torch, torch)
    alpha_over(img, torch, moving_torch, torch)

    return img
# redstone comparator (149 is inactive, 150 is active)
@material(blockid=[149,150], data=range(16), transparent=True, nospawn=True)
def comparator(self, blockid, data):
    """Render a redstone comparator: a slab base plus three torches.

    Low two bits of ``data`` are the facing; bit 0b100 means the front
    (static) torch is lit, bit 0b1000 means subtract mode.  The two
    back torches are lit whenever the block itself is active (150).
    """
    # rotation
    # add self.rotation to the lower 2 bits, mod 4
    data = data & 0b1100 | (((data & 0b11) + self.rotation) % 4)

    top = self.load_image_texture("assets/minecraft/textures/blocks/comparator_off.png") if blockid == 149 else self.load_image_texture("assets/minecraft/textures/blocks/comparator_on.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/stone_slab_side.png")
    increment = 13

    if (data & 0x3) == 0: # pointing north
        pass
        static_torch = (-3,-1)
        torch = ((0,2),(6,-1))

    if (data & 0x3) == 1: # pointing east
        top = top.rotate(270)
        static_torch = (5,-1)
        torch = ((-4,-1),(0,2))

    if (data & 0x3) == 2: # pointing south
        top = top.rotate(180)
        static_torch = (5,3)
        torch = ((0,-4),(-4,-1))

    if (data & 0x3) == 3: # pointing west
        top = top.rotate(90)
        static_torch = (-3,3)
        torch = ((1,-4),(6,-1))

    def build_torch(active):
        # compose a "3d" redstone torch (same trick as the repeater)
        t = self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_off.png").copy() if not active else self.load_image_texture("assets/minecraft/textures/blocks/redstone_torch_on.png").copy()
        torch = Image.new("RGBA", (24,24), self.bgcolor)
        t_crop = t.crop((2,2,14,14))
        slice = t_crop.copy()
        ImageDraw.Draw(slice).rectangle((6,0,12,12),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(slice).rectangle((0,0,4,12),outline=(0,0,0,0),fill=(0,0,0,0))
        alpha_over(torch, slice, (6,4))
        alpha_over(torch, t_crop, (5,5))
        alpha_over(torch, t_crop, (6,5))
        alpha_over(torch, slice, (6,6))
        return torch

    active_torch = build_torch(True)
    inactive_torch = build_torch(False)
    back_torch = active_torch if (blockid == 150 or data & 0b1000 == 0b1000) else inactive_torch
    static_torch_img = active_torch if (data & 0b100 == 0b100) else inactive_torch

    img = self.build_full_block( (top, increment), None, None, side, side)

    alpha_over(img, static_torch_img, static_torch, static_torch_img)
    alpha_over(img, back_torch, torch[0], back_torch)
    alpha_over(img, back_torch, torch[1], back_torch)
    return img
# trapdoor
# the trapdoor is looks like a sprite when opened, that's not good
@material(blockid=[96,167], data=range(16), transparent=True, nospawn=True)
def trapdoor(self, blockid, data):
    """Render wooden (96) and iron (167) trapdoors.

    Bit 0x4 = open (rendered as a single vertical face on the hinge
    side), bit 0x8 = attached to the top half of the block.
    """
    # rotation
    # Masked to not clobber opened/closed info
    if self.rotation == 1:
        if (data & 0b0011) == 0: data = data & 0b1100 | 3
        elif (data & 0b0011) == 1: data = data & 0b1100 | 2
        elif (data & 0b0011) == 2: data = data & 0b1100 | 0
        elif (data & 0b0011) == 3: data = data & 0b1100 | 1
    elif self.rotation == 2:
        if (data & 0b0011) == 0: data = data & 0b1100 | 1
        elif (data & 0b0011) == 1: data = data & 0b1100 | 0
        elif (data & 0b0011) == 2: data = data & 0b1100 | 3
        elif (data & 0b0011) == 3: data = data & 0b1100 | 2
    elif self.rotation == 3:
        if (data & 0b0011) == 0: data = data & 0b1100 | 2
        elif (data & 0b0011) == 1: data = data & 0b1100 | 3
        elif (data & 0b0011) == 2: data = data & 0b1100 | 1
        elif (data & 0b0011) == 3: data = data & 0b1100 | 0

    # texture generation
    if blockid == 96:
        texture = self.load_image_texture("assets/minecraft/textures/blocks/trapdoor.png")
    else:
        texture = self.load_image_texture("assets/minecraft/textures/blocks/iron_trapdoor.png")

    if data & 0x4 == 0x4: # opened trapdoor
        if data & 0x3 == 0: # west
            img = self.build_full_block(None, None, None, None, texture)
        if data & 0x3 == 1: # east
            img = self.build_full_block(None, texture, None, None, None)
        if data & 0x3 == 2: # south
            img = self.build_full_block(None, None, texture, None, None)
        if data & 0x3 == 3: # north
            img = self.build_full_block(None, None, None, texture, None)

    elif data & 0x4 == 0: # closed trapdoor
        if data & 0x8 == 0x8: # is a top trapdoor
            img = Image.new("RGBA", (24,24), self.bgcolor)
            t = self.build_full_block((texture, 12), None, None, texture, texture)
            # shift the slab up to the top of the block
            alpha_over(img, t, (0,-9),t)
        else: # is a bottom trapdoor
            img = self.build_full_block((texture, 12), None, None, texture, texture)

    return img
# block with hidden silverfish (stone, cobblestone and stone brick)
@material(blockid=97, data=range(3), solid=True)
def hidden_silverfish(self, blockid, data):
    """Render monster-egg blocks; they look exactly like the block
    they imitate (data 0 = stone, 1 = cobblestone, 2 = stone brick)."""
    disguise = ("stone", "cobblestone", "stonebrick")[data]
    t = self.load_image_texture(
        "assets/minecraft/textures/blocks/%s.png" % disguise)
    return self.build_block(t, t)
# stone brick
@material(blockid=98, data=range(4), solid=True)
def stone_brick(self, blockid, data):
    """Render stone brick variants.

    data: 0 = normal, 1 = mossy, 2 = cracked, 3 = carved ("circle").
    """
    variants = ("stonebrick", "stonebrick_mossy",
                "stonebrick_cracked", "stonebrick_carved")
    t = self.load_image_texture(
        "assets/minecraft/textures/blocks/%s.png" % variants[data])
    return self.build_full_block(t, None, None, t, t)
# huge brown and red mushroom
@material(blockid=[99,100], data= range(11) + [14,15], solid=True)
def huge_mushroom(self, blockid, data):
    """Render huge mushroom blocks (99 = brown, 100 = red).

    data selects which faces show the cap skin, the porous inside, or
    the stem: 0 = fleshy, 1-9 = cap corners/sides/top, 10 = stem,
    14 = all cap, 15 = all stem.
    """
    # rotation: remap cap corner/side values for the world rotation
    if self.rotation == 1:
        if data == 1: data = 3
        elif data == 2: data = 6
        elif data == 3: data = 9
        elif data == 4: data = 2
        elif data == 6: data = 8
        elif data == 7: data = 1
        elif data == 8: data = 4
        elif data == 9: data = 7
    elif self.rotation == 2:
        if data == 1: data = 9
        elif data == 2: data = 8
        elif data == 3: data = 7
        elif data == 4: data = 6
        elif data == 6: data = 4
        elif data == 7: data = 3
        elif data == 8: data = 2
        elif data == 9: data = 1
    elif self.rotation == 3:
        if data == 1: data = 7
        elif data == 2: data = 4
        elif data == 3: data = 1
        elif data == 4: data = 2
        elif data == 6: data = 8
        elif data == 7: data = 9
        elif data == 8: data = 6
        elif data == 9: data = 3

    # texture generation
    if blockid == 99: # brown
        cap = self.load_image_texture("assets/minecraft/textures/blocks/mushroom_block_skin_brown.png")
    else: # red
        cap = self.load_image_texture("assets/minecraft/textures/blocks/mushroom_block_skin_red.png")

    stem = self.load_image_texture("assets/minecraft/textures/blocks/mushroom_block_skin_stem.png")
    porous = self.load_image_texture("assets/minecraft/textures/blocks/mushroom_block_inside.png")

    if data == 0: # fleshy piece
        img = self.build_full_block(porous, None, None, porous, porous)

    if data == 1: # north-east corner
        img = self.build_full_block(cap, None, None, cap, porous)

    if data == 2: # east side
        img = self.build_full_block(cap, None, None, porous, cap)

    if data == 3: # south-east corner
        img = self.build_full_block(cap, None, None, porous, cap)

    if data == 4: # north side
        img = self.build_full_block(cap, None, None, cap, porous)

    if data == 5: # top piece
        img = self.build_full_block(cap, None, None, porous, porous)

    if data == 6: # south side
        img = self.build_full_block(cap, None, None, cap, porous)

    if data == 7: # north-west corner
        img = self.build_full_block(cap, None, None, cap, cap)

    if data == 8: # west side
        img = self.build_full_block(cap, None, None, porous, cap)

    if data == 9: # south-west corner
        img = self.build_full_block(cap, None, None, porous, cap)

    if data == 10: # stem
        img = self.build_full_block(porous, None, None, stem, stem)

    if data == 14: # all cap
        img = self.build_block(cap,cap)

    if data == 15: # all stem
        img = self.build_block(stem,stem)

    return img
# iron bars and glass pane
# TODO glass pane is not a sprite, it has a texture for the side,
# at the moment is not used
@material(blockid=[101,102, 160], data=range(256), transparent=True, nospawn=True)
def panes(self, blockid, data):
    """Render iron bars (101), glass panes (102) and stained glass
    panes (160) as up to four half-width faces meeting at the centre.

    Uses pseudo data (no rotation handling): the lower 4 bits encode
    the stained-glass color, the upper 4 bits encode which neighbours
    the pane connects to.  With no connections (0) all four faces are
    drawn.
    """
    if blockid == 101:
        # iron bars
        t = self.load_image_texture("assets/minecraft/textures/blocks/iron_bars.png")
    elif blockid == 160:
        t = self.load_image_texture("assets/minecraft/textures/blocks/glass_%s.png" % color_map[data & 0xf])
    else:
        # glass panes
        t = self.load_image_texture("assets/minecraft/textures/blocks/glass.png")
    left = t.copy()
    right = t.copy()

    # generate the four small pieces of the glass pane
    ImageDraw.Draw(right).rectangle((0,0,7,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(left).rectangle((8,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))

    up_left = self.transform_image_side(left)
    up_right = self.transform_image_side(right).transpose(Image.FLIP_TOP_BOTTOM)
    dw_right = self.transform_image_side(right)
    dw_left = self.transform_image_side(left).transpose(Image.FLIP_TOP_BOTTOM)

    # Create img to compose the texture
    img = Image.new("RGBA", (24,24), self.bgcolor)

    # +x axis points top right direction
    # +y axis points bottom right direction
    # First compose things in the back of the image,
    # then things in the front.

    # the lower 4 bits encode color, the upper 4 encode adjencies;
    # color was consumed above, keep only the adjacency bits
    data = data >> 4

    if (data & 0b0001) == 1 or data == 0:
        alpha_over(img,up_left, (6,3),up_left)    # top left
    if (data & 0b1000) == 8 or data == 0:
        alpha_over(img,up_right, (6,3),up_right)  # top right
    if (data & 0b0010) == 2 or data == 0:
        alpha_over(img,dw_left, (6,3),dw_left)    # bottom left
    if (data & 0b0100) == 4 or data == 0:
        alpha_over(img,dw_right, (6,3),dw_right)  # bottom right

    return img
# melon
block(blockid=103, top_image="assets/minecraft/textures/blocks/melon_top.png", side_image="assets/minecraft/textures/blocks/melon_side.png", solid=True)
# pumpkin and melon stem
# TODO To render it as in game needs from pseudo data and ancil data:
# once fully grown the stem bends to the melon/pumpkin block,
# at the moment only render the growing stem
@material(blockid=[104,105], data=range(8), transparent=True)
def stem(self, blockid, data):
    """Render a pumpkin (104) / melon (105) stem as an upright sprite.

    data (0-7) is the growth stage.  The full disconnected-stem
    texture is always shown; a previous revision also built a
    growth-cropped image here but immediately discarded it
    (``img`` was unconditionally overwritten by ``build_sprite``),
    so that dead code has been removed without changing output.
    """
    t = self.load_image_texture("assets/minecraft/textures/blocks/melon_stem_disconnected.png").copy()
    img = self.build_sprite(t)
    if data & 7 == 7:
        # fully grown stem gets brown color!
        # there is a conditional in rendermode-normal.c to not
        # tint the data value 7
        img = self.tint_texture(img, (211,169,116))
    return img
# vines
@material(blockid=106, data=range(16), transparent=True)
def vines(self, blockid, data):
    """Render vines attached to up to four block faces.

    data is a bitmask of attached faces: 1 = south, 2 = west,
    4 = north, 8 = east (Minecraft 1.0.0 directions, NOT Overviewer's
    convention where north is top-left by default).
    """
    # rotation
    # vines data is bit coded. decode it first.
    # rotate the data by bitwise shift: each world rotation step is a
    # 4-bit left rotation of the direction mask
    shifts = 0
    if self.rotation == 1:
        shifts = 1
    elif self.rotation == 2:
        shifts = 2
    elif self.rotation == 3:
        shifts = 3

    for i in range(shifts):
        data = data * 2
        if data & 16:
            # wrap the bit shifted out of the 4-bit mask back to bit 0
            data = (data - 16) | 1

    # decode data and prepare textures
    raw_texture = self.load_image_texture("assets/minecraft/textures/blocks/vine.png")
    s = w = n = e = None

    if data & 1: # south
        s = raw_texture
    if data & 2: # west
        w = raw_texture
    if data & 4: # north
        n = raw_texture
    if data & 8: # east
        e = raw_texture

    # texture generation
    img = self.build_full_block(None, n, e, w, s)

    return img
# fence gates
@material(blockid=[107, 183, 184, 185, 186, 187], data=range(8), transparent=True, nospawn=True)
def fence_gate(self, blockid, data):
    """Render a wooden fence gate.

    Bits 0x1-0x2 of *data* give the facing and bit 0x4 flags an open
    gate; the blockid selects the wood type (107 oak, 183 spruce,
    184 birch, 185 jungle, 186 dark oak, 187 acacia).
    """
    # rotation: strip the open flag, remap the facing for the map
    # rotation, then restore the flag afterwards
    opened = False
    if data & 0x4:
        data = data & 0x3
        opened = True
    if self.rotation == 1:
        if data == 0: data = 1
        elif data == 1: data = 2
        elif data == 2: data = 3
        elif data == 3: data = 0
    elif self.rotation == 2:
        if data == 0: data = 2
        elif data == 1: data = 3
        elif data == 2: data = 0
        elif data == 3: data = 1
    elif self.rotation == 3:
        if data == 0: data = 3
        elif data == 1: data = 0
        elif data == 2: data = 1
        elif data == 3: data = 2
    if opened:
        data = data | 0x4
    # create the closed gate side from the matching plank texture
    if blockid == 107: # Oak
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_oak.png").copy()
    elif blockid == 183: # Spruce
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_spruce.png").copy()
    elif blockid == 184: # Birch
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_birch.png").copy()
    elif blockid == 185: # Jungle
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_jungle.png").copy()
    elif blockid == 186: # Dark Oak
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_big_oak.png").copy()
    elif blockid == 187: # Acacia
        gate_side = self.load_image_texture("assets/minecraft/textures/blocks/planks_acacia.png").copy()
    else:
        return None
    # cut the plank texture into the gate silhouette (posts and bars)
    gate_side_draw = ImageDraw.Draw(gate_side)
    gate_side_draw.rectangle((7,0,15,0),outline=(0,0,0,0),fill=(0,0,0,0))
    gate_side_draw.rectangle((7,4,9,6),outline=(0,0,0,0),fill=(0,0,0,0))
    gate_side_draw.rectangle((7,10,15,16),outline=(0,0,0,0),fill=(0,0,0,0))
    gate_side_draw.rectangle((0,12,15,16),outline=(0,0,0,0),fill=(0,0,0,0))
    gate_side_draw.rectangle((0,0,4,15),outline=(0,0,0,0),fill=(0,0,0,0))
    gate_side_draw.rectangle((14,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    # darken the sides slightly, as with the fences; save and restore
    # the alpha channel so brightness does not bleed into transparency
    sidealpha = gate_side.split()[3]
    gate_side = ImageEnhance.Brightness(gate_side).enhance(0.9)
    gate_side.putalpha(sidealpha)
    # create the other sides
    mirror_gate_side = self.transform_image_side(gate_side.transpose(Image.FLIP_LEFT_RIGHT))
    gate_side = self.transform_image_side(gate_side)
    gate_other_side = gate_side.transpose(Image.FLIP_LEFT_RIGHT)
    mirror_gate_other_side = mirror_gate_side.transpose(Image.FLIP_LEFT_RIGHT)
    # Create img to compose the fence gate
    img = Image.new("RGBA", (24,24), self.bgcolor)
    if data & 0x4:
        # opened: the two door halves are swung against the fence line
        data = data & 0x3
        if data == 0:
            alpha_over(img, gate_side, (2,8), gate_side)
            alpha_over(img, gate_side, (13,3), gate_side)
        elif data == 1:
            alpha_over(img, gate_other_side, (-1,3), gate_other_side)
            alpha_over(img, gate_other_side, (10,8), gate_other_side)
        elif data == 2:
            alpha_over(img, mirror_gate_side, (-1,7), mirror_gate_side)
            alpha_over(img, mirror_gate_side, (10,2), mirror_gate_side)
        elif data == 3:
            alpha_over(img, mirror_gate_other_side, (2,1), mirror_gate_other_side)
            alpha_over(img, mirror_gate_other_side, (13,7), mirror_gate_other_side)
    else:
        # closed
        # positions for pasting the fence sides, as with fences
        pos_top_left = (2,3)
        pos_top_right = (10,3)
        pos_bottom_right = (10,7)
        pos_bottom_left = (2,7)
        if data == 0 or data == 2:
            alpha_over(img, gate_other_side, pos_top_right, gate_other_side)
            alpha_over(img, mirror_gate_other_side, pos_bottom_left, mirror_gate_other_side)
        elif data == 1 or data == 3:
            alpha_over(img, gate_side, pos_top_left, gate_side)
            alpha_over(img, mirror_gate_side, pos_bottom_right, mirror_gate_side)
    return img
# mycelium: plain solid block with distinct top/side textures
block(blockid=110, top_image="assets/minecraft/textures/blocks/mycelium_top.png", side_image="assets/minecraft/textures/blocks/mycelium_side.png")
# lilypad
# At the moment of writing this lilypads have no ancil data and their
# orientation depends on their position on the map, so pseudo
# ancildata is used.
@material(blockid=111, data=range(4), transparent=True)
def lilypad(self, blockid, data):
    """Render a lily pad, rotated according to the pseudo data."""
    tex = self.load_image_texture("assets/minecraft/textures/blocks/waterlily.png").copy()
    # pseudo data -> rotation angle (data 2 is the unrotated texture)
    angle = (180, 270, 0, 90)[data]
    if angle:
        tex = tex.rotate(angle)
    return self.build_full_block(None, None, None, None, None, tex)
# nether brick: plain solid block, same texture on all faces
block(blockid=112, top_image="assets/minecraft/textures/blocks/nether_brick.png")
# nether wart
@material(blockid=115, data=range(4), transparent=True)
def nether_wart(self, blockid, data):
    """Render nether wart as a billboard; *data* 0-3 is growth."""
    if data == 0:
        # just planted
        stage = 0
    elif data in (1, 2):
        stage = 1
    else:
        # fully grown
        stage = 2
    tex = self.load_image_texture(
        "assets/minecraft/textures/blocks/nether_wart_stage_%d.png" % stage)
    # use the same technique as tall grass
    return self.build_billboard(tex)
# enchantment table
# TODO there's no book at the moment
@material(blockid=116, transparent=True, nodata=True)
def enchantment_table(self, blockid, data):
    """Render an enchantment table: a 12-pixel-tall block, bookless."""
    top_tex = self.load_image_texture("assets/minecraft/textures/blocks/enchanting_table_top.png")
    side_tex = self.load_image_texture("assets/minecraft/textures/blocks/enchanting_table_side.png")
    # (top, 4) lowers the top face by 4 pixels
    return self.build_full_block((top_tex, 4), None, None, side_tex, side_tex)
# brewing stand
# TODO this is a placeholder: a 2D sprite pasted onto the base
@material(blockid=117, data=range(5), transparent=True)
def brewing_stand(self, blockid, data):
    """Render a brewing stand as its floor base plus a billboard."""
    base_tex = self.load_image_texture("assets/minecraft/textures/blocks/brewing_stand_base.png")
    img = self.build_full_block(None, None, None, None, None, base_tex)
    stand_tex = self.load_image_texture("assets/minecraft/textures/blocks/brewing_stand.png")
    sprite = self.build_billboard(stand_tex)
    alpha_over(img,sprite,(0,-2))
    return img
# cauldron
@material(blockid=118, data=range(4), transparent=True)
def cauldron(self, blockid, data):
    """Render a cauldron; *data* 0-3 is the water fill level."""
    side = self.load_image_texture("assets/minecraft/textures/blocks/cauldron_side.png")
    top = self.load_image_texture("assets/minecraft/textures/blocks/cauldron_top.png")
    bottom = self.load_image_texture("assets/minecraft/textures/blocks/cauldron_inner.png")
    water = self.transform_image_top(self.load_water())
    if data == 0:
        # empty cauldron: render as a plain full block
        img = self.build_full_block(top, side, side, side, side)
    else:
        # y-offset of the water surface: 1/3, 2/3 or completely full
        water_y = {1: 8, 2: 4, 3: 0}[data]
        # back walls first, then the water, then the front walls/top
        img = self.build_full_block(None, side, side, None, None)
        alpha_over(img, water, (0, water_y), water)
        front = self.build_full_block(top, None, None, side, side)
        alpha_over(img, front, (0,0), front)
    return img
# end portal and end_gateway
@material(blockid=[119,209], transparent=True, nodata=True)
def end_portal(self, blockid, data):
    """Render the end portal surface (119) or an end gateway (209)."""
    img = Image.new("RGBA", (24,24), self.bgcolor)
    # Fake the starfield: black texture speckled with grey, green and
    # white pixels at random positions.
    t = Image.new("RGBA", (16,16), (0,0,0,255))
    for color in ((155,155,155,255), (100,255,100,255), (255,255,255,255)):
        for _ in range(6):
            x = randint(0,15)
            y = randint(0,15)
            t.putpixel((x,y), color)
    if blockid == 209: # end_gateway is a full block of it
        return self.build_block(t, t)
    # the portal itself is just the flat top face
    t = self.transform_image_top(t)
    alpha_over(img, t, (0,0), t)
    return img
# end portal frame (data range 8 to get all orientations of filled)
@material(blockid=120, data=range(8), transparent=True)
def end_portal_frame(self, blockid, data):
    """Render an end portal frame.

    The bottom 2 bits are orientation info, but there is no graphical
    difference between orientations; bit 0x4 flags an inserted ender
    eye.
    """
    top = self.load_image_texture("assets/minecraft/textures/blocks/endframe_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/endframe_side.png")
    img = self.build_full_block((top, 4), None, None, side, side)
    if data & 0x4 == 0x4: # ender eye on it
        # generate the eye; the texture is loaded twice because each
        # copy is cropped differently (top of the eye vs. its sides).
        # (A redundant third load that was never used has been removed.)
        eye_t = self.load_image_texture("assets/minecraft/textures/blocks/endframe_eye.png").copy()
        eye_t_s = self.load_image_texture("assets/minecraft/textures/blocks/endframe_eye.png").copy()
        # cut out from the texture the side and the top of the eye
        ImageDraw.Draw(eye_t).rectangle((0,0,15,4),outline=(0,0,0,0),fill=(0,0,0,0))
        ImageDraw.Draw(eye_t_s).rectangle((0,4,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
        # transform images and paste
        eye = self.transform_image_top(eye_t)
        eye_s = self.transform_image_side(eye_t_s)
        eye_os = eye_s.transpose(Image.FLIP_LEFT_RIGHT)
        alpha_over(img, eye_s, (5,5), eye_s)
        alpha_over(img, eye_os, (9,5), eye_os)
        alpha_over(img, eye, (0,0), eye)
    return img
# end stone
block(blockid=121, top_image="assets/minecraft/textures/blocks/end_stone.png")
# dragon egg
# NOTE: this isn't a full block in game, but rendering it as one is
# better than nothing
block(blockid=122, top_image="assets/minecraft/textures/blocks/dragon_egg.png")
# inactive redstone lamp
block(blockid=123, top_image="assets/minecraft/textures/blocks/redstone_lamp_off.png")
# active redstone lamp
block(blockid=124, top_image="assets/minecraft/textures/blocks/redstone_lamp_on.png")
# daylight sensor.
@material(blockid=151, transparent=True)
def daylight_sensor(self, blockid, data):
    """Render a daylight sensor as a half-height slab."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/daylight_detector_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/daylight_detector_side.png")
    # cut the side texture in half, keeping only the lower 8 rows
    mask = side.crop((0,8,16,16))
    side = Image.new(side.mode, side.size, self.bgcolor)
    alpha_over(side, mask,(0,0,16,8), mask)
    # plain slab
    top = self.transform_image_top(top)
    side = self.transform_image_side(side)
    otherside = side.transpose(Image.FLIP_LEFT_RIGHT)
    # darken the sides slightly; save and restore the alpha channel so
    # brightness does not affect transparency
    sidealpha = side.split()[3]
    side = ImageEnhance.Brightness(side).enhance(0.9)
    side.putalpha(sidealpha)
    othersidealpha = otherside.split()[3]
    otherside = ImageEnhance.Brightness(otherside).enhance(0.8)
    otherside.putalpha(othersidealpha)
    # compose: left side, right side, then the top face half-way down
    img = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(img, side, (0,12), side)
    alpha_over(img, otherside, (12,12), otherside)
    alpha_over(img, top, (0,6), top)
    return img
# wooden double and normal slabs
# these are the new wooden slabs; blockids 43/44 still have wooden
# slabs, but those are unobtainable without cheating
@material(blockid=[125, 126], data=range(16), transparent=(44,), solid=True)
def wooden_slabs(self, blockid, data):
    """Render wooden slabs (126) and wooden double slabs (125)."""
    plank_names = ("oak", "spruce", "birch", "jungle", "acacia", "big_oak")
    texture = data & 7
    if texture >= len(plank_names):
        return None
    top = side = self.load_image_texture(
        "assets/minecraft/textures/blocks/planks_%s.png" % plank_names[texture])
    if blockid == 125: # double slab
        return self.build_block(top, side)
    # bit 8 flags an upper-half slab
    return self.build_slab_block(top, side, data & 8 == 8)
# emerald ore
block(blockid=129, top_image="assets/minecraft/textures/blocks/emerald_ore.png")
# emerald block
block(blockid=133, top_image="assets/minecraft/textures/blocks/emerald_block.png")
# cocoa plant
@material(blockid=127, data=range(12), transparent=True)
def cocoa_plant(self, blockid, data):
    """Render a cocoa pod attached to a jungle log.

    Bits 0-1 of *data* give the facing and bits 2-3 the growth stage;
    data < 12, so ``data & 12`` is always 0 (small), 4 (normal) or
    8 (big).
    """
    orientation = data & 3
    # rotation: remap the facing for the map rotation
    if self.rotation == 1:
        if orientation == 0: orientation = 1
        elif orientation == 1: orientation = 2
        elif orientation == 2: orientation = 3
        elif orientation == 3: orientation = 0
    elif self.rotation == 2:
        if orientation == 0: orientation = 2
        elif orientation == 1: orientation = 3
        elif orientation == 2: orientation = 0
        elif orientation == 3: orientation = 1
    elif self.rotation == 3:
        if orientation == 0: orientation = 3
        elif orientation == 1: orientation = 0
        elif orientation == 2: orientation = 1
        elif orientation == 3: orientation = 2
    # growth stage selects the texture and the paste offsets
    size = data & 12
    if size == 8: # big
        t = self.load_image_texture("assets/minecraft/textures/blocks/cocoa_stage_2.png")
        c_left = (0,3)
        c_right = (8,3)
        c_top = (5,2)
    elif size == 4: # normal
        t = self.load_image_texture("assets/minecraft/textures/blocks/cocoa_stage_1.png")
        c_left = (-2,2)
        c_right = (8,2)
        c_top = (5,2)
    elif size == 0: # small
        t = self.load_image_texture("assets/minecraft/textures/blocks/cocoa_stage_0.png")
        c_left = (-3,2)
        c_right = (6,2)
        c_top = (5,2)
    # let's get every texture piece necessary to do this
    stalk = t.copy()
    ImageDraw.Draw(stalk).rectangle((0,0,11,16),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(stalk).rectangle((12,4,16,16),outline=(0,0,0,0),fill=(0,0,0,0))
    top = t.copy() # warning! changes with plant size
    ImageDraw.Draw(top).rectangle((0,7,16,16),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(top).rectangle((7,0,16,6),outline=(0,0,0,0),fill=(0,0,0,0))
    side = t.copy() # warning! changes with plant size
    ImageDraw.Draw(side).rectangle((0,0,6,16),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(side).rectangle((0,0,16,3),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(side).rectangle((0,14,16,16),outline=(0,0,0,0),fill=(0,0,0,0))
    # first compose the block of the cocoa plant
    block = Image.new("RGBA", (24,24), self.bgcolor)
    tmp = self.transform_image_side(side).transpose(Image.FLIP_LEFT_RIGHT)
    alpha_over (block, tmp, c_right,tmp) # right side
    tmp = tmp.transpose(Image.FLIP_LEFT_RIGHT)
    alpha_over (block, tmp, c_left,tmp) # left side
    tmp = self.transform_image_top(top)
    alpha_over(block, tmp, c_top,tmp)
    if size == 0:
        # fix a pixel hole
        block.putpixel((6,9), block.getpixel((6,10)))
    # compose the cocoa plant: pod plus stalk, flipping the whole
    # image for the mirrored facings
    img = Image.new("RGBA", (24,24), self.bgcolor)
    if orientation in (2,3): # south and west
        tmp = self.transform_image_side(stalk).transpose(Image.FLIP_LEFT_RIGHT)
        alpha_over(img, block,(-1,-2), block)
        alpha_over(img, tmp, (4,-2), tmp)
        if orientation == 3:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
    elif orientation in (0,1): # north and east
        tmp = self.transform_image_side(stalk.transpose(Image.FLIP_LEFT_RIGHT))
        alpha_over(img, block,(-1,5), block)
        alpha_over(img, tmp, (2,12), tmp)
        if orientation == 0:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
    return img
# command block
@material(blockid=[137,210,211], solid=True, nodata=True)
def command_block(self, blockid, data):
    """Render command blocks: impulse (137), repeating (210) and
    chain (211) variants, each with front/side/back textures."""
    prefixes = {210: "repeating_command_block", 211: "chain_command_block"}
    prefix = prefixes.get(blockid, "command_block")
    front = self.load_image_texture("assets/minecraft/textures/blocks/%s_front.png" % prefix)
    side = self.load_image_texture("assets/minecraft/textures/blocks/%s_side.png" % prefix)
    back = self.load_image_texture("assets/minecraft/textures/blocks/%s_back.png" % prefix)
    return self.build_full_block(side, side, back, front, side)
# beacon block
# at the moment of writing this, it seems the beacon block doesn't use
# the data values
@material(blockid=138, transparent=True, nodata = True)
def beacon(self, blockid, data):
    """Render a beacon: obsidian base, inner crystal, glass shell."""
    # generate the three pieces of the block
    t = self.load_image_texture("assets/minecraft/textures/blocks/glass.png")
    glass = self.build_block(t,t)
    t = self.load_image_texture("assets/minecraft/textures/blocks/obsidian.png")
    obsidian = self.build_full_block((t,12),None, None, t, t)
    obsidian = obsidian.resize((20,20), Image.ANTIALIAS)
    t = self.load_image_texture("assets/minecraft/textures/blocks/beacon.png")
    crystal = self.build_block(t,t)
    crystal = crystal.resize((16,16),Image.ANTIALIAS)
    # compose back-to-front: base, then crystal, then the glass shell
    # on top so everything shows through it
    img = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(img, obsidian, (2, 4), obsidian)
    alpha_over(img, crystal, (4,3), crystal)
    alpha_over(img, glass, (0,0), glass)
    return img
# cobblestone and mossy cobblestone walls, chorus plants
# one additional bit of data value added for mossy and cobblestone
@material(blockid=[139, 199], data=range(32), transparent=True, nospawn=True)
def cobblestone_wall(self, blockid, data):
    """Render a cobblestone/mossy wall (139) or chorus plant (199).

    The four low bits of *data* are pseudo data flagging which of the
    four neighbours the wall connects to; bit 0b10000 selects the
    mossy variant.
    """
    # chorus plants
    if blockid == 199:
        t = self.load_image_texture("assets/minecraft/textures/blocks/chorus_plant.png").copy()
    # no rotation, uses pseudo data
    elif data & 0b10000 == 0:
        # cobblestone
        t = self.load_image_texture("assets/minecraft/textures/blocks/cobblestone.png").copy()
    else:
        # mossy cobblestone
        t = self.load_image_texture("assets/minecraft/textures/blocks/cobblestone_mossy.png").copy()
    wall_pole_top = t.copy()
    wall_pole_side = t.copy()
    wall_side_top = t.copy()
    wall_side = t.copy()
    # _full is used for walls without pole
    wall_side_top_full = t.copy()
    wall_side_full = t.copy()
    # generate the textures of the wall
    ImageDraw.Draw(wall_pole_top).rectangle((0,0,3,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_pole_top).rectangle((12,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_pole_top).rectangle((0,0,15,3),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_pole_top).rectangle((0,12,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_pole_side).rectangle((0,0,3,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_pole_side).rectangle((12,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    # Create the sides and the top of the pole
    wall_pole_side = self.transform_image_side(wall_pole_side)
    wall_pole_other_side = wall_pole_side.transpose(Image.FLIP_LEFT_RIGHT)
    wall_pole_top = self.transform_image_top(wall_pole_top)
    # Darken the sides slightly. These methods also affect the alpha layer,
    # so save them first (we don't want to "darken" the alpha layer making
    # the block transparent)
    sidealpha = wall_pole_side.split()[3]
    wall_pole_side = ImageEnhance.Brightness(wall_pole_side).enhance(0.8)
    wall_pole_side.putalpha(sidealpha)
    othersidealpha = wall_pole_other_side.split()[3]
    wall_pole_other_side = ImageEnhance.Brightness(wall_pole_other_side).enhance(0.7)
    wall_pole_other_side.putalpha(othersidealpha)
    # Compose the wall pole
    wall_pole = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(wall_pole,wall_pole_side, (3,4),wall_pole_side)
    alpha_over(wall_pole,wall_pole_other_side, (9,4),wall_pole_other_side)
    alpha_over(wall_pole,wall_pole_top, (0,0),wall_pole_top)
    # create the sides and the top of a wall attached to a pole
    ImageDraw.Draw(wall_side).rectangle((0,0,15,2),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side).rectangle((0,0,11,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side_top).rectangle((0,0,11,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side_top).rectangle((0,0,15,4),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side_top).rectangle((0,11,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    # full version, without pole
    # NOTE(review): the next two lines draw the same rectangle twice;
    # presumably one of them was meant to crop a different region --
    # verify against upstream before changing.
    ImageDraw.Draw(wall_side_full).rectangle((0,0,15,2),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side_top_full).rectangle((0,4,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(wall_side_top_full).rectangle((0,4,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    # compose the sides of a wall attached to a pole
    tmp = Image.new("RGBA", (24,24), self.bgcolor)
    wall_side = self.transform_image_side(wall_side)
    wall_side_top = self.transform_image_top(wall_side_top)
    # Darken the sides slightly. These methods also affect the alpha layer,
    # so save them first (we don't want to "darken" the alpha layer making
    # the block transparent)
    sidealpha = wall_side.split()[3]
    wall_side = ImageEnhance.Brightness(wall_side).enhance(0.7)
    wall_side.putalpha(sidealpha)
    alpha_over(tmp,wall_side, (0,0),wall_side)
    alpha_over(tmp,wall_side_top, (-5,3),wall_side_top)
    wall_side = tmp
    wall_other_side = wall_side.transpose(Image.FLIP_LEFT_RIGHT)
    # compose the sides of the full wall
    tmp = Image.new("RGBA", (24,24), self.bgcolor)
    wall_side_full = self.transform_image_side(wall_side_full)
    wall_side_top_full = self.transform_image_top(wall_side_top_full.rotate(90))
    # Darken the sides slightly. These methods also affect the alpha layer,
    # so save them first (we don't want to "darken" the alpha layer making
    # the block transparent)
    sidealpha = wall_side_full.split()[3]
    wall_side_full = ImageEnhance.Brightness(wall_side_full).enhance(0.7)
    wall_side_full.putalpha(sidealpha)
    alpha_over(tmp,wall_side_full, (4,0),wall_side_full)
    alpha_over(tmp,wall_side_top_full, (3,-4),wall_side_top_full)
    wall_side_full = tmp
    wall_other_side_full = wall_side_full.transpose(Image.FLIP_LEFT_RIGHT)
    # Create img to compose the wall
    img = Image.new("RGBA", (24,24), self.bgcolor)
    # Position wall imgs around the wall bit stick
    pos_top_left = (-5,-2)
    pos_bottom_left = (-8,4)
    pos_top_right = (5,-3)
    pos_bottom_right = (7,4)
    # +x axis points top right direction
    # +y axis points bottom right direction
    # There are two special cases for wall without pole.
    # Normal case:
    # First compose the walls in the back of the image,
    # then the pole and then the walls in the front.
    if (data == 0b1010) or (data == 0b11010):
        alpha_over(img, wall_other_side_full,(0,2), wall_other_side_full)
    elif (data == 0b0101) or (data == 0b10101):
        alpha_over(img, wall_side_full,(0,2), wall_side_full)
    else:
        if (data & 0b0001) == 1:
            alpha_over(img,wall_side, pos_top_left,wall_side) # top left
        if (data & 0b1000) == 8:
            alpha_over(img,wall_other_side, pos_top_right,wall_other_side) # top right
        alpha_over(img,wall_pole,(0,0),wall_pole)
        if (data & 0b0010) == 2:
            alpha_over(img,wall_other_side, pos_bottom_left,wall_other_side) # bottom left
        if (data & 0b0100) == 4:
            alpha_over(img,wall_side, pos_bottom_right,wall_side) # bottom right
    return img
# carrots, potatoes
@material(blockid=[141,142], data=range(8), transparent=True, nospawn=True)
def crops4(self, blockid, data):
    """Render carrots (141) and potatoes (142).

    Both crops have 8 data values but only 4 visual growth stages.
    """
    stage = (0, 0, 1, 1, 2, 2, 2, 3)[data]
    if blockid == 141: # carrots
        raw_crop = self.load_image_texture("assets/minecraft/textures/blocks/carrots_stage_%d.png" % stage)
    else: # potatoes
        raw_crop = self.load_image_texture("assets/minecraft/textures/blocks/potatoes_stage_%d.png" % stage)
    # paste the texture flat on the ground and twice upright
    flat = self.transform_image_top(raw_crop)
    upright = self.transform_image_side(raw_crop)
    mirrored = upright.transpose(Image.FLIP_LEFT_RIGHT)
    img = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(img, flat, (0,12), flat)
    alpha_over(img, upright, (6,3), upright)
    alpha_over(img, mirrored, (6,3), mirrored)
    return img
# anvils
@material(blockid=145, data=range(12), transparent=True)
def anvil(self, blockid, data):
    """Render an anvil.

    Bit 0x1 of *data* is the orientation; bits 0xc give the damage
    level (0 pristine, 0x4 slightly damaged, 0x8 very damaged).
    Since data < 12, ``data & 0xc`` is always 0, 0x4 or 0x8.
    """
    # anvils only have two orientations, invert it for rotations 1 and 3
    orientation = data & 0x1
    if self.rotation in (1,3):
        if orientation == 1:
            orientation = 0
        else:
            orientation = 1
    # get the correct textures
    # the bits 0x4 and 0x8 determine how damaged is the anvil
    if (data & 0xc) == 0: # non damaged anvil
        top = self.load_image_texture("assets/minecraft/textures/blocks/anvil_top_damaged_0.png")
    elif (data & 0xc) == 0x4: # slightly damaged
        top = self.load_image_texture("assets/minecraft/textures/blocks/anvil_top_damaged_1.png")
    elif (data & 0xc) == 0x8: # very damaged
        top = self.load_image_texture("assets/minecraft/textures/blocks/anvil_top_damaged_2.png")
    # everything else use this texture
    big_side = self.load_image_texture("assets/minecraft/textures/blocks/anvil_base.png").copy()
    small_side = self.load_image_texture("assets/minecraft/textures/blocks/anvil_base.png").copy()
    base = self.load_image_texture("assets/minecraft/textures/blocks/anvil_base.png").copy()
    small_base = self.load_image_texture("assets/minecraft/textures/blocks/anvil_base.png").copy()
    # cut needed patterns
    ImageDraw.Draw(big_side).rectangle((0,8,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(small_side).rectangle((0,0,2,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(small_side).rectangle((13,0,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(small_side).rectangle((0,8,15,15),outline=(0,0,0,0),fill=(0,0,0,0))
    ImageDraw.Draw(base).rectangle((0,0,15,15),outline=(0,0,0,0))
    ImageDraw.Draw(base).rectangle((1,1,14,14),outline=(0,0,0,0))
    ImageDraw.Draw(small_base).rectangle((0,0,15,15),outline=(0,0,0,0))
    ImageDraw.Draw(small_base).rectangle((1,1,14,14),outline=(0,0,0,0))
    ImageDraw.Draw(small_base).rectangle((2,2,13,13),outline=(0,0,0,0))
    ImageDraw.Draw(small_base).rectangle((3,3,12,12),outline=(0,0,0,0))
    # check orientation and compose the anvil
    if orientation == 1: # bottom-left top-right
        top = top.rotate(90)
        left_side = small_side
        left_pos = (1,7)
        right_side = big_side
        right_pos = (10,5)
    else: # top-left bottom-right
        right_side = small_side
        right_pos = (12,7)
        left_side = big_side
        left_pos = (3,5)
    img = Image.new("RGBA", (24,24), self.bgcolor)
    # darken sides
    # NOTE(review): left_side/right_side were bound to big_side and
    # small_side *before* this point; rebinding big_side/small_side to
    # new brightened images below leaves left_side/right_side pointing
    # at the un-darkened originals, so this darkening never reaches the
    # pasted sides -- verify intent before changing.
    alpha = big_side.split()[3]
    big_side = ImageEnhance.Brightness(big_side).enhance(0.8)
    big_side.putalpha(alpha)
    alpha = small_side.split()[3]
    small_side = ImageEnhance.Brightness(small_side).enhance(0.9)
    small_side.putalpha(alpha)
    alpha = base.split()[3]
    base_d = ImageEnhance.Brightness(base).enhance(0.8)
    base_d.putalpha(alpha)
    # compose: three stacked darkened bases form the plinth, then the
    # small base, the top face, and finally both sides
    base = self.transform_image_top(base)
    base_d = self.transform_image_top(base_d)
    small_base = self.transform_image_top(small_base)
    top = self.transform_image_top(top)
    alpha_over(img, base_d, (0,12), base_d)
    alpha_over(img, base_d, (0,11), base_d)
    alpha_over(img, base_d, (0,10), base_d)
    alpha_over(img, small_base, (0,10), small_base)
    alpha_over(img, top, (0,0), top)
    left_side = self.transform_image_side(left_side)
    right_side = self.transform_image_side(right_side).transpose(Image.FLIP_LEFT_RIGHT)
    alpha_over(img, left_side, left_pos, left_side)
    alpha_over(img, right_side, right_pos, right_side)
    return img
# block of redstone
block(blockid=152, top_image="assets/minecraft/textures/blocks/redstone_block.png")
# nether quartz ore
block(blockid=153, top_image="assets/minecraft/textures/blocks/quartz_ore.png")
# block of quartz
@material(blockid=155, data=range(5), solid=True)
def quartz_block(self, blockid, data):
    """Render quartz blocks; data 0/1 are the plain and chiseled
    variants, 2-4 the vertically/horizontally oriented pillars."""
    flat_variants = {
        0: ("quartz_block_top", "quartz_block_side"),
        1: ("quartz_block_chiseled_top", "quartz_block_chiseled"),
    }
    if data in flat_variants:
        top_name, side_name = flat_variants[data]
        top = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % top_name)
        side = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % side_name)
        return self.build_block(top, side)
    # pillar quartz block with orientation
    top = self.load_image_texture("assets/minecraft/textures/blocks/quartz_block_lines_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/quartz_block_lines.png").copy()
    if data == 2: # vertical
        return self.build_block(top, side)
    # horizontal pillars: north-south (3) and east-west (4) swap their
    # appearance on odd map rotations
    north_south_view = self.rotation in (0,2)
    if (data == 3) == north_south_view:
        return self.build_full_block(side, None, None, top, side.rotate(90))
    return self.build_full_block(side.rotate(90), None, None, side.rotate(90), top)
# hopper
@material(blockid=154, data=range(4), transparent=True)
def hopper(self, blockid, data):
    """Render a hopper as three stacked, shrinking box sections."""
    outside = self.load_image_texture("assets/minecraft/textures/blocks/hopper_outside.png")
    top_tex = self.load_image_texture("assets/minecraft/textures/blocks/hopper_top.png")
    inside = self.load_image_texture("assets/minecraft/textures/blocks/hopper_inside.png")
    # wide rim section at the top (top face lowered 10 px)
    rim = self.build_full_block((top_tex,10), outside, outside, outside, outside, outside)
    # solid middle section, scaled down below
    mid = self.build_full_block((top_tex,5), outside, outside, outside, outside, outside)
    spout = self.build_block(outside, outside)
    mid = mid.resize((17,17), Image.ANTIALIAS)
    spout = spout.resize((10,10), Image.ANTIALIAS)
    # compose bottom-up so the wider rim overlaps the narrower parts
    img = Image.new("RGBA", (24,24), self.bgcolor)
    alpha_over(img, spout, (7,14), spout)
    alpha_over(img, mid, (3,3), mid)
    alpha_over(img, rim, (0,-6), rim)
    return img
# slime block: plain solid block, same texture on all faces
block(blockid=165, top_image="assets/minecraft/textures/blocks/slime.png")
# prismarine block
@material(blockid=168, data=range(3), solid=True)
def prismarine_block(self, blockid, data):
    """Render prismarine: rough (0), bricks (1) or dark (2)."""
    variants = {0: "prismarine_rough", 1: "prismarine_bricks", 2: "prismarine_dark"}
    t = self.load_image_texture("assets/minecraft/textures/blocks/%s.png" % variants[data])
    return self.build_block(t, t)
# sea lantern: plain solid block, same texture on all faces
block(blockid=169, top_image="assets/minecraft/textures/blocks/sea_lantern.png")
# hay block
@material(blockid=170, data=range(9), solid=True)
def hayblock(self, blockid, data):
    """Render a hay bale; data 4/8 are the horizontal orientations,
    anything else is upright."""
    top = self.load_image_texture("assets/minecraft/textures/blocks/hay_block_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/hay_block_side.png")
    # odd map rotations swap the two horizontal orientations
    if self.rotation in (1, 3):
        if data == 4:
            data = 8
        elif data == 8:
            data = 4
    # choose orientation and paste textures
    if data == 4: # east-west orientation
        return self.build_full_block(side.rotate(90), None, None, top, side.rotate(90))
    if data == 8: # north-south orientation
        return self.build_full_block(side, None, None, side.rotate(90), top)
    return self.build_block(top, side)
# carpet - a 1-pixel-tall layer of colored wool
@material(blockid=171, data=range(16), transparent=True)
def carpet(self, blockid, data):
    """Render carpet; *data* selects the wool color."""
    tex = self.load_image_texture("assets/minecraft/textures/blocks/wool_colored_%s.png" % color_map[data])
    # (tex, 15) lowers the top face to 1 pixel above the ground
    return self.build_full_block((tex, 15), tex, tex, tex, tex)
# hardened clay block
block(blockid=172, top_image="assets/minecraft/textures/blocks/hardened_clay.png")
# stained hardened clay
@material(blockid=159, data=range(16), solid=True)
def stained_clay(self, blockid, data):
    """Render stained hardened clay; *data* selects the dye color."""
    tex = self.load_image_texture(
        "assets/minecraft/textures/blocks/hardened_clay_stained_%s.png" % color_map[data])
    return self.build_block(tex, tex)
# coal block
block(blockid=173, top_image="assets/minecraft/textures/blocks/coal_block.png")
# packed ice block
block(blockid=174, top_image="assets/minecraft/textures/blocks/ice_packed.png")
@material(blockid=175, data=range(16), transparent=True)
def flower(self, blockid, data):
    """Render double-height plants; bits 0-2 pick the species and bit
    0x8 marks the top half."""
    double_plant_map = ["sunflower", "syringa", "grass", "fern", "rose", "paeonia", "paeonia", "paeonia"]
    plant = double_plant_map[data & 0x7]
    part = "top" if data & 0x8 else "bottom"
    png = "assets/minecraft/textures/blocks/double_plant_%s_%s.png" % (plant,part)
    img = self.build_billboard(self.load_image_texture(png))
    # the sunflower top additionally carries the bloom face
    if data == 8:
        bloom_tex = self.load_image_texture("assets/minecraft/textures/blocks/double_plant_sunflower_front.png")
        alpha_over(img, bloom_tex.resize((14, 11), Image.ANTIALIAS), (5,5))
    return img
# chorus flower
@material(blockid=200, data=range(6), solid=True)
def chorus_flower(self, blockid, data):
    """Chorus flower; growth stage 5 (fully aged) uses the dead texture."""
    path = ("assets/minecraft/textures/blocks/chorus_flower_dead.png"
            if data == 5 else
            "assets/minecraft/textures/blocks/chorus_flower.png")
    texture = self.load_image_texture(path)
    return self.build_block(texture, texture)
# purpur block (id 201; same texture on all faces)
block(blockid=201, top_image="assets/minecraft/textures/blocks/purpur_block.png")
# purpur pillar
@material(blockid=202, data=range(12), solid=True)
def purpur_pillar(self, blockid, data):
    """Purpur pillar: axis-aligned block.

    Bits 4+8 of the data value select the pillar axis:
    0 = upright (vertical), 4 = east-west, 8 = north-south.
    """
    pillar_orientation = data & 12
    # Map rotations 1 and 3 swap the two horizontal axes, matching the
    # handling in hayblock/boneblock (previously missing here).
    if self.rotation in (1, 3):
        if pillar_orientation == 4:
            pillar_orientation = 8
        elif pillar_orientation == 8:
            pillar_orientation = 4
    top = self.load_image_texture("assets/minecraft/textures/blocks/purpur_pillar_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/purpur_pillar.png")
    # data & 12 can only be 0, 4 or 8, so one branch always matches
    if pillar_orientation == 0:  # upright (vertical) orientation
        return self.build_block(top, side)
    elif pillar_orientation == 4:  # east-west orientation
        return self.build_full_block(side.rotate(90), None, None, top, side.rotate(90))
    elif pillar_orientation == 8:  # north-south orientation
        return self.build_full_block(side, None, None, side.rotate(270), top)
# end brick (id 206; same texture on all faces)
block(blockid=206, top_image="assets/minecraft/textures/blocks/end_bricks.png")
# frosted ice
@material(blockid=212, data=range(4), solid=True)
def frosted_ice(self, blockid, data):
    """Frosted ice; data (0-3) selects the melt-stage texture."""
    tex = self.load_image_texture("assets/minecraft/textures/blocks/frosted_ice_%d.png" % data)
    return self.build_block(tex, tex)
# magma block (id 213; same texture on all faces)
block(blockid=213, top_image="assets/minecraft/textures/blocks/magma.png")
# nether wart block (id 214; same texture on all faces)
block(blockid=214, top_image="assets/minecraft/textures/blocks/nether_wart_block.png")
# red nether brick (id 215; same texture on all faces)
block(blockid=215, top_image="assets/minecraft/textures/blocks/red_nether_brick.png")
@material(blockid=216, data=range(12), solid=True)
def boneblock(self, blockid, data):
    """Bone block: axis-aligned block; data bits 4+8 give the axis
    (0 = upright, 4 = east-west, 8 = north-south)."""
    # extract orientation
    axis = data & 12
    # Map rotations 1 and 3 swap the two horizontal axes.
    if self.rotation in (1, 3):
        if axis == 4:
            axis = 8
        elif axis == 8:
            axis = 4
    top = self.load_image_texture("assets/minecraft/textures/blocks/bone_block_top.png")
    side = self.load_image_texture("assets/minecraft/textures/blocks/bone_block_side.png")
    # choose orientation and paste textures
    if axis == 0:
        return self.build_block(top, side)
    if axis == 4:  # east-west orientation
        return self.build_full_block(side.rotate(90), None, None, top, side.rotate(90))
    if axis == 8:  # north-south orientation
        return self.build_full_block(side, None, None, side.rotate(270), top)
# structure block
@material(blockid=255, data=range(4), solid=True)
def structure_block(self, blockid, data):
    """Structure block; data (0-3) selects save/load/corner/data variant."""
    variant = ("save", "load", "corner", "data")[data]
    img = self.load_image_texture("assets/minecraft/textures/blocks/structure_block_%s.png" % variant)
    return self.build_block(img, img)
# beetroots
@material(blockid=207, data=range(4), transparent=True, nospawn=True)
def crops(self, blockid, data):
    """Beetroot crop; data (0-3) is the growth stage.

    Composites the flat crop texture as a top face plus two mirrored
    side faces into a 24x24 isometric sprite.
    """
    raw_crop = self.load_image_texture("assets/minecraft/textures/blocks/beetroots_stage_%d.png" % data)
    top_face = self.transform_image_top(raw_crop)
    left_face = self.transform_image_side(raw_crop)
    right_face = left_face.transpose(Image.FLIP_LEFT_RIGHT)
    img = Image.new("RGBA", (24, 24), self.bgcolor)
    alpha_over(img, top_face, (0, 12), top_face)
    alpha_over(img, left_face, (6, 3), left_face)
    alpha_over(img, right_face, (6, 3), right_face)
    return img
|
aheadley/Minecraft-Overviewer
|
overviewer_core/textures.py
|
Python
|
gpl-3.0
| 193,926
|
""" Plotting Service generates graphs according to the client specifications
and data
"""
__RCSID__ = "$Id$"
import os
import hashlib
from types import DictType, ListType
from DIRAC import S_OK, S_ERROR, rootPath, gConfig, gLogger, gMonitor
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.FrameworkSystem.Service.PlotCache import gPlotCache
def initializePlottingHandler( serviceInfo ):
  """Service initialization.

  Resolves the directory plot images are written to, verifies it is
  writable, hands it to the plot cache and registers monitoring.
  Returns S_OK() on success, S_ERROR(...) if the location is unusable.
  """
  #Get data location
  plottingSection = PathFinder.getServiceSection( "Framework/Plotting" )
  dataPath = gConfig.getValue( "%s/DataLocation" % plottingSection, "data/graphs" )
  dataPath = dataPath.strip()
  # Relative (or empty) paths are anchored at the instance path / DIRAC root.
  # startswith() avoids an IndexError on an empty configured value.
  if not dataPath.startswith( "/" ):
    dataPath = os.path.realpath( "%s/%s" % ( gConfig.getValue( '/LocalSite/InstancePath', rootPath ), dataPath ) )
  gLogger.info( "Data will be written into %s" % dataPath )
  try:
    os.makedirs( dataPath )
  except OSError:
    # Directory already exists, or cannot be created; the write test
    # below is the authoritative check.
    pass
  try:
    # Probe writability with a throwaway file
    testFile = "%s/plot__.test" % dataPath
    fd = open( testFile, "w" )
    fd.close()
    os.unlink( testFile )
  except IOError:
    gLogger.fatal( "Can't write to %s" % dataPath )
    return S_ERROR( "Data location is not writable" )
  gPlotCache.setPlotsLocation( dataPath )
  gMonitor.registerActivity( "plotsDrawn", "Drawn plot images", "Plotting requests", "plots", gMonitor.OP_SUM )
  return S_OK()
class PlottingHandler( RequestHandler ):
  """DISET request handler that generates plot images from client-supplied
  data and metadata, and transfers the resulting image files back.
  """
  def __calculatePlotHash( self, data, metadata, subplotMetadata ):
    # Cache key for a plot: md5 of the repr() of all inputs.
    # NOTE(review): repr() of dicts is not guaranteed stable across
    # interpreter versions/runs, so cache keys may differ between
    # processes -- confirm this is acceptable for the plot cache.
    m = hashlib.md5()
    m.update( repr( {'Data':data, 'PlotMetadata':metadata, 'SubplotMetadata':subplotMetadata} ) )
    return m.hexdigest()
  # DISET argument-type signature for export_generatePlot
  types_generatePlot = [ [DictType, ListType], DictType ]
  def export_generatePlot( self, data, plotMetadata, subplotMetadata = {} ):
    """ Create a plot according to the client specification and return its name
    """
    # NOTE(review): mutable default argument; harmless as long as it is
    # never mutated here, but fragile if that changes.
    plotHash = self.__calculatePlotHash( data, plotMetadata, subplotMetadata )
    result = gPlotCache.getPlot( plotHash, data, plotMetadata, subplotMetadata )
    if not result['OK']:
      return result
    return S_OK( result['Value']['plot'] )
  def transfer_toClient( self, fileId, token, fileHelper ):
    """
    Get graphs data

    Streams the cached plot file identified by fileId to the client
    through fileHelper; returns S_OK()/S_ERROR(...) result dicts.
    """
    retVal = gPlotCache.getPlotData( fileId )
    if not retVal[ 'OK' ]:
      return retVal
    retVal = fileHelper.sendData( retVal[ 'Value' ] )
    if not retVal[ 'OK' ]:
      return retVal
    fileHelper.sendEOF()
    return S_OK()
|
coberger/DIRAC
|
FrameworkSystem/Service/PlottingHandler.py
|
Python
|
gpl-3.0
| 2,415
|
# Copyright 2015 Adam Greenstein <adamgreenstein@comcast.net>
#
# Switcharoo Cartographer is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Switcharoo Cartographer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Switcharoo Cartographer. If not, see <http://www.gnu.org/licenses/>.
from data import Access, Entry, EntryError
from Queue import Queue
class EntryQueue(Queue):
    """FIFO queue of scraper entries backed by persistent storage.

    Overrides the Queue hook methods (_init/_put/_get) so the underlying
    deque is pre-seeded from storage on construction, and new URLs are
    validated and de-duplicated before being enqueued.
    """
    def __init__(self, transverse, maxsize=0):
        # transverse carries the shared reddit session and the event sink;
        # both must be set before Queue.__init__ triggers _init() below.
        self.reddit = transverse.reddit
        self.events = transverse.events
        Queue.__init__(self, maxsize)
    def _init(self, maxsize):
        # Queue.__init__ hook: create the deque, then refill it with up
        # to `maxsize` previously stored entries.
        Queue._init(self, maxsize)
        nodes = Access(self.events).get_entry(maxsize)
        for node in nodes:
            try:
                self.queue.append(Entry(node['raw_url'], self.reddit))
                self.events.on_adding_to_queue(node['raw_url'])
            except EntryError:
                # Stored URL no longer resolves to a valid entry.
                # TODO Remove old entry from DB
                pass
    def _put(self, url):
        # Queue.put() hook: enqueue only URLs that parse into a valid,
        # previously unseen Entry; report the outcome via events.
        try:
            entry = Entry(url, self.reddit)
            if self._is_unique(entry):
                self.events.on_adding_to_queue(url)
                self.queue.append(entry)
            else:
                self.events.on_not_adding_to_queue(url)
        except EntryError:
            self.events.on_not_adding_to_queue(url)
    def _get(self):
        # Queue.get() hook: FIFO order.
        return self.queue.popleft()
    def _is_unique(self, entry):
        # TODO Logic here to determine if new url found
        # Unseen in the in-memory queue, then also unseen in storage.
        if entry not in self.queue:
            return Access(self.events).is_unique_entry(entry)
        else:
            return False
|
admgrn/Switcharoo
|
scraper/scraper/entryqueue.py
|
Python
|
gpl-3.0
| 2,041
|
from config import Prefix
from sigma.core.blacklist import check_black
async def custom_command_detection(ev, message, args):
    """Respond to guild messages that invoke a server-defined custom command.

    Fires only for prefixed guild messages whose command word is not a
    built-in plugin command and whose source is not blacklisted. Sends
    the stored response text, if any, to the originating channel.
    """
    # Guard clauses: guild messages carrying the command prefix only.
    if not message.guild:
        return
    if not message.content.startswith(Prefix):
        return
    cmd = message.content[len(Prefix):].lower()
    # Built-in commands take precedence over custom ones.
    if cmd in ev.bot.plugin_manager.commands:
        return
    if check_black(ev.db, message):
        return
    try:
        custom_commands = ev.db.get_settings(message.guild.id, 'CustomCommands')
    except Exception:
        # No settings document for this guild yet: initialize an empty one.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        ev.db.set_settings(message.guild.id, 'CustomCommands', {})
        custom_commands = {}
    if cmd in custom_commands:
        response = custom_commands[cmd]
        await message.channel.send(response)
|
aurora-pro/apex-sigma
|
sigma/plugins/moderation/other/custom_command_detection.py
|
Python
|
gpl-3.0
| 805
|
"""This sample demonstrates some very simple lightning effects."""
import math
import d3d11
import d3d11x
from d3d11c import *
def heightCallback(x, z, byteIndex, data):
    """Map one raw heightmap byte to a world-space height (scale 0.03)."""
    scale = 0.03
    return scale * data[byteIndex]
class SampleLights(d3d11x.Frame):
    """Sample frame: seven animated point lights orbiting over a heightmap,
    each visualized by a small white sphere tinted with the light colour."""
    def onCreate(self):
        #Heightmap.
        #self.heightmap = d3d11x.HeightMap(self.device, None, (64, 64), heightCallback, (2, 1, 2), (8, 8), False)
        self.heightmap = d3d11x.HeightMap(self.device, d3d11x.getResourceDir("Textures", "heightmap.dds"),
            (64, 64), heightCallback, (2, 1, 2), (8, 8), False)
        self.heightmap.textureView = self.loadTextureView("ground-marble.dds")
        #Sphere mesh.
        meshPath = d3d11x.getResourceDir("Mesh", "sphere.obj")
        self.sphere = d3d11x.Mesh(self.device, meshPath)
        self.sphere.textureView = self.loadTextureView("misc-white.bmp")
    def createLights(self):
        """Return the current frame's lights as a list of (position, color)."""
        #Add 7 lights (maximum defined in 'Shared.fx').
        lights = []
        for i in range(1, 8):
            #Each light is little farther than the previous one.
            distance = i * 5
            lightTime = self.time * (i * 0.5)
            #Use sin() and cos() to create a nice little movement pattern.
            x = math.sin(lightTime) * distance
            z = math.cos(lightTime) * distance
            y = self.heightmap.getHeight(x, z)
            pos = d3d11.Vector(x, y + 1, z)
            #Set color (RGBA) (from 0.0 to 1.0). 30.0 is just a magic value which looks good.
            red = i / 30.0
            green = (7 - i) / 30.0
            color = (red, green, 0, 0)
            lights.append((pos, color))
        return lights
    def onRender(self):
        """Per-frame render: lit heightmap first, then one sphere per light."""
        #View- and projectionmatrix.
        view = self.createLookAt((-50, 25, -50), (0, 0, 0))
        projection = self.createProjection(45, 0.1, 300.0)
        lights = self.createLights()
        #First the heightmap.
        self.heightmap.setLights(lights)
        #Add some ambient lightning so that it is not so dark.
        self.heightmap.effect.set("lightAmbient", (0.5, 0.5, 0.5, 0))
        self.heightmap.render(d3d11.Matrix(), view, projection)
        #Then our "light spheres".
        self.sphere.setLights(lights)
        for light in lights:
            #World matrix.
            meshWorld = d3d11.Matrix()
            lightPos = light[0]
            #Add little to y to lift the sphere off the ground.
            meshWorld.translate((lightPos.x, lightPos.y + 1, lightPos.z))
            #Set ambient to light color.
            self.sphere.effect.set("lightAmbient", light[1])
            self.sphere.render(meshWorld, view, projection)
if __name__ == "__main__":
    # Create the sample window and enter the render loop.
    sample = SampleLights("Lights - DirectPython 11", __doc__)
    sample.mainloop()
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/directpy11/Samples/SampleLights.py
|
Python
|
gpl-3.0
| 2,818
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible metadata: module maturity and who supports it.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation rendered by ansible-doc. Fixes: the `to` option's
# description was a copy-paste of `from` ("Start of Node Block Range"),
# and policy_group's documented aliases did not match the argument_spec.
DOCUMENTATION = r'''
---
module: aci_switch_leaf_selector
short_description: Add a leaf Selector with Node Block Range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics
description:
- Add a leaf Selector with Node Block range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics.
- More information from the internal APIC class I(infra:LeafS), I(infra:NodeBlk), I(infra:RsAccNodePGrp) at
  U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
notes:
- This module is to be used with M(aci_switch_policy_leaf_profile)
  One first creates a leaf profile (infra:NodeP) and then creates an associated selector (infra:LeafS),
options:
  description:
    description:
    - The description to assign to the C(leaf)
  leaf_profile:
    description:
    - Name of the Leaf Profile to which we add a Selector.
    aliases: [ leaf_profile_name ]
  leaf:
    description:
    - Name of Leaf Selector.
    aliases: [ name, leaf_name, leaf_profile_leaf_name, leaf_selector_name ]
  leaf_node_blk:
    description:
    - Name of Node Block range to be added to Leaf Selector of given Leaf Profile
    aliases: [ leaf_node_blk_name, node_blk_name ]
  leaf_node_blk_description:
    description:
    - The description to assign to the C(leaf_node_blk)
  from:
    description:
    - Start of Node Block Range
    aliases: [ node_blk_range_from, from_range, range_from ]
  to:
    description:
    - End of Node Block Range
    aliases: [ node_blk_range_to, to_range, range_to ]
  policy_group:
    description:
    - Name of the Policy Group to be added to Leaf Selector of given Leaf Profile
    aliases: [ policy_group_name ]
  state:
    description:
    - Use C(present) or C(absent) for adding or removing.
    - Use C(query) for listing an object or multiple objects.
    choices: [ absent, present, query ]
    default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: adding a switch policy leaf profile selector associated Node Block range (w/ policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
policy_group: somepolicygroupname
state: present
- name: adding a switch policy leaf profile selector associated Node Block range (w/o policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
state: present
- name: Removing a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: absent
- name: Querying a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Module entry point.

    Builds the REST URL for an infraLeafS (leaf selector) under the given
    infraNodeP (leaf profile), then creates, deletes or queries it
    according to the requested state.
    """
    argument_spec = aci_argument_spec()
    argument_spec.update({
        'description': dict(type='str'),
        'leaf_profile': dict(type='str', aliases=['leaf_profile_name']),
        'leaf': dict(type='str', aliases=['name', 'leaf_name', 'leaf_profile_leaf_name', 'leaf_selector_name']),
        'leaf_node_blk': dict(type='str', aliases=['leaf_node_blk_name', 'node_blk_name']),
        'leaf_node_blk_description': dict(type='str'),
        # 'from'/'to' are reserved words in Python, so their values are
        # bound to the from_/to_ locals below.
        'from': dict(type='int', aliases=['node_blk_range_from', 'from_range', 'range_from']),
        'to': dict(type='int', aliases=['node_blk_range_to', 'to_range', 'range_to']),
        'policy_group': dict(type='str', aliases=['policy_group_name']),
        'state': dict(type='str', default='present', choices=['absent', 'present', 'query']),
    })
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # Which parameters are mandatory depends on the requested state.
        required_if=[
            ['state', 'absent', ['leaf_profile', 'leaf']],
            ['state', 'present', ['leaf_profile', 'leaf', 'leaf_node_blk', 'from', 'to']]
        ]
    )
    description = module.params['description']
    leaf_profile = module.params['leaf_profile']
    leaf = module.params['leaf']
    leaf_node_blk = module.params['leaf_node_blk']
    leaf_node_blk_description = module.params['leaf_node_blk_description']
    from_ = module.params['from']
    to_ = module.params['to']
    policy_group = module.params['policy_group']
    state = module.params['state']
    aci = ACIModule(module)
    # Leaf profile is the root object; the selector is nested beneath it.
    aci.construct_url(
        root_class=dict(
            aci_class='infraNodeP',
            aci_rn='infra/nprof-{0}'.format(leaf_profile),
            filter_target='eq(infraNodeP.name, "{0}")'.format(leaf_profile),
            module_object=leaf_profile
        ),
        subclass_1=dict(
            aci_class='infraLeafS',
            # NOTE: normal rn: leaves-{name}-typ-{type}, hence here hardcoded to range for purposes of module
            aci_rn='leaves-{0}-typ-range'.format(leaf),
            filter_target='eq(infraLeafS.name, "{0}")'.format(leaf),
            module_object=leaf,
        ),
        # NOTE: infraNodeBlk is not made into a subclass because there is a 1-1 mapping between node block and leaf selector name
        child_classes=['infraNodeBlk', 'infraRsAccNodePGrp']
    )
    aci.get_existing()
    if state == 'present':
        # Filter out module params with null values
        aci.payload(
            aci_class='infraLeafS',
            class_config=dict(
                descr=description,
                name=leaf,
            ),
            # Node block range and (optional) access policy group ride along
            # as children of the selector.
            child_configs=[
                dict(
                    infraNodeBlk=dict(
                        attributes=dict(
                            descr=leaf_node_blk_description,
                            name=leaf_node_blk,
                            from_=from_,
                            to_=to_,
                        )
                    )
                ),
                dict(
                    infraRsAccNodePGrp=dict(
                        attributes=dict(
                            tDn='uni/infra/funcprof/accnodepgrp-{0}'.format(policy_group),
                        )
                    )
                ),
            ],
        )
        # Generate config diff which will be used as POST request body
        aci.get_diff(aci_class='infraLeafS')
        # Submit changes if module not in check_mode and the proposed is different than existing
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()
# Standard Ansible module entry point.
if __name__ == "__main__":
    main()
|
tdtrask/ansible
|
lib/ansible/modules/network/aci/aci_switch_leaf_selector.py
|
Python
|
gpl-3.0
| 10,067
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import time
import numpy
import os
import pmt
from gnuradio import gr, gr_unittest
from gnuradio import blocks
class test_multiply_matrix_ff (gr_unittest.TestCase):
    """QA tests for blocks.multiply_matrix_ff: mixes N float input streams
    into M output streams through an M x N matrix A (Y = A * X)."""
    def setUp (self):
        self.tb = gr.top_block ()
        self.multiplier = None
    def tearDown (self):
        self.tb = None
        self.multiplier = None
    def run_once(self, X_in, A, tpp=gr.TPP_DONT, A2=None, tags=None, msg_A=None):
        """ Run the test for given input-, output- and matrix values.
        Every row from X_in is considered an input signal on a port. """
        # NOTE(review): msg_A is currently unused; the message-passing
        # test that would exercise it is commented out below.
        X_in = numpy.matrix(X_in)
        A_matrix = numpy.matrix(A)
        (N, M) = A_matrix.shape
        # One input port per row of X_in
        self.assertTrue(N == X_in.shape[0])
        # Calc expected
        Y_out_exp = numpy.matrix(numpy.zeros((M, X_in.shape[1])))
        self.multiplier = blocks.multiply_matrix_ff(A, tpp)
        if A2 is not None:
            # Exercise set_A(): expectations are computed from the new matrix
            self.multiplier.set_A(A2)
            A = A2
            A_matrix = numpy.matrix(A)
        for i in xrange(N):
            if tags is None:
                these_tags = ()
            else:
                these_tags = (tags[i],)
            self.tb.connect(blocks.vector_source_f(X_in[i].tolist()[0], tags=these_tags), (self.multiplier, i))
        sinks = []
        for i in xrange(M):
            sinks.append(blocks.vector_sink_f())
            self.tb.connect((self.multiplier, i), sinks[i])
        # Run and check
        self.tb.run()
        # Expected output: matrix product column by column
        for i in xrange(X_in.shape[1]):
            Y_out_exp[:,i] = A_matrix * X_in[:,i]
        Y_out = [list(x.data()) for x in sinks]
        if tags is not None:
            # Collect propagated tags per output port for the caller to inspect
            self.the_tags = []
            for i in xrange(M):
                self.the_tags.append(sinks[i].tags())
        self.assertEqual(list(Y_out), Y_out_exp.tolist())
    def test_001_t (self):
        """ Simplest possible check: N==M, unit matrix """
        X_in = (
            (1, 2, 3, 4),
            (5, 6, 7, 8),
        )
        A = (
            (1, 0),
            (0, 1),
        )
        self.run_once(X_in, A)
    def test_002_t (self):
        """ Switch check: N==M, flipped unit matrix """
        X_in = (
            (1, 2, 3, 4),
            (5, 6, 7, 8),
        )
        A = (
            (0, 1),
            (1, 0),
        )
        self.run_once(X_in, A)
    def test_003_t (self):
        """ Average """
        X_in = (
            (1, 1, 1, 1),
            (2, 2, 2, 2),
        )
        A = (
            (0.5, 0.5),
            (0.5, 0.5),
        )
        self.run_once(X_in, A)
    def test_004_t (self):
        """ Set """
        X_in = (
            (1, 2, 3, 4),
            (5, 6, 7, 8),
        )
        A1 = (
            (1, 0),
            (0, 1),
        )
        A2 = (
            (0, 1),
            (1, 0),
        )
        self.run_once(X_in, A1, A2=A2)
    def test_005_t (self):
        """ Tags """
        X_in = (
            (1, 2, 3, 4),
            (5, 6, 7, 8),
        )
        A = (
            (0, 1), # Flip them round
            (1, 0),
        )
        # Tags on the two inputs must follow the matrix mapping:
        # with one-to-one propagation, input i's tag appears on output i.
        tag1 = gr.tag_t()
        tag1.offset = 0
        tag1.key = pmt.intern("in1")
        tag1.value = pmt.PMT_T
        tag2 = gr.tag_t()
        tag2.offset = 0
        tag2.key = pmt.intern("in2")
        tag2.value = pmt.PMT_T
        self.run_once(X_in, A, tpp=gr.TPP_ONE_TO_ONE, tags=(tag1, tag2))
        self.assertTrue(pmt.equal(tag1.key, self.the_tags[0][0].key))
        self.assertTrue(pmt.equal(tag2.key, self.the_tags[1][0].key))
    #def test_006_t (self):
        #""" Message passing """
        #X_in = (
        #(1, 2, 3, 4),
        #(5, 6, 7, 8),
        #)
        #A1 = (
        #(1, 0),
        #(0, 1),
        #)
        #msg_A = (
        #(0, 1),
        #(1, 0),
        #)
        #self.run_once(X_in, A1, msg_A=msg_A)
# Run the QA suite when executed directly.
if __name__ == '__main__':
    #gr_unittest.run(test_multiply_matrix_ff, "test_multiply_matrix_ff.xml")
    gr_unittest.run(test_multiply_matrix_ff)
|
douggeiger/gnuradio
|
gr-blocks/python/blocks/qa_multiply_matrix_ff.py
|
Python
|
gpl-3.0
| 4,810
|