code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
class ArchTracker(object):
    """Placeholder for architecture tracking; no behavior is implemented yet."""
    pass
|
MrKiven/REST_ARCH
|
rest_arch/tracker.py
|
Python
|
mit
| 62
|
"""
Django settings for classics project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')z07pcwp**51-493z*bkc3=xwqq=6ogm^^5yi$eipltb$cab69'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.middleware.cache.UpdateCacheMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'classics.urls'
WSGI_APPLICATION = 'classics.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'classics',
'USER': 'classics',
'PASSWORD': '111111',
'HOST': 'localhost',
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'classics/static'),
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'classics/templates'),
)
|
caterpillar/classics
|
classics/settings.py
|
Python
|
mit
| 2,470
|
from json import JSONEncoder


class ModelParaJson(JSONEncoder):
    """JSON encoder that serializes arbitrary objects via their attribute dict."""

    def default(self, obj):
        # Fall back to the instance's attribute dictionary for any object the
        # standard encoder does not know how to serialize.
        return vars(obj)
|
AEDA-Solutions/matweb
|
backend/Framework/ModelParaJson.py
|
Python
|
mit
| 115
|
from setuptools import setup

# Package metadata for the waveasr distribution.
setup(name="waveasr",
      author="Patrick Tesh",
      description="Waveform-based Automatic Speech Recognition with Kaldi and Tensorflow",
      url="https://github.com/PaddyT/waveform-asr",
      packages=['waveasr'],
      # Bug fix: setuptools requires `version` to be a string; passing the
      # float 0.1 produces invalid/blank metadata on modern setuptools.
      version="0.1")
|
PaddyT/waveform-asr
|
setup.py
|
Python
|
mit
| 271
|
#!K:\ArcherVMPeridot\htdocs\Scripts\python.exe
# $Id: rst2odt.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <dkuhlman@rexx.com>
# Copyright: This module has been placed in the public domain.
"""
A front end to the Docutils Publisher, producing OpenOffice documents.
"""
# Bug fix: the file contained unresolved git merge-conflict markers
# (<<<<<<< / ======= / >>>>>>>) wrapping three byte-identical copies of this
# script — a syntax error at import time. Resolved to the single copy.
import sys
try:
    import locale
    # Honor the user's locale for language-dependent output; best effort only.
    locale.setlocale(locale.LC_ALL, '')
except:
    pass
from docutils.core import publish_cmdline_to_binary, default_description
from docutils.writers.odf_odt import Writer, Reader
description = ('Generates OpenDocument/OpenOffice/ODF documents from '
               'standalone reStructuredText sources. ' + default_description)
writer = Writer()
reader = Reader()
output = publish_cmdline_to_binary(reader=reader, writer=writer,
                                   description=description)
|
ArcherSys/ArcherSys
|
Scripts/rst2odt.py
|
Python
|
mit
| 2,516
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Random geometry utilities useful for kinematics and interpolation.
Author: tennessee
Created on: 2017-03-21
Copyright 2017, Tennessee Carmel-Veilleux.
"""
from __future__ import print_function
from numpy import sqrt, pi, sin, cos, floor
from numpy import arctan2 as atan2
import numpy as np
from collections import Sequence
def get_intersection_2circ(p0, r0, p1, r1):
    """Compute the intersection points of two circles.

    :param p0: Center (x, y) of the first circle
    :param r0: Radius of the first circle
    :param p1: Center (x, y) of the second circle
    :param r1: Radius of the second circle
    :return: Tuple of the two intersection points (identical for tangent
        circles), or [] when the circles are separate, nested, or coincident.
    """
    x0, y0 = tuple(p0)
    x1, y1 = tuple(p1)
    # Distance between the two centers.
    d = sqrt(((x0 - x1) ** 2 + (y0 - y1) ** 2))
    if d > (r0 + r1):
        # Circles too far apart to touch.
        return []
    elif d < abs(r0 - r1):
        # One circle lies entirely inside the other.
        return []
    elif d == 0 and r0 == r1:
        # Coincident circles: infinitely many intersections.
        return []
    # Distance from p0 to the chord midpoint (a), then the half-chord (h).
    a = ((r0 ** 2) - (r1 ** 2) + (d ** 2)) / (2.0 * d)
    h = sqrt((r0 ** 2) - (a ** 2))
    # Chord midpoint, then offset perpendicular to the center line by +/- h.
    mx = x0 + (a * (x1 - x0)) / d
    my = y0 + (a * (y1 - y0)) / d
    ix_1 = mx + (h * (y1 - y0)) / d
    ix_2 = mx - (h * (y1 - y0)) / d
    iy_1 = my - (h * (x1 - x0)) / d
    iy_2 = my + (h * (x1 - x0)) / d
    return ((ix_1, iy_1), (ix_2, iy_2))
def get_circ_center_2pts_r(p1, p2, r):
    """
    Find the centers of the two circles that share two points p1/p2 and a radius.
    From algorithm at http://mathforum.org/library/drmath/view/53027.html. Adapted from version at
    https://rosettacode.org/wiki/Circles_of_given_radius_through_two_points#Python.
    :param p1: First point , tuple (x, y)
    :param p2: Second point, tuple (x, y)
    :param r: Radius of circle
    :return: a list of 2 points that are centers of circles of radius r sharing p1/p2
    :raises ValueError: for r == 0, coincident points, or points farther
        apart than the circle diameter
    """
    if r == 0.0:
        raise ValueError('No solution due to no radius')
    (x1, y1), (x2, y2) = tuple(p1), tuple(p2)
    if p1 == p2:
        # Bug fix: typo "numbre" in the error message.
        raise ValueError('Infinite number of solutions')
    # Chord vector components (dy is deliberately y1 - y2: it is used to build
    # the perpendicular-bisector direction below).
    dx = x2 - x1
    dy = y1 - y2
    # Distance between points (chord length).
    q = sqrt(dx ** 2 + dy ** 2)
    if q > (2.0 * r):
        raise ValueError('Too much distance between points to fit within radius')
    # Halfway point of the chord.
    x3 = (x1 + x2) / 2.0
    y3 = (y1 + y2) / 2.0
    # Distance along the mirror (perpendicular bisector) line from the chord
    # midpoint to each center. Fix: `d` was previously computed but unused,
    # while the same sqrt was recomputed four times below.
    d = sqrt(r ** 2 - ((q / 2.0) ** 2))
    c1x = x3 + d * dy / q
    c1y = y3 + d * dx / q
    c2x = x3 - d * dy / q
    c2y = y3 - d * dx / q
    return ((c1x, c1y), (c2x, c2y))
def centroid_extents(all_xy):
    """
    Find the centroid and bounding box of a list of cloud of points
    :param all_xy: List of arrays Nx2 (x,y) pairs; a single array is also
        accepted and wrapped in a list. NOTE(review): the first element must
        already be an ndarray (it is inspected via .shape before conversion).
    :return: tuple (cx, cy, width, height, min_x, max_x, min_y, max_y) of centroid/bounding box
    :raises ValueError: when all_xy is empty
    """
    if len(all_xy) == 0:
        raise ValueError("Cannot find extents of nothing !")
    if not isinstance(all_xy, Sequence):
        all_xy = [all_xy]
    # Seed min/max from the first group; handles both 1-D (single point)
    # and 2-D (N x 2) arrays.
    if len(all_xy[0].shape) == 1:
        min_x = all_xy[0][0]
        max_x = all_xy[0][0]
        min_y = all_xy[0][1]
        max_y = all_xy[0][1]
    else:
        min_x = all_xy[0][0, 0]
        max_x = all_xy[0][0, 0]
        min_y = all_xy[0][0, 1]
        max_y = all_xy[0][0, 1]
    cx = 0.0
    cy = 0.0
    num_x = 0
    num_y = 0
    for group in all_xy:
        if isinstance(group, Sequence):
            group = np.asarray(group, float)
        if len(group.shape) == 1:
            # Handle input being a list of one-dimensional, 2-element arrays
            min_x = min(min_x, group[0])
            max_x = max(max_x, group[0])
            min_y = min(min_y, group[1])
            max_y = max(max_y, group[1])
            cx += group[0]
            cy += group[1]
            num_x += 1
            num_y += 1
        else:
            min_x = min(min_x, np.amin(group[:, 0]))
            max_x = max(max_x, np.amax(group[:, 0]))
            min_y = min(min_y, np.amin(group[:, 1]))
            max_y = max(max_y, np.amax(group[:, 1]))
            cx += np.sum(group[:, 0])
            cy += np.sum(group[:, 1])
            # Bug fix: np.alen() was removed in NumPy 1.23; shape[0] is the
            # row count and works on all NumPy versions.
            num_x += group.shape[0]
            num_y += group.shape[0]
    cx /= num_x
    cy /= num_y
    width = max_x - min_x
    height = max_y - min_y
    return (cx, cy, width, height, min_x, max_x, min_y, max_y)
def distance(p1, p2):
    """
    Distance between two points p1 and p2
    :param p1: First point
    :param p2: Second point
    :return: The euclidean distance between the two points
    """
    dx = p2[0] - p1[0]
    dy = p2[1] - p1[1]
    return sqrt(dx * dx + dy * dy)
def dotp2(v1, v2):
    """2D dot product of vectors v1 and v2."""
    ax, ay = v1[0], v1[1]
    bx, by = v2[0], v2[1]
    return ax * bx + ay * by
def crossp2(v1, v2):
    """Z component of the 2D cross product v1 x v2 (signed area of the parallelogram)."""
    return v1[0] * v2[1] - v1[1] * v2[0]
def angle_between_signed(v1, v2):
    """Signed angle in radians from v1 to v2 (positive == counter-clockwise)."""
    # Inlined 2D cross and dot products (same math as crossp2/dotp2).
    cross = (v1[0] * v2[1]) - (v1[1] * v2[0])
    dot = (v1[0] * v2[0]) + (v1[1] * v2[1])
    return atan2(cross, dot)
def find_closest_point(all_xy, p):
    """Return (index, distance, point) of the entry in all_xy closest to p.

    Ties keep the earliest index (strict less-than comparison).
    """
    best_idx, best_dist = 0, distance(all_xy[0], p)
    for i, (x, y) in enumerate(all_xy):
        d = distance((x, y), p)
        if d < best_dist:
            best_idx, best_dist = i, d
    return best_idx, best_dist, all_xy[best_idx]
def circle_empty(all_xy, center, radius):
    """Return True when no point of all_xy lies strictly inside the circle.

    Points on (or within radius/1e6 of) the circumference do not count as
    being inside.
    """
    eps = radius / 1e6
    # Empty iff no point falls strictly inside the slightly-shrunk circle.
    return not any(distance(pt, center) < (radius - eps) for pt in all_xy)
def sorted_by_manhattan_distance(all_xy, p, within):
    """Return the points of all_xy whose Manhattan (L1) distance to p is at
    most `within`, sorted by that distance (stable sort keeps input order on ties)."""
    def l1(q):
        return abs(q[0] - p[0]) + abs(q[1] - p[1])
    nearby = [q for q in all_xy if l1(q) <= within]
    nearby.sort(key=l1)
    return nearby
def concave_hull_wheel(all_xy, wheel_radius):
    """Trace a concave hull of a point cloud by rolling a "wheel" (circle) of
    the given radius clockwise around the outside of the points.

    :param all_xy: Indexable collection of (x, y) points
    :param wheel_radius: Radius of the rolling circle; smaller radii follow
        concavities more tightly
    :return: tuple (outline_points, circle_centers) as float64 arrays; the
        outline is closed (its first point is appended again at the end)
    :raises ValueError: if no next hull point can be found (diverged)
    """
    # Get centroid and bounding box of points.
    # Bug fix: the unpacking previously bound max_y twice and never max_x.
    cx, cy, width, height, min_x, max_x, min_y, max_y = centroid_extents(all_xy)
    # Offset all points relative to the centroid
    centered_all_xy = []
    for x, y in all_xy:
        centered_all_xy.append((x - cx, y - cy))
    # Determine starting segment, which is outside shape (for sure)
    r = 2.0 * max(width, height)
    theta = 0
    start_point = (r * cos(theta), r * sin(theta))
    # Find point closest to tip of segment at 0, it becomes our actual starting point
    min_idx, min_dist, _ = find_closest_point(centered_all_xy, start_point)
    start_point = tuple(all_xy[min_idx])
    print("Starting point: %s" % repr(start_point))
    outline_points = [start_point]
    circle_centers = []
    last_point = start_point
    done = False
    while not done:
        # Find next closest points of a rolling circle where the circle advances clockwise (negative cross product)
        min_idx = 0
        min_dist = r
        min_circle_center = (0, 0)
        found = False
        # Only points within 4 wheel radii (Manhattan) can be reachable by the wheel.
        candidates = sorted_by_manhattan_distance(all_xy, last_point, within=(4.0 * wheel_radius))
        for idx, candidate in enumerate(candidates):
            candidate = tuple(candidate)
            # Skip last point (starting point of current circle)
            if candidate == last_point:
                continue
            dist = distance(last_point, candidate)
            # Point too far to fit on circle edge
            if dist > (2.0 * wheel_radius):
                continue
            try:
                cir_candidates = get_circ_center_2pts_r(last_point, candidate, wheel_radius)
            except ValueError:
                # Cannot find two circles
                continue
            # Try both candidates to find a circle that is empty
            empty_circle_center = None
            num_empty = 0
            for circle_center in cir_candidates:
                if circle_empty(candidates, circle_center, wheel_radius):
                    num_empty += 1
                    empty_circle_center = circle_center
            if num_empty == 0:
                # No empty circles: not a circle rolling on the outside
                continue
            elif num_empty == 2:
                # Should never see two empty circles
                continue
            # Find rotation of "wheel" by tracing vector v1 from center of circle to last point, and v2 from center
            # of circle to candidate. If angle is negative, wheel is rotating counter-clockwise around points.
            # Imagine trying to see if a "spoke" on a wheel turned clockwise while rotating counter-clockwise around
            # a circle.
            v1 = (last_point[0] - empty_circle_center[0], last_point[1] - empty_circle_center[1])
            v2 = (candidate[0] - empty_circle_center[0], candidate[1] - empty_circle_center[1])
            if angle_between_signed(v1, v2) >= 0:
                continue
            # We got here, found a candidate point! Determine if it is the minimum!
            if dist < min_dist:
                min_dist = dist
                min_idx = idx
                min_circle_center = empty_circle_center
                found = True
        if not found:
            raise ValueError("Computation diverged and no candidate was found. Should not happen!")
        # Got through all the points. New outline point is on concave hull
        outline_point = tuple(candidates[min_idx])
        print("Found outline point (%.2f, %.2f)" % (outline_point[0], outline_point[1]))
        if outline_point in outline_points:
            # Got back to start, we're done!
            done = True
            # Append starting point again to close the path
            outline_points.append(outline_point)
        else:
            circle_centers.append(min_circle_center)
            outline_points.append(outline_point)
            last_point = outline_point
    return np.asarray(outline_points, dtype="float64"), np.asarray(circle_centers, dtype="float64")
def split_line_into_segments(p1, p2, seg_length):
    """Split the segment p1->p2 into points spaced seg_length apart.

    :param p1: Start point (x, y)
    :param p2: End point (x, y)
    :param seg_length: Desired spacing between consecutive points
    :return: List of (x, y) points from p1 to p2 inclusive; lines shorter
        than seg_length yield just [p1, p2]
    """
    segments = []
    p1x, p1y = p1
    p2x, p2y = p2
    line_length = sqrt((p2x - p1x) ** 2.0 + (p2y - p1y) ** 2.0)
    intermediate_segments = int(floor(float(line_length) / float(seg_length)))
    if intermediate_segments == 0:
        intermediate_segments = 1
    # Bug fix: xrange() does not exist on Python 3; this file already targets
    # Python 3 compatibility (from __future__ import print_function).
    for seg_idx in range(intermediate_segments):
        # Fraction of the way along the line for this sample.
        k = ((seg_idx * seg_length) / line_length)
        x = p1x + k * (p2x - p1x)
        y = p1y + k * (p2y - p1y)
        segments.append((x, y))
    segments.append((p2x, p2y))
    return segments
def get_scale_2d(scale):
    """
    Generate a 2D homogenous coordinates transformation matrix for scaling
    :param scale: a scalar of the scale factor to apply: < 1.0 shrinks, > 1.0 grows, 1.0 is identity
    :return: A 3x3 transformation matrix applying the given scaling
    """
    # Uniform scale is a diagonal matrix; the homogeneous coordinate stays 1.
    return np.diag([scale, scale, 1.0])
def get_translation_2d(translation):
    """
    Generate a 2D homogenous coordinates transformation matrix for a translation
    :param translation: (x,y) pair of translation coordinates
    :return: A 3x3 transformation matrix applying the given translation
    """
    tx, ty = translation[0], translation[1]
    # Identity rotation/scale with the offset in the last column.
    return np.array([[1.0, 0.0, tx],
                     [0.0, 1.0, ty],
                     [0.0, 0.0, 1.0]])
def get_rotation_2d(theta, around=(0.0, 0.0)):
    """
    Generate a 2D homogenous coordinates transformation matrix for a rotation around a point
    :param theta: Rotation angle in radians (positive == CCW)
    :param around: Point around which to affect the center of rotation (default (0, 0))
    :return: A 3x3 transformation matrix applying the given rotation
    """
    c = cos(theta)
    s = sin(theta)
    ax, ay = around[0], around[1]
    # Translation column that shifts the rotation center to `around`.
    tx = ax * (1.0 - c) + (ay * s)
    ty = ay * (1.0 - c) - (ax * s)
    return np.array([[c, -s, tx],
                     [s, c, ty],
                     [0.0, 0.0, 1.0]])
def apply_transform_2d(points, transform):
    """
    Apply a 2D transformation matrix (3x3, column-vector convention) to an
    array of 2D points. Points are given as row vectors: each row is (x, y).
    :param points: Array of row vectors of 2D points to transform
    :param transform: 3x3 transformation matrix to apply
    :return: Transformed array of row vectors
    """
    # Augment each point with a homogeneous 1, as column vectors (one per column).
    ones = np.ones((points.shape[0], 1))
    homogeneous = np.column_stack((points, ones)).transpose()
    # 3x3 * 3xn = 3xn; back to row vectors, dropping the homogeneous coordinate.
    return np.dot(transform, homogeneous).transpose()[:, 0:2]
def list_of_tuples(a_2d_array):
    """Convert each row of a 2D array into a tuple, returning a list of tuples."""
    return list(map(tuple, a_2d_array))
|
tcarmelveilleux/CheapDrawBot
|
Software/utils/geometry.py
|
Python
|
mit
| 13,000
|
import re
import os
import sys
from Bio import Seq
from Bio import SeqIO
from Bio import SeqUtils
import numpy as np
import pandas as pd
import cairi
from multiprocessing import Pool
import matplotlib.pyplot as plt
#MAYBE WE DONT NEED THAT ANYMORE ...
# # STUPID FIX TO AVOID OLDER PANDAS HERE ...
# # PYTHONPATH seems to be ignored by th ipython ...
# sys.path.insert(1,"/home/venevs/.local/lib/python2.7/site-packages/")
# # this is needed just to avoid BUG in pandas (float indexing related: https://github.com/pydata/pandas/issues/5824)
# # when tsking quantile(q=0.75) ...
# import scipy.stats as stat
# NOTE(review): this is a Python 2 script (print statement below); it will not
# run unmodified under Python 3.
# Minimum number of ribosomal-protein CDSs an organism must have before its
# ribosomal codon-usage statistics are trusted for CAI computation.
RIBO_LIMIT = 24
path = os.path.join(os.path.expanduser('~'),'GENOMES_BACTER_RELEASE69/genbank')
dat = pd.read_csv(os.path.join(path,"complete_CDS_Rnd_Equal.dat"))
plot_path = os.path.join(os.path.expanduser('~'),'GENOMES_BACTER_RELEASE69/plots')
# first: identify ribosomal proteins ...
# here is our heuristic way to check if it's a ribosomal protein or not, given corresponding gene's product description ...
ribo = re.compile("ribosomal.+protein",re.I)
# Exclude products that merely mention "ribosomal protein" (e.g. acetyltransferases).
ribo_check = lambda line: bool(ribo.search(line)) if not('transferase' in line) else False
dat['ribosomal'] = dat['product'].apply(ribo_check)
# based on these identified proteins, then calculate CAI ....
# group the data by the GenomicId ...
orgs = dat.groupby('GenomicID')
genom_id = orgs.groups.keys()
# (GenomicID, number of ribosomal CDSs) per organism.
ribo_counts = [(idx,orgs.get_group(idx)['ribosomal'].nonzero()[0].size) for idx in genom_id]
ribo_cai_info = pd.DataFrame(ribo_counts,columns=['GenomicID','ribo_count'])
# some lists to describe organism's CAI distribution features ...
percentile = []
median = []
mean = []
sigma = []
idx_for_ribo = []
ribo_count_for_df = []
#
pid_cai_list = []
# Per organism: build a codon index from its ribosomal CDSs, then score every
# CDS of that organism with CAI against that index.
for idx,ribo_count in ribo_cai_info.itertuples(index=False):
    if ribo_count >= RIBO_LIMIT:
        cds_dat = orgs.get_group(idx)
        ribo_cds = cds_dat[cds_dat['ribosomal']]['cDNA_rnd'] # cDNA_rnd of ribosomal proteins ...
        codon_usage = cairi.count_codons(ribo_cds)
        codon_index = cairi.generate_codon_index(codon_usage,genetic_table=list(cds_dat['table'])[0]) # fix that ...
        # we need to track index from 'dat', as there are some stupid duplications ...
        pid_cai = pd.DataFrame(((dat_idx,pid,cairi.cai_for_gene(sequence,codon_index)) for dat_idx,pid,sequence in cds_dat[['pid','cDNA_rnd']].itertuples()),columns=['dat_idx','pid','CAI'])
        pid_cai = pid_cai.set_index(keys='dat_idx')
        # characterize CAI distribution for a given organism ...
        local_mean = pid_cai['CAI'].mean()
        local_median = pid_cai['CAI'].median()
        local_sigma = pid_cai['CAI'].std()
        mean.append(local_mean)
        median.append(local_median)
        sigma.append(local_sigma)
        idx_for_ribo.append(idx)
        ribo_count_for_df.append(ribo_count)
        #
        local_ribo_indexes = cds_dat['ribosomal'].nonzero()[0]
        local_ribo = pid_cai.iloc[local_ribo_indexes].reset_index(drop=True)
        # let's also check our t.o. score
        # Translational optimization criterion: the bottom quartile of the
        # ribosomal-CAI distribution must clear the top quartile of the
        # genome-wide CAI distribution.
        qH_all = pid_cai['CAI'].quantile(q=0.75)
        qL_rib = local_ribo['CAI'].quantile(q=0.25)
        percentile.append( bool(qL_rib >= qH_all) )
        #
        # OPTIONAL HISTOGRAM PLOTTING ...
        # # # let's also plot histograms ...
        # # plt.clf()
        # # plt.hist(pid_cai['CAI'],range=(0,1),bins=100,color='blue',alpha=1.0)
        # # plt.hist(local_ribo['CAI'],range=(0,1),bins=25,color='red',alpha=0.8)
        # # plt.title("%s, CAI median: %.2f, CoV %.3f, t.o. %s"%(idx,local_median,local_sigma/local_mean,str(qL_rib >= qH_all)))
        # # plt.savefig(os.path.join(plot_path,idx+".pdf"))
        #
        pid_cai_list.append( pid_cai )
# ttt = ["30S ribosomal subunit protein S9", "ribosomal-protein-alanine acetyltransferase", "Ribosomal protein L33", "ribosomal subunit interface protein", "Ribosomal protein S10", "ribosomal 5S rRNA E-loop binding protein Ctc/L25/TL5", "ribosomal-protein-alanine acetyltransferase", "16S ribosomal RNA methyltransferase KsgA/Dim1 family protein", "30S ribosomal proteinS16", "Acetyltransferases including N-acetylases of ribosomal proteins"]
# Organism-level summary table of the per-organism CAI distributions.
org_cai_descr = {"GenomicID":idx_for_ribo,"ribo_count":ribo_count_for_df,"TrOp":percentile,"median_cai":median,"mean_cai":mean,"sigma_cai":sigma}
org_cai_df = pd.DataFrame(org_cai_descr)
pid_cai_df = pd.concat(pid_cai_list)
#
# # before any mergings ...
# ###########################################
# # MERGE BY THE INDEX .... TO BE CONTINUED ...
# ###########################################
# # 1) merging
# yyy = dat.join(pid_cai_df,lsuffix='',rsuffix='_wnans')#
# # 2) merging orther way ...
# xxx = pd.concat([dat,pid_cai_df],axis=1)
# #
# indexes = (xxx.CAI != yyy.CAI).nonzero()[0]
# # beware (np.nan==np.nan) is False ...
# # so there are ~1200 indexes ...
# # TO BE CONTINUED ...
# # merging is done, outputtting and that's it ...
dat_with_cai = dat.join(pid_cai_df,lsuffix='',rsuffix='_wnans')
# then simple check ...
# all instances, where (pid != pid_wnans) must be NULL ...
if dat_with_cai.pid_wnans[dat_with_cai.pid!=dat_with_cai.pid_wnans].isnull().all():
    pass
else:
    print "ACHTUNG!!! All pid_wnans items whose (pid_wnans!=pid), must be NULL. Check"
########### let's try joining the 'org_cai_df' to the dat_with_cai as well, so that we'd be able to easily grab Trans.Optimized
########### organisms ...
dat_with_cai_trop = pd.merge(dat_with_cai, org_cai_df, how='left', on='GenomicID')
# apparently 'join' is a legacy procedure, so using 'merge' is encouraged instead!
# http://stackoverflow.com/questions/10114399/pandas-simple-join-not-working
# output CDS info with the calculated CAI ...
dat_with_cai_trop[['GenomicID','cDNA_rnd','fid','pid','product','protein','status','table','ribosomal','CAI','TrOp']].to_csv(os.path.join(path,"complete_CDS_CAI_DNA_Rnd_Equal.dat"),index=False)
# ['GenomicID', 'cDNA_rnd', 'fid', 'pid', 'product', 'protein', 'status', 'table', 'ribosomal', 'pid_wnans', 'CAI']
# ['GenomicID', 'cDNA_rnd', 'fid', 'pid', 'product', 'protein', 'status', 'table', 'ribosomal', 'CAI']
# #
# # some characterization plotting ...
# plt.clf()
# org_cai_trop = org_cai_df[org_cai_df["TrOp"]]
# org_cai_notrop = org_cai_df[~org_cai_df["TrOp"]]
# trop_dots = plt.plot(org_cai_trop.median_cai,np.true_divide(org_cai_trop.sigma_cai,org_cai_trop.mean_cai),'ro',label='translational optimization')
# notrop_dots = plt.plot(org_cai_notrop.median_cai,np.true_divide(org_cai_notrop.sigma_cai,org_cai_notrop.mean_cai),'bo',alpha=0.8,label='No translational optimization')
# ax = plt.gca()
# ax.set_title("Organism level CAI: t.o. criteria comparison (Margalit vs ours)")
# ax.set_xlabel("median CAI")
# ax.set_ylabel("CAI coefficient of variation") # using plain sigma works worse ...
# ax.legend(loc='best')
# plt.savefig(os.path.join(path,"org_cai_to.pdf"))
# #
# #
# plt.clf()
# size_dot = lambda x: 10 if 50<x<60 else 120
# plt.scatter(x=org_cai_df.median_cai,y=np.true_divide(org_cai_df.sigma_cai,org_cai_df.mean_cai),s=org_cai_df.ribo_count.apply(size_dot),c="blue",edgecolor=None)
# ax = plt.gca()
# ax.set_title("Organism level CAI: effect of # ribosomal proteins (no effect)")
# ax.set_xlabel("median CAI")
# ax.set_ylabel("CAI coefficient of variation") # using plain sigma works worse ...
# plt.savefig(os.path.join(path,"org_cai_ribonum.pdf"))
|
sergpolly/Thermal_adapt_scripts
|
BOOTSTRAPS/Equal_Cherry_extract_analyse_CAI_Rnd.py
|
Python
|
mit
| 7,375
|
"""
Set of "markup" template filters for Django. These filters transform plain text
markup syntaxes to HTML; currently there is support for:
* Textile, which requires the PyTextile library available at
http://loopcore.com/python-textile/
* Markdown, which requires the Python-markdown library from
http://www.freewisdom.org/projects/python-markdown
* reStructuredText, which requires docutils from http://docutils.sf.net/
"""
import warnings
from django import template
from django.conf import settings
from django.utils.encoding import smart_text, force_text
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def textile(value):
    """Render *value* as Textile markup.

    When the textile library is missing: raise in DEBUG, otherwise pass the
    text through unrendered.
    """
    try:
        import textile
    except ImportError:
        if settings.DEBUG:
            raise template.TemplateSyntaxError("Error in 'textile' filter: The Python textile library isn't installed.")
        return force_text(value)
    rendered = textile.textile(smart_text(value))
    return mark_safe(force_text(rendered, encoding='utf-8'))
@register.filter(is_safe=True)
def markdown(value, arg=''):
    """
    Runs Markdown over a given value, optionally using various
    extensions python-markdown supports.
    Syntax::
    {{ value|markdown:"extension1_name,extension2_name..." }}
    To enable safe mode, which strips raw HTML and only returns HTML
    generated by actual Markdown syntax, pass "safe" as the first
    extension in the list.
    If the version of Markdown in use does not support extensions,
    they will be silently ignored.
    """
    try:
        import markdown
    except ImportError:
        # Library missing: raise in DEBUG, otherwise pass text through unrendered.
        if settings.DEBUG:
            raise template.TemplateSyntaxError("Error in 'markdown' filter: The Python markdown library isn't installed.")
        return force_text(value)
    else:
        # markdown.version was first added in 1.6b. The only version of markdown
        # to fully support extensions before 1.6b was the shortlived 1.6a.
        python_markdown_deprecation = ("The use of Python-Markdown "
            "< 2.1 in Django is deprecated; please update to the current version")
        if hasattr(markdown, 'version'):
            # A leading "safe" pseudo-extension enables safe mode and is not
            # forwarded to python-markdown.
            extensions = [e for e in arg.split(",") if e]
            if len(extensions) > 0 and extensions[0] == "safe":
                extensions = extensions[1:]
                safe_mode = True
            else:
                safe_mode = False
            # Unicode support only in markdown v1.7 or above. Version_info
            # exist only in markdown v1.6.2rc-2 or above.
            # NOTE(review): markdown_vers may be None here; "None < (1, 7)"
            # works on Python 2 but raises TypeError on Python 3 — confirm
            # the supported Python/markdown version matrix.
            markdown_vers = getattr(markdown, "version_info", None)
            if markdown_vers < (1,7):
                warnings.warn(python_markdown_deprecation, DeprecationWarning)
                return mark_safe(force_text(markdown.markdown(smart_text(value), extensions, safe_mode=safe_mode)))
            else:
                if markdown_vers >= (2,1):
                    # Markdown >= 2.1: also disable attribute parsing in safe mode.
                    if safe_mode:
                        return mark_safe(markdown.markdown(force_text(value), extensions, safe_mode=safe_mode, enable_attributes=False))
                    else:
                        return mark_safe(markdown.markdown(force_text(value), extensions, safe_mode=safe_mode))
                else:
                    warnings.warn(python_markdown_deprecation, DeprecationWarning)
                    return mark_safe(markdown.markdown(force_text(value), extensions, safe_mode=safe_mode))
        else:
            # Ancient markdown without a version attribute: extensions unsupported.
            warnings.warn(python_markdown_deprecation, DeprecationWarning)
            return mark_safe(force_text(markdown.markdown(smart_text(value))))
@register.filter(is_safe=True)
def restructuredtext(value):
    """Render *value* as reStructuredText via docutils.

    When docutils is missing: raise in DEBUG, otherwise pass the text through
    unrendered.
    """
    try:
        from docutils.core import publish_parts
    except ImportError:
        if settings.DEBUG:
            raise template.TemplateSyntaxError("Error in 'restructuredtext' filter: The Python docutils library isn't installed.")
        return force_text(value)
    overrides = getattr(settings, "RESTRUCTUREDTEXT_FILTER_SETTINGS", {})
    parts = publish_parts(source=smart_text(value), writer_name="html4css1",
                          settings_overrides=overrides)
    return mark_safe(force_text(parts["fragment"]))
|
zeraien/odb_shared_django
|
templatetags/markup.py
|
Python
|
mit
| 4,253
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
def get_field_types():
    """Get a dict with all registration field types"""
    # Imported lazily to avoid circular imports at module load time.
    from .simple import (TextField, NumberField, TextAreaField, CheckboxField, DateField, BooleanField, PhoneField,
                         CountryField, FileField, EmailField)
    from .choices import SingleChoiceField, AccommodationField, MultiChoiceField
    field_classes = (TextField, NumberField, TextAreaField, SingleChoiceField, CheckboxField,
                     DateField, BooleanField, PhoneField, CountryField, FileField, EmailField,
                     AccommodationField, MultiChoiceField)
    return {cls.name: cls for cls in field_classes}
|
mic4ael/indico
|
indico/modules/events/registration/fields/__init__.py
|
Python
|
mit
| 911
|
#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
from subprocess import check_output
def main():
    """Read DNS-seeder output on stdin and print pnSeed[] C array initializers.

    Accepts lines containing dotted-quad IPv4 endpoints on port 4429; emits up
    to NSEEDS addresses as 32-bit hex literals, 8 per row.
    """
    lines = sys.stdin.readlines()
    ips = []
    # Only accept IPv4 endpoints on the expected P2P port (4429).
    pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):4429")
    for line in lines:
        m = pattern.match(line)
        if m is None:
            continue
        ip = 0
        # Pack octets little-endian: the first octet becomes the low byte.
        for i in range(0,4):
            ip = ip + (int(m.group(i+1)) << (8*(i)))
        if ip == 0:
            continue
        ips.append(ip)
    for row in range(0, min(NSEEDS,len(ips)), 8):
        # NOTE(review): Python 2 print statement — this script is not Python 3
        # compatible as written.
        print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
    main()
|
DOGECOINCASH/DOGEC
|
contrib/seeds/makeseeds.py
|
Python
|
mit
| 708
|
#!/usr/bin/python
DOCUMENTATION = '''
---
module: elasticbeanstalk_env
short_description: create, update, delete beanstalk application environments
description:
- creates, updates, deletes beanstalk environments.
options:
app_name:
description:
- name of the beanstalk application you wish to manage the versions of
required: true
default: null
env_name:
description:
- unique name for the deployment environment. Must be from 4 to 23 characters in length. The name can contain only letters, numbers, and hyphens. It cannot start or end with a hyphen. This name must be unique in your account.
required: true
default: null
version_label:
description:
- label of the version you want to deploy in the environment
required: false
default: null
description:
description:
- describes this environment
required: false
default: null
wait_timeout:
description:
- Number of seconds to wait for an environment to change state.
required: false
default: 900
template_name:
description:
- name of the configuration template to use in deployment. You must specify either this parameter or a solution_stack_name
required: false
default: null
solution_stack_name:
description:
- this is an alternative to specifying a template_name. You must specify either this or a template_name, but not both
required: false
default: null
cname_prefix:
description:
- if specified, the environment attempts to use this value as the prefix for the CNAME. If not specified, the environment uses the environment name.
required: false
default: null
option_settings:
description:
- 'A dictionary array of settings to add of the form: { Namespace: ..., OptionName: ... , Value: ... }. If specified, AWS Elastic Beanstalk sets the specified configuration options to the requested value in the configuration set for the new environment. These override the values obtained from the solution stack or the configuration template'
required: false
default: null
tier_name:
description:
- name of the tier
required: false
default: WebServer
choices: ['WebServer', 'Worker']
state:
description:
- whether to ensure the environment is present or absent, or to list existing environments
required: false
default: present
choices: ['absent','present','list']
author: Harpreet Singh
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Create or update environment
- elasticbeanstalk_env:
region: us-east-1
app_name: Sample App
env_name: sampleApp-env
version_label: Sample Application
solution_stack_name: "64bit Amazon Linux 2014.09 v1.2.1 running Docker 1.5.0"
option_settings:
- Namespace: aws:elasticbeanstalk:application:environment
OptionName: PARAM1
Value: bar
- Namespace: aws:elasticbeanstalk:application:environment
OptionName: PARAM2
Value: foobar
register: env
# Delete environment
- elasticbeanstalk_env:
app_name: Sample App
env_name: sampleApp-env
state: absent
wait_timeout: 360
region: us-west-2
'''
# boto is optional at import time so the module can load and report a
# friendly error (via HAS_BOTO) when the library is missing.
try:
    import boto.beanstalk
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
# AWS error code that should be retried rather than raised (rate limiting).
IGNORE_CODE = "Throttling"
def wait_for(ebs, app_name, env_name, wait_timeout, testfunc):
timeout_time = time.time() + wait_timeout
while True:
try:
env = describe_env(ebs, app_name, env_name)
except boto.exception.BotoServerError, e:
if e.code != IGNORE_CODE:
raise e
if testfunc(env):
return env
if time.time() > timeout_time:
raise ValueError("The timeout has expired")
time.sleep(15)
def health_is_green(env):
    # Predicate for wait_for: the environment reports Green health.
    is_green = (env["Health"] == "Green")
    return is_green
def health_is_grey(env):
    # Predicate for wait_for: the environment reports Grey health
    # (transitional state while an update is applied).
    is_grey = (env["Health"] == "Grey")
    return is_grey
def terminated(env):
    # Predicate for wait_for: the environment has finished terminating.
    is_terminated = (env["Status"] == "Terminated")
    return is_terminated
def describe_env(ebs, app_name, env_name):
    """Return the environment description dict, or None unless exactly one
    environment matches the (application, environment name) pair."""
    response = ebs.describe_environments(application_name=app_name,
                                         environment_names=[env_name])
    found = response["DescribeEnvironmentsResponse"]["DescribeEnvironmentsResult"]["Environments"]
    if len(found) == 1:
        return found[0]
    return None
def update_required(ebs, env, params):
    """Build the list of (name, old, new) deltas needed to bring the live
    environment in line with the requested module parameters.

    Compares the version label, the template name, and every requested
    option setting against the environment's current configuration.
    """
    changes = []

    if env["VersionLabel"] != params["version_label"]:
        changes.append(('VersionLabel', env['VersionLabel'], params['version_label']))
    if env["TemplateName"] != params["template_name"]:
        changes.append(('TemplateName', env['TemplateName'], params['template_name']))

    response = ebs.describe_configuration_settings(application_name=params["app_name"],
                                                   environment_name=params["env_name"])
    current = response["DescribeConfigurationSettingsResponse"]["DescribeConfigurationSettingsResult"]["ConfigurationSettings"][0]["OptionSettings"]

    for desired in params["option_settings"]:
        delta = new_or_changed_option(current, desired)
        if delta is not None:
            changes.append(delta)

    return changes
def new_or_changed_option(options, setting):
    """Compare one desired option setting against the applied options.

    Returns None when the option is already satisfied, otherwise a
    ('Namespace:OptionName', old_value, new_value) tuple; old_value is
    "<NEW>" when the option is not currently present at all.
    """
    subset_namespaces = ['aws:autoscaling:launchconfiguration', 'aws:ec2:vpc']
    subset_names = ['SecurityGroups', 'ELBSubnets', 'Subnets']

    for current in options:
        if current["Namespace"] != setting["Namespace"] or \
           current["OptionName"] != setting["OptionName"]:
            continue

        # List-valued options (security groups, subnets) count as satisfied
        # when the requested values are a subset of what is already applied.
        subset_ok = (setting['Namespace'] in subset_namespaces and
                     setting['OptionName'] in subset_names and
                     set(setting['Value'].split(',')).issubset(current['Value'].split(',')))

        if subset_ok or current["Value"] == setting["Value"]:
            return None
        return (current["Namespace"] + ':' + current["OptionName"], current["Value"], setting["Value"])

    return (setting["Namespace"] + ':' + setting["OptionName"], "<NEW>", setting["Value"])
def boto_exception(err):
    '''Extract a printable error message from a boto exception object,
    preferring error_message over message, with a generic fallback.'''
    for attr in ('error_message', 'message'):
        if hasattr(err, attr):
            return getattr(err, attr)
    return '%s: %s' % (Exception, err)
def check_env(ebs, app_name, env_name, module):
    # Check-mode handler: report what *would* change without touching AWS.
    # Always exits the module via module.exit_json.
    state = module.params['state']
    env = describe_env(ebs, app_name, env_name)
    result = {}
    if state == 'present' and env is None:
        result = dict(changed=True, output = "Environment would be created")
    elif state == 'present' and env is not None:
        # Environment exists: diff it against the requested parameters.
        updates = update_required(ebs, env, module.params)
        if len(updates) > 0:
            result = dict(changed=True, output = "Environment would be updated", env=env, updates=updates)
        else:
            result = dict(changed=False, output="Environment is up-to-date", env=env)
    elif state == 'absent' and env is None:
        result = dict(changed=False, output="Environment does not exist")
    elif state == 'absent' and env is not None:
        result = dict(changed=True, output="Environment will be deleted", env=env)
    module.exit_json(**result)
def main():
    # Ansible module entry point: create/update/terminate/list an Elastic
    # Beanstalk environment depending on the 'state' parameter. Always
    # exits through module.exit_json / module.fail_json.
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        app_name = dict(required=True),
        env_name = dict(required=True),
        version_label = dict(),
        description = dict(),
        state = dict(choices=['present','absent','list'], default='present'),
        wait_timeout = dict(default=900, type='int'),
        template_name = dict(),
        solution_stack_name = dict(),
        cname_prefix = dict(),
        option_settings = dict(type='list',default=[]),
        options_to_remove = dict(type='list',default=[]),
        tier_name = dict(default='WebServer', choices=['WebServer','Worker'])
        ),
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=[['solution_stack_name','template_name']],
                           supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    app_name = module.params['app_name']
    env_name = module.params['env_name']
    version_label = module.params['version_label']
    description = module.params['description']
    state = module.params['state']
    wait_timeout = module.params['wait_timeout']
    template_name = module.params['template_name']
    solution_stack_name = module.params['solution_stack_name']
    cname_prefix = module.params['cname_prefix']
    option_settings = module.params['option_settings']
    options_to_remove = module.params['options_to_remove']

    # Worker tiers use the SQS/HTTP tier type, web tiers use Standard.
    tier_type = 'Standard'
    tier_name = module.params['tier_name']
    if tier_name == 'Worker':
        tier_type = 'SQS/HTTP'

    # boto's API takes option settings/removals as tuples, not dicts.
    option_setting_tups = [(os['Namespace'],os['OptionName'],os['Value']) for os in option_settings]
    option_to_remove_tups = [(otr['Namespace'],otr['OptionName']) for otr in options_to_remove]

    # NOTE(review): ec2_url and aws_connect_kwargs are unused — credentials
    # come from boto's own environment/config resolution. Verify intended.
    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    try:
        ebs = boto.beanstalk.connect_to_region(region)
    except boto.exception.NoAuthHandlerFound, e:
        module.fail_json(msg='No Authentication Handler found: %s ' % str(e))
    except Exception, e:
        module.fail_json(msg='Failed to connect to Beanstalk: %s' % str(e))

    update = False
    result = {}

    if state == 'list':
        try:
            env = describe_env(ebs, app_name, env_name)
            result = dict(changed=False, env=env)
        except Exception, err:
            error_msg = boto_exception(err)
            module.fail_json(msg=error_msg)

    # Check mode: delegate to check_env, which exits the module itself.
    if module.check_mode and state != 'list':
        check_env(ebs, app_name, env_name, module)
        module.fail_json('ASSERTION FAILURE: check_version() should not return control.')

    if state == 'present':
        # Optimistically create; fall back to update when it already exists.
        try:
            ebs.create_environment(app_name, env_name, version_label, template_name,
                                   solution_stack_name, cname_prefix, description,
                                   option_setting_tups, None, tier_name,
                                   tier_type, '1.0')
            env = wait_for(ebs, app_name, env_name, wait_timeout, health_is_green)
            result = dict(changed=True, env=env)
        except Exception, err:
            error_msg = boto_exception(err)
            if 'Environment %s already exists' % env_name in error_msg:
                update = True
            else:
                module.fail_json(msg=error_msg)

    if update:
        try:
            env = describe_env(ebs, app_name, env_name)
            updates = update_required(ebs, env, module.params)
            if len(updates) > 0:
                ebs.update_environment(environment_name=env_name,
                                       version_label=version_label,
                                       template_name=template_name,
                                       description=description,
                                       option_settings=option_setting_tups,
                                       options_to_remove=None)
                # The environment goes Grey while updating, then Green.
                wait_for(ebs, app_name, env_name, wait_timeout, health_is_grey)
                env = wait_for(ebs, app_name, env_name, wait_timeout, health_is_green)
                result = dict(changed=True, env=env, updates=updates)
            else:
                result = dict(changed=False, env=env)
        except Exception, err:
            error_msg = boto_exception(err)
            module.fail_json(msg=error_msg)

    if state == 'absent':
        try:
            ebs.terminate_environment(environment_name=env_name)
            env = wait_for(ebs, app_name, env_name, wait_timeout, terminated)
            result = dict(changed=True, env=env)
        except Exception, err:
            error_msg = boto_exception(err)
            # A missing environment is not an error for state=absent.
            if 'No Environment found for EnvironmentName = \'%s\'' % env_name in error_msg:
                result = dict(changed=False, output='Environment not found')
            else:
                module.fail_json(msg=error_msg)

    module.exit_json(**result)
# import module snippets
# (Ansible injects the contents of these snippet modules at runtime.)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

main()
|
skorochkin/ansible-elastic-beanstalk-with-cleanup
|
library/elasticbeanstalk_env.py
|
Python
|
mit
| 12,266
|
'''
Sample: Speudogap model with r = 1
'''
from builtins import input
from numpy import *
from matplotlib.pyplot import *
import time,pdb
from nrgmap.hybri_sc import get_hybri_skew
from nrgmap.discretization import quick_map,check_disc
from nrgmap.chainmapper import map2chain,check_spec
from nrgmap.utils import get_wlist
def run():
    '''
    run this sample, visual check is quite slow!

    Maps a superconducting (pseudogap) hybridization function onto a
    discrete star model (quick_map) and then onto Wilson chains
    (map2chain), with optional interactive checks and saving of the
    resulting chain data.
    '''
    #generate the hybridization function.
    nband=2  # NOTE(review): unused in this function — confirm intent
    Gamma=0.5/pi
    Lambda=1.2
    D0=2.
    Gap=0.3
    # Total half-bandwidth including the gap.
    D=sqrt(D0**2+Gap**2)
    # Logarithmic mesh adapted to a superconducting gap structure.
    wlist=get_wlist(w0=1e-12,Nw=10000,mesh_type='sclog',Gap=Gap,D=D)
    rhofunc=get_hybri_skew(Gap,Gamma,D=D,eta=1e-12,skew=0.3)
    rholist=array([rhofunc(w) for w in wlist])

    #create the discretized model
    N=33   #the chain length
    nz=500 #the number of twisting parameter z
    # z values placed at the centers of nz equal intervals in (0, 1).
    z=linspace(0.5/nz,1-0.5/nz,nz)
    tick_type='adaptive'
    print('''Start mapping the hybridization function into a discrete set of baths.
    %s sites for each(pos/neg) branch
    %s z numbers
    tick type -> %s
    Lambda -> %s
    '''%(N,nz,tick_type,Lambda))
    # quick_map returns a tuple; index [1] is the discretized model.
    discmodel=quick_map(rhofunc=rhofunc,wlist=wlist,N=N,z=z,Nx=200000,tick_params={'tick_type':tick_type,'Lambda':Lambda},autofix=1e-5)[1]

    #map to a chain
    print('Start mapping the discrete model to a chain')
    chains=map2chain(discmodel,normalize_method='qr')
    print('Done')

    # Coarse subset of frequencies for the (slow) visual checks.
    plot_wlist=wlist[::30]

    docheck=input('Check whether this star model recover the hybridization function?(y/n):')=='y'
    if docheck:
        ion()
        check_disc(rhofunc=rhofunc,wlist=plot_wlist,discmodel=discmodel,smearing=1.,mode='pauli')
        print('***The superconducting model needs some special gradients to cope the smearing factor here,\
                \nwhich is not included for general purpose,\
                \nso, don\'t be disappointed by the poor match here, they are artifacts.***')
        ylim(-0.1,0.2)
        print('Press `c` to continue.')
        pdb.set_trace()

    docheck=input('Check whether this chain recover the hybridization function?(y/n):')=='y'
    if docheck:
        ion();cla()
        check_spec(rhofunc=rhofunc,chains=chains,wlist=plot_wlist,smearing=1.,mode='pauli')
        ylim(-0.1,0.2)
        print('Press `c` to continue.')
        pdb.set_trace()

    dosave=input('Save the chain datas?(y/n):')=='y'
    if dosave:
        # One chain per twisting parameter z.
        for iz,chain in zip(z,chains):
            chain.save('data/superconductor_%s'%iz)
# Run the sample when invoked as a script.
if __name__=='__main__':
    run()
|
GiggleLiu/nrg_mapping
|
samples/sample_superconductor.py
|
Python
|
mit
| 2,483
|
import util
def decrypt(c1, c2, c3, n1, n2, n3):
    """
    Decrypt by executing a Low Public Exponent Attack (e=3) based on the
    Chinese Remainder Theorem: combine the three residues into m^3 modulo
    n1*n2*n3, then take the integer cube root.

    For a good explanation:
    http://crypto.stackexchange.com/questions/6713/low-public-exponent-attack-for-rsa
    """
    product = n1 * n2 * n3
    total = 0
    for c, n in ((c1, n1), (c2, n2), (c3, n3)):
        # CRT term: c * (product/n) * inverse(product/n mod n)
        others = product // n
        total += c * others * util.modinv(others, n)
    cubed = total % product
    return util.iroot(cubed, 3)
if __name__ == '__main__':
    # NOTE: the bare assignments below are intentional placeholders — this
    # is a CTF exercise template and will not parse/run until filled in.
    # TODO(Badguy's public exponent)
    e =
    # TODO(Badguy's three public moduli)
    n1 =
    n2 =
    n3 =
    # TODO(Three ciphertexts of the same message, extracted from the server)
    c1 =
    c2 =
    c3 =
    # Decrypt using the Low Public Exponent Attack
    print util.deserialize(decrypt(c1, c2, c3, n1, n2, n3))
|
historical-ctf/historical-ctf.github.io
|
static/scripts/task3/solution.py
|
Python
|
mit
| 867
|
import unittest
from sound.rtttl_parser import RtttlParser
class TestRtttlParser(unittest.TestCase):
    """Unit tests for RtttlParser, using 'The Simpsons' ringtone as the fixture.

    Uses assertEqual/assertIsInstance (instead of assertTrue on a comparison)
    so failures report the differing values, and a setUp fixture instead of
    constructing the parser in every test.
    """

    SONG = {
        'rtttl': 'The Simpsons:d=4,o=5,b=160:c.6,e6,f#6,8a6,g.6,e6,c6,8a,8f#,8f#,8f#,2g,8p,8p,8f#,8f#,8f#,8g,a#.,8c6,8c6,8c6,c6',
        'name': 'The Simpsons',
        'defaults': ['d=4', 'o=5', 'b=160'],
        'notes': ['c.6', 'e6', 'f#6', '8a6', 'g.6', 'e6', 'c6', '8a', '8f#', '8f#', '8f#', '2g', '8p', '8p', '8f#', '8f#', '8f#', '8g', 'a#.', '8c6', '8c6', '8c6', 'c6'],
        'interpretation': [(0.5625, 1046), (0.375, 1318), (0.375, 1479), (0.1875, 1760), (0.5625, 1567), (0.375, 1318), (0.375, 1046), (0.1875, 880), (0.1875, 739), (0.1875, 739), (0.1875, 739), (0.75, 783), (0.1875, 0), (0.1875, 0), (0.1875, 739), (0.1875, 739), (0.1875, 739), (0.1875, 783), (0.5625, 932), (0.1875, 1046), (0.1875, 1046), (0.1875, 1046), (0.375, 1046)]
    }

    def setUp(self):
        # One fresh parser per test, built from the raw RTTTL string.
        self.parser = RtttlParser(self.SONG['rtttl'])

    def test_frequencies(self):
        self.assertIsInstance(self.parser.FREQUENCIES, dict)

    def test_interpret(self):
        self.assertEqual(self.parser.interpret(), self.SONG['interpretation'])

    def test_get_name(self):
        self.assertEqual(self.parser.get_name(), self.SONG['name'])

    def test_get_defaults(self):
        self.assertEqual(self.parser.get_defaults(), self.SONG['defaults'])

    def test_get_notes(self):
        self.assertEqual(self.parser.get_notes(), self.SONG['notes'])

    def test_get_note_elements(self):
        # duration, pitch, dotted marker, octave
        self.assertEqual(self.parser.get_note_elements('4c#.7'), ('4', 'c#', '.', '7'))

    def test_get_note_pitch(self):
        self.assertEqual(self.parser.get_note_pitch('4c#.7'), 'c#')

    def test_get_note_octave(self):
        self.assertEqual(self.parser.get_note_octave('4c#.7'), '7')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
hertzwhenip/virtual-hammond
|
server/tests/test_rtttl_parser.py
|
Python
|
mit
| 2,592
|
import cPickle
import numpy as np
import h5py
from numpy.linalg import norm
from numpy import sqrt
import multiprocessing
import time
# Dataset variant selector: picks the dat/training-<tp>.dat dump.
tp='real'

#fr = open('dat/db.dat','rb')
#imask = cPickle.load(fr)
#umask = cPickle.load(fr)
#fr.close()
#fr = open('dat/training.dat','rb')
#cPickle.load(fr)
#sm = cPickle.load(fr)
#tm = cPickle.load(fr)
#fr.close()

# Load the training dump: ratings matrix S, per-cell state indices
# (States), user/item bias matrices (Bu/Bi) and factor matrices (U/Vt).
# NOTE(review): the pickle load order must match the writer exactly —
# verify against the script that produced dat/training-<tp>.dat.
fr = open('dat/training-'+tp+'.dat','rb')
S = cPickle.load(fr)
#St = cPickle.load(fr)
#Sv = cPickle.load(fr)
States = cPickle.load(fr)
Bu = cPickle.load(fr)
Bi = cPickle.load(fr)
U = cPickle.load(fr)
Vt = cPickle.load(fr)
fr.close()

#fr = open('dat/temp-'+tp+'.dat','rb')
#U = cPickle.load(fr)
#Vt = cPickle.load(fr)
#Bu = cPickle.load(fr)
#Bi = cPickle.load(fr)
#fr.close()

# lil format gives cheap per-element indexing inside the training loops.
States=States.tolil()

#f = h5py.File("data.hdf5","r")
#U = f['u']
#Vt = f['vt']
#Bu = f['Buser']
#Bi = f['Bitem']
def evaluate(S):
    """
    Return the RMSE of the biased matrix-factorization model over the
    nonzero entries of S.

    S: M x N lil sparse matrix of observed values
    Module-level globals used:
      U:  M x K user factor matrix
      Vt: K x N item factor matrix
      Bu: M x 5 per-(user, state) bias
      Bi: N x 5 per-(item, state) bias
      States: M x N state-index matrix (selects which bias column applies)
    NOTE(review): an earlier docstring said K=500 but derivative1 allocates
    400-dimensional gradients — confirm the factor dimension.
    """
    I,J = S.nonzero()
    L = S.getnnz()
    rtn = 0
    for i in xrange(L):
        # State of this (user, item) cell selects the bias column.
        s = States[I[i],J[i]]
        rtn+=(S[I[i],J[i]]-U[I[i],:].dot(Vt[:,J[i]])-Bu[I[i],s]-Bi[J[i],s])**2
    return sqrt(rtn/L)
def derivative1(S):
    """Gradient of the regularized squared error w.r.t. the factor
    matrices. Returns (dU, dVt); the trailing dU+=U / dVt+=Vt terms add
    the L2 regularization gradient (weight 1). Reads the module-level
    globals U, Vt, Bu, Bi, States."""
    dU = np.zeros((S.shape[0],400))
    dVt = np.zeros((400, S.shape[1]))
    I,J = S.nonzero()
    L = S.getnnz()
    for i in xrange(L):
        s = States[I[i],J[i]]
        # d/dU and d/dVt of (residual)^2 for this observed cell.
        dU[I[i],:]+=-2*(S[I[i],J[i]]-U[I[i],:].dot(Vt[:,J[i]])-Bu[I[i],s]-Bi[J[i],s])*Vt[:,J[i]].T
        dVt[:,J[i]]+=-2*(S[I[i],J[i]]-U[I[i],:].dot(Vt[:,J[i]])-Bu[I[i],s]-Bi[J[i],s])*U[I[i],:].T
    dU+=U
    dVt+=Vt
    return (dU ,dVt)
def derivative2(S):
    """Gradient of the regularized squared error w.r.t. the per-state bias
    matrices. Returns (dBu, dBi); the trailing dBu+=Bu / dBi+=Bi terms add
    the L2 regularization gradient. Reads the module-level globals
    U, Vt, Bu, Bi, States."""
    dBu = np.zeros((S.shape[0],5))
    dBi = np.zeros((S.shape[1],5))
    I,J = S.nonzero()
    L = S.getnnz()
    for i in xrange(L):
        s = States[I[i],J[i]]
        # Only the bias column for this cell's state receives a gradient.
        dBu[I[i],s]+=-2*(S[I[i],J[i]]-U[I[i],:].dot(Vt[:,J[i]])-Bu[I[i],s]-Bi[J[i],s])
        dBi[J[i],s]+=-2*(S[I[i],J[i]]-U[I[i],:].dot(Vt[:,J[i]])-Bu[I[i],s]-Bi[J[i],s])
    dBu+=Bu
    dBi+=Bi
    return (dBu, dBi)
def worker1(d, S):
    # Child-process task: compute the factor-matrix gradients and hand the
    # results back through the shared manager dict.
    d['dU'], d['dVt'] = derivative1(S)
def worker2(d,S):
    # Child-process task: report the current RMSE, then compute the bias
    # gradients and publish them via the shared manager dict.
    print evaluate(S)
    dBu, dBi = derivative2(S)
    d['dBu'] = dBu
    d['dBi'] = dBi
if __name__=='__main__':
    try:
        # 70 gradient-descent steps; the two gradient computations run in
        # parallel child processes and report back through a manager dict.
        for i in xrange(70):
            mgr = multiprocessing.Manager()
            d = mgr.dict()
            p1 = multiprocessing.Process(target=worker1, args=(d,S))
            p2 = multiprocessing.Process(target=worker2, args=(d,S))
            p1.start()
            p2.start()
            p1.join()
            p2.join()
            # Fixed learning rate 0.0002 for all four parameter groups.
            U-=0.0002*d['dU']
            Vt-=0.0002*d['dVt']
            Bu-=0.0002*d['dBu']
            Bi-=0.0002*d['dBi']
    finally:
        # Always checkpoint the current parameters, even on interrupt.
        fw = open('dat/temp-'+tp+'.dat','wb')
        cPickle.dump(U,fw)
        cPickle.dump(Vt,fw)
        cPickle.dump(Bu,fw)
        cPickle.dump(Bi,fw)
        fw.close()
|
wattlebird/Chi
|
sim/training.py
|
Python
|
mit
| 2,961
|
#!/usr/bin/python
from manager import Packet, Manager
from threading import Event, Lock
import cherrypy
import datetime
class RequestManager(Manager):
    """HTTP front-end Manager.

    Exposes a CherryPy web API; each endpoint forwards a Packet to the
    DataManager and blocks until the matching response arrives (or times
    out). Responses are correlated with their requests via a per-request id.

    BUG FIX: index() previously referenced the undefined name
    'payloadresponse_packet', raising NameError whenever a response arrived.
    The repeated send/wait/count boilerplate is factored into the private
    _roundtrip/_roundtrip_list/_as_list helpers; all endpoint signatures and
    decorators are unchanged.
    """

    def __init__(self):
        Manager.__init__(self, 'RequestManager')

    def process(self, packet):
        # Response packets carry the request_id they answer; store the
        # packet and wake the endpoint thread waiting on it.
        request_id = packet.payload.get('request_id', None)
        if request_id is not None:
            self._requests[request_id]['packet'] = packet
            self._requests[request_id]['event'].set()

    def starting(self):
        # Bring up the embedded CherryPy server and the request registry.
        cherrypy.server.socket_host = '0.0.0.0'
        cherrypy.config.update({'engine.autoreload.on': False})
        cherrypy.tree.mount(self, '/')
        cherrypy.engine.start()

        self._next_request_id = 0
        self._requests = {}
        self._requests_lock = Lock()

    def stopping(self):
        cherrypy.engine.exit()

    def register_request(self):
        # Allocate a unique request id plus an Event to wait on.
        request_id = None
        with self._requests_lock:
            request_id = self._next_request_id
            self._next_request_id += 1
            self._requests[request_id] = { 'event': Event(), 'packet': None }
        return request_id

    def wait_for_response(self, request_id, timeout=120):
        # Block until the response arrives, the manager shuts down, or
        # 'timeout' seconds pass; return the response packet (or None) and
        # drop the registry entry.
        req = None
        with self._requests_lock:
            req = self._requests[request_id]

        e = req['event']

        start = datetime.datetime.now()
        while not e.wait(0.1) and not self._event.wait(0.1):
            if (datetime.datetime.now() - start).total_seconds() > timeout:
                e.set()

        response_packet = None
        with self._requests_lock:
            response_packet = self._requests[request_id]['packet']
            del self._requests[request_id]

        return response_packet

    # -- internal helpers -------------------------------------------------

    @staticmethod
    def _as_list(value):
        # Normalize an optional scalar-or-list query parameter to a list.
        if value is None:
            return []
        if isinstance(value, basestring):
            return [value]
        return value

    def _roundtrip(self, action, extra_payload=None):
        # Send 'action' to the DataManager and wait for its response;
        # returns the response payload, or None on timeout/shutdown.
        request_id = self.register_request()
        payload = { 'request_id': request_id }
        if extra_payload:
            payload.update(extra_payload)
        self.send(Packet(action, 'DataManager', self.name, payload=payload))
        response_packet = self.wait_for_response(request_id)
        if response_packet is not None:
            return response_packet.payload
        else:
            return None

    def _roundtrip_list(self, action, extra_payload=None):
        # Like _roundtrip, but annotates list-carrying responses with a
        # 'count' field.
        payload = self._roundtrip(action, extra_payload)
        if payload is not None:
            l = payload.get('list', None)
            if l is not None:
                payload['count'] = len(l)
        return payload

    # -- HTTP endpoints ----------------------------------------------------

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def index(self):
        # Full listing (fixed NameError: see class docstring).
        return self._roundtrip_list('index')

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def encode_list(self, profile=None):
        # Listing filtered by encode profile(s).
        return self._roundtrip_list('encode_list', { 'profiles': self._as_list(profile) })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def get_next(self, profile=None, status=None):
        # Next item matching the given profiles/statuses.
        return self._roundtrip('get_next', { 'profiles': self._as_list(profile),
                                             'statuses': self._as_list(status) })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def version(self):
        return self._roundtrip('version')

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.json_in()
    def add_encode(self):
        # JSON request body is forwarded as the new encode object.
        return self._roundtrip('add_encode', { 'obj': cherrypy.request.json })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.json_in()
    def update_encode(self):
        # JSON request body is forwarded as the updated encode object.
        return self._roundtrip('update_encode', { 'obj': cherrypy.request.json })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def reset_to_pending(self, status=None):
        return self._roundtrip('reset_to_pending', { 'statuses': self._as_list(status) })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def get_all_with_status(self, status=None):
        return self._roundtrip_list('get_all_with_status', { 'statuses': self._as_list(status) })

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def get_active(self):
        return self._roundtrip_list('get_active')

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.response_headers(headers=[('Access-Control-Allow-Origin', '*')])
    def get_count_per_status(self, start_time=None):
        return self._roundtrip('get_count_per_status', { 'start_time': start_time })
|
rickbassham/videoencode
|
requestmanager.py
|
Python
|
mit
| 8,078
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.data.component Contains the DataComponent class
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import the relevant PTS classes and modules
from ..core.component import ModelingComponent
from ...core.tools import filesystem as fs
# -----------------------------------------------------------------
class DataComponent(ModelingComponent):

    """
    Modeling component responsible for the galaxy data directory.
    """

    def __init__(self, config=None):

        """
        Construct the data component.
        :param config: optional configuration passed to the base class
        :return:
        """

        # Initialize the base modeling component
        super(DataComponent, self).__init__(config)

        # -- Attributes --

        # Path to the galaxy info file; filled in by setup()
        self.galaxy_info_path = None

    # -----------------------------------------------------------------

    def setup(self):

        """
        Prepare this component for use.
        :return:
        """

        # Let the base class perform its own setup first
        super(DataComponent, self).setup()

        # The galaxy info file lives inside the data directory
        self.galaxy_info_path = fs.join(self.data_path, "info.dat")
# -----------------------------------------------------------------
|
Stargrazer82301/CAAPR
|
CAAPR/CAAPR_AstroMagic/PTS/pts/modeling/data/component.py
|
Python
|
mit
| 1,631
|
import os
import logging
from . import loader
from nose import plugins
LOG = logging.getLogger('nose.' + __name__)
class NoseScriptUnit(plugins.Plugin):
    """Nose plugin that discovers and runs JavaScript unit-test files."""

    name = "jscriptunit"

    def options(self, parser, env):
        """Register the --jscriptunit-paths command line option."""
        super(NoseScriptUnit, self).options(parser, env)
        parser.add_option("--jscriptunit-paths",
                          dest="require_paths",
                          action="append",
                          default=env.get('JSCRIPTUNIT_PATHS'),
                          help="allow requires from those paths")

    def wantFile(self, path):
        """Accept .js files whose basename matches the configured test pattern."""
        filename = os.path.basename(path)
        if not filename.endswith('.js'):
            return None
        return bool(self.conf.testMatch.match(filename))

    def loadTestsFromFile(self, filename):
        """Delegate test loading to the JavaScript test runner."""
        return loader.TestRunner(filename, self.conf).load_tests()
|
simpoir/nosescript
|
nosescript/unit.py
|
Python
|
mit
| 828
|
"""
A Genpolar task about generators
Eunice Chen 1/3/2014
"""
import numpy as np
def rtpairs(R, N):
    '''Generate (radius, theta) pairs.

    R and N are equal-length sequences; for each radius R[i], N[i] points
    are yielded with evenly spaced angles.

    Note: theta is incremented *before* the first yield, so the angles for
    ring i run from 2*pi/N[i] up to 2*pi rather than starting at 0 — this
    preserves the original behaviour. (The original also assigned unused
    locals r and n, removed here.)
    '''
    for radius, count in zip(R, N):
        theta = 0.0
        for _ in range(count):
            theta += 2 * np.pi / count
            yield radius, theta
def rtuniform(n, rmax, m):
    '''Generate (r, theta) pairs on n uniformly spaced rings up to rmax,
    with the point count growing by m per ring (1, 1+m, 1+2m, ...).
    '''
    radii = np.arange(0, rmax, rmax / n)
    counts = np.arange(1, n * m, m)
    return rtpairs(radii, counts)
|
EuniceChen1/Raytracer-project
|
genpolar.py
|
Python
|
mit
| 544
|
import sys
import MonkeyLex
import MonkeyParser
import MonkeyInterpreter
if __name__ == '__main__':
    # Usage: python Monkey.py <source-file>
    if len(sys.argv) == 2:
        data = open(sys.argv[1], 'r').read()
        # Parse the Monkey source; second argument presumably a parser
        # flag — TODO confirm against MonkeyParser.parse.
        prog = MonkeyParser.parse(data, 1)
        print prog
        # Translate the parsed program to Python and write it to Hello.py.
        mintp = MonkeyInterpreter.MonkeyInterpreter(prog, "Hello.py")
        mintp.translate()
        mintp.save()
|
ideaworld/FHIR_Tester
|
FHIR_Tester_backend/services/monkey/Monkey.py
|
Python
|
mit
| 352
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import isolate_node, sync_mempools, set_node_times, reconnect_isolated_node, assert_equal, \
assert_raises_rpc_error
'''
p2p-instantsend.py
Tests InstantSend functionality (prevent doublespend for unconfirmed transactions)
'''
class InstantSendTest(DashTestFramework):
    """Functional test for InstantSend double-spend protection.

    Uses 9 nodes (5 masternodes) with designated isolated, receiver and
    sender nodes, and checks that a conflicting transaction can win
    neither via a mined block nor via the mempool once an InstantSend
    lock exists.
    """
    def set_test_params(self):
        # 9 nodes total, 5 masternodes; fast DIP3 enforcement speeds up setup.
        self.set_dash_test_params(9, 5, fast_dip3_enforcement=True)
        # set sender,  receiver,  isolated nodes
        self.isolated_idx = 1
        self.receiver_idx = 2
        self.sender_idx = 3

    def run_test(self):
        # Enable DKG and mine a quorum so InstantSend locks can be formed,
        # then switch the InstantSend sporks on before running both scenarios.
        self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
        self.wait_for_sporks_same()
        self.mine_quorum()
        self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 0)
        self.nodes[0].spork("SPORK_3_INSTANTSEND_BLOCK_FILTERING", 0)
        self.wait_for_sporks_same()
        self.test_mempool_doublespend()
        self.test_block_doublespend()

    def test_block_doublespend(self):
        """A block containing a tx that conflicts with an InstantSend-locked
        tx must be rejected by the rest of the network."""
        sender = self.nodes[self.sender_idx]
        receiver = self.nodes[self.receiver_idx]
        isolated = self.nodes[self.isolated_idx]

        # feed the sender with some balance
        sender_addr = sender.getnewaddress()
        self.nodes[0].sendtoaddress(sender_addr, 1)
        self.bump_mocktime(1)
        set_node_times(self.nodes, self.mocktime)
        self.nodes[0].generate(2)
        self.sync_all()

        # create doublespending transaction, but don't relay it
        dblspnd_tx = self.create_raw_tx(sender, isolated, 0.5, 1, 100)
        # isolate one node from network
        isolate_node(isolated)
        # instantsend to receiver
        receiver_addr = receiver.getnewaddress()
        is_id = sender.sendtoaddress(receiver_addr, 0.9)
        # wait for the transaction to propagate
        connected_nodes = self.nodes.copy()
        del connected_nodes[self.isolated_idx]
        sync_mempools(connected_nodes)
        for node in connected_nodes:
            self.wait_for_instantlock(is_id, node)
        # send doublespend transaction to isolated node
        isolated.sendrawtransaction(dblspnd_tx['hex'])
        # generate block on isolated node with doublespend transaction
        self.bump_mocktime(1)
        set_node_times(self.nodes, self.mocktime)
        isolated.generate(1)
        wrong_block = isolated.getbestblockhash()
        # connect isolated block to network
        reconnect_isolated_node(isolated, 0)
        # check doublespend block is rejected by other nodes
        timeout = 10
        for i in range(0, self.num_nodes):
            if i == self.isolated_idx:
                continue
            res = self.nodes[i].waitforblock(wrong_block, timeout)
            assert (res['hash'] != wrong_block)
            # wait for long time only for first node
            timeout = 1
        # send coins back to the controller node without waiting for confirmations
        receiver.sendtoaddress(self.nodes[0].getnewaddress(), 0.9, "", "", True)
        assert_equal(receiver.getwalletinfo()["balance"], 0)
        # mine more blocks
        # TODO: mine these blocks on an isolated node
        self.bump_mocktime(1)
        set_node_times(self.nodes, self.mocktime)
        self.nodes[0].generate(2)
        self.sync_all()

    def test_mempool_doublespend(self):
        """A mempool doublespend held by a briefly isolated node must be
        pruned once the conflicting tx gets InstantSend-locked."""
        sender = self.nodes[self.sender_idx]
        receiver = self.nodes[self.receiver_idx]
        isolated = self.nodes[self.isolated_idx]

        # feed the sender with some balance
        sender_addr = sender.getnewaddress()
        self.nodes[0].sendtoaddress(sender_addr, 1)
        self.bump_mocktime(1)
        set_node_times(self.nodes, self.mocktime)
        self.nodes[0].generate(2)
        self.sync_all()

        # create doublespending transaction, but don't relay it
        dblspnd_tx = self.create_raw_tx(sender, isolated, 0.5, 1, 100)
        # txid of the conflicting tx, computed locally (not broadcast yet)
        dblspnd_txid = bytes_to_hex_str(hash256(hex_str_to_bytes(dblspnd_tx['hex']))[::-1])
        # isolate one node from network
        isolate_node(isolated)
        # send doublespend transaction to isolated node
        isolated.sendrawtransaction(dblspnd_tx['hex'])
        # let isolated node rejoin the network
        # The previously isolated node should NOT relay the doublespending TX
        reconnect_isolated_node(isolated, 0)
        for node in self.nodes:
            if node is not isolated:
                assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", node.getrawtransaction, dblspnd_txid)
        # instantsend to receiver. The previously isolated node should prune the doublespend TX and request the correct
        # TX from other nodes.
        receiver_addr = receiver.getnewaddress()
        is_id = sender.sendtoaddress(receiver_addr, 0.9)
        # wait for the transaction to propagate
        sync_mempools(self.nodes)
        for node in self.nodes:
            self.wait_for_instantlock(is_id, node)
        assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", isolated.getrawtransaction, dblspnd_txid)
        # send coins back to the controller node without waiting for confirmations
        receiver.sendtoaddress(self.nodes[0].getnewaddress(), 0.9, "", "", True)
        assert_equal(receiver.getwalletinfo()["balance"], 0)
        # mine more blocks
        self.bump_mocktime(1)
        set_node_times(self.nodes, self.mocktime)
        self.nodes[0].generate(2)
        self.sync_all()
# Allow running this functional test directly.
if __name__ == '__main__':
    InstantSendTest().main()
|
nmarley/dash
|
test/functional/p2p-instantsend.py
|
Python
|
mit
| 5,807
|
from django.conf import settings
from django.utils.timezone import now
from {{ project_name }}.conf import config
from .utils import intspace, set_param
def extra(request):
    """Context processor: expose selected builtins, Django settings, the
    project config object and misc helpers to every template context."""
    return {
        'dir': dir, 'list': list, 'len': len, 'enumerate': enumerate, 'range': range,
        'settings': settings, 'config': config,
        'now': now, 'intspace': intspace, 'set_param': set_param,
    }
|
pinkevich/django-project-template
|
project_name/core/context_processors.py
|
Python
|
mit
| 404
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-25 20:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: changes only the verbose_name of
    # Talk.is_our_talk (no schema change).

    dependencies = [
        ('talks_keeper', '0002_auto_20161224_1512'),
    ]

    operations = [
        migrations.AlterField(
            model_name='talk',
            name='is_our_talk',
            field=models.BooleanField(default=False, verbose_name='Ми звернулися?'),
        ),
    ]
|
samitnuk/talks_keeper
|
talks_keeper/migrations/0003_auto_20161225_2036.py
|
Python
|
mit
| 505
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: adds the unique user_id CharField to the
    # authsettings_admin model.

    dependencies = [
        ('arm_settings', '0007_authsettings_admin'),
    ]

    operations = [
        migrations.AddField(
            model_name='authsettings_admin',
            name='user_id',
            field=models.CharField(default=b'Admin', unique=True, max_length=5),
        ),
    ]
|
shwetams/arm-samples-py
|
arm_basic_samples/arm_settings/migrations/0008_authsettings_admin_user_id.py
|
Python
|
mit
| 448
|
#!/usr/bin/env python
import sys, os, random, pickle, json, codecs, fileinput
import numpy as np
from model import AutumnNER
import sklearn.metrics as skm
import argparse
import conlleval
# Command line interface: optional input files (stdin when empty) and an
# RNG seed used before prediction.
parser = argparse.ArgumentParser(description='Train and evaluate BiLSTM on a given dataset')
parser.add_argument('files', metavar='FILE', nargs='*', help='files to read, if empty, stdin is used')
parser.add_argument('--seed', dest='seed', type=int,
                    default=1, help='seed for training')
def main(args):
    """Annotate token streams with the saved Autumn NER model (Python 2).

    Reads whitespace-tokenized sentences (one per line) from args.files or
    stdin, restores the model from ./saved_model, and prints one
    "token tag" pair per line with a blank line between sentences.
    """
    print >> sys.stderr, "Running Autumn NER model annotating module"
    print >> sys.stderr, args
    random.seed(args.seed)
    print >> sys.stderr, "Loading vocab.."
    # Pickles are opened in text mode ('r') — Python-2 behavior; py3 would
    # need 'rb'.
    with open(os.path.join('saved_model','word_vocab.pickle'),'r') as f:
        word_vocab = pickle.load(f)
    print >> sys.stderr, "Loading labels.."
    with open(os.path.join('saved_model','label_space.pickle'),'r') as f:
        labels = pickle.load(f)
    # Create the model, passing in relevant parameters
    bilstm = AutumnNER(labels=labels,
                word_vocab=word_vocab,
                word_embeddings=None)
    bilstm.restore('./saved_model/main')
    inputs = []
    for line in fileinput.input(args.files):
        line = line.rstrip()
        if len(line) == 0:
            # Blank lines are sentence separators in the input; skip them.
            continue
        items = line.split(' ')
        inputs.append([ x.strip() for x in items if len(x.strip()) > 0])
    # Predict in batches and emit CoNLL-style "token tag" lines.
    for line, tag in zip(inputs, bilstm.predict(inputs,batch_size=20)):
        for pair in zip(line, tag):
            print ' '.join(pair)
        print ''
def report_performance(model, X_test,y_test, outname):
    """Evaluate `model` on a test split, dump per-token predictions to
    `outname`, then run a CoNLL-style evaluation over that file (Python 2).
    """
    micro_evaluation = model.evaluate(X_test,y_test,macro=False)
    macro_evaluation = model.evaluate(X_test,y_test,macro=True)
    print "Micro Test Eval: F={:.4f} P={:.4f} R={:.4f}".format(*micro_evaluation)
    print "Macro Test Eval: F={:.4f} P={:.4f} R={:.4f}".format(*macro_evaluation)
    pred_test = model.predict(X_test)
    # One "token gold pred" line per token, blank line between sentences.
    with open(outname,'w') as f:
        for (x,y,z) in zip(X_test,y_test,pred_test):
            for token, y_true, y_pred in zip(x,y,z):
                print >> f, token, y_true, y_pred
            print >> f, ''
    with open(outname,'r') as f:
        # NOTE(review): `evaluation` is never imported (only `conlleval` is,
        # at the top of the file) — this line raises NameError when reached.
        # Presumably it should be conlleval.report(conlleval.evaluate(f)).
        evaluation.report(evaluation.evaluate(f))
def load_embeddings(fname, vocab, dim=200):
    """Build (or load a cached copy of) the embedding weight matrix for
    `vocab` from a whitespace-separated word-vector file (Python 2).

    Rows for words absent from the vector file keep their random init.
    NOTE(review): the cache key uses hash(' '.join(vocab)) — stable on
    CPython 2 by default, but NOT stable across Python 3 runs with hash
    randomization; confirm before porting.
    """
    cached = 'scratch/embeddings_{}.npy'.format(abs(hash(' '.join(vocab))))
    if not os.path.exists(cached):
        # Random init in [-0.05, 0.05); rows overwritten below when found.
        weight_matrix = np.random.uniform(-0.05, 0.05, (len(vocab),dim)).astype(np.float32)
        ct = 0
        with codecs.open(fname, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(u' ', 1)
                if word not in vocab:
                    continue
                # NOTE(review): vocab is a list, so `in` and .index are O(n)
                # per line — slow for large vocabularies.
                idx = vocab.index(word)
                vec = np.array(vec.split(), dtype=np.float32)
                weight_matrix[idx,:dim] = vec[:dim]
                ct += 1
                if ct % 33 == 0:
                    # Carriage-return progress line on stdout.
                    sys.stdout.write('Loading embeddings {}/{} \r'.format(ct, len(vocab)))
        print "Loaded {}/{} embedding vectors".format(ct, len(vocab))
        np.save(cached,weight_matrix)
    else:
        weight_matrix = np.load(cached)
    print >> sys.stderr, "Loaded weight matrix {}..".format(weight_matrix.shape)
    return weight_matrix
def load_dataset(fname, shuffle=False):
    """Load a CoNLL-style dataset of "token label" lines.

    Sentences are separated by blank lines; each non-empty line is a
    space-separated "token label" pair.

    :param fname: path to the dataset file.
    :param shuffle: if True, shuffle the sentence order in place before
        returning. (Previously this parameter was accepted but silently
        ignored.)
    :return: ``(output, vocab)`` where output is a list of
        ``(tokens, labels)`` tuples (both tuples of str, original casing)
        and vocab is the list of lowercased tokens in first-seen order.
    """
    with open(fname, 'r') as f:
        # Blank-line separated sentence blocks; drop empty trailing blocks.
        dataset = [x.split('\n') for x in f.read().split('\n\n') if x]
    vocab = []
    seen = set()  # O(1) membership; the previous `t not in vocab` scan was O(n^2)
    output = []
    for sentence in dataset:
        tokens, labels = zip(*[z.split(' ') for z in sentence if z])
        for t in tokens:
            t = t.lower()
            if t not in seen:
                seen.add(t)
                vocab.append(t)
        output.append((tokens, labels))
    if shuffle:
        random.shuffle(output)
    return output, vocab
# Script entry point: parse CLI arguments and run the annotator.
if __name__ == '__main__':
    main(parser.parse_args())
|
tttr222/autumn_ner
|
annotate.py
|
Python
|
mit
| 3,940
|
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Fujicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as signature hash functions.
This file is modified from python-fujicoinlib.
"""
from collections import namedtuple
import hashlib
import struct
import unittest
from typing import List, Dict
from .key import TaggedHash, tweak_add_pubkey
from .messages import (
CTransaction,
CTxOut,
hash256,
ser_string,
ser_uint256,
sha256,
uint256_from_str,
)
MAX_SCRIPT_ELEMENT_SIZE = 520
LOCKTIME_THRESHOLD = 500000000
ANNEX_TAG = 0x50
LEAF_VERSION_TAPSCRIPT = 0xc0
def hash160(s):
    """RIPEMD160(SHA256(s)) — the standard HASH160 used for addresses."""
    inner = sha256(s)
    ripe = hashlib.new('ripemd160')
    ripe.update(inner)
    return ripe.digest()
def bn2vch(v):
    """Serialize an integer in the script-number byte format: little-endian
    magnitude with the sign carried in the most significant bit (an extra
    byte is implied when the top magnitude bit is already set)."""
    if v == 0:
        return b''
    # One sign bit on top of the magnitude bits determines the width.
    bit_count = v.bit_length() + 1
    byte_count = (bit_count + 7) // 8
    payload = abs(v)
    if v < 0:
        payload |= 1 << (byte_count * 8 - 1)
    return payload.to_bytes(byte_count, 'little')
class CScriptOp(int):
    """A single script opcode.

    Instances are interned: CScriptOp(n) returns the same object for a given
    n via the module-level _opcode_instances table (filled at import time).
    """
    __slots__ = ()
    @staticmethod
    def encode_op_pushdata(d):
        """Encode a PUSHDATA op, returning bytes"""
        # Pick the smallest PUSHDATA form that can carry len(d).
        if len(d) < 0x4c:
            return b'' + bytes([len(d)]) + d # OP_PUSHDATA
        elif len(d) <= 0xff:
            return b'\x4c' + bytes([len(d)]) + d # OP_PUSHDATA1
        elif len(d) <= 0xffff:
            return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
        elif len(d) <= 0xffffffff:
            return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
        else:
            raise ValueError("Data too long to encode in a PUSHDATA op")
    @staticmethod
    def encode_op_n(n):
        """Encode a small integer op, returning an opcode"""
        if not (0 <= n <= 16):
            raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
        if n == 0:
            return OP_0
        else:
            return CScriptOp(OP_1 + n - 1)
    def decode_op_n(self):
        """Decode a small integer opcode, returning an integer"""
        if self == OP_0:
            return 0
        if not (self == OP_0 or OP_1 <= self <= OP_16):
            raise ValueError('op %r is not an OP_N' % self)
        return int(self - OP_1 + 1)
    def is_small_int(self):
        """Return true if the op pushes a small integer to the stack"""
        # 0x51..0x60 are OP_1..OP_16; 0x00 is OP_0.
        if 0x51 <= self <= 0x60 or self == 0:
            return True
        else:
            return False
    def __str__(self):
        return repr(self)
    def __repr__(self):
        # Prefer the symbolic name when the opcode is known.
        if self in OPCODE_NAMES:
            return OPCODE_NAMES[self]
        else:
            return 'CScriptOp(0x%x)' % self
    def __new__(cls, n):
        # Interning: reuse the cached instance when present. During the
        # module-level table fill, opcodes are created sequentially, so a
        # cache miss must be exactly the next free slot.
        try:
            return _opcode_instances[n]
        except IndexError:
            assert len(_opcode_instances) == n
            _opcode_instances.append(super().__new__(cls, n))
            return _opcode_instances[n]
OPCODE_NAMES: Dict[CScriptOp, str] = {}
_opcode_instances: List[CScriptOp] = []
# Populate opcode instance table
for n in range(0xff + 1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE = OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# BIP 342 opcodes (Tapscript)
OP_CHECKSIGADD = CScriptOp(0xba)
OP_INVALIDOPCODE = CScriptOp(0xff)
OPCODE_NAMES.update({
OP_0: 'OP_0',
OP_PUSHDATA1: 'OP_PUSHDATA1',
OP_PUSHDATA2: 'OP_PUSHDATA2',
OP_PUSHDATA4: 'OP_PUSHDATA4',
OP_1NEGATE: 'OP_1NEGATE',
OP_RESERVED: 'OP_RESERVED',
OP_1: 'OP_1',
OP_2: 'OP_2',
OP_3: 'OP_3',
OP_4: 'OP_4',
OP_5: 'OP_5',
OP_6: 'OP_6',
OP_7: 'OP_7',
OP_8: 'OP_8',
OP_9: 'OP_9',
OP_10: 'OP_10',
OP_11: 'OP_11',
OP_12: 'OP_12',
OP_13: 'OP_13',
OP_14: 'OP_14',
OP_15: 'OP_15',
OP_16: 'OP_16',
OP_NOP: 'OP_NOP',
OP_VER: 'OP_VER',
OP_IF: 'OP_IF',
OP_NOTIF: 'OP_NOTIF',
OP_VERIF: 'OP_VERIF',
OP_VERNOTIF: 'OP_VERNOTIF',
OP_ELSE: 'OP_ELSE',
OP_ENDIF: 'OP_ENDIF',
OP_VERIFY: 'OP_VERIFY',
OP_RETURN: 'OP_RETURN',
OP_TOALTSTACK: 'OP_TOALTSTACK',
OP_FROMALTSTACK: 'OP_FROMALTSTACK',
OP_2DROP: 'OP_2DROP',
OP_2DUP: 'OP_2DUP',
OP_3DUP: 'OP_3DUP',
OP_2OVER: 'OP_2OVER',
OP_2ROT: 'OP_2ROT',
OP_2SWAP: 'OP_2SWAP',
OP_IFDUP: 'OP_IFDUP',
OP_DEPTH: 'OP_DEPTH',
OP_DROP: 'OP_DROP',
OP_DUP: 'OP_DUP',
OP_NIP: 'OP_NIP',
OP_OVER: 'OP_OVER',
OP_PICK: 'OP_PICK',
OP_ROLL: 'OP_ROLL',
OP_ROT: 'OP_ROT',
OP_SWAP: 'OP_SWAP',
OP_TUCK: 'OP_TUCK',
OP_CAT: 'OP_CAT',
OP_SUBSTR: 'OP_SUBSTR',
OP_LEFT: 'OP_LEFT',
OP_RIGHT: 'OP_RIGHT',
OP_SIZE: 'OP_SIZE',
OP_INVERT: 'OP_INVERT',
OP_AND: 'OP_AND',
OP_OR: 'OP_OR',
OP_XOR: 'OP_XOR',
OP_EQUAL: 'OP_EQUAL',
OP_EQUALVERIFY: 'OP_EQUALVERIFY',
OP_RESERVED1: 'OP_RESERVED1',
OP_RESERVED2: 'OP_RESERVED2',
OP_1ADD: 'OP_1ADD',
OP_1SUB: 'OP_1SUB',
OP_2MUL: 'OP_2MUL',
OP_2DIV: 'OP_2DIV',
OP_NEGATE: 'OP_NEGATE',
OP_ABS: 'OP_ABS',
OP_NOT: 'OP_NOT',
OP_0NOTEQUAL: 'OP_0NOTEQUAL',
OP_ADD: 'OP_ADD',
OP_SUB: 'OP_SUB',
OP_MUL: 'OP_MUL',
OP_DIV: 'OP_DIV',
OP_MOD: 'OP_MOD',
OP_LSHIFT: 'OP_LSHIFT',
OP_RSHIFT: 'OP_RSHIFT',
OP_BOOLAND: 'OP_BOOLAND',
OP_BOOLOR: 'OP_BOOLOR',
OP_NUMEQUAL: 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY: 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL: 'OP_NUMNOTEQUAL',
OP_LESSTHAN: 'OP_LESSTHAN',
OP_GREATERTHAN: 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL: 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL: 'OP_GREATERTHANOREQUAL',
OP_MIN: 'OP_MIN',
OP_MAX: 'OP_MAX',
OP_WITHIN: 'OP_WITHIN',
OP_RIPEMD160: 'OP_RIPEMD160',
OP_SHA1: 'OP_SHA1',
OP_SHA256: 'OP_SHA256',
OP_HASH160: 'OP_HASH160',
OP_HASH256: 'OP_HASH256',
OP_CODESEPARATOR: 'OP_CODESEPARATOR',
OP_CHECKSIG: 'OP_CHECKSIG',
OP_CHECKSIGVERIFY: 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG: 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY: 'OP_CHECKMULTISIGVERIFY',
OP_NOP1: 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY: 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY: 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4: 'OP_NOP4',
OP_NOP5: 'OP_NOP5',
OP_NOP6: 'OP_NOP6',
OP_NOP7: 'OP_NOP7',
OP_NOP8: 'OP_NOP8',
OP_NOP9: 'OP_NOP9',
OP_NOP10: 'OP_NOP10',
OP_CHECKSIGADD: 'OP_CHECKSIGADD',
OP_INVALIDOPCODE: 'OP_INVALIDOPCODE',
})
class CScriptInvalidError(Exception):
    """Base class for CScript exceptions"""


class CScriptTruncatedPushDataError(CScriptInvalidError):
    """Invalid pushdata due to truncation.

    The partially-read push payload is kept on the ``data`` attribute so
    callers can report what was recovered.
    """
    def __init__(self, msg, data):
        super().__init__(msg)
        self.data = data
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum:
    """Script integer: minimal sign-and-magnitude, little-endian encoding,
    prefixed with a single push-size byte (used e.g. for BIP34 heights)."""
    __slots__ = ("value",)

    def __init__(self, d=0):
        self.value = d

    @staticmethod
    def encode(obj):
        """Serialize obj.value as push-size byte + minimal encoding."""
        if obj.value == 0:
            return b''
        negative = obj.value < 0
        magnitude = abs(obj.value)
        payload = bytearray()
        while magnitude:
            payload.append(magnitude & 0xff)
            magnitude >>= 8
        # If the top byte's high bit is set it would read as a sign bit, so
        # append an explicit sign byte; otherwise fold the sign in directly.
        if payload[-1] & 0x80:
            payload.append(0x80 if negative else 0)
        elif negative:
            payload[-1] |= 0x80
        return bytes([len(payload)]) + payload

    @staticmethod
    def decode(vch):
        """Inverse of encode(); assumes a valid push size and minimal form."""
        payload = vch[1:]
        if not payload:
            return 0
        result = 0
        for shift, byte in enumerate(payload):
            result |= int(byte) << 8 * shift
        if payload[-1] >= 0x80:
            # Clear the sign bit and negate the remaining magnitude.
            result &= (2**(len(payload) * 8) - 1) >> 1
            result = -result
        return result
class CScript(bytes):
    """Serialized script
    A bytes subclass, so you can use this directly whenever bytes are accepted.
    Note that this means that indexing does *not* work - you'll get an index by
    byte rather than opcode. This format was chosen for efficiency so that the
    general case would not require creating a lot of little CScriptOP objects.
    iter(script) however does iterate by opcode.
    """
    __slots__ = ()
    @classmethod
    def __coerce_instance(cls, other):
        # Coerce other into bytes: opcodes pass through verbatim, integers
        # and byte blobs become the appropriate (minimal) push operation.
        if isinstance(other, CScriptOp):
            other = bytes([other])
        elif isinstance(other, CScriptNum):
            if (other.value == 0):
                other = bytes([CScriptOp(OP_0)])
            else:
                other = CScriptNum.encode(other)
        elif isinstance(other, int):
            if 0 <= other <= 16:
                other = bytes([CScriptOp.encode_op_n(other)])
            elif other == -1:
                other = bytes([OP_1NEGATE])
            else:
                other = CScriptOp.encode_op_pushdata(bn2vch(other))
        elif isinstance(other, (bytes, bytearray)):
            other = CScriptOp.encode_op_pushdata(other)
        return other
    def __add__(self, other):
        # add makes no sense for a CScript()
        raise NotImplementedError
    def join(self, iterable):
        # join makes no sense for a CScript()
        raise NotImplementedError
    def __new__(cls, value=b''):
        if isinstance(value, bytes) or isinstance(value, bytearray):
            return super().__new__(cls, value)
        else:
            # Any other iterable: coerce each element and concatenate.
            def coerce_iterable(iterable):
                for instance in iterable:
                    yield cls.__coerce_instance(instance)
            # Annoyingly on both python2 and python3 bytes.join() always
            # returns a bytes instance even when subclassed.
            return super().__new__(cls, b''.join(coerce_iterable(value)))
    def raw_iter(self):
        """Raw iteration
        Yields tuples of (opcode, data, sop_idx) so that the different possible
        PUSHDATA encodings can be accurately distinguished, as well as
        determining the exact opcode byte indexes. (sop_idx)
        """
        i = 0
        while i < len(self):
            sop_idx = i
            opcode = self[i]
            i += 1
            if opcode > OP_PUSHDATA4:
                # Non-push opcode: no immediate data.
                yield (opcode, None, sop_idx)
            else:
                # Push opcode: determine how many bytes of data follow.
                datasize = None
                pushdata_type = None
                if opcode < OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA(%d)' % opcode
                    datasize = opcode
                elif opcode == OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA1'
                    if i >= len(self):
                        raise CScriptInvalidError('PUSHDATA1: missing data length')
                    datasize = self[i]
                    i += 1
                elif opcode == OP_PUSHDATA2:
                    pushdata_type = 'PUSHDATA2'
                    if i + 1 >= len(self):
                        raise CScriptInvalidError('PUSHDATA2: missing data length')
                    datasize = self[i] + (self[i + 1] << 8)
                    i += 2
                elif opcode == OP_PUSHDATA4:
                    pushdata_type = 'PUSHDATA4'
                    if i + 3 >= len(self):
                        raise CScriptInvalidError('PUSHDATA4: missing data length')
                    datasize = self[i] + (self[i + 1] << 8) + (self[i + 2] << 16) + (self[i + 3] << 24)
                    i += 4
                else:
                    assert False # shouldn't happen
                data = bytes(self[i:i + datasize])
                # Check for truncation
                if len(data) < datasize:
                    raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
                i += datasize
                yield (opcode, data, sop_idx)
    def __iter__(self):
        """'Cooked' iteration
        Returns either a CScriptOP instance, an integer, or bytes, as
        appropriate.
        See raw_iter() if you need to distinguish the different possible
        PUSHDATA encodings.
        """
        for (opcode, data, sop_idx) in self.raw_iter():
            if data is not None:
                yield data
            else:
                opcode = CScriptOp(opcode)
                if opcode.is_small_int():
                    yield opcode.decode_op_n()
                else:
                    yield CScriptOp(opcode)
    def __repr__(self):
        def _repr(o):
            if isinstance(o, bytes):
                return "x('%s')" % o.hex()
            else:
                return repr(o)
        ops = []
        i = iter(self)
        while True:
            op = None
            try:
                op = _repr(next(i))
            except CScriptTruncatedPushDataError as err:
                # Show whatever data was recovered alongside the error.
                op = '%s...<ERROR: %s>' % (_repr(err.data), err)
                break
            except CScriptInvalidError as err:
                op = '<ERROR: %s>' % err
                break
            except StopIteration:
                break
            finally:
                if op is not None:
                    ops.append(op)
        return "CScript([%s])" % ', '.join(ops)
    def GetSigOpCount(self, fAccurate):
        """Get the SigOp count.
        fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
        Note that this is consensus-critical.
        """
        n = 0
        lastOpcode = OP_INVALIDOPCODE
        for (opcode, data, sop_idx) in self.raw_iter():
            if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
                n += 1
            elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
                if fAccurate and (OP_1 <= lastOpcode <= OP_16):
                    # Fixed: decode the preceding OP_N (the key count), as in
                    # Satoshi's DecodeOP_N(lastOpcode). The old code called
                    # decode_op_n() on `opcode`, which is both the wrong
                    # variable and a plain int yielded by raw_iter (no such
                    # method) - the accurate path always raised.
                    n += CScriptOp(lastOpcode).decode_op_n()
                else:
                    n += 20
            lastOpcode = opcode
        return n
SIGHASH_DEFAULT = 0 # Taproot-only default, semantics same as SIGHASH_ALL
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
    """Consensus critical, see FindAndDelete() in Satoshi codebase

    Returns a new CScript equal to `script` with every occurrence of the
    byte sequence `sig` that starts on an opcode boundary removed.
    """
    r = b''
    last_sop_idx = sop_idx = 0
    # skip carries "the segment starting at last_sop_idx matched sig" into
    # the next iteration; it starts True so nothing precedes the first check.
    skip = True
    for (opcode, data, sop_idx) in script.raw_iter():
        if not skip:
            r += script[last_sop_idx:sop_idx]
        last_sop_idx = sop_idx
        if script[sop_idx:sop_idx + len(sig)] == sig:
            skip = True
        else:
            skip = False
    # Flush the trailing segment unless it matched.
    if not skip:
        r += script[last_sop_idx:]
    return CScript(r)
def LegacySignatureHash(script, txTo, inIdx, hashtype):
    """Consensus-correct SignatureHash
    Returns (hash, err) to precisely match the consensus-critical behavior of
    the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
    """
    # uint256(1) in little-endian byte order: the value historically hashed
    # (instead of raising) for the out-of-range cases below.
    HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    if inIdx >= len(txTo.vin):
        return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
    # Work on a copy; blank all scriptSigs, then place the (codeseparator-
    # stripped) script into the signed input.
    txtmp = CTransaction(txTo)
    for txin in txtmp.vin:
        txin.scriptSig = b''
    txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
    if (hashtype & 0x1f) == SIGHASH_NONE:
        # No outputs are signed; other inputs' sequences are zeroed.
        txtmp.vout = []
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    elif (hashtype & 0x1f) == SIGHASH_SINGLE:
        # Only the matching output is signed (this branch reproduces the
        # SIGHASH_SINGLE bug when outIdx >= len(vout)).
        outIdx = inIdx
        if outIdx >= len(txtmp.vout):
            return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
        tmp = txtmp.vout[outIdx]
        txtmp.vout = []
        for _ in range(outIdx):
            txtmp.vout.append(CTxOut(-1))
        txtmp.vout.append(tmp)
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    if hashtype & SIGHASH_ANYONECANPAY:
        # Only the signed input itself is committed to.
        tmp = txtmp.vin[inIdx]
        txtmp.vin = []
        txtmp.vin.append(tmp)
    s = txtmp.serialize_without_witness()
    s += struct.pack(b"<I", hashtype)
    hash = hash256(s)
    return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitV0SignatureHash(script, txTo, inIdx, hashtype, amount):
    """BIP143 signature hash for version-0 witness programs.

    `script` is the scriptCode, `amount` the value of the spent output.
    Corresponds to sigversion == 1 in EvalScript.
    """
    hashPrevouts = 0
    hashSequence = 0
    hashOutputs = 0
    if not (hashtype & SIGHASH_ANYONECANPAY):
        serialize_prevouts = bytes()
        for i in txTo.vin:
            serialize_prevouts += i.prevout.serialize()
        hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
    if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_sequence = bytes()
        for i in txTo.vin:
            serialize_sequence += struct.pack("<I", i.nSequence)
        hashSequence = uint256_from_str(hash256(serialize_sequence))
    if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_outputs = bytes()
        for o in txTo.vout:
            serialize_outputs += o.serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
        # SIGHASH_SINGLE commits only to the matching output.
        serialize_outputs = txTo.vout[inIdx].serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    # Assemble the preimage in the exact BIP143 field order.
    ss = bytes()
    ss += struct.pack("<i", txTo.nVersion)
    ss += ser_uint256(hashPrevouts)
    ss += ser_uint256(hashSequence)
    ss += txTo.vin[inIdx].prevout.serialize()
    ss += ser_string(script)
    ss += struct.pack("<q", amount)
    ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
    ss += ser_uint256(hashOutputs)
    ss += struct.pack("<i", txTo.nLockTime)
    ss += struct.pack("<I", hashtype)
    return hash256(ss)
class TestFrameworkScript(unittest.TestCase):
    """Self-tests for the script-number helpers in this module."""

    def test_bn2vch(self):
        # (value, expected byte list) pairs covering zero, sign folding,
        # the extra sign byte, and multi-byte magnitudes.
        cases = [
            (0, []),
            (1, [0x01]),
            (-1, [0x81]),
            (0x7F, [0x7F]),
            (-0x7F, [0xFF]),
            (0x80, [0x80, 0x00]),
            (-0x80, [0x80, 0x80]),
            (0xFF, [0xFF, 0x00]),
            (-0xFF, [0xFF, 0x80]),
            (0x100, [0x00, 0x01]),
            (-0x100, [0x00, 0x81]),
            (0x7FFF, [0xFF, 0x7F]),
            (-0x8000, [0x00, 0x80, 0x80]),
            (-0x7FFFFF, [0xFF, 0xFF, 0xFF]),
            (0x80000000, [0x00, 0x00, 0x00, 0x80, 0x00]),
            (-0x80000000, [0x00, 0x00, 0x00, 0x80, 0x80]),
            (0xFFFFFFFF, [0xFF, 0xFF, 0xFF, 0xFF, 0x00]),
            (123456789, [0x15, 0xCD, 0x5B, 0x07]),
            (-54321, [0x31, 0xD4, 0x80]),
        ]
        for value, expected in cases:
            self.assertEqual(bn2vch(value), bytes(expected))

    def test_cscriptnum_encoding(self):
        # round-trip negative and multi-byte CScriptNums
        values = [0, 1, -1, -2, 127, 128, -255, 256, (1 << 15) - 1, -(1 << 16), (1 << 24) - 1, (1 << 31), 1 - (1 << 32), 1 << 40, 1500, -1500]
        for value in values:
            self.assertEqual(CScriptNum.decode(CScriptNum.encode(CScriptNum(value))), value)
def TaprootSignatureHash(txTo, spent_utxos, hash_type, input_index = 0, scriptpath = False, script = CScript(), codeseparator_pos = -1, annex = None, leaf_ver = LEAF_VERSION_TAPSCRIPT):
    """BIP341/BIP342 (Taproot/Tapscript) signature hash.

    `spent_utxos` must list the output being spent by every input of txTo.
    Set scriptpath=True (with `script`) for script-path spends.
    """
    assert (len(txTo.vin) == len(spent_utxos))
    assert (input_index < len(txTo.vin))
    out_type = SIGHASH_ALL if hash_type == 0 else hash_type & 3
    in_type = hash_type & SIGHASH_ANYONECANPAY
    spk = spent_utxos[input_index].scriptPubKey
    ss = bytes([0, hash_type]) # epoch, hash_type
    ss += struct.pack("<i", txTo.nVersion)
    ss += struct.pack("<I", txTo.nLockTime)
    if in_type != SIGHASH_ANYONECANPAY:
        # Commit to all prevouts, amounts, scriptPubKeys and sequences.
        ss += sha256(b"".join(i.prevout.serialize() for i in txTo.vin))
        ss += sha256(b"".join(struct.pack("<q", u.nValue) for u in spent_utxos))
        ss += sha256(b"".join(ser_string(u.scriptPubKey) for u in spent_utxos))
        ss += sha256(b"".join(struct.pack("<I", i.nSequence) for i in txTo.vin))
    if out_type == SIGHASH_ALL:
        ss += sha256(b"".join(o.serialize() for o in txTo.vout))
    # spend_type bits: 1 = annex present, 2 = script-path spend.
    spend_type = 0
    if annex is not None:
        spend_type |= 1
    if (scriptpath):
        spend_type |= 2
    ss += bytes([spend_type])
    if in_type == SIGHASH_ANYONECANPAY:
        # Per-input commitment replaces the aggregate hashes above.
        ss += txTo.vin[input_index].prevout.serialize()
        ss += struct.pack("<q", spent_utxos[input_index].nValue)
        ss += ser_string(spk)
        ss += struct.pack("<I", txTo.vin[input_index].nSequence)
    else:
        ss += struct.pack("<I", input_index)
    if (spend_type & 1):
        ss += sha256(ser_string(annex))
    if out_type == SIGHASH_SINGLE:
        if input_index < len(txTo.vout):
            ss += sha256(txTo.vout[input_index].serialize())
        else:
            ss += bytes(0 for _ in range(32))
    if (scriptpath):
        # Script-path extension: tapleaf hash, key version byte, codesep pos.
        ss += TaggedHash("TapLeaf", bytes([leaf_ver]) + ser_string(script))
        ss += bytes([0])
        ss += struct.pack("<i", codeseparator_pos)
    # Sanity-check the preimage length against the BIP341 field layout.
    assert len(ss) == 175 - (in_type == SIGHASH_ANYONECANPAY) * 49 - (out_type != SIGHASH_ALL and out_type != SIGHASH_SINGLE) * 32 + (annex is not None) * 32 + scriptpath * 37
    return TaggedHash("TapSighash", ss)
def taproot_tree_helper(scripts):
    """Recursively build a Taproot Merkle tree from the `scripts` structure
    accepted by taproot_construct().

    Returns (leaves, hash) where leaves is a list of
    (name, version, script, merklebranch) tuples and hash is the root of
    this subtree.
    """
    if len(scripts) == 0:
        return ([], bytes())
    if len(scripts) == 1:
        # One entry: treat as a leaf
        script = scripts[0]
        assert(not callable(script))
        if isinstance(script, list):
            return taproot_tree_helper(script)
        assert(isinstance(script, tuple))
        version = LEAF_VERSION_TAPSCRIPT
        name = script[0]
        code = script[1]
        if len(script) == 3:
            version = script[2]
        # Leaf versions must be even (low bit reserved).
        assert version & 1 == 0
        assert isinstance(code, bytes)
        h = TaggedHash("TapLeaf", bytes([version]) + ser_string(code))
        if name is None:
            # Anonymous leaf: contributes to the root but is not tracked.
            return ([], h)
        return ([(name, version, code, bytes())], h)
    elif len(scripts) == 2 and callable(scripts[1]):
        # Two entries, and the right one is a function
        left, left_h = taproot_tree_helper(scripts[0:1])
        right_h = scripts[1](left_h)
        left = [(name, version, script, control + right_h) for name, version, script, control in left]
        right = []
    else:
        # Two or more entries: descend into each side
        split_pos = len(scripts) // 2
        left, left_h = taproot_tree_helper(scripts[0:split_pos])
        right, right_h = taproot_tree_helper(scripts[split_pos:])
        # Extend each side's Merkle branches with the sibling subtree hash.
        left = [(name, version, script, control + right_h) for name, version, script, control in left]
        right = [(name, version, script, control + left_h) for name, version, script, control in right]
    # Branch hash is over the lexicographically sorted pair of child hashes.
    if right_h < left_h:
        right_h, left_h = left_h, right_h
    h = TaggedHash("TapBranch", left_h + right_h)
    return (left + right, h)
# A TaprootInfo object has the following fields:
# - scriptPubKey: the scriptPubKey (witness v1 CScript)
# - internal_pubkey: the internal pubkey (32 bytes)
# - negflag: whether the pubkey in the scriptPubKey was negated from internal_pubkey+tweak*G (bool).
# - tweak: the tweak (32 bytes)
# - leaves: a dict of name -> TaprootLeafInfo objects for all known leaves
TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,internal_pubkey,negflag,tweak,leaves")
# A TaprootLeafInfo object has the following fields:
# - script: the leaf script (CScript or bytes)
# - version: the leaf version (0xc0 for BIP342 tapscript)
# - merklebranch: the merkle branch to use for this leaf (32*N bytes)
TaprootLeafInfo = namedtuple("TaprootLeafInfo", "script,version,merklebranch")
def taproot_construct(pubkey, scripts=None):
    """Construct a tree of Taproot spending conditions
    pubkey: a 32-byte xonly pubkey for the internal pubkey (bytes)
    scripts: a list of items; each item is either:
    - a (name, CScript or bytes, leaf version) tuple
    - a (name, CScript or bytes) tuple (defaulting to leaf version 0xc0)
    - another list of items (with the same structure)
    - a list of two items; the first of which is an item itself, and the
    second is a function. The function takes as input the Merkle root of the
    first item, and produces a (fictitious) partner to hash with.
    Returns: a TaprootInfo object
    """
    if scripts is None:
        scripts = []
    ret, h = taproot_tree_helper(scripts)
    # BIP341 output key: internal key tweaked by H(TapTweak, pubkey || root).
    tweak = TaggedHash("TapTweak", pubkey + h)
    tweaked, negated = tweak_add_pubkey(pubkey, tweak)
    leaves = dict((name, TaprootLeafInfo(script, version, merklebranch)) for name, version, script, merklebranch in ret)
    # negated + 0 coerces the bool flag to an int for the namedtuple field.
    return TaprootInfo(CScript([OP_1, tweaked]), pubkey, negated + 0, tweak, leaves)
def is_op_success(o):
    """Return True if opcode byte value `o` is an OP_SUCCESSx opcode
    (BIP 342): an opcode that makes a tapscript unconditionally valid.

    Rewritten from a single unreadable or-chain into explicit ranges;
    the accepted set is unchanged.
    """
    return (
        o in (0x50, 0x62, 0x89, 0x8a, 0x8d, 0x8e)
        or 0x7e <= o <= 0x81
        or 0x83 <= o <= 0x86
        or 0x95 <= o <= 0x99
        or 0xbb <= o <= 0xfe
    )
|
fujicoin/fujicoin
|
test/functional/test_framework/script.py
|
Python
|
mit
| 28,869
|
# The MIT License (MIT)
# Copyright (c) 2016 Chris Webb
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .psql import PsqlBaseWindowCommand, set_status
class PsqlConfigCommand(PsqlBaseWindowCommand):
    """Sublime Text window command that stores psql settings for the window."""
    def description(self):
        # Shown by Sublime Text in menus / the command palette.
        return 'Configures the current window\'s settings based on the inputs.'
    def run(self, edit, *args, **kwargs):
        # Merge positional/keyword inputs into a single settings dict and
        # confirm via the status bar.
        self.settings = dict(*args, **kwargs)
        set_status('PostgreSQL configuration updated.')
|
chriswebb/sublime-psql-tools
|
psql_config.py
|
Python
|
mit
| 1,463
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated migration: create AnalyzeDataOrder, unique per
    (user, uploaded_filename)."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('disease', '0008_auto_20150814_0740'),
    ]
    operations = [
        migrations.CreateModel(
            name='AnalyzeDataOrder',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, verbose_name='ID', auto_created=True)),
                ('uploaded_filename', models.CharField(max_length=256)),
                # status defaults to False (presumably "not yet analyzed").
                ('status', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # One order per (user, filename) pair.
        migrations.AlterUniqueTogether(
            name='analyzedataorder',
            unique_together=set([('user', 'uploaded_filename')]),
        ),
    ]
|
jiivan/genoomy
|
genoome/disease/migrations/0009_auto_20150821_1638.py
|
Python
|
mit
| 945
|
import os
import signal
def exit_code_str(exception_name, command, exit_code):
    """String representation for an InvocationError, with exit code
    NOTE: this might also be used by plugin tests (tox-venv at the time of writing),
    so some coordination is needed if this is ever moved or a different solution for this hack
    is found.
    NOTE: this is a separate function because pytest-mock `spy` does not work on Exceptions
    We can use neither a class method nor a static because of https://bugs.python.org/issue23078.
    Even a normal method failed with "TypeError: descriptor '__getattribute__' requires a
    'BaseException' object but received a 'type'".
    """
    message = "{} for command {}".format(exception_name, command)
    if exit_code is None:
        return message
    message += " (exited with code {:d})".format(exit_code)
    # On POSIX, codes above 128 conventionally mean "killed by signal
    # (code - 128)"; append a hint naming the signal when we can.
    if os.name == "posix" and exit_code > 128:
        signal_names = {
            num: name for name, num in vars(signal).items() if name.startswith("SIG")
        }
        signal_number = exit_code - 128
        signal_name = signal_names.get(signal_number)
        if signal_name:
            message += (
                "\nNote: this might indicate a fatal error signal "
                "({:d} - 128 = {:d}: {})".format(signal_number + 128, signal_number, signal_name)
            )
    return message
class Error(Exception):
    """Root of the tox exception hierarchy; renders as 'ClassName: message'."""
    def __str__(self):
        return "{}: {}".format(type(self).__name__, self.args[0])
class MissingSubstitution(Error):
    """Placeholder for debugging configurations.

    ``name`` is presumably the substitution key that could not be resolved
    (hedged: callers outside this view confirm the exact semantics).
    """
    # Fixed: this docstring used to sit *below* FLAG as a stray no-op string
    # expression, leaving __doc__ unset; moved to the proper position.
    # Flag value other code/plugins can compare against.
    FLAG = "TOX_MISSING_SUBSTITUTION"

    def __init__(self, name):
        # NOTE(review): super().__init__ is deliberately not called here
        # (preserved); str() via Error.__str__ would IndexError on args.
        self.name = name
# Concrete error categories; all inherit the "ClassName: message" __str__
# from Error.
class ConfigError(Error):
    """Error in tox configuration."""
class UnsupportedInterpreter(Error):
    """Signals an unsupported Interpreter."""
class InterpreterNotFound(Error):
    """Signals that an interpreter could not be found."""
class InvocationError(Error):
    """An error while invoking a script.

    ``exit_code`` is ``None`` when no exit status is available.
    """
    def __init__(self, command, exit_code=None):
        # Fixed: was ``super(Error, self).__init__(...)``, which skips Error
        # in the MRO. Harmless only because Error defines no __init__;
        # initialize through the normal MRO instead.
        super(InvocationError, self).__init__(command, exit_code)
        self.command = command
        self.exit_code = exit_code
    def __str__(self):
        return exit_code_str(self.__class__.__name__, self.command, self.exit_code)
class MissingFile(Error):
    """A required file could not be found (docstring was a copy-paste of
    InvocationError's; corrected)."""
class MissingDirectory(Error):
    """A directory did not exist."""
class MissingDependency(Error):
    """A dependency could not be found or determined."""
class MissingRequirement(Error):
    """A requirement defined in :config:`require` is not met."""
class MinVersionError(Error):
    """The installed tox version is lower than requested minversion."""
    def __init__(self, message):
        # Keep the message on the instance for callers that inspect it
        # directly, in addition to args[0] via the base class.
        self.message = message
        super(MinVersionError, self).__init__(message)
|
Avira/tox
|
src/tox/exception.py
|
Python
|
mit
| 2,795
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Squid Bot documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 13 08:47:18 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import django
from shutil import copyfile
sys.path.insert(0, os.path.abspath('../..'))
# Read the Docs builds have no checked-in environment file; copy the example
# into place so the "import web.environment" below succeeds there.
on_rtd = os.getenv('READTHEDOCS') == 'True'
if on_rtd:
    copyfile('../../web/environment_example.py', '../../web/environment.py')
import web.environment
# Initialize Django so autodoc can import project modules that touch the ORM.
os.environ['DJANGO_SETTINGS_MODULE'] = 'web.settings'
django.setup()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Squid Bot'
copyright = '2016, Squid-Bot'
author = 'bsquidwrd'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'SquidBotdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'SquidBot.tex', 'Squid Bot Documentation',
     'Squid-Bot', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'squidbot', 'Squid Bot Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'SquidBot', 'Squid Bot Documentation',
     author, 'SquidBot', 'A bot with many talents.',
     'Miscellaneous'),
]
|
bsquidwrd/Squid-Bot
|
docs/source/conf.py
|
Python
|
mit
| 5,099
|
# Given a m x n matrix, if an element is 0, set its entire row and column to 0. Do it in place.
# click to show follow up.
# Follow up:
# Did you use extra space?
# A straight forward solution using O(mn) space is probably a bad idea.
# A simple improvement uses O(m + n) space, but still not the best solution.
# Could you devise a constant space solution?
class Solution:
    # @param {integer[][]} matrix
    # @return {void} Do not return anything, modify matrix in-place instead.
    def setZeroes(self, matrix):
        """Zero every row and column containing a 0, using O(1) extra space.

        The first row and first column are reused as marker storage; two
        booleans remember whether they must themselves be cleared at the end.
        """
        # Guard the empty matrix: the original indexed matrix[0] and raised.
        if not matrix or not matrix[0]:
            return
        rows, cols = len(matrix), len(matrix[0])
        first_row_zero = first_col_zero = False
        # Pass 1: record zeroes into the marker row/column. (range instead of
        # the Python-2-only xrange; iteration behavior is identical on both.)
        for i in range(rows):
            for j in range(cols):
                if matrix[i][j] == 0:
                    matrix[i][0] = matrix[0][j] = 0
                    if i == 0:
                        first_row_zero = True
                    if j == 0:
                        first_col_zero = True
        # Pass 2: clear interior cells flagged by the markers.
        for j in range(1, cols):
            if matrix[0][j] == 0:
                for i in range(1, rows):
                    matrix[i][j] = 0
        for i in range(1, rows):
            if matrix[i][0] == 0:
                for j in range(1, cols):
                    matrix[i][j] = 0
        # Pass 3: clear the marker row/column themselves if needed.
        if first_row_zero:
            for j in range(cols):
                matrix[0][j] = 0
        if first_col_zero:
            for i in range(rows):
                matrix[i][0] = 0
        # for test
        # return matrix
|
abawchen/leetcode
|
solutions/073_set_matrix_zeroes.py
|
Python
|
mit
| 1,406
|
# -*- coding: utf-8 -*-
"""
MapTool.py - base class for map tool
======================================================================
AUTHOR: Wei Wan, Purdue University
EMAIL: rcac-help@purdue.edu
Copyright (c) 2016 Purdue University
See the file "license.terms" for information on usage and
redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
======================================================================
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from gabbs.controls.MapControlProperty import *
from gabbs.controls.MapToolAction import MapToolFeatureAction
from gabbs.controls.MapToolSelect import(MapToolSelectSingle,
MapToolSelectRectangle,
MapToolSelectPolygon,
MapToolSelectFreehand,
MapToolSelectRadius)
import gabbs.controls.MapToolSelectUtils as MapToolSelectUtils
import gabbs.resources_rc
from gabbs.MapUtils import iface, debug_trace
class MapTool(QObject):
    """Builds pan/zoom/select/feature-action buttons for the map toolbar and
    wires each QAction to the corresponding QGIS map tool on the canvas."""
    def __init__(self):
        QObject.__init__(self)
        # Canvas and main window come from the module-level iface singleton.
        self.canvas = iface.mapCanvas
        self.window = iface.mainWindow
        self.mapToolBar = None
    def addMapToolPan(self, size, style):
        """Add a pan button to the map toolbar.

        size/style are accepted for symmetry with the other add* methods;
        every size value currently maps to the same single PAN tool.
        """
        self.mapToolBar = iface.mapToolBar
        if size == mapControlStyle.SMALL:
            tools = ['PAN']
        elif size == mapControlStyle.LARGE:
            tools = ['PAN']
        else:
            tools = ['PAN']
        mapToolButtons = []
        for tool in tools:
            button = mapToolButton()
            if tool == "PAN":
                button.name = "PAN"
                button.action = QAction(QIcon(':/gabbs/resources/img/pan.png'),QString("Pan"), self.window)
                button.mapFunction = self.pan
                button.checkable = True
            mapToolButtons.append(button)
        for button in mapToolButtons:
            if button.checkable is True:
                button.action.setCheckable(True)
            # Connect the trigger signal to a slot.)
            button.action.triggered.connect(button.mapFunction)
            if button.name == "PAN":
                self.toolPan = QgsMapToolPan(self.canvas)
                self.toolPan.setAction(button.action)
            # Add button to toolbar
            self.mapToolBar.addAction(button.action)
    def addMapToolZoom(self, size, style, option):
        """Add zoom buttons to the map toolbar as a drop-down menu.

        option: list of tool keywords used when size is CUSTOM.
        NOTE(review): style is unconditionally overridden to DROPDOWN_MENU
        just below, so the HORIZONTAL_BAR branches later in this method are
        unreachable -- confirm whether that is intentional.
        """
        self.mapToolBar = iface.mapToolBar
        style = mapControlStyle.DROPDOWN_MENU
        if size == zoomControlStyle.SMALL:
            tools = ['ZOOMIN', 'ZOOMOUT']
        elif size == zoomControlStyle.LARGE:
            tools = ['ZOOMIN', 'ZOOMOUT', 'ZOOMHOME', 'ZOOMFULL', 'ZOOMLAYER']
        elif size == zoomControlStyle.CUSTOM:
            if len(option) > 0:
                tools = option
            else:
                tools = ['ZOOMIN', 'ZOOMOUT']
        else:
            tools = ['ZOOMIN', 'ZOOMOUT']
        mapToolButtons = []
        for tool in tools:
            button = mapToolButton()
            if tool == "ZOOMIN":
                button.name = "ZOOMIN"
                button.action = QAction(QIcon(':/gabbs/resources/img/zoom-in.png'), QString("Zoom In"), self.window)
                button.mapFunction = self.zoomIn
                button.checkable = True
            elif tool == "ZOOMOUT":
                button.name = "ZOOMOUT"
                button.action = QAction(QIcon(':/gabbs/resources/img/zoom-out.png'), QString("Zoom Out"), self.window)
                button.mapFunction = self.zoomOut
                button.checkable = True
            elif tool == "ZOOMHOME":
                button.name = "ZOOMHOME"
                button.action = QAction(QIcon(':/gabbs/resources/img/zoom-home.png'), QString("Zoom Home"),self.window)
                button.mapFunction = self.zoomHome
            elif tool == "ZOOMFULL":
                button.name = "ZOOMFULL"
                button.action = QAction(QIcon(':/gabbs/resources/img/zoom-full.png'), QString("Zoom Full"),self.window)
                button.mapFunction = self.zoomFull
            elif tool == "ZOOMLAYER":
                button.name = "ZOOMLAYER"
                button.action = QAction(QIcon(':/gabbs/resources/img/zoom-layer.png'),QString("Zoom To Layer"), self.window)
                button.mapFunction = self.zoomLayer
            mapToolButtons.append(button)
        if style == mapControlStyle.DROPDOWN_MENU:
            self.popupMenu = QMenu(self.window)
        for button in mapToolButtons:
            if button.checkable is True:
                button.action.setCheckable(True)
            # Connect the trigger signal to a slot.)
            button.action.triggered.connect(button.mapFunction)
            # NOTE(review): "PAN" never appears in the zoom tool list above,
            # so this first branch looks vestigial -- confirm.
            if button.name == "PAN":
                self.toolPan = QgsMapToolPan(self.canvas)
                self.toolPan.setAction(button.action)
            elif button.name == "ZOOMIN":
                self.toolZoomIn = QgsMapToolZoom(self.canvas, False) # false = in
                self.toolZoomIn.setAction(button.action)
            elif button.name == "ZOOMOUT":
                self.toolZoomOut = QgsMapToolZoom(self.canvas, True) # true = out
                self.toolZoomOut.setAction(button.action)
            elif button.name == "ZOOMHOME":
                pass
            elif button.name == "ZOOMFULL":
                pass
            elif button.name == "ZOOMLAYER":
                pass
            # Add button to toolbar
            if style == mapControlStyle.DROPDOWN_MENU:
                self.popupMenu.addAction(button.action)
            elif style == mapControlStyle.HORIZONTAL_BAR:
                self.mapToolBar.addAction(button.action)
            else:
                self.mapToolBar.addAction(button.action)
        if style == mapControlStyle.DROPDOWN_MENU:
            # Wrap the menu in a tool button; the first tool is the default.
            self.toolButton = QToolButton()
            self.toolButton.setMenu(self.popupMenu)
            self.toolButton.setDefaultAction(mapToolButtons[0].action)
            self.toolButton.setPopupMode(QToolButton.InstantPopup)
            self.mapToolBar.addWidget(self.toolButton)
    def addMapToolSelect(self, size, style, option):
        """Add feature-selection buttons (single/rectangle/polygon/freehand/
        radius) to the map toolbar, grouped per the given style.

        option: list of tool keywords used when size is CUSTOM.
        """
        self.mapToolBar = iface.mapToolBar
        if size == selectControlStyle.SMALL:
            tools = ['RECTANGLE']
        elif size == selectControlStyle.LARGE:
            tools = ["SINGLE", 'RECTANGLE', 'POLYGON', 'FREEHAND', 'RADIUS']
        elif size == selectControlStyle.CUSTOM:
            if len(option) > 0:
                tools = option
            else:
                tools = ['RECTANGLE']
        else:
            tools = ['RECTANGLE']
        mapToolButtons = []
        for tool in tools:
            button = mapToolButton()
            button.checkable = True
            if tool == "SINGLE":
                button.name = "SINGLE"
                button.action = QAction(QIcon(':/gabbs/resources/img/select.png'),QString("Select"), self.window)
                button.mapFunction = self.selectSingle
            elif tool == "RECTANGLE":
                button.name = "RECTANGLE"
                button.action = QAction(QIcon(':/gabbs/resources/img/select-rectangle.png'), QString("Select Rectangle"), self.window)
                button.mapFunction = self.selectRectangle
            elif tool == "POLYGON":
                button.name = "POLYGON"
                button.action = QAction(QIcon(':/gabbs/resources/img/select-polygon.png'), QString("Select Polygon"), self.window)
                button.mapFunction = self.selectPolygon
            elif tool == "FREEHAND":
                button.name = "FREEHAND"
                button.action = QAction(QIcon(':/gabbs/resources/img/select-freehand.png'), QString("Select Freehand"), self.window)
                button.mapFunction = self.selectFreehand
            elif tool == "RADIUS":
                button.name = "RADIUS"
                button.action = QAction(QIcon(':/gabbs/resources/img/select-radius.png'), QString("Select Radius"), self.window)
                button.mapFunction = self.selectRadius
            mapToolButtons.append(button)
        if style == mapControlStyle.DROPDOWN_MENU:
            self.popupMenu = QMenu(self.window)
        for button in mapToolButtons:
            if button.checkable is True:
                button.action.setCheckable(True)
            # Connect the trigger signal to a slot
            button.action.triggered.connect(button.mapFunction)
            if button.name == "SINGLE":
                self.toolSelect = MapToolSelectSingle(iface.mapCanvas)
                self.toolSelect.setAction(button.action)
            elif button.name == "RECTANGLE":
                self.toolSelectRectangle = MapToolSelectRectangle(iface.mapCanvas)
                self.toolSelectRectangle.setAction(button.action)
            elif button.name == "POLYGON":
                self.toolSelectPolygon = MapToolSelectPolygon(iface.mapCanvas)
                self.toolSelectPolygon.setAction(button.action)
            elif button.name == "FREEHAND":
                self.toolSelectFreehand = MapToolSelectFreehand(iface.mapCanvas)
                self.toolSelectFreehand.setAction(button.action)
            elif button.name == "RADIUS":
                self.toolSelectRadius = MapToolSelectRadius(iface.mapCanvas)
                self.toolSelectRadius.setAction(button.action)
            # Add button to toolbar
            if style == mapControlStyle.DROPDOWN_MENU:
                self.popupMenu.addAction(button.action)
            elif style == mapControlStyle.HORIZONTAL_BAR:
                self.mapToolBar.addAction(button.action)
            else:
                self.mapToolBar.addAction(button.action)
        if style == mapControlStyle.DROPDOWN_MENU:
            self.toolButton = QToolButton()
            self.toolButton.setMenu(self.popupMenu)
            self.toolButton.setDefaultAction(mapToolButtons[0].action)
            self.toolButton.setPopupMode(QToolButton.InstantPopup)
            self.mapToolBar.addWidget(self.toolButton)
    def addMapToolAction(self, size, style):
        """Add the feature-action button; size/style are currently unused."""
        self.mapToolBar = iface.mapToolBar
        tools = ['FEATURE_ACTION']
        mapToolButtons = []
        for tool in tools:
            button = mapToolButton()
            if tool == "FEATURE_ACTION":
                button.name = "FEATURE_ACTION"
                button.action = QAction(QIcon(':/gabbs/resources/img/mAction.png'),QString("Feature Action"), self.window)
                button.mapFunction = self.featureAction
                button.checkable = True
            mapToolButtons.append(button)
        for button in mapToolButtons:
            if button.checkable is True:
                button.action.setCheckable(True)
            # Connect the trigger signal to a slot.)
            button.action.triggered.connect(button.mapFunction)
            if button.name == "FEATURE_ACTION":
                self.toolFeatureAction = MapToolFeatureAction(self.canvas)
                self.toolFeatureAction.setAction(button.action)
            # Add button to toolbar
            self.mapToolBar.addAction(button.action)
    def pan(self):
        """Activate the pan tool on the canvas."""
        self.canvas.setMapTool(self.toolPan)
    def zoomIn(self):
        """Activate the zoom-in tool on the canvas."""
        self.canvas.setMapTool(self.toolZoomIn)
    def zoomOut(self):
        """Activate the zoom-out tool on the canvas."""
        self.canvas.setMapTool(self.toolZoomOut)
    def zoomHome(self):
        """Return the canvas to the configured home scale and center point."""
        # Scale the map
        if iface.mapZoomScale:
            iface.mapCanvas.zoomScale(iface.mapZoomScale)
        # Center the map
        if iface.mapCenterPoint:
            iface.mapCanvas.setCenter(iface.mapCenterPoint)
    def zoomFull(self):
        """Zoom to the full extent of all layers."""
        self.canvas.zoomToFullExtent()
    def zoomLayer(self):
        """Zoom to the current layer's extent, reprojecting if needed."""
        # TO-DO NOT WORKING
        lyr = self.canvas.currentLayer()
        lyrCrs = lyr.crs()
        if lyrCrs != iface.coordRefSys:
            coordTrans = QgsCoordinateTransform(lyrCrs, iface.coordRefSys)
            extent = coordTrans.transform(lyr.extent(), QgsCoordinateTransform.ForwardTransform)
        # NOTE(review): when the CRSs already match, `extent` is never
        # assigned and the setExtent call below raises -- likely part of
        # the TO-DO above.
        #extent.scale( 1.05 )
        self.canvas.setExtent(extent)
    def selectSingle(self):
        """Activate single-feature selection."""
        self.canvas.setMapTool(self.toolSelect)
    def selectRectangle(self):
        """Activate rectangle selection."""
        self.canvas.setMapTool(self.toolSelectRectangle)
    def selectPolygon(self):
        """Activate polygon selection."""
        self.canvas.setMapTool(self.toolSelectPolygon)
    def selectFreehand(self):
        """Activate freehand selection."""
        self.canvas.setMapTool(self.toolSelectFreehand)
    def selectRadius(self):
        """Activate radius selection."""
        self.canvas.setMapTool(self.toolSelectRadius)
    def featureAction(self):
        """Activate the feature-action tool."""
        self.canvas.setMapTool(self.toolFeatureAction)
class mapToolButton(QObject):
    """Plain record describing one toolbar button: its QAction, the handler
    it triggers, and whether the action is checkable."""
    def __init__(self):
        QObject.__init__(self)
        # Appears unused in this module; icons are attached to the QAction.
        self.icon = None
        # Keyword matched against tool names (e.g. "PAN", "ZOOMIN").
        self.name = " "
        # The QAction added to the toolbar or drop-down menu.
        self.action = None
        # Appears unused in this module; tools are stored on MapTool itself.
        self.mapTool = None
        # Slot invoked when the action is triggered.
        self.mapFunction = None
        # Whether the action toggles (stays pressed) when selected.
        self.checkable = False
|
waneric/PyMapLib
|
src/gabbs/controls/MapTool.py
|
Python
|
mit
| 12,950
|
from ..benchmark import run
from lib import sink
import config
import os
import numpy as np
import math
from lib.geo.point import Point
def get_matrix_dimensions():
    """Return the (nx, ny) grid resolution used for the parameter sweep."""
    return (25, 25)
def execute():
    """Run one benchmark per grid point: Z over [0, 1], lock_time over [1, 60]."""
    config.count_hubs = 1
    config.sink_dir = '%s/sink' % os.path.dirname(__file__)
    grid_x, grid_y = get_matrix_dimensions()
    z_values = np.linspace(0, 1, grid_x)
    lock_times = np.linspace(1, 60, grid_y)
    run.init()
    for z in z_values:
        config.Z = z
        for lock_time in lock_times:
            config.lock_time = lock_time
            run.single_run()
def plot():
    """Configure axis metadata and render the multivariate result matrix."""
    from ..multivariate import plot as mv_plot
    config.sink_dir = '%s/sink' % os.path.dirname(__file__)
    # Axis labels are LaTeX; columns name the swept config variables.
    config.axis_x = {'name': r'$L$', 'column': 'config_lock_time'}
    config.axis_y = {'name': r'$Z$', 'column': 'config_Z'}
    config.output_nx, config.output_ny = get_matrix_dimensions()
    mv_plot.run()
|
mauzeh/formation-flight
|
runs/multihub/L_Z_1hub/run.py
|
Python
|
mit
| 986
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the getchaintips RPC.
- introduce a network split
- work on chains of different lengths
- join the network together again
- verify that getchaintips now returns two chain tips.
"""
from test_framework.test_framework import nealcoinTestFramework
from test_framework.util import assert_equal
class GetChainTipsTest(nealcoinTestFramework):
    """Exercise getchaintips across a network split and subsequent rejoin."""

    def __init__(self):
        super().__init__()
        self.num_nodes = 4
        self.setup_clean_chain = False

    def run_test(self):
        # Before the split there is exactly one active tip at height 200.
        tips = self.nodes[0].getchaintips()
        assert_equal(len(tips), 1)
        assert_equal(tips[0]['branchlen'], 0)
        assert_equal(tips[0]['height'], 200)
        assert_equal(tips[0]['status'], 'active')
        # Split the network and build two chains of different lengths.
        self.split_network()
        self.nodes[0].generate(10)
        self.nodes[2].generate(20)
        self.sync_all()
        # First half sees only its own (shorter) chain.
        tips = self.nodes[1].getchaintips()
        assert_equal(len(tips), 1)
        short_tip = tips[0]
        assert_equal(short_tip['branchlen'], 0)
        assert_equal(short_tip['height'], 210)
        assert_equal(short_tip['status'], 'active')
        # Second half sees only its own (longer) chain.
        tips = self.nodes[3].getchaintips()
        assert_equal(len(tips), 1)
        long_tip = tips[0]
        assert_equal(long_tip['branchlen'], 0)
        assert_equal(long_tip['height'], 220)
        assert_equal(long_tip['status'], 'active')
        # Join the network halves and check that we now have two tips
        # (at least at the nodes that previously had the short chain).
        self.join_network()
        tips = self.nodes[0].getchaintips()
        assert_equal(len(tips), 2)
        assert_equal(tips[0], long_tip)
        assert_equal(tips[1]['branchlen'], 10)
        assert_equal(tips[1]['status'], 'valid-fork')
        # After normalizing branch fields, the fork must equal the old tip.
        tips[1]['branchlen'] = 0
        tips[1]['status'] = 'active'
        assert_equal(tips[1], short_tip)
if __name__ == '__main__':
    GetChainTipsTest().main()
|
appop/bitcoin
|
qa/rpc-tests/getchaintips.py
|
Python
|
mit
| 2,209
|
from pymongo import MongoClient
from corrdb.common.core import setup_app
from flask.ext.testing import TestCase
import time
import twill
class MongodbTest(TestCase):
    """Integration smoke test verifying that the MongoDB backing store is up."""
    def create_app(self):
        # Probe the live server: if something already answers on 5200, run
        # this test app on the alternate port 5210; otherwise use 5200.
        # NOTE(review): the bare except also hides setup_app failures that
        # have nothing to do with the port probe -- consider narrowing.
        try:
            browser = twill.get_browser()
            browser.go("http://localhost:5200/")
            app = setup_app(__name__, 'corrdb.tests.integrate')
            app.config['LIVESERVER_PORT'] = 5210
            app.config['TESTING'] = True
            app.config['MONGODB_SETTINGS'] = {'db': 'corr-integrate','host': 'localhost','port': 27017}
        except:
            app = setup_app(__name__, 'corrdb.tests.integrate')
            app.config['LIVESERVER_PORT'] = 5200
            app.config['TESTING'] = True
            app.config['MONGODB_SETTINGS'] = {'db': 'corr-integrate','host': 'localhost','port': 27017}
        return app
    def setUp(self):
        # Direct pymongo handles to the collections used by the app models.
        self.client = MongoClient('localhost', 27017)
        self.db = self.client['corr-integrate']
        self.user_collection = self.db['usermodel']
        self.project_collection = self.db['projectmodel']
        self.record_collection = self.db['recordmodel']
        # Put some dummy things in the db.
        print "Supposed to setup the testcase."
        print "Which probably means to push some testing records in the database."
    def test_Mongodb(self):
        print "This is a test to check that the api endpoints are working properly."
        # time.sleep(30)
        # Hitting mongod's HTTP port directly; any 200/201 means it is alive.
        browser = twill.get_browser()
        browser.go("http://localhost:27017/")
        self.assertTrue(browser.get_code() in (200, 201))
    def tearDown(self):
        print "Supposed to tear down the testcase."
        print "Which most likely means to clear the database of all records."
|
wd15/corr
|
corr-db/corrdb/tests/test_mongodb.py
|
Python
|
mit
| 1,750
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 17 20:36:34 2013
This file calls functions from the ACS C library wrapper: it streams a sine
trajectory to one axis as a multi-point move, samples position/velocity
until the move completes, and plots commanded vs. actual position.
@author: Pete
"""
from __future__ import division, print_function
import numpy as np
import matplotlib.pyplot as plt
import acsc
import time
# Controller axis numbers by rig name.
axes = {'y':0, 'z':1, 'turbine':4, 'tow':5}
# Motion parameters (controller units); axis 5 is the tow axis above.
axis = 5
acc = 100
flags = 0
vel = 1
target = 2
dt = 0.01
hc = acsc.OpenCommDirect()
if hc == acsc.INVALID:
    print("Cannot connect to controller, error", acsc.GetLastError())
else:
    acsc.Enable(hc, axis)
    acsc.SetVelocity(hc, axis, vel)
    acsc.SetAcceleration(hc, axis, acc)
    acsc.SetDeceleration(hc, axis, acc)
    pvec = []
    tvec = []
    vvec = []
    # Commanded trajectory: sine wave sampled every dt over three periods.
    t = np.arange(0, 6*np.pi, dt)
    x = np.sin(2*t)
    acsc.MultiPoint(hc, None, axis, 0)
    for n in range(len(x)):
        acsc.AddPoint(hc, axis, x[n])
    acsc.EndSequence(hc, axis)
    # Poll the axis until the move finishes, logging position and velocity.
    # NOTE(review): this rebinds `vel` (the commanded velocity above) to the
    # measured value -- harmless here, but confusing; confirm intent.
    while acsc.GetAxisState(hc, axis) != "stopped":
        position = acsc.GetRPosition(hc, axis, acsc.SYNCHRONOUS)
        vel = acsc.GetRVelocity(hc, axis, acsc.SYNCHRONOUS)
        pvec.append(position)
        tvec.append(time.time())
        vvec.append(vel)
        print(position)
        time.sleep(dt*2)
    pvec = np.asarray(pvec)
    tvec = np.asarray(tvec) - tvec[0]
    print("Generating plot")
    plt.close('all')
    plt.plot(tvec, pvec)
    # NOTE(review): plt.hold was deprecated and later removed in newer
    # matplotlib releases -- confirm the targeted matplotlib version.
    plt.hold(True)
    plt.plot(t, x, '--k')
    acsc.CloseComm(hc)
|
petebachant/ACSpy
|
examples/acsc_mpoint_test.py
|
Python
|
mit
| 1,462
|
import json
import re
from urllib.request import urlopen
from urllib.error import HTTPError
import psycopg2
from bs4 import BeautifulSoup
from psycopg2 import IntegrityError
from psycopg2.extras import Json
class Scraper:
    def __init__(self, db, user, password, api_key, min_congress, max_congress,
                 host='localhost', update_stewart_meta=False):
        """
        GPO scraper class, which also handles database setup.

        Connects to the given PostgreSQL database, creating the ``members``
        and ``hearings`` tables on first use, and records which hearing URLs
        have already been scraped so repeat runs can resume.
        """
        self.con = psycopg2.connect('dbname={} user={} password={} host={}'.format(db, user, password, host))
        self.cur = self.con.cursor(cursor_factory=psycopg2.extras.DictCursor)
        self._execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ")
        table_names = [t[0] for t in self.cur.fetchall()]
        if len(set(table_names)) == 0:
            # Fresh database: create the full schema.
            self._execute("""
                CREATE TABLE members(
                  id integer PRIMARY KEY,
                  metadata json,
                  committee_membership json);
                CREATE TABLE hearings(
                  id text PRIMARY KEY,
                  transcript text,
                  congress integer,
                  session integer,
                  chamber text,
                  date date,
                  committees text[],
                  subcommittees text[],
                  uri text,
                  url text,
                  sudoc text,
                  number text,
                  witness_meta json,
                  member_meta json,
                  parsed json);
                """)
        elif set(table_names) != {'members', 'hearings'}:
            raise ValueError(""" Improperly configured postgresql database given! Please give either a blank database
                                 or one that has been previously configured by this package.
                             """)
        if update_stewart_meta:
            self._update_stewart_meta()
        self._execute('SELECT url FROM hearings;')
        self.searched = [e[0] for e in self.cur.fetchall()]
        self.api_key = api_key
        self.congresses = range(int(min_congress), int(max_congress) + 1)
    def scrape(self):
        """
        Scrape data from the GPO website. Loops through the list of results until all pages are exhausted.
        """
        print("Crawling and scraping the GPO website. As pages are scraped, page URLs will be printed in terminal. If "
              "you're running the scraper for the first time, the initial crawl will take some time.")
        results_page = 'https://api.govinfo.gov/collections/CHRG/1776-01-28T20%3A18%3A10Z?offset=0&pageSize=100&' + \
                       'congress={1}&api_key={0}'
        for congress in self.congresses:
            page = results_page.format(self.api_key, congress)
            while True:
                print(page)
                hearings_list = json.loads(urlopen(page).read())
                # Only fetch hearings we have not already stored.
                for hearing in hearings_list['packages']:
                    if hearing['packageLink'] not in self.searched:
                        print(hearing['packageLink'])
                        self._save_data(hearing)
                        self.searched.append(hearing['packageLink'])
                page = hearings_list['nextPage']
                if not page:
                    break
    def _extract_nav(self, url_element):
        """ Helper function - grabs all unobserved links out of a given HTML element. """
        url = 'http://www.gpo.gov' + re.search('(?<=\').*?(?=\')', url_element.get('onclick')).group(0)
        if url not in self.searched:
            page = urlopen('http://www.gpo.gov' + re.search('(?<=\').*?(?=\')', url_element.get('onclick')).group(0))
            soup = BeautifulSoup(page.read(), 'lxml')
            elements = [l for l in soup.find_all('a') if l.get('onclick') is not None]
            self.searched.append(url)
            return elements
        else:
            return []
    def _save_data(self, hearing_json):
        """ Dumps scraped text and metadata to the appropriate location in the document file structure. """
        def extract_doc_meta(meta_html):
            """
            Function to extract hearing metadata from the metadata file. Program searches through the HTML metadata and
            locates various features, and combines them into a json object.
            """
            def locate_string(key, name=False):
                """ Helper function. Checks for a unique match on a given metadata element, and returns the value. """
                elements_from_meta = meta_html.find(key)
                if elements_from_meta is not None:
                    elements = list(set(elements_from_meta))
                    if len(elements) == 1 and name is False:
                        return elements[0].string
                    elif len(elements) == 1 and name is True:
                        return elements[0].find('name').string
                    else:
                        return ''
                else:
                    return ''
            # gathering a few unusual variables
            uri = [link.string for link in meta_html.find_all('identifier') if link.get('type') == 'uri'][0]
            congress = re.search('(?<=-)[0-9]+', uri)
            if congress:
                congress = congress.group(0)
            committee_meta = meta_html.find_all('congcommittee')
            committee_names = []
            subcommittee_names = []
            # first pass, using short committee names
            for committee in committee_meta:
                if committee.find('name', type='authority-short') is not None:
                    committee_names.append(committee.find('name', type='authority-short').string)
                if committee.find('subcommittee') is not None:
                    try:
                        subcommittee = committee.find('subcommittee')
                        subcommittee_names.append(subcommittee.find('name', type='authority-short').string)
                    except AttributeError:
                        # Subcommittee has no short name element; skip it.
                        # (Narrowed from a bare except, which also swallowed
                        # KeyboardInterrupt and real bugs.)
                        pass
            # occasionally, short names are missing - fall back standard names if no short ones are found
            if len(committee_names) == 0:
                for committee in committee_meta:
                    if committee.find('name', type='authority-standard') is not None:
                        committee_names.append(committee.find('name', type='authority-standard').string)
            if meta_html.find('congserial') is not None:
                serials = meta_html.find_all('congserial')
                numbers = [serial.get('number') for serial in serials if serial.get('number') is not None]
            else:
                numbers = []
            # the main variable collection and output construction.
            meta_dictionary = {'Identifier': locate_string('recordidentifier'),
                               'Congress': congress,
                               'Session': locate_string('session'),
                               'Chamber': locate_string('chamber'),
                               'Date': locate_string('helddate'),
                               'Committees': committee_names,
                               'Subcommittees': subcommittee_names,
                               'Title': locate_string('title'),
                               'uri': uri,
                               'url': url,
                               'sudoc': locate_string('classification'),
                               'Number': numbers}
            if re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', meta_dictionary['Date']) is None:
                meta_dictionary['Date'] = None
            return meta_dictionary
        def extract_member_meta(meta_html):
            """ Function to extract member metadata from the metadata file. This information is often absent. """
            member_dictionary = {}
            member_elements = [link for link in meta_html.find_all('congmember')]
            # loop over all of the member elements in a given page, and get relevant data
            for member in member_elements:
                party = member.get('party')
                state_short = member.get('state')
                chamber = member.get('chamber')
                bio_id = member.get('bioguideid')
                name_elements = member.find_all('name')
                name_parsed = [link.string for link in name_elements if link.get('type') == 'parsed']
                if len(name_parsed) > 0:
                    name_parsed = name_parsed[0]
                    state_long = re.search('(?<= of ).*', name_parsed)
                    if state_long:
                        state_long = state_long[0]
                else:
                    name_parsed = [link.string for link in name_elements if link.get('type') == 'authority-fnf'][0]
                    state_long = ''
                member_dictionary[name_parsed] = {'Name': name_parsed,
                                                  'State_Short': state_short,
                                                  'State_Long': state_long,
                                                  'Party': party,
                                                  'Chamber': chamber,
                                                  'GPO_ID': bio_id}
            return member_dictionary
        def extract_witness_meta(meta_html):
            """ Function to extract witness metadata from the metadata file. This information is often absent. """
            witness_list = [w.string for w in meta_html.find_all('witness') if w.string is not None]
            return witness_list
        htm_page = 'https://api.govinfo.gov/packages/{1}/htm?api_key={0}'
        mods_page = 'https://api.govinfo.gov/packages/{1}/mods?api_key={0}'
        hearing_id = hearing_json['packageId']
        try:
            transcript = urlopen(htm_page.format(self.api_key, hearing_id)).read()
            transcript = re.sub('\x00', '', transcript.decode('utf8'))
        except HTTPError:
            # Some hearings have no plain-text rendition; store empty text.
            transcript = ''
        meta = urlopen(mods_page.format(self.api_key, hearing_id)).read()
        # Explicit parser (matching _extract_nav) avoids BeautifulSoup's
        # parser-guessing warning and keeps results consistent across hosts.
        meta_soup = BeautifulSoup(meta, 'lxml')
        url = hearing_json['packageLink']
        # Metadata is divided into three pieces: hearing info, member info, and witness info.
        # See functions for details on each of these metadata elements.
        hearing_meta = extract_doc_meta(meta_soup)
        witness_meta = extract_witness_meta(meta_soup)
        member_meta = extract_member_meta(meta_soup)
        try:
            self._execute('INSERT INTO hearings VALUES (' + ','.join(['%s'] * 14) + ')',
                          (hearing_meta['Identifier'],
                           transcript,
                           hearing_meta['Congress'],
                           hearing_meta['Session'],
                           hearing_meta['Chamber'],
                           hearing_meta['Date'],
                           hearing_meta['Committees'],
                           hearing_meta['Subcommittees'],
                           hearing_meta['uri'],
                           hearing_meta['url'],
                           hearing_meta['sudoc'],
                           hearing_meta['Number'],
                           Json(witness_meta),
                           Json(member_meta)))
        except IntegrityError:
            print('Duplicate key. Link not included.')
            self.con.rollback()
    def _update_stewart_meta(self):
        """
        Generate the member table. The member table lists party seniority, majority status, leadership,
        committee membership, congress, and state. All member data are drawn from Stewart's committee assignments data
        (assumed to be saved as CSV files), which are available at the link below.
        http://web.mit.edu/17.251/www/data_page.html
        """
        import csv
        def update(inputs, table, chamber):
            """
            Helper function, which updates a given member table with metadata from Stewart's metadata. Given data from
            a csv object, the function interprets that file and adds the data to a json output. See Stewart's data and
            codebook for descriptions of the variables.
            """
            def update_meta(meta_entry, datum):
                # Add datum to the de-duplicated, blank-free metadata list.
                meta_entry.append(datum)
                meta_entry = [e for e in list(set(meta_entry)) if e != '']
                return meta_entry
            for row in inputs:
                name = str(row[3].lower())
                name = name.translate(str.maketrans(dict.fromkeys('!"#$%&\'()*+-./:;<=>?[\\]_`{|}~')))
                congress = row[0].lower()
                committee_code = row[1]
                member_id = row[2]
                majority = row[4].lower()
                party_seniority = row[5].lower()
                leadership = row[9]
                committee_name = row[15]
                state = row[18]
                # Stewart party codes: 100 = Democrat, 200 = Republican.
                if row[6] == '100':
                    party = 'D'
                elif row[6] == '200':
                    party = 'R'
                else:
                    party = 'I'
                entry = {'Party Seniority': party_seniority, 'Majority': majority, 'Leadership': leadership,
                         'Chamber': chamber, 'Party': party, 'State': state, 'Committee Name': committee_name}
                if committee_code != '' and member_id != '':
                    if member_id in table:
                        member_meta = table[member_id]['Metadata']
                        member_membership = table[member_id]['Membership']
                        member_meta['Name'] = update_meta(member_meta['Name'], name)
                        member_meta['State'] = update_meta(member_meta['State'], state)
                        member_meta['Chamber'] = update_meta(member_meta['Chamber'], chamber)
                        member_meta['Party'] = update_meta(member_meta['Party'], party)
                        member_meta['Committee'] = update_meta(member_meta['Committee'], committee_name)
                        if congress in table[member_id]['Membership']:
                            member_membership[congress][committee_code] = entry
                        else:
                            member_membership[congress] = {committee_code: entry}
                    else:
                        table[member_id] = {'Metadata': {'Name': [name],
                                                         'State': [state],
                                                         'Chamber': [chamber],
                                                         'Party': [party],
                                                         'Committee': [committee_name]},
                                            'Membership': {congress: {committee_code: entry}}}
        self._execute('DELETE FROM members;')
        member_table = {}
        # Plain input(): the previous eval(input(...)) was a 2to3 artifact of
        # py2's raw_input and would have *executed* whatever the user typed.
        house_path = input('Path to Stewart\'s House committee membership data (as csv): ')
        senate_path = input('Path to Stewart\'s Senate committee membership data (as csv): ')
        # Loop through the house and senate assignment files, and save the output.
        with open(house_path, 'r', encoding='ascii', errors='ignore') as f:
            house_inputs = list(csv.reader(f))[2:]
        with open(senate_path, 'r', encoding='ascii', errors='ignore') as f:
            senate_inputs = list(csv.reader(f))[2:]
        update(house_inputs, member_table, 'HOUSE')
        update(senate_inputs, member_table, 'SENATE')
        for k, v in list(member_table.items()):
            self._execute('INSERT INTO members VALUES (%s, %s, %s)', (k, Json(v['Metadata']), Json(v['Membership'])),
                          errors='strict')
    def _execute(self, cmd, data=None, errors='strict'):
        """ Wrapper function for pyscopg2 commands. """
        if errors not in ['strict', 'ignore']:
            raise ValueError("""errors argument must be \'strict\' (raise exception on bad command)
                                or \'ignore\' (return None on bad command). '""")
        self.cur = self.con.cursor()
        if errors == 'ignore':
            try:
                self.cur.execute(cmd, data)
            except Exception:
                # Narrowed from a bare except: still best-effort, but no
                # longer swallows KeyboardInterrupt/SystemExit.
                self.con.rollback()
        elif errors == 'strict':
            self.cur.execute(cmd, data)
        self.con.commit()
|
rbshaffer/gpo_tools
|
build/lib/gpo_tools/scrape.py
|
Python
|
mit
| 17,070
|
from geyser.tests.permissions import *
from geyser.tests.models import *
from geyser.tests.query import *
from geyser.tests.managers import *
from geyser.tests.views import *
|
MidwestCommunications/django-geyser
|
geyser/tests/__init__.py
|
Python
|
mit
| 175
|
"""User objects."""
from collections import namedtuple
import enum
import logging
logger = logging.getLogger(__name__)
DEFAULT_NAME = 'Unknown'
UserID = namedtuple('UserID', ['chat_id', 'gaia_id'])
"""A user ID, consisting of two parts which are always identical."""
NameType = enum.IntEnum('NameType', dict(DEFAULT=0, NUMERIC=1, REAL=2))
"""Indicates which type of name a user has.
``DEFAULT`` indicates that only a first name is available. ``NUMERIC``
indicates that only a numeric name is available. ``REAL`` indicates that a real
full name is available.
"""
class User:
    """A chat user.
    Use :class:`.UserList` or :class:`.ConversationList` methods to get
    instances of this class.
    """
    __slots__ = ('name_type', 'full_name', 'first_name', 'id_', 'photo_url',
                 'emails', 'is_self')
    def __init__(self, user_id, full_name, first_name, photo_url, emails,
                 is_self):
        # Handle full_name or first_name being None by creating an approximate
        # first_name from the full_name, or setting both to DEFAULT_NAME.
        if not full_name:
            full_name = first_name = DEFAULT_NAME
            name_type = NameType.DEFAULT
        elif not any(c.isalpha() for c in full_name):
            # No alphabetic characters at all: treat the whole name as a
            # numeric name (e.g. a phone number like "+12125551212").
            first_name = full_name
            name_type = NameType.NUMERIC
        else:
            first_name = first_name if first_name else full_name.split()[0]
            name_type = NameType.REAL
        self.name_type = name_type
        """The user's name type (:class:`~hangups.user.NameType`)."""
        self.full_name = full_name
        """The user's full name (:class:`str`)."""
        self.first_name = first_name
        """The user's first name (:class:`str`)."""
        self.id_ = user_id
        """The user's ID (:class:`~hangups.user.UserID`)."""
        self.photo_url = photo_url
        """The user's profile photo URL (:class:`str`)."""
        self.emails = emails
        """The user's email addresses (:class:`list` of :class:`str`)."""
        self.is_self = is_self
        """Whether this user is the current user (:class:`bool`)."""
    @property
    def is_default(self):
        """Check whether only the default placeholder name is known.
        Returns:
            boolean, True if the user's name type is ``DEFAULT`` (no real or
            numeric name available), otherwise False
        """
        return self.name_type == NameType.DEFAULT
    def upgrade_name(self, user_):
        """Upgrade name type of this user.
        Google Voice participants often first appear with no name at all, and
        then get upgraded unpredictably to numbers ("+12125551212") or names.
        Args:
            user_ (~hangups.user.User): User to upgrade with.
        """
        # Only replace the name when the other user's name type is strictly
        # better (DEFAULT < NUMERIC < REAL).
        if user_.name_type > self.name_type:
            self.full_name = user_.full_name
            self.first_name = user_.first_name
            self.name_type = user_.name_type
            logger.debug('Added %s name to User "%s": %s',
                         self.name_type.name.lower(), self.full_name, self)
    @classmethod
    def from_entity(cls, entity, self_user_id):
        """Construct user from ``Entity`` message.
        Args:
            entity: ``Entity`` message.
            self_user_id (~hangups.user.UserID or None): The ID of the current
                user. If ``None``, assume ``entity`` is the current user.
        Returns:
            :class:`~hangups.user.User` object.
        """
        user_id = UserID(chat_id=entity.id.chat_id,
                         gaia_id=entity.id.gaia_id)
        return cls(user_id, entity.properties.display_name,
                   entity.properties.first_name,
                   entity.properties.photo_url,
                   entity.properties.email,
                   (self_user_id == user_id) or (self_user_id is None))
    @classmethod
    def from_conv_part_data(cls, conv_part_data, self_user_id):
        """Construct user from ``ConversationParticipantData`` message.
        Args:
            conv_part_data: ``ConversationParticipantData`` message.
            self_user_id (~hangups.user.UserID or None): The ID of the current
                user. If ``None``, assume ``conv_part_data`` is the current
                user.
        Returns:
            :class:`~hangups.user.User` object.
        """
        user_id = UserID(chat_id=conv_part_data.id.chat_id,
                         gaia_id=conv_part_data.id.gaia_id)
        # A literal 'unknown' fallback name means no name is available, so
        # pass None and let __init__ apply the DEFAULT placeholder.
        if conv_part_data.fallback_name == 'unknown':
            full_name = None
        else:
            full_name = conv_part_data.fallback_name
        return cls(user_id, full_name, None, None, [],
                   (self_user_id == user_id) or (self_user_id is None))
class UserList:
    """Maintains a list of all the users.
    Using :func:`build_user_conversation_list` to initialize this class is
    recommended.
    Args:
        client: The connected :class:`Client`.
        self_entity: ``Entity`` message for the current user.
        entities: List of known ``Entity`` messages.
        conv_parts: List of ``ConversationParticipantData`` messages. These are
            used as a fallback in case any users are missing.
    """
    user_cls = User
    """A custom class used to create new User instances
    May be a subclass of `hangups.user.User`
    """
    def __init__(self, client, self_entity, entities, conv_parts):
        self._client = client
        # Passing None as self_user_id marks this entity as the current user.
        self._self_user = self.user_cls.from_entity(self_entity, None)
        # {UserID: User}
        self._user_dict = {self._self_user.id_: self._self_user}
        # Add each entity as a new User.
        for entity in entities:
            user_ = self.user_cls.from_entity(entity, self._self_user.id_)
            self._user_dict[user_.id_] = user_
        # Add each conversation participant as a new User if we didn't already
        # add them from an entity. These don't include a real first_name, so
        # only use them as a fallback.
        for participant in conv_parts:
            self._add_user_from_conv_part(participant)
        logger.info('%s initialized with %s user(s)',
                    self.__class__.__name__, len(self._user_dict))
        # Keep the list updated as new conversation state arrives.
        self._client.on_state_update.add_observer(self._on_state_update)
    def get_user(self, user_id):
        """Get a user by its ID.
        Args:
            user_id (~hangups.user.UserID): The ID of the user.
        Returns:
            :class:`~hangups.user.User` with the given ID. If the ID is
            unknown, a placeholder User (with no name or emails) is returned
            instead of raising, and a warning is logged.
        """
        try:
            return self._user_dict[user_id]
        except KeyError:
            logger.warning('%s returning unknown User for UserID %s',
                           self.__class__.__name__, user_id)
            return self.user_cls(user_id, None, None, None, [], False)
    def get_all(self):
        """Get all known users.
        Returns:
            View of :class:`~hangups.user.User` instances (a dict values
            view, iterable like a list).
        """
        return self._user_dict.values()
    def _add_user_from_conv_part(self, conv_part):
        """Add or upgrade User from ConversationParticipantData."""
        user_ = self.user_cls.from_conv_part_data(
            conv_part, self._self_user.id_)
        existing = self._user_dict.get(user_.id_)
        if existing is None:
            logger.warning('Adding fallback User with %s name "%s"',
                           user_.name_type.name.lower(), user_.full_name)
            self._user_dict[user_.id_] = user_
            return user_
        else:
            # Already known; at most upgrade its name with the fallback data.
            existing.upgrade_name(user_)
            return existing
    def _on_state_update(self, state_update):
        """Receive a StateUpdate"""
        if state_update.HasField('conversation'):
            self._handle_conversation(state_update.conversation)
    def _handle_conversation(self, conversation):
        """Receive Conversation and update list of users"""
        for participant in conversation.participant_data:
            self._add_user_from_conv_part(participant)
|
das7pad/hangups
|
hangups/user.py
|
Python
|
mit
| 7,978
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
try:
from html.parser import HTMLParser # py3
except ImportError:
from HTMLParser import HTMLParser # py2
from django.forms import widgets
from django.utils.html import format_html
from django.utils.translation import ungettext_lazy, ugettext_lazy as _
from django.forms.fields import IntegerField
from django.forms.models import ModelForm
from cms.plugin_pool import plugin_pool
from djangocms_text_ckeditor.widgets import TextEditorWidget
from djangocms_text_ckeditor.utils import plugin_tags_to_user_html
from cmsplugin_cascade.fields import PartialFormField
from cmsplugin_cascade.forms import ManageChildrenFormMixin
from cmsplugin_cascade.mixins import ImagePropertyMixin
from cmsplugin_cascade.widgets import NumberInputWidget, MultipleCascadingSizeWidget
from cmsplugin_cascade.link.cms_plugins import TextLinkPlugin
from . import settings, utils
from .plugin_base import BootstrapPluginBase
from .image import ImageForm
from .picture import BootstrapPicturePlugin
class CarouselSlidesForm(ManageChildrenFormMixin, ModelForm):
    """Admin form for CarouselPlugin adding a field that controls how many
    slide children the carousel should have."""
    num_children = IntegerField(min_value=1, initial=1,
        widget=NumberInputWidget(attrs={'size': '3', 'style': 'width: 5em !important;'}),
        label=_('Slides'),
        help_text=_('Number of slides for this carousel.'),
    )
class CarouselPlugin(BootstrapPluginBase):
    """Cascade plugin rendering a Bootstrap 3 carousel container.

    Child plugins are CarouselSlidePlugin instances; their number is managed
    through the ``num_children`` form field.
    """
    name = _("Carousel")
    form = CarouselSlidesForm
    default_css_class = 'carousel'
    default_css_attributes = ('options',)
    parent_classes = ['BootstrapColumnPlugin', 'SimpleWrapperPlugin']
    render_template = 'cascade/bootstrap3/{}/carousel.html'
    default_inline_styles = {'overflow': 'hidden'}
    fields = ('num_children', 'glossary',)
    # Bootstrap's data-ride attribute makes the carousel start on page load.
    DEFAULT_CAROUSEL_ATTRIBUTES = {'data-ride': 'carousel'}
    OPTION_CHOICES = (('slide', _("Animate")), ('pause', _("Pause")), ('wrap', _("Wrap")),)
    glossary_fields = (
        PartialFormField('interval',
            NumberInputWidget(attrs={'size': '2', 'style': 'width: 4em;', 'min': '1'}),
            label=_("Interval"),
            initial=5,
            help_text=_("Change slide after this number of seconds."),
        ),
        # NOTE(review): this help_text looks copy-pasted from 'interval';
        # confirm before changing the translatable string.
        PartialFormField('options',
            widgets.CheckboxSelectMultiple(choices=OPTION_CHOICES),
            label=_('Options'),
            initial=['slide', 'wrap', 'pause'],
            help_text=_("Adjust interval for the carousel."),
        ),
        # One pixel height per configured Bootstrap breakpoint.
        PartialFormField('container_max_heights',
            MultipleCascadingSizeWidget(list(tp[0] for tp in settings.CMSPLUGIN_CASCADE['bootstrap3']['breakpoints']),
                allowed_units=['px']),
            label=_("Carousel heights"),
            initial=dict((bp[0], '{}px'.format(100 + 50 * i))
                         for i, bp in enumerate(settings.CMSPLUGIN_CASCADE['bootstrap3']['breakpoints'])),
            help_text=_("Heights of Carousel in pixels for distinct Bootstrap's breakpoints."),
        ),
        PartialFormField('resize-options',
            widgets.CheckboxSelectMultiple(choices=BootstrapPicturePlugin.RESIZE_OPTIONS),
            label=_("Resize Options"),
            help_text=_("Options to use when resizing the image."),
            initial=['upscale', 'crop', 'subject_location', 'high_resolution']
        ),
    )
    def get_form(self, request, obj=None, **kwargs):
        # Trim the height fields down to the breakpoints actually in use.
        utils.reduce_breakpoints(self, 'container_max_heights')
        return super(CarouselPlugin, self).get_form(request, obj, **kwargs)
    @classmethod
    def get_identifier(cls, obj):
        """Admin list identifier: plugin name plus its slide count."""
        identifier = super(CarouselPlugin, cls).get_identifier(obj)
        num_cols = obj.get_children().count()
        content = ungettext_lazy('with {0} slide', 'with {0} slides', num_cols).format(num_cols)
        return format_html('{0}{1}', identifier, content)
    @classmethod
    def get_css_classes(cls, obj):
        css_classes = super(CarouselPlugin, cls).get_css_classes(obj)
        if 'slide' in obj.glossary.get('options', []):
            css_classes.append('slide')
        return css_classes
    @classmethod
    def get_html_tag_attributes(cls, obj):
        attributes = super(CarouselPlugin, cls).get_html_tag_attributes(obj)
        attributes.update(cls.DEFAULT_CAROUSEL_ATTRIBUTES)
        # Bootstrap expects data-interval in milliseconds.
        attributes['data-interval'] = 1000 * int(obj.glossary.get('interval', 5))
        options = obj.glossary.get('options', [])
        attributes['data-pause'] = 'pause' in options and 'hover' or 'false'
        attributes['data-wrap'] = 'wrap' in options and 'true' or 'false'
        return attributes
    def save_model(self, request, obj, form, change):
        wanted_children = int(form.cleaned_data.get('num_children'))
        super(CarouselPlugin, self).save_model(request, obj, form, change)
        # Create or delete slide children to match the requested count.
        self.extend_children(obj, wanted_children, CarouselSlidePlugin)
    @classmethod
    def sanitize_model(cls, obj):
        sanitized = super(CarouselPlugin, cls).sanitize_model(obj)
        complete_glossary = obj.get_complete_glossary()
        # fill all invalid heights for this container to a meaningful value
        max_height = max(obj.glossary['container_max_heights'].values())
        pattern = re.compile(r'^(\d+)px$')
        for bp in complete_glossary['breakpoints']:
            if not pattern.match(obj.glossary['container_max_heights'].get(bp, '')):
                obj.glossary['container_max_heights'][bp] = max_height
        return sanitized
plugin_pool.register_plugin(CarouselPlugin)
class CarouselSlidePlugin(BootstrapPluginBase):
    """A single slide inside a CarouselPlugin: a responsive image with an
    optional rich-text caption stored in the glossary."""
    name = _("Slide")
    model_mixins = (ImagePropertyMixin,)
    form = ImageForm
    default_css_class = 'img-responsive'
    parent_classes = ['CarouselPlugin']
    raw_id_fields = ('image_file',)
    fields = ('image_file', 'glossary',)
    render_template = 'cascade/bootstrap3/carousel-slide.html'
    change_form_template = 'cascade/admin/text_plugin_change_form.html'
    # Used to unescape HTML entities in the stored caption text.
    html_parser = HTMLParser()
    def get_form(self, request, obj=None, **kwargs):
        if obj:
            caption = self.html_parser.unescape(obj.glossary.get('caption', ''))
            obj.glossary.update(caption=caption)
        # define glossary fields on the fly, because the TextEditorWidget requires the plugin_pk
        text_editor_widget = TextEditorWidget(installed_plugins=[TextLinkPlugin], pk=self.parent.pk,
            placeholder=self.parent.placeholder, plugin_language=self.parent.language)
        kwargs['glossary_fields'] = (
            PartialFormField('caption', text_editor_widget, label=_("Slide Caption"),
                help_text=_("Caption text to be laid over the backgroud image."),
            ),
        )
        return super(CarouselSlidePlugin, self).get_form(request, obj, **kwargs)
    def render(self, context, instance, placeholder):
        # image shall be rendered in a responsive context using the ``<picture>`` element
        elements = utils.get_picture_elements(context, instance)
        caption = self.html_parser.unescape(instance.glossary.get('caption', ''))
        context.update({
            'is_responsive': True,
            'instance': instance,
            'caption': plugin_tags_to_user_html(caption, context, placeholder),
            'placeholder': placeholder,
            'elements': elements,
        })
        return super(CarouselSlidePlugin, self).render(context, instance, placeholder)
    @classmethod
    def sanitize_model(cls, obj):
        sanitized = super(CarouselSlidePlugin, cls).sanitize_model(obj)
        complete_glossary = obj.get_complete_glossary()
        # Inherit the parent carousel's resize options into this slide.
        obj.glossary.update({'resize-options': complete_glossary.get('resize-options', [])})
        return sanitized
    @classmethod
    def get_identifier(cls, obj):
        """Admin identifier: the slide's image filename, or 'Empty Slide'."""
        identifier = super(CarouselSlidePlugin, cls).get_identifier(obj)
        try:
            content = obj.image.name or obj.image.original_filename
        except AttributeError:
            content = _("Empty Slide")
        return format_html('{0}{1}', identifier, content)
plugin_pool.register_plugin(CarouselSlidePlugin)
|
jtiki/djangocms-cascade
|
cmsplugin_cascade/bootstrap3/carousel.py
|
Python
|
mit
| 8,063
|
#coding:utf-8
#########################
#Copyright(c) 2014 dtysky
#########################
def EditFormat(US,UT):
    """Build the per-tag edit-format table.

    Each entry is a five-item list:
    [flag, required-args, required-defaults, optional-args, optional-defaults].
    ``US`` is accepted for interface compatibility but not used here; ``UT``
    supplies the known character names via ``UT.Args['ch']['m']``, each of
    which gets a ``ch-<name>`` entry mirroring 'chrlast' with its name
    pre-filled.
    """
    N = 'None'
    # NB: the 'sc' required-args value ('k') is a plain string, not a tuple;
    # preserved exactly as in the original table.
    table = {
        'sc': [0, ('k'), {'k': N}, ('cp', 'sc'), {'cp': N, 'sc': N}],
        'sw': [0, (), {}, ('s',), {'s': N}],
        'chrlast': [1, ('l', 't'), {'l': N, 't': N},
                    ('m', 'p', 'c', 'f', 'd'),
                    {'m': N, 'p': N, 'c': N, 'f': N, 'd': N}],
        'bg': [0, ('l', 't'), {'l': N, 't': N},
               ('m', 's', 'w'), {'m': N, 's': N, 'w': N}],
        'cg': [0, ('l', 't'), {'l': N, 't': N},
               ('m', 's'), {'m': N, 's': N}],
        'bgm': [0, (), {}, ('m',), {'m': N}],
        'movie': [0, (), {}, ('m', 'k'), {'m': N, 'k': N}],
        'sound': [0, (), {}, ('m', 'k'), {'m': N, 'k': N}],
        'date': [0, ('m',), {'m': N}, ('s',), {'s': N}],
        'vd': [0, (), {}, ('m',), {'m': N}],
        'ef': [1, ('e', 'args'), {'e': N, 'args': N}, ('c',), {'c': N}],
        'gf': [0, ('l', 't'), {'l': N, 't': N}, ('m',), {'m': N}],
        'key': [0, ('m',), {'m': N}, ('s', 'n'), {'s': N, 'n': N}],
        'mode': [0, (), {}, ('m',), {'m': N}],
        'pause': [0, (), {}, ('p',), {'p': N}],
        'view': [0, (), {}, ('m',), {'m': N}],
        'chc': [0, (), {}, ('a', 'b'), {'a': N, 'b': N}],
        'renpy': [0, (), {}, ('m',), {'m': N}],
        'test': [0, (), {}, ('m',), {'m': N}],
    }
    # One entry per character, shaped like 'chrlast' but with 'm' defaulting
    # to the character's own name.
    for character in UT.Args['ch']['m']:
        table['ch-' + character] = [
            1, ('l', 't'), {'l': N, 't': N},
            ('m', 'p', 'c', 'f', 'd'),
            {'m': character, 'p': N, 'c': N, 'f': N, 'd': N},
        ]
    return table
|
dtysky/Gal2Renpy
|
Sublime_Plugin/EditFormat.py
|
Python
|
mit
| 1,765
|
from io import BytesIO, TextIOWrapper
from uuid import uuid4
import os
import stat
from pyrsistent import pmap, pset
import attr
from filesystems import Path, common, exceptions
class _BytesIOIsTerrible(BytesIO):
def __repr__(self):
return "<BytesIOIsTerrible contents={!r}>".format(self.bytes)
def close(self):
self._hereismyvalue = self.getvalue()
super(_BytesIOIsTerrible, self).close()
@property
def bytes(self):
if self.closed:
return self._hereismyvalue
return self.getvalue()
def FS():
    """Create a fresh, empty in-memory filesystem."""
    state = _State()
    return state.FS(name="MemoryFS")
def _fs(fn):
"""
Eat the fs argument.
"""
return lambda fs, *args, **kwargs: fn(*args, **kwargs)
@attr.s(hash=True)
class _File(object):
    """
    A file.

    Each operation receives the ``path`` being operated on so the raised
    exception can name it; all directory-style operations fail.
    """
    _name = attr.ib()
    _parent = attr.ib(repr=False)
    _contents = attr.ib(factory=_BytesIOIsTerrible)
    def __getitem__(self, name):
        # Files have no children; return a placeholder node that raises
        # NotADirectory for every operation.
        return _FileChild(parent=self._parent)
    def create_directory(self, path, with_parents, allow_existing):
        raise exceptions.FileExists(path)
    def list_directory(self, path):
        raise exceptions.NotADirectory(path)
    def remove_empty_directory(self, path):
        raise exceptions.NotADirectory(path)
    def create_file(self, path):
        raise exceptions.FileExists(path)
    def open_file(self, path, mode):
        # read: hand out a snapshot of the current bytes; write: truncate by
        # swapping in a fresh buffer; otherwise (append): copy the existing
        # bytes into a new buffer so further writes extend them.
        if mode.read:
            file = _BytesIOIsTerrible(self._contents.bytes)
        elif mode.write:
            self._contents = _BytesIOIsTerrible()
            file = self._contents
        else:
            original, self._contents = self._contents, _BytesIOIsTerrible()
            self._contents.write(original.bytes)
            file = self._contents
        if mode.text:
            return TextIOWrapper(file)
        return file
    def remove_file(self, path):
        del self._parent[self._name]
    def link(self, source, to, fs, state):
        raise exceptions.FileExists(to)
    def readlink(self, path):
        raise exceptions.NotASymlink(path)
    def stat(self, path):
        # Zero-filled stat result marked as a regular file.
        return os.stat_result((stat.S_IFREG,) + (0,) * 9)
    lstat = stat
@attr.s(hash=True)
class _FileChild(object):
    """
    The attempted "child" of a file, which well, shouldn't have children.

    Since the "parent" is a file, every operation fails with NotADirectory.
    """
    _parent = attr.ib()
    def __getitem__(self, name):
        # Descending any further still yields this same impossible node.
        return self
    def create_directory(self, path, with_parents, allow_existing):
        raise exceptions.NotADirectory(path.parent())
    def list_directory(self, path):
        raise exceptions.NotADirectory(path)
    def remove_empty_directory(self, path):
        raise exceptions.NotADirectory(path)
    def create_file(self, path):
        raise exceptions.NotADirectory(path)
    def open_file(self, path, mode):
        raise exceptions.NotADirectory(path)
    def remove_file(self, path):
        raise exceptions.NotADirectory(path)
    def link(self, source, to, fs, state):
        raise exceptions.NotADirectory(to.parent())
    def readlink(self, path):
        raise exceptions.NotADirectory(path)
    def stat(self, path):
        raise exceptions.NotADirectory(path)
    lstat = stat
@attr.s(hash=True)
class _Directory(object):
    """
    A directory.

    Children live in an immutable map that is replaced wholesale on each
    mutation.
    """
    _name = attr.ib()
    _parent = attr.ib(repr=False)
    _children = attr.ib(default=pmap())
    @classmethod
    def root(cls):
        # The filesystem root is its own parent.
        root = cls(name="", parent=None)
        root._parent = root
        return root
    def __getitem__(self, name):
        # Unknown names resolve to a creatable placeholder node rather than
        # raising, so lookups can continue down a non-existent path.
        return self._children.get(
            name,
            _DirectoryChild(name=name, parent=self),
        )
    def __setitem__(self, name, node):
        self._children = self._children.set(name, node)
    def __delitem__(self, name):
        self._children = self._children.remove(name)
    def create_directory(self, path, with_parents, allow_existing):
        if allow_existing:
            return self
        raise exceptions.FileExists(path)
    def list_directory(self, path):
        return pset(self._children)
    def remove_empty_directory(self, path):
        if self._children:
            raise exceptions.DirectoryNotEmpty(path)
        del self._parent[self._name]
    def create_file(self, path):
        raise exceptions.FileExists(path)
    def open_file(self, path, mode):
        raise exceptions.IsADirectory(path)
    def remove_file(self, path):
        raise exceptions._UnlinkNonFileError(path)
    def link(self, source, to, fs, state):
        raise exceptions.FileExists(to)
    def readlink(self, path):
        raise exceptions.NotASymlink(path)
    def stat(self, path):
        # Zero-filled stat result marked as a directory.
        return os.stat_result((stat.S_IFDIR,) + (0,) * 9)
    lstat = stat
@attr.s(hash=True)
class _DirectoryChild(object):
    """
    A node that doesn't exist, but is within an existing directory.
    It therefore *could* exist if asked to create itself.
    """
    _name = attr.ib()
    _parent = attr.ib(repr=False)
    def __getitem__(self, name):
        # Anything beneath a not-yet-existing node has no existing parent.
        return _NoSuchEntry(parent=self, name=name)
    def create_directory(self, path, with_parents, allow_existing):
        directory = _Directory(
            name=self._name,
            parent=self._parent,
        )
        self._parent[self._name] = directory
        return directory
    def list_directory(self, path):
        raise exceptions.FileNotFound(path)
    def remove_empty_directory(self, path):
        raise exceptions.FileNotFound(path)
    def create_file(self, path):
        file = self._parent[self._name] = _File(
            name=self._name,
            parent=self._parent,
        )
        return file.open_file(path=path, mode=common._FileMode(activity="w"))
    def open_file(self, path, mode):
        # Reading a missing file fails; any writing mode creates it first.
        if mode.read:
            raise exceptions.FileNotFound(path)
        else:
            file = self._parent[self._name] = _File(
                name=self._name,
                parent=self._parent,
            )
            return file.open_file(path=path, mode=mode)
    def remove_file(self, path):
        raise exceptions.FileNotFound(path)
    def link(self, source, to, fs, state):
        # entry_at defers resolution of the target, so dangling links (whose
        # source doesn't exist yet) can be created.
        self._parent[self._name] = _Link(
            name=self._name,
            parent=self._parent,
            source=source,
            entry_at=lambda path=source: state[fs.realpath(path=path)],
        )
    def readlink(self, path):
        raise exceptions.FileNotFound(path)
    def stat(self, path):
        raise exceptions.FileNotFound(path)
    lstat = stat
@attr.s(hash=True)
class _Link(object):
    """
    A symbolic link; most operations are delegated to the resolved target
    via the ``entry_at`` callable.
    """
    _name = attr.ib()
    _parent = attr.ib(repr=False)
    _source = attr.ib()
    _entry_at = attr.ib(repr=False)
    def __getitem__(self, name):
        return self._entry_at()[name]
    def create_directory(self, path, with_parents, allow_existing):
        if allow_existing:
            entry = self._entry_at(path=path)
            return entry.create_directory(
                path=path,
                with_parents=with_parents,
                allow_existing=allow_existing,
            )
        raise exceptions.FileExists(path)
    def list_directory(self, path):
        return self._entry_at(path=path).list_directory(path=path)
    def remove_empty_directory(self, path):
        raise exceptions.NotADirectory(path)
    def create_file(self, path):
        raise exceptions.FileExists(path)
    def open_file(self, path, mode):
        return self._entry_at(path=path).open_file(path=path, mode=mode)
    def remove_file(self, path):
        # Removes the link itself, not its target.
        del self._parent[self._name]
    def link(self, source, to, fs, state):
        raise exceptions.FileExists(to)
    def readlink(self, path):
        return self._source
    def stat(self, path):
        # stat follows the link; lstat (below) describes the link itself.
        return self._entry_at(path=path).stat(path=path)
    def lstat(self, path):
        return os.stat_result((stat.S_IFLNK,) + (0,) * 9)
@attr.s(hash=True)
class _NoSuchEntry(object):
    """
    A non-existent node that also cannot be created alone.
    It has no existing parent. What a shame.
    """
    _name = attr.ib()
    _parent = attr.ib()
    def __getitem__(self, name):
        return _NoSuchEntry(parent=self, name=name)
    def create_directory(self, path, with_parents, allow_existing):
        # Creatable only if the missing ancestors are created too: first
        # create the parent chain, then create this directory within it.
        if with_parents:
            parent = self._parent.create_directory(
                path=path,
                with_parents=with_parents,
                allow_existing=allow_existing,
            )
            directory = parent[self._name].create_directory(
                path=path,
                with_parents=False,
                allow_existing=allow_existing,
            )
            return directory
        raise exceptions.FileNotFound(path.parent())
    def list_directory(self, path):
        raise exceptions.FileNotFound(path)
    def remove_empty_directory(self, path):
        raise exceptions.FileNotFound(path)
    def create_file(self, path):
        raise exceptions.FileNotFound(path)
    def open_file(self, path, mode):
        raise exceptions.FileNotFound(path)
    def remove_file(self, path):
        raise exceptions.FileNotFound(path)
    def link(self, source, to, fs, state):
        raise exceptions.FileNotFound(to.parent())
    def readlink(self, path):
        raise exceptions.FileNotFound(path)
    def stat(self, path):
        raise exceptions.FileNotFound(path)
    lstat = stat
@attr.s(hash=True)
class _State(object):
    """
    The state of one in-memory filesystem: a tree of nodes rooted at a
    directory. Each operation resolves the node for the given path and
    delegates to it, so per-node-type behavior lives on the node classes.
    """
    _root = attr.ib(factory=_Directory.root)
    def __getitem__(self, path):
        """
        Retrieve the Node at the given path.
        """
        node = self._root
        for segment in path.segments:
            node = node[segment]
        return node
    def FS(self, name):
        # Adapt this state to the generic filesystem interface; ``_fs``
        # discards the redundant ``fs`` argument each callback receives.
        return common.create(
            name=name,
            create_file=_fs(self.create_file),
            open_file=_fs(self.open_file),
            remove_file=_fs(self.remove_file),
            create_directory=_fs(self.create_directory),
            list_directory=_fs(self.list_directory),
            remove_empty_directory=_fs(self.remove_empty_directory),
            temporary_directory=_fs(self.temporary_directory),
            stat=_fs(self.stat),
            lstat=_fs(self.lstat),
            link=lambda fs, *args, **kwargs: self.link(*args, fs=fs, **kwargs),
            readlink=_fs(self.readlink),
        )()
    def create_directory(self, path, with_parents, allow_existing):
        self[path].create_directory(
            path=path,
            with_parents=with_parents,
            allow_existing=allow_existing,
        )
    def list_directory(self, path):
        return self[path].list_directory(path=path)
    def remove_empty_directory(self, path):
        return self[path].remove_empty_directory(path=path)
    def temporary_directory(self):
        # TODO: Maybe this isn't good enough.
        directory = Path(uuid4().hex)
        self.create_directory(
            path=directory,
            with_parents=False,
            allow_existing=False,
        )
        return directory
    def create_file(self, path):
        return self[path].create_file(path=path)
    def open_file(self, path, mode):
        mode = common._parse_mode(mode=mode)
        return self[path].open_file(path=path, mode=mode)
    def remove_file(self, path):
        self[path].remove_file(path=path)
    def link(self, source, to, fs):
        self[to].link(fs=fs, source=source, to=to, state=self)
    def readlink(self, path):
        return self[path].readlink(path=path)
    def lstat(self, path):
        return self[path].lstat(path=path)
    def stat(self, path):
        return self[path].stat(path=path)
|
Julian/Filesystems
|
filesystems/memory.py
|
Python
|
mit
| 11,660
|
#
# Naive Bayes Classifier
#
# _____________________________________________________________________
import math
class Classifier:
    """Naive Bayes classifier trained from tab-separated bucket files."""

    def __init__(self, bucketPrefix, testBucketNumber, dataFormat):
        """ a classifier will be built from files with the bucketPrefix
        excluding the file with testBucketNumber. dataFormat is a
        tab-separated string that describes how to interpret each line of
        the data files; recognised column kinds are 'num', 'attr',
        'comment' and 'class'. For example, for the iHealth data the
        format is:  "attr attr attr attr class" (tab-separated).
        """
        total = 0
        classes = {}
        # counts: category -> column -> attribute value -> occurrences
        # (used for attributes that are not numeric)
        counts = {}
        # totals / numericValues: per-category running sums and raw values
        # for each numeric column; used below to compute the mean and the
        # sample standard deviation for each attribute-class pair.
        totals = {}
        numericValues = {}
        # reading the data in from the file
        self.format = dataFormat.strip().split('\t')
        self.prior = {}
        self.conditional = {}
        # for each of the buckets numbered 1 through 10:
        for i in range(1, 11):
            # if it is not the bucket we should ignore, read in the data
            if i != testBucketNumber:
                filename = "%s-%02i" % (bucketPrefix, i)
                # ``with`` guarantees the file is closed even on error
                # (original used open/readlines/close).
                with open(filename) as f:
                    lines = f.readlines()
                for line in lines:
                    fields = line.strip().split('\t')
                    ignore = []
                    vector = []
                    nums = []
                    # BUGFIX: the original used ``i`` here, shadowing the
                    # bucket-loop variable above; renamed to ``j``.
                    for j in range(len(fields)):
                        if self.format[j] == 'num':
                            nums.append(float(fields[j]))
                        elif self.format[j] == 'attr':
                            vector.append(fields[j])
                        elif self.format[j] == 'comment':
                            ignore.append(fields[j])
                        elif self.format[j] == 'class':
                            category = fields[j]
                    # now process this instance
                    total += 1
                    classes.setdefault(category, 0)
                    counts.setdefault(category, {})
                    totals.setdefault(category, {})
                    numericValues.setdefault(category, {})
                    classes[category] += 1
                    # now process each non-numeric attribute of the instance
                    # (columns are numbered from 1)
                    col = 0
                    for columnValue in vector:
                        col += 1
                        counts[category].setdefault(col, {})
                        counts[category][col].setdefault(columnValue, 0)
                        counts[category][col][columnValue] += 1
                    # process numeric attributes
                    col = 0
                    for columnValue in nums:
                        col += 1
                        totals[category].setdefault(col, 0)
                        totals[category][col] += columnValue
                        numericValues[category].setdefault(col, [])
                        numericValues[category][col].append(columnValue)
        #
        # ok done counting. now compute probabilities
        #
        # first prior probabilities p(h)
        #
        for (category, count) in classes.items():
            self.prior[category] = count / total
        #
        # now compute conditional probabilities p(D|h)
        # (the original comment said p(h|D); the value computed is the
        # per-class likelihood of each attribute value)
        #
        for (category, columns) in counts.items():
            self.conditional.setdefault(category, {})
            for (col, valueCounts) in columns.items():
                self.conditional[category].setdefault(col, {})
                for (attrValue, count) in valueCounts.items():
                    self.conditional[category][col][attrValue] = (
                        count / classes[category])
        self.tmp = counts
        #
        # now compute mean and sample standard deviation
        #
        self.means = {}
        self.ssd = {}
        for (category, columns) in totals.items():
            self.means.setdefault(category, {})
            for (col, cTotal) in columns.items():
                self.means[category][col] = cTotal / classes[category]
        # sample standard deviation (Bessel-corrected, divides by n - 1)
        for (category, columns) in numericValues.items():
            self.ssd.setdefault(category, {})
            for (col, values) in columns.items():
                SumOfSquareDifferences = 0
                theMean = self.means[category][col]
                for value in values:
                    SumOfSquareDifferences += (value - theMean) ** 2
                # (removed the original's dead ``columns[col] = 0`` store)
                self.ssd[category][col] = math.sqrt(
                    SumOfSquareDifferences / (classes[category] - 1))
# test the code
# NOTE(review): this module-level self-test runs on import and requires the
# pimaSmall data buckets to exist relative to the working directory.
# NOTE(review): the format string below appears space-separated but the
# class splits on '\t' — confirm the whitespace here is literal tabs.
c = Classifier("pimaSmall/pimaSmall", 1, "num num num num num num num num class")
# test means computation
assert(c.means['1'][1] == 5.25)
assert(round(c.means['1'][2], 4) == 146.0556)
assert(round(c.means['0'][2], 4) == 111.9057)
# test standard deviation
assert(round(c.ssd['0'][1], 4) == 2.5469)
assert(round(c.ssd['1'][8], 4) == 10.9218)
print("Means and SSD computation appears OK")
|
applecool/AI
|
Naive Bayes/naiveBayesDensityFunctionTrainingSolution.py
|
Python
|
mit
| 5,448
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
INVALID_EMAIL_TO_MSG = 'The email to field should contain valid value'


def main():
    """Send the campaign email to the recipients stored on the incident.

    Reads the campaign email fields from the first incident's custom fields
    and dispatches them via the ``send-mail`` command; errors out when no
    recipient is set or when anything raises.
    """
    try:
        custom_fields = demisto.incidents()[0].get('CustomFields', {})
        recipient = custom_fields.get('campaignemailto')
        if not recipient:
            return_error(INVALID_EMAIL_TO_MSG)
        demisto.executeCommand(
            "send-mail",
            {
                "to": recipient,
                "subject": custom_fields.get('campaignemailsubject'),
                "body": custom_fields.get('campaignemailbody'),
            },
        )
    except Exception as ex:
        return_error(f'Failed to execute SendEmailToCampaignRecipients. Error: {str(ex)}')


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
|
VirusTotal/content
|
Packs/Campaign/Scripts/SendEmailToCampaignRecipients/SendEmailToCampaignRecipients.py
|
Python
|
mit
| 821
|
# vi: ts=8 sts=4 sw=4 et
#
# __init__.py: draco2.draw package definition
#
# This file is part of Draco2. Draco2 is free software and is made available
# under the MIT license. Consult the file "LICENSE" that is distributed
# together with this file for the exact licensing terms.
#
# Draco2 is copyright (c) 1999-2007 by the Draco2 authors. See the file
# "AUTHORS" for a complete overview.
#
# $Revision: 1187 $
from draco2.draw.exception import *
from draco2.draw.color import RGBA, xRGBA
from draco2.draw.image import Image
from draco2.draw.painter import Painter
from draco2.draw.pen import Pen
from draco2.draw.brush import Brush
from draco2.draw.font import Font, TTFont
|
geertj/draco2
|
draco2/draw/__init__.py
|
Python
|
mit
| 679
|
"""
File Adapter Module
"""
from .main import FileAdapter
|
noahziheng/freeiot
|
libfreeiot/adapters/file/__init__.py
|
Python
|
mit
| 62
|
#project euler 9
def find_triplet_prod(sum):
    """Return a*b*c for the Pythagorean triplet with a + b + c == sum.

    Prints the triplet when found; returns 0 when no triplet exists.
    (Ported from Python 2: the original relied on ``sum**2/2`` integer/float
    division and an ``a % 1 == 0`` test, which floors for odd sums and is
    float-fragile for large ones; this version uses exact integer
    arithmetic. The parameter name ``sum`` is kept for compatibility.)
    """
    s = sum
    # From a + b + c = s and a^2 + b^2 = c^2, eliminating c = s - a - b
    # gives a = (s^2 - 2*s*b) / (2*(s - b)); a candidate b is valid only
    # when that quotient is a positive integer.
    for b in range(1, s // 2):
        numerator = s * s - 2 * s * b
        denominator = 2 * (s - b)
        if numerator % denominator == 0:
            a = numerator // denominator
            c = s - a - b
            if a > 0 and a * a + b * b == c * c:
                print(a, b, c)
                return a * b * c
    return 0


print(find_triplet_prod(1000))
|
joemathai/problems
|
Project Euler/9.py
|
Python
|
mit
| 236
|
""" Script to generate a json file containing book name, number of
chapters, number of chunks """
import json
import urllib.request
import re
import os
# Directory of this script; chunk files are written beneath it.
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
# with open("catalog.json") as file:
#     DATA = json.load(file)
# Get catalog.json
URL_CAT = "https://api.unfoldingword.org/ts/txt/2/catalog.json"
response_cat = urllib.request.urlopen(URL_CAT)
DATA = json.loads(response_cat.read().decode('utf-8'))
BASE_URL = "https://api.unfoldingword.org/ts/txt/2/"
EN = "en"
OT = "ot"
NT = "nt"
RES = ["ulb"]  # NOTE(review): unused — the 'ulb' resource is hard-coded in the URL below.
OUTPUT = []  # NOTE(review): unused.
# skip obs for now, loop over all books
# (presumably 66 bible books + obs = 67 catalog entries — TODO confirm)
for x in range(0, 67):
    # gives book name and order (the books are stored out of order in the json)
    slug = DATA[x]["slug"]
    if slug == "obs":
        continue
    meta = DATA[x]["meta"][0]
    # Classify as Old or New Testament from the catalog metadata tag.
    anth = OT if meta == "bible-ot" else NT
    chunks_url = BASE_URL + slug + "/" + EN + "/ulb/chunks.json"
    outpath = os.path.join(ROOT_DIR, "chunks", anth, slug, "chunks.json")
    os.makedirs(os.path.dirname(outpath), exist_ok=True)
    urllib.request.urlretrieve(chunks_url, outpath)
|
WycliffeAssociates/translationRecorder
|
translationRecorder/app/src/scripts/download_chunks.py
|
Python
|
mit
| 1,111
|
import pytest
def test_boil_unique(porridge, password):
    """Boiling the same password twice must give different results (salting)."""
    first = porridge.boil(password)
    second = porridge.boil(password)
    assert first != second
def test_boil_invalid_password_type(porridge):
    """A non-string password must raise TypeError with a helpful message."""
    with pytest.raises(TypeError) as exception:
        porridge.boil(1)
    message = exception.value.args[0]
    assert message.startswith("'password' must be a str")
|
thusoy/porridge
|
tests/test_boil.py
|
Python
|
mit
| 317
|
from supriya.commands.Response import Response
class QueryTreeResponse(Response):
    """Response wrapping the node tree parsed from a ``/g_queryTree.reply``
    OSC message."""
    ### CLASS VARIABLES ###
    __slots__ = ("_node_id", "_query_tree_group")
    ### INITIALIZER ###
    def __init__(self, node_id=None, osc_message=None, query_tree_group=None):
        Response.__init__(self, osc_message=osc_message)
        # id of the root node of the queried tree
        self._node_id = node_id
        # parsed QueryTreeGroup hierarchy of groups and synths
        self._query_tree_group = query_tree_group
    ### SPECIAL METHODS ###
    def __str__(self):
        # Delegate to the query tree group's tree-style rendering.
        return str(self._query_tree_group)
    ### PUBLIC METHODS ###
    @classmethod
    def from_osc_message(cls, osc_message):
        """
        Create response from OSC message.
        ::
            >>> message = supriya.osc.OscMessage('/g_queryTree.reply', 0, 0, 1, 1, 2, 1001, 0, 1000, 1, 1002, 0)
            >>> supriya.commands.QueryTreeResponse.from_osc_message(message)
            QueryTreeResponse(
                node_id=0,
                query_tree_group=QueryTreeGroup(
                    children=(
                        QueryTreeGroup(
                            children=(
                                QueryTreeGroup(
                                    children=(),
                                    node_id=1001,
                                    ),
                                QueryTreeGroup(
                                    children=(
                                        QueryTreeGroup(
                                            children=(),
                                            node_id=1002,
                                            ),
                                        ),
                                    node_id=1000,
                                    ),
                                ),
                            node_id=1,
                            ),
                        ),
                    node_id=0,
                    ),
                )
        ::
            >>> print(supriya.commands.QueryTreeResponse.from_osc_message(message))
            NODE TREE 0 group
                1 group
                    1001 group
                    1000 group
                        1002 group
        """
        def recurse(contents, control_flag):
            # Consume the node id and child count from the flat message body.
            node_id = contents.pop(0)
            child_count = contents.pop(0)
            if child_count == -1:
                # A child count of -1 marks a synth rather than a group.
                controls = []
                synthdef_name = contents.pop(0)
                if control_flag:
                    # Control name/value pairs follow when the flag was set.
                    control_count = contents.pop(0)
                    for i in range(control_count):
                        control_name_or_index = contents.pop(0)
                        control_value = contents.pop(0)
                        control = supriya.commands.QueryTreeControl(
                            control_name_or_index=control_name_or_index,
                            control_value=control_value,
                        )
                        controls.append(control)
                    controls = tuple(controls)
                result = supriya.commands.QueryTreeSynth(
                    node_id=node_id, synthdef_name=synthdef_name, controls=controls
                )
            else:
                # Otherwise it is a group: recurse into each child node.
                children = []
                for i in range(child_count):
                    children.append(recurse(contents, control_flag))
                children = tuple(children)
                result = supriya.commands.QueryTreeGroup(
                    node_id=node_id, children=children
                )
            return result
        import supriya.commands
        contents = list(osc_message.contents)
        # The first message item flags whether control values are included.
        control_flag = bool(contents.pop(0))
        query_tree_group = recurse(contents, control_flag)
        response = cls(
            node_id=query_tree_group.node_id, query_tree_group=query_tree_group
        )
        return response
    def to_dict(self, flat=False):
        """
        Convert QueryTreeResponse to JSON-serializable dictionary.
        ::
            >>> query_tree_response = supriya.commands.QueryTreeResponse(
            ...     node_id=0,
            ...     query_tree_group=supriya.commands.QueryTreeGroup(
            ...         node_id=0,
            ...         children=(
            ...             supriya.commands.QueryTreeGroup(
            ...                 node_id=1,
            ...                 children=(
            ...                     supriya.commands.QueryTreeGroup(
            ...                         node_id=1002,
            ...                         children=(
            ...                             supriya.commands.QueryTreeSynth(
            ...                                 node_id=1105,
            ...                                 synthdef_name='dca557070c6b57164557041ac746fb3f',
            ...                                 controls=(
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='damping',
            ...                                         control_value=0.06623425334692,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='duration',
            ...                                         control_value=3.652155876159668,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='level',
            ...                                         control_value=0.894767701625824,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='out',
            ...                                         control_value=16.0,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='room_size',
            ...                                         control_value=0.918643176555634,
            ...                                         ),
            ...                                     ),
            ...                                 ),
            ...                             supriya.commands.QueryTreeSynth(
            ...                                 node_id=1098,
            ...                                 synthdef_name='cc754c63533fdcf412a44ef6adb1a8f0',
            ...                                 controls=(
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='duration',
            ...                                         control_value=5.701356887817383,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='level',
            ...                                         control_value=0.959683060646057,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='out',
            ...                                         control_value=16.0,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='pitch_dispersion',
            ...                                         control_value=0.040342573076487,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='pitch_shift',
            ...                                         control_value=10.517594337463379,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='time_dispersion',
            ...                                         control_value=0.666014134883881,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='window_size',
            ...                                         control_value=1.014111995697021,
            ...                                         ),
            ...                                     ),
            ...                                 ),
            ...                             supriya.commands.QueryTreeSynth(
            ...                                 node_id=1096,
            ...                                 synthdef_name='5cb6fb104ee1dc44d6b300e13112d37a',
            ...                                 controls=(
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='duration',
            ...                                         control_value=5.892660140991211,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='level',
            ...                                         control_value=0.159362614154816,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='out',
            ...                                         control_value=16.0,
            ...                                         ),
            ...                                     ),
            ...                                 ),
            ...                             supriya.commands.QueryTreeSynth(
            ...                                 node_id=1010,
            ...                                 synthdef_name='da0982184cc8fa54cf9d288a0fe1f6ca',
            ...                                 controls=(
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='out',
            ...                                         control_value=16.0,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='amplitude',
            ...                                         control_value=0.846831738948822,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='frequency',
            ...                                         control_value=1522.9603271484375,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='gate',
            ...                                         control_value=0.0,
            ...                                         ),
            ...                                     supriya.commands.QueryTreeControl(
            ...                                         control_name_or_index='pan',
            ...                                         control_value=0.733410477638245,
            ...                                         ),
            ...                                     ),
            ...                                 ),
            ...                             ),
            ...                         ),
            ...                     supriya.commands.QueryTreeSynth(
            ...                         node_id=1003,
            ...                         synthdef_name='454b69a7c505ddecc5b39762d291a5ec',
            ...                         controls=(
            ...                             supriya.commands.QueryTreeControl(
            ...                                 control_name_or_index='done_action',
            ...                                 control_value=2.0,
            ...                                 ),
            ...                             supriya.commands.QueryTreeControl(
            ...                                 control_name_or_index='fade_time',
            ...                                 control_value=0.019999999552965,
            ...                                 ),
            ...                             supriya.commands.QueryTreeControl(
            ...                                 control_name_or_index='gate',
            ...                                 control_value=1.0,
            ...                                 ),
            ...                             supriya.commands.QueryTreeControl(
            ...                                 control_name_or_index='in_',
            ...                                 control_value=16.0,
            ...                                 ),
            ...                             supriya.commands.QueryTreeControl(
            ...                                 control_name_or_index='out',
            ...                                 control_value=0.0,
            ...                                 ),
            ...                             ),
            ...                         ),
            ...                     ),
            ...                 ),
            ...             supriya.commands.QueryTreeSynth(
            ...                 node_id=1000,
            ...                 synthdef_name='72696657e1216698c415e704ea8ab9a2',
            ...                 controls=(
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_clamp_time',
            ...                         control_value=0.009999999776483,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_postgain',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_pregain',
            ...                         control_value=6.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_relax_time',
            ...                         control_value=0.100000001490116,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_slope_above',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_slope_below',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_1_threshold',
            ...                         control_value=0.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_clamp_time',
            ...                         control_value=0.009999999776483,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_postgain',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_pregain',
            ...                         control_value=3.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_relax_time',
            ...                         control_value=0.100000001490116,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_slope_above',
            ...                         control_value=0.5,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_slope_below',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_2_threshold',
            ...                         control_value=-6.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_clamp_time',
            ...                         control_value=0.009999999776483,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_postgain',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_pregain',
            ...                         control_value=-3.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_relax_time',
            ...                         control_value=0.100000001490116,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_slope_above',
            ...                         control_value=0.25,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_slope_below',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_3_threshold',
            ...                         control_value=-12.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_clamp_time',
            ...                         control_value=0.009999999776483,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_postgain',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_pregain',
            ...                         control_value=-3.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_relax_time',
            ...                         control_value=0.100000001490116,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_slope_above',
            ...                         control_value=0.125,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_slope_below',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='band_4_threshold',
            ...                         control_value=-18.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='frequency_1',
            ...                         control_value=200.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='frequency_2',
            ...                         control_value=2000.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='frequency_3',
            ...                         control_value=5000.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='in_',
            ...                         control_value=0.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='out',
            ...                         control_value=0.0,
            ...                         ),
            ...                     ),
            ...                 ),
            ...             supriya.commands.QueryTreeSynth(
            ...                 node_id=1001,
            ...                 synthdef_name='c1aa521afab5b0c0ce3d744690951649',
            ...                 controls=(
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='level',
            ...                         control_value=1.0,
            ...                         ),
            ...                     supriya.commands.QueryTreeControl(
            ...                         control_name_or_index='out',
            ...                         control_value=0.0,
            ...                         ),
            ...                     ),
            ...                 ),
            ...             ),
            ...         ),
            ...     )
        ::
            >>> import json
            >>> result = query_tree_response.to_dict()
            >>> result = json.dumps(
            ...     result,
            ...     indent=4,
            ...     separators=(',', ': '),
            ...     sort_keys=True,
            ...     )
            >>> print(result)
            {
                "server_tree": {
                    "children": [
                        {
                            "children": [
                                {
                                    "children": [
                                        {
                                            "controls": {
                                                "damping": 0.06623425334692,
                                                "duration": 3.652155876159668,
                                                "level": 0.894767701625824,
                                                "out": 16.0,
                                                "room_size": 0.918643176555634
                                            },
                                            "node_id": 1105,
                                            "synthdef": "dca557070c6b57164557041ac746fb3f"
                                        },
                                        {
                                            "controls": {
                                                "duration": 5.701356887817383,
                                                "level": 0.959683060646057,
                                                "out": 16.0,
                                                "pitch_dispersion": 0.040342573076487,
                                                "pitch_shift": 10.517594337463379,
                                                "time_dispersion": 0.666014134883881,
                                                "window_size": 1.014111995697021
                                            },
                                            "node_id": 1098,
                                            "synthdef": "cc754c63533fdcf412a44ef6adb1a8f0"
                                        },
                                        {
                                            "controls": {
                                                "duration": 5.892660140991211,
                                                "level": 0.159362614154816,
                                                "out": 16.0
                                            },
                                            "node_id": 1096,
                                            "synthdef": "5cb6fb104ee1dc44d6b300e13112d37a"
                                        },
                                        {
                                            "controls": {
                                                "amplitude": 0.846831738948822,
                                                "frequency": 1522.9603271484375,
                                                "gate": 0.0,
                                                "out": 16.0,
                                                "pan": 0.733410477638245
                                            },
                                            "node_id": 1010,
                                            "synthdef": "da0982184cc8fa54cf9d288a0fe1f6ca"
                                        }
                                    ],
                                    "node_id": 1002
                                },
                                {
                                    "controls": {
                                        "done_action": 2.0,
                                        "fade_time": 0.019999999552965,
                                        "gate": 1.0,
                                        "in_": 16.0,
                                        "out": 0.0
                                    },
                                    "node_id": 1003,
                                    "synthdef": "454b69a7c505ddecc5b39762d291a5ec"
                                }
                            ],
                            "node_id": 1
                        },
                        {
                            "controls": {
                                "band_1_clamp_time": 0.009999999776483,
                                "band_1_postgain": 1.0,
                                "band_1_pregain": 6.0,
                                "band_1_relax_time": 0.100000001490116,
                                "band_1_slope_above": 1.0,
                                "band_1_slope_below": 1.0,
                                "band_1_threshold": 0.0,
                                "band_2_clamp_time": 0.009999999776483,
                                "band_2_postgain": 1.0,
                                "band_2_pregain": 3.0,
                                "band_2_relax_time": 0.100000001490116,
                                "band_2_slope_above": 0.5,
                                "band_2_slope_below": 1.0,
                                "band_2_threshold": -6.0,
                                "band_3_clamp_time": 0.009999999776483,
                                "band_3_postgain": 1.0,
                                "band_3_pregain": -3.0,
                                "band_3_relax_time": 0.100000001490116,
                                "band_3_slope_above": 0.25,
                                "band_3_slope_below": 1.0,
                                "band_3_threshold": -12.0,
                                "band_4_clamp_time": 0.009999999776483,
                                "band_4_postgain": 1.0,
                                "band_4_pregain": -3.0,
                                "band_4_relax_time": 0.100000001490116,
                                "band_4_slope_above": 0.125,
                                "band_4_slope_below": 1.0,
                                "band_4_threshold": -18.0,
                                "frequency_1": 200.0,
                                "frequency_2": 2000.0,
                                "frequency_3": 5000.0,
                                "in_": 0.0,
                                "out": 0.0
                            },
                            "node_id": 1000,
                            "synthdef": "72696657e1216698c415e704ea8ab9a2"
                        },
                        {
                            "controls": {
                                "level": 1.0,
                                "out": 0.0
                            },
                            "node_id": 1001,
                            "synthdef": "c1aa521afab5b0c0ce3d744690951649"
                        }
                    ],
                    "node_id": 0
                }
            }
        ::
            >>> result = query_tree_response.to_dict(flat=True)
            >>> result = json.dumps(
            ...     result,
            ...     indent=4,
            ...     separators=(',', ': '),
            ...     sort_keys=True,
            ...     )
            >>> print(result)
            {
                "server_tree": [
                    {
                        "children": [
                            1,
                            1000,
                            1001
                        ],
                        "node_id": 0,
                        "parent": null
                    },
                    {
                        "children": [
                            1002,
                            1003
                        ],
                        "node_id": 1,
                        "parent": 0
                    },
                    {
                        "children": [
                            1105,
                            1098,
                            1096,
                            1010
                        ],
                        "node_id": 1002,
                        "parent": 1
                    },
                    {
                        "controls": {
                            "damping": 0.06623425334692,
                            "duration": 3.652155876159668,
                            "level": 0.894767701625824,
                            "out": 16.0,
                            "room_size": 0.918643176555634
                        },
                        "node_id": 1105,
                        "parent": 1002,
                        "synthdef": "dca557070c6b57164557041ac746fb3f"
                    },
                    {
                        "controls": {
                            "duration": 5.701356887817383,
                            "level": 0.959683060646057,
                            "out": 16.0,
                            "pitch_dispersion": 0.040342573076487,
                            "pitch_shift": 10.517594337463379,
                            "time_dispersion": 0.666014134883881,
                            "window_size": 1.014111995697021
                        },
                        "node_id": 1098,
                        "parent": 1002,
                        "synthdef": "cc754c63533fdcf412a44ef6adb1a8f0"
                    },
                    {
                        "controls": {
                            "duration": 5.892660140991211,
                            "level": 0.159362614154816,
                            "out": 16.0
                        },
                        "node_id": 1096,
                        "parent": 1002,
                        "synthdef": "5cb6fb104ee1dc44d6b300e13112d37a"
                    },
                    {
                        "controls": {
                            "amplitude": 0.846831738948822,
                            "frequency": 1522.9603271484375,
                            "gate": 0.0,
                            "out": 16.0,
                            "pan": 0.733410477638245
                        },
                        "node_id": 1010,
                        "parent": 1002,
                        "synthdef": "da0982184cc8fa54cf9d288a0fe1f6ca"
                    },
                    {
                        "controls": {
                            "done_action": 2.0,
                            "fade_time": 0.019999999552965,
                            "gate": 1.0,
                            "in_": 16.0,
                            "out": 0.0
                        },
                        "node_id": 1003,
                        "parent": 1,
                        "synthdef": "454b69a7c505ddecc5b39762d291a5ec"
                    },
                    {
                        "controls": {
                            "band_1_clamp_time": 0.009999999776483,
                            "band_1_postgain": 1.0,
                            "band_1_pregain": 6.0,
                            "band_1_relax_time": 0.100000001490116,
                            "band_1_slope_above": 1.0,
                            "band_1_slope_below": 1.0,
                            "band_1_threshold": 0.0,
                            "band_2_clamp_time": 0.009999999776483,
                            "band_2_postgain": 1.0,
                            "band_2_pregain": 3.0,
                            "band_2_relax_time": 0.100000001490116,
                            "band_2_slope_above": 0.5,
                            "band_2_slope_below": 1.0,
                            "band_2_threshold": -6.0,
                            "band_3_clamp_time": 0.009999999776483,
                            "band_3_postgain": 1.0,
                            "band_3_pregain": -3.0,
                            "band_3_relax_time": 0.100000001490116,
                            "band_3_slope_above": 0.25,
                            "band_3_slope_below": 1.0,
                            "band_3_threshold": -12.0,
                            "band_4_clamp_time": 0.009999999776483,
                            "band_4_postgain": 1.0,
                            "band_4_pregain": -3.0,
                            "band_4_relax_time": 0.100000001490116,
                            "band_4_slope_above": 0.125,
                            "band_4_slope_below": 1.0,
                            "band_4_threshold": -18.0,
                            "frequency_1": 200.0,
                            "frequency_2": 2000.0,
                            "frequency_3": 5000.0,
                            "in_": 0.0,
                            "out": 0.0
                        },
                        "node_id": 1000,
                        "parent": 0,
                        "synthdef": "72696657e1216698c415e704ea8ab9a2"
                    },
                    {
                        "controls": {
                            "level": 1.0,
                            "out": 0.0
                        },
                        "node_id": 1001,
                        "parent": 0,
                        "synthdef": "c1aa521afab5b0c0ce3d744690951649"
                    }
                ]
            }
        """
        def recurse(node, parent_node_id, nodes):
            # Synth dicts (marked by a "synthdef" key) are leaves; group
            # dicts are flattened and then recursed into.
            if "synthdef" in node:
                node["parent"] = parent_node_id
                nodes.append(node)
            else:
                group = {
                    "node_id": node["node_id"],
                    "parent": parent_node_id,
                    "children": [x["node_id"] for x in node["children"]],
                }
                nodes.append(group)
                for x in node["children"]:
                    recurse(x, node["node_id"], nodes)
        data = self.query_tree_group.to_dict()
        if not flat:
            return {"server_tree": data}
        nodes = []
        recurse(data, None, nodes)
        return {"server_tree": nodes}
    ### PUBLIC PROPERTIES ###
    @property
    def node_id(self):
        """The id of the root node of the queried tree."""
        return self._node_id
    @property
    def query_tree_group(self):
        """The parsed QueryTreeGroup hierarchy."""
        return self._query_tree_group
|
Pulgama/supriya
|
supriya/commands/QueryTreeResponse.py
|
Python
|
mit
| 38,279
|
from .models import Address
from rest_framework import serializers
from rest_framework_gis.serializers import GeoModelSerializer
class AddressSerializer(GeoModelSerializer):
    """Serializer for Address objects with optional field filtering.

    A comma-separated ``fields`` query parameter restricts the serialized
    output to exactly the requested fields.
    """
    formatted_address = serializers.Field(source='formatted_address')

    def __init__(self, *args, **kwargs):
        super(AddressSerializer, self).__init__(*args, **kwargs)
        requested = self.context['request'].QUERY_PARAMS.get('fields')
        if requested:
            # Drop every serializer field the client did not ask for.
            wanted = set(requested.split(','))
            for name in set(self.fields.keys()) - wanted:
                self.fields.pop(name)

    class Meta:
        model = Address
        fields = ('uprn', 'organisation_name', 'department_name',
                  'po_box_number', 'building_name', 'sub_building_name',
                  'building_number', 'thoroughfare_name',
                  'dependent_thoroughfare_name', 'dependent_locality',
                  'double_dependent_locality', 'post_town', 'postcode',
                  'postcode_type', 'formatted_address', 'point')
|
ministryofjustice/addressfinder
|
addressfinder/apps/address/serializers.py
|
Python
|
mit
| 1,182
|
import argparse
import os
import pdb
import sys
import seaborn
try:
import trilegal
except ImportError:
print('May need IsoTrack reader')
pass
import matplotlib.pylab as plt
import numpy as np
from .. import fileio
from ..eep.critical_point import Eep
seaborn.set()
__all__ = ['interp_match_grid', 'interp_mhefs']
def reformat_filename(fname, fmt='Z{:.4f}_Y{:.3f}_M{:.3f}{}'):
    """
    Common filename formats
    e.g.,
    os.system('mv {} {}'.format(fname, reformat_filename(fname))
    """
    # Metallicity lies between the 'Z' and 'Y' markers, helium between
    # 'Y' and the first 'O' (of the OV tag); underscores are separators.
    metallicity = float(fname.split('Z')[1].split('Y')[0].replace('_', ''))
    helium = float(fname.split('Y')[1].split('O')[0].replace('_', ''))
    # Mass is everything between 'M' and '.dat', minus any '.HB' tag.
    mass_str = fname.split('M')[1].split('.dat')[0].replace('.HB', '')
    suffix = fname.split('M' + mass_str)[1]
    return fmt.format(metallicity, helium, float(mass_str), suffix)
def plot_MheF(isotracks=None, labels=None, colors=None):
    """ plot the minimum initial mass for He Fusion

    Parameters
    ----------
    isotracks : list of strings, optional
        isotrack file paths; defaults to a hard coded set resolved
        against the TRILEGAL_ROOT environment variable.
    labels : list of strings, optional
        legend labels, one per isotrack.
    colors : list of strings, optional
        line colors, one per isotrack.

    Returns
    -------
    ax : matplotlib axis with one MHeF-vs-Z curve per isotrack.
    """
    if isotracks is None:
        print('WARNING: attempting to use hard coded isotracks')
        isotracks = ['isotrack/parsec/CAF09_MC_S13v3_OV0.3.dat',
                     'isotrack/parsec/CAF09_MC_S13v3_OV0.4.dat',
                     'isotrack/parsec/CAF09_MC_S13v3_OV0.5.dat',
                     'isotrack/parsec/CAF09_MC_S13v3_OV0.6.dat',
                     'isotrack/parsec/CAF09_S12D_NS_1TP.dat']
        isotracks = [os.path.join(os.environ['TRILEGAL_ROOT'], i)
                     for i in isotracks]
    if labels is None:
        labels = ['$\Lambda_c=0.3$',
                  '$\Lambda_c=0.4$',
                  '$\Lambda_c=0.5$',
                  '$\Lambda_c=0.6$',
                  '$S12D\_NS\_1TP$']
    if colors is None:
        colors = ['darkred', 'orange', 'navy', 'purple', 'k']
    fig, ax = plt.subplots()
    for i, isotrack in enumerate(isotracks):
        # NOTE(review): relies on trilegal.IsoTrack exposing .Z and .mhefs;
        # the reader is optional at import time -- confirm availability.
        isot = trilegal.IsoTrack(isotrack)
        # solid line plus point markers for each grid value
        ax.plot(isot.Z, isot.mhefs, lw=2, label=labels[i], color=colors[i])
        ax.plot(isot.Z, isot.mhefs, 'o', color=colors[i])
    ax.grid()
    ax.set_xlim(0.001, 0.0085)
    ax.set_ylim(1.55, 2.05)
    return ax
def interp_mhefs(isodirs, outfile=None):
    """
    Write the minimum initial mass for He fusion to a file, interpolating
    between isotracks.
    Parameters
    ----------
    isodirs : list of strings
        paths to directories containing parsec isotrack INT files.
        eg:
        isodirs = ['CAF09_MC_S13v3_OV0.3',
                   'CAF09_MC_S13v3_OV0.4',
                   'CAF09_MC_S13v3_OV0.5',
                   'CAF09_MC_S13v3_OV0.6',
                   'CAF09_MC_S13v3_OV0.7']
    outfile : string
        filename of output file (default 'MHeF_interp.dat')
    Returns
    -------
    outfile : string
        the filename that was written.
    """
    def pretty(ov, marr):
        """ make a %.2f string combining a float and an array """
        return ' '.join(['%.2f' % i
                         for i in np.concatenate(([ov], marr))]) + '\n'
    outfile = outfile or 'MHeF_interp.dat'
    line = ''
    mhefs = np.array([])
    # NOTE(review): ``nmhefs`` is never used below -- confirm it can go.
    nmhefs = np.array([])
    ovs = np.array([])
    zs = np.array([])
    for isodir in isodirs:
        # each INT file in each dir
        int_files = fileio.get_files(isodir, '*INT*')
        if len(int_files) == 0:
            print('no INT files found in {0:s}'.format(isodir))
            continue
        for int_file in int_files:
            # metallicity is encoded in the filename, e.g. ..._Z0.004_...
            z = float(os.path.split(int_file)[1].split('_Z')[1].split('_')[0])
            zs = np.append(zs, z)
            with open(int_file, 'r') as inp:
                lines = inp.readlines()
            # MHeF is the first mass of the INT* files issue is where the
            # mass is in the file
            if int_file.endswith('2'):
                # Leo's formatted isotrack files *INT2
                nsplits = int(lines[0])
                iline = nsplits + 3
            else:
                # Sandro's dbert/*INT files
                iline = -1
            mhefs = np.append(mhefs, float(lines[iline].split()[0]))
            # overshoot value is also encoded in the filename (_OV...)
            ov = float(os.path.split(int_file)[1].split('_OV')[1].split('_')[0])
            ovs = np.append(ovs, ov)
            # NOTE(review): ``ovs`` gains one entry per INT file but is
            # indexed per-directory below -- only consistent when every
            # file in a directory shares the same OV value; confirm.
    zs = np.unique(zs)
    # one mass for one Z one row for each OV.
    mhefs = mhefs.reshape(len(mhefs) // len(zs), len(zs))
    line += '# OV ' + ' '.join(['Z%g' % z for z in zs]) + '\n'
    # midpoint interpolation
    for i in range(len(isodirs)-1):
        intov = (ovs[i+1] + ovs[i]) / 2.
        intpd = (mhefs[i+1] + mhefs[i]) / 2.
        line += pretty(ovs[i], mhefs[i])
        line += pretty(intov, intpd)
        line += pretty(ovs[i+1], mhefs[i+1])
    with open(outfile, 'w') as outf:
        outf.write(line)
    print(('wrote %s' % outfile))
    return outfile
def interpolate_between_sets(match_dir1, match_dir2, outdir, mhef,
                             overwrite=False, plot=False,
                             truth_track_loc='', frac=2.):
    """Midpoint-interpolate MATCH-format tracks between two directories.

    For each track file name common to ``match_dir1`` and ``match_dir2``,
    write an interpolated track (element-wise ``(t1 + t2) / frac``) to
    ``outdir``. When the two tracks differ in length, either truncate
    (mass <= ``mhef``) or attach the HB track before interpolating.

    Parameters
    ----------
    match_dir1, match_dir2 : str
        directories of the two bracketing track sets.
    outdir : str
        output directory (created if missing).
    mhef : float
        minimum He-fusion mass; below it tracks are truncated, above it
        the HB track is attached.
    overwrite : bool
        overwrite existing output files.
    plot : bool
        make diagnostic plots, optionally against ``truth_track_loc``.
    truth_track_loc : str
        directory of comparison tracks to overplot, if it exists.
    frac : float
        divisor of the summed tracks (2. gives the midpoint).
    """
    def strip_m(s):
        # mass parsed from the filename's _M... token
        return float(s.split('_M')[-1].replace('.dat', '').replace('.HB', ''))
    def strip_z(s):
        # metallicity parsed from the filename's Z...Y token
        return float(s.split('Z')[-1].split('Y')[0].replace('_', ''))
    def get_names(s):
        return [os.path.split(i)[1] for i in s]
    header = 'logAge Mass logTe Mbol logg C/O'
    fileio.ensure_dir(outdir)
    # sort both sets by mass so files pair up index-by-index
    t1files = sorted(fileio.get_files(match_dir1, '*.dat'),
                     key=lambda t: strip_m(t))
    t2files = sorted(fileio.get_files(match_dir2, '*.dat'),
                     key=lambda t: strip_m(t))
    tname1s = get_names(t1files)
    tname2s = get_names(t2files)
    t1hbs = [t for t in t1files if 'HB' in t]
    t2hbs = [t for t in t2files if 'HB' in t]
    # keep only filenames present in both sets
    i2s = [i for i, t in enumerate(tname2s) if t in tname1s]
    t2files = np.array(t2files)[i2s]
    i1s = [i for i, t in enumerate(tname1s) if t in tname2s]
    t1files = np.array(t1files)[i1s]
    tname1s = get_names(t1files)
    tname2s = get_names(t2files)
    ntracks = len(t1files)
    # assert tname1s == tname2s, 'Track mismatches'
    if tname1s != tname2s:
        print('Track mismatches')
        pdb.set_trace()
    t1s = [np.loadtxt(t) for t in t1files]
    t2s = [np.loadtxt(t) for t in t2files]
    for i in range(ntracks):
        if plot:
            if os.path.isdir(truth_track_loc):
                gs1, gs2, [lax, lbax, lrax], [rax, rbax, rrax] = \
                    setup_diagplot()
            else:
                fig, (lax, rax) = plt.subplots(nrows=2)
        addedhb = False  # for plotting
        mass = strip_m(t1files[i])
        # simple mid point interpolation
        nt1s = len(t1s[i])
        nt2s = len(t2s[i])
        # most of the time both tracks are the same length
        if nt1s == nt2s:
            track = (t1s[i] + t2s[i]) / frac
        else:
            # order the two tracks (and their metadata) shortest first
            i1, i2 = np.argsort([nt1s, nt2s])
            nt1, nt2 = [nt1s, nt2s][i1], [nt1s, nt2s][i2]
            t1, t2 = [t1s[i], t2s[i]][i1], [t1s[i], t2s[i]][i2]
            tname1, tname2 = [tname1s[i], tname2s[i]][i1], [tname1s[i], tname2s[i]][i2]
            # HB files come from whichever set holds the shorter track
            thbs = t2hbs
            if tname1 == tname1s[i]:
                thbs = t1hbs
            if mass <= mhef:
                # keep the track short.
                track = (t1 + t2[:nt1]) / frac
                print('tuncating HB', mass, nt1, nt2, i)
            else:
                # add HB to track 1
                print('adding HB', mass, len(t1s[i]), len(t2s[i]))
                thb, = [t for t in thbs if 'M%.2f' % mass in t]
                thb = np.genfromtxt(thb)
                twithhb = rg_tip_heb_transition(thb, t1)
                track = (twithhb + t2) / frac
                addedhb = True
        if plot:
            _plot(track, mass, lax, rax, label='interp')
            if addedhb:
                _plot(twithhb, '', lax, rax, label='added hb')
                _plot(thb, '', lax, rax, label='hb')
            _plot(t1s[i], '', lax, rax, label='t1')
            _plot(t2s[i], '', lax, rax, label='t2')
            if os.path.isdir(truth_track_loc):
                # pick the comparison track with matching Z and mass
                z = strip_z(tname1s[i])
                tds = fileio.get_dirs(truth_track_loc)
                td, = [t for t in tds if str(z) in t]
                mstr = '{0:.2f}'.format(mass)
                if mass < 1.:
                    mstr = mstr[1:]
                truth_tracks = \
                    fileio.get_files(td, '*Z{0:.4f}*M{1:s}*'.format(z, mstr))
                if len(truth_tracks) > 0:
                    # choose whichever truth track matches the interpolated
                    # track's length (possibly after truncation)
                    if len(truth_tracks) > 1:
                        truet0 = np.loadtxt(truth_tracks[0])
                        truet1 = np.loadtxt(truth_tracks[1])
                        if len(truet0) == len(track):
                            truet = truet0
                        elif len(truet1) == len(track):
                            truet = truet1
                        elif len(truet0[:nt1s]) == len(track):
                            truet = truet0[:nt1s]
                        else:
                            pdb.set_trace()
                    else:
                        truet = np.loadtxt(truth_tracks[0])
                        if len(truet) != len(track):
                            if len(truet[:nt1s]) == len(track):
                                truet = truet[:nt1s]
                    try:
                        _plot(truet, '', lax, rax, label='truth')
                        diff_plot(truet, track, lbax, rbax, lrax, rrax)
                        rbax.set_xscale('log')
                    except:
                        print('hey!')
                        import pdb; pdb.set_trace()
            rax.legend(loc='best')
            figname = os.path.join(outdir, tname1s[i].replace('dat', 'png'))
            plt.savefig(figname)
            # print('wrote {}'.format(figname))
            plt.close()
        outfile = os.path.join(outdir, tname1s[i])
        if not os.path.isfile(outfile) or overwrite:
            # np.savetxt(outfile, track, header=header, fmt='%.8f')
            np.savetxt(outfile, track, header=header, fmt='%.8f')
            # print('wrote {}'.format(outfile))
def rg_tip_heb_transition(hb_track, track):
    """
    Attach a HB model to a PMS model.
    Done in a consistent way as in TRILEGAL. Basically, zero time goes by,
    linear connection. The idea is that no stars should fall in this region
    because there are simply no tracks calculated. If you want a track
    following a hiashi line, well, calculate one. If you're interpolating,
    you're gonna have a slight error on a time scale of 1e5 years, counting a
    star that could have been in a transition phase from RG_TIP to HE_BEG as a
    RGB star. At this point in time, a negligable error.

    Columns are (logAge, Mass, logTe, Mbol, logg, C/O), as written by
    interpolate_between_sets. WARNING: ``hb_track`` is modified in place
    (its age column is shifted onto the combined time axis).
    """
    eep = Eep()
    # number of transition points and the RG tip index in the MS track
    ntrans = eep.trans
    rg_tip = eep.nms - 1
    agei = 100.
    agef = 10 ** hb_track.T[0][0]
    # straight line in (logTe, Mbol) from the RG tip to the HB start
    te0, tef = track.T[2][rg_tip], hb_track.T[2][0]
    mbol0, mbolf = track.T[3][rg_tip], hb_track.T[3][0]
    m, b = np.polyfit([te0, tef], [mbol0, mbolf], 1)
    age = np.linspace(agei, agef, ntrans, endpoint=False)
    logte = np.linspace(te0, tef, ntrans, endpoint=False)
    Mbol = m * logte + b
    mass = np.zeros(ntrans) + track.T[1][0]
    # surface gravity from mass, Te and bolometric magnitude
    logg = -10.616 + np.log10(mass) + 4.0 * logte - (4.77 - Mbol) / 2.5
    CO = np.zeros(ntrans)
    logage = np.log10(10 ** track.T[0][rg_tip] + age)
    trans_track = np.column_stack([logage, mass, logte, Mbol, logg, CO])
    # shift the HB ages so they continue after the transition (in place!)
    hb_track.T[0] = np.log10(10 ** hb_track.T[0] + 10 ** logage[-1])
    new_track = np.concatenate((track, trans_track, hb_track))
    return new_track
def setup_diagplot():
    """Build the two 3x3 GridSpec panels used by the diagnostic plots.

    Returns the left and right GridSpec objects plus, for each panel, a
    list of [main, bottom-residual, right-residual] axes.
    """
    import matplotlib.gridspec as gridspec
    fig = plt.figure(figsize=(12, 8))

    left_grid = gridspec.GridSpec(3, 3)
    left_grid.update(left=0.1, right=0.48, wspace=0.05)
    left_main = plt.subplot(left_grid[:-1, :2])
    left_bottom = plt.subplot(left_grid[-1, :2])
    left_side = plt.subplot(left_grid[:-1, -1])
    left_bottom.set_xlabel(r'$\log\ Te$')
    left_main.set_ylabel(r'$\rm{Mbol}$')

    right_grid = gridspec.GridSpec(3, 3)
    right_grid.update(left=0.6, right=0.98, hspace=0.05)
    right_main = plt.subplot(right_grid[:-1, :2])
    right_bottom = plt.subplot(right_grid[-1, :2])
    right_side = plt.subplot(right_grid[:-1, -1])
    right_bottom.set_xlabel(r'$\rm{Age}$')

    return (left_grid, right_grid,
            [left_main, left_bottom, left_side],
            [right_main, right_bottom, right_side])
def diff_plot(track1, track2, lbax, rbax, lrax, rrax):
    """Plot residuals between two tracks on the diagnostic axes.

    Columns: 0 = logAge, 2 = logTe, 3 = Mbol.
    """
    from matplotlib.ticker import MaxNLocator
    lxdiff = track1.T[2] - track2.T[2]
    # NOTE(review): ``rxdiff`` is computed but never plotted; the age panel
    # below reuses ``lxdiff`` -- confirm whether that is intentional.
    rxdiff = 10 ** track1.T[0] - 10 ** track2.T[0]
    ydiff = track1.T[3] - track2.T[3]
    lbax.plot(track1.T[2], lxdiff, '.')
    rbax.plot(10 ** track1.T[0], lxdiff, '.')
    [ax.axhline(0.) for ax in [lbax, rbax]]
    for ax in lrax, rrax:
        ax.axvline(0.)
        ax.plot(ydiff, track1.T[3], '.')
        ax.xaxis.set_major_locator(MaxNLocator(5))
def _plot(track, mass, lax, rax, label=''):
    """Plot a track on the HRD axis (lax) and the age-Mbol axis (rax).

    Labeled tracks are drawn opaque; unlabeled ones are faded.
    """
    alpha = 1. if len(label) > 0 else 0.3
    lax.plot(track.T[2], track.T[3], alpha=alpha)
    if mass != '':
        rax.plot(10 ** track.T[0], track.T[3], alpha=alpha,
                 label='${}\ {:.2f}$'.format(label, mass))
    else:
        rax.plot(10 ** track.T[0], track.T[3], label=label,
                 alpha=alpha)
    return
def read_mhef(mhef_file):
    """Read a MHeF interpolation table written by ``interp_mhefs``.

    Returns
    -------
    data : ndarray
        the numeric table.
    zs : ndarray
        metallicities parsed from the ``Z...`` tokens of the header line.
    """
    with open(mhef_file, 'r') as inp:
        header_tokens = inp.readline().split()
    zs = np.array([tok.replace('Z', '') for tok in header_tokens if 'Z' in tok],
                  dtype=float)
    data = np.genfromtxt(mhef_file)
    return data, zs
def interp_match_grid(dir1, dir2, mhef_file, overwrite=False,
                      plot=False, truth_track_loc='', newsubs=None):
    """Interpolate new sets of MATCH tracks between two overshoot grids.

    Parameters
    ----------
    dir1, dir2 : str
        directories of the two bracketing grids; names are expected to
        encode the overshoot value, e.g. ``ov0.30``.
    mhef_file : str
        output of :func:`interp_mhefs` giving He-fusion masses.
    overwrite : bool
        overwrite existing interpolated track files.
    plot : bool
        make diagnostic plots in :func:`interpolate_between_sets`.
    truth_track_loc : str
        location of comparison tracks to overplot, if any.
    newsubs : str, optional
        name of the new output subdirectory; derived from the midpoint
        of the two overshoot values when omitted.
    """
    frac = 2
    data, zs = read_mhef(mhef_file)
    subs = [dir1, dir2]
    # overshoot values parsed from the directory names, e.g. 'ov0.30' -> 0.30
    pts = np.sort(np.array([s.replace('ov', '').replace('/', '') for s in subs],
                           dtype=float))
    if newsubs is None:
        # take the tabulated OV values strictly between the two endpoints,
        # falling back to their midpoint when none exist
        interps = data.T[0][(data.T[0] > pts[0]) & (data.T[0] < pts[1])]
        if len(interps) == 0:
            if frac == 2:
                interps = np.array([np.mean(pts)])
        newsubs = ['ov{:.2f}'.format(s) for s in interps]
    else:
        newsubs = np.asarray([newsubs])
        interps = np.array([s.replace('ov', '').replace('/', '')
                            for s in newsubs], dtype=float)
    print('interpolate for these new values: {}'.format(interps))
    sets = [[os.path.join(s, l) for l in os.listdir(s)
             if not l.startswith('.') and os.path.isdir(os.path.join(s, l))]
            for s in subs]
    # frac=2 default: mean would assume we're finding the point inbetween.
    for i in range(len(sets)-1):
        for j in range(len(sets[i])):
            newset = os.path.split(sets[i][j])[1]
            newset = newset.replace('OV{:.1f}'.format(pts[i]),
                                    'OV{:.2f}'.format(interps[i]))
            newdir = os.path.join(newsubs[i], newset)
            print('interpolating output: {0:s}'.format(newdir))
            # BUG FIX: ``overwrite`` was accepted but never forwarded, so
            # passing overwrite=True silently had no effect.
            interpolate_between_sets(sets[i][j], sets[i+1][j], newdir,
                                     data[2*i+1][j+1], plot=plot, frac=frac,
                                     overwrite=overwrite,
                                     truth_track_loc=truth_track_loc)
    return
def main(argv):
    """
    Command-line driver: interpolate a grid of tracks between two
    overshoot directories.

    Report ... quick test between OV0.4 OV0.6 to compare to parsec:
    Even the low mass where nothing should change was off. NEED TO CALC OV0.5
    quick test between ov0.30 and 0.60:
    Some offsets likely due to the end of the track differences.
    Other offsets because comparing to ov0.40 isn't correct, this will
    create ov0.45.
    Lots of structure on HB phase looks pretty strange. It might be better
    NOT to interpolate and run with MATCH but use a KDE later.
    """
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('-m', '--mhef_file', type=str,
                        help='file containing the He fusion masses')
    parser.add_argument('-n', '--newsubs', type=str,
                        help='new subdirectory name')
    parser.add_argument('-i', '--isodir_loc', type=str,
                        help='where the isotrack files are (if not -m)')
    parser.add_argument('-t', '--truth_track_loc', type=str, default='',
                        help='over plot comparison tracks from this location')
    parser.add_argument('-d', '--diag_plot', action='store_true',
                        help='make HB plots')
    parser.add_argument('-v', '--pdb', action='store_true',
                        help='invoke python debugger')
    parser.add_argument('dir1', type=str, help='directory 1')
    parser.add_argument('dir2', type=str, help='directory 2')
    args = parser.parse_args(argv)
    if args.pdb:
        pdb.set_trace()
    # Where are the INT files
    if not args.mhef_file:
        # Build the MHeF table from the isotrack directories when no
        # precomputed file was supplied.
        isodir_loc = args.isodir_loc or os.getcwd()
        isodirs = [os.path.join(isodir_loc, l) for l in os.listdir(isodir_loc) if os.path.isdir(os.path.join(isodir_loc, l))]
        args.mhef_file = interp_mhefs(isodirs)
    interp_match_grid(args.dir1, args.dir2,
                      args.mhef_file,
                      plot=args.diag_plot,
                      truth_track_loc=args.truth_track_loc,
                      newsubs=args.newsubs)
# Entry point when run as a script.
if __name__ == '__main__':
    main(sys.argv[1:])
|
philrosenfield/padova_tracks
|
interpolate/interpolate_match_grid.py
|
Python
|
mit
| 16,902
|
from unittest.mock import Mock, patch
from file_transfer.send_file import send_file
@patch("socket.socket")
@patch("builtins.open")
def test_send_file_running_as_expected(file, sock):
# ===== initialization =====
conn = Mock()
sock.return_value.accept.return_value = conn, Mock()
f = iter([1, None])
file.return_value.__enter__.return_value.read.side_effect = lambda _: next(f)
# ===== invoke =====
send_file(filename="mytext.txt", testing=True)
# ===== ensurance =====
sock.assert_called_once()
sock.return_value.bind.assert_called_once()
sock.return_value.listen.assert_called_once()
sock.return_value.accept.assert_called_once()
conn.recv.assert_called_once()
file.return_value.__enter__.assert_called_once()
file.return_value.__enter__.return_value.read.assert_called()
conn.send.assert_called_once()
conn.close.assert_called_once()
sock.return_value.shutdown.assert_called_once()
sock.return_value.close.assert_called_once()
|
TheAlgorithms/Python
|
file_transfer/tests/test_send_file.py
|
Python
|
mit
| 1,013
|
# -*- coding: utf-8 -*-
# Synchronization core module for Storj GUI Client #
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import threading, csv
HANDLE_ON_MOVE_EVENT = True
HANDLE_ON_DELETE_EVENT = True
SYNC_DIRECTORIES_FILE = "storj_sync_dirs.csv"
class StorjFileSynchronization():
    """Synchronization core; appears to be a placeholder -- every method is a
    stub that unconditionally returns 1."""
    def start_sync_thread(self):
        # Stub: no thread is actually started here.
        return 1
    def reload_sync_configuration(self):
        # Stub: configuration is not actually reloaded here.
        return 1
    def add_file_to_sync_queue(self, file_path):
        # Stub: `file_path` is currently ignored; no queueing happens.
        return 1
class FileChangesHandler(PatternMatchingEventHandler):
#patterns = ["*.xml", "*.lxml"]
def __init__(self):
self.storj_file_synchronization_core = StorjFileSynchronization()
def process(self, event):
"""
event.event_type
'modified' | 'created' | 'moved' | 'deleted'
event.is_directory
True | False
event.src_path
path/to/observed/file
"""
# the file will be processed there
self.storj_file_synchronization_core.add_file_to_sync_queue(file_path=str(event.src_path))
print str(event)
#print str(event.src_path) + str(event.event_type) + "event" # print now only for degug
def on_deleted(self, event):
if HANDLE_ON_DELETE_EVENT:
self.process(event)
def on_moved(self, event):
if HANDLE_ON_MOVE_EVENT:
self.process(event)
def on_modified(self, event):
self.process(event)
def on_created(self, event):
self.process(event)
class SyncObserverWorker():
    """Owns the watchdog Observer that watches the configured sync folders.

    NOTE(review): the boolean attribute ``self.is_sync_active`` shares its
    name with the ``is_sync_active()`` method below. Once ``__init__`` runs,
    the instance attribute shadows the method, so calling
    ``worker.is_sync_active()`` raises TypeError -- one of the two should be
    renamed.
    """
    def __init__(self):
        # Flag flipped by start_observing()/stop_observers().
        self.is_sync_active = False
    def start_observing_thread(self):
        # Run start_observing() on a background thread so the caller
        # (presumably the GUI) is not blocked while observers spin up.
        observing_main_thread = threading.Thread(
            target=self.start_observing)
        observing_main_thread.start()
    def start_observing(self):
        # Directories to watch come from the first CSV column of each row in
        # the config file, plus one hard-coded path (looks like a development
        # leftover -- TODO confirm).
        paths_to_observe = []
        with open(unicode(SYNC_DIRECTORIES_FILE), 'rb') as stream:
            for rowdata in csv.reader(stream):
                for column, data in enumerate(rowdata):
                    if column == 0:
                        paths_to_observe.append(str(data.decode('utf8')))
                        print data.decode('utf8')
        paths_to_observe.append("/home/lakewik/storjsync")
        self.observer = Observer()
        for path in paths_to_observe:
            self.observer.schedule(FileChangesHandler(), path=str(path))
        self.observer.start()
        print "Synchronization directories observing started!"
        self.is_sync_active = True
    def stop_observers(self):
        # Signals the observer to stop; no join() is performed here.
        self.observer.stop()
        self.is_sync_active = False
        print "Synchronization directories observing stopped!"
        return 1
    def restart_observers(self):
        self.stop_observers()
        self.start_observing_thread()
        return True
    def is_sync_active(self):
        # NOTE(review): unreachable via instances -- shadowed by the
        # attribute of the same name set in __init__ (see class docstring).
        return self.is_sync_active
|
lakewik/storj-gui-client
|
UI/utilities/synchronization_core.py
|
Python
|
mit
| 2,901
|
#!/home/ubuntu/muddev/pyenv/bin/python
# Thin wrapper: delegate all command-line handling to Django's management CLI.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
|
whitehorse-io/encarnia
|
pyenv/bin/django-admin.py
|
Python
|
mit
| 145
|
import os
import io
import math
import random
import numpy as np
import pandas as pd
import sklearn
from sklearn.preprocessing import StandardScaler
import requests
from tqdm import tqdm
import tensorflow as tf
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Flatten
from keras.models import load_model
'''
class Helper
'''
class Helper:
    """Utility helpers: sequence windowing for the LSTM and file download."""

    def prepare_sequence(self, data, sequence_size=7):
        """Slide a window of ``sequence_size`` steps over ``data``.

        Parameters
        ----------
        data : 2-D array (rows = time steps, columns = features)
        sequence_size : int, optional
            Window length; defaults to 7 to preserve the original behavior.

        Returns
        -------
        ndarray of shape (samples, sequence_size, n_features).
        """
        sequence = []
        samples = 0
        # Every start index that leaves room for a full window.
        for i in range(0, data.shape[0] - sequence_size + 1):
            # Consistency fix: the slice length was a hard-coded 7 even though
            # sequence_size existed; use the parameter so other sizes work.
            sequence.append(data[i:i + sequence_size])
            samples += 1
        return np.concatenate(sequence).reshape((samples, sequence_size, data.shape[1]))

    def download_file(self, url, filename):
        """Stream ``url`` into ``filename``, showing a tqdm progress bar."""
        r = requests.get(url, stream=True)
        # Content-Length may be absent; fall back to 0 (bar just lacks a total).
        total_size = int(r.headers.get('content-length', 0))
        block_size = 1024
        total_kb_size = math.ceil(total_size // block_size)
        wrote = 0
        with open(filename, 'wb') as f:
            for data in tqdm(r.iter_content(block_size), total=total_kb_size, unit='KB', unit_scale=True):
                wrote = wrote + len(data)
                f.write(data)
'''
Class Stock
'''
class Stock:
    """Pipeline for an ETH/EUR price model: download the data, preprocess it,
    scale/split it, and train (or reload) an LSTM regressor."""
    def __init__(self):
        # Helper supplies download and sequence-windowing utilities.
        self.helper = Helper()
        pass
    def gettingData(self):
        """Download the daily ETH/EUR CSV from CoinGecko and load it as a DataFrame."""
        url = "https://www.coingecko.com/price_charts/export/279/eur.csv"
        datafile = 'eth-eur.csv'
        self.helper.download_file(url, datafile)
        return pd.read_csv(datafile)
    def preprocessing(self, data):
        """Index by calendar date, fill missing days with neighbour means, and
        append the next day's price as the 'closed_price' target column."""
        #customize index
        # NOTE(review): the next line computes a value that is discarded.
        data.snapped_at[0].split()[0]
        # Keep only the date part of the timestamp and use it as the index.
        data.snapped_at = data.snapped_at.apply(lambda x: x.split()[0])
        data.set_index('snapped_at', inplace=True)
        data.index = pd.to_datetime(data.index)
        '''
        In some cases there is no sample for a certain date.
        '''
        #Generate all the possible days and use them to reindex
        start = data.index[data.index.argmin()]
        end = data.index[data.index.argmax()]
        index_complete = pd.date_range(start, end)
        data = data.reindex(index_complete)
        #Fill the blanks with the mean between the previous and the day after
        print("\nLooking if the index is complete...")
        for idx in data.index:
            dayloc = data.index.get_loc(idx)
            day = data.loc[idx]
            if day.hasnans:
                #updating
                # Window [previous day, this day, next day]; .mean() skips the
                # NaN row, so this averages the two neighbours.
                rg = slice(dayloc-1, dayloc+2)
                data.loc[idx] = data.iloc[rg].mean()
                print("Day <{}> has been updated with the mean values".format(idx))
        '''
        Adding the target for every sample
        '''
        new_column = 'closed_price'
        # NOTE(review): 'datab' is an unused copy.
        datab = data.copy()
        nc = list()
        for idx in data.index:
            dayloc = data.index.get_loc(idx)
            #we put the price in the day after as closed price
            if dayloc == len(data.index)-1:
                #last position will not have closed_price
                closed_price = np.nan
            else:
                closed_price = data.iloc[dayloc+1].price
            nc.append(closed_price)
        data[new_column] = nc
        #Delete last because we don't know still the closed price
        data = data.drop(data.index[len(data)-1])
        return data
    def scale(self, data_train, data_test):
        """Standardize features; the scaler is fit on the training set only."""
        scaler = StandardScaler()
        data_train_norm, data_test_norm = data_train.copy(), data_test.copy()
        columns = data_train.columns
        data_train_norm[columns] = scaler.fit_transform(data_train[columns])
        data_test_norm[columns] = scaler.transform(data_test[columns])
        return data_train_norm, data_test_norm, scaler
    def get_train_test(self, data, train_size=0.9):
        """Chronological split: the first `train_size` fraction becomes training data."""
        split = round(len(data)*train_size)
        data_train, data_test = data[:split].copy(), data[split:].copy()
        return data_train, data_test
    def build_model(self, ):
        """LSTM(32) -> Dense(1) regressor over windows of 7 steps x 3 features."""
        model = Sequential()
        model.add(LSTM(32, input_shape=(7, 3) ))
        model.add(Dense(1))
        model.compile(loss='mean_squared_error', optimizer='adam')
        return model
    def training(self, model, X, y):
        """Train the model, or reload it from 'model.h5' if a backup exists."""
        modelfile = 'model.h5'
        if (os.path.exists(modelfile)):
            print("Recovering backed up model..\n")
            return load_model(modelfile)
        else:
            print("Training...\n")
            model.fit(X, y, epochs=50, batch_size=32, verbose=0)
            model.save(modelfile)
            return model
|
samuxiii/prototypes
|
learning/stock-app/stock.py
|
Python
|
mit
| 4,798
|
from aitools import *
#from state import *
import time
class Government:
    """A government's capabilities; every statistic ranges from 0 to 100.

    Attributes:
        military:    military power
        resources:   available raw resources
        funds:       available money
        tech:        technological superiority
        freedom:     freedom of the citizenry
        discipline:  overall organization/effectiveness of the command structure
        centralized: susceptibility to collapse if a key system falls
        loyalty:     whether denizens are loyal or easily subverted
        covert:      extent of the government's covert network
    """

    def __init__(self):
        # All statistics start at zero.
        for stat in ('military', 'resources', 'funds', 'tech', 'freedom',
                     'discipline', 'centralized', 'loyalty', 'covert'):
            setattr(self, stat, 0)
class Job:
    """Trait profile associated with a job; every trait starts at zero."""

    _TRAITS = ('stubborn', 'patient', 'openminded', 'strong', 'shy',
               'trained', 'experienced', 'religious', 'brutal',
               'intelligent', 'satisfied', 'rowdy', 'antsy')

    def __init__(self):
        for trait in self._TRAITS:
            setattr(self, trait, 0)
class Personality:
    """Personality traits pulled from a seed mapping (anything with ``.get``).

    Missing keys default to ``dict.get``'s None, matching the original code.
    """

    def __init__(self, seed):
        # Attribute names intentionally mirror the seed's key names.
        for attr in ('training', 'experience', 'bravery', 'intelligence',
                     'tenacity', 'morality', 'loyalty', 'honesty',
                     'mercy', 'faith'):
            setattr(self, attr, seed.get(attr))
|
remremrem/EV-Tribute
|
world/ai.py
|
Python
|
mit
| 1,661
|
from system.core.controller import *
from time import strftime
class TimeDisplay(Controller):
    """Controller that renders the current date/time."""
    def __init__(self, action):
        super(TimeDisplay, self).__init__(action)
    def timedate(self):
        """Return the current time formatted like 'Jan 01, 2024 09 AM'."""
        # Bug fix: the formatted string was assigned to a local and then
        # discarded, so this method always returned None.
        return strftime('%b %d, %Y %I %p')
    def index(self):
        """Render the index view with the current formatted time."""
        time_date = strftime('%b %d, %Y %I %p')
        return self.load_view('/time_display_index.html', message=time_date)
|
authman/Python201609
|
Guerrero_Melissa/Assignments/Time_Display_assignment/controllers/TimeDisplay.py
|
Python
|
mit
| 361
|
from world import BinaryFeatureObject as Obj
from world import BinaryFeatureWorld as World
# The universe of binary features every object is described with.
features = ["red", "round", "hard"]
# Example objects, each tagged with the subset of features it exhibits.
apple = Obj("apple", ["red", "round", "hard"])
orange = Obj("orange", ["round", "hard"])
blood = Obj("blood", ["red"])
# NOTE(review): 'water' is defined but never added to the world below.
water = Obj("water", [])
ball = Obj("ball", ["round", "hard"])
bird = Obj("bird", [])
pot = Obj("pot", ["hard", "round"])
bottle = Obj("bottle", ["hard", "round"])
mouse = Obj("mouse", ["round"])
redblock = Obj("redblock", ["red", "hard"])
table = Obj("table", ["hard"])
book = Obj("book", ["hard"])
grape = Obj("grape", ["round"])
chinaflag = Obj("chinaflag", ["red"])
globe = Obj("globe", ["round", "hard"])
# World = feature set, object pool, and the positive examples of the target
# concept (here: apple/orange/grape). Earlier candidate concepts are kept as
# commented-out alternatives.
# NOTE(review): 'redblock' appears twice in the object list -- likely a paste
# error; confirm whether the duplicate is intentional.
world = World(features,
              [apple, orange, blood, ball, bird, redblock, pot,
               bottle, mouse, redblock, table, book, grape, chinaflag, globe],
              # set([apple, ball, bottle, table, globe])
              # set([apple, orange, ball, pot, bottle, redblock, table, book, globe])
              [apple, orange, grape]
              )
|
leonxlin/concept-learner
|
test_world.py
|
Python
|
mit
| 968
|
import webbrowser
# Reflected-XSS proof of concept: the 'secret' parameter carries a payload
# that breaks out of an attribute and alerts document.cookie if the target
# page reflects it unescaped.
url = 'http://www.wsb.com/Assignment2/case29.php?secret="/><script>alert(document.cookie)</script>'
# webbrowser.open(new=2): request a new browser tab where supported.
new = 2
webbrowser.open(url, new=new)
|
vhazali/cs5331
|
assignment2/scripts/exploit29/exploit29.py
|
Python
|
mit
| 158
|
import unittest
import unittest.mock as mock
from auto_peering.session_store import SessionStore
from test import mocks, randoms, builders
class TestSessionStore(unittest.TestCase):
    """Unit tests for SessionStore's assume-role session management."""

    def test_creates_session_for_provided_account_id(self):
        """A session is created by assuming the peering role in the account."""
        mock_sts = mocks.build_sts_client_mock()
        role_name = randoms.role_name()
        account = randoms.account_id()
        expected_credentials, assume_role_mock = \
            mocks.build_sts_assume_role_mock()
        mock_sts.assume_role = assume_role_mock

        store = SessionStore(mock_sts, role_name)
        session = store.get_session_for(account)

        recorded_call = mock_sts.assume_role.call_args
        expected_arn = builders.build_role_arn_for(account, role_name)

        self.assertEqual(len(mock_sts.assume_role.mock_calls), 1)
        self.assertEqual(
            recorded_call,
            mock.call(
                RoleArn=expected_arn,
                RoleSessionName="vpc-auto-peering-lambda"))

        actual = session.get_credentials()
        self.assertEqual(actual.access_key, expected_credentials.access_key)
        self.assertEqual(actual.secret_key, expected_credentials.secret_key)
        self.assertEqual(actual.token, expected_credentials.token)

    def test_caches_session_for_account_id(self):
        """Repeated lookups for the same account reuse the cached session."""
        mock_sts = mocks.build_sts_client_mock()
        role_name = randoms.role_name()
        account = randoms.account_id()
        _, assume_role_mock = \
            mocks.build_sts_assume_role_mock()
        mock_sts.assume_role = assume_role_mock

        store = SessionStore(mock_sts, role_name)
        first_session = store.get_session_for(account)
        second_session = store.get_session_for(account)

        # assume_role ran once, and both calls returned the same session.
        self.assertEqual(len(mock_sts.assume_role.mock_calls), 1)
        self.assertEqual(first_session, second_session)
|
tobyclemson/terraform-aws-vpc-auto-peering
|
lambdas/auto_peering/test/test_session_store.py
|
Python
|
mit
| 2,204
|
from ..structure import *
from ..utils import *
class Gibbs(MCMC):
    """Gibbs sampler: each update resamples one randomly chosen variable,
    presumably conditioned on its Markov blanket (via ``sample_blanket``) --
    confirm against the network module."""
    def __init__(self, problem, verbose_int = 100, N = 1000, T = 10000, record_start = 3000):
        # Delegate particle bookkeeping/recording/verbosity to the MCMC base.
        MCMC.__init__(self, problem, "Gibbs", verbose_int, N, T, record_start)
    def particle_to_tuple(self, p):
        # Particles are already stored as tuples, so this is the identity.
        return p
    def init_particle(self):
        # Independently draw a uniform random value for every variable.
        return tuple((np.random.choice(self.problem.net[rv].values) for rv in self.problem.rvs))
    def update_particle(self, particle):
        # Pick one variable uniformly at random and resample it given the
        # rest of the current assignment.
        d = self.tuple_to_dict(particle)
        rv = self.problem.rvs[np.random.choice(len(self.problem.rvs))]
        d[rv] = self.problem.net[rv].sample_blanket(d)
        return self.dict_to_tuple(d)
|
SsnL/amcmc
|
inference/gibbs.py
|
Python
|
mit
| 669
|
#!/usr/bin/env python
# This file is protected via CODEOWNERS
import os
import re
import sys
from setuptools import setup
CURRENT_PYTHON = sys.version_info[:2]
REQUIRED_PYTHON = (3, 7)
# This check and everything above must remain compatible with Python 2.7.
if CURRENT_PYTHON < REQUIRED_PYTHON:
    # Old pip ignores python_requires, so explain the failure and abort
    # instead of letting an incompatible install proceed.
    sys.stderr.write(
        """
==========================
Unsupported Python version
==========================
This version of urllib3 requires Python {}.{}, but you're trying to
install it on Python {}.{}.
This may be because you are using a version of pip that doesn't
understand the python_requires classifier. Make sure you
have pip >= 9.0 and setuptools >= 24.2, then try again:
    $ python -m pip install --upgrade pip setuptools
    $ python -m pip install urllib3
This will install the latest version of urllib3 which works on your
version of Python. If you can't upgrade your pip (or Python), request
an older version of urllib3:
    $ python -m pip install "urllib3<2"
""".format(
            *(REQUIRED_PYTHON + CURRENT_PYTHON)
        )
    )
    sys.exit(1)
base_path = os.path.dirname(__file__)
# Get the version (borrowed from SQLAlchemy)
# Parse __version__ out of the source text instead of importing the package,
# which could fail at build time before dependencies are installed.
with open(os.path.join(base_path, "src", "urllib3", "_version.py")) as fp:
    VERSION = (
        re.compile(r""".*__version__ = ["'](.*?)['"]""", re.S).match(fp.read()).group(1)
    )
with open("README.rst", encoding="utf-8") as fp:
    # Remove reST raw directive from README as they're not allowed on PyPI
    # Those blocks start with a newline and continue until the next newline
    # Tiny state machine: 'ignore_nl' = inside a raw block, 'wait_nl' = just
    # saw the blank line that may end it, None = normal text (kept).
    mode = None
    lines = []
    for line in fp:
        if line.startswith(".. raw::"):
            mode = "ignore_nl"
        elif line == "\n":
            mode = "wait_nl" if mode == "ignore_nl" else None
        if mode is None:
            lines.append(line)
    readme = "".join(lines)
with open("CHANGES.rst", encoding="utf-8") as fp:
    changes = fp.read()
version = VERSION
# Package metadata; the long description is the cleaned README plus CHANGES.
setup(
    name="urllib3",
    version=version,
    description="HTTP library with thread-safe connection pooling, file post, and more.",
    long_description="\n\n".join([readme, changes]),
    long_description_content_type="text/x-rst",
    classifiers=[
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Software Development :: Libraries",
    ],
    keywords="urllib httplib threadsafe filepost http https ssl pooling",
    author="Andrey Petrov",
    author_email="andrey.petrov@shazow.net",
    url="https://urllib3.readthedocs.io",
    project_urls={
        "Documentation": "https://urllib3.readthedocs.io",
        "Code": "https://github.com/urllib3/urllib3",
        "Issue tracker": "https://github.com/urllib3/urllib3/issues",
    },
    license="MIT",
    packages=[
        "urllib3",
        "urllib3.contrib",
        "urllib3.contrib._securetransport",
        "urllib3.util",
    ],
    package_data={
        "urllib3": ["py.typed"],
    },
    # Sources live under src/ (the "src layout").
    package_dir={"": "src"},
    requires=[],
    python_requires=">=3.7, <4",
    extras_require={
        "brotli": [
            "brotli>=1.0.9; platform_python_implementation == 'CPython'",
            "brotlicffi>=0.8.0; platform_python_implementation != 'CPython'",
        ],
        "secure": [
            "pyOpenSSL>=0.14",
            "cryptography>=1.3.4",
            "idna>=2.0.0",
            "certifi",
        ],
        "socks": ["PySocks>=1.5.6,<2.0,!=1.5.7"],
    },
)
|
sigmavirus24/urllib3
|
setup.py
|
Python
|
mit
| 4,062
|
#!/Users/Drake/dev/LouderDev/louderdev/bin/python3
# -*- coding: utf8 -*-
# :Copyright: © 2015 Günter Milde.
# :License: Released under the terms of the `2-Clause BSD license`_, in short:
#
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved.
# This file is offered as-is, without any warranty.
#
# .. _2-Clause BSD license: http://www.spdx.org/licenses/BSD-2-Clause
#
# Revision: $Revision: 7847 $
# Date: $Date: 2015-03-17 18:30:47 +0100 (Di, 17 Mär 2015) $
"""
A minimal front end to the Docutils Publisher, producing HTML 5 documents.
The output also conforms to XHTML 1.0 transitional
(except for the doctype declaration).
"""
# Set the user's default locale so docutils can emit localized output.
# Bug fix: the original `except locale.Error` clause itself raised NameError
# when the import failed (the Jython case the comment mentions), because
# `locale` was then unbound. Handle ImportError separately.
try:
    import locale  # module missing in Jython
except ImportError:
    pass
else:
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error:
        # An unsupported/undefined locale is non-fatal; keep the default.
        pass
from docutils.core import publish_cmdline, default_description
# CLI help text: this writer's summary plus docutils' generic option notes.
description = (u'Generates HTML 5 documents from standalone '
               u'reStructuredText sources '
               + default_description)
# Parse command-line options, read the reST input, and write HTML5 output.
publish_cmdline(writer_name='html5', description=description)
|
drakeloud/louderdev
|
louderdev/bin/rst2html5.py
|
Python
|
mit
| 1,171
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of Karesansui Core.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import re
import sys
from karesansui.lib.dict_op import DictOp
from karesansui.lib.parser.base.sh_conf_parser import shConfParser as Parser
from karesansui.lib.utils import preprint_r
"""
Define Variables for This Parser
"""
# Path to the Red Hat-style global network configuration file.
PARSER_NETWORK_CONF="/etc/sysconfig/network"
class networkParser:
    """Reads and writes /etc/sysconfig/network through the KEY=VALUE shell
    config parser, exposing the result as a DictOp tree under 'network'."""
    _module = "network"
    def __init__(self):
        self.dop = DictOp()
        self.dop.addconf(self._module,{})
        self.parser = Parser()
        # Both existing and newly added entries use '=' as the delimiter.
        self.parser.set_delim("=")
        self.parser.set_new_delim("=")
        self.parser.set_multidefine(False)
        # Recorded so read_conf() can tag which base parser produced the data.
        self.base_parser_name = self.parser.__class__.__name__
        pass
    def source_file(self):
        """Return the list of files this parser reads/writes."""
        retval = [PARSER_NETWORK_CONF]
        return retval
    def read_conf(self,extra_args=None):
        """Parse the config file and return its contents as a dict tree."""
        retval = {}
        self.parser.set_source_file([PARSER_NETWORK_CONF])
        conf_arr = self.parser.read_conf()
        try:
            self.dop.addconf(self._module,conf_arr[PARSER_NETWORK_CONF]['value'])
        except:
            # NOTE(review): bare except silently ignores a missing/unparsable
            # file; the caller then receives an empty configuration.
            pass
        self.dop.set(self._module,['@BASE_PARSER'],self.base_parser_name)
        #self.dop.preprint_r(self._module)
        return self.dop.getconf(self._module)
    def write_conf(self,conf_arr={},extra_args=None,dryrun=False):
        """Write a dict tree back to the config file.

        NOTE(review): always returns True, and the bare except hides any
        write failure; the mutable default ``conf_arr={}`` is shared between
        calls (harmless only while it is never mutated here).
        """
        retval = True
        try:
            self.dop.addconf("parser",{})
            self.dop.set("parser",[PARSER_NETWORK_CONF],conf_arr)
            #self.dop.preprint_r("parser")
            arr = self.dop.getconf("parser")
            self.parser.write_conf(arr,dryrun=dryrun)
        except:
            pass
        return retval
"""
"""
if __name__ == '__main__':
    """Testing
    """
    # Ad-hoc smoke test: read the live config, add/remove keys in a scratch
    # DictOp tree, then perform a dry-run write (no file is modified).
    parser = networkParser()
    dop = DictOp()
    dop.addconf("dum",parser.read_conf())
    dop.add("dum","NETWORK","1.0.0.0")
    dop.add("dum","NETWORK2","1.0.0.0")
    dop.delete("dum","NETWORK")
    conf = dop.getconf("dum")
    parser.write_conf(conf,dryrun=True)
|
karesansui/karesansui
|
karesansui/lib/parser/network.py
|
Python
|
mit
| 3,123
|
from django.conf.urls import patterns, url
# URL routes for the core app (legacy Django `patterns()` string-view syntax).
urlpatterns = patterns('core.views',
    url(r'^list_drivers/$', 'list_drivers'),
    url(r'^client_sign_up/$', 'client_sign_up'),
    url(r'^activate_app/(?P<activation_string>.+)/$', 'activate_app'),
    url(r'^get_info/$', 'get_info'),
    url(r'^update_device_token/$', 'update_device_token'),
    url(r'^monitor/$', 'monitor'),
    # Bug fix: a second identical '^get_info/$' route followed here; it was
    # unreachable (Django dispatches to the first matching pattern) and has
    # been removed.
    url(r'', 'home')
)
|
cmpe-295/project-backend
|
safe_ride/core/urls.py
|
Python
|
mit
| 611
|
"""Generate a table of brightness and contrast options for uvccapture.
Uvccapture works okay for taking webcam snapshots, but you have to tell it the
brightness, contrast, and saturation settings it should use. It's hard to guess
the right brightness and contrast settings, so this makes a table of options to
pick from.
"""
import os
import time
# Candidate values for uvccapture's -B (brightness) and -C (contrast) options.
brightness = [1, 16, 32, 48, 64, 96, 128, 144, 160, 172, 192, 208, 224, 240, 255]
contrast = [8, 32, 64, 128, 160, 192, 255]
os.system('mkdir -p /tmp/webcam_html')
# One snapshot command per (brightness, contrast) pair; {b}/{c} filled below.
fmt = 'uvccapture -v -x320 -y240 -q90 -B{b} -C{c} -S1 -o/tmp/webcam_html/snap-{b}-{c}.jpg'
cmd_queue = []
# Make the -Cxx contrast column headings
table = "<table>\n<tr><td></td>"
for c in contrast:
    table += "<td>-C{}</td>".format(c)
table += "</tr>\n"
for b in brightness:
    # Make a row for each brightness level
    table += "<tr><td>-B{}</td>\n".format(b)
    for c in contrast:
        # Make a cell for each contrast level at the given brightness
        table += "<td><img src='snap-{}-{}.jpg'></td>\n".format(b, c)
        cmd_queue.append(fmt.format(b=b, c=c))
    table += "</tr>\n"
table += "</table>\n"
# Render the table to html so it's easy to see all the images. Note that you can
# reload index.html as the images are getting created. Just re-run `scp -r ...`
# on your workstation.
# ('%%' in the CSS survives %-formatting as a literal '%'.)
template = """<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="initial-scale=1.0, width=device-width">
<title>uvccapture table</title>
<style>
*{margin:0;padding:0;font-size:100%%;font:inherit;}
html,body{background-color:#fff9f9;line-height:1.3rem;
font-size:12pt;font-family:Arial,sans-serif;}
img{width:140px;height:auto;}
</style>
</head>
<body>
%s
</body>
</html>
"""
with open('/tmp/webcam_html/index.html', 'w') as f:
    f.write(template % table)
# Run all the commands after index.html is ready
for cmd in cmd_queue:
    # Sometimes uvccapture can't open the video device, so use an exponential
    # backoff delay and retry if the first attempt doesn't work
    for exp_backoff in [1, 2, 4, 8, 16, 32]:
        if os.system(cmd) == 0:
            break
        else:
            print "\n*** sleeping for {} ***\n".format(exp_backoff)
            time.sleep(exp_backoff)
print
print "*** All Done! Your images are in /tmp/webcam_html"
|
wsnook/sandbox
|
webcam/webcam.py
|
Python
|
mit
| 2,308
|
import os
import os.path
import fnmatch
import logging
import ycm_core
import re
# Fallback compiler flags, used when neither a compilation database nor a
# .clang_complete file can be located for the file being edited.
BASE_FLAGS = [
    '-Wall',
    '-Wextra',
    '-Werror',
    '-Wno-long-long',
    '-Wno-variadic-macros',
    '-fexceptions',
    '-ferror-limit=10000',
    '-DNDEBUG',
    '-std=c++11',
    '-xc++',
    # Bug fix: a missing comma made Python concatenate the two adjacent
    # string literals into the single bogus flag '-I/usr/lib/-I/usr/include/'.
    '-I/usr/lib/',
    '-I/usr/include/'
]

# Extensions used to pair a header with a sibling translation unit.
SOURCE_EXTENSIONS = [
    '.cpp',
    '.cxx',
    '.cc',
    '.c',
    '.m',
    '.mm'
]

HEADER_EXTENSIONS = [
    '.h',
    '.hxx',
    '.hpp',
    '.hh'
]
def IsHeaderFile(filename):
    """Return True when *filename* carries a recognised C/C++ header extension."""
    _, ext = os.path.splitext(filename)
    return ext in HEADER_EXTENSIONS
def GetCompilationInfoForFile(database, filename):
    # Headers have no entry in the compilation database; reuse the flags of a
    # sibling source file with the same basename, if one exists on disk.
    if IsHeaderFile(filename):
        basename = os.path.splitext(filename)[0]
        for extension in SOURCE_EXTENSIONS:
            replacement_file = basename + extension
            if os.path.exists(replacement_file):
                compilation_info = database.GetCompilationInfoForFile(replacement_file)
                if compilation_info.compiler_flags_:
                    return compilation_info
        # No matching source file found: flags for this header are unknown.
        return None
    return database.GetCompilationInfoForFile(filename)
def FindNearest(path, target, build_folder=None):
    """Walk up from *path* looking for *target* (a file or directory).

    When *build_folder* is given, `<parent>/<build_folder>/<target>` is also
    checked at each level (for out-of-source builds). Raises RuntimeError when
    the filesystem root is reached without a match.

    Bug fix: *build_folder* now defaults to None. FlagsForClangComplete and
    FlagsForInclude call this with only two arguments, which previously raised
    TypeError (silently swallowed by their bare excepts), so those flag
    sources never worked.
    """
    candidate = os.path.join(path, target)
    if os.path.isfile(candidate) or os.path.isdir(candidate):
        logging.info("Found nearest " + target + " at " + candidate)
        return candidate
    parent = os.path.dirname(os.path.abspath(path))
    if parent == path:
        # Reached the filesystem root without finding the target.
        raise RuntimeError("Could not find " + target)
    if build_folder:
        candidate = os.path.join(parent, build_folder, target)
        if os.path.isfile(candidate) or os.path.isdir(candidate):
            logging.info("Found nearest " + target + " in build folder at " + candidate)
            return candidate
    return FindNearest(parent, target, build_folder)
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
    """Rewrite relative include/path flags as absolute paths anchored at
    *working_directory*; other flags pass through unchanged."""
    if not working_directory:
        return list(flags)

    path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
    absolute_flags = []
    pending_path = False  # True when the previous flag was a bare path option
    for flag in flags:
        updated = flag
        if pending_path:
            pending_path = False
            # The previous flag announced a path; absolutize it if relative.
            if not flag.startswith('/'):
                updated = os.path.join(working_directory, flag)
        for prefix in path_flags:
            if flag == prefix:
                # Bare option: the path arrives as the next flag.
                pending_path = True
                break
            if flag.startswith(prefix):
                # Fused option+path, e.g. '-Iinclude'.
                tail = flag[len(prefix):]
                updated = prefix + os.path.join(working_directory, tail)
                break
        if updated:
            absolute_flags.append(updated)
    return absolute_flags
def FlagsForClangComplete(root):
    """Return the flag lines from the nearest .clang_complete file, or None."""
    try:
        # Bug fix: FindNearest's third parameter (build_folder) had no
        # default, so this two-argument call raised TypeError and the
        # function always returned None. Pass None explicitly.
        clang_complete_path = FindNearest(root, '.clang_complete', None)
        # Context manager so the file handle is closed deterministically.
        with open(clang_complete_path, 'r') as handle:
            return handle.read().splitlines()
    except Exception:
        # Missing or unreadable file: fall back to the other flag sources.
        return None
def FlagsForInclude(root):
    """Collect '-I' flags for every subdirectory of the nearest 'include' dir."""
    try:
        # Bug fix: pass build_folder explicitly; the two-argument call raised
        # TypeError (swallowed by the bare except), always yielding None.
        include_path = FindNearest(root, 'include', None)
        flags = []
        for dirroot, dirnames, filenames in os.walk(include_path):
            for dir_path in dirnames:
                real_path = os.path.join(dirroot, dir_path)
                flags = flags + ["-I" + real_path]
        return flags
    except Exception:
        # No include directory found: not an error, just nothing to add.
        return None
def FlagsForCompilationDatabase(root, filename):
    # Load the nearest compile_commands.json (also checking a 'build'
    # subdirectory) and return absolute-path flags for `filename`, or None.
    try:
        # Last argument of next function is the name of the build folder for
        # out of source projects
        compilation_db_path = FindNearest(root, 'compile_commands.json', 'build')
        compilation_db_dir = os.path.dirname(compilation_db_path)
        logging.info("Set compilation database directory to " + compilation_db_dir)
        compilation_db = ycm_core.CompilationDatabase(compilation_db_dir)
        if not compilation_db:
            logging.info("Compilation database file found but unable to load")
            return None
        compilation_info = GetCompilationInfoForFile(compilation_db, filename)
        if not compilation_info:
            logging.info("No compilation info for " + filename + " in compilation database")
            return None
        return MakeRelativePathsInFlagsAbsolute(
            compilation_info.compiler_flags_,
            compilation_info.compiler_working_dir_)
    except:
        # Any failure (no database, parse error) falls back to other sources.
        return None
def FlagsForFile(filename):
    # YCM entry point: prefer flags from the compilation database; otherwise
    # start from BASE_FLAGS and append .clang_complete / include-dir flags.
    root = os.path.realpath(filename);
    compilation_db_flags = FlagsForCompilationDatabase(root, filename)
    if compilation_db_flags:
        final_flags = compilation_db_flags
    else:
        final_flags = BASE_FLAGS
        clang_flags = FlagsForClangComplete(root)
        if clang_flags:
            final_flags = final_flags + clang_flags
        include_flags = FlagsForInclude(root)
        if include_flags:
            final_flags = final_flags + include_flags
    return {
        'flags': final_flags,
        'do_cache': True
    }
|
YanShenChun/dotfiles
|
ycm_extra_conf.py
|
Python
|
mit
| 5,086
|
'''
The Q-learning algorithm is used to learn a function approximator
for the state-action values of Tic-Tac-Toe positions.
'''
from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.rl import Environment, GameMDP, FixedGameMDP
from capstone.rl.learners import ApproximateQLearning
from capstone.rl.policies import EGreedy
from capstone.rl.utils import EpisodicWLDPlotter, Callback, LinearAnnealing
from capstone.rl.value_functions import QNetwork
# Presumably maps 1-based cell labels onto 0-based network output indices
# (1..9 -> 0..8) -- confirm against the QNetwork implementation.
mapping = { int(x): int(x) - 1 for x in range(1, 10)}
game = TicTacToe()
# mdp = GameMDP(game)
# Fixed opponent setup: the agent learns as player 1 against a random player.
mdp = FixedGameMDP(game, RandPlayer(), 1)
env = Environment(mdp)
# 9 inputs -> one hidden layer of 100 units -> 9 action values.
qnetwork = QNetwork(mapping, n_input_units=9, n_hidden_layers=1, n_output_units=9, n_hidden_units=100)
egreedy = EGreedy(env.actions, qnetwork, 1.0)
qlearning = ApproximateQLearning(
    env=env,
    qfunction=qnetwork,
    policy=egreedy,
    discount_factor=1.0,
    experience_replay=True,
    batch_size=32
)
qlearning.train(
    n_episodes=10000,
    callbacks=[
        # Periodically plot win/loss/draw rates against a random opponent.
        EpisodicWLDPlotter(
            game=game,
            opp_player=RandPlayer(),
            n_matches=1000,
            period=250,
            filepath='figures/tic_deep_ql.pdf'
        ),
        # Decay exploration epsilon linearly from 1.0 to 0.1 over 5000 episodes.
        LinearAnnealing(egreedy, 'epsilon', init=1.0, final=0.1, n_episodes=5000)
    ]
)
# n_episodes = 4,000
# n_episodes_annealing = 2,000
# mention that I tried adam and rmsprop but they did not work
|
davidrobles/mlnd-capstone-code
|
experiments/tic_ql_fa.py
|
Python
|
mit
| 1,429
|
#!/usr/bin/env python
# =============================================================================
# MODULE DOCSTRING
# =============================================================================
"""
Classes that represent a portion of the state of an OpenMM context.
"""
# =============================================================================
# GLOBAL IMPORTS
# =============================================================================
import abc
import sys
import copy
import zlib
import inspect
import weakref
import collections
import numpy as np
try:
import openmm
from openmm import unit
except ImportError: # OpenMM < 7.6
from simtk import openmm, unit
from openmmtools import utils, integrators, forces, constants
# =============================================================================
# MODULE FUNCTIONS
# =============================================================================
def create_thermodynamic_state_protocol(system, protocol, constants=None,
                                        composable_states=None):
    """An optimized utility function to create a list of thermodynamic states.
    The method takes advantage of the fact that copying a thermodynamic state
    does not require a copy of the OpenMM ``System`` object and that setting
    parameters that are controlled by the ``(Compound)ThermodynamicState``
    is effectively instantaneous.
    Parameters
    ----------
    system : ThermodynamicState or openmm.System
        A reference ``ThermodynamicState`` or the OpenMM ``System``. If a
        ``System``, the temperature must be specified through ``constants``
        or ``protocol``.
    protocol : dict: str -> list
        A dictionary associating the thermodynamic parameters to a list of
        values. All the lists must have the same length.
    constants : dict: str -> value
        A dictionary associating a thermodynamic parameter to a value that
        must remain constant along the protocol.
    composable_states : IComposableState or list, optional
        If specified, the function returns a list of ``CompoundThermodynamicState``
        instead of simple ``ThermodynamicState`` objects.
    Returns
    -------
    states : list of ``ThermodynamicState`` or ``CompoundThermodynamicState``
        The sequence of thermodynamic states for the given protocol.
    Raises
    ------
    ValueError
        If the protocol is empty, its value lists have mismatched lengths,
        a parameter appears both in ``constants`` and ``protocol``, or a
        ``System`` is passed without a temperature.
    Examples
    --------
    >>> from openmm import unit
    >>> from openmmtools import testsystems
    >>> system = testsystems.AlanineDipeptideExplicit().system
    >>> protocol = {'temperature': [300, 310, 330]*unit.kelvin,
    ...             'pressure': [1.0, 1.1, 1.2]*unit.atmosphere}
    >>> states = create_thermodynamic_state_protocol(system, protocol)
    >>> len(states)
    3
    """
    # Check that all elements of the protocol have the same length.
    if len(protocol) == 0:
        raise ValueError('No protocol has been specified.')
    values_lengths = [len(values) for values in protocol.values()]
    if len(set(values_lengths)) != 1:
        raise ValueError('The protocol parameter values have different '
                         'lengths!\n{}'.format(protocol))
    protocol_length = values_lengths[0]
    # Handle default value.
    if constants is None:
        constants = {}
    # Check that the user didn't specify the same parameter as both
    # a constant and a protocol variable.
    if len(set(constants).intersection(set(protocol))) != 0:
        raise ValueError('Some parameters have been specified both '
                         'in constants and protocol.')
    # Bug fix: augment a shallow copy so the caller's ``protocol`` dict is
    # not mutated as a side effect of this call.
    protocol = dict(protocol)
    for constant_parameter, value in constants.items():
        protocol[constant_parameter] = [value for _ in range(protocol_length)]
    # Create the reference ThermodynamicState.
    if isinstance(system, openmm.System):
        # Make sure the temperature is defined somewhere.
        try:
            temperature = constants['temperature']
        except KeyError:
            try:
                temperature = protocol['temperature'][0]
            except KeyError:
                raise ValueError('If a System is passed the list of '
                                 'constants must specify the temperature.')
        thermo_state = ThermodynamicState(system, temperature=temperature)
    else:
        thermo_state = system
    # Check if we need to create a reference CompoundThermodynamicState.
    # Cast a single ComposableState into a list.
    if isinstance(composable_states, IComposableState):
        composable_states = [composable_states]
    if composable_states is not None:
        thermo_state = CompoundThermodynamicState(thermo_state, composable_states)
    # Create all the states. Copying a state is much faster than
    # initializing one because we don't have to copy System object.
    states = [copy.deepcopy(thermo_state) for _ in range(protocol_length)]
    # Assign protocol parameters.
    protocol_keys, protocol_values = zip(*protocol.items())
    for state_idx, state_values in enumerate(zip(*protocol_values)):
        state = states[state_idx]
        for lambda_key, lambda_value in zip(protocol_keys, state_values):
            if hasattr(state, lambda_key):
                setattr(state, lambda_key, lambda_value)
            else:
                raise AttributeError('{} object does not have protocol attribute '
                                     '{}'.format(type(state), lambda_key))
    return states
def reduced_potential_at_states(sampler_state, thermodynamic_states, context_cache):
    """Compute the reduced potential of a single configuration at multiple thermodynamic states.

    Parameters
    ----------
    sampler_state : SamplerState
        The state holding the coordinates used to compute the potential.
    thermodynamic_states : list of ``ThermodynamicState``
        The list of thermodynamic states at which to compute the potential.
    context_cache : cache.ContextCache
        The context cache to use to request ``Context`` objects.

    Returns
    -------
    reduced_potentials : np.ndarray of float
        ``reduced_potentials[i]`` is the unit-less reduced potential
        (i.e., in kT units) of state ``thermodynamic_states[i]``.

    """
    reduced_potentials = np.zeros(len(thermodynamic_states))
    # Partition the states into groups that can share a single Context.
    compatible_groups, original_indices = group_by_compatibility(thermodynamic_states)
    for group, group_indices in zip(compatible_groups, original_indices):
        # Any Integrator works here since we only evaluate energies.
        context, _ = context_cache.get_context(group[0])
        # Push positions and box vectors into the Context. Velocities
        # are irrelevant for the potential energy.
        sampler_state.apply_to_context(context, ignore_velocities=True)
        # Evaluate the energies for the whole compatible group at once,
        # then scatter them back to their original positions.
        group_energies = ThermodynamicState.reduced_potential_at_states(context, group)
        for energy, original_idx in zip(group_energies, group_indices):
            reduced_potentials[original_idx] = energy
    return reduced_potentials
def group_by_compatibility(thermodynamic_states):
    """Utility function to split the thermodynamic states by compatibility.

    Parameters
    ----------
    thermodynamic_states : list of ThermodynamicState
        The thermodynamic states to group by compatibility.

    Returns
    -------
    compatible_groups : list of list of ThermodynamicState
        The states grouped by compatibility.
    original_indices : list of list of int
        The indices of the ThermodynamicStates in the original list.

    """
    compatible_groups = []
    original_indices = []
    for state_idx, state in enumerate(thermodynamic_states):
        # Append the state to every group whose representative is
        # compatible with it (compatibility is documented as transitive,
        # so in practice at most one group matches).
        is_grouped = False
        for members, member_indices in zip(compatible_groups, original_indices):
            if state.is_state_compatible(members[0]):
                is_grouped = True
                members.append(state)
                member_indices.append(state_idx)
        # No compatible group found: start a new one.
        if not is_grouped:
            compatible_groups.append([state])
            original_indices.append([state_idx])
    return compatible_groups, original_indices
def _box_vectors_volume(box_vectors):
"""Return the volume of the box vectors.
Support also triclinic boxes.
Parameters
----------
box_vectors : openmm.unit.Quantity
Vectors defining the box.
Returns
-------
volume : openmm.unit.Quantity
The box volume in units of length^3.
Examples
--------
Compute the volume of a Lennard-Jones fluid at 100 K and 1 atm.
>>> from openmmtools import testsystems
>>> system = testsystems.LennardJonesFluid(nparticles=100).system
>>> v = _box_vectors_volume(system.getDefaultPeriodicBoxVectors())
"""
a, b, c = box_vectors
box_matrix = np.array([a/a.unit, b/a.unit, c/a.unit])
return np.linalg.det(box_matrix) * a.unit**3
def _box_vectors_area_xy(box_vectors):
"""Return the xy-area of the box vectors.
Parameters
----------
box_vectors : openmm.unit.Quantity
Vectors defining the box.
Returns
-------
area_xy : openmm.unit.Quantity
The box area in units of length^2.
"""
return box_vectors[0][0] * box_vectors[1][1]
# =============================================================================
# CUSTOM EXCEPTIONS
# =============================================================================
class ThermodynamicsError(Exception):
    """Custom error raised by ThermodynamicState.

    The exception defines error codes as class constants. Currently
    defined constants are MULTIPLE_BAROSTATS, UNSUPPORTED_BAROSTAT,
    INCONSISTENT_BAROSTAT, BAROSTATED_NONPERIODIC, and
    INCONSISTENT_INTEGRATOR.

    Parameters
    ----------
    code : ThermodynamicsError.Code
        The error code.
    *args
        Optional values interpolated into the message template
        associated to ``code``.

    Attributes
    ----------
    code : ThermodynamicsError.Code
        The code associated to this error.

    Examples
    --------
    >>> raise ThermodynamicsError(ThermodynamicsError.MULTIPLE_BAROSTATS)
    Traceback (most recent call last):
    ...
    openmmtools.states.ThermodynamicsError: System has multiple barostats.

    """

    # TODO substitute this with enum when we drop Python 2.7 support
    (MULTIPLE_THERMOSTATS,
     NO_THERMOSTAT,
     NONE_TEMPERATURE,
     INCONSISTENT_THERMOSTAT,
     MULTIPLE_BAROSTATS,
     NO_BAROSTAT,
     UNSUPPORTED_BAROSTAT,
     UNSUPPORTED_ANISOTROPIC_BAROSTAT,
     SURFACE_TENSION_NOT_SUPPORTED,
     INCONSISTENT_BAROSTAT,
     BAROSTATED_NONPERIODIC,
     INCONSISTENT_INTEGRATOR,
     INCOMPATIBLE_SAMPLER_STATE,
     INCOMPATIBLE_ENSEMBLE) = range(14)

    # Message templates keyed by error code; positional __init__ args
    # are interpolated with str.format.
    error_messages = {
        MULTIPLE_THERMOSTATS: "System has multiple thermostats.",
        NO_THERMOSTAT: "System does not have a thermostat specifying the temperature.",
        NONE_TEMPERATURE: "Cannot set temperature of the thermodynamic state to None.",
        INCONSISTENT_THERMOSTAT: "System thermostat is inconsistent with thermodynamic state.",
        MULTIPLE_BAROSTATS: "System has multiple barostats.",
        UNSUPPORTED_BAROSTAT: "Found unsupported barostat {} in system.",
        UNSUPPORTED_ANISOTROPIC_BAROSTAT:
            "MonteCarloAnisotropicBarostat is only supported if the pressure along all scaled axes is the same.",
        SURFACE_TENSION_NOT_SUPPORTED:
            "Surface tension can only be set for states that have a system with a MonteCarloMembraneBarostat.",
        NO_BAROSTAT: "System does not have a barostat specifying the pressure.",
        INCONSISTENT_BAROSTAT: "System barostat is inconsistent with thermodynamic state.",
        BAROSTATED_NONPERIODIC: "Non-periodic systems cannot have a barostat.",
        INCONSISTENT_INTEGRATOR: "Integrator is coupled to a heat bath at a different temperature.",
        INCOMPATIBLE_SAMPLER_STATE: "The sampler state has a different number of particles.",
        INCOMPATIBLE_ENSEMBLE: "Cannot apply to a context in a different thermodynamic ensemble."
    }

    def __init__(self, code, *args):
        # Resolve the template for this code and fill in any extra
        # arguments (e.g. the unsupported barostat class name).
        message = self.error_messages[code].format(*args)
        super(ThermodynamicsError, self).__init__(message)
        self.code = code
class SamplerStateError(Exception):
    """Custom error raised by SamplerState.

    The exception defines error codes as class constants. The only
    currently defined constant is INCONSISTENT_VELOCITIES.

    Parameters
    ----------
    code : SamplerStateError.Code
        The error code.
    *args
        Optional values interpolated into the message template
        associated to ``code``.

    Attributes
    ----------
    code : SamplerStateError.Code
        The code associated to this error.

    Examples
    --------
    >>> raise SamplerStateError(SamplerStateError.INCONSISTENT_VELOCITIES)
    Traceback (most recent call last):
    ...
    openmmtools.states.SamplerStateError: Velocities have different length than positions.

    """

    # TODO substitute this with enum when we drop Python 2.7 support
    (INCONSISTENT_VELOCITIES,
     INCONSISTENT_POSITIONS) = range(2)

    # Message templates keyed by error code.
    error_messages = {
        INCONSISTENT_VELOCITIES: "Velocities have different length than positions.",
        INCONSISTENT_POSITIONS: "Specified positions with inconsistent number of particles."
    }

    def __init__(self, code, *args):
        # Resolve the template for this code and fill in any extra arguments.
        message = self.error_messages[code].format(*args)
        super(SamplerStateError, self).__init__(message)
        self.code = code
# =============================================================================
# THERMODYNAMIC STATE
# =============================================================================
class ThermodynamicState(object):
"""Thermodynamic state of a system.
Represent the portion of the state of a Context that does not
change with integration. Its main objectives are to wrap an
OpenMM system object to easily maintain a consistent thermodynamic
state. It can be used to create new OpenMM Contexts, or to convert
an existing Context to this particular thermodynamic state.
NVT, NPT and NPgammaT ensembles are supported. The temperature must
be specified in the constructor, either implicitly via a thermostat
force in the system, or explicitly through the temperature
parameter, which overrides an eventual thermostat indication.
To set a ThermodynamicState up in the NPgammaT ensemble, the system
passed to the constructor has to have a MonteCarloMembraneBarostat.
To set a ThermodynamicState up with anisotropic pressure control,
the system passed to the constructor has to have a MonteCarloAnisotropicBarostat.
Currently the MonteCarloAnisotropicBarostat is only supported if
the pressure is equal for all axes that are under pressure control.
Parameters
----------
system : openmm.System
An OpenMM system in a particular thermodynamic state.
temperature : openmm.unit.Quantity, optional
The temperature for the system at constant temperature. If
a MonteCarloBarostat is associated to the system, its
temperature will be set to this. If None, the temperature
is inferred from the system thermostat.
pressure : openmm.unit.Quantity, optional
The pressure for the system at constant pressure. If this
is specified, a MonteCarloBarostat is added to the system,
or just set to this pressure in case it already exists. If
None, the pressure is inferred from the system barostat, and
NVT ensemble is assumed if there is no barostat.
surface_tension : openmm.unit.Quantity, optional
The surface tension for the system at constant surface tension.
If this is specified, the system must have a MonteCarloMembraneBarostat.
If None, the surface_tension is inferred from the barostat and
NPT/NVT ensemble is assumed if there is no MonteCarloMembraneBarostat.
Attributes
----------
system
temperature
pressure
surface_tension
volume
n_particles
Notes
-----
    This state object cannot describe states obeying non-Boltzmann
statistics, such as Tsallis statistics.
Examples
--------
Specify an NVT state for a water box at 298 K.
>>> from openmmtools import testsystems
>>> temperature = 298.0*unit.kelvin
>>> waterbox = testsystems.WaterBox(box_edge=10*unit.angstroms,
... cutoff=4*unit.angstroms).system
>>> state = ThermodynamicState(system=waterbox, temperature=temperature)
In an NVT ensemble volume is constant and pressure is None.
>>> state.volume
Quantity(value=1.0, unit=nanometer**3)
>>> state.pressure is None
True
Convert this to an NPT state at 298 K and 1 atm pressure. This
operation automatically adds a MonteCarloBarostat to the system.
>>> pressure = 1.0*unit.atmosphere
>>> state.pressure = pressure
>>> state.pressure
Quantity(value=1.0, unit=atmosphere)
>>> state.volume is None
True
You cannot set a non-periodic system at constant pressure
>>> nonperiodic_system = testsystems.TolueneVacuum().system
>>> state = ThermodynamicState(nonperiodic_system, temperature=300*unit.kelvin,
... pressure=1.0*unit.atmosphere)
Traceback (most recent call last):
...
openmmtools.states.ThermodynamicsError: Non-periodic systems cannot have a barostat.
When temperature and/or pressure are not specified (i.e. they are
None) ThermodynamicState tries to infer them from a thermostat or
a barostat.
>>> state = ThermodynamicState(system=waterbox)
Traceback (most recent call last):
...
openmmtools.states.ThermodynamicsError: System does not have a thermostat specifying the temperature.
>>> thermostat = openmm.AndersenThermostat(200.0*unit.kelvin, 1.0/unit.picosecond)
>>> force_id = waterbox.addForce(thermostat)
>>> state = ThermodynamicState(system=waterbox)
>>> state.pressure is None
True
>>> state.temperature
Quantity(value=200.0, unit=kelvin)
>>> barostat = openmm.MonteCarloBarostat(1.0*unit.atmosphere, 200.0*unit.kelvin)
>>> force_id = waterbox.addForce(barostat)
>>> state = ThermodynamicState(system=waterbox)
>>> state.pressure
Quantity(value=1.01325, unit=bar)
>>> state.temperature
Quantity(value=200.0, unit=kelvin)
"""
# -------------------------------------------------------------------------
# Public interface
# -------------------------------------------------------------------------
    def __init__(self, system, temperature=None, pressure=None, surface_tension=None):
        """Initialize the thermodynamic state; see the class docstring for the parameters."""
        # All construction logic lives in _initialize (defined elsewhere
        # in this class) so it can be reused outside the constructor.
        self._initialize(system, temperature, pressure, surface_tension)
    @property
    def system(self):
        """The system in this thermodynamic state.

        The returned system is a copy and can be modified without
        altering the internal state of ThermodynamicState. In order
        to ensure a consistent thermodynamic state, the system has
        a Thermostat force. You can use `get_system()` to obtain a
        copy of the system without the thermostat. The method
        `create_context()` then takes care of removing the thermostat
        when an integrator with a coupled heat bath is used (e.g.
        `LangevinIntegrator`).

        It can be set only to a system which is consistent with the
        current thermodynamic state. Use `set_system()` if you want to
        correct the thermodynamic state of the system automatically
        before assignment.

        See Also
        --------
        ThermodynamicState.get_system
        ThermodynamicState.set_system
        ThermodynamicState.create_context

        """
        # Delegate to get_system() with default arguments, which returns
        # a deep copy configured at this state's temperature and pressure.
        return self.get_system()
    @system.setter
    def system(self, value):
        # Equivalent to set_system(value, fix_state=False): raises
        # ThermodynamicsError if the system's thermostat/barostat are
        # not configured consistently with this state.
        self.set_system(value)
def set_system(self, system, fix_state=False):
"""Manipulate and set the system.
With default arguments, this is equivalent to using the system
property, which raises an exception if the thermostat and the
barostat are not configured according to the thermodynamic state.
With this method it is possible to adjust temperature and
pressure of the system to make the assignment possible, without
manually configuring thermostat and barostat.
Parameters
----------
system : openmm.System
The system to set.
fix_state : bool, optional
If True, a thermostat is added to the system (if not already
present) and set to the correct temperature. If this state is
in NPT ensemble, a barostat is added or configured if it
exist already. If False, this simply check that thermostat
and barostat are correctly configured without modifying them.
Default is False.
Raises
------
ThermodynamicsError
If the system after the requested manipulation is still in
an incompatible state.
Examples
--------
The constructor adds a thermostat and a barostat to configure
the system in an NPT ensemble.
>>> from openmmtools import testsystems
>>> alanine = testsystems.AlanineDipeptideExplicit()
>>> state = ThermodynamicState(alanine.system, temperature=300*unit.kelvin,
... pressure=1.0*unit.atmosphere)
If we try to set a system not in NPT ensemble, an error occur.
>>> state.system = alanine.system
Traceback (most recent call last):
...
openmmtools.states.ThermodynamicsError: System does not have a thermostat specifying the temperature.
We can fix both thermostat and barostat while setting the system.
>>> state.set_system(alanine.system, fix_state=True)
"""
# Copy the system to avoid modifications during standardization.
system = copy.deepcopy(system)
self._unsafe_set_system(system, fix_state)
def get_system(self, remove_thermostat=False, remove_barostat=False):
"""Manipulate and return the system.
With default arguments, this is equivalent as the system property.
By setting the arguments it is possible to obtain a modified copy
of the system without the thermostat or the barostat.
Parameters
----------
remove_thermostat : bool
If True, the system thermostat is removed.
remove_barostat : bool
If True, the system barostat is removed.
Returns
-------
system : openmm.System
The system of this ThermodynamicState.
Examples
--------
The constructor adds a thermostat and a barostat to configure
the system in an NPT ensemble.
>>> from openmmtools import testsystems
>>> alanine = testsystems.AlanineDipeptideExplicit()
>>> state = ThermodynamicState(alanine.system, temperature=300*unit.kelvin,
... pressure=1.0*unit.atmosphere)
The system property returns a copy of the system with the
added thermostat and barostat.
>>> system = state.system
>>> [force.__class__.__name__ for force in system.getForces()
... if 'Thermostat' in force.__class__.__name__]
['AndersenThermostat']
We can remove them while getting the arguments with
>>> system = state.get_system(remove_thermostat=True, remove_barostat=True)
>>> [force.__class__.__name__ for force in system.getForces()
... if 'Thermostat' in force.__class__.__name__]
[]
"""
system = copy.deepcopy(self._standard_system)
# Remove or configure standard pressure barostat.
if remove_barostat:
self._pop_barostat(system)
else: # Set pressure of standard barostat.
self._set_system_pressure(system, self.pressure)
self._set_system_surface_tension(system, self.surface_tension)
# Set temperature of standard thermostat and barostat.
if not (remove_barostat and remove_thermostat):
self._set_system_temperature(system, self.temperature)
# Remove or configure standard temperature thermostat.
if remove_thermostat:
self._remove_thermostat(system)
return system
    @property
    def temperature(self):
        """Constant temperature of the thermodynamic state.

        Setting this to None raises ThermodynamicsError.
        """
        return self._temperature
    @temperature.setter
    def temperature(self, value):
        # A thermodynamic state must always have a well-defined temperature.
        if value is None:
            raise ThermodynamicsError(ThermodynamicsError.NONE_TEMPERATURE)
        self._temperature = value
    @property
    def kT(self):
        """Thermal energy per mole (kB * temperature)."""
        return constants.kB * self.temperature
    @property
    def beta(self):
        """Thermodynamic beta, 1/kT, in units of mole/energy."""
        return 1.0 / self.kT
    @property
    def pressure(self):
        """Constant pressure of the thermodynamic state.

        If the pressure is allowed to fluctuate, this is None. Setting
        this will automatically add/configure a barostat to the system.
        If it is set to None, the barostat will be removed.

        """
        return self._pressure
@pressure.setter
def pressure(self, new_pressure):
old_pressure = self._pressure
self._pressure = new_pressure
# If we change ensemble, we need to modify the standard system.
if (new_pressure is None) != (old_pressure is None):
# The barostat will be removed/added since fix_state is True.
try:
self.set_system(self._standard_system, fix_state=True)
except ThermodynamicsError:
# Restore old pressure to keep object consistent.
self._pressure = old_pressure
raise
@property
def barostat(self):
"""The barostat associated to the system.
Note that this is only a copy of the barostat, and you will need
to set back the ThermodynamicState.barostat property for the changes
to take place internally. If the pressure is allowed to fluctuate,
this is None. Normally, you should only need to access the pressure
and temperature properties, but this allows you to modify other parameters
of the MonteCarloBarostat (e.g. frequency) after initialization. Setting
this to None will place the system in an NVT ensemble.
"""
# Retrieve the barostat with standard temperature/pressure, then
# set temperature and pressure to the thermodynamic state values.
barostat = copy.deepcopy(self._find_barostat(self._standard_system))
if barostat is not None: # NPT ensemble.
self._set_barostat_pressure(barostat, self.pressure)
self._set_barostat_temperature(barostat, self.temperature)
if self.surface_tension is not None:
self._set_barostat_surface_tension(barostat, self.surface_tension)
return barostat
@barostat.setter
def barostat(self, new_barostat):
# If None, just remove the barostat from the standard system.
if new_barostat is None:
self.pressure = None
self.surface_tension = None
return
# Remember old pressure and surface tension in case something goes wrong.
old_pressure = self.pressure
old_surface_tension = self.surface_tension
# make sure that the barostat type does not change
if self.barostat is not None and type(new_barostat) is not type(self.barostat):
raise ThermodynamicsError(ThermodynamicsError.INCONSISTENT_BAROSTAT)
# Build the system with the new barostat.
system = self.get_system(remove_barostat=True)
system.addForce(copy.deepcopy(new_barostat))
# Update the internally stored standard system, and restore the old
# pressure if something goes wrong (e.g. the system is not periodic).
try:
self._pressure = self._get_barostat_pressure(new_barostat)
self._surface_tension = self._get_barostat_surface_tension(new_barostat)
self._unsafe_set_system(system, fix_state=False)
except ThermodynamicsError:
self._pressure = old_pressure
self._surface_tension = old_surface_tension
raise
    @property
    def default_box_vectors(self):
        """The default box vectors of the System (read-only)."""
        # Read from the internally stored standard system.
        return self._standard_system.getDefaultPeriodicBoxVectors()
    @property
    def volume(self):
        """Constant volume of the thermodynamic state (read-only).

        If the volume is allowed to fluctuate, or if the system is
        not in a periodic box this is None.

        See Also
        --------
        ThermodynamicState.get_volume

        """
        return self.get_volume()
def get_volume(self, ignore_ensemble=False):
"""Volume of the periodic box (read-only).
Parameters
----------
ignore_ensemble : bool, optional
If True, the volume of the periodic box vectors is returned
even if the volume fluctuates.
Returns
-------
volume : openmm.unit.Quantity
The volume of the periodic box (units of length^3) or
None if the system is not periodic or allowed to fluctuate.
"""
# Check if volume fluctuates
if self.pressure is not None and not ignore_ensemble:
return None
if not self._standard_system.usesPeriodicBoundaryConditions():
return None
return _box_vectors_volume(self.default_box_vectors)
    @property
    def n_particles(self):
        """Number of particles (read-only)."""
        # Read from the internally stored standard system.
        return self._standard_system.getNumParticles()
    @property
    def is_periodic(self):
        """True if the system is in a periodic box (read-only)."""
        return self._standard_system.usesPeriodicBoundaryConditions()
    @property
    def surface_tension(self):
        """Constant surface tension of the thermodynamic state.

        This is None if the state has no surface tension control
        (i.e., it is not in the NPgammaT ensemble).
        """
        return self._surface_tension
@surface_tension.setter
def surface_tension(self, gamma):
if (self._surface_tension is None) != (gamma is None):
raise ThermodynamicsError(ThermodynamicsError.SURFACE_TENSION_NOT_SUPPORTED)
else:
self._surface_tension = gamma
    def reduced_potential(self, context_state):
        """Reduced potential in this thermodynamic state.

        Parameters
        ----------
        context_state : SamplerState or openmm.Context
            Carry the configurational properties of the system.

        Returns
        -------
        u : float
            The unit-less reduced potential, which can be considered
            to have units of kT.

        Notes
        -----
        The reduced potential is defined as in Ref. [1],
        with a additional term for the surface tension

        u = \beta [U(x) + p V(x) + \mu N(x) - \gamma A]

        where the thermodynamic parameters are

        \beta = 1/(kB T) is the inverse temperature
        p is the pressure
        \mu is the chemical potential
        \gamma is the surface tension

        and the configurational properties are

        x the atomic positions
        U(x) is the potential energy
        V(x) is the instantaneous box volume
        N(x) the numbers of various particle species (e.g. protons of
             titratible groups)
        A(x) is the xy-area of the box.

        References
        ----------
        [1] Shirts MR and Chodera JD. Statistically optimal analysis of
        equilibrium states. J Chem Phys 129:124105, 2008.

        Examples
        --------
        Compute the reduced potential of a water box at 298 K and 1 atm.

        >>> from openmmtools import testsystems
        >>> waterbox = testsystems.WaterBox(box_edge=20.0*unit.angstroms)
        >>> system, positions = waterbox.system, waterbox.positions
        >>> state = ThermodynamicState(system=waterbox.system,
        ...                            temperature=298.0*unit.kelvin,
        ...                            pressure=1.0*unit.atmosphere)
        >>> integrator = openmm.VerletIntegrator(1.0*unit.femtosecond)
        >>> context = state.create_context(integrator)
        >>> context.setPositions(waterbox.positions)
        >>> sampler_state = SamplerState.from_context(context)
        >>> u = state.reduced_potential(sampler_state)

        If the sampler state is incompatible, an error is raised

        >>> incompatible_sampler_state = sampler_state[:-1]
        >>> state.reduced_potential(incompatible_sampler_state)
        Traceback (most recent call last):
        ...
        openmmtools.states.ThermodynamicsError: The sampler state has a different number of particles.

        In case a cached SamplerState containing the potential energy
        and the volume of the context is not available, the method
        accepts a Context object and compute them with Context.getState().

        >>> u = state.reduced_potential(context)

        """
        # Read Context/SamplerState n_particles, energy and volume.
        if isinstance(context_state, openmm.Context):
            # A Context requires an explicit getState() query for the energy.
            n_particles = context_state.getSystem().getNumParticles()
            openmm_state = context_state.getState(getEnergy=True)
            potential_energy = openmm_state.getPotentialEnergy()
            volume = openmm_state.getPeriodicBoxVolume()
            area = _box_vectors_area_xy(openmm_state.getPeriodicBoxVectors())
        else:
            # A SamplerState caches these properties, avoiding a Context query.
            n_particles = context_state.n_particles
            potential_energy = context_state.potential_energy
            volume = context_state.volume
            area = context_state.area_xy
        # Check compatibility.
        if n_particles != self.n_particles:
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_SAMPLER_STATE)
        return self._compute_reduced_potential(potential_energy, self.temperature,
                                               volume, self.pressure, area, self.surface_tension)
@classmethod
def reduced_potential_at_states(cls, context, thermodynamic_states):
"""Efficiently compute the reduced potential for a list of compatible states.
The user is responsible to ensure that the given context is compatible
with the thermodynamic states.
Parameters
----------
context : openmm.Context
The OpenMM `Context` object with box vectors and positions set.
thermodynamic_states : list of ThermodynamicState
The list of thermodynamic states at which to compute the reduced
potential.
Returns
-------
reduced_potentials : list of float
The unit-less reduced potentials, which can be considered
to have units of kT.
Raises
------
ValueError
If the thermodynamic states are not compatible to each other.
"""
# Isolate first thermodynamic state.
if len(thermodynamic_states) == 1:
thermodynamic_states[0].apply_to_context(context)
return [thermodynamic_states[0].reduced_potential(context)]
# Check that the states are compatible.
for state_idx, state in enumerate(thermodynamic_states[:-1]):
if not state.is_state_compatible(thermodynamic_states[state_idx + 1]):
raise ValueError('State {} is not compatible.')
# In NPT, we'll need also the volume.
is_npt = thermodynamic_states[0].pressure is not None
is_npgammat = thermodynamic_states[0].surface_tension is not None
volume = None
area_xy = None
energy_by_force_group = {force.getForceGroup(): 0.0*unit.kilocalories_per_mole
for force in context.getSystem().getForces()}
# Create new cache for memoization.
memo = {}
# Go through thermodynamic states and compute only the energy of the
# force groups that changed. Compute all the groups the first pass.
force_groups_to_compute = set(energy_by_force_group)
reduced_potentials = [0.0 for _ in range(len(thermodynamic_states))]
for state_idx, state in enumerate(thermodynamic_states):
if state_idx == 0:
state.apply_to_context(context)
else:
state._apply_to_context_in_state(context, thermodynamic_states[state_idx - 1])
# Compute the energy of all the groups to update.
for force_group_idx in force_groups_to_compute:
openmm_state = context.getState(getEnergy=True, groups=2**force_group_idx)
energy_by_force_group[force_group_idx] = openmm_state.getPotentialEnergy()
# Compute volume if this is the first time we obtain a state.
if is_npt and volume is None:
volume = openmm_state.getPeriodicBoxVolume()
if is_npgammat and area_xy is None:
area_xy = _box_vectors_area_xy(openmm_state.getPeriodicBoxVectors())
# Compute the new total reduced potential.
potential_energy = unit.sum(list(energy_by_force_group.values()))
reduced_potential = cls._compute_reduced_potential(potential_energy, state.temperature,
volume, state.pressure, area_xy, state.surface_tension)
reduced_potentials[state_idx] = reduced_potential
# Update groups to compute for next states.
if state_idx < len(thermodynamic_states) - 1:
next_state = thermodynamic_states[state_idx + 1]
force_groups_to_compute = next_state._find_force_groups_to_update(context, state, memo)
return reduced_potentials
    def is_state_compatible(self, thermodynamic_state):
        """Check compatibility between ThermodynamicStates.

        The state is compatible if Contexts created by thermodynamic_state
        can be set to this ThermodynamicState through apply_to_context.
        The property is symmetric and transitive.

        This is faster than checking compatibility of a Context object
        through is_context_compatible, and it should be preferred when
        possible.

        Parameters
        ----------
        thermodynamic_state : ThermodynamicState
            The thermodynamic state to test.

        Returns
        -------
        is_compatible : bool
            True if the context created by thermodynamic_state can be
            converted to this state through apply_to_context().

        See Also
        --------
        ThermodynamicState.apply_to_context
        ThermodynamicState.is_context_compatible

        Examples
        --------
        States in the same ensemble (NVT or NPT) are compatible.

        >>> from openmm import unit
        >>> from openmmtools import testsystems
        >>> alanine = testsystems.AlanineDipeptideExplicit()
        >>> state1 = ThermodynamicState(alanine.system, 273*unit.kelvin)
        >>> state2 = ThermodynamicState(alanine.system, 310*unit.kelvin)
        >>> state1.is_state_compatible(state2)
        True

        States in different ensembles are not compatible.

        >>> state1.pressure = 1.0*unit.atmosphere
        >>> state1.is_state_compatible(state2)
        False

        States that store different systems (that differ by more than
        barostat and thermostat pressure and temperature) are also not
        compatible.

        >>> alanine_implicit = testsystems.AlanineDipeptideImplicit().system
        >>> state_implicit = ThermodynamicState(alanine_implicit, 310*unit.kelvin)
        >>> state2.is_state_compatible(state_implicit)
        False

        """
        # Compatibility reduces to equality of the standardized-system hashes.
        state_system_hash = thermodynamic_state._standard_system_hash
        return self._standard_system_hash == state_system_hash
def is_context_compatible(self, context):
"""Check compatibility of the given context.
This is equivalent to is_state_compatible but slower, and it should
be used only when the state the created the context is unknown. The
context is compatible if it can be set to this ThermodynamicState
through apply_to_context().
Parameters
----------
context : openmm.Context
The OpenMM context to test.
Returns
-------
is_compatible : bool
True if this ThermodynamicState can be applied to context.
See Also
--------
ThermodynamicState.apply_to_context
ThermodynamicState.is_state_compatible
"""
# Avoid modifying the context system during standardization.
context_system = copy.deepcopy(context.getSystem())
context_integrator = context.getIntegrator()
# If the temperature is controlled by the integrator, the compatibility
# is independent on the parameters of the thermostat, so we add one
# identical to self._standard_system. We don't care if the integrator's
# temperature != self.temperature, so we set check_consistency=False.
if self._is_integrator_thermostated(context_integrator, check_consistency=False):
thermostat = self._find_thermostat(self._standard_system)
context_system.addForce(copy.deepcopy(thermostat))
# Compute and compare standard system hash.
self._standardize_system(context_system)
context_system_hash = self._compute_standard_system_hash(context_system)
is_compatible = self._standard_system_hash == context_system_hash
return is_compatible
def create_context(self, integrator, platform=None, platform_properties=None):
"""Create a context in this ThermodynamicState.
The context contains a copy of the system. If the integrator
is coupled to a heat bath (e.g. LangevinIntegrator), the system
in the context will not have a thermostat, and vice versa if
the integrator is not thermostated the system in the context will
have a thermostat.
An integrator is considered thermostated if it exposes a method
getTemperature(). A CompoundIntegrator is considered coupled to
a heat bath if at least one of its integrators is. An exception
is raised if the integrator is thermostated at a temperature
different from the thermodynamic state's.
Parameters
----------
integrator : openmm.Integrator
The integrator to use for Context creation. The eventual
heat bath temperature must be consistent with the
thermodynamic state.
platform : openmm.Platform, optional
Platform to use. If None, OpenMM tries to select the fastest
available platform. Default is None.
platform_properties : dict, optional
A dictionary of platform properties. Requires platform to be
specified.
Returns
-------
context : openmm.Context
The created OpenMM Context object.
Raises
------
ThermodynamicsError
If the integrator has a temperature different from this
ThermodynamicState.
ValueError
If platform_properties is specified, but platform is None
Examples
--------
When passing an integrator that does not expose getter and setter
for the temperature, the context will be created with a thermostat.
>>> import openmm
>>> from openmm import unit
>>> from openmmtools import testsystems
>>> toluene = testsystems.TolueneVacuum()
>>> state = ThermodynamicState(toluene.system, 300*unit.kelvin)
>>> integrator = openmm.VerletIntegrator(1.0*unit.femtosecond)
>>> context = state.create_context(integrator)
>>> system = context.getSystem()
>>> [force.__class__.__name__ for force in system.getForces()
... if 'Thermostat' in force.__class__.__name__]
['AndersenThermostat']
The thermostat is removed if we choose an integrator coupled
to a heat bath.
>>> del context # Delete previous context to free memory.
>>> integrator = openmm.LangevinIntegrator(300*unit.kelvin, 5.0/unit.picosecond,
... 2.0*unit.femtosecond)
>>> context = state.create_context(integrator)
>>> system = context.getSystem()
>>> [force.__class__.__name__ for force in system.getForces()
... if 'Thermostat' in force.__class__.__name__]
[]
"""
# Check that integrator is consistent and if it is thermostated.
# With CompoundIntegrator, at least one must be thermostated.
is_thermostated = self._is_integrator_thermostated(integrator)
# Get a copy of the system. If integrator is coupled
# to heat bath, remove the system thermostat.
system = self.get_system(remove_thermostat=is_thermostated)
# Create context.
if platform is None:
if platform_properties is not None:
raise ValueError("To set platform_properties, you need to also specify the platform.")
return openmm.Context(system, integrator)
elif platform_properties is None:
return openmm.Context(system, integrator, platform)
else:
return openmm.Context(system, integrator, platform, platform_properties)
    def apply_to_context(self, context):
        """Apply this ThermodynamicState to the context.
        The method apply_to_context does *not* check for the compatibility
        of the context. The user is responsible for this. Depending on the
        system size, is_context_compatible can be an expensive operation,
        so is_state_compatible should be preferred when possible.
        Parameters
        ----------
        context : openmm.Context
            The OpenMM Context to be set to this ThermodynamicState.
        Raises
        ------
        ThermodynamicsError
            If the context is in a different thermodynamic ensemble w.r.t.
            this state. This is just a quick check which does not substitute
            is_state_compatible or is_context_compatible.
        See Also
        --------
        ThermodynamicState.is_state_compatible
        ThermodynamicState.is_context_compatible
        Examples
        --------
        The method doesn't verify compatibility with the context, it is
        the user's responsibility to do so, possibly with is_state_compatible
        rather than is_context_compatible which is slower.
        >>> import openmm
        >>> from openmm import unit
        >>> from openmmtools import testsystems
        >>> toluene = testsystems.TolueneVacuum()
        >>> state1 = ThermodynamicState(toluene.system, 273.0*unit.kelvin)
        >>> state2 = ThermodynamicState(toluene.system, 310.0*unit.kelvin)
        >>> integrator = openmm.VerletIntegrator(1.0*unit.femtosecond)
        >>> context = state1.create_context(integrator)
        >>> if state2.is_state_compatible(state1):
        ...     state2.apply_to_context(context)
        >>> context.getParameter(openmm.AndersenThermostat.Temperature())
        310.0
        """
        # Update all barostat parameters (pressure, temperature, surface
        # tension) both on the Force object and as Context parameters.
        self._set_context_barostat(context, update_pressure=True, update_temperature=True, update_surface_tension=True)
        # Then update the heat bath (integrator or Andersen thermostat).
        self._set_context_thermostat(context)
# -------------------------------------------------------------------------
# Magic methods
# -------------------------------------------------------------------------
def __copy__(self):
"""Overwrite normal implementation to share standard system."""
cls = self.__class__
new_state = cls.__new__(cls)
new_state.__dict__.update({k: v for k, v in self.__dict__.items()
if k != '_standard_system'})
new_state.__dict__['_standard_system'] = self._standard_system
return new_state
def __deepcopy__(self, memo):
"""Overwrite normal implementation to share standard system."""
cls = self.__class__
new_state = cls.__new__(cls)
memo[id(self)] = new_state
for k, v in self.__dict__.items():
if k != '_standard_system':
new_state.__dict__[k] = copy.deepcopy(v, memo)
new_state.__dict__['_standard_system'] = self._standard_system
return new_state
_ENCODING = 'utf-8'
    def __getstate__(self, skip_system=False):
        """Return a dictionary representation of the state.
        Zlib compresses the serialized system after its created. Many
        alchemical systems have very long serializations so this method
        helps reduce space in memory and on disk. The compression forces
        the encoding for compatibility between separate Python installs
        (utf-8 by default).
        Parameters
        ----------
        skip_system: bool, Default: False
            Choose whether or not to get the serialized system as the part
            of the return. If False, then the serialized system is computed
            and included in the serialization. If True, then ``None`` is
            returned for the ``'standard_system'`` field of the serialization.
        """
        serialized_system = None
        if not skip_system:
            # Serialize to XML, then compress. The explicit encoding keeps
            # the compressed payload portable (see _ENCODING).
            serialized_system = openmm.XmlSerializer.serialize(self._standard_system)
            serialized_system = zlib.compress(serialized_system.encode(self._ENCODING))
        return dict(standard_system=serialized_system, temperature=self.temperature,
                    pressure=self.pressure, surface_tension=self._surface_tension)
    def __setstate__(self, serialization):
        """Set the state from a dictionary representation.
        Expects the dictionary produced by ``__getstate__``: temperature,
        pressure, surface_tension, and a (possibly zlib-compressed) XML
        serialization of the standard system.
        """
        self._temperature = serialization['temperature']
        self._pressure = serialization['pressure']
        self._surface_tension = serialization['surface_tension']
        serialized_system = serialization['standard_system']
        # Decompress system, if need be
        try:
            serialized_system = zlib.decompress(serialized_system)
            # Py2 returns the string, Py3 returns a byte string to decode, but if we
            # decode the string in Py2 we get a unicode object that OpenMM can't parse.
            if sys.version_info > (3, 0):
                serialized_system = serialized_system.decode(self._ENCODING)
        except (TypeError, zlib.error):  # Py3/2 throws different error types
            # Catch the "serialization is not compressed" error, do nothing to string.
            # Preserves backwards compatibility
            pass
        # The hash of the XML string identifies the system in the shared cache.
        self._standard_system_hash = serialized_system.__hash__()
        # Check first if we have already the system in the cache.
        try:
            self._standard_system = self._standard_system_cache[self._standard_system_hash]
        except KeyError:
            system = openmm.XmlSerializer.deserialize(serialized_system)
            self._standard_system_cache[self._standard_system_hash] = system
            self._standard_system = system
# -------------------------------------------------------------------------
# Internal-usage: initialization
# -------------------------------------------------------------------------
    def _initialize(self, system, temperature=None, pressure=None, surface_tension=None):
        """Initialize the thermodynamic state.
        Any of temperature/pressure/surface_tension left as None is inferred
        from the system's thermostat/barostat; the explicitly-given values
        are then written back into the (copied) system before it is stored
        as the standard system.
        """
        # Avoid modifying the original system when setting temperature and pressure.
        system = copy.deepcopy(system)
        # If pressure is None, we try to infer the pressure from the barostat.
        barostat = self._find_barostat(system)
        if pressure is None and barostat is not None:
            self._pressure = self._get_barostat_pressure(barostat)
        else:
            self._pressure = pressure  # Pressure here can also be None.
        # If surface tension is None, we try to infer the surface tension from the barostat.
        barostat_type = type(barostat)
        if surface_tension is None and barostat_type == openmm.MonteCarloMembraneBarostat:
            self._surface_tension = barostat.getDefaultSurfaceTension()
        elif surface_tension is not None and barostat_type != openmm.MonteCarloMembraneBarostat:
            # A surface tension requires a membrane barostat.
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_ENSEMBLE)
        else:
            self._surface_tension = surface_tension
        # If temperature is None, we infer the temperature from a thermostat.
        if temperature is None:
            thermostat = self._find_thermostat(system)
            if thermostat is None:
                raise ThermodynamicsError(ThermodynamicsError.NO_THERMOSTAT)
            self._temperature = thermostat.getDefaultTemperature()
        else:
            self._temperature = temperature
        # Fix system temperature/pressure if requested.
        if temperature is not None:
            self._set_system_temperature(system, temperature)
        if pressure is not None:
            self._set_system_pressure(system, pressure)
        if surface_tension is not None:
            self._set_system_surface_tension(system, surface_tension)
        # We can use the unsafe set_system since the system has been copied.
        self._unsafe_set_system(system, fix_state=False)
# -------------------------------------------------------------------------
# Internal-usage: system handling
# -------------------------------------------------------------------------
# Standard values are not standard in a physical sense, they are
# just consistent between ThermodynamicStates to make comparison
# of standard system hashes possible. We set this to round floats
# and use OpenMM units to avoid funniness due to precision errors
# caused by unit conversion.
_STANDARD_PRESSURE = 1.0*unit.bar
_STANDARD_TEMPERATURE = 273.0*unit.kelvin
_STANDARD_SURFACE_TENSION = 0.0*unit.nanometer*unit.bar
_NONPERIODIC_NONBONDED_METHODS = {openmm.NonbondedForce.NoCutoff,
openmm.NonbondedForce.CutoffNonPeriodic}
# Shared cache of standard systems to minimize memory consumption
# when simulating a lot of thermodynamic states. The cache holds
# only weak references so ThermodynamicState objects must keep the
# system as an internal variable.
_standard_system_cache = weakref.WeakValueDictionary()
    def _unsafe_set_system(self, system, fix_state):
        """This implements self.set_system but modifies the passed system.
        Parameters
        ----------
        system : openmm.System
            The system to take ownership of; it is standardized in place.
        fix_state : bool
            If True, the system's barostat is added/removed to match this
            state's ensemble; if False, the system is instead checked for
            consistency with this state.
        """
        # Configure temperature and pressure.
        if fix_state:
            # We just need to add/remove the barostat according to the ensemble.
            # Temperature, pressure, surface tension of thermostat and barostat will be set
            # to their standard value afterwards.
            self._set_system_pressure(system, self.pressure)
            self._set_system_surface_tension(system, self.surface_tension)
        else:
            # If the flag is deactivated, we check that temperature
            # pressure, and surface tension of the system are correct.
            self._check_system_consistency(system)
        # Update standard system.
        self._standardize_system(system)
        self._update_standard_system(system)
    def _check_system_consistency(self, system):
        """Check system consistency with this ThermodynamicState.
        Raise an error if the system is inconsistent. Currently checks
        that there's 1 and only 1 thermostat at the correct temperature,
        that there's only 1 barostat (or none in case this is in NVT),
        that the barostat is supported, has the correct temperature and
        pressure, and that it is not associated to a non-periodic system.
        Parameters
        ----------
        system : openmm.System
            The system to test.
        Raises
        ------
        ThermodynamicsError
            If the system is inconsistent with this state.
        """
        TE = ThermodynamicsError  # shortcut
        # This raises MULTIPLE_THERMOSTATS
        thermostat = self._find_thermostat(system)
        # When system is self._system, we check the presence of a
        # thermostat before the barostat to avoid crashes when
        # checking the barostat temperature.
        if thermostat is None:
            raise TE(TE.NO_THERMOSTAT)
        elif not utils.is_quantity_close(thermostat.getDefaultTemperature(),
                                         self.temperature):
            raise TE(TE.INCONSISTENT_THERMOSTAT)
        # This line raises MULTIPLE_BAROSTATS and UNSUPPORTED_BAROSTAT.
        barostat = self._find_barostat(system)
        if barostat is not None:
            # Check that barostat is not added to non-periodic system. We
            # cannot use System.usesPeriodicBoundaryConditions() because
            # in OpenMM < 7.1 that returns True when a barostat is added.
            # TODO just use usesPeriodicBoundaryConditions when drop openmm7.0
            for force in system.getForces():
                if isinstance(force, openmm.NonbondedForce):
                    nonbonded_method = force.getNonbondedMethod()
                    if nonbonded_method in self._NONPERIODIC_NONBONDED_METHODS:
                        raise TE(TE.BAROSTATED_NONPERIODIC)
            if not self._is_barostat_consistent(barostat):
                raise TE(TE.INCONSISTENT_BAROSTAT)
        # This state is NPT (pressure defined) but the system has no barostat.
        elif self.pressure is not None:
            raise TE(TE.NO_BAROSTAT)
    def _standardize_system(self, system):
        """Return a copy of the system in a standard representation.
        This effectively defines which ThermodynamicStates are compatible
        between each other. Compatible ThermodynamicStates have the same
        standard systems, and is_state_compatible will return True if
        the (cached) serialization of the standard systems are identical.
        If no thermostat is present, an AndersenThermostat is added. The
        presence or absence of a barostat determine whether this system is
        in NPT or NVT ensemble. Pressure and temperature of barostat (if
        any) and thermostat are set to _STANDARD_PRESSURE/TEMPERATURE.
        If present, the barostat force is pushed at the end so that the
        order of the two forces won't matter.
        Effectively this means that only same systems in the same ensemble
        (NPT or NVT) are compatible between each other.
        Parameters
        ----------
        system : openmm.System
            The system to standardize.
        See Also
        --------
        ThermodynamicState.apply_to_context
        ThermodynamicState.is_state_compatible
        ThermodynamicState.is_context_compatible
        """
        # This adds a thermostat if it doesn't exist already. This way
        # the comparison between system using thermostat with different
        # parameters (e.g. collision frequency) will fail as expected.
        self._set_system_temperature(system, self._STANDARD_TEMPERATURE)
        # We need to be sure that thermostat and barostat always are
        # in the same order, as the hash depends on the Forces order.
        # Here we push the barostat at the end.
        barostat = self._pop_barostat(system)
        if barostat is not None:
            self._set_barostat_pressure(barostat, self._STANDARD_PRESSURE)
            # Only the membrane barostat carries a surface tension.
            if isinstance(barostat, openmm.MonteCarloMembraneBarostat):
                self._set_barostat_surface_tension(barostat, self._STANDARD_SURFACE_TENSION)
            system.addForce(barostat)
def _compute_standard_system_hash(self, standard_system):
"""Compute the standard system hash."""
system_serialization = openmm.XmlSerializer.serialize(standard_system)
return system_serialization.__hash__()
def _update_standard_system(self, standard_system):
"""Update the standard system, its hash and the standard system cache."""
self._standard_system_hash = self._compute_standard_system_hash(standard_system)
try:
self._standard_system = self._standard_system_cache[self._standard_system_hash]
except KeyError:
self._standard_system_cache[self._standard_system_hash] = standard_system
self._standard_system = standard_system
# -------------------------------------------------------------------------
# Internal-usage: context handling
# -------------------------------------------------------------------------
    def _set_context_barostat(self, context, update_pressure, update_temperature, update_surface_tension):
        """Set the barostat parameters in the Context.
        Each boolean flag controls whether the corresponding quantity is
        written both on the barostat Force and as a Context parameter.
        Raises ThermodynamicsError if the context's ensemble (barostat
        presence/type) disagrees with this state.
        """
        barostat = self._find_barostat(context.getSystem())
        # Check if we are in the same ensemble.
        if (barostat is None) != (self._pressure is None):
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_ENSEMBLE)
        # A membrane barostat must be present exactly when a surface tension is defined.
        if (type(barostat) is openmm.MonteCarloMembraneBarostat) == (self._surface_tension is None):
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_ENSEMBLE)
        # No need to set the barostat if we are in NVT.
        if self._pressure is None:
            return
        # Apply pressure, surface tension, and temperature to barostat.
        if update_pressure:
            self._set_barostat_pressure(barostat, self.pressure)
            self._set_barostat_pressure_in_context(barostat, self.pressure, context)
        if self.surface_tension is not None and update_surface_tension:
            self._set_barostat_surface_tension(barostat, self.surface_tension)
            self._set_barostat_surface_tension_in_context(barostat, self.surface_tension, context)
        if update_temperature:
            self._set_barostat_temperature(barostat, self.temperature)
            # TODO remove try except when drop openmm7.0 support
            try:
                context.setParameter(barostat.Temperature(), self.temperature)
            except AttributeError:  # OpenMM < 7.1
                # No Temperature() parameter: force the Context to pick up the
                # new default by reinitializing and restoring its full state.
                openmm_state = context.getState(getPositions=True, getVelocities=True,
                                                getParameters=True)
                context.reinitialize()
                context.setState(openmm_state)
    def _set_context_thermostat(self, context):
        """Set the thermostat parameters in the Context."""
        # First try to set the integrator (most common case).
        # If this fails retrieve the Andersen thermostat.
        is_thermostated = self._set_integrator_temperature(context.getIntegrator())
        if not is_thermostated:
            # The heat bath is a thermostat Force: update both its default
            # temperature and the live Context parameter.
            thermostat = self._find_thermostat(context.getSystem())
            thermostat.setDefaultTemperature(self.temperature)
            context.setParameter(thermostat.Temperature(), self.temperature)
def _apply_to_context_in_state(self, context, thermodynamic_state):
"""Apply this ThermodynamicState to the context.
When we know the thermodynamic state of the context, this is much faster
then apply_to_context(). The given thermodynamic state is assumed to be
compatible.
Parameters
----------
context : openmm.Context
The OpenMM Context to be set to this ThermodynamicState.
thermodynamic_state : ThermodynamicState
The ThermodynamicState of this context.
"""
update_pressure = self.pressure != thermodynamic_state.pressure
update_temperature = self.temperature != thermodynamic_state.temperature
update_surface_tension = self.surface_tension != thermodynamic_state.surface_tension
if update_pressure or update_temperature or update_surface_tension:
self._set_context_barostat(context, update_pressure, update_temperature, update_surface_tension)
if update_temperature:
self._set_context_thermostat(context)
# -------------------------------------------------------------------------
# Internal-usage: integrator handling
# -------------------------------------------------------------------------
    @staticmethod
    def _loop_over_integrators(integrator):
        """Unify manipulation of normal, compound and thermostated integrators.
        Generator that yields each sub-integrator of a CompoundIntegrator
        (or the integrator itself otherwise), restoring the
        ThermostatedIntegrator interface on each before yielding.
        """
        if isinstance(integrator, openmm.CompoundIntegrator):
            for integrator_id in range(integrator.getNumIntegrators()):
                _integrator = integrator.getIntegrator(integrator_id)
                integrators.ThermostatedIntegrator.restore_interface(_integrator)
                yield _integrator
        else:
            integrators.ThermostatedIntegrator.restore_interface(integrator)
            yield integrator
    def _is_integrator_thermostated(self, integrator, check_consistency=True):
        """True if integrator is coupled to a heat bath.
        If integrator is a CompoundIntegrator, it returns true if at least
        one of its integrators is coupled to a heat bath.
        Parameters
        ----------
        integrator : openmm.Integrator
            The integrator to inspect (possibly a CompoundIntegrator).
        check_consistency : bool, optional
            If True (default), raise if any heat bath temperature differs
            from this state's temperature.
        Raises
        ------
        ThermodynamicsError
            If check_consistency is True and the integrator is
            coupled to a heat bath at a different temperature
            than this thermodynamic state.
        """
        # Loop over integrators to handle CompoundIntegrators.
        is_thermostated = False
        for _integrator in self._loop_over_integrators(integrator):
            try:
                temperature = _integrator.getTemperature()
            except AttributeError:
                # Not a thermostated integrator: nothing to check.
                pass
            else:
                # Raise exception if the heat bath is at the wrong temperature.
                if (check_consistency and
                        not utils.is_quantity_close(temperature, self.temperature)):
                    err_code = ThermodynamicsError.INCONSISTENT_INTEGRATOR
                    raise ThermodynamicsError(err_code)
                is_thermostated = True
            # We still need to loop over every integrator to make sure
            # that the temperature is consistent for all of them.
        return is_thermostated
def _set_integrator_temperature(self, integrator):
"""Set heat bath temperature of the integrator.
If integrator is a CompoundIntegrator, it sets the temperature
of every sub-integrator.
Returns
-------
is_thermostated : bool
True if the integrator is thermostated.
"""
def set_temp(_integrator):
try:
_integrator.setTemperature(self.temperature)
return True
except AttributeError:
return False
# Loop over integrators to handle CompoundIntegrators.
is_thermostated = False
for _integrator in self._loop_over_integrators(integrator):
is_thermostated = is_thermostated or set_temp(_integrator)
return is_thermostated
# -------------------------------------------------------------------------
# Internal-usage: barostat handling
# -------------------------------------------------------------------------
_SUPPORTED_BAROSTATS = {'MonteCarloBarostat', 'MonteCarloAnisotropicBarostat', 'MonteCarloMembraneBarostat'}
@classmethod
def _find_barostat(cls, system, get_index=False):
"""Return the first barostat found in the system.
Returns
-------
force_idx : int or None, optional
The force index of the barostat.
barostat : OpenMM Force object
The barostat in system, or None if no barostat is found.
Raises
------
ThermodynamicsError
If the system contains unsupported barostats.
"""
try:
force_idx, barostat = forces.find_forces(system, '.*Barostat.*', only_one=True)
except forces.MultipleForcesError:
raise ThermodynamicsError(ThermodynamicsError.MULTIPLE_BAROSTATS)
except forces.NoForceFoundError:
force_idx, barostat = None, None
else:
if barostat.__class__.__name__ not in cls._SUPPORTED_BAROSTATS:
raise ThermodynamicsError(ThermodynamicsError.UNSUPPORTED_BAROSTAT,
barostat.__class__.__name__)
elif isinstance(barostat, openmm.MonteCarloAnisotropicBarostat):
# support only if pressure in all scaled directions is equal
pressures = barostat.getDefaultPressure().value_in_unit(unit.bar)
scaled = [barostat.getScaleX(), barostat.getScaleY(), barostat.getScaleY()]
if sum(scaled) == 0:
raise ThermodynamicsError(ThermodynamicsError.UNSUPPORTED_ANISOTROPIC_BAROSTAT)
active_pressures = [pressure for pressure, active in zip(pressures, scaled) if active]
if any(abs(pressure - active_pressures[0]) > 0 for pressure in active_pressures):
raise ThermodynamicsError(ThermodynamicsError.UNSUPPORTED_ANISOTROPIC_BAROSTAT)
if get_index:
return force_idx, barostat
return barostat
@classmethod
def _pop_barostat(cls, system):
"""Remove the system barostat.
Returns
-------
The removed barostat if it was found, None otherwise.
"""
barostat_idx, barostat = cls._find_barostat(system, get_index=True)
if barostat_idx is not None:
# We need to copy the barostat since we don't own
# its memory (i.e. we can't add it back to the system).
barostat = copy.deepcopy(barostat)
system.removeForce(barostat_idx)
return barostat
return None
def _is_barostat_type_consistent(self, barostat):
# during initialization (standard system not set), any barostat type is OK
if not hasattr(self, "_standard_system"):
return True
system_barostat = self._find_barostat(self._standard_system)
return type(barostat) == type(system_barostat)
    def _is_barostat_consistent(self, barostat):
        """Check the barostat's temperature, pressure, and surface_tension."""
        try:
            barostat_temperature = barostat.getDefaultTemperature()
        except AttributeError:  # versions previous to OpenMM 7.1
            barostat_temperature = barostat.getTemperature()
        barostat_pressure = self._get_barostat_pressure(barostat)
        barostat_surface_tension = self._get_barostat_surface_tension(barostat)
        is_consistent = self._is_barostat_type_consistent(barostat)
        is_consistent = is_consistent and utils.is_quantity_close(barostat_temperature, self.temperature)
        is_consistent = is_consistent and utils.is_quantity_close(barostat_pressure, self.pressure)
        # NOTE(review): callers only reach here with a non-None barostat, so
        # the `barostat is not None` test below is effectively redundant.
        if barostat is not None and self._surface_tension is not None:
            is_consistent = is_consistent and utils.is_quantity_close(barostat_surface_tension, self._surface_tension)
        else:
            is_consistent = is_consistent and (barostat_surface_tension == self._surface_tension)  # both None
        return is_consistent
    def _set_system_pressure(self, system, pressure):
        """Add or configure the system barostat to the given pressure.
        If a new barostat is added, its temperature is set to
        self.temperature.
        Parameters
        ----------
        system : openmm.System
            The system's barostat will be added/configured.
        pressure : openmm.unit.Quantity or None
            The pressure with units compatible to bars. If None, the
            barostat of the system is removed.
        Raises
        ------
        ThermodynamicsError
            If pressure needs to be set for a non-periodic system.
        """
        if pressure is None:  # If new pressure is None, remove barostat.
            self._pop_barostat(system)
            return
        # A barostat only makes sense for periodic systems.
        if not system.usesPeriodicBoundaryConditions():
            raise ThermodynamicsError(ThermodynamicsError.BAROSTATED_NONPERIODIC)
        barostat = self._find_barostat(system)
        if barostat is None:  # Add barostat
            barostat = openmm.MonteCarloBarostat(pressure, self.temperature)
            system.addForce(barostat)
        else:  # Set existing barostat
            self._set_barostat_pressure(barostat, pressure)
    @staticmethod
    def _set_barostat_pressure(barostat, pressure):
        """Set the barostat's default pressure.
        The value is stripped to bars and re-wrapped; the anisotropic
        barostat receives a Vec3 with the same pressure on all axes.
        """
        if isinstance(pressure, unit.Quantity):
            pressure = pressure.value_in_unit(unit.bar)
        if isinstance(barostat, openmm.MonteCarloAnisotropicBarostat):
            barostat.setDefaultPressure(openmm.Vec3(pressure, pressure, pressure)*unit.bar)
        else:
            barostat.setDefaultPressure(pressure*unit.bar)
    @staticmethod
    def _set_barostat_pressure_in_context(barostat, pressure, context):
        """Set the barostat pressure parameter in the given Context."""
        if isinstance(barostat, openmm.MonteCarloAnisotropicBarostat):
            # Anisotropic barostats take a Vec3; all axes get the same value.
            p = pressure.value_in_unit(unit.bar)
            context.setParameter(barostat.Pressure(), openmm.Vec3(p, p, p)*unit.bar)
        else:
            context.setParameter(barostat.Pressure(), pressure)
    @staticmethod
    def _get_barostat_pressure(barostat):
        """Return the barostat's default pressure.
        For an anisotropic barostat, returns the pressure of the first
        scaled axis (the supported barostats enforce equal pressure on
        all scaled axes; see _find_barostat).
        """
        if isinstance(barostat, openmm.MonteCarloAnisotropicBarostat):
            scaled = [barostat.getScaleX(), barostat.getScaleY(), barostat.getScaleZ()]
            first_scaled_axis = scaled.index(True)
            return barostat.getDefaultPressure()[first_scaled_axis]
        else:
            return barostat.getDefaultPressure()
    @staticmethod
    def _set_barostat_temperature(barostat, temperature):
        """Set the barostat's default temperature."""
        barostat.setDefaultTemperature(temperature)
    def _set_system_surface_tension(self, system, gamma):
        """Set the default surface tension of the system's membrane barostat.
        Raises
        ------
        ThermodynamicsError
            If gamma is set on a non-periodic system, or if gamma being
            None/not-None disagrees with the presence of a membrane barostat.
        """
        if gamma is not None and not system.usesPeriodicBoundaryConditions():
            raise ThermodynamicsError(ThermodynamicsError.BAROSTATED_NONPERIODIC)
        barostat = self._find_barostat(system)
        # gamma must be non-None exactly when the barostat is a membrane barostat.
        if (gamma is None) == isinstance(barostat, openmm.MonteCarloMembraneBarostat):
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_ENSEMBLE)
        self._set_barostat_surface_tension(barostat, gamma)
    def _set_barostat_surface_tension(self, barostat, gamma):
        """Set the barostat's default surface tension.
        Raises ThermodynamicsError if gamma is not None but the barostat
        is not a membrane barostat.
        """
        # working around a bug in the unit conversion https://github.com/openmm/openmm/issues/2406
        if isinstance(gamma, unit.Quantity):
            gamma = gamma.value_in_unit(unit.bar * unit.nanometer)
        if isinstance(barostat, openmm.MonteCarloMembraneBarostat):
            barostat.setDefaultSurfaceTension(gamma)
        elif gamma is not None:
            # A surface tension was requested but this barostat can't hold one.
            raise ThermodynamicsError(ThermodynamicsError.SURFACE_TENSION_NOT_SUPPORTED)
def _get_barostat_surface_tension(self, barostat):
if isinstance(barostat, openmm.MonteCarloMembraneBarostat):
return barostat.getDefaultSurfaceTension()
else:
return None
    @staticmethod
    def _set_barostat_surface_tension_in_context(barostat, surface_tension, context):
        """Set barostat surface tension."""
        # work around a unit conversion issue in openmm
        if isinstance(surface_tension, unit.Quantity):
            surface_tension = surface_tension.value_in_unit(unit.nanometer*unit.bar)
        # Probe the parameter first: failure means the context's system has
        # no SurfaceTension parameter (i.e. no membrane barostat).
        try:
            context.getParameter(barostat.SurfaceTension())
        except Exception:
            # NOTE(review): broad except — any failure here, not only a
            # missing parameter, is reported as an incompatible ensemble.
            raise ThermodynamicsError(ThermodynamicsError.INCOMPATIBLE_ENSEMBLE)
        context.setParameter(barostat.SurfaceTension(), surface_tension)
# -------------------------------------------------------------------------
# Internal-usage: thermostat handling
# -------------------------------------------------------------------------
    @classmethod
    def _find_thermostat(cls, system, get_index=False):
        """Return the first thermostat in the system.
        Parameters
        ----------
        system : openmm.System
            The system to search.
        get_index : bool, optional
            If True, also return the force index of the thermostat.
        Returns
        -------
        force_idx : int or None, optional
            The force index of the thermostat. Only returned if get_index is True.
        thermostat : OpenMM Force object or None
            The thermostat in system, or None if no thermostat is found.
        """
        try:
            force_idx, thermostat = forces.find_forces(system, '.*Thermostat.*', only_one=True)
        except forces.MultipleForcesError:
            raise ThermodynamicsError(ThermodynamicsError.MULTIPLE_THERMOSTATS)
        except forces.NoForceFoundError:
            force_idx, thermostat = None, None
        if get_index:
            return force_idx, thermostat
        return thermostat
@classmethod
def _remove_thermostat(cls, system):
"""Remove the system thermostat."""
thermostat_idx, thermostat = cls._find_thermostat(system, get_index=True)
if thermostat_idx is not None:
system.removeForce(thermostat_idx)
    @classmethod
    def _set_system_temperature(cls, system, temperature):
        """Configure thermostat and barostat to the given temperature.
        The thermostat temperature is set, or a new AndersenThermostat
        is added if it doesn't exist.
        Parameters
        ----------
        system : openmm.System
            The system to modify.
        temperature : openmm.unit.Quantity
            The temperature for the thermostat.
        """
        thermostat = cls._find_thermostat(system)
        if thermostat is None:
            # Default collision frequency of 1/ps for the added thermostat.
            thermostat = openmm.AndersenThermostat(temperature, 1.0/unit.picosecond)
            system.addForce(thermostat)
        else:
            thermostat.setDefaultTemperature(temperature)
        # Keep the barostat's temperature in sync as well.
        barostat = cls._find_barostat(system)
        if barostat is not None:
            cls._set_barostat_temperature(barostat, temperature)
# -------------------------------------------------------------------------
# Internal-usage: initialization
# -------------------------------------------------------------------------
    @staticmethod
    def _compute_reduced_potential(potential_energy, temperature, volume, pressure, area_xy=None, surface_tension=None):
        """Convert potential energy into reduced potential.
        Computes u = beta * (U + p*V - gamma*A_xy); the pressure-volume and
        surface-tension terms are included only when the corresponding
        arguments are not None.
        """
        beta = 1.0 / (unit.BOLTZMANN_CONSTANT_kB * temperature)
        # Convert from molar units to per-molecule energy.
        reduced_potential = potential_energy / unit.AVOGADRO_CONSTANT_NA
        if pressure is not None:
            reduced_potential += pressure * volume
        if area_xy is not None and surface_tension is not None:
            reduced_potential -= surface_tension * area_xy
        return beta * reduced_potential
def _find_force_groups_to_update(self, context, thermodynamic_state, memo):
"""Find the force groups to be recomputed when moving to the given state.
With the current implementation of ThermodynamicState, no force group has
to be recomputed as only temperature and pressure change between compatible
states, but this method becomes essential in CompoundThermodynamicState.
"""
return set()
# =============================================================================
# SAMPLER STATE
# =============================================================================
class SamplerState(object):
"""State carrying the configurational properties of a system.
Represent the portion of the state of a Context that changes with
integration. When initialized through the normal constructor, the
object is only partially defined as the energy attributes are None
until the SamplerState is updated with update_from_context. The
state can still be applied to a newly created context to set its
positions, velocities and box vectors. To initialize all attributes,
use the alternative constructor from_context.
Parameters
----------
positions : Nx3 openmm.unit.Quantity
Position vectors for N particles (length units).
velocities : Nx3 openmm.unit.Quantity, optional
Velocity vectors for N particles (velocity units).
box_vectors : 3x3 openmm.unit.Quantity
Current box vectors (length units).
Attributes
----------
positions
velocities
box_vectors : 3x3 openmm.unit.Quantity.
Current box vectors (length units).
potential_energy
kinetic_energy
total_energy
volume
n_particles
collective_variables
Examples
--------
>>> from openmmtools import testsystems
>>> toluene_test = testsystems.TolueneVacuum()
>>> sampler_state = SamplerState(toluene_test.positions)
At this point only the positions are defined
>>> sampler_state.velocities is None
True
>>> sampler_state.total_energy is None
True
but it can still be used to set up a context
>>> temperature = 300.0*unit.kelvin
>>> thermodynamic_state = ThermodynamicState(toluene_test.system, temperature)
>>> integrator = openmm.VerletIntegrator(1.0*unit.femtosecond)
>>> context = thermodynamic_state.create_context(integrator)
>>> sampler_state.apply_to_context(context) # Set initial positions.
A SamplerState cannot be updated by an incompatible context
which here is defined as having the same number of particles
>>> hostguest_test = testsystems.HostGuestVacuum()
>>> incompatible_state = ThermodynamicState(hostguest_test.system, temperature)
>>> integrator2 = openmm.VerletIntegrator(1.0*unit.femtosecond)
>>> incompatible_context = incompatible_state.create_context(integrator2)
>>> incompatible_context.setPositions(hostguest_test.positions)
>>> sampler_state.is_context_compatible(incompatible_context)
False
>>> sampler_state.update_from_context(incompatible_context)
Traceback (most recent call last):
...
openmmtools.states.SamplerStateError: Specified positions with inconsistent number of particles.
Create a new SamplerState instead
>>> sampler_state2 = SamplerState.from_context(context)
>>> sampler_state2.potential_energy is not None
True
It is possible to slice a sampler state to obtain positions and
particles of a subset of atoms
>>> sliced_sampler_state = sampler_state[:10]
>>> sliced_sampler_state.n_particles
10
"""
# -------------------------------------------------------------------------
# Public interface
# -------------------------------------------------------------------------
def __init__(self, positions, velocities=None, box_vectors=None):
# Allocate variables, they get set in _initialize
self._positions = None
self._velocities = None
self._box_vectors = None
self._collective_variables = None
self._kinetic_energy = None
self._potential_energy = None
args = []
for input in [positions, velocities, box_vectors]:
if isinstance(input, unit.Quantity) and not isinstance(input._value, np.ndarray):
args.append(np.array(input/input.unit)*input.unit)
else:
args.append(copy.deepcopy(input))
self._initialize(*args)
    @classmethod
    def from_context(cls, context_state, ignore_collective_variables=False):
        """Alternative constructor.
        Read all the configurational properties from a Context object or
        an OpenMM State object. This guarantees that all attributes
        (including energy attributes) are initialized.
        Parameters
        ----------
        context_state : openmm.Context or openmm.State
            The object to read. If a State object, it must contain information
            about positions, velocities and energy.
        ignore_collective_variables : bool, optional
            If True, the collective variables are not updated from the
            Context, and will be invalidated. If a State is passed in,
            this raises an error if False, otherwise, it would be ambiguous
            between a State tied to a System with collective variables, and one without.
        Returns
        -------
        sampler_state : SamplerState
            A new SamplerState object.
        """
        # Build an empty state, then populate every attribute from the
        # context/state; consistency checks are skipped since the instance
        # has no pre-existing particle count to compare against.
        sampler_state = cls([])
        sampler_state._read_context_state(context_state, check_consistency=False,
                                          ignore_positions=False,
                                          ignore_velocities=False,
                                          ignore_collective_variables=ignore_collective_variables)
        return sampler_state
@property
def positions(self):
    """Particle positions.

    An Nx3 openmm.unit.Quantity object, where N is the number of
    particles.

    Raises
    ------
    SamplerStateError
        If set to an array with a number of particles different
        than n_particles.
    """
    return self._positions

@positions.setter
def positions(self, value):
    # Public assignment always validates the particle count; internal
    # callers use _set_positions directly when the check must be skipped.
    self._set_positions(value, from_context=False, check_consistency=True)
@property
def velocities(self):
    """Particle velocities.

    An Nx3 openmm.unit.Quantity object, where N is the number of
    particles.

    Raises
    ------
    SamplerStateError
        If set to an array with a number of particles different
        than n_particles.
    """
    return self._velocities

@velocities.setter
def velocities(self, value):
    # _set_velocities validates the length against n_particles and
    # invalidates the cached kinetic energy.
    self._set_velocities(value, from_context=False)
@property
def box_vectors(self):
    """Box vectors.

    A 3x3 openmm.unit.Quantity object.
    """
    return self._box_vectors

@box_vectors.setter
def box_vectors(self, value):
    # Normalize the input to a Quantity: System.getDefaultPeriodicBoxVectors,
    # for example, returns a plain list of Quantity objects instead.
    if value is None or isinstance(value, unit.Quantity):
        self._box_vectors = value
    else:
        self._box_vectors = unit.Quantity(value)
# Derived properties

@property
def potential_energy(self):
    """openmm.unit.Quantity or None: Potential energy of this configuration."""
    # _are_positions_valid is True when the positions are missing or have
    # been modified since the energy was cached (see its implementation),
    # in which case the cached value is stale and None is returned.
    if self._are_positions_valid:
        return None
    return self._potential_energy

@potential_energy.setter
def potential_energy(self, new_value):
    # Only None is accepted: the energy is a function of the Context and
    # can only be invalidated, never set, by the user.
    if new_value is not None:
        raise AttributeError("Cannot set potential energy as it is a function of Context")
    self._potential_energy = None
@property
def kinetic_energy(self):
    """openmm.unit.Quantity or None: Kinetic energy of this configuration."""
    # The cached kinetic energy is valid only while the velocities exist
    # and have not been modified since it was read from a Context.
    if self.velocities is None or self.velocities.has_changed:
        return None
    return self._kinetic_energy

@kinetic_energy.setter
def kinetic_energy(self, new_value):
    # Only None is accepted: the energy is a function of the Context and
    # can only be invalidated, never set, by the user.
    if new_value is not None:
        raise AttributeError("Cannot set kinetic energy as it is a function of Context")
    self._kinetic_energy = None
@property
def collective_variables(self):
    """dict or None: Collective variables for this configuration if present in Context"""
    # Stale or missing positions invalidate the cached CV values, exactly
    # like they invalidate the cached potential energy.
    if self._are_positions_valid:
        return None
    return self._collective_variables

@collective_variables.setter
def collective_variables(self, new_value):
    # Only None is accepted: like the energies, CVs are a function of the
    # Context and can only be invalidated by the user. Assign None
    # explicitly (new_value is provably None here) for consistency with
    # the potential/kinetic energy setters.
    if new_value is not None:
        raise AttributeError("Cannot set collective variables as it is a function of Context")
    self._collective_variables = None
@property
def total_energy(self):
    """The sum of potential and kinetic energy (read-only).

    None when either component energy is unavailable or stale.
    """
    potential = self.potential_energy
    kinetic = self.kinetic_energy
    if potential is not None and kinetic is not None:
        return potential + kinetic
    return None
@property
def volume(self):
    """The volume of the box (read-only)."""
    # Delegates to the module-level helper; box_vectors may be None,
    # in which case the helper's behavior applies.
    return _box_vectors_volume(self.box_vectors)
@property
def area_xy(self):
    """The xy-area of the box (read-only)."""
    # Delegates to the module-level helper operating on the box vectors.
    return _box_vectors_area_xy(self.box_vectors)
@property
def n_particles(self):
    """Number of particles (read-only).

    Derived from the length of the positions array.
    """
    return len(self.positions)
def is_context_compatible(self, context):
    """Check compatibility of the given context.

    The context is compatible if this SamplerState can be applied
    through apply_to_context, i.e. if its System holds the same
    number of particles as this state.

    Parameters
    ----------
    context : openmm.Context
        The context to test.

    Returns
    -------
    is_compatible : bool
        True if this SamplerState can be applied to context.

    See Also
    --------
    SamplerState.apply_to_context
    """
    return context.getSystem().getNumParticles() == self.n_particles
def update_from_context(self, context_state, ignore_positions=False, ignore_velocities=False,
                        ignore_collective_variables=False):
    """Read the state from the given Context or State object.

    The context must be compatible. Use SamplerState.from_context
    if you want to build a new sampler state from an incompatible one.

    Parameters
    ----------
    context_state : openmm.Context or openmm.State
        The object to read. If a State, it must contain information
        on positions, velocities and energies. Collective
        variables can only be updated from a Context, NOT a State
        at the moment.
    ignore_positions : bool, optional
        If True, the positions (and potential energy) are not updated from the
        Context. This can cause the SamplerState to no longer be consistent between
        its variables, so the defaults err on the side of updating everything,
        if possible. Only use if you know what you are doing.
    ignore_velocities : bool, optional
        If True, the velocities (and kinetic energy) are not updated from the
        Context. This can cause the SamplerState to no longer be consistent between
        its variables, so the defaults err on the side of updating everything,
        if possible. Only use if you know what you are doing.
    ignore_collective_variables : bool, optional
        If True, the collective variables are not updated from the
        Context. If a State is passed in,
        this raises an error if False, otherwise, it would be ambiguous
        between a State tied to a System with collective variables, and one without.

    Raises
    ------
    SamplerStateError
        If the given context is not compatible, or if a State is given without
        setting ignore_collective_variables.
    """
    # check_consistency=True makes _read_context_state raise when the
    # context holds a different number of particles than this state.
    self._read_context_state(context_state, check_consistency=True,
                             ignore_positions=ignore_positions,
                             ignore_velocities=ignore_velocities,
                             ignore_collective_variables=ignore_collective_variables)
def apply_to_context(self, context, ignore_velocities=False):
    """Set the context state.

    If velocities and box vectors have not been specified in the
    constructor, they are not set.

    Parameters
    ----------
    context : openmm.Context
        The context to set.
    ignore_velocities : bool, optional
        If True, velocities are not set in the Context even if they
        are defined. This can be useful if you only need the Context
        to compute energies.
    """
    # NOTE: Box vectors MUST be updated before positions are set.
    box_vectors = self.box_vectors
    if box_vectors is not None:
        context.setPeriodicBoxVectors(*box_vectors)
    context.setPositions(self._unitless_positions)
    should_set_velocities = (self._velocities is not None) and not ignore_velocities
    if should_set_velocities:
        context.setVelocities(self._unitless_velocities)
def has_nan(self):
"""Check that energies and positions are finite.
Returns
-------
True if the potential energy or any of the generalized coordinates
are nan.
"""
if (self.potential_energy is not None and
np.isnan(self.potential_energy.value_in_unit(self.potential_energy.unit))):
return True
if np.any(np.isnan(self._positions)):
return True
return False
def __getitem__(self, item):
    """Return a new SamplerState restricted to a subset of the particles.

    Parameters
    ----------
    item : int, slice or sequence of int
        Index, slice or sequence selecting the particles to keep.

    Returns
    -------
    sampler_state : SamplerState
        A new SamplerState holding copies of the selected positions,
        velocities (when defined) and of the box vectors. Energies and
        collective variables are undefined for a subset of atoms and
        are therefore reset to None.
    """
    sampler_state = self.__class__([])
    # Handle single index.
    if np.issubdtype(type(item), np.integer):
        # Here we don't need to copy since we instantiate a new array.
        pos_value = self._positions[item].value_in_unit(self._positions.unit)
        new_positions = unit.Quantity(np.array([pos_value]), self._positions.unit)
        sampler_state._set_positions(new_positions, from_context=False, check_consistency=False)
        if self._velocities is not None:
            vel_value = self._velocities[item].value_in_unit(self._velocities.unit)
            new_velocities = unit.Quantity(np.array([vel_value]), self._velocities.unit)
            sampler_state._set_velocities(new_velocities, from_context=False)
    else:  # Assume slice or sequence.
        # Copy original values to avoid side effects.
        sampler_state._set_positions(copy.deepcopy(self._positions[item]),
                                     from_context=False, check_consistency=False)
        if self._velocities is not None:
            # deepcopy already duplicates the data; the extra .copy() that
            # used to be chained here was redundant (the positions branch
            # above never had it) and has been removed.
            sampler_state._set_velocities(copy.deepcopy(self._velocities[item]),
                                          from_context=False)
    # Copy box vectors.
    sampler_state.box_vectors = copy.deepcopy(self.box_vectors)
    # Energies/CV's for only a subset of atoms is undefined.
    sampler_state._potential_energy = None
    sampler_state._kinetic_energy = None
    sampler_state._collective_variables = None
    return sampler_state
def __getstate__(self, ignore_velocities=False):
    """Return a dictionary representation of the state.

    Parameters
    ----------
    ignore_velocities : bool, optional
        If True, velocities are not serialized. This can be useful for
        example to save bandwidth when sending a ``SamplerState`` over
        the network and velocities are not required (default is False).
    """
    # Energies and CVs are serialized through their properties, so stale
    # cached values (e.g. after positions changed) serialize as None.
    velocities = None if ignore_velocities else self.velocities
    serialization = dict(
        positions=self.positions, velocities=velocities,
        box_vectors=self.box_vectors, potential_energy=self.potential_energy,
        kinetic_energy=self.kinetic_energy,
        collective_variables=self.collective_variables
    )
    return serialization
def __setstate__(self, serialization, ignore_velocities=False):
    """Set the state from a dictionary representation.

    Parameters
    ----------
    serialization : dict
        The dictionary produced by ``__getstate__``.
    ignore_velocities : bool, optional
        If True and the ``SamplerState`` has already velocities
        defined, this does not overwrite the velocities.
    """
    # Preserve the current velocities by overriding the serialized ones
    # before re-initializing all attributes.
    if ignore_velocities and '_velocities' in self.__dict__:
        serialization['velocities'] = self.velocities
    self._initialize(**serialization)
# -------------------------------------------------------------------------
# Internal-usage
# -------------------------------------------------------------------------

def _initialize(self, positions, velocities, box_vectors,
                potential_energy=None, kinetic_energy=None, collective_variables=None):
    """Initialize the sampler state.

    Positions are set first (unchecked) so that the velocities setter
    can validate its length against n_particles.
    """
    self._set_positions(positions, from_context=False, check_consistency=False)
    self.velocities = velocities  # Checks consistency and units.
    self.box_vectors = box_vectors  # Make sure box vectors is Quantity.
    # Cached context-derived values; None means "unknown/invalid".
    self._potential_energy = potential_energy
    self._kinetic_energy = kinetic_energy
    self._collective_variables = collective_variables
def _set_positions(self, new_positions, from_context, check_consistency):
    """Set the positions, optionally validating the particle count.

    Parameters
    ----------
    new_positions : openmm.unit.Quantity
        The new positions to store (wrapped in a TrackedQuantity).
    from_context : bool
        True when the positions come straight from an OpenMM State, in
        which case the raw (nanometer) values are cached unit-less.
    check_consistency : bool
        If True, raise SamplerStateError when the new array does not
        match the current number of particles.
    """
    if check_consistency and (new_positions is None or len(new_positions) != self.n_particles):
        raise SamplerStateError(SamplerStateError.INCONSISTENT_POSITIONS)
    if from_context:
        # OpenMM States report positions in nanometers, so the raw values
        # can be cached directly as MD-unit-system numbers.
        self._unitless_positions_cache = new_positions._value
        assert new_positions.unit == unit.nanometer
    else:
        self._unitless_positions_cache = None
    # TrackedQuantity flags in-place modifications via .has_changed.
    self._positions = utils.TrackedQuantity(new_positions)
    # The potential energy changes with different positions.
    self._potential_energy = None
    # The CVs change with different positions too.
    self._collective_variables = None
def _set_velocities(self, new_velocities, from_context):
    """Set the velocities.

    Parameters
    ----------
    new_velocities : openmm.unit.Quantity or None
        The new velocities; None clears them.
    from_context : bool
        True when the velocities come straight from an OpenMM State, in
        which case the raw (nm/ps) values are cached unit-less and no
        length check is performed.
    """
    if from_context:
        self._unitless_velocities_cache = new_velocities._value
        assert new_velocities.unit == unit.nanometer/unit.picoseconds
    else:
        if new_velocities is not None and self.n_particles != len(new_velocities):
            raise SamplerStateError(SamplerStateError.INCONSISTENT_VELOCITIES)
        self._unitless_velocities_cache = None
        if new_velocities is not None:
            new_velocities = utils.TrackedQuantity(new_velocities)
    self._velocities = new_velocities
    # The kinetic energy changes with different velocities.
    self._kinetic_energy = None
@property
def _unitless_positions(self):
    """Keeps a cache of unitless positions."""
    # Refresh the cache when it is empty or the tracked positions have
    # been modified since the last read.
    if self._unitless_positions_cache is None or self._positions.has_changed:
        self._unitless_positions_cache = self.positions.value_in_unit_system(unit.md_unit_system)
        if self._positions.has_changed:
            # Reset the change flag and invalidate the cached potential
            # energy, which no longer matches the new positions.
            self._positions.has_changed = False
            self._potential_energy = None
    return self._unitless_positions_cache
@property
def _unitless_velocities(self):
    """Keeps a cache of unitless velocities."""
    if self._velocities is None:
        return None
    # Refresh the cache when it is empty or the tracked velocities have
    # been modified since the last read.
    if self._unitless_velocities_cache is None or self._velocities.has_changed:
        self._unitless_velocities_cache = self._velocities.value_in_unit_system(unit.md_unit_system)
        if self._velocities.has_changed:
            # Reset the change flag and invalidate the cached kinetic
            # energy, which no longer matches the new velocities.
            self._velocities.has_changed = False
            self._kinetic_energy = None
    return self._unitless_velocities_cache
def _read_context_state(self, context_state, check_consistency,
                        ignore_positions,
                        ignore_velocities,
                        ignore_collective_variables):
    """Read the Context state.

    Parameters
    ----------
    context_state : openmm.Context or openmm.State
        The object to read.
    check_consistency : bool
        If True, raise an error if the context system has a
        different number of particles than the current state.
    ignore_positions : bool
        If True, the positions and potential energy are not updated from the
        Context.
    ignore_velocities : bool
        If True, the velocities and kinetic energy are not updated from the
        Context.
    ignore_collective_variables : bool
        If True, the collective variables are not updated from the
        Context. If a State is passed in,
        this raises an error if False, otherwise, it would be ambiguous
        between a State tied to a System with collective variables, and one without.

    Raises
    ------
    SamplerStateError
        If the context system has a different number of
        particles than the current state.
    """
    if isinstance(context_state, openmm.Context):
        # Pull a State snapshot out of the Context, requesting only the
        # data that will actually be stored below.
        system = context_state.getSystem()
        openmm_state = context_state.getState(getPositions=not ignore_positions,
                                              getVelocities=not ignore_velocities,
                                              getEnergy=not (ignore_velocities and ignore_positions),
                                              enforcePeriodicBox=system.usesPeriodicBoundaryConditions())
    else:
        if not ignore_collective_variables:
            raise SamplerStateError("State objects must have ignore_collective_variables=True because they "
                                    "don't track CV's and would be ambiguous between a System with no "
                                    "collective variables.")
        openmm_state = context_state
    # We assign positions first, since the velocities
    # property will check its length for consistency.
    # Potential energy and kinetic energy must be updated
    # after positions and velocities or they'll be reset.
    if not ignore_positions:
        positions = openmm_state.getPositions(asNumpy=True)
        self._set_positions(positions, from_context=True, check_consistency=check_consistency)
        self._potential_energy = openmm_state.getPotentialEnergy()
    if not ignore_velocities:
        velocities = openmm_state.getVelocities(asNumpy=True)
        self._set_velocities(velocities, from_context=True)
        self._kinetic_energy = openmm_state.getKineticEnergy()
    self.box_vectors = openmm_state.getPeriodicBoxVectors(asNumpy=True)
    if not ignore_collective_variables:
        # Reached only with a Context (the State branch above enforces
        # ignore_collective_variables=True).
        self._read_collective_variables(context_state)
def _read_collective_variables(self, context_state):
    """Update the collective variables from the context object.

    Parameters
    ----------
    context_state : openmm.Context
        The object to read. This only works with Context's for now,
        but in the future, this may support OpenMM State objects as well.
    """
    # Allows direct key assignment without initializing each key:dict pair.
    collective_variables = collections.defaultdict(dict)
    system = context_state.getSystem()
    for force_index, force in enumerate(system.getForces()):
        try:
            cv_values = force.getCollectiveVariableValues(context_state)
            for cv_index in range(force.getNumCollectiveVariables()):
                cv_name = force.getCollectiveVariableName(cv_index)
                collective_variables[cv_name][force_index] = cv_values[cv_index]
        except AttributeError:
            # Forces without collective-variable support lack these
            # methods. NOTE: this also swallows AttributeErrors raised
            # anywhere inside the try body above, not just the initial
            # method lookup.
            pass
    # Trap no variables found (empty dict), return None.
    # Cast defaultdict back to dict.
    self._collective_variables = dict(collective_variables) if collective_variables else None
@property
def _are_positions_valid(self):
    """Helper to deduplicate the staleness check used by several properties.

    Note: despite its name, this returns True when the positions are
    *absent or have been modified* since the context-derived values were
    cached -- i.e. True means the cached potential energy and collective
    variables are no longer valid.
    """
    return self.positions is None or self.positions.has_changed
# =============================================================================
# COMPOUND THERMODYNAMIC STATE
# =============================================================================

class ComposableStateError(Exception):
    """Error raised by a ComposableState."""
class IComposableState(utils.SubhookedABCMeta):
    """A state composable through CompoundThermodynamicState.

    Define the interface that needs to be implemented to extend a
    ThermodynamicState through CompoundThermodynamicState.

    See Also
    --------
    CompoundThermodynamicState
    """

    @abc.abstractmethod
    def apply_to_system(self, system):
        """Set the system to be in this state.

        This method is called in three situations:

        1) On initialization, before standardizing the system.
        2) When a new system is set and the argument ``fix_state`` is
           set to ``True``.
        3) When the system is retrieved to convert the standard system
           into a system in the correct thermodynamic state for the
           simulation.

        Parameters
        ----------
        system : openmm.System
            The system to modify.

        Raises
        ------
        ComposableStateError
            If the system is not compatible with the state.
        """
        pass

    @abc.abstractmethod
    def check_system_consistency(self, system):
        """Check if the system is in this state.

        It raises a ComposableStateError if the system is not in
        this state. This is called when the ThermodynamicState's
        system is set with the ``fix_state`` argument set to False.

        Parameters
        ----------
        system : openmm.System
            The system to test.

        Raises
        ------
        ComposableStateError
            If the system is not consistent with this state.
        """
        pass

    @abc.abstractmethod
    def apply_to_context(self, context):
        """Set the context to be in this state.

        Parameters
        ----------
        context : openmm.Context
            The context to set.

        Raises
        ------
        ComposableStateError
            If the context is not compatible with the state.
        """
        pass

    @abc.abstractmethod
    def _standardize_system(self, system):
        """Standardize the given system.

        ThermodynamicState relies on this method to create a standard
        system that defines compatibility with another state or context.
        The definition of a standard system is tied to the implementation
        of apply_to_context. For example, if apply_to_context sets a
        global parameter of the context, _standardize_system should
        set the default value of the parameter in the system to a
        standard value.

        Parameters
        ----------
        system : openmm.System
            The system to standardize.

        Raises
        ------
        ComposableStateError
            If the system is not compatible with the state.
        """
        pass

    @abc.abstractmethod
    def _on_setattr(self, standard_system, attribute_name, old_composable_state):
        """Check if standard system needs to be updated after a state attribute is set.

        This callback function is called after an attribute is set (i.e.
        after __setattr__ is called on this state) or if an attribute whose
        name starts with "set_" is requested (i.e. if a setter is retrieved
        from this state through __getattr__).

        Parameters
        ----------
        standard_system : openmm.System
            The standard system before setting the attribute.
        attribute_name : str
            The name of the attribute that has just been set or retrieved.
        old_composable_state : IComposableState
            A copy of the composable state before the attribute was set.

        Returns
        -------
        need_changes : bool
            True if the standard system has to be updated, False if no change
            occurred.

        Raises
        ------
        ComposableStateError
            If the attribute change put the system in an inconsistent state.
        """
        pass

    @abc.abstractmethod
    def _find_force_groups_to_update(self, context, current_context_state, memo):
        """Find the force groups whose energy must be recomputed after applying self.

        This is used to compute efficiently the potential energy of the
        same configuration in multiple thermodynamic states to minimize
        the number of force evaluations.

        Parameters
        ----------
        context : Context
            The context, currently in `current_context_state`, that will
            be moved to this state.
        current_context_state : ThermodynamicState
            The full thermodynamic state of the given context. This is
            guaranteed to be compatible with self.
        memo : dict
            A dictionary that can be used by the state for memoization
            to speed up consecutive calls on the same context.

        Returns
        -------
        force_groups_to_update : set of int
            The indices of the force groups whose energy must be computed
            again after applying this state, assuming the context to be in
            `current_context_state`.
        """
        pass
class CompoundThermodynamicState(ThermodynamicState):
    """Thermodynamic state composed by multiple states.

    Allows to extend a ThermodynamicState through composition rather
    than inheritance.

    The class dynamically inherits from the ThermodynamicState object
    given in the constructor, and it preserves direct access to all its
    methods and attributes. It is compatible also with subclasses of
    ThermodynamicState, but it does not support objects which make use
    of __slots__.

    It is the user's responsibility to check that IComposableStates are
    compatible to each other (i.e. that they do not depend on and/or
    modify the same properties of the system). If this is not the case,
    consider merging them into a single IComposableStates. If an
    IComposableState needs to access properties of ThermodynamicState
    (e.g. temperature, pressure) consider extending it through normal
    inheritance.

    It is not necessary to explicitly inherit from IComposableState for
    compatibility as long as all abstract methods are implemented. All
    its attributes and methods will still be directly accessible unless
    they are masked by the main ThermodynamicState or by a IComposableState
    that appeared before in the constructor argument composable_states.

    After construction, changing the original thermodynamic_state or
    any of the composable_states changes the state of the compound state.

    Parameters
    ----------
    thermodynamic_state : ThermodynamicState
        The main ThermodynamicState which holds the OpenMM system.
    composable_states : list of IComposableState
        Each element represent a portion of the overall thermodynamic
        state.

    Examples
    --------
    Create an alchemically modified system.

    >>> from openmmtools import testsystems, alchemy
    >>> factory = alchemy.AbsoluteAlchemicalFactory(consistent_exceptions=False)
    >>> alanine_vacuum = testsystems.AlanineDipeptideVacuum().system
    >>> alchemical_region = alchemy.AlchemicalRegion(alchemical_atoms=range(22))
    >>> alanine_alchemical_system = factory.create_alchemical_system(reference_system=alanine_vacuum,
    ...                                                              alchemical_regions=alchemical_region)
    >>> alchemical_state = alchemy.AlchemicalState.from_system(alanine_alchemical_system)

    AlchemicalState implement the IComposableState interface, so it can be
    used with CompoundThermodynamicState. All the alchemical parameters are
    accessible through the compound state.

    >>> import openmm
    >>> from openmm import unit
    >>> thermodynamic_state = ThermodynamicState(system=alanine_alchemical_system,
    ...                                          temperature=300*unit.kelvin)
    >>> compound_state = CompoundThermodynamicState(thermodynamic_state=thermodynamic_state,
    ...                                             composable_states=[alchemical_state])
    >>> compound_state.lambda_sterics
    1.0
    >>> compound_state.lambda_electrostatics
    1.0

    You can control the parameters in the OpenMM Context in this state by
    setting the state attributes.

    >>> compound_state.lambda_sterics = 0.5
    >>> integrator = openmm.VerletIntegrator(1.0*unit.femtosecond)
    >>> context = compound_state.create_context(integrator)
    >>> context.getParameter('lambda_sterics')
    0.5
    >>> compound_state.lambda_sterics = 1.0
    >>> compound_state.apply_to_context(context)
    >>> context.getParameter('lambda_sterics')
    1.0
    """

    def __init__(self, thermodynamic_state, composable_states):
        # Check that composable states expose the correct interface.
        for composable_state in composable_states:
            assert isinstance(composable_state, IComposableState)
        # Copy internal attributes of thermodynamic state.
        thermodynamic_state = copy.deepcopy(thermodynamic_state)
        self.__dict__ = thermodynamic_state.__dict__
        # Setting self._composable_states signals __setattr__ to start
        # searching in composable states as well, so this must be the
        # last new attribute set in the constructor.
        composable_states = copy.deepcopy(composable_states)
        self._composable_states = composable_states
        # This call causes the thermodynamic state standard system
        # to be standardized also w.r.t. all the composable states.
        self.set_system(self._standard_system, fix_state=True)

    def get_system(self, **kwargs):
        """Manipulate and return the system.

        With default arguments, this is equivalent as the system property.
        By setting the arguments it is possible to obtain a modified copy
        of the system without the thermostat or the barostat.

        Parameters
        ----------
        remove_thermostat : bool
            If True, the system thermostat is removed.
        remove_barostat : bool
            If True, the system barostat is removed.

        Returns
        -------
        system : openmm.System
            The system of this ThermodynamicState.
        """
        system = super(CompoundThermodynamicState, self).get_system(**kwargs)
        # The system returned by ThermodynamicState has standard parameters,
        # so we need to set them to the actual value of the composable states.
        for s in self._composable_states:
            s.apply_to_system(system)
        return system

    def set_system(self, system, fix_state=False):
        """Allow to set the system and fix its thermodynamic state.

        With default arguments, this is equivalent to assign the
        system property, which raise an error if the system is in
        a different thermodynamic state.

        Parameters
        ----------
        system : openmm.System
            The system to set.
        fix_state : bool, optional
            The thermodynamic state of the state will be fixed by
            all the composable states. Default is False.

        See Also
        --------
        ThermodynamicState.set_system
        """
        # Work on a copy so neither the caller's system nor this state is
        # left half-modified if a composable state raises.
        system = copy.deepcopy(system)
        for s in self._composable_states:
            if fix_state:
                s.apply_to_system(system)
            else:
                s.check_system_consistency(system)
        super(CompoundThermodynamicState, self)._unsafe_set_system(system, fix_state)

    def is_context_compatible(self, context):
        """Check compatibility of the given context.

        Parameters
        ----------
        context : openmm.Context
            The OpenMM context to test.

        Returns
        -------
        is_compatible : bool
            True if this ThermodynamicState can be applied to context.

        See Also
        --------
        ThermodynamicState.is_context_compatible
        """
        # We override ThermodynamicState.is_context_compatible to
        # handle the case in which one of the composable states
        # raises ComposableStateError when standardizing the context system.
        try:
            return super(CompoundThermodynamicState, self).is_context_compatible(context)
        except ComposableStateError:
            return False

    def apply_to_context(self, context):
        """Apply this compound thermodynamic state to the context.

        The parent state is applied first, then each composable state in
        constructor order.

        See Also
        --------
        ThermodynamicState.apply_to_context
        """
        super(CompoundThermodynamicState, self).apply_to_context(context)
        for s in self._composable_states:
            s.apply_to_context(context)

    def __getattr__(self, name):
        """Resolve attributes not found on self by searching composable states."""
        def setter_decorator(funcs, composable_states):
            # Wrap all matching setters into one callable so a single call
            # updates every composable state and fires its callback.
            def _setter_decorator(*args, **kwargs):
                for func, composable_state in zip(funcs, composable_states):
                    old_state = copy.deepcopy(composable_state)
                    func(*args, **kwargs)
                    self._on_setattr_callback(composable_state, name, old_state)
            return _setter_decorator

        # Called only if the attribute couldn't be found in __dict__.
        # In this case we fall back to composable state, in the given order.
        attrs = []
        composable_states = []
        for s in self._composable_states:
            try:
                attr = getattr(s, name)
            except AttributeError:
                pass
            else:
                attrs.append(attr)
                composable_states.append(s)
        if len(attrs) > 0:
            # If this is a setter, we need to set the attribute in all states
            # and ensure that the callback is called in each of them.
            if name.startswith('set_'):
                # Decorate the setter so that _on_setattr is called after the
                # attribute is modified. This also reduces the calls to multiple
                # setter to a single function.
                attr = setter_decorator(attrs, composable_states)
            else:
                # A read attribute exposed by several states must agree
                # across all of them, otherwise the lookup is ambiguous.
                if len(attrs) > 1 and not all(np.isclose(attrs[0], a) for a in attrs[1:]):
                    raise RuntimeError('The composable states of {} expose the same '
                                       'attribute with different values: {}'.format(
                                           self.__class__.__name__, set(attrs)))
                attr = attrs[0]
            return attr
        # Attribute not found, fall back to normal behavior.
        return super(CompoundThermodynamicState, self).__getattribute__(name)

    def __setattr__(self, name, value):
        """Route attribute assignment to self or to the composable states."""
        # Add new attribute to CompoundThermodynamicState.
        if '_composable_states' not in self.__dict__:
            # Still in the constructor: always set on self.
            super(CompoundThermodynamicState, self).__setattr__(name, value)
        # Update existing ThermodynamicState attribute (check ancestors).
        # We can't use hasattr here because it calls __getattr__, which
        # search in all composable states as well. This means that this
        # will catch only properties and methods.
        elif any(name in C.__dict__ for C in self.__class__.__mro__):
            super(CompoundThermodynamicState, self).__setattr__(name, value)
        # Update composable states attributes. This catches also normal
        # attributes besides properties and methods.
        else:
            old_state = None
            for s in self._composable_states:
                try:
                    getattr(s, name)
                except AttributeError:
                    pass
                else:
                    # Set on every composable state exposing the name and
                    # notify it through the callback.
                    old_state = copy.deepcopy(s)
                    s.__setattr__(name, value)
                    self._on_setattr_callback(s, name, old_state)
            # No attribute found. This is monkey patching.
            if old_state is None:
                super(CompoundThermodynamicState, self).__setattr__(name, value)

    def __getstate__(self, **kwargs):
        """Return a dictionary representation of the state."""
        # Create original ThermodynamicState to serialize.
        thermodynamic_state = object.__new__(self.__class__.__bases__[0])
        thermodynamic_state.__dict__ = self.__dict__
        # Set the instance _standardize_system method to CompoundState._standardize_system
        # so that the composable states standardization will be called during serialization.
        thermodynamic_state._standardize_system = self._standardize_system
        serialized_thermodynamic_state = utils.serialize(thermodynamic_state, **kwargs)
        # Serialize composable states.
        serialized_composable_states = [utils.serialize(state)
                                        for state in self._composable_states]
        return dict(thermodynamic_state=serialized_thermodynamic_state,
                    composable_states=serialized_composable_states)

    def __setstate__(self, serialization):
        """Set the state from a dictionary representation."""
        serialized_thermodynamic_state = serialization['thermodynamic_state']
        serialized_composable_states = serialization['composable_states']
        thermodynamic_state = utils.deserialize(serialized_thermodynamic_state)
        self.__dict__ = thermodynamic_state.__dict__
        # Assigned last so that __setattr__ dispatching is active only once
        # the composable states exist (see __init__).
        self._composable_states = [utils.deserialize(state)
                                   for state in serialized_composable_states]

    # -------------------------------------------------------------------------
    # Internal-usage
    # -------------------------------------------------------------------------

    def _standardize_system(self, system):
        """Standardize the system.

        Override ThermodynamicState._standardize_system to standardize
        the system also with respect to all other composable states.

        Raises
        ------
        ComposableStateError
            If it is impossible to standardize the system.

        See Also
        --------
        ThermodynamicState._standardize_system
        """
        super(CompoundThermodynamicState, self)._standardize_system(system)
        for composable_state in self._composable_states:
            composable_state._standardize_system(system)

    def _on_setattr_callback(self, composable_state, attribute_name, old_composable_state):
        """Updates the standard system (and hash) after __setattr__."""
        try:
            change_standard_system = composable_state._on_setattr(self._standard_system, attribute_name, old_composable_state)
        except TypeError:
            # Old two-argument _on_setattr signature: retry without the
            # old state and warn about the deprecation.
            change_standard_system = composable_state._on_setattr(self._standard_system, attribute_name)
            # TODO Drop support for the old signature and remove deprecation warning from 0.17 on.
            import warnings
            old_signature = '_on_setattr(self, standard_system, attribute_name)'
            new_signature = old_signature[:-1] + ', old_composable_state)'
            warnings.warn('The signature IComposableState.{} has been deprecated, '
                          'and future versions of openmmtools will support only the '
                          'new one: {}.'.format(old_signature, new_signature))
        if change_standard_system:
            # Rebuild the standard system: apply the new state values, then
            # re-standardize before installing it.
            new_standard_system = copy.deepcopy(self._standard_system)
            composable_state.apply_to_system(new_standard_system)
            composable_state._standardize_system(new_standard_system)
            self._update_standard_system(new_standard_system)

    def _apply_to_context_in_state(self, context, thermodynamic_state):
        """Apply parent state then all composable states to the context."""
        super(CompoundThermodynamicState, self)._apply_to_context_in_state(context, thermodynamic_state)
        for s in self._composable_states:
            s.apply_to_context(context)

    def _find_force_groups_to_update(self, context, current_context_state, memo):
        """Find the force groups to be recomputed when moving to the given state.

        Override ThermodynamicState._find_force_groups_to_update to find
        groups to update for changes of composable states.
        """
        # Initialize memo: create new cache for each composable state.
        if len(memo) == 0:
            memo.update({i: {} for i in range(len(self._composable_states))})
        # Find force group to update for parent class.
        force_groups = super(CompoundThermodynamicState, self)._find_force_groups_to_update(
            context, current_context_state, memo)
        # Find force group to update for composable states.
        for composable_state_idx, composable_state in enumerate(self._composable_states):
            force_groups.update(composable_state._find_force_groups_to_update(
                context, current_context_state, memo[composable_state_idx]))
        return force_groups
# =============================================================================
# GLOBAL PARAMETER STATE
# =============================================================================
class GlobalParameterError(ComposableStateError):
    """Error raised by a :class:`GlobalParameterState`."""
class GlobalParameterFunction(object):
    """A mathematical function of arbitrary variables controlling a global parameter.

    Instances wrap a string expression that is evaluated lazily with
    ``openmmtools.utils.math_eval``, so every function supported by
    ``math_eval`` is available in the expression.

    Parameters
    ----------
    expression : str
        A mathematical expression involving function variables.

    See Also
    --------
    openmmtools.utils.math_eval

    Examples
    --------
    >>> class MyComposableState(GlobalParameterState):
    ...     gamma = GlobalParameterState.GlobalParameter('gamma', standard_value=1.0)
    ...     lambda_angles = GlobalParameterState.GlobalParameter('lambda_angles', standard_value=1.0)
    ...
    >>> composable_state = MyComposableState(gamma=1.0, lambda_angles=0.5)
    >>> composable_state.set_function_variable('lambda', 0.5)
    >>> composable_state.set_function_variable('lambda2', 1.0)
    >>> composable_state.gamma = GlobalParameterFunction('lambda**2')
    >>> composable_state.gamma
    0.25
    >>> composable_state.lambda_angles = GlobalParameterFunction('(lambda + lambda2) / 2')
    >>> composable_state.lambda_angles
    0.75
    >>> composable_state.set_function_variable('lambda2', 0.5)
    >>> composable_state.lambda_angles
    0.5

    """

    def __init__(self, expression):
        # The raw expression string; read directly during serialization
        # (see GlobalParameterState.__getstate__), so the name must stay.
        self._expression = expression

    def __call__(self, variables):
        """Evaluate the expression with the given variable values."""
        return utils.math_eval(self._expression, variables)
class GlobalParameterState(object):
    """A composable state controlling one or more OpenMM ``Force``'s global parameters.

    This is a partially abstract class that provides facilities to implement
    composable states that control one or more global parameters defined in
    OpenMM ``Force`` objects. Global parameters are implemented through the
    use of the ``GlobalParameterState.GlobalParameter`` descriptor.

    A ``Force`` object can use one or more global parameters that are
    controlled by the same state. Conversely, multiple ``Force``s can use
    the same global parameter (i.e. with the same name) controlled by the
    state object.

    It is possible to enslave the global parameters to one or more arbitrary
    variables entering a mathematical expression through the use of
    ``GlobalParameterFunction``. Global parameters that are associated to a
    global parameter function are validated on get rather than on set.

    Parameters
    ----------
    parameters_name_suffix : str, optional
        If specified, the state will control a modified version of the global
        parameters with the name ``parameter_name + '_' + parameters_name_suffix``.
        When this is the case, the normal parameters are not accessible.
    **kwargs
        The value of the parameters controlled by this state. Parameters
        that are not passed here are left undefined.

    Notes
    -----
    The class automatically implements the static constructor ``from_system``
    that reads and creates a state object from an OpenMM ``System``. The function
    calls ``__init__`` and passes the parameter name suffix string as the
    first positional argument, so it is possible to overwrite ``__init__``
    and rename ``parameters_name_suffix`` as long as it is the first parameter
    of the constructor.

    See Also
    --------
    GlobalParameterFunction

    Examples
    --------
    Assume we have a ``System`` with a custom force object whose energy
    function is controlled by two global variables called ``lambda_bonds``
    and ``gamma``.

    >>> import openmm
    >>> from openmm import unit
    >>> # Define a diatomic molecule.
    >>> system = openmm.System()
    >>> particle_idx = system.addParticle(40.0*unit.amu)
    >>> particle_idx = system.addParticle(40.0*unit.amu)
    >>> custom_force = openmm.CustomBondForce('lambda_bonds^gamma*60000*(r-0.15)^2;')
    >>> parameter_idx = custom_force.addGlobalParameter('lambda_bonds', 1.0) # Default value is 1.0.
    >>> parameter_idx = custom_force.addGlobalParameter('gamma', 1.0) # Default value is 1.0.
    >>> bond_idx = custom_force.addBond(0, 1, [])
    >>> force_index = system.addForce(custom_force)
    >>> # Create a thermodynamic state object controlling the temperature of the system.
    >>> thermodynamic_state = ThermodynamicState(system, temperature=300.0*unit.kelvin)

    Define a composable state controlling the two global parameters ``gamma``
    and ``lambda_bonds``, both with standard state value 0.0. Parameters that
    are not specified in the constructor are left undefined.

    >>> class MyComposableState(GlobalParameterState):
    ...     gamma = GlobalParameterState.GlobalParameter('gamma', standard_value=1.0)
    ...     lambda_bonds = GlobalParameterState.GlobalParameter('lambda_bonds', standard_value=1.0)
    ...
    >>> my_composable_state = MyComposableState(gamma=1.0)
    >>> my_composable_state.gamma
    1.0
    >>> my_composable_state.lambda_bonds is None
    True

    There is a second static constructor you can use to read the state
    of an OpenMM ``System`` from the default values of its force parameters.

    >>> my_composable_state = MyComposableState.from_system(system)
    >>> my_composable_state.lambda_bonds
    1.0
    >>> my_composable_state.gamma
    1.0

    Optionally, you can limit the values that a global parameter can assume.
    In this case, ``lambda_bonds`` is forced to be between 0.0 and 1.0.

    >>> class MyComposableState(GlobalParameterState):
    ...     gamma = GlobalParameterState.GlobalParameter('gamma', standard_value=0.0)
    ...     lambda_bonds = GlobalParameterState.GlobalParameter('lambda_bonds', standard_value=0.0)
    ...     @lambda_bonds.validator
    ...     def lambda_bonds(self, instance, new_value):
    ...         if new_value is not None and not (0.0 <= new_value <= 1.0):
    ...             raise ValueError('lambda_bonds must be between 0.0 and 1.0')
    ...         return new_value
    ...
    >>> my_composable_state = MyComposableState(gamma=1.0)
    >>> my_composable_state.lambda_bonds = 2.0
    Traceback (most recent call last):
    ...
    ValueError: lambda_bonds must be between 0.0 and 1.0

    You can then add it to a ``CompoundThermodynamicState`` to manipulate
    OpenMM ``System`` and ``Context`` objects.

    >>> my_composable_state.lambda_bonds = 1.0
    >>> compound_state = CompoundThermodynamicState(thermodynamic_state, composable_states=[my_composable_state])
    >>> state_system = compound_state.get_system()
    >>> state_system.getForce(0).getGlobalParameterDefaultValue(0) # lambda_bonds global parameter.
    1.0
    >>> compound_state.lambda_bonds = 0.0
    >>> state_system = compound_state.get_system()
    >>> state_system.getForce(0).getGlobalParameterDefaultValue(0) # lambda_bonds global parameter.
    0.0
    >>> context = compound_state.create_context(openmm.VerletIntegrator(1.0*unit.femtoseconds))
    >>> context.getParameter('lambda_bonds')
    0.0
    >>> compound_state.lambda_bonds = 1.0
    >>> compound_state.apply_to_context(context)
    >>> context.getParameter('lambda_bonds')
    1.0

    You can enslave global parameters to a mathematical expression
    involving arbitrary variables.

    >>> compound_state.set_function_variable('lambda', 1.0)
    >>> compound_state.lambda_bonds = GlobalParameterFunction('2*(lambda - 0.5) * step(lambda - 0.5)')
    >>> for l in [0.5, 0.75, 1.0]:
    ...     compound_state.set_function_variable('lambda', l)
    ...     print(compound_state.lambda_bonds)
    0.0
    0.5
    1.0

    If you need to control similar forces with the same state object,
    this parent class provides a suffix mechanism to control different
    global variables with the same state object. This allows reusing
    the same logic to control multiple objects.

    >>> # Add a second custom force using similar global parameters.
    >>> custom_force = openmm.CustomBondForce('lambda_bonds_mysuffix*20000*(r-0.15)^2;')
    >>> parameter_idx = custom_force.addGlobalParameter('lambda_bonds_mysuffix', 1.0) # Default value is 1.0.
    >>> bond_idx = custom_force.addBond(0, 1, [])
    >>> force_idx = system.addForce(custom_force)
    >>> # Create a state controlling the modified global parameter.
    >>> my_composable_state = MyComposableState(parameters_name_suffix='mysuffix', lambda_bonds=0.0)
    >>> my_composable_state.lambda_bonds_mysuffix = 1.0
    >>> my_composable_state.gamma_mysuffix is None
    True
    >>> my_composable_state.apply_to_system(system)
    >>> # The unmodified parameter becomes inaccessible.
    >>> my_composable_state.lambda_bonds
    Traceback (most recent call last):
    ...
    AttributeError: This state does not control lambda_bonds but lambda_bonds_mysuffix.

    Note also in the example above that the forces don't need to define
    all the global parameters controlled by the state. The state object
    will perform some checks to ensure that you won't try to set an undefined
    parameter.

    >>> my_composable_state.gamma_mysuffix = 2
    >>> my_composable_state.apply_to_system(system)
    Traceback (most recent call last):
    ...
    openmmtools.states.GlobalParameterError: Could not find global parameter gamma_mysuffix in the system.

    """

    # This constant can be overwritten by inheriting classes to
    # raise a custom exception class when an error is encountered.
    _GLOBAL_PARAMETER_ERROR = GlobalParameterError

    def __init__(self, parameters_name_suffix=None, **kwargs):
        """Initialize the state; see the class docstring for the parameters."""
        self._initialize(parameters_name_suffix=parameters_name_suffix, **kwargs)
@classmethod
def from_system(cls, system, parameters_name_suffix=None):
"""Static constructor reading the state from an OpenMM System.
Parameters
----------
system : openmm.System
An OpenMM ``System`` object defining a non-empty subset
of global parameters controlled by this state.
parameters_name_suffix : str, optional
If specified, the state will search for a modified
version of the global parameters with the name
``parameter_name + '_' + parameters_name_suffix``.
Returns
-------
The GlobalParameterState object representing the state of the system.
Raises
------
GlobalParameterStateError
If the same parameter has different values in the system, or
if the system has no lambda parameters.
"""
state_parameters = {}
for force, parameter_name, parameter_id in cls._get_system_controlled_parameters(
system, parameters_name_suffix):
if parameter_id >= force.getNumGlobalParameters():
raise GlobalParameterStateError(f'Attempted to access system parameter {parameter_name} (id {parameter_id}) that does not exist in {force.__class__.__name__}')
parameter_value = force.getGlobalParameterDefaultValue(parameter_id)
# Check that we haven't already found
# the parameter with a different value.
if parameter_name in state_parameters:
if state_parameters[parameter_name] != parameter_value:
err_msg = ('Parameter {} has been found twice (Force {}) with two values: '
'{} and {}').format(parameter_name, force.__class__.__name__,
parameter_value, state_parameters[parameter_name])
raise cls._GLOBAL_PARAMETER_ERROR(err_msg)
else:
state_parameters[parameter_name] = parameter_value
# Check that the system can be controlled by this state..
if len(state_parameters) == 0:
err_msg = 'System has no global parameters controlled by this state.'
raise cls._GLOBAL_PARAMETER_ERROR(err_msg)
# Create and return the GlobalParameterState. The constructor of
# GlobalParameterState takes the parameters without the suffix so
# we left them undefined in the constructor and assign the attributes.
state = cls(parameters_name_suffix)
for parameter_name, parameter_value in state_parameters.items():
setattr(state, parameter_name, parameter_value)
return state
# -------------------------------------------------------------------------
# Function variables
# -------------------------------------------------------------------------
def get_function_variable(self, variable_name):
"""Return the value of the function variable.
Function variables are _not_ global parameters but rather variables
entering mathematical expressions specified with ``GlobalParameterFunction``,
which can be use to enslave global parameter to arbitrary variables.
Parameters
----------
variable_name : str
The name of the function variable.
Returns
-------
variable_value : float
The value of the function variable.
"""
try:
variable_value = self._function_variables[variable_name]
except KeyError:
err_msg = 'Unknown function variable {}'.format(variable_name)
raise self._GLOBAL_PARAMETER_ERROR(err_msg)
return variable_value
def set_function_variable(self, variable_name, new_value):
"""Set the value of the function variable.
Function variables are _not_ global parameters but rather variables
entering mathematical expressions specified with ``GlobalParameterFunction``,
which can be use to enslave global parameter to arbitrary variables.
Parameters
----------
variable_name : str
The name of the function variable.
new_value : float
The new value for the variable.
"""
forbidden_variable_names = set(self._parameters)
if variable_name in forbidden_variable_names:
err_msg = ('Cannot have an function variable with the same name '
'of the predefined global parameter {}.'.format(variable_name))
raise self._GLOBAL_PARAMETER_ERROR(err_msg)
# Check that the new value is a scalar,
if not (np.isreal(new_value) and np.isscalar(new_value)):
err_msg = 'Only integers and floats can be assigned to a function variable.'
raise self._GLOBAL_PARAMETER_ERROR(err_msg)
self._function_variables[variable_name] = new_value
# -------------------------------------------------------------------------
# Operators
# -------------------------------------------------------------------------
def __eq__(self, other):
"""Equality operator.
Two GlobalParameterState are equal if they control the same global
parameters and they all have the same values. This way the operator
preserves the commutative property.
"""
# Check if other is a global parameter state.
if not isinstance(other, GlobalParameterState):
return False
# Check that they define the same parameters.
if not set(self._parameters) == set(other._parameters):
return False
# Check that all values are the same
is_equal = True
for parameter_name in self._parameters:
self_value = getattr(self, parameter_name)
other_value = getattr(other, parameter_name)
is_equal = is_equal and self_value == other_value
return is_equal
def __ne__(self, other):
# TODO: we can safely remove this when dropping support for Python 2
return not self == other
def __str__(self):
return str(self._parameters)
# -------------------------------------------------------------------------
# Global parameters descriptor class.
# -------------------------------------------------------------------------
# The global parameter descriptor makes it easy for the user to
# create their own state classes. The set of controlled parameters is
# dynamically discovered by _get_controlled_parameters() by checking
# which descriptors are GlobalParameter objects.
class GlobalParameter(object):
"""Descriptor for a global parameter.
Parameters
----------
parameter_name : str
The name of the global parameter.
standard_value : float
The value of the global parameter in the standard state. This
is used to define the concept of compatible states (i.e. whether
a ``System`` or ``Context`` can be transformed from a state
to another).
validator : callable, optional
A function to call before setting a new value with signature
``validator(self, instance, new_value) -> validated_value``.
It is also possible to define this through the ``validator``
decorator.
"""
def __init__(self, parameter_name, standard_value, validator=None):
self.parameter_name = parameter_name
self.standard_value = standard_value
self.validator_func = validator
def __get__(self, instance, owner_class=None):
self._check_controlled(instance)
return instance._get_global_parameter_value(self.parameter_name, self)
def __set__(self, instance, new_value):
self._check_controlled(instance)
instance._set_global_parameter_value(self.parameter_name, new_value, self)
def validator(self, validator):
return self.__class__(self.parameter_name, self.standard_value, validator)
def _check_controlled(self, instance):
"""Raise GlobalParameterError if the parameter is not controlled by the state.
If the state uses a parameter name suffix, the normal parameter
name is not accessible.
"""
if instance._parameters_name_suffix is not None:
suffixed_parameter_name = self.parameter_name + '_' + instance._parameters_name_suffix
err_msg = 'This state does not control {} but {}.'.format(
self.parameter_name, suffixed_parameter_name)
raise AttributeError(err_msg)
# -------------------------------------------------------------------------
# Internal usage: IComposableState interface
# -------------------------------------------------------------------------
def apply_to_system(self, system):
"""Set the state of the system to this.
Parameters
----------
system : openmm.System
The system to modify.
Raises
------
GlobalParameterError
If the system does not defined some of the global parameters
controlled by this state.
"""
parameters_applied = set()
for force, parameter_name, parameter_id in self._get_system_controlled_parameters(
system, self._parameters_name_suffix):
parameter_value = getattr(self, parameter_name)
if parameter_value is None:
err_msg = 'The system parameter {} is not defined in this state.'
raise self._GLOBAL_PARAMETER_ERROR(err_msg.format(parameter_name))
else:
if parameter_id >= force.getNumGlobalParameters():
raise GlobalParameterStateError(f'Attempted to access system parameter {parameter_name} (id {parameter_id}) that does not exist in {force.__class__.__name__}')
parameters_applied.add(parameter_name)
force.setGlobalParameterDefaultValue(parameter_id, parameter_value)
# Check that we set all the defined parameters.
for parameter_name in self._get_controlled_parameters(self._parameters_name_suffix):
if (self._parameters[parameter_name] is not None and
parameter_name not in parameters_applied):
err_msg = 'Could not find global parameter {} in the system.'
raise self._GLOBAL_PARAMETER_ERROR(err_msg.format(parameter_name))
def check_system_consistency(self, system):
"""Check if the system is in this state.
It raises a GlobalParameterError if the system is not consistent
with this state.
Parameters
----------
system : openmm.System
The system to test.
Raises
------
GlobalParameterError
If the system is not consistent with this state.
"""
system_state = self.__class__.from_system(system, self._parameters_name_suffix)
# Check if parameters are all the same.
if self != system_state:
err_msg = ('Consistency check failed:\n'
'\tSystem parameters {}\n'
'\t{} parameters {}')
class_name = self.__class__.__name__
raise self._GLOBAL_PARAMETER_ERROR(err_msg.format(system_state, class_name, self))
def apply_to_context(self, context):
"""Put the Context into this state.
Parameters
----------
context : openmm.Context
The context to set.
Raises
------
GlobalParameterError
If the context does not have the required global parameters.
"""
context_parameters = context.getParameters()
# Set the global parameters in Context.
for parameter_name in self._parameters:
parameter_value = getattr(self, parameter_name)
if parameter_value is None:
# Check that Context does not have this parameter.
if parameter_name in context_parameters:
err_msg = 'Context has parameter {} which is undefined in this state.'
raise self._GLOBAL_PARAMETER_ERROR(err_msg.format(parameter_name))
continue
try:
context.setParameter(parameter_name, parameter_value)
except Exception:
err_msg = 'Could not find parameter {} in context'
raise self._GLOBAL_PARAMETER_ERROR(err_msg.format(parameter_name))
def _standardize_system(self, system):
"""Standardize the given system.
Set all global parameters of the system their standard value.
Parameters
----------
system : openmm.System
The system to standardize.
Raises
------
GlobalParameterError
If the system is not consistent with this state.
"""
# Collect all the global parameters' standard values.
standard_values = {}
controlled_parameters = self._get_controlled_parameters(self._parameters_name_suffix)
for parameter_name, parameter_descriptor in controlled_parameters.items():
standard_values[parameter_name] = parameter_descriptor.standard_value
# Create a standard state.
standard_state = copy.deepcopy(self)
for parameter_name, standard_value in standard_values.items():
# Skip undefined parameters.
if getattr(standard_state, parameter_name) is not None:
setattr(standard_state, parameter_name, standard_value)
# Standardize the system.
standard_state.apply_to_system(system)
def _on_setattr(self, standard_system, attribute_name, old_global_parameter_state):
"""Check if the standard system needs changes after a state attribute is set.
Parameters
----------
standard_system : openmm.System
The standard system before setting the attribute.
attribute_name : str
The name of the attribute that has just been set or retrieved.
old_global_parameter_state : GlobalParameterState
A copy of the composable state before the attribute was set.
Returns
-------
need_changes : bool
True if the standard system has to be updated, False if no change
occurred.
"""
# There are no attributes that can be set that can alter the standard system,
# but if a parameter goes from defined to undefined, we should raise an error.
old_attribute_value = getattr(old_global_parameter_state, attribute_name)
new_attribute_value = getattr(self, attribute_name)
if (old_attribute_value is None) != (new_attribute_value is None):
err_msg = 'Cannot set the parameter {} in the system from {} to {}'.format(
attribute_name, old_attribute_value, new_attribute_value)
# Set back old value to maintain a consistent state in case the exception
# is catched. If this attribute was associated to a GlobalParameterFunction,
# we need to retrieve the original function object before setting.
old_attribute_value = old_global_parameter_state._get_global_parameter_value(
attribute_name, resolve_function=None)
setattr(self, attribute_name, old_attribute_value)
raise self._GLOBAL_PARAMETER_ERROR(err_msg)
return False
def _find_force_groups_to_update(self, context, current_context_state, memo):
"""Find the force groups whose energy must be recomputed after applying self.
Parameters
----------
context : Context
The context, currently in `current_context_state`, that will
be moved to this state.
current_context_state : ThermodynamicState
The full thermodynamic state of the given context. This is
guaranteed to be compatible with self.
memo : dict
A dictionary that can be used by the state for memoization
to speed up consecutive calls on the same context.
Returns
-------
force_groups_to_update : set of int
The indices of the force groups whose energy must be computed
again after applying this state, assuming the context to be in
`current_context_state`.
"""
# Cache information about system force groups.
# We create a dictionary "memo" mapping parameter_name -> list of force groups to update.
if len(memo) == 0:
system = context.getSystem()
system_parameters = self._get_system_controlled_parameters(system, self._parameters_name_suffix)
for force, parameter_name, _ in system_parameters:
# Keep track of valid parameters only.
if self._parameters[parameter_name] is not None:
try:
memo[parameter_name].append(force.getForceGroup())
except KeyError:
memo[parameter_name] = [force.getForceGroup()]
# Find lambda parameters that will change.
force_groups_to_update = set()
for parameter_name, force_groups in memo.items():
self_parameter_value = getattr(self, parameter_name)
current_parameter_value = getattr(current_context_state, parameter_name)
if self_parameter_value != current_parameter_value:
force_groups_to_update.update(force_groups)
return force_groups_to_update
# -------------------------------------------------------------------------
# Internal usage: Attributes handling
# -------------------------------------------------------------------------
@classmethod
def _get_controlled_parameters(cls, parameters_name_suffix=None):
"""Return a set of the global parameters controlled by the state class.
This is constructed dynamically by introspection gathering all the
descriptors that belong to the GlobalParameter class.
Parameters
----------
parameters_name_suffix : str, optional
If specified, this returns the set of parameter names with the
name suffix.
Returns
-------
controlled_parameters : dict of str -> GlobalParameter
A map from the name of the controlled parameter to the
GlobalParameter descriptor handling it.
"""
if parameters_name_suffix is None:
suffix = ''
else:
suffix = '_' + parameters_name_suffix
# TODO just use inspect.getmembers when dropping Python 2 which automatically resolves the MRO.
# controlled_parameters = {name + suffix: descriptor for name, descriptor in inspect.getmembers(cls)
# if isinstance(descriptor, cls.GlobalParameter)}
controlled_parameters = {name + suffix: descriptor for c in inspect.getmro(cls)
for name, descriptor in c.__dict__.items()
if isinstance(descriptor, cls.GlobalParameter)}
return controlled_parameters
def _validate_global_parameter(self, parameter_name, parameter_value, descriptor=None):
"""Return the validated parameter value using the descriptor validator.
Parameters
----------
parameter_name : str
Parameter name (with eventual suffix) to validate.
parameter_value : float
Parameter value to validate. If a GlobalParameterFunction is associated
to the parameter, this must be evaluated before calling this.
descriptor : GlobalParameterState.GlobalParameter, optional
If None, the functions automatically looks for the descriptor associated
to this parameter and calls its validator (if any). This search is
skipped if this argument is provided.
Returns
-------
validated_value : float
The validated value.
Raises
------
KeyError
If parameter_name is not controlled by this state.
"""
if descriptor is None:
# Get the descriptors of all controlled parameters.
controlled_parameters = self._get_controlled_parameters(self._parameters_name_suffix)
# Call validator, before setting the parameter. This raises KeyError.
descriptor = controlled_parameters[parameter_name]
if descriptor.validator_func is not None:
parameter_value = descriptor.validator_func(descriptor, self, parameter_value)
return parameter_value
def _get_global_parameter_value(self, parameter_name, descriptor=None, resolve_function=True):
"""Retrieve the current value of a global parameter.
Parameters
----------
parameter_name : str
Parameter name (with eventual suffix) to validate.
descriptor : GlobalParameterState.GlobalParameter, optional
If None, and the parameter is associated to a GlobalParameterFunction,
the functions automatically looks for the descriptor associated to this
parameter and calls its validator (if any). This search is skipped if
this argument is provided. Default is None.
resolve_function : bool, optional
If False and the parameter is associated to a GlobalParameterFunction,
the function is not evaluated (and its result is not validated), and
the GlobalParameterFunction object is returned instead. Default is True.
Returns
-------
parameter_value : float
The parameter value.
Raises
------
KeyError
If parameter_name is not controlled by this state.
"""
parameter_value = self._parameters[parameter_name]
if resolve_function and isinstance(parameter_value, GlobalParameterFunction):
parameter_value = parameter_value(self._function_variables)
# If the value is generated through a mathematical expression,
# we validate the value after the expression is evaluated rather
# than on setting.
parameter_value = self._validate_global_parameter(parameter_name, parameter_value, descriptor)
return parameter_value
def _set_global_parameter_value(self, parameter_name, new_value, descriptor=None):
"""Set the value of a global parameter.
Parameters
----------
parameter_name : str
Parameter name (with eventual suffix) to validate.
new_value : float or GlobalParameterFunction
The new parameter value.
descriptor : GlobalParameterState.GlobalParameter, optional
If None, and the parameter is not a GlobalParameterFunction, the functions
automatically looks for the descriptor associated to this parameter and
calls its validator (if any). This search is skipped if this argument is
provided.
Raises
------
KeyError
If parameter_name is not controlled by this state.
"""
# Check if the parameter is defined and raise KeyError otherwise.
if parameter_name not in self._parameters:
raise KeyError(parameter_name)
# If the value is generated through a mathematical expression,
# we validate the value after the expression is evaluated rather
# than on setting.
if not isinstance(new_value, GlobalParameterFunction):
new_value = self._validate_global_parameter(parameter_name, new_value, descriptor)
self._parameters[parameter_name] = new_value
    def __getattr__(self, key):
        """Resolve suffixed global parameter names as attributes.

        Python calls ``__getattr__`` only after normal attribute lookup
        fails, so non-suffixed parameters (handled by the GlobalParameter
        descriptors) never reach this method.
        """
        # __getattr__ is called only if the item is not found in the
        # usual ways, so we don't need to handle GlobalParameter here.
        try:
            parameter_value = self._get_global_parameter_value(key)
        except KeyError:
            # Parameter not found, fall back to normal behavior. This raises
            # the standard AttributeError for genuinely unknown attributes.
            parameter_value = super(GlobalParameterState, self).__getattribute__(key)
        return parameter_value
    def __setattr__(self, key, value):
        """Intercept assignments to suffixed global parameter names.

        Regular attributes (and non-suffixed parameters, which go through
        the GlobalParameter descriptors) fall back to the default behavior.
        """
        # Check if the object has been initialized and we can
        # start resolving dynamically suffixed parameters. Probing __dict__
        # directly avoids triggering __getattr__ before initialization.
        if '_parameters_name_suffix' in self.__dict__ and self._parameters_name_suffix is not None:
            try:
                self._set_global_parameter_value(key, value)
            except KeyError:
                # Not a controlled parameter: fall through to normal handling.
                pass
            else:
                return
        # This is not a "suffixed" parameter. Fallback to normal behavior.
        super(GlobalParameterState, self).__setattr__(key, value)
@classmethod
def _get_system_controlled_parameters(cls, system, parameters_name_suffix):
"""Yields the controlled global parameters defined in the System.
Yields
------
A tuple force, parameter_name, parameter_index for each supported
lambda parameter.
"""
searched_parameters = cls._get_controlled_parameters(parameters_name_suffix)
# Retrieve all the forces with global supported parameters.
for force_index in range(system.getNumForces()):
force = system.getForce(force_index)
try:
n_global_parameters = force.getNumGlobalParameters()
except AttributeError:
continue
for parameter_id in range(n_global_parameters):
parameter_name = force.getGlobalParameterName(parameter_id)
if parameter_name in searched_parameters:
yield force, parameter_name, parameter_id
def __getstate__(self):
"""Return a dictionary representation of the state."""
serialization = dict(
parameters={},
function_variables=self._function_variables.copy(),
parameters_name_suffix=self._parameters_name_suffix
)
# Copy parameters. We serialize the parameters with their original name
# (i.e., without suffix) because we'll pass them to _initialize().
if self._parameters_name_suffix is None:
suffix = ''
else:
suffix = '_' + self._parameters_name_suffix
for parameter_name in self._get_controlled_parameters():
parameter_value = self._parameters[parameter_name + suffix]
# Convert global parameter function into string expressions.
if isinstance(parameter_value, GlobalParameterFunction):
parameter_value = parameter_value._expression
serialization['parameters'][parameter_name] = parameter_value
return serialization
def __setstate__(self, serialization):
    """Set the state from a dictionary representation."""
    parameters = serialization['parameters']
    # Optional keys kept for backwards compatibility: parameters_name_suffix
    # appeared in openmmtools 0.16.0, function_variables in 0.17.0.
    name_suffix = serialization.get('parameters_name_suffix', None)
    function_variables = serialization.get('function_variables', {})
    # Parameters serialized as strings are global parameter function
    # expressions; hold them aside and re-wrap them after initialization.
    deferred_functions = {name: value for name, value in parameters.items()
                          if isinstance(value, str)}
    for name in deferred_functions:
        parameters[name] = None
    # Initialize the parameters, then restore all function variables.
    self._initialize(parameters_name_suffix=name_suffix, **parameters)
    for variable_name, variable_value in function_variables.items():
        self.set_function_variable(variable_name, variable_value)
    # Re-attach global parameter functions under their (possibly suffixed) names.
    suffix = '' if name_suffix is None else '_' + name_suffix
    for name, expression in deferred_functions.items():
        setattr(self, name + suffix, GlobalParameterFunction(expression))
# -------------------------------------------------------------------------
# Internal usage: Initialization
# -------------------------------------------------------------------------
def _initialize(self, parameters_name_suffix=None, **kwargs):
    """Initialize the state.

    It takes the global parameters and their values as keywords arguments.
    Controlled parameters that are not passed are left undefined (i.e. are
    set to None).
    """
    self._function_variables = {}
    # Discover the controlled parameters by introspection and reject
    # any keyword that is not one of them.
    controlled = set(self._get_controlled_parameters())
    unknown = set(kwargs) - controlled
    if unknown:
        raise self._GLOBAL_PARAMETER_ERROR("Unknown parameters {}".format(unknown))
    # Internal storage keys carry the suffix, when one is configured.
    if parameters_name_suffix is not None:
        suffix = '_' + parameters_name_suffix
        kwargs = {name + suffix: value for name, value in kwargs.items()}
        controlled = {name + suffix for name in controlled}
    # Every controlled parameter defaults to None (i.e. undefined).
    self._parameters = dict.fromkeys(controlled, None)
    # __setattr__ starts resolving dynamically suffixed parameters as soon
    # as this attribute exists, so it must be the last direct assignment.
    self._parameters_name_suffix = parameters_name_suffix
    # Route the constructor arguments through setattr so they get the
    # normal validation/assignment path.
    for parameter_name, value in kwargs.items():
        setattr(self, parameter_name, value)
# Run the module's doctests when executed as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
    # doctest.run_docstring_examples(CompoundThermodynamicState, globals())
|
choderalab/openmmtools
|
openmmtools/states.py
|
Python
|
mit
| 165,224
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
import random
import base64
from comment.settings import PROXIES
# Abuyun HTTP proxy tunnel endpoint.
proxyServer = "http://proxy.abuyun.com:9020"
# Proxy tunnel authentication credentials.
# NOTE(review): credentials are hard-coded in source; consider moving them
# to the project settings or environment variables.
proxyUser = "H575H2K6SEGYE3QD"
proxyPass = "452745BC63D65158"
# Pre-computed Proxy-Authorization header value. (Despite the original
# "for Python2" note, the bytes()/decode() usage here is Python 3 style.)
proxyAuth = "Basic " + base64.urlsafe_b64encode(bytes((proxyUser + ":" + proxyPass), "ascii")).decode("utf8")
class RandomUserAgent(object):
    """Downloader middleware that rotates the User-Agent header.

    A random agent is picked from a predefined pool for every outgoing
    request that does not already carry one.
    """

    def __init__(self, agents):
        # Pool of user-agent strings to choose from.
        self.agents = agents

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy hook: build the middleware from the USER_AGENTS setting.
        return cls(crawler.settings.getlist('USER_AGENTS'))

    def process_request(self, request, spider):
        chosen = random.choice(self.agents)
        # setdefault keeps any User-Agent the spider set explicitly.
        request.headers.setdefault('User-Agent', chosen)
class ProxyMiddleware(object):
    """Downloader middleware that routes every request through the paid proxy.

    Attaches the module-level tunnel endpoint and its pre-computed
    Proxy-Authorization credentials to each outgoing request.
    """

    def process_request(self, request, spider):
        # Authenticate against the tunnel and route the request through it.
        request.headers["Proxy-Authorization"] = proxyAuth
        request.meta["proxy"] = proxyServer
|
zh-angle-i/review-analysis
|
review_analysis/spidering/comment/middlewares.py
|
Python
|
mit
| 1,174
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: orders SpeakerList entries by speech
    # count then speech id, and gives speech_id a default value of 1.

    dependencies = [
        ('speaker_list', '0003_auto_20160213_1850'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='speakerlist',
            options={'ordering': ['nr_of_speeches', 'speech_id']},
        ),
        migrations.AlterField(
            model_name='speakerlist',
            name='speech_id',
            # verbose_name is intentionally Swedish ("speaker id").
            field=models.IntegerField(verbose_name='talar id', default=1),
        ),
    ]
|
I-sektionen/i-portalen
|
wsgi/iportalen_django/speaker_list/migrations/0004_auto_20160218_1353.py
|
Python
|
mit
| 587
|
import os
import sys
import socket
import json
import uuid
import imp
import threading
import time
import random
import math
import shutil
import hashlib
import signal
import traceback
import cStringIO
import bisect
import requests as rq
import shlex
import subprocess
import tempfile
import tarfile
import base64
import shutil
import glob
import zlib
import re
import copy
import cPickle
from kunai.log import logger
from kunai.threadmgr import threader
from kunai.perfdata import PerfDatas
from kunai.now import NOW
# some singleton :)
from kunai.broadcast import broadcaster
from kunai.websocketmanager import websocketmgr
from kunai.pubsub import pubsub
from kunai.httpdaemon import route, response, abort
from kunai.encrypter import encrypter
# Gossip fan-out factor: how many peers each round pushes broadcasts to,
# and the send-count threshold after which a broadcast message is retired.
KGOSSIP = 10
# Main class for a Gossip cluster
class Gossip(object):
    """SWIM-style gossip membership manager for the cluster.

    Tracks the shared ``nodes`` dict (uuid -> node record), spreads
    alive/suspect/dead/leave state changes through UDP gossip broadcasts,
    and exposes a small HTTP interface for name/members/join/leave.

    NOTE(review): this module is Python 2 only (``print`` statements,
    ``except Exc, e`` syntax, ``iteritems``).
    """

    def __init__(self, nodes, nodes_lock, addr, port, name, incarnation, uuid, tags, seeds, bootstrap):
        self.nodes = nodes
        self.nodes_lock = nodes_lock
        self.addr = addr
        self.port = port
        self.name = name
        self.incarnation = incarnation
        self.uuid = uuid
        self.tags = tags
        self.seeds = seeds
        self.bootstrap = bootstrap
        self.interrupted = False
        # list of uuid to ping back because we though they were dead
        self.to_ping_back = []
        # Register ourselves in the nodes dict as an alive bootstrap entry.
        myself = self.get_boostrap_node()
        self.set_alive(myself, bootstrap=True)
        # export my http uri now I got a real self
        self.export_http()

    # Dict-like access: all of these delegate to the shared nodes dict.
    def __getitem__(self, uuid):
        return self.nodes[uuid]

    def get(self, uuid, default=None):
        return self.nodes.get(uuid, default)

    def __iter__(self):
        return self.nodes.__iter__()

    def __contains__(self, uuid):
        return uuid in self.nodes

    def __setitem__(self, k, value):
        self.nodes[k] = value

    def __delitem__(self, k):
        try:
            del self.nodes[k]
        # NOTE(review): deleting a missing key from a dict raises KeyError,
        # not IndexError, so this handler never fires and a missing key
        # would propagate to the caller.
        except IndexError:
            pass

    # get my own node entry
    def get_boostrap_node(self):
        node = {'addr': self.addr, 'port': self.port, 'name': self.name,
                'incarnation':self.incarnation, 'uuid':self.uuid, 'state':'alive', 'tags':self.tags,
                'services':{}}
        return node

    ############# Main new state handling methods

    # Set alive a node we eart about.
    # * It can be us if we allow the bootstrap node (only at startup).
    # * If strong it means we did the check, so we believe us :)
    def set_alive(self, node, bootstrap=False, strong=False):
        addr = node['addr']
        port = node['port']
        name = node['name']
        incarnation = node['incarnation']
        uuid = node['uuid']
        state = node['state'] = 'alive'
        tags = node.get('tags', [])
        # Maybe it's me? if so skip it
        if not bootstrap:
            if node['addr'] == self.addr and node['port'] == self.port:
                return
        # Maybe it's a new node that just enter the cluster?
        if uuid not in self.nodes:
            logger.log("New node detected", node, part='gossip')
            # Add the node but in a protected mode
            with self.nodes_lock:
                self.nodes[uuid] = node
            self.stack_alive_broadcast(node)
            return
        prev = self.nodes.get(uuid, None)
        # maybe the prev was out by another thread?
        if prev is None:
            return
        change = (prev['state'] != state)
        # If the data is not just new, bail out
        if not strong and incarnation <= prev['incarnation']:
            return
        logger.debug('ALIVENODE', name, prev['state'], state, strong, change, incarnation, prev['incarnation'], (strong and change), (incarnation > prev['incarnation']))
        # only react to the new data if they are really new :)
        if strong or incarnation > prev['incarnation']:
            # protect the nodes access with the lock so others threads are happy :)
            with self.nodes_lock:
                self.nodes[uuid] = node
            # Only broadcast if it's a new data from somewhere else
            if (strong and change) or incarnation > prev['incarnation']:
                logger.debug("Updating alive a node", prev, 'with', node)
                self.stack_alive_broadcast(node)

    # Someone suspect a node, so believe it
    def set_suspect(self, suspect):
        addr = suspect['addr']
        port = suspect['port']
        name = suspect['name']
        incarnation = suspect['incarnation']
        uuid = suspect['uuid']
        tags = suspect.get('tags', [])
        services = suspect.get('services', {})
        state = 'suspect'
        # Maybe we didn't even have this nodes in our list?
        if not uuid in self.nodes:
            return
        node = self.nodes.get(uuid, None)
        # Maybe it vanish by another threads?
        if node is None:
            return
        # Maybe this data is too old
        if incarnation < node['incarnation']:
            return
        # We only case about into about alive nodes, dead and suspect
        # are not interesting :)
        if node['state'] != 'alive':
            return
        # Maybe it's us?? We need to say FUCKING NO, I'm alive!!
        if uuid == self.uuid:
            logger.log('SUSPECT: SOMEONE THINK I AM SUSPECT, BUT I AM ALIVE', part='gossip')
            self.incarnation += 1
            node['incarnation'] = self.incarnation
            self.stack_alive_broadcast(node)
            return
        logger.log('SUSPECTING: I suspect node %s' % node['name'], part='gossip')
        # Ok it's definitivly someone else that is now suspected, update this, and update it :)
        node['incarnation'] = incarnation
        node['state'] = state
        node['suspect_time'] = int(time.time())
        node['tags'] = tags
        node['services'] = services
        self.stack_suspect_broadcast(node)

    # Someone ask us about a leave node, so believe it
    # Leave node are about all states, so we don't filter by current state
    # if the incarnation is ok, we believe it
    # NOTE(review): this method still contains raw debug `print` statements;
    # they should probably go through logger like everything else.
    def set_leave(self, leaved):
        addr = leaved['addr']
        port = leaved['port']
        name = leaved['name']
        incarnation = leaved['incarnation']
        uuid = leaved['uuid']
        tags = leaved.get('tags', [])
        services = leaved.get('services', {})
        state = 'leave'
        print "SET_LEAVE::", leaved
        # Maybe we didn't even have this nodes in our list?
        if not uuid in self.nodes:
            return
        node = self.nodes.get(uuid, None)
        # The node can vanish by another thread delete
        if node is None:
            return
        # Maybe we already know it's leaved, so don't update it
        if node['state'] == 'leave':
            return
        print "SET LEAVE %s and inner node %s" % (leaved, node)
        # If for me it must be with my own incarnation number so we are sure it's really us that should leave
        # and not
        if uuid == self.uuid:
            if incarnation != node['incarnation']:
                print "LEAVE INCARNATION NOT THE SAME FOR MYSELF"
                return
        else:
            # If not for me, use the classic 'not already known' rule
            if incarnation < node['incarnation']:
                print "LEAVE, NOT FOR ME, THE INCARNATION NUMBER IS TOO OLD"
                return
        print "SET LEAVE UUID and SELF.UUID", uuid, self.uuid
        # Maybe it's us?? If so we must send our broadcast and exit in few seconds
        if uuid == self.uuid:
            logger.log('LEAVE: someone is asking me for leaving.', part='gossip')
            self.incarnation += 1
            node['incarnation'] = self.incarnation
            self.stack_leave_broadcast(node)

            # Background helper: give the leave broadcast a few seconds to
            # spread, then interrupt every looping thread.
            def bailout_after_leave(self):
                logger.log('Bailing out in few seconds. I was put in leave state')
                time.sleep(10)
                logger.log('Exiting from a self leave message')
                # Will set self.interrupted = True to eavery thread that loop
                pubsub.pub('interrupt')

            threader.create_and_launch(bailout_after_leave, args=(self,))
            return
        logger.log('LEAVING: The node %s is leaving' % node['name'], part='gossip')
        # Ok it's definitivly someone else that is now suspected, update this, and update it :)
        node['incarnation'] = incarnation
        node['state'] = state
        node['leave_time'] = int(time.time())
        node['tags'] = tags
        node['services'] = services
        self.stack_leave_broadcast(node)

    # Someone suspect a node, so believe it
    def set_dead(self, suspect):
        addr = suspect['addr']
        port = suspect['port']
        name = suspect['name']
        incarnation = suspect['incarnation']
        uuid = suspect['uuid']
        tags = suspect.get('tags', [])
        services = suspect.get('services', {})
        state = 'dead'
        # Maybe we didn't even have this nodes in our list?
        if not uuid in self.nodes:
            return
        node = self.nodes.get(uuid, None)
        # The node can vanish
        if node is None:
            return
        # Maybe this data is too old
        if incarnation < node['incarnation']:
            return
        # We only case about into about alive nodes, dead and suspect
        # are not interesting :)
        if node['state'] != 'alive':
            return
        # Maybe it's us?? We need to say FUCKING NO, I'm alive!!
        if uuid == self.uuid:
            logger.log('SUSPECT: SOMEONE THINK I AM SUSPECT, BUT I AM ALIVE', part='gossip')
            self.incarnation += 1
            node['incarnation'] = self.incarnation
            self.stack_alive_broadcast(node)
            return
        logger.log('DEAD: I put in dead node %s' % node['name'], part='gossip')
        # Ok it's definitivly someone else that is now suspected, update this, and update it :)
        node['incarnation'] = incarnation
        node['state'] = state
        node['suspect_time'] = int(time.time())
        node['tags'] = tags
        node['services'] = services
        self.stack_dead_broadcast(node)

    # Someone send us it's nodes, we are merging it with ours
    def merge_nodes(self, nodes):
        to_del = []
        # Get a copy of self.nodes so we won't lock too much here
        mynodes = {}
        with self.nodes_lock:
            mynodes = copy.copy(self.nodes)
        for (k, node) in nodes.iteritems():
            # Maybe it's me? bail out
            if node['addr'] == self.addr and node['port'] == self.port:
                continue
            # Look if we got some duplicates, that got the same addr, but different
            for (otherk, othern) in mynodes.iteritems():
                if node['addr'] == othern['addr'] and node['port'] == othern['port'] and otherk != k:
                    # we keep the newest incarnation
                    if node['incarnation'] < othern['incarnation']:
                        to_del.append(k)
                    else:
                        to_del.append(otherk)
            state = node['state']
            # Try to incorporate it
            if state == 'alive':
                self.set_alive(node)
            elif state == 'dead' or state == 'suspect':
                self.set_suspect(node)
            elif state == 'leave':
                self.set_leave(node)
        # Now clean old nodes
        for k in to_del:
            try:
                del self.nodes[k]
            except KeyError:
                pass

    # We will choose a random guy in our nodes that is alive, and
    # sync with it
    def launch_full_sync(self):
        logger.debug("Launch_full_sync:: all nodes %d" % len(self.nodes), part='gossip')
        nodes = {}
        with self.nodes_lock:
            nodes = copy.copy(self.nodes)
        others = [ (n['addr'], n['port']) for n in nodes.values() if n['state'] == 'alive' and n['uuid'] != self.uuid]
        if len(others) >= 1:
            other = random.choice(others)
            logger.debug("launch_full_sync::", other, part='gossip')
            self.do_push_pull(other)
        #else:
        #    print "NO OTHER ALIVE NODES !"

    # We will choose some K random nodes and gossip them the broadcast messages to them
    def launch_gossip(self):
        # There is no broadcast message to sent so bail out :)
        if len(broadcaster.broadcasts) == 0:
            return
        ns = self.nodes.values()
        #ns.sort()
        logger.debug("launch_gossip:: all nodes %d" % len(self.nodes), part='gossip')
        others = [n for n in ns if n['uuid'] != self.uuid]
        # Maybe every one is dead, if o bail out
        if len(others) == 0:
            return
        nb_dest = min(len(others), KGOSSIP)
        dests = random.sample(others, nb_dest)
        for dest in dests:
            logger.debug("launch_gossip::", dest['name'], part='gossip')
            self.do_gossip_push(dest)

    # we ping some K random nodes, but in priority some nodes that we thouugh were deads
    # but talk to us
    # also exclude leave node, because thay said they are not here anymore ^^
    def ping_another(self):
        #print "PING ANOTHER"
        nodes = {}
        with self.nodes_lock:
            nodes = copy.copy(self.nodes)
        others = [ n for n in nodes.values() if n['uuid'] != self.uuid and n['state'] != 'leave']
        # first previously deads
        for uuid in self.to_ping_back:
            if uuid in nodes:
                self.do_ping(nodes[uuid])
        # now reset it
        self.to_ping_back = []
        # Now we take one in all the others
        if len(others) >= 1:
            other = random.choice(others)
            self.do_ping(other)

    # Launch a ping to another node and if fail set it as suspect
    def do_ping(self, other):
        ping_payload = {'type':'ping', 'seqno':0, 'node': other['name'], 'from': self.uuid}
        message = json.dumps(ping_payload)
        enc_message = encrypter.encrypt(message)
        addr = other['addr']
        port = other['port']
        _t = time.time()
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
            sock.sendto(enc_message, (addr, port) )
            logger.debug('PING waiting %s ack message' % other['name'], part='gossip')
            # Allow 3s to get an answer
            sock.settimeout(3)
            ret = sock.recv(65535)
            logger.debug('PING got a return from %s' % other['name'], len(ret), part='gossip')
            # An aswer? great it is alive!
            self.set_alive(other, strong=True)
        except (socket.timeout, socket.gaierror), exp:
            logger.debug("PING: error joining the other node %s:%s : %s" % (addr, port, exp), part='gossip')
            logger.debug("PING: go indirect mode", part='gossip')
            possible_relays = []
            with self.nodes_lock:
                possible_relays = [n for n in self.nodes.values() if n['uuid'] != self.uuid and n != other and n['state'] == 'alive']
            if len(possible_relays) == 0:
                logger.log("PING: no possible relays for ping", part='gossip')
                self.set_suspect(other)
            # NOTE(review): there is no early return after the no-relays case
            # above, so the code below still runs with an empty relay list and
            # ends up suspecting the node a second time after the timeout.
            # Take at least 3 relays to ask ping
            relays = random.sample(possible_relays, min(len(possible_relays), 3))
            logger.debug('POSSIBLE RELAYS', relays)
            ping_relay_payload = {'type':'ping-relay', 'seqno':0, 'tgt': other['uuid'], 'from': self.uuid}
            message = json.dumps(ping_relay_payload)
            enc_message = encrypter.encrypt(message)
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
            for r in relays:
                try:
                    sock.sendto(enc_message, (r['addr'], r['port']) )
                    logger.debug('PING waiting ack message', part='gossip')
                except socket.error, exp:
                    logger.error('Cannot send a ping relay to %s:%s' % (r['addr'], r['port']), part='gossip')
            # Allow 3s to get an answer from whatever relays got it
            sock.settimeout(3*2)
            try:
                ret = sock.recv(65535)
            except socket.timeout:
                # still noone succed to ping it? I suspect it
                self.set_suspect(other)
                sock.close()
                return
            msg = json.loads(ret)
            sock.close()
            logger.debug('PING: got an answer from a relay', msg, part='gossip')
            logger.debug('RELAY set alive', other['name'], part='gossip')
            # Ok it's no more suspected, great :)
            self.set_alive(other, strong=True)
        except socket.error, exp:
            logger.log("PING: cannot join the other node %s:%s : %s" % (addr, port, exp), part='gossip')

    # Randomly push some gossip broadcast messages and send them to
    # KGOSSIP others nodes
    def do_gossip_push(self, dest):
        message = ''
        to_del = []
        stack = []
        tags = dest['tags']
        for b in broadcaster.broadcasts:
            # not a valid node for this message, skip it
            # NOTE(review): stack_new_ts_broadcast stores the key as 'tags'
            # but this filter looks for 'tag', so tag filtering never applies.
            if 'tag' in b and b['tag'] not in tags:
                continue
            old_message = message
            send = b['send']
            # Retire messages that were already gossiped KGOSSIP times.
            if send >= KGOSSIP:
                to_del.append(b)
            bmsg = b['msg']
            stack.append(bmsg)
            message = json.dumps(stack)
            # Maybe we are now too large and we do not have just one
            # fucking big message, so we fail back to the old_message that was
            # in the good size and send it now
            if len(message) > 1400 and len(stack) != 1:
                message = old_message
                stack = stack[:-1]
                break
            # stack a sent to this broadcast message
            b['send'] += 1
        # Clean too much broadcasted messages
        for b in to_del:
            broadcaster.broadcasts.remove(b)
        # Void message? bail out
        if len(message) == 0:
            return
        addr = dest['addr']
        port = dest['port']
        # and go for it!
        try:
            enc_message = encrypter.encrypt(message)
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
            sock.sendto(enc_message, (addr, port) )
            logger.debug('BROADCAST: sent %d message (len=%d) to %s:%s' % (len(stack), len(enc_message), addr, port), part='gossip')
        except (socket.timeout, socket.gaierror), exp:
            logger.debug("ERROR: cannot sent the message %s" % exp, part='gossip')
        try:
            sock.close()
        except Exception:
            pass

    # Will try to join a node cluster and do a push-pull with at least one of them
    def join(self):
        logger.log("We will try to join our seeds members", self.seeds, part='gossip')
        tmp = self.seeds
        others = []
        if not len(self.seeds):
            logger.log("No seeds nodes, I'm a bootstrap node?")
            return
        # Parse "addr" or "addr:port" seed entries; default to our own port.
        for e in tmp:
            elts = e.split(':')
            addr = elts[0]
            port = self.port
            if len(elts) > 1:
                port = int(elts[1])
            others.append( (addr, port) )
        random.shuffle(others)
        while True:
            logger.log('JOINING myself %s is joining %s nodes' % (self.name, others), part='gossip')
            nb = 0
            for other in others:
                nb += 1
                r = self.do_push_pull(other)
                # Do not merge with more than KGOSSIP distant nodes
                # NOTE(review): `continue` at the end of the loop body is a
                # no-op, so this cap is not actually enforced; a `break`
                # before the push-pull was probably intended.
                if nb > KGOSSIP:
                    continue
            # If we got enough nodes, we exit
            if len(self.nodes) != 1 or self.interrupted or self.bootstrap:
                return
            # Do not hummer the cpu....
            time.sleep(0.1)

    # Go launch a push-pull to another node. We will sync all our nodes
    # entries, and each other will be able to learn new nodes and so
    # launch gossip broadcasts if need
    def do_push_pull(self, other):
        nodes = {}
        with self.nodes_lock:
            nodes = copy.copy(self.nodes)
        m = {'type': 'push-pull-msg', 'nodes': nodes}
        message = json.dumps(m)
        (addr, port) = other
        uri = 'http://%s:%s/push-pull' % (addr, port)
        payload = {'msg': message}
        try:
            r = rq.get(uri, params=payload)
            logger.debug("push-pull response", r, part='gossip')
            try:
                back = json.loads(r.text)
            except ValueError, exp:
                logger.debug('ERROR CONNECTING TO %s:%s' % other, exp, part='gossip')
                return False
            pubsub.pub('manage-message', msg=back)
            #self.manage_message(back)
            return True
        except rq.exceptions.RequestException,exp: #Exception, exp:
            logger.debug('ERROR CONNECTING TO %s:%s' % other, exp, part='gossip')
            return False

    # suspect nodes are set with a suspect_time entry. If it's too old,
    # set the node as dead, and broadcast the information to everyone
    def look_at_deads(self):
        # suspect a node for 5 * log(n+1) * interval
        node_scale = math.ceil(math.log10(float(len(self.nodes) + 1)))
        probe_interval = 1
        suspicion_mult = 5
        suspect_timeout = suspicion_mult * node_scale * probe_interval
        leave_timeout = suspect_timeout * 3 # something like 30s
        #print "SUSPECT timeout", suspect_timeout
        now = int(time.time())
        nodes = {}
        with self.nodes_lock:
            for node in self.nodes.values():
                # Only look at suspect nodes of course...
                if node['state'] != 'suspect':
                    continue
                stime = node.get('suspect_time', now)
                if stime < (now - suspect_timeout):
                    logger.log("SUSPECT: NODE", node['name'], node['incarnation'], node['state'], "is NOW DEAD", part='gossip')
                    node['state'] = 'dead'
                    self.stack_dead_broadcast(node)
        # Now for leave nodes, this time we will really remove the entry from our nodes
        # NOTE(review): `nodes` is still the empty dict initialized above and
        # is never filled with a copy of self.nodes, so this loop never runs
        # and leaved entries are never purged. A `nodes = copy.copy(self.nodes)`
        # inside the lock was probably intended.
        to_del = []
        for (uuid, node) in nodes.iteritems():
            # Only look at suspect nodes of course...
            if node['state'] != 'leave':
                continue
            ltime = node.get('leave_time', now)
            print "LEAVE TIME", node['name'], ltime, now - leave_timeout, (now - leave_timeout) - ltime
            if ltime < (now - leave_timeout):
                logger.log("LEAVE: NODE", node['name'], node['incarnation'], node['state'], "is now definitivly leaved. We remove it from our nodes", part='gossip')
                to_del.append(uuid)
        # now really remove them from our list :)
        for uuid in to_del:
            try:
                del self.nodes[uuid]
            # NOTE(review): dict deletion raises KeyError, not IndexError;
            # this handler never fires (same issue as __delitem__).
            except IndexError: # not here? it was was we want
                pass

    ########## Message managment
    def create_alive_msg(self, node):
        return {'type':'alive', 'name':node['name'], 'addr':node['addr'], 'port': node['port'], 'uuid':node['uuid'],
                'incarnation':node['incarnation'], 'state':'alive', 'tags':node['tags'], 'services':node['services']}

    def create_event_msg(self, payload):
        return {'type':'event', 'from':self.uuid, 'payload':payload, 'ctime':int(time.time()), 'eventid':uuid.uuid1().get_hex()}

    def create_suspect_msg(self, node):
        return {'type':'suspect', 'name':node['name'], 'addr':node['addr'], 'port': node['port'], 'uuid':node['uuid'],
                'incarnation':node['incarnation'], 'state':'suspect', 'tags':node['tags'], 'services':node['services']}

    def create_dead_msg(self, node):
        return {'type':'dead', 'name':node['name'], 'addr':node['addr'], 'port': node['port'], 'uuid':node['uuid'],
                'incarnation':node['incarnation'], 'state':'dead', 'tags':node['tags'], 'services':node['services']}

    def create_leave_msg(self, node):
        return {'type':'leave', 'name':node['name'], 'addr':node['addr'], 'port': node['port'], 'uuid':node['uuid'],
                'incarnation':node['incarnation'], 'state':'leave', 'tags':node['tags'], 'services':node['services']}

    def create_new_ts_msg(self, key):
        return {'type':'/ts/new', 'from':self.uuid, 'key':key}

    def stack_alive_broadcast(self, node):
        msg = self.create_alive_msg(node)
        b = {'send':0, 'msg':msg}
        broadcaster.broadcasts.append(b)
        # Also send it to the websocket if there
        self.forward_to_websocket(msg)
        return

    def stack_event_broadcast(self, payload):
        msg = self.create_event_msg(payload)
        b = {'send':0, 'msg':msg}
        broadcaster.broadcasts.append(b)
        return

    def stack_new_ts_broadcast(self, key):
        msg = self.create_new_ts_msg(key)
        # NOTE(review): keyed as 'tags' here but read as 'tag' in
        # do_gossip_push, so the intended ts-only routing does not happen.
        b = {'send':0, 'msg':msg, 'tags':'ts'}
        broadcaster.broadcasts.append(b)
        return

    def stack_suspect_broadcast(self, node):
        msg = self.create_suspect_msg(node)
        b = {'send':0, 'msg':msg}
        broadcaster.broadcasts.append(b)
        # Also send it to the websocket if there
        self.forward_to_websocket(msg)
        return b

    def stack_leave_broadcast(self, node):
        msg = self.create_leave_msg(node)
        b = {'send':0, 'msg':msg}
        broadcaster.broadcasts.append(b)
        # Also send it to the websocket if there
        self.forward_to_websocket(msg)
        return b

    def stack_dead_broadcast(self, node):
        msg = self.create_dead_msg(node)
        b = {'send':0, 'msg':msg}
        broadcaster.broadcasts.append(b)
        self.forward_to_websocket(msg)
        return b

    def forward_to_websocket(self, msg):
        websocketmgr.forward({'channel': 'gossip', 'payload': msg})

    ############## Http interface
    # We must create http callbacks in running because
    # we must have the self object
    def export_http(self):

        @route('/agent/name')
        def get_name():
            return self.nodes[self.uuid]['name']

        @route('/agent/leave/:nname')
        def set_node_leave(nname):
            node = None
            with self.nodes_lock:
                for n in self.nodes.values():
                    if n['name'] == nname:
                        node = n
            if node is None:
                return abort(404, 'This node is not found')
            logger.log('PUTTING LEAVE the node %s' % n, part='http')
            self.set_leave(node)
            return

        @route('/agent/members')
        def agent_members():
            response.content_type = 'application/json'
            logger.debug("/agent/members is called", part='http')
            nodes = {}
            with self.nodes_lock:
                nodes = copy.copy(self.nodes)
            return nodes

        @route('/agent/join/:other')
        def agent_join(other):
            response.content_type = 'application/json'
            addr = other
            port = self.port
            if ':' in other:
                parts = other.split(':', 1)
                addr = parts[0]
                port = int(parts[1])
            tgt = (addr, port)
            logger.debug("HTTP: agent join for %s:%s " % (addr, port), part='http')
            r = self.do_push_pull(tgt)
            logger.debug("HTTP: agent join for %s:%s result:%s" % (addr, port, r), part='http')
            return json.dumps(r)
|
pombredanne/kunai
|
kunai/gossip.py
|
Python
|
mit
| 28,201
|
import re
from flask_wtf import Form
from flask.ext.babel import gettext,lazy_gettext
from wtforms import TextField, PasswordField, BooleanField
from wtforms.validators import InputRequired, Email, EqualTo, Length
from app.data.models import User
from app.fields import Predicate
def username_is_available(username):
    """Return True when no existing account already uses *username*."""
    return not User.if_exists(username)
def safe_characters(s):
    """Validate that *s* contains only word characters.

    Empty/falsy values pass: presence is enforced separately by
    InputRequired. Based off the Google username validator.
    Note that ``\\w`` also accepts underscores.
    """
    if not s:
        return True
    return bool(re.match(r'^[\w]+$', s))
class UserForm(Form):
    # Base profile form shared by registration and admin editing: username,
    # email, first name (jmeno) and surname (prijmeni, Czech field names).
    username = TextField(lazy_gettext('Username'), validators=[
        Predicate(safe_characters, message=lazy_gettext("Please use only letters (a-z) and numbers")),
        Predicate(username_is_available,message=lazy_gettext("An account has already been registered with that username. Try another?")),
        Length(min=2, max=30, message=lazy_gettext("Please use between 2 and 30 characters")),
        InputRequired(message=lazy_gettext("You can't leave this empty"))])
    #username = TextField(lazy_gettext('Username'), validators=[DataRequired(lazy_gettext('This field is required.')), Length(min=2, max=20)])
    email = TextField(lazy_gettext('Email'), validators=[
        Email(message=lazy_gettext('Please enter a valid email address')),
        InputRequired(message=lazy_gettext('You can\'t leave this empty'))])
    #email = TextField(lazy_gettext('Email'), validators=[Email(lazy_gettext('Invalid email address.')), DataRequired(lazy_gettext('This field is required.')), Length(max=128)])
    jmeno = TextField(lazy_gettext('First Name'), validators=[
        Length(min=2, max=64, message=lazy_gettext("Please use between 2 and 64 characters")),
        Predicate(safe_characters, message=lazy_gettext("Please use only letters (a-z) and numbers")),
        InputRequired(message=lazy_gettext("You can't leave this empty"))])
    prijmeni = TextField(lazy_gettext('Surname'), validators=[
        Length(min=2, max=64, message=lazy_gettext("Please use between 2 and 64 characters")),
        Predicate(safe_characters, message=lazy_gettext("Please use only letters (a-z) and numbers")),
        InputRequired(message=lazy_gettext("You can't leave this empty"))])

    def __init__(self, *args, **kwargs):
        # Plain pass-through to the Flask-WTF Form constructor.
        Form.__init__(self, *args, **kwargs)
class RegisterUserForm(UserForm):
    # Registration form: adds password + confirmation and TOS acceptance
    # on top of the base profile fields.
    password = PasswordField(lazy_gettext('Password'),validators=[
        InputRequired(message=lazy_gettext("You can't leave this empty")),
        EqualTo('confirm',message=lazy_gettext('Passwords must match.')),
        Predicate(safe_characters, message=lazy_gettext("Please use only letters (a-z) and numbers")),
        Length(min=2, max=30, message=lazy_gettext("Please use between 2 and 30 characters"))])
    #password = PasswordField(lazy_gettext('Password'),validators=[DataRequired(lazy_gettext('This field is required.')),EqualTo('confirm',message=lazy_gettext('Passwords must match.')),Length(min=6, max=20)])
    confirm = PasswordField(lazy_gettext('Confirm Password'), validators=[
        InputRequired(message=lazy_gettext("You can't leave this empty"))])
    #confirm = PasswordField(lazy_gettext('Confirm Password'), validators=[DataRequired(lazy_gettext('This field is required.'))])
    accept_tos = BooleanField(lazy_gettext('I accept the TOS'), validators=[
        InputRequired(message=lazy_gettext("You can't leave this empty"))])
    #accept_tos = BooleanField(lazy_gettext('I accept the TOS'), validators=[DataRequired(lazy_gettext('This field is required.'))])

    def __init__(self, *args, **kwargs):
        Form.__init__(self, *args, **kwargs)
        # Filled by the view after successful validation/creation.
        self.user = None
class EditUserForm(UserForm):
    # Admin edit form: the username override drops the availability and
    # character validators, and the admin/activation flags become editable.
    username = TextField(lazy_gettext('Username'))
    is_admin = BooleanField(lazy_gettext('Admin'))
    active = BooleanField(lazy_gettext('Activated'))
|
kua-hosi-GRUp/Flask-Bones
|
app/public/forms/user.py
|
Python
|
mit
| 3,934
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._blob_containers_operations import build_clear_legal_hold_request, build_create_or_update_immutability_policy_request, build_create_request, build_delete_immutability_policy_request, build_delete_request, build_extend_immutability_policy_request, build_get_immutability_policy_request, build_get_request, build_lease_request, build_list_request, build_lock_immutability_policy_request, build_set_legal_hold_request, build_update_request
T = TypeVar('T')
# Type of the optional custom `cls` callback that post-processes responses.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BlobContainersOperations:
    """BlobContainersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a
    Client instance that instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.storage.v2018_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Alias so the operation group's model classes are reachable via ``ops.models``.
    models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def list(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.ListContainerItems":
"""Lists all containers and does not support a prefix like data plane. Also SRP today does not
return continuation token.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListContainerItems, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ListContainerItems
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainerItems"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ListContainerItems', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers'} # type: ignore
@distributed_trace_async
async def create(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: "_models.BlobContainer",
**kwargs: Any
) -> "_models.BlobContainer":
"""Creates a new container under the specified account as described by request body. The container
resource includes metadata and properties for that container. It does not include a list of the
blobs contained by the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param blob_container: Properties of the blob container to create.
:type blob_container: ~azure.mgmt.storage.v2018_02_01.models.BlobContainer
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(blob_container, 'BlobContainer')
request = build_create_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def update(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: "_models.BlobContainer",
**kwargs: Any
) -> "_models.BlobContainer":
"""Updates container properties as specified in request body. Properties not mentioned in the
request will be unchanged. Update fails if the specified container doesn't already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param blob_container: Properties to update for the blob container.
:type blob_container: ~azure.mgmt.storage.v2018_02_01.models.BlobContainer
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(blob_container, 'BlobContainer')
request = build_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
account_name: str,
container_name: str,
**kwargs: Any
) -> "_models.BlobContainer":
"""Gets properties of a specified container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def delete(
self,
resource_group_name: str,
account_name: str,
container_name: str,
**kwargs: Any
) -> None:
"""Deletes specified container under its account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def set_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: "_models.LegalHold",
**kwargs: Any
) -> "_models.LegalHold":
"""Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
follows an append pattern and does not clear out the existing tags that are not specified in
the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param legal_hold: The LegalHold property that will be set to a blob container.
:type legal_hold: ~azure.mgmt.storage.v2018_02_01.models.LegalHold
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.LegalHold
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(legal_hold, 'LegalHold')
request = build_set_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.set_legal_hold.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LegalHold', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold'} # type: ignore
@distributed_trace_async
async def clear_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: "_models.LegalHold",
**kwargs: Any
) -> "_models.LegalHold":
"""Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
operation. ClearLegalHold clears out only the specified tags in the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param legal_hold: The LegalHold property that will be clear from a blob container.
:type legal_hold: ~azure.mgmt.storage.v2018_02_01.models.LegalHold
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.LegalHold
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(legal_hold, 'LegalHold')
request = build_clear_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.clear_legal_hold.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LegalHold', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
clear_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold'} # type: ignore
@distributed_trace_async
async def create_or_update_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: Optional[str] = None,
parameters: Optional["_models.ImmutabilityPolicy"] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but
not required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob
container.
:type parameters: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'ImmutabilityPolicy')
else:
_json = None
request = build_create_or_update_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
if_match=if_match,
template_url=self.create_or_update_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def get_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: Optional[str] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Gets the existing immutability policy along with the corresponding ETag in response headers and
body.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.get_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def delete_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Aborts an unlocked immutability policy. The response of delete has
immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this
operation. Deleting a locked immutability policy is not allowed, only way is to delete the
container after deleting all blobs inside the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.delete_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
delete_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def lock_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is
ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_lock_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.lock_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
lock_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock'} # type: ignore
@distributed_trace_async
async def extend_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
parameters: Optional["_models.ImmutabilityPolicy"] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only
action allowed on a Locked policy will be this action. ETag in If-Match is required for this
operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be extended for a blob
container.
:type parameters: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'ImmutabilityPolicy')
else:
_json = None
request = build_extend_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
if_match=if_match,
json=_json,
template_url=self.extend_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
extend_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend'} # type: ignore
@distributed_trace_async
async def lease(
self,
resource_group_name: str,
account_name: str,
container_name: str,
parameters: Optional["_models.LeaseContainerRequest"] = None,
**kwargs: Any
) -> "_models.LeaseContainerResponse":
"""The Lease Container operation establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param parameters: Lease Container request body.
:type parameters: ~azure.mgmt.storage.v2018_02_01.models.LeaseContainerRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LeaseContainerResponse, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_02_01.models.LeaseContainerResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseContainerResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'LeaseContainerRequest')
else:
_json = None
request = build_lease_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.lease.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LeaseContainerResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_blob_containers_operations.py
|
Python
|
mit
| 46,817
|
"""Survey the Riemenschneider Bach chorale corpus.

Gathers soprano pitch ranges, key signatures and time signatures for every
four-part chorale and saves a histogram of the melodic ranges.
"""
import music21  # type: ignore
from typing import Dict, List  # Dict was used in type comments but never imported
import matplotlib.pyplot as plt  # type: ignore
bcl = music21.corpus.chorales.ChoraleListRKBWV()
num_chorales = max(bcl.byRiemenschneider.keys(), key=int)
bwv_to_rs_num_map = {}  # type: Dict[str, int]
rs_numbers = []  # type: List[int]
min_pitches = []  # type: List[int]
max_pitches = []  # type: List[int]
pitch_ranges = []  # type: List[int]
timesig_strs = []  # type: List[str]
# we just take pitches from some arbitrary chorale to seed the running extrema.
# BUG FIX: keep the seed BWV in the same bare form ('26.6') as the catalogue's
# "bwv" values so bwv_to_rs_num_map lookups and corpus.parse calls stay
# consistent; the old 'bwv26.6' form could never match a catalogue key.
a_chorale_bwv = '26.6'
a_chorale = music21.corpus.parse('bach/bwv' + a_chorale_bwv)
some_pitches = a_chorale.parts[0].flat.pitches
min_min_pitch = some_pitches[0]
max_max_pitch = some_pitches[0]
min_min_bwv = a_chorale_bwv  # type: str
max_max_bwv = a_chorale_bwv  # type: str
# allow for +/- 7 sharps
num_sharps = []  # type: List[int]
min_sharps = 0
max_sharps = 0
min_sharps_bwv = a_chorale_bwv
max_sharps_bwv = a_chorale_bwv
for i in bcl.byRiemenschneider:
    info = bcl.byRiemenschneider[i]
    bwv = info["bwv"]  # type: str
    bwv_to_rs_num_map[bwv] = i
    c = music21.corpus.parse('bach/bwv' + str(info["bwv"]))
    # Only four-part (SATB) chorales are comparable for this survey.
    if len(c.parts) != 4:
        print("Skipping {} of {} (BWV {}): not in four parts."
              .format(i, num_chorales, bwv))
        continue
    print("Processing {} of {} (BWV {})".format(i, num_chorales, bwv))
    rs_numbers.append(i)
    # process key signature
    ks = c.recurse().getElementsByClass('KeySignature')[0]
    num_sharps.append(ks.sharps)
    if ks.sharps < min_sharps:
        min_sharps = ks.sharps
        min_sharps_bwv = bwv
    if ks.sharps > max_sharps:
        max_sharps = ks.sharps
        max_sharps_bwv = bwv
    # process time signature
    ts = c.recurse().getElementsByClass('TimeSignature')[0]
    if ts.ratioString not in timesig_strs:
        timesig_strs.append(ts.ratioString)
    if ts.ratioString == "3/2":
        print("--> this one in 3/2!")
    # process min/max pitches of the soprano (melody) part
    melody_pitches = c.parts[0].flat.pitches
    min_pitch = melody_pitches[0]
    max_pitch = melody_pitches[0]
    for p in melody_pitches:
        if p < min_pitch:
            min_pitch = p
        if p > max_pitch:
            max_pitch = p
    if min_pitch < min_min_pitch:
        min_min_pitch = min_pitch
        min_min_bwv = bwv
    if max_pitch > max_max_pitch:
        max_max_pitch = max_pitch
        max_max_bwv = bwv
    min_pitches.append(min_pitch.midi)
    max_pitches.append(max_pitch.midi)
    pitch_ranges.append(max_pitch.midi - min_pitch.midi)
print("Done.")
print("=" * 50)
# .get(): the extrema may still belong to the seed chorale, which is not
# guaranteed to appear in the catalogue.
print("min pitch:", min_min_pitch,
      "bwv:", min_min_bwv, "rs:", bwv_to_rs_num_map.get(min_min_bwv, 'n/a'))
print("max pitch:", max_max_pitch,
      "bwv:", max_max_bwv, "rs:", bwv_to_rs_num_map.get(max_max_bwv, 'n/a'))
# BUG FIX: parse with the same 'bach/bwv' prefix as in the main loop; the bare
# catalogue number alone is not a resolvable corpus path.
ultimin = music21.corpus.parse('bach/bwv' + min_min_bwv)
sop_pitches = ultimin.parts[0].flat.pitches
max_p = sop_pitches[0]
for p in sop_pitches:
    if p > max_p:
        max_p = p
print("max pitch of chorale with global pitch minimum", max_p)
ultimax = music21.corpus.parse('bach/bwv' + max_max_bwv)
sop_pitches = ultimax.parts[0].flat.pitches
min_p = sop_pitches[0]
for p in sop_pitches:
    if p < min_p:
        min_p = p
print("min pitch of chorale with global pitch maximum", min_p)
print("min sharps:", min_sharps,
      "bwv:", min_sharps_bwv, "rs:", bwv_to_rs_num_map.get(min_sharps_bwv, 'n/a'))
print("max sharps:", max_sharps,
      "bwv:", max_sharps_bwv, "rs:", bwv_to_rs_num_map.get(max_sharps_bwv, 'n/a'))
print("time sig used:", timesig_strs)
plt.figure(1)
plt.title("Range of Chorales")
plt.hist(pitch_ranges, bins=range(min(pitch_ranges), max(pitch_ranges)+2),
         rwidth=0.9, align='left', color='g')
plt.xticks(range(min(pitch_ranges), max(pitch_ranges)+1))
plt.xlabel('Pitch Range')
plt.ylabel('Frequency')
#plt.show()
plt.savefig('range_dist.svg')
#plt.figure(2)
#plt.title("Distribution of keys in Chorales")
#plt.hist(num_sharps, bins=range(min(num_sharps), max(num_sharps)+2), rwidth=0.9,
#         align='left')
#plt.xticks(range(-4,5))
#plt.xlabel('Number of sharps')
#plt.ylabel('Frequency')
#
#plt.savefig('key_dist.svg')
# plt.figure(3)
# plt.title("Distribution of time signatures in chorales")
# plt.plot(timesig_denoms, timesig_nums)
# plt.xlabel('Division')
# plt.ylabel('Bar length')
# plt.show()
|
alexcoplan/p2proj
|
src/script/analyse_corpus.py
|
Python
|
mit
| 4,091
|
# coding:utf-8
import requests
from bs4 import BeautifulSoup
from multiprocessing.dummy import Pool as tp
import os
from ebooklib import epub
import base64
# Thread pool (multiprocessing.dummy) for fetching chapter pages concurrently.
pool = tp(processes=16)
# EPUB is written into the current working directory.
dirn = os.getcwd()
# Browser-like User-Agent for all requests to ncode.syosetu.com.
hd = {'User-Agent': 'Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; Trident/5.0)'}
# Optional requests proxy mapping; left disabled by default.
p = None
# p = {"http": "http://127.0.0.1:8080"}
# Base64-encoded CSS for the EPUB navigation document (decoded before use).
style = b'CkBuYW1lc3BhY2UgZXB1YiAiaHR0cDovL3d3dy5pZHBmLm9yZy8yMDA3L29wcyI7CmJvZHkgewogICAgZm9udC1mYW1pbHk6IENhbWJyaWEsIExpYmVyYXRpb24gU2VyaWYsIEJpdHN0cmVhbSBWZXJhIFNlcmlmLCBHZW9yZ2lhLCBUaW1lcywgVGltZXMgTmV3IFJvbWFuLCBzZXJpZjsKfQpoMiB7CiAgICAgdGV4dC1hbGlnbjogbGVmdDsKICAgICB0ZXh0LXRyYW5zZm9ybTogdXBwZXJjYXNlOwogICAgIGZvbnQtd2VpZ2h0OiAyMDA7ICAgICAKfQpvbCB7CiAgICAgICAgbGlzdC1zdHlsZS10eXBlOiBub25lOwp9Cm9sID4gbGk6Zmlyc3QtY2hpbGQgewogICAgICAgIG1hcmdpbi10b3A6IDAuM2VtOwp9Cm5hdltlcHVifHR5cGV+PSd0b2MnXSA+IG9sID4gbGkgPiBvbCAgewogICAgbGlzdC1zdHlsZS10eXBlOnNxdWFyZTsKfQpuYXZbZXB1Ynx0eXBlfj0ndG9jJ10gPiBvbCA+IGxpID4gb2wgPiBsaSB7CiAgICAgICAgbWFyZ2luLXRvcDogMC4zZW07Cn0K'
def getpage(link):
    """Fetch *link* once and return it parsed with BeautifulSoup/lxml."""
    print('Getting: ' + link)
    resp = requests.get(link, headers=hd, proxies=p)
    return BeautifulSoup(resp.content, 'lxml')
def getpage2(link):
    """Fetch a (key, url) work item with up to three attempts.

    :param link: tuple of (key, url); the key is passed through unchanged so
        results from ``pool.map`` can be matched back to the work list.
    :return: tuple of (key, BeautifulSoup of the fetched page).
    :raises IOError: if every attempt raised (previously this crashed with an
        unrelated NameError because ``gethtml`` was never bound).
    """
    print('Getting: ' + link[0])
    gethtml = None
    for _attempt in range(3):
        try:
            gethtml = requests.get(link[1], headers=hd, proxies=p)
            if gethtml.status_code == 200:
                break
        except Exception:
            # Network hiccup: retry up to the attempt limit.
            continue
    if gethtml is None:
        # BUG FIX: the original fell through to an unbound-variable NameError
        # here; fail loudly with a meaningful exception instead.
        raise IOError('Failed to fetch ' + link[1])
    return (link[0], BeautifulSoup(gethtml.content, 'lxml'))
def makehtml(i):
    """Build a standalone chapter page from a parsed chapter soup.

    Returns (subtitle, html) where *html* wraps the chapter body in a minimal
    document with the subtitle as both title and heading.
    """
    subtitle = i.find('p', class_="novel_subtitle").get_text()
    body = i.find('div', id="novel_honbun", class_="novel_view").prettify()
    page = ('<html>\n<head>\n' '<title>' + subtitle + '</title>\n</head>\n'
            '<body>\n<div>\n<h3>' + subtitle + '</h3>\n' + body +
            '</div>\n</body>\n</html>')
    return (subtitle, page)
# ---- main script -------------------------------------------------------
# Prompt for a ncode.syosetu.com novel id, scrape every chapter and write a
# styled EPUB into the current directory.
syoid = input('Enter Novel ID of the novel: ')
menupage = getpage('http://ncode.syosetu.com/' + syoid + '/')
firstpage = getpage('http://ncode.syosetu.com/' + syoid + '/1/')
author = menupage.find('div', class_="novel_writername").get_text().split(':')[-1]
pagenum = int(firstpage.find('div', id='novel_no').get_text().split('/')[-1])
maintitle = menupage.find('title').get_text().split(' - ')[0]
about = menupage.find("div", id="novel_ex").prettify()
print('Started. Title: ' + maintitle)
if menupage.find('div', class_="chapter_title") is None:
    # Flat novel: no chapter headings, just sequentially numbered pages.
    worklist = [(str(i), 'http://ncode.syosetu.com/' + syoid + '/' + str(i) + '/') for i in range(1, pagenum + 1)]
    plist = pool.map(getpage2, worklist)
    hl = []
    for j in plist:
        hl.append((j[0], makehtml(j[1])))
    book = epub.EpubBook()
    book.set_identifier(syoid)
    book.set_title(maintitle)
    book.set_language('jp')
    book.add_author(author)
    sabout = '<html>\n<head>\n<title>小説紹介</title>\n</head>\n<body>\n<div>\n<h3>小説紹介</h3>\n' + about + '</div>\n</body>\n</html>'
    cabout = epub.EpubHtml(title='About', file_name='0.xhtml', content=sabout, lang='ja_jp')
    book.add_item(cabout)
    conlist = [epub.EpubHtml(title=i[1][0], file_name=i[0] + '.xhtml', content=i[1][1], lang='ja_jp') for i in hl]
    for i in conlist:
        book.add_item(i)
    contuple = conlist
    # BUG FIX: the original inserted the literal string 'cabout' rather than
    # the intro EpubHtml item, corrupting the TOC and (via the shared list)
    # the spine.
    contuple.insert(0, cabout)
    contuple = tuple(contuple)
    book.toc = (epub.Link('0.xhtml', '小説紹介', 'intro'), (epub.Section('目録:'), contuple))
    book.add_item(epub.EpubNcx())
    book.add_item(epub.EpubNav())
    # BUG FIX: str() on bytes would embed a "b'...'" artifact in the CSS;
    # decode it properly instead.
    dstyle = base64.b64decode(style).decode('utf-8')
    css = epub.EpubItem(uid="style_nav", file_name="style/nav.css", media_type="text/css", content=dstyle)
    book.add_item(css)
    book.spine = ['nav'] + conlist
    epub.write_epub(dirn + '\\' + maintitle + '.epub', book, {})
    print('Succeed.')
else:
    # Chapterised novel: group pages under their chapter headings.
    book = epub.EpubBook()
    book.set_identifier(syoid)
    book.set_title(maintitle)
    book.set_language('jp')
    book.add_author(author)
    sabout = '<html>\n<head>\n<title>小説紹介</title>\n</head>\n<body>\n<div>\n<h3>小説紹介</h3>\n' + about + '</div>\n</body>\n</html>'
    cabout = epub.EpubHtml(title='About', file_name='0.xhtml', content=sabout, lang='ja_jp')
    catalog = menupage.find('div', class_='index_box').find_all(["div", "dd"], class_=['chapter_title', 'subtitle'])
    book.add_item(cabout)
    # worklist[0] holds pages before the first chapter heading; each later
    # element is [chapter_title, [(page_num, url), ...]].
    worklist = [[]]
    j = 0
    for i in catalog:
        if i.name == 'div':
            worklist.append([i.get_text(), []])
            j += 1
        if i.name == 'dd':
            if j == 0:
                num = i.find('a')['href'].split('/')[-2]
                worklist[0].append((num, 'http://ncode.syosetu.com/' + syoid + '/' + num + '/'))
            else:
                num = i.find('a')['href'].split('/')[-2]
                worklist[j][1].append((num, 'http://ncode.syosetu.com/' + syoid + '/' + num + '/'))
    pagelist = [cabout]
    numlist = []
    for k in range(len(worklist)):
        i = worklist[k]
        if k == 0 and len(i) != 0:
            plist = pool.map(getpage2, i)
            hl = []
            for j in range(len(plist)):
                h = makehtml(plist[j][1])
                num = str(k) + ' - ' + str(j + 1)
                pag = epub.EpubHtml(h[0], file_name=num + '.xhtml', content=h[1], lang='ja_jp')
                book.add_item(pag)
                pagelist.append(pag)
                hl.append((h[0], num))
            numlist.append(hl)
        elif k == 0 and len(i) == 0:
            numlist.append([])
        elif isinstance(i[1], list):
            plist = pool.map(getpage2, i[1])
            hl = []
            for j in range(len(plist)):
                num = str(k) + ' - ' + str(j + 1)
                h = makehtml(plist[j][1])
                pag = epub.EpubHtml(h[0], file_name=num + '.xhtml', content=h[1], lang='ja_jp')
                book.add_item(pag)
                pagelist.append(pag)
                hl.append((h[0], num))
            numlist.append([i[0], hl])
    toclist = [epub.Link('0.xhtml', '小説紹介', 'intro')]
    if numlist[0] != []:
        for i in numlist[0]:
            toclist.append(epub.Link(i[1] + '.xhtml', i[0], i[1]))
    for i in numlist[1:]:
        intuple = tuple([epub.Link(j[1] + '.xhtml', j[0], j[1]) for j in i[1]])
        toclist.append((epub.Section(i[0]), intuple))
    book.toc = tuple(toclist)
    book.add_item(epub.EpubNcx())
    book.add_item(epub.EpubNav())
    # BUG FIX: decode the CSS bytes instead of str()-ing them (see above).
    dstyle = base64.b64decode(style).decode('utf-8')
    css = epub.EpubItem(uid="style_nav", file_name="style/nav.css", media_type="text/css", content=dstyle)
    book.add_item(css)
    book.spine = ['nav'] + pagelist
    epub.write_epub(dirn + '\\' + maintitle + '.epub', book, {})
    print('Succeed.')
|
tongyuantongyu/Syosetsu-EPUB-Maker
|
Syosetu.py
|
Python
|
mit
| 6,683
|
from sqlalchemy.ext.associationproxy import association_proxy
from server.models.db import db
from server.models.user import User
class Project(db.Model):
    """A project and its associated users, languages and keywords."""
    __tablename__ = 'projects'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text)
    description = db.Column(db.Text)
    source = db.Column(db.String(256))
    type = db.Column(db.Enum('bitbucket', 'git'))
    # Association proxies expose the User objects behind the link tables.
    users = association_proxy('project_user', 'user')
    favorite_users = association_proxy('project_favorite', 'user')
    languages = db.relationship('Language', secondary='project_languages')
    keywords = db.relationship('Keyword', secondary='project_keywords')
    def _get_users_with_role(self, role: str) -> list:
        """Return the project's users whose association carries *role*.

        :param role: role of user | owner || contributer || designer
        :return: list of users corresponding to the given role
        """
        return [member for member in self.users.col if member.role == role]
    @property
    def owners(self) -> list:
        """Users associated with this project under the 'owner' role."""
        return self._get_users_with_role('owner')
    @property
    def contributers(self) -> list:
        """Users associated with this project under the 'contributer' role."""
        return self._get_users_with_role('contributer')
    @property
    def designers(self) -> list:
        """Users associated with this project under the 'designer' role."""
        return self._get_users_with_role('designer')
|
ganemone/ontheside
|
server/models/project.py
|
Python
|
mit
| 1,665
|
# use greenlets for greater concurrency
#from gevent import monkey; monkey.patch_all()
import bottle
import json
import os
from bottle import request, response, static_file
from simfile import SMParser
from urllib.parse import parse_qsl
# Toggles bottle's debug mode and auto-reloader in run().
PROD = True
# Absolute paths so the app works regardless of the launch directory.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
SIMFILES_DIR = PROJECT_ROOT + '/simfiles'
BUILD_DIR = PROJECT_ROOT + '/frontend/build'
STATIC_DIR = BUILD_DIR + '/static'
app = bottle.Bottle()
@app.get('/')
def root():
    # Serve the built frontend entry point.
    return static_file('index.html', root=BUILD_DIR)
@app.get('/static/css/<filename>')
def static_css(filename):
    # Serve compiled CSS assets from the frontend build.
    return static_file(filename, root=STATIC_DIR + '/css')
@app.get('/static/js/<filename>')
def static_js(filename):
    # Serve compiled JS assets from the frontend build.
    return static_file(filename, root=STATIC_DIR + '/js')
@app.get('/api/v1/simfiles')
def api_v1_simfiles():
    """Return the available simfiles as JSON [{value, label}], sorted
    case-insensitively for the frontend's select widget."""
    names = sorted(os.listdir(SIMFILES_DIR), key=str.lower)
    return json.dumps([{'value': name, 'label': name} for name in names])
@app.get('/api/v1/simfiles/<name>', method='GET')
def api_v1_simfiles_name(name):
    """Analyze one simfile and return {"result": ...} or {"errors": ...}.

    Query-string parameters are forwarded to SMParser.analyze after dropping
    any names its signature does not accept. `name` is basename()d to prevent
    path traversal outside SIMFILES_DIR.
    """
    query_params = dict(parse_qsl(request.query_string))
    # strip invalid params (only keep what SMParser.analyze accepts)
    for param in list(query_params.keys()):
        if param not in SMParser.analyze.__code__.co_varnames:
            query_params.pop(param)
    try:
        with open(SIMFILES_DIR + '/' + os.path.basename(name), encoding='utf_8_sig') as fh:
            parsed = SMParser(fh.read())
            # just override this for now, not all charts have a Hard/Expert chart
            query_params['difficulty'] = list(parsed.charts['Single'].keys())[-1]
            return {
                "result": parsed.analyze(**query_params)
            }
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no .message attribute; the old
        # code raised AttributeError here and masked the real error.
        return {"errors": "Could not load simfile (bad param?): " + str(e)}
def run():
    """Start the app under gunicorn, honoring the $PORT environment variable."""
    bottle.debug(not PROD)
    server_opts = {
        'app': app,
        'host': '0.0.0.0',
        'port': os.environ.get('PORT', 8000),
        'reloader': not PROD,
        'server': 'gunicorn',
        'workers': 8,
    }
    bottle.run(**server_opts)
if __name__=='__main__':
    # Allow `python app.py` in addition to an external WSGI launcher.
    run()
|
zachwalton/truebpm
|
app.py
|
Python
|
mit
| 2,017
|
import os
import sys
import glob
import sched   # NOTE(review): appears unused in this file — confirm before removing
import serial
import signal
import struct  # NOTE(review): appears unused in this file — confirm before removing
import time
import thread  # NOTE(review): Python 2 module; appears unused — confirm
# Module-level serial handle so sigint_handler can close it on Ctrl-C.
port = None
# Serial line rate shared with the dumper firmware.
baud = 2000000
# Single-byte opcodes understood by the dumper firmware.
commands = {
    'CTRL': chr(0),
    'READSECTION': chr(1),
    'WRITESECTION': chr(2)
}
# SNES header country names, indexed by header byte 25.
countries = [
    'Japan (NTSC)','USA (NTSC)','Europe, Oceania, Asia (PAL)','Sweden (PAL)',
    'Finland (PAL)','Denmark (PAL)','France (PAL)','Holland (PAL)',
    'Spain (PAL)','Germany, Austria, Switz (PAL)','Italy (PAL)',
    'Hong Kong, China (PAL)','Indonesia (PAL)','Korea (PAL)'
]
def main():
#get enviroment specific list of serial ports
if sys.platform.startswith("win"):
ports = ["COM" + str(i + 1) for i in range(256)]
elif sys.platform.startswith("linux") or sys.platform.startswith("cygwin"):
ports = glob.glob("/dev/ttyUSB*")
elif sys.platform.startswith("darwin"):
ports = glob.glob("/dev/tty*")
else:
raise EnvironmentError("Unsupported platform")
#print the list of open ports to the screen
num_ports = 0
available_ports = []
for port in ports:
available_ports.append(port)
print " " + str(num_ports) + " - " + port
num_ports += 1
#ask the user to select a serial port
def get_port():
try:
return int(raw_input("Please select a device: "))
except (ValueError):
return -1
pass
port_selection = get_port()
while port_selection < 0 or port_selection >= num_ports:
print "Invalid selection.",
port_selection = get_port()
#open the serial port
try:
port = serial.Serial(available_ports[port_selection], baud)
except (OSError, serial.SerialException):
raise OSError("Could not open serial port")
# wait for device to signal it is ready.
port.read(1)
#print the options to the screen
def print_options():
print " i - Cart Info\n d - Dump ROM\n s - Dump SRAM\n w - Write SRAM\n h - Show this screen\n q - Quit"
print_options()
#the main loop
quit = False
while not quit:
action = raw_input("Please select an action: ").lower()
if action == "i":
header = get_header(port)
if not verify_header(header):
print "Error reading cart!"
continue
print_cart_info(header)
elif action == "d":
header = get_header(port)
if not verify_header(header):
print "Error reading cart!"
continue
print_cart_info(header)
file_name = raw_input("Please enter an output filename: ")
output_file = open(file_name, "wb")
hirom = (header[21] & 1)
read_offset = 0x0 if hirom else 0x8000
bank_size = 0x10000 if hirom else 0x8000
rom_size = (1 << header[23]) * 1024
num_banks = rom_size/bank_size
set_ctrl_lines(port, False, True, False, True)
total_bytes_read = 0
for bank in range(0, num_banks):
# send read section command
port.write(commands['READSECTION'])
# write bank to read from
port.write(chr(bank));
# write start and end addresses
write_addr(port, read_offset)
write_addr(port, read_offset + bank_size - 1)
bytes_read = 0;
# read bank data in loop
while bytes_read < bank_size:
num_to_read = port.inWaiting()
output_file.write(port.read(num_to_read))
bytes_read += num_to_read
total_bytes_read += num_to_read
sys.stdout.write("\r Dumping ROM {0:,}/{1:,} bytes".format(total_bytes_read, rom_size))
sys.stdout.flush()
output_file.close()
print "\n Done."
elif action == "s":
header = get_header(port)
if not verify_header(header):
print "Error reading cart!"
continue
hirom = (header[21] & 1)
sram_size = header[24] * 2048
print_cart_info(header)
if sram_size == 0:
print "Error! Game has no SRAM!"
continue
file_name = raw_input("Please enter an output filename: ")
output_file = open(file_name, "wb")
set_ctrl_lines(port, False, True, hirom, True)
# compute bank and addresses to write to
if hirom:
bank = 0x20
start_addr = 0x6000
else:
bank = 0x30
start_addr = 0x8000
end_addr = start_addr + sram_size - 1
port.write(commands['READSECTION'])
# write bank number
port.write(chr(bank))
# write start and end addresses
write_addr(port, start_addr)
write_addr(port, end_addr)
bytes_read = 0;
# read bank data in loop
while bytes_read < sram_size:
num_to_read = port.inWaiting()
output_file.write(port.read(num_to_read))
bytes_read += num_to_read
sys.stdout.write("\r Dumping SRAM {0:,}/{1:,} bytes".format(bytes_read, sram_size))
sys.stdout.flush()
output_file.close()
print "\n Done."
elif action == "w":
header = get_header(port)
if not verify_header(header):
print "Error reading cart!"
continue
hirom = (header[21] & 1)
sram_size = header[24] * 2048
print_cart_info(header)
if sram_size == 0:
print "Error! Game has no SRAM!"
continue
def get_input_file():
try:
file_name = raw_input("Please enter an input filename: ")
return open(file_name, "rb")
except IOError:
return None
input_file = get_input_file()
while not input_file:
print "No such file."
continue
file_size = os.fstat(input_file.fileno()).st_size
if sram_size != file_size:
print "File size mismatch! File: {}, SRAM: {}".format(file_size, sram_size)
input_file.close()
continue
set_ctrl_lines(port, True, False, hirom, True)
# compute bank and addresses to write to
if hirom:
bank = 0x20
start_addr = 0x6000
else:
bank = 0x30
start_addr = 0x8000
end_addr = start_addr + sram_size - 1
port.write(commands['WRITESECTION'])
# write bank number
port.write(chr(bank))
# write start and end addresses
write_addr(port, start_addr)
write_addr(port, end_addr)
bytes_written = 0;
while input_file.tell() < file_size:
this_byte = input_file.read(1)
port.write(this_byte)
bytes_written += 1
time.sleep(0.001) #add a small delay
sys.stdout.write("\r Writing SRAM {0}/{1} bytes".format(bytes_written, sram_size))
sys.stdout.flush()
input_file.close()
print "\n Done."
elif action == "h":
print_options()
elif action == "q":
quit = True
else:
print "Invalid selection.",
port.close()
# read cart header in bank 0, 0xffc0 to 0xffde
def get_header(port):
    """Read the 32-byte SNES internal header from bank 0 (0xffc0-0xffdf)."""
    set_ctrl_lines(port, False, True, False, True)
    port.write(commands['READSECTION'])
    port.write(chr(0))  # bank number 0
    # start and end addresses of the header region
    write_addr(port, 0xffc0)
    write_addr(port, 0xffdf)
    return bytearray(port.read(32))
def verify_header(header):
    """A header of all zero bytes means the cart did not respond."""
    return any(b != 0 for b in header)
def print_cart_info(header):
title = str(header[:21]).strip()
layout = "HiROM" if (header[21] & 1) else "LoROM"
rom_size = (1 << header[23])
#sram_size = (1 << header[24])
sram_size = header[24] * 2048
country_code = header[25]
country = countries[country_code] if country_code < len(countries) else str(country_code)
version = header[27]
checksum = (header[30] << 8) | header[31]
print " {}, {}, {} KB ROM, {} KB SRAM\n Country: {}, Version: {}, Checksum: 0x{:02X}".format(title, layout, rom_size, sram_size, country, version, checksum)
# write a 16 bit address to the serial port
def write_addr(port, addr):
    """Send a 16-bit address as two bytes, high byte first."""
    hi = (addr >> 8) & 0xff
    lo = addr & 0xff
    port.write(chr(hi))
    port.write(chr(lo))
# set control line states (lines are active low)
# 4 bits of information (most to least sig): read, write, cart select, reset
def set_ctrl_lines(port, read, write, cart, reset):
    """Pack the four control-line states into one byte and send the CTRL opcode."""
    bits = (read << 3) | (write << 2) | (cart << 1) | reset
    port.write(commands['CTRL'])
    port.write(chr(bits))
#code to handle SIGINT
def sigint_handler(signum, frame):
    # Restore the saved handler first so a second Ctrl-C is not re-trapped.
    # NOTE(review): `sigint` is only bound inside the __main__ block below;
    # a SIGINT delivered before that runs would raise NameError here — confirm.
    signal.signal(signal.SIGINT, sigint)
    # `port` is the module-level handle; main() assigns a local of the same
    # name, so this is normally still None — TODO confirm intended.
    if port is not None:
        port.close()
    sys.exit(1)
# Registered at import time as well as under __main__ below; note that at this
# point `sigint` (read inside the handler) is not yet defined.
signal.signal(signal.SIGINT, sigint_handler)
if __name__ == '__main__':
    # Remember the default SIGINT handler so sigint_handler can restore it.
    sigint = signal.getsignal(signal.SIGINT)
    signal.signal(signal.SIGINT, sigint_handler)
    main()
|
cthill/SNESDump
|
desktop/snesdump.py
|
Python
|
mit
| 9,518
|
import six
import pytest
from unittest.mock import Mock
from crabpy.gateway.exception import (
GatewayRuntimeException,
GatewayResourceNotFoundException
)
from crabpy.gateway.crab import (
Gewest, Provincie,
Gemeente, Deelgemeente, Taal,
Bewerking, Organisatie,
Aardsubadres, Aardadres,
Aardgebouw, Aardwegobject,
Aardterreinobject, Statushuisnummer,
Statussubadres, Statusstraatnaam,
Statuswegsegment, Geometriemethodewegsegment,
Statusgebouw, Geometriemethodegebouw,
Herkomstadrespositie, Straat,
Huisnummer, Postkanton,
Wegobject, Wegsegment,
Terreinobject, Perceel,
Gebouw, Metadata, Subadres,
Adrespositie
)
class TestCrabGateway:
def test_list_gewesten(self, crab_gateway, crab_service):
crab_service.ListGewesten.return_value = Mock(
GewestItem=[
Mock(GewestId=1, TaalCodeGewestNaam='NL', GewestNaam='Vlaams')
]
)
res = crab_gateway.list_gewesten()
assert isinstance(res, list)
assert isinstance(res[0], Gewest)
def test_get_gewest_by_id(self, crab_gateway, crab_service):
crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
GewestId=2,
)
res = crab_gateway.get_gewest_by_id(2)
assert isinstance(res, Gewest)
assert res.id == 2
assert isinstance(res.centroid, tuple)
assert isinstance(res.bounding_box, tuple)
def test_get_gewest_by_unexisting_id(self, crab_gateway, crab_service):
crab_service.GetGewestByGewestIdAndTaalCode.side_effect = (
GatewayResourceNotFoundException
)
with pytest.raises(GatewayResourceNotFoundException):
crab_gateway.get_gewest_by_id(-1)
def test_list_gemeenten_default(self, crab_gateway, crab_service):
crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
GewestId=2,
)
crab_service.ListGemeentenByGewestId.return_value = Mock(
GemeenteItem=[
Mock(GemeenteId=1, NISGemeenteCode=10000,
TaalCode='NL', TaalCodeGemeenteNaam='NL')
]
)
res = crab_gateway.list_gemeenten()
assert isinstance(res, list)
assert isinstance(res[0], Gemeente)
assert res[0].gewest.id == 2
def test_list_gemeenten_vlaanderen(self, crab_gateway, crab_service):
crab_service.ListGemeentenByGewestId.return_value = Mock(
GemeenteItem=[
Mock(GemeenteId=1, NISGemeenteCode=10000,
TaalCode='NL', TaalCodeGemeenteNaam='NL')
]
)
gewest = Gewest(2)
res = crab_gateway.list_gemeenten(gewest)
assert isinstance(res, list)
assert isinstance(res[0], Gemeente)
assert res[0].gewest.id == 2
def test_list_provincies(self, crab_gateway):
gewest = Gewest(1)
res = crab_gateway.list_provincies(gewest)
assert isinstance(res, list)
gewest = Gewest(3)
res = crab_gateway.list_provincies(gewest)
assert isinstance(res, list)
gewest = Gewest(2)
res = crab_gateway.list_provincies(gewest)
assert isinstance(res, list)
assert isinstance(res[0], Provincie)
assert res[0].gewest.id == 2
gewest = 2
res = crab_gateway.list_provincies(gewest)
assert isinstance(res, list)
assert isinstance(res[0], Provincie)
assert res[0].gewest.id == 2
def test_get_provincie_by_id(self, crab_gateway):
res = crab_gateway.get_provincie_by_id(10000)
assert isinstance(res, Provincie)
assert res.niscode == 10000
res = crab_gateway.get_provincie_by_id(20001)
assert isinstance(res, Provincie)
assert res.niscode == 20001
res = crab_gateway.get_provincie_by_id(20002)
assert isinstance(res, Provincie)
assert res.niscode == 20002
res = crab_gateway.get_provincie_by_id(30000)
assert isinstance(res, Provincie)
assert res.niscode == 30000
res = crab_gateway.get_provincie_by_id(40000)
assert isinstance(res, Provincie)
assert res.niscode == 40000
res = crab_gateway.get_provincie_by_id(50000)
assert isinstance(res, Provincie)
assert res.niscode == 50000
res = crab_gateway.get_provincie_by_id(60000)
assert isinstance(res, Provincie)
assert res.niscode == 60000
res = crab_gateway.get_provincie_by_id(70000)
assert isinstance(res, Provincie)
assert res.niscode == 70000
res = crab_gateway.get_provincie_by_id(80000)
assert isinstance(res, Provincie)
assert res.niscode == 80000
res = crab_gateway.get_provincie_by_id(90000)
assert isinstance(res, Provincie)
assert res.niscode == 90000
def test_get_provincie_by_unexisting_id(self, crab_gateway):
with pytest.raises(GatewayResourceNotFoundException):
crab_gateway.get_provincie_by_id(-1)
def test_list_gemeenten_by_provincie(self, crab_gateway, crab_service):
crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
GewestId=2
)
crab_service.ListGemeentenByGewestId.return_value = Mock(
GemeenteItem=[
Mock(GemeenteId=1, NISGemeenteCode=10000,
TaalCode='NL', TaalCodeGemeenteNaam='NL')
]
)
provincie = Provincie(10000, 'Antwerpen', Gewest(2))
res = crab_gateway.list_gemeenten_by_provincie(provincie)
assert isinstance(res, list)
assert isinstance(res[0], Gemeente)
assert str(res[0].niscode)[0] == '1'
provincie = 10000
res = crab_gateway.list_gemeenten_by_provincie(provincie)
assert isinstance(res, list)
assert isinstance(res[0], Gemeente)
assert str(res[0].niscode)[0] == '1'
def test_get_gemeente_by_id(self, crab_gateway, crab_service):
crab_service.GetGemeenteByGemeenteId.return_value = Mock(
GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_gemeente_by_id(1)
assert isinstance(res, Gemeente)
assert res.id == 1
    def test_get_gemeente_by_id_with_string(self, crab_gateway, crab_service):
        """A non-numeric id propagates the service's GatewayRuntimeException."""
        crab_service.GetGemeenteByGemeenteId.side_effect = (
            GatewayRuntimeException(None, None)
        )
        with pytest.raises(GatewayRuntimeException):
            crab_gateway.get_gemeente_by_id('gent')
    def test_get_gemeente_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown gemeente id raises GatewayResourceNotFoundException."""
        crab_service.GetGemeenteByGemeenteId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_gemeente_by_id(-1)
def test_get_gemeente_by_niscode(self, crab_gateway, crab_service):
crab_service.GetGemeenteByNISGemeenteCode.return_value = Mock(
GemeenteId=1, GewestId=1, NisGemeenteCode=11001, BeginBewerking=1,
BeginOrganisatie=1
)
res = crab_gateway.get_gemeente_by_niscode(11001)
assert isinstance(res, Gemeente)
assert res.niscode == 11001
    def test_get_gemeente_by_unexisting_niscode(self, crab_gateway, crab_service):
        """An unknown NIS code raises GatewayResourceNotFoundException."""
        crab_service.GetGemeenteByNISGemeenteCode.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_gemeente_by_niscode(-1)
def test_list_deelgemeenten(self, crab_gateway):
res = crab_gateway.list_deelgemeenten()
assert isinstance(res, list)
assert isinstance(res[0], Deelgemeente)
    def test_list_deelgemeenten_wrong_gewest(self, crab_gateway, crab_service):
        """Deelgemeenten only exist for the Flemish gewest; others raise."""
        with pytest.raises(ValueError):
            crab_gateway.list_deelgemeenten(1)
def test_list_deelgemeenten_by_gemeente(self, crab_gateway, crab_service):
crab_service.GetGemeenteByNISGemeenteCode.return_value = Mock(
GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.list_deelgemeenten_by_gemeente(45062)
assert isinstance(res, list)
assert len(res) == 2
assert isinstance(res[0], Deelgemeente)
gemeente = Gemeente(1, None, 45062, None)
res = crab_gateway.list_deelgemeenten_by_gemeente(gemeente)
assert isinstance(res, list)
assert len(res) == 2
assert isinstance(res[0], Deelgemeente)
def test_get_deelgemeente_by_id(self, crab_gateway):
res = crab_gateway.get_deelgemeente_by_id('45062A')
assert isinstance(res, Deelgemeente)
assert res.id == '45062A'
    def test_get_deelgemeente_by_unexisting_id(self, crab_gateway):
        """An unknown deelgemeente id raises GatewayResourceNotFoundException."""
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_deelgemeente_by_id(-1)
def test_list_talen(self, crab_gateway, crab_service):
crab_service.ListTalen.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_talen()
assert isinstance(res, list)
assert isinstance(res[0], Taal)
def test_list_bewerkingen(self, crab_gateway, crab_service):
res = crab_gateway.list_bewerkingen()
assert isinstance(res, list)
assert isinstance(res[0], Bewerking)
def test_list_organisaties(self, crab_gateway, crab_service):
res = crab_gateway.list_organisaties()
assert isinstance(res, list)
assert isinstance(res[0], Organisatie)
def test_list_aardsubadressen(self, crab_gateway, crab_service):
crab_service.ListAardSubadressen.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_aardsubadressen()
assert isinstance(res, list)
assert isinstance(res[0], Aardsubadres)
def test_list_aardadressen(self, crab_gateway, crab_service):
crab_service.ListAardAdressen.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_aardadressen()
assert isinstance(res, list)
assert isinstance(res[0], Aardadres)
def test_list_aardgebouwen(self, crab_gateway, crab_service):
crab_service.ListAardGebouwen.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_aardgebouwen()
assert isinstance(res, list)
assert isinstance(res[0], Aardgebouw)
def test_list_aarwegobjecten(self, crab_gateway, crab_service):
crab_service.ListAardWegobjecten.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_aardwegobjecten()
assert isinstance(res, list)
assert isinstance(res[0], Aardwegobject)
def test_list_aardterreinobjecten(self, crab_gateway, crab_service):
crab_service.ListAardTerreinobjecten.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_aardterreinobjecten()
assert isinstance(res, list)
assert isinstance(res[0], Aardterreinobject)
def test_list_statushuisnummers(self, crab_gateway, crab_service):
crab_service.ListStatusHuisnummers.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_statushuisnummers()
assert isinstance(res, list)
assert isinstance(res[0], Statushuisnummer)
def test_list_statussubadressen(self, crab_gateway, crab_service):
crab_service.ListStatusSubadressen.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_statussubadressen()
assert isinstance(res, list)
assert isinstance(res[0], Statussubadres)
def test_list_statusstraatnamen(self, crab_gateway, crab_service):
crab_service.ListStatusStraatnamen.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_statusstraatnamen()
assert isinstance(res, list)
assert isinstance(res[0], Statusstraatnaam)
def test_list_statuswegsegmenten(self, crab_gateway, crab_service):
crab_service.ListStatusWegsegmenten.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_statuswegsegmenten()
assert isinstance(res, list)
assert isinstance(res[0], Statuswegsegment)
def test_list_geometriemethodewegsegmenten(self, crab_gateway,
crab_service):
crab_service.ListGeometriemethodeWegsegmenten.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_geometriemethodewegsegmenten()
assert isinstance(res, list)
assert isinstance(res[0], Geometriemethodewegsegment)
def test_list_statusgebouwen(self, crab_gateway, crab_service):
crab_service.ListStatusGebouwen.return_value = Mock(CodeItem=[Mock()])
res = crab_gateway.list_statusgebouwen()
assert isinstance(res, list)
assert isinstance(res[0], Statusgebouw)
def test_list_gemetriemethodegebouwen(self, crab_gateway, crab_service):
crab_service.ListGeometriemethodeGebouwen.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_geometriemethodegebouwen()
assert isinstance(res, list)
assert isinstance(res[0], Geometriemethodegebouw)
def test_list_herkomstadrespositie(self, crab_gateway, crab_service):
crab_service.ListHerkomstAdresposities.return_value = Mock(
CodeItem=[Mock()]
)
res = crab_gateway.list_herkomstadresposities()
assert isinstance(res, list)
assert isinstance(res[0], Herkomstadrespositie)
def test_list_straten(self, crab_gateway, crab_service):
crab_service.ListStraatnamenWithStatusByGemeenteId.return_value = Mock(
StraatnaamWithStatusItem=[Mock()]
)
res = crab_gateway.list_straten(1)
assert isinstance(res, list)
assert isinstance(res[0], Straat)
gemeente = Gemeente(1, None, None, None)
res = crab_gateway.list_straten(gemeente)
assert isinstance(res, list)
assert isinstance(res[0], Straat)
def test_list_straten_empty(self, crab_gateway, crab_service):
crab_service.ListStraatnamenWithStatusByGemeenteId.return_value = Mock(
StraatnaamWithStatusItem=[]
)
res = crab_gateway.list_straten(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_straat_by_id(self, crab_gateway, crab_service):
crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = Mock(
StraatnaamId=1, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_straat_by_id(1)
assert isinstance(res, Straat)
assert res.id == 1
    def test_get_straat_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown straat id raises GatewayResourceNotFoundException."""
        crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_straat_by_id(-1)
def test_list_huisnummers_by_straat(self, crab_gateway, crab_service):
crab_service.ListHuisnummersWithStatusByStraatnaamId.return_value = (
Mock(HuisnummerWithStatusItem=[Mock(HuisnummerId=1)])
)
res = crab_gateway.list_huisnummers_by_straat(1)
assert isinstance(res, list)
assert isinstance(res[0], Huisnummer)
straat = Straat(1, None, None, None, None, None, None, None)
res = crab_gateway.list_huisnummers_by_straat(straat)
assert isinstance(res, list)
assert isinstance(res[0], Huisnummer)
def test_list_huisnummers_by_straat_empty(self, crab_gateway,
crab_service):
crab_service.ListHuisnummersWithStatusByStraatnaamId.return_value = (
Mock(HuisnummerWithStatusItem=[])
)
res = crab_gateway.list_huisnummers_by_straat(0)
assert isinstance(res, list)
assert len(res) == 0
def test_list_huisnummers_by_perceel(self, crab_gateway, crab_service):
crab_service.ListHuisnummersWithStatusByIdentificatorPerceel\
.return_value = Mock(
HuisnummerWithStatusItem=[Mock(HuisnummerId=1)]
)
crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = (
Mock(HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1)
)
res1 = crab_gateway.list_huisnummers_by_perceel("13040C1747/00G002")
assert isinstance(res1, list)
assert isinstance(res1[0], Huisnummer)
perceel = Perceel('13040C1747/00G002')
res2 = crab_gateway.list_huisnummers_by_perceel(perceel)
assert isinstance(res2, list)
assert isinstance(res2[0], Huisnummer)
assert [p.id for p in res1] == [p.id for p in res2]
def test_list_huisnummers_by_perceel_empty(self, crab_gateway,
crab_service):
crab_service.ListHuisnummersWithStatusByIdentificatorPerceel\
.return_value = Mock(HuisnummerWithStatusItem=[])
res = crab_gateway.list_huisnummers_by_perceel("13040A0000/00A001")
assert isinstance(res, list)
assert len(res) == 0
def test_get_huisnummer_by_id(self, crab_gateway, crab_service):
crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = Mock(
HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_huisnummer_by_id(1)
assert isinstance(res, Huisnummer)
assert res.id == 1
    def test_get_huisnummer_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown huisnummer id raises GatewayResourceNotFoundException."""
        crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_huisnummer_by_id(-1)
def test_get_huisnummer_by_nummer_and_straat(self, crab_gateway,
crab_service):
crab_service.GetHuisnummerWithStatusByHuisnummer.return_value = Mock(
HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1,
Huisnummer='1'
)
crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = Mock(
StraatnaamId=1, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_huisnummer_by_nummer_and_straat(1, 1)
assert isinstance(res, Huisnummer)
assert res.huisnummer == '1'
assert res.straat.id == 1
straat = Straat(1, None, None, None, None, None, None, None)
res = crab_gateway.get_huisnummer_by_nummer_and_straat(1, straat)
assert isinstance(res, Huisnummer)
assert res.huisnummer == '1'
    def test_get_huisnummer_by_unexisting_nummer_and_straat(self, crab_gateway,
                                                            crab_service):
        """Unknown nummer/straat raises GatewayResourceNotFoundException."""
        crab_service.GetHuisnummerWithStatusByHuisnummer.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_huisnummer_by_nummer_and_straat(-1, -1)
def test_list_postkantons_by_gemeente(self, crab_gateway, crab_service):
crab_service.ListPostkantonsByGemeenteId.return_value = Mock(
PostkantonItem=[Mock(PostkantonCode=1)]
)
res = crab_gateway.list_postkantons_by_gemeente(1)
assert isinstance(res, list)
assert isinstance(res[0], Postkanton)
gemeente = Gemeente(1, None, None, None)
res = crab_gateway.list_postkantons_by_gemeente(gemeente)
assert isinstance(res, list)
assert isinstance(res[0], Postkanton)
def test_list_postkantons_by_gemeente_empty(self, crab_gateway,
crab_service):
crab_service.ListPostkantonsByGemeenteId.return_value = Mock(
PostkantonItem=[]
)
res = crab_gateway.list_postkantons_by_gemeente(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_postkanton_by_huisnummer(self, crab_gateway, crab_service):
crab_service.GetPostkantonByHuisnummerId.return_value = Mock(
PostkantonCode=1
)
res = crab_gateway.get_postkanton_by_huisnummer(1)
assert isinstance(res, Postkanton)
huisnummer = Huisnummer(1, None, None, None)
res = crab_gateway.get_postkanton_by_huisnummer(huisnummer)
assert isinstance(res, Postkanton)
    def test_get_postkanton_by_unexisting_huisnummer(self, crab_gateway,
                                                     crab_service):
        """An unknown huisnummer raises GatewayResourceNotFoundException."""
        crab_service.GetPostkantonByHuisnummerId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_postkanton_by_huisnummer(-1)
def test_list_wegobjecten_by_straat(self, crab_gateway, crab_service):
crab_service.ListWegobjectenByStraatnaamId.return_value = Mock(
WegobjectItem=[Mock(IdentificatorWegobject=1,
AardWegobject=1)]
)
res = crab_gateway.list_wegobjecten_by_straat(1)
assert isinstance(res, list)
assert isinstance(res[0], Wegobject)
straat = Straat(1, None, None, None, None, None, None, None)
res = crab_gateway.list_wegobjecten_by_straat(straat)
assert isinstance(res, list)
assert isinstance(res[0], Wegobject)
def test_list_wegobjecten_by_unexsiting_straat(self, crab_gateway,
crab_service):
crab_service.ListWegobjectenByStraatnaamId.return_value = Mock(
WegobjectItem=[]
)
res = crab_gateway.list_wegobjecten_by_straat(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_wegobject_by_id(self, crab_gateway, crab_service):
crab_service.GetWegobjectByIdentificatorWegobject.return_value = Mock(
IdentificatorWegobject='53839893', BeginBewerking=1,
BeginOrganisatie=1
)
res = crab_gateway.get_wegobject_by_id("53839893")
assert isinstance(res, Wegobject)
assert res.id == "53839893"
    def test_get_wegobject_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown wegobject id raises GatewayResourceNotFoundException."""
        crab_service.GetWegobjectByIdentificatorWegobject.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_wegobject_by_id(-1)
def test_list_wegsegmenten_by_straat(self, crab_gateway, crab_service):
crab_service.ListWegsegmentenByStraatnaamId.return_value = Mock(
WegsegmentItem=[Mock()]
)
res = crab_gateway.list_wegsegmenten_by_straat(1)
assert isinstance(res, list)
assert isinstance(res[0], Wegsegment)
straat = Straat(1, None, None, None, None, None, None, None)
res = crab_gateway.list_wegsegmenten_by_straat(straat)
assert isinstance(res, list)
assert isinstance(res[0], Wegsegment)
def test_list_wegsegmenten_by_straat_empty(self, crab_gateway,
crab_service):
crab_service.ListWegsegmentenByStraatnaamId.return_value = Mock(
WegsegmentItem=[]
)
res = crab_gateway.list_wegsegmenten_by_straat(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_wegsegment_by_id(self, crab_gateway, crab_service):
crab_service.GetWegsegmentByIdentificatorWegsegment.return_value = (
Mock(IdentificatorWegsegment='108724', BeginBewerking=1,
BeginOrganisatie=1)
)
res = crab_gateway.get_wegsegment_by_id("108724")
assert isinstance(res, Wegsegment)
assert res.id == "108724"
    def test_get_wegsegment_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown wegsegment id raises GatewayResourceNotFoundException."""
        crab_service.GetWegsegmentByIdentificatorWegsegment.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_wegsegment_by_id(-1)
def test_list_terreinobjecten_by_huisnummer(self, crab_gateway,
crab_service):
crab_service.ListTerreinobjectenByHuisnummerId.return_value = Mock(
TerreinobjectItem=[Mock()]
)
res = crab_gateway.list_terreinobjecten_by_huisnummer(1)
assert isinstance(res, list)
assert isinstance(res[0], Terreinobject)
huisnummer = Huisnummer(1, None, None, None)
res = crab_gateway.list_terreinobjecten_by_huisnummer(huisnummer)
assert isinstance(res, list)
assert isinstance(res[0], Terreinobject)
def test_list_terreinobjecten_by_huisnummer_empty(self, crab_gateway,
crab_service):
crab_service.ListTerreinobjectenByHuisnummerId.return_value = Mock(
TerreinobjectItem=[]
)
res = crab_gateway.list_terreinobjecten_by_huisnummer(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_terreinobject_by_id(self, crab_gateway, crab_service):
crab_service.GetTerreinobjectByIdentificatorTerreinobject\
.return_value = Mock(
IdentificatorTerreinobject='13040_C_1747_G_002_00',
BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_terreinobject_by_id("13040_C_1747_G_002_00")
assert isinstance(res, Terreinobject)
assert res.id == "13040_C_1747_G_002_00"
    def test_get_terreinobject_by_unexisting_id(self, crab_gateway,
                                                crab_service):
        """An unknown terreinobject id raises GatewayResourceNotFoundException."""
        crab_service.GetTerreinobjectByIdentificatorTerreinobject\
            .return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_terreinobject_by_id(-1)
def test_list_percelen_by_huisnummer(self, crab_gateway, crab_service):
crab_service.ListPercelenByHuisnummerId.return_value = Mock(
PerceelItem=[Mock()]
)
res = crab_gateway.list_percelen_by_huisnummer(1)
assert isinstance(res, list)
assert isinstance(res[0], Perceel)
huisnummer = Huisnummer(1, None, None, None)
res = crab_gateway.list_percelen_by_huisnummer(huisnummer)
assert isinstance(res, list)
assert isinstance(res[0], Perceel)
def test_list_percelen_by_huisnummer_empty(self, crab_gateway,
crab_service):
crab_service.ListPercelenByHuisnummerId.return_value = Mock(
PerceelItem=[]
)
res = crab_gateway.list_percelen_by_huisnummer(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_perceel_by_id(self, crab_gateway, crab_service):
crab_service.GetPerceelByIdentificatorPerceel.return_value = Mock(
IdentificatorPerceel='13040C1747/00G002',
BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_perceel_by_id("13040C1747/00G002")
assert isinstance(res, Perceel)
assert res.id == "13040C1747/00G002"
    def test_get_perceel_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown perceel id raises GatewayResourceNotFoundException."""
        crab_service.GetPerceelByIdentificatorPerceel.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_perceel_by_id(-1)
def test_list_gebouwen_by_huisnummer(self, crab_gateway, crab_service):
crab_service.ListGebouwenByHuisnummerId.return_value = Mock(
GebouwItem=[Mock(IdentificatorGebouw=1)]
)
res = crab_gateway.list_gebouwen_by_huisnummer(1)
assert isinstance(res, list)
assert isinstance(res[0], Gebouw)
huisnummer = Huisnummer(1, None, None, None)
res = crab_gateway.list_gebouwen_by_huisnummer(huisnummer)
assert isinstance(res, list)
assert isinstance(res[0], Gebouw)
def test_list_gebouwen_by_huisnummer_empty(self, crab_gateway,
crab_service):
crab_service.ListGebouwenByHuisnummerId.return_value = Mock(
GebouwItem=[]
)
res = crab_gateway.list_gebouwen_by_huisnummer(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_gebouw_by_id(self, crab_gateway, crab_service):
crab_service.GetGebouwByIdentificatorGebouw.return_value = Mock(
IdentificatorGebouw=1538575, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_gebouw_by_id("1538575")
assert isinstance(res, Gebouw)
assert res.id == 1538575
    def test_get_gebouw_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown gebouw id raises GatewayResourceNotFoundException."""
        crab_service.GetGebouwByIdentificatorGebouw.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_gebouw_by_id(-1)
def test_list_subadressen_by_huisnummer(self, crab_gateway, crab_service):
crab_service.ListSubadressenWithStatusByHuisnummerId.return_value = (
Mock(SubadresWithStatusItem=[Mock(SubadresId=1)])
)
res = crab_gateway.list_subadressen_by_huisnummer(129462)
assert isinstance(res, list)
assert isinstance(res[0], Subadres)
huisnummer = Huisnummer(129462, None, None, None)
res = crab_gateway.list_subadressen_by_huisnummer(huisnummer)
assert isinstance(res, list)
assert isinstance(res[0], Subadres)
def test_list_subadressen_by_huisnummer_empty(self, crab_gateway,
crab_service):
crab_service.ListSubadressenWithStatusByHuisnummerId.return_value = (
Mock(SubadresWithStatusItem=[])
)
res = crab_gateway.list_subadressen_by_huisnummer(0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_subadres_by_id(self, crab_gateway, crab_service):
crab_service.GetSubadresWithStatusBySubadresId.return_value = Mock(
SubadresId=1120936, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_subadres_by_id(1120936)
assert isinstance(res, Subadres)
assert res.id == 1120936
    def test_get_subadres_by_unexisting_id(self, crab_gateway, crab_service):
        """An unknown subadres id raises GatewayResourceNotFoundException."""
        crab_service.GetSubadresWithStatusBySubadresId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_subadres_by_id(-1)
def test_list_adresposities_by_huisnummer(self, crab_gateway, crab_service):
crab_service.ListAdrespositiesByHuisnummerId.return_value = Mock(
AdrespositieItem=[Mock()]
)
res = crab_gateway.list_adresposities_by_huisnummer(1)
assert isinstance(res, list)
assert isinstance(res[0], Adrespositie)
def test_list_adresposities_by_huisnummer_empty(self, crab_gateway,
crab_service):
crab_service.ListAdrespositiesByHuisnummerId.return_value = Mock(
AdrespositieItem=[]
)
res = crab_gateway.list_adresposities_by_huisnummer(0)
assert isinstance(res, list)
assert len(res) == 0
def test_list_adresposities_by_nummer_and_straat(self, crab_gateway,
crab_service):
crab_service.ListAdrespositiesByHuisnummer.return_value = Mock(
AdrespositieItem=[Mock()]
)
res = crab_gateway.list_adresposities_by_nummer_and_straat(1, 1)
assert isinstance(res, list)
assert isinstance(res[0], Adrespositie)
def test_list_adresposities_by_nummer_and_straat_empty(self, crab_gateway,
crab_service):
crab_service.ListAdrespositiesByHuisnummer.return_value = Mock(
AdrespositieItem=[]
)
res = crab_gateway.list_adresposities_by_nummer_and_straat(0, 0)
assert isinstance(res, list)
assert len(res) == 0
def test_list_adresposities_by_subadres(self, crab_gateway, crab_service):
crab_service.ListAdrespositiesBySubadresId.return_value = Mock(
AdrespositieItem=[Mock()]
)
res = crab_gateway.list_adresposities_by_subadres(1120936)
assert isinstance(res, list)
assert isinstance(res[0], Adrespositie)
def test_list_adresposities_by_subadres_empty(self, crab_gateway,
crab_service):
crab_service.ListAdrespositiesBySubadresId.return_value = Mock(
AdrespositieItem=[]
)
res = crab_gateway.list_adresposities_by_subadres(0)
assert isinstance(res, list)
assert len(res) == 0
def test_list_adresposities_by_subadres_and_huisnummer(self, crab_gateway,
crab_service):
crab_service.ListAdrespositiesBySubadres.return_value = Mock(
AdrespositieItem=[Mock()]
)
res = crab_gateway.list_adresposities_by_subadres_and_huisnummer('A', 129462)
assert isinstance(res, list)
assert isinstance(res[0], Adrespositie)
def test_list_adresposities_by_unexisting_subadres_and_huisnummer(
self, crab_gateway, crab_service):
crab_service.ListAdrespositiesBySubadres.return_value = Mock(
AdrespositieItem=[]
)
res = crab_gateway.list_adresposities_by_subadres_and_huisnummer(0, 0)
assert isinstance(res, list)
assert len(res) == 0
def test_get_adrespositie_by_id(self, crab_gateway, crab_service):
crab_service.GetAdrespositieByAdrespositieId.return_value = Mock(
AdrespositieId=4428005, BeginBewerking=1, BeginOrganisatie=1
)
res = crab_gateway.get_adrespositie_by_id(4428005)
assert isinstance(res, Adrespositie)
assert res.id == 4428005
    def test_get_adrespositie_by_unexisting_id(self, crab_gateway,
                                               crab_service):
        """An unknown adrespositie id raises GatewayResourceNotFoundException."""
        crab_service.GetAdrespositieByAdrespositieId.return_value = None
        with pytest.raises(GatewayResourceNotFoundException):
            crab_gateway.get_adrespositie_by_id(-1)
def test_get_postadres_by_huisnummer(self, crab_gateway, crab_service):
crab_service.GetPostadresByHuisnummerId.return_value = Mock(
Postadres='Steenweg op Oosthoven 51, 2300 Turnhout'
)
res = crab_gateway.get_postadres_by_huisnummer(1)
assert res == 'Steenweg op Oosthoven 51, 2300 Turnhout'
hnr = Huisnummer(1, None, None, None)
res = crab_gateway.get_postadres_by_huisnummer(hnr)
assert res == 'Steenweg op Oosthoven 51, 2300 Turnhout'
def test_get_postadres_by_subadres(self, crab_gateway, crab_service):
crab_service.GetPostadresBySubadresId.return_value = Mock(
Postadres='Antoon van Brabantstraat 7 bus B, 2630 Aartselaar'
)
res = crab_gateway.get_postadres_by_subadres(1120936)
assert res == 'Antoon van Brabantstraat 7 bus B, 2630 Aartselaar'
subadres = Subadres(1120936, None, None)
res = crab_gateway.get_postadres_by_subadres(subadres)
assert res == 'Antoon van Brabantstraat 7 bus B, 2630 Aartselaar'
class TestGewest:
    """Tests for the Gewest entity."""

    def test_fully_initialised(self):
        """A fully constructed Gewest exposes all of its attributes."""
        gewest = Gewest(
            2,
            {'nl': 'Vlaams gewest', 'fr': 'Région Flamande'},
            (138165.09, 189297.53),
            (22279.17, 153050.23, 258873.3, 244022.31)
        )
        assert gewest.id == 2
        assert gewest.naam == 'Vlaams gewest'
        assert gewest.centroid == (138165.09, 189297.53)
        assert gewest.bounding_box == (22279.17, 153050.23, 258873.3, 244022.31)
        assert str(gewest) == 'Vlaams gewest (2)'
        assert repr(gewest) == "Gewest(2)"

    def test_str_and_repr_dont_lazy_load(self):
        """str()/repr() on a bare Gewest must not trigger a gateway call."""
        gewest = Gewest(2)
        assert str(gewest) == 'Gewest 2'
        assert repr(gewest) == 'Gewest(2)'

    def test_check_gateway_not_set(self):
        """check_gateway raises when no gateway was attached."""
        gewest = Gewest(2)
        with pytest.raises(RuntimeError):
            gewest.check_gateway()

    def test_gemeenten(self, crab_gateway, crab_service):
        """The gemeenten property lists the gewest's gemeenten."""
        crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
            GewestId=2,
        )
        crab_service.ListGemeentenByGewestId.return_value = Mock(
            GemeenteItem=[
                Mock(GemeenteId=1, NISGemeenteCode=10000,
                     TaalCode='NL', TaalCodeGemeenteNaam='NL')
            ]
        )
        gewest = Gewest(2)
        gewest.set_gateway(crab_gateway)
        assert isinstance(gewest.gemeenten, list)

    def test_provincies(self, crab_gateway):
        """The Flemish gewest contains exactly five provincies."""
        gewest = Gewest(2)
        gewest.set_gateway(crab_gateway)
        provincies = gewest.provincies
        assert isinstance(provincies, list)
        assert len(provincies) == 5

    def test_lazy_load(self, crab_gateway, crab_service):
        """Attributes of a sparse Gewest are loaded via the gateway."""
        crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
            GewestId=2, GewestNaam='Vlaams Gewest', CenterX=138165.09,
            CenterY=189297.53, MinimumX=22279.17, MinimumY=153050.23,
            MaximumX=258873.3, MaximumY=244022.31
        )
        gewest = Gewest(2)
        gewest.set_gateway(crab_gateway)
        assert gewest.id == 2
        assert str(gewest.naam) == 'Vlaams Gewest'
        assert gewest.centroid == (138165.09, 189297.53)
        assert gewest.bounding_box == (22279.17, 153050.23, 258873.3, 244022.31)
class TestProvincie:
    """Tests for the Provincie entity."""

    def test_fully_initialised(self):
        """A fully constructed Provincie exposes its attributes."""
        provincie = Provincie(20001, 'Vlaams-Brabant', Gewest(2))
        assert provincie.niscode == 20001
        assert provincie.naam == 'Vlaams-Brabant'
        assert str(provincie) == 'Vlaams-Brabant (20001)'
        assert repr(provincie) == "Provincie(20001, 'Vlaams-Brabant', Gewest(2))"

    def test_check_gateway_not_set(self):
        """check_gateway raises when no gateway was attached."""
        provincie = Provincie(20001, 'Vlaams-Brabant', Gewest(2))
        with pytest.raises(RuntimeError):
            provincie.check_gateway()

    def test_gemeenten(self, crab_gateway, crab_service):
        """The gemeenten property lists the provincie's gemeenten."""
        crab_service.GetGewestByGewestIdAndTaalCode.return_value = Mock(
            GewestId=2
        )
        crab_service.ListGemeentenByGewestId.return_value = Mock(
            GemeenteItem=[
                Mock(GemeenteId=1, NISGemeenteCode=10000,
                     TaalCode='NL', TaalCodeGemeenteNaam='NL')
            ]
        )
        provincie = Provincie(20001, 'Vlaams-Brabant', Gewest(2))
        provincie.set_gateway(crab_gateway)
        assert isinstance(provincie.gemeenten, list)
class TestGemeente:
    """Tests for the Gemeente entity."""

    def test_fully_initialised(self):
        """A fully constructed Gemeente exposes all of its attributes."""
        g = Gemeente(
            1,
            'Aartselaar',
            11001,
            Gewest(2),
            Taal('nl', 'Nederlands', 'Nederlands.'),
            (150881.07, 202256.84),
            (148950.36, 199938.28, 152811.77, 204575.39),
            Metadata(
                '1830-01-01 00:00:00',
                '2002-08-13 17:32:32',
                Bewerking(1, '', ''),
                Organisatie(6, '', '')
            )
        )
        assert g.id == 1
        assert g.naam == 'Aartselaar'
        assert g.niscode == 11001
        assert isinstance(g.gewest, Gewest)
        assert g.gewest.id == 2
        assert g.centroid == (150881.07, 202256.84)
        assert g.bounding_box == (148950.36, 199938.28, 152811.77, 204575.39)
        assert int(g.gewest.id) == 2
        assert isinstance(g._taal, Taal)
        assert g._taal_id == 'nl'
        assert isinstance(g.metadata, Metadata)
        assert g.metadata.begin_datum == '1830-01-01 00:00:00'
        assert g.metadata.begin_tijd == '2002-08-13 17:32:32'
        assert isinstance(g.metadata.begin_bewerking, Bewerking)
        assert int(g.metadata.begin_bewerking.id) == 1
        assert isinstance(g.metadata.begin_organisatie, Organisatie)
        assert int(g.metadata.begin_organisatie.id) == 6
        assert 'Aartselaar (1)' == str(g)
        assert "Gemeente(1, 'Aartselaar', 11001)" == repr(g)

    @pytest.mark.skipif(
        not six.PY2,
        reason='This test only works on python 2.x'
    )
    def test_unicode_py2(self):
        """On python 2, str() returns the encoded gemeente name."""
        g = Gemeente(92, 'Biévène', 23009, Gewest(2))
        assert 'Biévène (92)'.encode() == str(g)

    @pytest.mark.skipif(
        not six.PY3,
        reason='This test only works on python 3.x'
    )
    def test_unicode_py3(self):
        """On python 3, str() returns the unicode gemeente name."""
        g = Gemeente(92, 'Biévène', 23009, Gewest(2))
        assert 'Biévène (92)' == str(g)

    def test_str_and_repr_dont_lazy_load(self):
        """str()/repr() must not trigger a gateway call."""
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(2))
        assert 'Aartselaar (1)' == str(g)
        assert "Gemeente(1, 'Aartselaar', 11001)" == repr(g)

    def test_check_gateway_not_set(self):
        """check_gateway raises when no gateway was attached."""
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(2))
        with pytest.raises(RuntimeError):
            g.check_gateway()

    def test_lazy_load(self, crab_gateway, crab_service):
        """Attributes of a sparse Gemeente are loaded via the gateway."""
        crab_service.ListTalen.return_value = Mock(CodeItem=[Mock(Code='nl')])
        crab_service.GetGemeenteByGemeenteId.return_value = Mock(
            GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1,
            CenterX=150881.07, CenterY=202256.84, MinimumX=148950.36,
            MinimumY=199938.28, MaximumX=152811.77, MaximumY=204575.39,
            TaalCode='nl'
        )
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(2))
        g.set_gateway(crab_gateway)
        assert g.id == 1
        assert g.naam == 'Aartselaar'
        assert g.niscode == 11001
        assert isinstance(g.gewest, Gewest)
        assert int(g.gewest.id) == 2
        assert g.taal.id == 'nl'
        assert g.centroid == (150881.07, 202256.84)
        assert g.bounding_box == (148950.36, 199938.28, 152811.77, 204575.39)
        g.metadata.set_gateway(crab_gateway)
        assert isinstance(g.metadata, Metadata)
        # PEP 8: test for None with identity, not '== None'.
        assert g.metadata.begin_datum is not None
        assert g.metadata.begin_tijd is not None
        assert isinstance(g.metadata.begin_bewerking, Bewerking)
        assert int(g.metadata.begin_bewerking.id) == 1
        assert isinstance(g.metadata.begin_organisatie, Organisatie)
        assert int(g.metadata.begin_organisatie.id) == 1

    def test_straten(self, crab_gateway, crab_service):
        """The straten property lists the gemeente's streets."""
        crab_service.ListStraatnamenWithStatusByGemeenteId.return_value = Mock(
            StraatnaamWithStatusItem=[Mock()]
        )
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(3))
        g.set_gateway(crab_gateway)
        straten = g.straten
        assert isinstance(straten, list)

    def test_postkantons(self, crab_gateway, crab_service):
        """The postkantons property lists the gemeente's post offices."""
        crab_service.ListPostkantonsByGemeenteId.return_value = Mock(
            PostkantonItem=[Mock(PostkantonCode=1)]
        )
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(3))
        g.set_gateway(crab_gateway)
        postkanton = g.postkantons
        assert isinstance(postkanton, list)

    def test_provincie(self, crab_gateway):
        """The provincie property resolves the enclosing Provincie."""
        g = Gemeente(1, 'Aartselaar', 11001, Gewest(2))
        g.set_gateway(crab_gateway)
        provincie = g.provincie
        assert isinstance(provincie, Provincie)
        assert 10000 == provincie.id
class TestDeelgemeente:
    """Tests for the Deelgemeente entity."""

    def test_fully_initialised(self):
        """A fully constructed Deelgemeente exposes its attributes."""
        deelgemeente = Deelgemeente('45062A', 'Sint-Maria-Horebeke', 45062)
        assert deelgemeente.id == '45062A'
        assert deelgemeente.naam == 'Sint-Maria-Horebeke'
        assert str(deelgemeente) == 'Sint-Maria-Horebeke (45062A)'
        assert repr(deelgemeente) == "Deelgemeente('45062A', 'Sint-Maria-Horebeke', 45062)"

    def test_check_gateway_not_set(self):
        """check_gateway raises when no gateway was attached."""
        deelgemeente = Deelgemeente('45062A', 'Sint-Maria-Horebeke', 45062)
        with pytest.raises(RuntimeError):
            deelgemeente.check_gateway()

    def test_gemeente(self, crab_gateway, crab_service):
        """The gemeente property resolves the enclosing Gemeente."""
        crab_service.GetGemeenteByNISGemeenteCode.return_value = Mock(
            GemeenteId=1, GewestId=1, NisGemeenteCode=45062, BeginBewerking=1,
            BeginOrganisatie=1, GemeenteNaam='Horebeke'
        )
        deelgemeente = Deelgemeente('45062A', 'Sint-Maria-Horebeke', 45062)
        deelgemeente.set_gateway(crab_gateway)
        gemeente = deelgemeente.gemeente
        assert isinstance(gemeente, Gemeente)
        assert gemeente.niscode == 45062
        assert gemeente.naam == 'Horebeke'
class TestTaal:
    """Tests for the Taal entity."""

    def test_fully_initialised(self):
        """A fully constructed Taal exposes id, naam and definitie."""
        taal = Taal("nl", 'Nederlands', 'Nederlands.')
        assert taal.id == "nl"
        assert taal.naam == 'Nederlands'
        assert taal.definitie == 'Nederlands.'
        assert str(taal) == 'Nederlands'
        assert repr(taal) == "Taal('nl', 'Nederlands', 'Nederlands.')"
class TestBewerking:
    """Tests for the repr of Bewerking."""

    def test_repr(self):
        bewerking = Bewerking('3', 'correctie', 'Correctie van de attributen.')
        expected = "Bewerking(3, 'correctie', 'Correctie van de attributen.')"
        assert repr(bewerking) == expected
class TestOrganisatie:
    """Tests for the repr of Organisatie."""

    def test_repr(self):
        organisatie = Organisatie('6', 'NGI', 'Nationaal Geografisch Instituut.')
        expected = "Organisatie(6, 'NGI', 'Nationaal Geografisch Instituut.')"
        assert repr(organisatie) == expected
class TestAardsubadres:
    """Tests for the repr of Aardsubadres."""

    def test_repr(self):
        aard = Aardsubadres('1', 'appartementNummer', 'Nummer van het appartement.')
        expected = "Aardsubadres(1, 'appartementNummer', 'Nummer van het appartement.')"
        assert repr(aard) == expected
class TestAardadres:
    """Tests for the repr of Aardadres."""

    def test_repr(self):
        aard = Aardadres('1', 'subAdres', 'Aanduiding van een plaats op een huisnummer')
        expected = "Aardadres(1, 'subAdres', 'Aanduiding van een plaats op een huisnummer')"
        assert repr(aard) == expected
class TestAardgebouw:
    """Tests for the Aardgebouw code object."""

    def test_repr(self):
        """repr renders the id as an int with the other fields quoted."""
        aard = Aardgebouw('3', 'virtueel gebouw', 'gbg afgezoomd met virtuele gvl')
        assert repr(aard) == "Aardgebouw(3, 'virtueel gebouw', 'gbg afgezoomd met virtuele gvl')"
class TestAardwegobject:
    """Tests for the Aardwegobject code object."""

    def test_repr(self):
        """repr renders the id as an int with the other fields quoted."""
        aard = Aardwegobject('1', 'taTEL', 'Wegverbinding volgens TeleAtlas.')
        assert repr(aard) == "Aardwegobject(1, 'taTEL', 'Wegverbinding volgens TeleAtlas.')"
class TestAardterreinobject:
    """Tests for the Aardterreinobject code object."""

    def test_repr(self):
        """repr renders the id as an int with the other fields quoted."""
        aard = Aardterreinobject('1', 'kadPerceel', 'Perceel volgens het Kadaster.')
        assert repr(aard) == "Aardterreinobject(1, 'kadPerceel', 'Perceel volgens het Kadaster.')"
class TestStatushuisnummer:
    """Tests for the Statushuisnummer code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        status = Statushuisnummer('1', 'voorgesteld', None)
        assert repr(status) == "Statushuisnummer(1, 'voorgesteld', 'None')"
class TestStatussubadres:
    """Tests for the Statussubadres code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        status = Statussubadres('1', 'voorgesteld', None)
        assert repr(status) == "Statussubadres(1, 'voorgesteld', 'None')"
class TestStatusstraatnaam:
    """Tests for the Statusstraatnaam code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        status = Statusstraatnaam('1', 'voorgesteld', None)
        assert repr(status) == "Statusstraatnaam(1, 'voorgesteld', 'None')"
class TestStatuswegsegment:
    """Tests for the Statuswegsegment code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        status = Statuswegsegment('1', 'vergunningAangevraagd', None)
        assert repr(status) == "Statuswegsegment(1, 'vergunningAangevraagd', 'None')"
class TestGeometriemethodewegsegment:
    """Tests for the Geometriemethodewegsegment code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        methode = Geometriemethodewegsegment('2', 'opmeting', None)
        assert repr(methode) == "Geometriemethodewegsegment(2, 'opmeting', 'None')"
class TestStatusgebouw:
    """Tests for the Statusgebouw code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        status = Statusgebouw('1', 'vergunningAangevraagd', None)
        assert repr(status) == "Statusgebouw(1, 'vergunningAangevraagd', 'None')"
class TestGeometriemethodegebouw:
    """Tests for the Geometriemethodegebouw code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        methode = Geometriemethodegebouw('2', 'opmeting', None)
        assert repr(methode) == "Geometriemethodegebouw(2, 'opmeting', 'None')"
class TestHerkomstadrespositie:
    """Tests for the Herkomstadrespositie code object."""

    def test_repr(self):
        """A None definitie is rendered as the string 'None' in repr."""
        herkomst = Herkomstadrespositie('6', 'manueleAanduidingVanToegangTotDeWeg', None)
        assert repr(herkomst) == "Herkomstadrespositie(6, 'manueleAanduidingVanToegangTotDeWeg', 'None')"
class TestStraat:
    """Tests for the Straat (street) resource of the CRAB gateway.

    The crab_gateway/crab_service fixtures are presumably defined in a
    shared conftest; the service is a mock whose return values mimic the
    CRAB SOAP responses.
    """

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        s = Straat(
            1,
            'Acacialaan',
            1,
            Statusstraatnaam(3, 'inGebruik', None),
            'Acacialaan', 'nl', None, None,
            Metadata(
                '1830-01-01 00:00:00',
                '2013-04-12 20:07:25.960000',
                Bewerking(3, '', ''),
                Organisatie(1, '', '')
            )
        )
        assert s.id == 1
        assert s.label == 'Acacialaan'
        # namen combines the primary and secondary (name, language) pairs.
        assert s.namen == (('Acacialaan', 'nl'), (None, None))
        assert int(s.status_id) == 3
        assert isinstance(s.status, Statusstraatnaam)
        assert int(s.gemeente_id) == 1
        assert isinstance(s.metadata, Metadata)
        assert s.metadata.begin_datum == '1830-01-01 00:00:00'
        assert s.metadata.begin_tijd == '2013-04-12 20:07:25.960000'
        assert isinstance(s.metadata.begin_bewerking, Bewerking)
        assert int(s.metadata.begin_bewerking.id) == 3
        assert isinstance(s.metadata.begin_organisatie, Organisatie)
        assert int(s.metadata.begin_organisatie.id) == 1
        assert 'Acacialaan (1)' == str(s)
        assert "Straat(1, 'Acacialaan', 1, 3)" == repr(s)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Attributes not passed to the constructor are fetched via the gateway."""
        crab_service.ListStatusStraatnamen.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = Mock(
            StraatnaamId=1, BeginBewerking=1, BeginOrganisatie=1,
            StraatnaamLabel='Acacialaan', Straatnaam='Acacialaan',
            TaalCode='nl', StraatnaamTweedeTaal=None, TaalCodeTweedeTaal=None
        )
        crab_service.GetGemeenteByGemeenteId.return_value = Mock(
            GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1
        )
        s = Straat(1, 'Acacialaan', 1, 3, 'Acacialaan', 'nl', None, None)
        s.set_gateway(crab_gateway)
        assert s.id == 1
        assert s.label == 'Acacialaan'
        assert int(s.status.id) == 3
        assert s.namen == (('Acacialaan', 'nl'), (None, None))
        assert int(s.gemeente.id) == 1
        s.metadata.set_gateway(crab_gateway)
        assert isinstance(s.metadata, Metadata)
        assert s.metadata.begin_datum is not None
        assert s.metadata.begin_tijd is not None
        assert isinstance(s.metadata.begin_bewerking, Bewerking)
        assert int(s.metadata.begin_bewerking.id) == 1
        assert isinstance(s.metadata.begin_organisatie, Organisatie)
        assert int(s.metadata.begin_organisatie.id) == 1

    def test_str_and_repr_dont_lazy_load(self):
        """str() and repr() must not trigger any gateway calls."""
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        assert 'Acacialaan (1)' == str(s)
        assert "Straat(1, 'Acacialaan', 1, 3)" == repr(s)

    def test_check_gateway_not_set(self):
        """check_gateway raises RuntimeError when no gateway is attached."""
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        with pytest.raises(RuntimeError):
            s.check_gateway()

    def test_huisnummers(self, crab_gateway, crab_service):
        """The huisnummers property lists the street's house numbers."""
        crab_service.ListHuisnummersWithStatusByStraatnaamId.return_value = (
            Mock(HuisnummerWithStatusItem=[Mock(HuisnummerId=1)])
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        huisnummers = s.huisnummers
        assert isinstance(huisnummers, list)

    def test_taal(self, crab_gateway, crab_service):
        """The taal property resolves to a Taal instance via the gateway."""
        crab_service.ListTalen.return_value = Mock(CodeItem=[Mock(Code='nl')])
        crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = Mock(
            StraatnaamId=1, BeginBewerking=1, BeginOrganisatie=1,
            StraatnaamLabel='Acacialaan', Straatnaam='Acacialaan',
            TaalCode='nl', StraatnaamTweedeTaal=None, TaalCodeTweedeTaal=None
        )
        crab_service.GetGemeenteByGemeenteId.return_value = Mock(
            GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1,
            TaalCode='nl'
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        taal = s.taal
        assert isinstance(taal, Taal)
        assert s.taal.id == 'nl'

    def test_gemeente(self, crab_gateway, crab_service):
        """The gemeente property resolves to the parent Gemeente."""
        crab_service.GetGemeenteByGemeenteId.return_value = Mock(
            GemeenteId=1, GewestId=1, BeginBewerking=1, BeginOrganisatie=1
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        gemeente = s.gemeente
        assert isinstance(gemeente, Gemeente)

    def test_status(self, crab_gateway, crab_service):
        """The status property resolves to a Statusstraatnaam."""
        crab_service.ListStatusStraatnamen.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        status = s.status
        assert isinstance(status, Statusstraatnaam)

    def test_wegobjecten(self, crab_gateway, crab_service):
        """The wegobjecten property lists the street's road objects."""
        crab_service.ListWegobjectenByStraatnaamId.return_value = Mock(
            WegobjectItem=[Mock(IdentificatorWegobject=1,
                                AardWegobject=1)]
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        wegobjecten = s.wegobjecten
        assert isinstance(wegobjecten, list)
        assert isinstance(wegobjecten[0], Wegobject)

    def test_wegsegmenten(self, crab_gateway, crab_service):
        """The wegsegmenten property lists the street's road segments."""
        crab_service.ListWegsegmentenByStraatnaamId.return_value = Mock(
            WegsegmentItem=[Mock()]
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        wegsegmenten = s.wegsegmenten
        assert isinstance(wegsegmenten, list)
        assert isinstance(wegsegmenten[0], Wegsegment)

    def test_bounding_box(self, crab_gateway, crab_service):
        """bounding_box is derived from the street's road segment geometry."""
        crab_service.ListWegsegmentenByStraatnaamId.return_value = Mock(
            WegsegmentItem=[Mock()]
        )
        crab_service.GetWegsegmentByIdentificatorWegsegment.return_value = (
            Mock(IdentificatorWegsegment='108724', BeginBewerking=1,
                 BeginOrganisatie=1,
                 Geometrie='LINESTRING (150339.255243488 201166.401677653,'
                           '150342.836939491 201165.832525652,'
                           '150345.139531493 201165.466573652,'
                           '150349.791371495 201164.769421652)')
        )
        s = Straat(1, 'Acacialaan', 1, 3, None, None, None, None)
        s.set_gateway(crab_gateway)
        bounding = s.bounding_box
        assert isinstance(bounding, list)
        assert len(bounding) == 4
class TestHuisnummer:
    """Tests for the Huisnummer (house number) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        h = Huisnummer(
            1,
            Statushuisnummer(3, 'inGebruik', None),
            "51",
            17718,
            Metadata(
                '1830-01-01 00:00:00',
                '2011-04-29 13:27:40.230000',
                Bewerking(1, '', ''),
                Organisatie(5, '', '')
            )
        )
        assert h.id == 1
        assert h.huisnummer == "51"
        assert int(h.status_id) == 3
        assert isinstance(h.status, Statushuisnummer)
        assert int(h.straat_id) == 17718
        assert isinstance(h.metadata, Metadata)
        assert h.metadata.begin_datum == '1830-01-01 00:00:00'
        # Fix: was `assert h.metadata.begin_tijd, '2011-...'` — an always-true
        # assert where the timestamp was the failure message, not a comparison.
        assert h.metadata.begin_tijd == '2011-04-29 13:27:40.230000'
        assert isinstance(h.metadata.begin_bewerking, Bewerking)
        assert int(h.metadata.begin_bewerking.id) == 1
        assert isinstance(h.metadata.begin_organisatie, Organisatie)
        assert int(h.metadata.begin_organisatie.id) == 5
        assert '51 (1)' == str(h)
        assert "Huisnummer(1, 3, '51', 17718)" == repr(h)

    def test_str_dont_lazy_load(self):
        """str() must not trigger any gateway calls."""
        h = Huisnummer(1, 3, '51', 17718)
        assert '51 (1)' == str(h)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Attributes not passed to the constructor are fetched via the gateway."""
        crab_service.ListStatusHuisnummers.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        crab_service.GetStraatnaamWithStatusByStraatnaamId.return_value = Mock(
            StraatnaamId=17718, BeginBewerking=1, BeginOrganisatie=1
        )
        crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = Mock(
            HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        assert h.id == 1
        assert int(h.status.id) == 3
        assert h.huisnummer == "51"
        assert int(h.straat.id) == 17718
        h.metadata.set_gateway(crab_gateway)
        assert isinstance(h.metadata, Metadata)
        # Fix: replaced `not x == None` with the idiomatic identity check.
        assert h.metadata.begin_datum is not None
        assert h.metadata.begin_tijd is not None
        assert isinstance(h.metadata.begin_bewerking, Bewerking)
        assert int(h.metadata.begin_bewerking.id) == 1
        assert isinstance(h.metadata.begin_organisatie, Organisatie)
        assert int(h.metadata.begin_organisatie.id) == 1

    def test_postkanton(self, crab_gateway, crab_service):
        """The postkanton property resolves to a Postkanton."""
        crab_service.GetPostkantonByHuisnummerId.return_value = Mock(
            PostkantonCode=1
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        postkanton = h.postkanton
        assert isinstance(postkanton, Postkanton)

    def test_terreinobjecten(self, crab_gateway, crab_service):
        """The terreinobjecten property lists the linked terrain objects."""
        crab_service.ListTerreinobjectenByHuisnummerId.return_value = Mock(
            TerreinobjectItem=[Mock()]
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        terreinobjecten = h.terreinobjecten
        assert isinstance(terreinobjecten, list)

    def test_percelen(self, crab_gateway, crab_service):
        """The percelen property lists the linked cadastral parcels."""
        crab_service.ListPercelenByHuisnummerId.return_value = Mock(
            PerceelItem=[Mock()]
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        percelen = h.percelen
        assert isinstance(percelen, list)

    def test_gebouwen(self, crab_gateway, crab_service):
        """The gebouwen property lists the linked buildings."""
        crab_service.ListGebouwenByHuisnummerId.return_value = Mock(
            GebouwItem=[Mock(IdentificatorGebouw=1)]
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        gebouwen = h.gebouwen
        assert isinstance(gebouwen, list)

    def test_subadressen(self, crab_gateway, crab_service):
        """The subadressen property lists the linked sub-addresses."""
        crab_service.ListSubadressenWithStatusByHuisnummerId.return_value = (
            Mock(SubadresWithStatusItem=[Mock(SubadresId=1)])
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        subadressen = h.subadressen
        assert isinstance(subadressen, list)

    def test_adresposities(self, crab_gateway, crab_service):
        """The adresposities property lists the linked address positions."""
        crab_service.ListAdrespositiesByHuisnummerId.return_value = Mock(
            AdrespositieItem=[Mock()]
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        adresposities = h.adresposities
        assert isinstance(adresposities, list)

    def test_status(self, crab_gateway, crab_service):
        """The status property resolves to a Statushuisnummer."""
        crab_service.ListStatusHuisnummers.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        status = h.status
        assert isinstance(status, Statushuisnummer)

    def test_bounding_box(self, crab_gateway, crab_service):
        """bounding_box is derived from the linked terrain objects."""
        crab_service.ListTerreinobjectenByHuisnummerId.return_value = Mock(
            TerreinobjectItem=[Mock()]
        )
        crab_service.GetTerreinobjectByIdentificatorTerreinobject\
            .return_value = Mock(
                IdentificatorTerreinobject='13040_C_1747_G_002_00',
                BeginBewerking=1, BeginOrganisatie=1
            )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        bounding = h.bounding_box
        assert isinstance(bounding, list)
        assert len(bounding) == 4

    def test_check_gateway_not_set(self):
        """check_gateway raises RuntimeError when no gateway is attached."""
        h = Huisnummer(1, 3, '51', 17718)
        with pytest.raises(RuntimeError):
            h.check_gateway()

    def test_postadres(self, crab_gateway, crab_service):
        """The postadres property returns the formatted postal address."""
        crab_service.GetPostadresByHuisnummerId.return_value = Mock(
            Postadres='Steenweg op Oosthoven 51, 2300 Turnhout'
        )
        h = Huisnummer(1, 3, '51', 17718)
        h.set_gateway(crab_gateway)
        assert h.postadres == 'Steenweg op Oosthoven 51, 2300 Turnhout'
class TestPostkanton:
    """Tests for the Postkanton (postal canton) resource."""

    def test_fully_initialised(self):
        """id, str and repr all reflect the canton code."""
        postkanton = Postkanton(2630)
        assert repr(postkanton) == 'Postkanton(2630)'
        assert str(postkanton) == 'Postkanton 2630'
        assert postkanton.id == 2630
class TestWegobject:
    """Tests for the Wegobject (road object) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        w = Wegobject(
            "53839893",
            Aardwegobject(4, 'ntLink', 'Wegverbinding volgens NavTeq.'),
            (150753.46, 200148.41),
            (150693.58, 200080.56, 150813.35, 200216.27),
            Metadata(
                '1830-01-01 00:00:00',
                '2008-04-17 16:32:11.753000',
                Bewerking(1, '', ''),
                Organisatie(8, '', '')
            )
        )
        assert w.id == "53839893"
        assert w.centroid == (150753.46, 200148.41)
        assert w.bounding_box == (150693.58, 200080.56, 150813.35, 200216.27)
        assert int(w.aard_id) == 4
        assert isinstance(w.aard, Aardwegobject)
        assert isinstance(w.metadata, Metadata)
        assert w.metadata.begin_datum == '1830-01-01 00:00:00'
        assert w.metadata.begin_tijd == '2008-04-17 16:32:11.753000'
        assert isinstance(w.metadata.begin_bewerking, Bewerking)
        assert int(w.metadata.begin_bewerking.id) == 1
        assert isinstance(w.metadata.begin_organisatie, Organisatie)
        assert int(w.metadata.begin_organisatie.id) == 8
        assert 'Wegobject 53839893' == str(w)
        assert 'Wegobject(53839893)' == repr(w)

    def test_check_gateway_not_set(self):
        """check_gateway raises RuntimeError when no gateway is attached."""
        w = Wegobject(1, 4)
        with pytest.raises(RuntimeError):
            w.check_gateway()

    def test_aard(self, crab_gateway, crab_service):
        """The aard property resolves to an Aardwegobject via the gateway."""
        crab_service.ListAardWegobjecten.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        w = Wegobject("53839893", 4)
        w.set_gateway(crab_gateway)
        aard = w.aard
        assert isinstance(aard, Aardwegobject)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Geometry and metadata are fetched lazily through the gateway."""
        crab_service.ListAardWegobjecten.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        crab_service.GetWegobjectByIdentificatorWegobject.return_value = Mock(
            IdentificatorWegobject='53839893', BeginBewerking=1,
            BeginOrganisatie=1, CenterX=150753.46, CenterY=200148.41,
            MinimumX=150693.58, MinimumY=200080.56, MaximumX=150813.35,
            MaximumY=200216.27
        )
        w = Wegobject("53839893", 4)
        w.set_gateway(crab_gateway)
        assert w.id == "53839893"
        assert int(w.aard.id) == 4
        assert w.centroid == (150753.46, 200148.41)
        assert w.bounding_box == (150693.58, 200080.56, 150813.35, 200216.27)
        w.metadata.set_gateway(crab_gateway)
        assert isinstance(w.metadata, Metadata)
        # Fix: replaced `not x == None` with the idiomatic identity check.
        assert w.metadata.begin_datum is not None
        assert w.metadata.begin_tijd is not None
        assert isinstance(w.metadata.begin_bewerking, Bewerking)
        assert int(w.metadata.begin_bewerking.id) == 1
        assert isinstance(w.metadata.begin_organisatie, Organisatie)
        assert int(w.metadata.begin_organisatie.id) == 1
class TestWegsegment:
    """Tests for the Wegsegment (road segment) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        # Bind the WKT once so the constructor argument and the expected
        # value in the assertion cannot drift apart.
        geometrie = (
            'LINESTRING (150339.255243488 201166.401677653,'
            '150342.836939491 201165.832525652,'
            '150345.139531493 201165.466573652,'
            '150349.791371495 201164.769421652,'
            '150352.512459494 201164.36161365,'
            '150358.512331501 201163.46241365,'
            '150375.039179511 201156.606669646,'
            '150386.901963517 201150.194893643,'
            '150397.470027529 201142.865485638,'
            '150403.464011535 201135.266637631,'
            '150407.825739533 201127.481037624,'
            '150414.301515542 201109.016653612,'
            '150431.792971551 201057.519821577,'
            '150442.85677956 201026.858701557,'
            '150454.530123569 200999.312717538,'
            '150472.404939577 200955.342029508,'
            '150483.516619585 200927.052237488,'
            '150500.807755597 200883.890765458,'
            '150516.94650761 200844.146253429,'
            '150543.214411631 200773.35943738,'
            '150546.079307631 200764.489805374,'
            '150548.592075631 200754.511565369)'
        )
        w = Wegsegment(
            "108724",
            Statuswegsegment(4, 'inGebruik', None),
            Geometriemethodewegsegment(3, 'grb', None),
            geometrie,
            Metadata(
                '1830-01-01 00:00:00',
                '2013-04-12 20:12:12.687000',
                Bewerking(3, '', ''),
                Organisatie(1, '', '')
            )
        )
        assert w.id == "108724"
        assert w.geometrie == geometrie
        assert int(w.status_id) == 4
        assert isinstance(w.status, Statuswegsegment)
        assert int(w._methode_id) == 3
        assert isinstance(w.methode, Geometriemethodewegsegment)
        assert isinstance(w.metadata, Metadata)
        assert w.metadata.begin_datum == '1830-01-01 00:00:00'
        assert w.metadata.begin_tijd == '2013-04-12 20:12:12.687000'
        assert isinstance(w.metadata.begin_bewerking, Bewerking)
        assert int(w.metadata.begin_bewerking.id) == 3
        assert isinstance(w.metadata.begin_organisatie, Organisatie)
        assert int(w.metadata.begin_organisatie.id) == 1
        assert 'Wegsegment 108724' == str(w)
        assert 'Wegsegment(108724)' == repr(w)

    def test_check_gateway_not_set(self):
        """check_gateway raises RuntimeError when no gateway is attached."""
        w = Wegsegment(1, 4)
        with pytest.raises(RuntimeError):
            w.check_gateway()

    def test_status(self, crab_gateway, crab_service):
        """The status property resolves to a Statuswegsegment."""
        crab_service.ListStatusWegsegmenten.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        w = Wegsegment('108724', 4)
        w.set_gateway(crab_gateway)
        status = w.status
        assert isinstance(status, Statuswegsegment)

    def test_methode(self, crab_gateway, crab_service):
        """The methode property resolves to a Geometriemethodewegsegment."""
        crab_service.ListGeometriemethodeWegsegmenten.return_value = Mock(
            CodeItem=[Mock(Code=2)]
        )
        crab_service.GetWegsegmentByIdentificatorWegsegment.return_value = (
            Mock(IdentificatorWegsegment='108724', BeginBewerking=1,
                 BeginOrganisatie=1, GeometriemethodeWegsegment=2)
        )
        w = Wegsegment('108724', 4)
        w.set_gateway(crab_gateway)
        methode = w.methode
        assert isinstance(methode, Geometriemethodewegsegment)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Geometry, methode and metadata are fetched lazily via the gateway."""
        crab_service.GetWegsegmentByIdentificatorWegsegment.return_value = (
            Mock(IdentificatorWegsegment='108724', BeginBewerking=1,
                 BeginOrganisatie=1, GeometriemethodeWegsegment=3,
                 Geometrie='LINESTRING (150339.255243488 201166.401677653,'
                           '150342.836939491 201165.832525652,'
                           '150345.139531493 201165.466573652,'
                           '150349.791371495 201164.769421652)'
                 )
        )
        crab_service.ListGeometriemethodeWegsegmenten.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        crab_service.ListStatusWegsegmenten.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        w = Wegsegment('108724', 4)
        w.set_gateway(crab_gateway)
        assert w.id == "108724"
        assert int(w.status.id) == 4
        assert int(w.methode.id) == 3
        assert w.geometrie == ('LINESTRING (150339.255243488 201166.401677653,'
                               '150342.836939491 201165.832525652,'
                               '150345.139531493 201165.466573652,'
                               '150349.791371495 201164.769421652)')
        w.metadata.set_gateway(crab_gateway)
        assert isinstance(w.metadata, Metadata)
        # Fix: replaced `not x == None` with the idiomatic identity check.
        assert w.metadata.begin_datum is not None
        assert w.metadata.begin_tijd is not None
        assert isinstance(w.metadata.begin_bewerking, Bewerking)
        assert int(w.metadata.begin_bewerking.id) == 1
        assert isinstance(w.metadata.begin_organisatie, Organisatie)
        assert int(w.metadata.begin_organisatie.id) == 1
class TestTerreinobject:
    """Tests for the Terreinobject (terrain object) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        t = Terreinobject(
            "13040_C_1747_G_002_00",
            Aardterreinobject(
                1,
                'kadPerceel',
                'Perceel volgens het Kadaster.'
            ),
            (190708.59, 224667.59),
            (190700.24, 224649.87, 190716.95, 224701.7),
            Metadata(
                '1998-01-01 00:00:00',
                '2009-09-11 12:46:55.693000',
                Bewerking(3, '', ''),
                Organisatie(3, '', '')
            )
        )
        assert t.id == "13040_C_1747_G_002_00"
        assert t.centroid == (190708.59, 224667.59)
        assert t.bounding_box == (190700.24, 224649.87, 190716.95, 224701.7)
        assert int(t.aard_id) == 1
        assert isinstance(t.aard, Aardterreinobject)
        assert isinstance(t.metadata, Metadata)
        assert t.metadata.begin_datum == '1998-01-01 00:00:00'
        assert t.metadata.begin_tijd == '2009-09-11 12:46:55.693000'
        assert isinstance(t.metadata.begin_bewerking, Bewerking)
        assert int(t.metadata.begin_bewerking.id) == 3
        assert isinstance(t.metadata.begin_organisatie, Organisatie)
        assert int(t.metadata.begin_organisatie.id) == 3
        assert 'Terreinobject 13040_C_1747_G_002_00' == str(t)
        assert 'Terreinobject(13040_C_1747_G_002_00)' == repr(t)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Geometry and metadata are fetched lazily through the gateway."""
        crab_service.GetTerreinobjectByIdentificatorTerreinobject\
            .return_value = Mock(
                IdentificatorTerreinobject='13040_C_1747_G_002_00',
                BeginBewerking=1, BeginOrganisatie=1,
                CenterX=190708.59, CenterY=224667.58,
                MinimumX=190700.24, MinimumY=224649.87, MaximumX=190716.95,
                MaximumY=224701.7
            )
        crab_service.ListAardTerreinobjecten.return_value = Mock(
            CodeItem=[Mock(Code=1)]
        )
        t = Terreinobject("13040_C_1747_G_002_00", 1)
        t.set_gateway(crab_gateway)
        assert t.id == "13040_C_1747_G_002_00"
        assert t.centroid == (190708.59, 224667.58)
        assert t.bounding_box == (190700.24, 224649.87, 190716.95, 224701.7)
        assert int(t.aard.id) == 1
        t.metadata.set_gateway(crab_gateway)
        assert isinstance(t.metadata, Metadata)
        # Fix: replaced `not x == None` with the idiomatic identity check.
        assert t.metadata.begin_datum is not None
        assert t.metadata.begin_tijd is not None
        assert isinstance(t.metadata.begin_bewerking, Bewerking)
        assert int(t.metadata.begin_bewerking.id) == 1
        assert isinstance(t.metadata.begin_organisatie, Organisatie)
        assert int(t.metadata.begin_organisatie.id) == 1

    def test_aard(self, crab_gateway, crab_service):
        """The aard property resolves to an Aardterreinobject."""
        crab_service.ListAardTerreinobjecten.return_value = Mock(
            CodeItem=[Mock(Code=1)]
        )
        t = Terreinobject("13040_C_1747_G_002_00", 1)
        t.set_gateway(crab_gateway)
        assert isinstance(t.aard, Aardterreinobject)
class TestPerceel:
    """Tests for the Perceel (cadastral parcel) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        perceel = Perceel(
            "13040C1747/00G002",
            (190708.59, 224667.59),
            Metadata(
                '1998-01-01 00:00:00',
                '2009-09-11 12:46:55.693000',
                Bewerking(3, '', ''),
                Organisatie(3, '', '')
            )
        )
        assert 'Perceel 13040C1747/00G002' == str(perceel)
        assert 'Perceel(13040C1747/00G002)' == repr(perceel)
        assert perceel.id == "13040C1747/00G002"
        assert perceel.centroid == (190708.59, 224667.59)
        assert isinstance(perceel.metadata, Metadata)
        assert perceel.metadata.begin_datum == '1998-01-01 00:00:00'
        assert perceel.metadata.begin_tijd == '2009-09-11 12:46:55.693000'
        assert isinstance(perceel.metadata.begin_bewerking, Bewerking)
        assert int(perceel.metadata.begin_bewerking.id) == 3
        assert isinstance(perceel.metadata.begin_organisatie, Organisatie)
        assert int(perceel.metadata.begin_organisatie.id) == 3

    def test_lazy_load(self, crab_gateway, crab_service):
        """Centroid and metadata are fetched lazily through the gateway."""
        crab_service.GetPerceelByIdentificatorPerceel.return_value = Mock(
            IdentificatorPerceel='13040C1747/00G002',
            BeginBewerking=1, BeginOrganisatie=1, CenterX=190708.59,
            CenterY=224667.58,
        )
        perceel = Perceel("13040C1747/00G002")
        perceel.set_gateway(crab_gateway)
        assert perceel.id == "13040C1747/00G002"
        assert perceel.centroid == (190708.59, 224667.58)
        perceel.metadata.set_gateway(crab_gateway)
        assert isinstance(perceel.metadata, Metadata)
        assert perceel.metadata.begin_datum is not None
        assert perceel.metadata.begin_tijd is not None
        assert isinstance(perceel.metadata.begin_bewerking, Bewerking)
        assert int(perceel.metadata.begin_bewerking.id) == 1
        assert isinstance(perceel.metadata.begin_organisatie, Organisatie)
        assert int(perceel.metadata.begin_organisatie.id) == 1

    def test_huisnummers(self, crab_gateway, crab_service):
        """The huisnummers property matches the gateway's own listing."""
        crab_service.GetPerceelByIdentificatorPerceel.return_value = Mock(
            IdentificatorPerceel='13040C1747/00G002',
            BeginBewerking=1, BeginOrganisatie=1
        )
        crab_service.ListHuisnummersWithStatusByIdentificatorPerceel\
            .return_value = Mock(
                HuisnummerWithStatusItem=[Mock(HuisnummerId=1)]
            )
        crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = (
            Mock(HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1)
        )
        perceel = crab_gateway.get_perceel_by_id('13040C1747/00G002')
        via_property = perceel.huisnummers
        assert isinstance(via_property, list)
        via_gateway = crab_gateway.list_huisnummers_by_perceel('13040C1747/00G002')
        assert [hn.id for hn in via_property] == [hn.id for hn in via_gateway]

    def test_postadressen(self, crab_gateway, crab_service):
        """The postadressen property collects the postal addresses of the parcel."""
        crab_service.GetPerceelByIdentificatorPerceel.return_value = Mock(
            IdentificatorPerceel='13040C1747/00G002',
            BeginBewerking=1, BeginOrganisatie=1
        )
        crab_service.ListHuisnummersWithStatusByIdentificatorPerceel\
            .return_value = Mock(
                HuisnummerWithStatusItem=[Mock(HuisnummerId=1)]
            )
        crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = (
            Mock(HuisnummerId=1, BeginBewerking=1, BeginOrganisatie=1,
                 StatusHuisnummer=3)
        )
        crab_service.ListStatusHuisnummers.return_value = Mock(
            CodeItem=[Mock(Code='3')]
        )
        crab_service.GetPostadresByHuisnummerId.return_value = Mock(
            Postadres='Steenweg op Oosthoven 51, 2300 Turnhout'
        )
        perceel = crab_gateway.get_perceel_by_id('13040C1747/00G002')
        postadressen = perceel.postadressen
        assert isinstance(postadressen, list)
        assert postadressen == ['Steenweg op Oosthoven 51, 2300 Turnhout']
class TestGebouw:
    """Tests for the Gebouw (building) resource."""

    def test_fully_initialised(self):
        """Every attribute set in the constructor is exposed without lazy loading."""
        # Bind the WKT once so the constructor argument and the expected
        # value in the assertion cannot drift apart.
        geometrie = (
            "POLYGON ((190712.36432739347 224668.5216938965,"
            "190706.26007138938 224667.54428589717,"
            "190706.03594338894 224668.89276589826,"
            "190704.89699938893 224668.66159789637,"
            "190705.350887388 224666.14575789496,"
            "190708.31754338741 224649.70287788659,"
            "190717.16349539906 224653.81065388769,"
            "190713.40490339696 224663.38582189381,"
            "190712.36432739347 224668.5216938965))"
        )
        g = Gebouw(
            "1538575",
            Aardgebouw(1, 'hoofdgebouw', 'hoofdgebouw volgens het GRB'),
            Statusgebouw(4, 'inGebruik', None),
            Geometriemethodegebouw(3, 'grb', None),
            geometrie,
            Metadata(
                '1830-01-01 00:00:00',
                '2011-05-19 10:51:09.483000',
                Bewerking(1, '', ''),
                Organisatie(5, '', '')
            )
        )
        # Fix: was `assert g.id, 1538575` — an always-true assert with the
        # expected id as the failure message. Gebouw coerces the id to int
        # (see test_lazy_load), so compare against the int value.
        assert g.id == 1538575
        assert int(g.aard_id) == 1
        assert isinstance(g.aard, Aardgebouw)
        assert int(g.status_id) == 4
        assert isinstance(g.status, Statusgebouw)
        assert int(g._methode_id) == 3
        assert isinstance(g.methode, Geometriemethodegebouw)
        assert g.geometrie == geometrie
        assert isinstance(g.metadata, Metadata)
        assert g.metadata.begin_datum == '1830-01-01 00:00:00'
        assert g.metadata.begin_tijd == '2011-05-19 10:51:09.483000'
        assert isinstance(g.metadata.begin_bewerking, Bewerking)
        assert int(g.metadata.begin_bewerking.id) == 1
        assert isinstance(g.metadata.begin_organisatie, Organisatie)
        assert int(g.metadata.begin_organisatie.id) == 5
        assert 'Gebouw 1538575' == str(g)
        assert 'Gebouw(1538575)' == repr(g)

    def test_lazy_load(self, crab_gateway, crab_service):
        """Geometry, status, methode and metadata are fetched lazily."""
        crab_service.ListAardGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=1)]
        )
        crab_service.ListStatusGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        crab_service.ListGeometriemethodeGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        crab_service.GetGebouwByIdentificatorGebouw.return_value = Mock(
            IdentificatorGebouw=1538575, BeginBewerking=1, BeginOrganisatie=1,
            GeometriemethodeGebouw=3,
            Geometrie="POLYGON ((190712.36432739347 224668.5216938965,"
                      "190706.26007138938 224667.54428589717,"
                      "190712.36432739347 224668.5216938965))"
        )
        g = Gebouw("1538575", 1, 4)
        g.set_gateway(crab_gateway)
        assert g.id == 1538575
        assert int(g.aard.id) == 1
        assert int(g.status.id) == 4
        assert int(g.methode.id) == 3
        assert g.geometrie == (
            "POLYGON ((190712.36432739347 224668.5216938965,"
            "190706.26007138938 224667.54428589717,"
            "190712.36432739347 224668.5216938965))"
        )
        g.metadata.set_gateway(crab_gateway)
        assert isinstance(g.metadata, Metadata)
        assert g.metadata.begin_datum is not None
        assert g.metadata.begin_tijd is not None
        assert isinstance(g.metadata.begin_bewerking, Bewerking)
        assert int(g.metadata.begin_bewerking.id) == 1
        assert isinstance(g.metadata.begin_organisatie, Organisatie)
        assert int(g.metadata.begin_organisatie.id) == 1

    def test_aard(self, crab_gateway, crab_service):
        """The aard property resolves to an Aardgebouw."""
        crab_service.ListAardGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=1)]
        )
        g = Gebouw("1538575", 1, 4)
        g.set_gateway(crab_gateway)
        aard = g.aard
        assert isinstance(aard, Aardgebouw)

    def test_status(self, crab_gateway, crab_service):
        """The status property resolves to a Statusgebouw."""
        crab_service.ListStatusGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=4)]
        )
        g = Gebouw("1538575", 1, 4)
        g.set_gateway(crab_gateway)
        status = g.status
        assert isinstance(status, Statusgebouw)

    def test_methode(self, crab_gateway, crab_service):
        """The methode property resolves to a Geometriemethodegebouw."""
        crab_service.ListGeometriemethodeGebouwen.return_value = Mock(
            CodeItem=[Mock(Code=3)]
        )
        crab_service.GetGebouwByIdentificatorGebouw.return_value = Mock(
            IdentificatorGebouw=1538575, BeginBewerking=1, BeginOrganisatie=1,
            GeometriemethodeGebouw=3
        )
        g = Gebouw("1538575", 1, 4)
        g.set_gateway(crab_gateway)
        methode = g.methode
        assert isinstance(methode, Geometriemethodegebouw)
class TestMetadata:
    """Tests for the Metadata record attached to CRAB resources."""

    def test_fully_initialised(self):
        """Fully constructed metadata exposes all begin_* attributes directly."""
        metadata = Metadata(
            '1830-01-01 00:00:00',
            '2003-12-06 21:42:11.117000',
            Bewerking(1, '', ''),
            Organisatie(6, '', '')
        )
        assert str(metadata) == 'Begin datum: 1830-01-01 00:00:00'
        assert metadata.begin_datum == '1830-01-01 00:00:00'
        assert metadata.begin_tijd == '2003-12-06 21:42:11.117000'
        assert isinstance(metadata.begin_bewerking, Bewerking)
        assert int(metadata.begin_bewerking.id) == 1
        assert isinstance(metadata.begin_organisatie, Organisatie)
        assert int(metadata.begin_organisatie.id) == 6

    def test_lazy_load(self, crab_gateway, crab_service):
        """Numeric bewerking/organisatie ids are resolved through the gateway."""
        metadata = Metadata(
            '1830-01-01 00:00:00',
            '2003-12-06 21:42:11.117000',
            1,
            1,
            gateway=crab_gateway
        )
        assert metadata.begin_datum == '1830-01-01 00:00:00'
        assert metadata.begin_tijd == '2003-12-06 21:42:11.117000'
        assert isinstance(metadata.begin_bewerking, Bewerking)
        assert int(metadata.begin_bewerking.id) == 1
        assert isinstance(metadata.begin_organisatie, Organisatie)
        assert int(metadata.begin_organisatie.id) == 1
class TestSubadres:
def test_fully_initialised(self):
s = Subadres(
1120936,
"B",
Statussubadres(3, 'inGebruik', 'None'),
38020,
Aardsubadres(1, 'gemeente', 'Gemeente.'),
Metadata(
'1830-01-01 00:00:00',
'2011-04-29 13:27:40.230000',
Bewerking(1, '', ''),
Organisatie(5, '', '')
)
)
assert s.id == 1120936
assert s.subadres == "B"
assert int(s.status_id) == 3
assert isinstance(s.status, Statussubadres)
assert int(s.huisnummer_id) == 38020
assert isinstance(s.metadata, Metadata)
assert s.metadata.begin_datum == '1830-01-01 00:00:00'
assert s.metadata.begin_tijd, '2011-04-29 13:27:40.230000'
assert isinstance(s.metadata.begin_bewerking, Bewerking)
assert int(s.metadata.begin_bewerking.id) == 1
assert isinstance(s.metadata.begin_organisatie, Organisatie)
assert int(s.metadata.begin_organisatie.id) == 5
assert 'B (1120936)' == str(s)
assert "Subadres(1120936, 3, 'B', 38020)" == repr(s)
def test_str_dont_lazy_load(self):
s = Subadres(1120936, 'B', 3)
assert 'B (1120936)' == str(s)
def test_lazy_load(self, crab_gateway, crab_service):
crab_service.ListStatusHuisnummers.return_value = Mock(
CodeItem=[Mock(Code=3)]
)
crab_service.GetHuisnummerWithStatusByHuisnummerId.return_value = Mock(
HuisnummerId=38020, BeginBewerking=1, BeginOrganisatie=1
)
crab_service.GetSubadresWithStatusBySubadresId.return_value = Mock(
SubadresId=1120936, BeginBewerking=1, BeginOrganisatie=1,
AardSubadres=2
)
crab_service.ListAardSubadressen.return_value = Mock(
CodeItem=[Mock(Code=2)]
)
s = Subadres(1120936, 'B', 3)
s.set_gateway(crab_gateway)
assert s.id == 1120936
assert int(s.status.id) == 3
assert s.subadres == "B"
assert isinstance(s.aard, Aardsubadres)
assert int(s.huisnummer.id) == 38020
s.metadata.set_gateway(crab_gateway)
assert isinstance(s.metadata, Metadata)
assert s.metadata.begin_datum is not None
assert s.metadata.begin_tijd is not None
assert isinstance(s.metadata.begin_bewerking, Bewerking)
assert int(s.metadata.begin_bewerking.id) == 1
assert isinstance(s.metadata.begin_organisatie, Organisatie)
assert int(s.metadata.begin_organisatie.id) == 1
def test_check_gateway_not_set(self):
s = Subadres(1, 3, 'B', 129462)
with pytest.raises(RuntimeError):
s.check_gateway()
def test_adresposities(self, crab_gateway, crab_service):
crab_service.ListAdrespositiesBySubadresId.return_value = Mock(
AdrespositieItem=[Mock()]
)
s = Subadres(1120936, 'B', 3)
s.set_gateway(crab_gateway)
adresposities = s.adresposities
assert isinstance(adresposities, list)
def test_postadres(self, crab_gateway, crab_service):
    """The postadres property is read straight off the service response."""
    expected = 'Antoon van Brabantstraat 7 bus B, 2630 Aartselaar'
    crab_service.GetPostadresBySubadresId.return_value = Mock(
        Postadres=expected
    )
    subadres = Subadres(1120936, 'B', 3)
    subadres.set_gateway(crab_gateway)
    assert subadres.postadres == expected
class TestAdrespositie:
    """Tests for the Adrespositie gateway object."""

    def test_fully_initialised(self):
        """A fully initialised Adrespositie exposes all constructor data."""
        pos = Adrespositie(
            4087928,
            Herkomstadrespositie(
                '6',
                'manueleAanduidingVanToegangTotDeWeg',
                None
            ),
            """POINT(190705.34 224675.26)""",
            Aardadres(
                '1',
                'subAdres',
                'Aanduiding van een plaats op een huisnummer'
            ),
            Metadata(
                '1830-01-01 00:00:00',
                '',
                None,
                None
            )
        )
        assert 4087928 == pos.id
        assert '6' == str(pos.herkomst.id)
        assert 'POINT(190705.34 224675.26)' == pos.geometrie
        assert '1' == str(pos.aard.id)
        assert isinstance(pos.metadata, Metadata)
        assert '1830-01-01 00:00:00' == pos.metadata.begin_datum
        assert str(pos) == 'Adrespositie 4087928'
        assert repr(pos) == "Adrespositie(4087928, 6)"

    def test_str_dont_lazy_load(self, crab_gateway):
        """str() must not trigger lazy loading."""
        pos = Adrespositie(4087928, 2)
        pos.set_gateway(crab_gateway)
        assert str(pos) == 'Adrespositie 4087928'

    def test_lazy_load(self, crab_gateway, crab_service):
        """Attribute access resolves lazily through the gateway."""
        crab_service.GetAdrespositieByAdrespositieId.return_value = Mock(
            AdrespositieId=4428005, BeginBewerking=1, BeginOrganisatie=1,
            Geometrie='POINT (74414.91 225777.36)', AardAdres=2,
            BeginDatum='1830-01-01 00:00:00'
        )
        crab_service.ListAardAdressen.return_value = Mock(
            CodeItem=[Mock(Code=2)]
        )
        pos = Adrespositie(4428005, 3)
        pos.set_gateway(crab_gateway)
        assert 4428005 == pos.id
        assert 3 == pos.herkomst_id
        assert 'POINT (74414.91 225777.36)' == str(pos.geometrie)
        assert 2 == int(pos.aard.id)
        assert isinstance(pos.metadata, Metadata)
        assert '1830-01-01 00:00:00' == pos.metadata.begin_datum

    def test_check_gateway_not_set(self):
        """check_gateway raises without a configured gateway."""
        pos = Adrespositie(4087928, 2)
        with pytest.raises(RuntimeError):
            pos.check_gateway()
|
OnroerendErfgoed/crabpy
|
tests/gateway/test_crab.py
|
Python
|
mit
| 88,239
|
from bs4 import BeautifulSoup
def cap_xml_to_dict(cap_xml):
    """Parse a CAP alert XML document into a plain dict.

    This function assumes that it's being passed valid CAP XML.
    """
    alert = BeautifulSoup(cap_xml, "xml").alert
    info = alert.info
    areas = []
    for area in info.find_all('area'):
        polygons = [
            cap_xml_polygon_to_list(polygon.text)
            for polygon in area.find_all('polygon')
        ]
        areas.append({"name": area.areaDesc.text, "polygons": polygons})
    # <references> is optional: only present on follow-ups that point at
    # previous events belonging to the same alert.
    references = alert.references.text if alert.references else None
    return {
        "msgType": alert.msgType.text,
        "reference": alert.identifier.text,
        "references": references,
        "cap_event": info.event.text,
        "category": info.category.text,
        "expires": info.expires.text,
        "content": info.description.text,
        "areas": areas,
    }
def cap_xml_polygon_to_list(polygon_string):
    """Convert a CAP <polygon> string ("x,y x,y ...") into a list of
    [float, float] coordinate pairs."""
    pairs = polygon_string.strip().split(' ')
    return [[float(value) for value in pair.split(',')] for pair in pairs]
|
alphagov/notifications-api
|
app/broadcast_message/translators.py
|
Python
|
mit
| 1,183
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python 2 CLI for CloudFlare's legacy client API (api_json.html): toggle
# dev mode, purge the whole cache, or purge a single URL for a zone.
# Credentials come from the environment: `tkn` (API key) and `email`.

__author__ = 'Said Sef'

import os
import json
import requests as r
from optparse import OptionParser

# API credentials from the environment; validated after option parsing.
tkn = os.environ.get("tkn", None)
email = os.environ.get("email", None)

parser = OptionParser()
parser.add_option("-s", "--site", dest="site", help="site name - abc.com", metavar="SITE")
parser.add_option("-t", "--task", dest="task", help="task name - [all,devmode,purge,file_purge]", metavar="TASK")
parser.add_option("-u", "--url", dest="url", help="url name - /this-is-a-url", metavar="URL")
(options, args) = parser.parse_args()

# Required arguments / environment; parser.error() prints and exits.
if not options.site:
    parser.error('Site name not given')
if not options.task:
    parser.error('Task name not given')
if tkn is None or email is None:
    parser.error('tkn and/or email has not been set')

site = options.site
task = options.task

# file_purge additionally needs the URL to purge.
if "file_purge" in task and not options.url:
    parser.error('URL name not given')
elif "file_purge" in task and options.url:
    url = options.url

# Base request parameters; the task-specific action `a` is merged in below.
params = {
    'tkn': tkn,
    'email': email,
    'z': site
}
# NOTE(review): these substring tests are not exclusive ("file_purge" also
# contains "purge"), so later matches overwrite 'a' — confirm intended.
if "all" in task:
    params.update({'a':'rec_load_all'})
if "devmode" in task:
    params.update({'a': 'devmode', 'v': '1'})
if "purge" in task:
    params.update({'a': 'fpurge_ts', 'v': '1'})
if "file_purge" in task:
    params.update({'a': 'zone_file_purge', 'v': '1', 'url': url})

j = r.post("https://www.cloudflare.com/api_json.html", params=params)
j = j.json()
# NOTE(review): assumes the JSON response always carries a 'result' key.
print '|==>', 'result', j['result'], '<==|'
print json.dumps(j, sort_keys=False, indent=2)
|
saidsef/cloudflare
|
lib/cf_api.py
|
Python
|
mit
| 1,490
|
import numpy as np
import re
import itertools
from collections import Counter
import numpy as np
import time
import gc
from tensorflow.contrib import learn
from gensim.models.word2vec import Word2Vec
import gzip
from random import random
from preprocess import MyVocabularyProcessor
import sys
# Python 2 hack: reload sys to restore setdefaultencoding (removed by
# site.py at startup) so implicit str/unicode coercions use UTF-8.
# Not valid on Python 3.
reload(sys)
sys.setdefaultencoding("utf-8")
class InputHelper(object):
    """Helpers for loading TSV sentence-pair data, word embeddings and
    vocabularies for a siamese text-similarity model, plus batching."""

    # Shared word -> embedding vector map, filled by loadW2V().
    pre_emb = dict()
    # Cached vocabulary processor restored from disk by getVocab().
    vocab_processor = None

    def cleanText(self, s):
        """Normalise *s*: drop non-ASCII, strip most punctuation, pad
        numbers and '$' with spaces, collapse whitespace, lower-case."""
        s = re.sub(r"[^\x00-\x7F]+", " ", s)
        s = re.sub(r'[\~\!\`\^\*\{\}\[\]\#\<\>\?\+\=\-\_\(\)]+', "", s)
        s = re.sub(r'( [0-9,\.]+)', r"\1 ", s)
        s = re.sub(r'\$', " $ ", s)
        s = re.sub('[ ]+', ' ', s)
        return s.lower()

    def getVocab(self, vocab_path, max_document_length, filter_h_pad):
        """Restore (once) and cache the vocabulary processor saved at
        *vocab_path*; document length is shortened by *filter_h_pad*."""
        if self.vocab_processor is None:  # idiom fix: was `== None`
            print('locading vocab')
            vocab_processor = MyVocabularyProcessor(
                max_document_length - filter_h_pad, min_frequency=0)
            self.vocab_processor = vocab_processor.restore(vocab_path)
        return self.vocab_processor

    def loadW2V(self, emb_path, type="bin"):
        """Load pre-trained embeddings into ``self.pre_emb``.

        *type* is "textgz" (gzipped word2vec text), "text" (plain word2vec
        text) or anything else, treated as a gensim binary model (default
        "bin").  NOTE: ``type`` shadows the builtin but is kept so keyword
        callers don't break.
        """
        print("Loading W2V data...")
        num_keys = 0
        if type == "textgz":
            # this seems faster than gensim non-binary load
            for line in gzip.open(emb_path):
                l = line.strip().split()
                st = l[0].lower()
                self.pre_emb[st] = np.asarray(l[1:])
            num_keys = len(self.pre_emb)
        elif type == "text":
            # BUGFIX: this used to be an independent `if`, so for
            # type == "textgz" the `else` branch below also executed and
            # clobbered the embeddings just read from the gzip file.
            # this seems faster than gensim non-binary load
            for line in open(emb_path):
                l = line.strip().split()
                st = l[0].lower()
                self.pre_emb[st] = np.asarray(l[1:])
            num_keys = len(self.pre_emb)
        else:
            self.pre_emb = Word2Vec.load_word2vec_format(emb_path, binary=True)
            self.pre_emb.init_sims(replace=True)
            num_keys = len(self.pre_emb.vocab)
        print("loaded word2vec len ", num_keys)
        gc.collect()

    def deletePreEmb(self):
        """Drop the embedding map and reclaim its memory."""
        self.pre_emb = dict()
        gc.collect()

    def getTsvData(self, filepath):
        """Read labelled pairs from a TSV file (text1, text2, label).

        Pair order is randomly swapped to avoid positional bias.
        Returns three aligned numpy arrays (x1, x2, y).
        """
        print("Loading training data from " + filepath)
        x1 = []
        x2 = []
        y = []
        # positive samples from file
        for line in open(filepath):
            l = line.strip().split("\t")
            # BUGFIX: the label column (l[2]) is required too; lines with
            # only two fields previously crashed with IndexError below.
            if len(l) < 3:
                continue
            if random() > 0.5:
                x1.append(l[0].lower())
                x2.append(l[1].lower())
            else:
                x1.append(l[1].lower())
                x2.append(l[0].lower())
            y.append(int(l[2]))
        return np.asarray(x1), np.asarray(x2), np.asarray(y)

    def getTsvDataCharBased(self, filepath):
        """Read positive pairs (text1, text2, label 1) from a TSV file and
        generate an equal number of random negative pairs (label 0)."""
        print("Loading training data from " + filepath)
        x1 = []
        x2 = []
        y = []
        # positive samples from file
        for line in open(filepath):
            l = line.strip().split("\t")
            if len(l) < 2:
                continue
            if random() > 0.5:
                x1.append(l[0].lower())
                x2.append(l[1].lower())
            else:
                x1.append(l[1].lower())
                x2.append(l[0].lower())
            y.append(1)  # np.array([0,1]))
        # generate random negative samples by pairing shuffled texts
        combined = np.asarray(x1 + x2)
        shuffle_indices = np.random.permutation(np.arange(len(combined)))
        combined_shuff = combined[shuffle_indices]
        for i in range(len(combined)):  # `range` works on Py2 and Py3
            x1.append(combined[i])
            x2.append(combined_shuff[i])
            y.append(0)  # np.array([1,0]))
        return np.asarray(x1), np.asarray(x2), np.asarray(y)

    def getTsvTestData(self, filepath):
        """Read labelled test pairs from a TSV file (label, text1, text2)."""
        print("Loading testing/labelled data from " + filepath)
        x1 = []
        x2 = []
        y = []
        # positive samples from file
        for line in open(filepath):
            l = line.strip().split("\t")
            if len(l) < 3:
                continue
            x1.append(l[1].lower())
            x2.append(l[2].lower())
            y.append(int(l[0]))  # np.array([0,1]))
        return np.asarray(x1), np.asarray(x2), np.asarray(y)

    def batch_iter(self, data, batch_size, num_epochs, shuffle=True):
        """
        Generates a batch iterator for a dataset.

        Yields slices of *data* of at most *batch_size* rows, reshuffled
        at the start of each epoch when *shuffle* is true.  Note: when
        len(data) divides evenly by batch_size the final batch of each
        epoch is empty (num_batches_per_epoch rounds up by one); kept for
        backward compatibility with batch-count bookkeeping.
        """
        data = np.asarray(data)
        data_size = len(data)
        num_batches_per_epoch = int(len(data) / batch_size) + 1
        for epoch in range(num_epochs):
            # Shuffle the data at each epoch
            if shuffle:
                shuffle_indices = np.random.permutation(np.arange(data_size))
                shuffled_data = data[shuffle_indices]
            else:
                shuffled_data = data
            for batch_num in range(num_batches_per_epoch):
                start_index = batch_num * batch_size
                end_index = min((batch_num + 1) * batch_size, data_size)
                yield shuffled_data[start_index:end_index]

    def dumpValidation(self, x1_text, x2_text, y, shuffled_index, dev_idx, i):
        """Write the validation split to ``validation.txt<i>`` as TSV
        lines of (label, text1, text2)."""
        print("dumping validation " + str(i))
        x1_shuffled = x1_text[shuffled_index]
        x2_shuffled = x2_text[shuffled_index]
        y_shuffled = y[shuffled_index]
        x1_dev = x1_shuffled[dev_idx:]
        x2_dev = x2_shuffled[dev_idx:]
        y_dev = y_shuffled[dev_idx:]
        del x1_shuffled
        del y_shuffled
        # The context manager closes the file; the old explicit close()
        # call was redundant.
        with open('validation.txt' + str(i), 'w') as f:
            for text1, text2, label in zip(x1_dev, x2_dev, y_dev):
                f.write(str(label) + "\t" + text1 + "\t" + text2 + "\n")
        del x1_dev
        del y_dev

    # Data Preparatopn
    # ==================================================

    def getDataSets(self, training_paths, max_document_length, percent_dev, batch_size, is_char_based):
        """Load, index and split the training data.

        Returns (train_set, dev_set, vocab_processor, sum_no_of_batches)
        where each set is a tuple of arrays (x1, x2, y).
        """
        if is_char_based:
            x1_text, x2_text, y = self.getTsvDataCharBased(training_paths)
        else:
            x1_text, x2_text, y = self.getTsvData(training_paths)
        # Build vocabulary
        print("Building vocabulary")
        vocab_processor = MyVocabularyProcessor(max_document_length, min_frequency=0, is_char_based=is_char_based)
        vocab_processor.fit_transform(np.concatenate((x2_text, x1_text), axis=0))
        print("Length of loaded vocabulary ={}".format(len(vocab_processor.vocabulary_)))
        sum_no_of_batches = 0
        x1 = np.asarray(list(vocab_processor.transform(x1_text)))
        x2 = np.asarray(list(vocab_processor.transform(x2_text)))
        # Randomly shuffle data (fixed seed for reproducibility)
        np.random.seed(131)
        shuffle_indices = np.random.permutation(np.arange(len(y)))
        x1_shuffled = x1[shuffle_indices]
        x2_shuffled = x2[shuffle_indices]
        y_shuffled = y[shuffle_indices]
        # Negative offset marking where the dev split starts.
        dev_idx = -1 * len(y_shuffled) * percent_dev // 100
        del x1
        del x2
        # Split train/test set
        self.dumpValidation(x1_text, x2_text, y, shuffle_indices, dev_idx, 0)
        # TODO: This is very crude, should use cross-validation
        x1_train, x1_dev = x1_shuffled[:dev_idx], x1_shuffled[dev_idx:]
        x2_train, x2_dev = x2_shuffled[:dev_idx], x2_shuffled[dev_idx:]
        y_train, y_dev = y_shuffled[:dev_idx], y_shuffled[dev_idx:]
        print("Train/Dev split for {}: {:d}/{:d}".format(training_paths, len(y_train), len(y_dev)))
        sum_no_of_batches = sum_no_of_batches + (len(y_train) // batch_size)
        train_set = (x1_train, x2_train, y_train)
        dev_set = (x1_dev, x2_dev, y_dev)
        gc.collect()
        return train_set, dev_set, vocab_processor, sum_no_of_batches

    def getTestDataSet(self, data_path, vocab_path, max_document_length):
        """Load test data and index it with the stored vocabulary."""
        x1_temp, x2_temp, y = self.getTsvTestData(data_path)
        # Build vocabulary
        vocab_processor = MyVocabularyProcessor(max_document_length, min_frequency=0)
        vocab_processor = vocab_processor.restore(vocab_path)
        print(len(vocab_processor.vocabulary_))  # was Py2 `print len(...)`
        x1 = np.asarray(list(vocab_processor.transform(x1_temp)))
        x2 = np.asarray(list(vocab_processor.transform(x2_temp)))
        del vocab_processor
        gc.collect()
        return x1, x2, y
|
dhwajraj/deep-siamese-text-similarity
|
input_helpers.py
|
Python
|
mit
| 8,232
|
from astropy import units as u
from astropy.coordinates import (
HCRS,
ITRS as _ITRS,
BaseRADecFrame,
FunctionTransform,
TimeAttribute,
frame_transform_graph,
)
from astropy.coordinates.builtin_frames.utils import DEFAULT_OBSTIME
from astropy.coordinates.matrix_utilities import rotation_matrix
from poliastro.bodies import (
Jupiter,
Mars,
Mercury,
Moon,
Neptune,
Saturn,
Sun,
Uranus,
Venus,
)
from poliastro.constants import J2000
from poliastro.core.fixed import (
jupiter_rot_elements_at_epoch as jupiter_rot_elements_at_epoch_fast,
mars_rot_elements_at_epoch as mars_rot_elements_at_epoch_fast,
mercury_rot_elements_at_epoch as mercury_rot_elements_at_epoch_fast,
moon_rot_elements_at_epoch as moon_rot_elements_at_epoch_fast,
neptune_rot_elements_at_epoch as neptune_rot_elements_at_epoch_fast,
saturn_rot_elements_at_epoch as saturn_rot_elements_at_epoch_fast,
sun_rot_elements_at_epoch as sun_rot_elements_at_epoch_fast,
uranus_rot_elements_at_epoch as uranus_rot_elements_at_epoch_fast,
venus_rot_elements_at_epoch as venus_rot_elements_at_epoch_fast,
)
from poliastro.frames.equatorial import (
JupiterICRS,
MarsICRS,
MercuryICRS,
MoonICRS,
NeptuneICRS,
SaturnICRS,
UranusICRS,
VenusICRS,
)
# Names exported via `from poliastro.frames.fixed import *`.
# NOTE(review): MoonFixed is defined in this module but not listed here,
# so it is not star-exported — confirm whether that is intentional.
__all__ = [
    "SunFixed",
    "MercuryFixed",
    "VenusFixed",
    "ITRS",
    "MarsFixed",
    "JupiterFixed",
    "SaturnFixed",
    "UranusFixed",
    "NeptuneFixed",
]

# HACK: sphinx-autoapi variable definition
ITRS = _ITRS
class _PlanetaryFixed(BaseRADecFrame):
    """Base class for body-fixed (rotating) planetary frames.

    Subclasses declare ``body``, the matching ``equatorial`` frame class
    and ``_rot_elements_at_epoch``; transforms to and from the equatorial
    frame are registered on the shared frame graph at instantiation.
    """

    # Observation time; defaults to astropy's standard default obstime.
    obstime = TimeAttribute(default=DEFAULT_OBSTIME)

    def __new__(cls, *args, **kwargs):
        # Register both transform directions for this concrete subclass.
        frame_transform_graph.transform(FunctionTransform, cls, cls.equatorial)(
            cls.to_equatorial
        )
        frame_transform_graph.transform(FunctionTransform, cls.equatorial, cls)(
            cls.from_equatorial
        )
        return super().__new__(cls)

    @staticmethod
    def to_equatorial(fixed_coo, equatorial_frame):
        """Transform body-fixed coordinates into the equatorial frame."""
        # TODO replace w/ something smart (Sun/Earth special cased)
        if fixed_coo.body == Sun and type(equatorial_frame) != HCRS:
            raise ValueError(
                f"Equatorial coordinates must be of type `HCRS`, got `{type(equatorial_frame)}` instead."
            )
        elif fixed_coo.body != Sun and fixed_coo.body != equatorial_frame.body:
            raise ValueError(
                "Fixed and equatorial coordinates must have the same body if the fixed frame body is not Sun"
            )
        r = fixed_coo.cartesian
        ra, dec, W = fixed_coo.rot_elements_at_epoch(equatorial_frame.obstime)
        # Undo the prime-meridian rotation (W), then the pole orientation.
        r = r.transform(rotation_matrix(-W, "z"))
        r_trans1 = r.transform(rotation_matrix(-(90 * u.deg - dec), "x"))
        data = r_trans1.transform(rotation_matrix(-(90 * u.deg + ra), "z"))
        return equatorial_frame.realize_frame(data)

    @staticmethod
    def from_equatorial(equatorial_coo, fixed_frame):
        """Transform equatorial coordinates into the body-fixed frame."""
        # TODO replace w/ something smart (Sun/Earth special cased)
        if fixed_frame.body == Sun and type(equatorial_coo) != HCRS:
            raise ValueError(
                f"Equatorial coordinates must be of type `HCRS`, got `{type(equatorial_coo)}` instead."
            )
        elif fixed_frame.body != Sun and equatorial_coo.body != fixed_frame.body:
            raise ValueError(
                "Fixed and equatorial coordinates must have the same body if the fixed frame body is not Sun"
            )
        r = equatorial_coo.cartesian
        ra, dec, W = fixed_frame.rot_elements_at_epoch(fixed_frame.obstime)
        # Inverse rotation order of to_equatorial: pole orientation first,
        # then the prime-meridian rotation W.
        r_trans2 = r.transform(rotation_matrix(90 * u.deg + ra, "z"))
        r_f = r_trans2.transform(rotation_matrix(90 * u.deg - dec, "x"))
        r_f = r_f.transform(rotation_matrix(W, "z"))
        return fixed_frame.realize_frame(r_f)

    @classmethod
    def rot_elements_at_epoch(cls, epoch=J2000):
        """Provides rotational elements at epoch.

        Provides north pole of body and angle to prime meridian.

        Parameters
        ----------
        epoch : ~astropy.time.Time, optional
            Epoch, default to J2000.

        Returns
        -------
        ra, dec, W: tuple (~astropy.units.Quantity)
            Right ascension and declination of north pole, and angle of the prime meridian.
        """
        # T: Julian centuries since J2000 (TDB); d: days since J2000 (TDB).
        T = (epoch.tdb - J2000).to_value(u.d) / 36525
        d = (epoch.tdb - J2000).to_value(u.d)
        return cls._rot_elements_at_epoch(T, d)

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Subclasses implement the body-specific rotation model.
        raise NotImplementedError
class SunFixed(_PlanetaryFixed):
    """Sun body-fixed rotating frame."""

    body = Sun
    equatorial = HCRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = sun_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class MercuryFixed(_PlanetaryFixed):
    """Mercury body-fixed rotating frame."""

    body = Mercury
    equatorial = MercuryICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = mercury_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class VenusFixed(_PlanetaryFixed):
    """Venus body-fixed rotating frame."""

    body = Venus
    equatorial = VenusICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = venus_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class MarsFixed(_PlanetaryFixed):
    """Mars body-fixed rotating frame."""

    body = Mars
    equatorial = MarsICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = mars_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class JupiterFixed(_PlanetaryFixed):
    """Jupiter body-fixed rotating frame."""

    body = Jupiter
    equatorial = JupiterICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = jupiter_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class SaturnFixed(_PlanetaryFixed):
    """Saturn body-fixed rotating frame."""

    body = Saturn
    equatorial = SaturnICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = saturn_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class UranusFixed(_PlanetaryFixed):
    """Uranus body-fixed rotating frame."""

    body = Uranus
    equatorial = UranusICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = uranus_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class NeptuneFixed(_PlanetaryFixed):
    """Neptune body-fixed rotating frame."""

    body = Neptune
    equatorial = NeptuneICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = neptune_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
class MoonFixed(_PlanetaryFixed):
    """Moon body-fixed rotating frame."""

    body = Moon
    equatorial = MoonICRS

    @staticmethod
    def _rot_elements_at_epoch(T, d):
        # Evaluate the fast core rotation model, then attach degree units.
        raw_ra, raw_dec, raw_w = moon_rot_elements_at_epoch_fast(T, d)
        return raw_ra * u.deg, raw_dec * u.deg, raw_w * u.deg
|
poliastro/poliastro
|
src/poliastro/frames/fixed.py
|
Python
|
mit
| 6,869
|
# -*- coding: utf-8 -*-
import sys
import os
import inspect
from datetime import datetime
__all__ = ["Logger"]
class Logger(object):
    """Syslog-style logger with per-stream severity filtering.

    Output targets are registered with setstream(); each entry is a
    (stream, max_severity) pair.  A registered stream may itself be
    another Logger, in which case formatted lines are forwarded to it.
    Python 2 only (uses the ``print >>stream`` statement).
    """

    # Severity levels (numerically syslog-compatible: lower = more severe).
    EMERG = 0
    ALERT = 1
    CRIT = 2
    ERR = 3
    ERROR = 3  # alias for ERR
    WARNING = 4
    WARN = 4  # alias for WARNING
    NOTICE = 5
    INFO = 6
    DEBUG = 7

    # Human-readable names, indexed by severity value.
    severities = [
        "Emergency",
        "Alert",
        "Critical",
        "Error",
        "Warning",
        "Notice",
        "Informational",
        "Debug"]

    def __init__(self, identifier):
        # identifier: name reported in each line's {identifier} field.
        self.identifier = identifier
        # ident -> (stream, max severity); managed by setstream().
        self.streams = dict()
        # Line prefix format; the message text is appended after it.
        self.format = "{time} {severity} from {identifier} in {function} ({file}:{line}): "

    def setstream(self, ident, stream, severity):
        """Register, replace or remove the output stream named *ident*."""
        # NOTE(review): the removal test mixes the two parameters
        # (`severity is None or stream == -1`); confirm it should not be
        # `stream is None or severity == -1` instead.
        if severity is None or stream == -1:
            if ident in self.streams:
                del self.streams[ident]
        else:
            self.streams[ident] = (stream, severity)

    def log(self, severtiy, message):
        # Log at an explicit severity level.
        # NOTE(review): parameter keeps the historical "severtiy" typo;
        # renaming it would break keyword callers.
        self._log(severtiy, message)

    def emerg(self, message):
        self._log(self.EMERG, message)

    def alert(self, message):
        self._log(self.ALERT, message)

    def crit(self, message):
        self._log(self.CRIT, message)

    def err(self, message):
        self._log(self.ERR, message)

    def error(self, message):
        self._log(self.ERR, message)

    def warn(self, message):
        self._log(self.WARN, message)

    def notice(self, message):
        self._log(self.NOTICE, message)

    def info(self, message):
        self._log(self.INFO, message)

    def debug(self, message):
        self._log(self.DEBUG, message)

    def _log(self, severity, message):
        """Format *message* with caller info and dispatch it to streams."""
        # st[2] is the caller of the public wrapper (log/info/...), so the
        # reported function/file/line refer to user code, not this module.
        st = inspect.stack()
        info = {"identifier": self.identifier,
                "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                "severity": self.severities[severity].upper(),
                "function": st[2][3],
                "file": os.path.relpath(st[2][1]),
                "line": st[2][2]}
        line = self.format.format(**info) + message
        self._logline(severity, line)

    def _logline(self, severity, line):
        # Fan the formatted line out to every stream whose threshold
        # admits this severity; nested Loggers are forwarded recursively.
        for stream, maxseverity in self.streams.values():
            if severity <= maxseverity:
                if isinstance(stream, Logger):
                    stream._logline(severity, line)
                else:
                    print >>stream, line  # Python 2 print-to-stream syntax
|
sciapp/pyMolDyn
|
src/util/logger.py
|
Python
|
mit
| 2,378
|
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.DirectObject import DirectObject
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownTimer
from direct.task import Task
from otp.namepanel import NameTumbler
from otp.otpbase import OTPGlobals
from otp.otpbase import OTPLocalizer
from toontown.fishing import FishSellGUI
from toontown.pets import Pet, PetConstants
from toontown.pets import PetDNA
from toontown.pets import PetUtil
from toontown.pets import PetDetail
from toontown.pets import PetTraits
from toontown.pets import PetNameGenerator
from toontown.hood import ZoneUtil
import string
import random
# Dialog state identifiers for the pet shop GUI flow.
Dialog_MainMenu = 0
Dialog_AdoptPet = 1
Dialog_ChoosePet = 2
Dialog_ReturnPet = 3
Dialog_SellFish = 4
Dialog_NamePicker = 5
Dialog_GoHome = 6

# Shared RGBA colors for buttons and labels.
disabledImageColor = Vec4(0.6, 0.6, 0.6, 1)
text0Color = Vec4(0.65, 0, 0.87, 1)
text1Color = Vec4(0.65, 0, 0.87, 1)
text2Color = Vec4(1, 1, 0.5, 1)
text3Color = Vec4(0.4, 0.4, 0.4, 1)
class PetshopGUI(DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGui')
class GoHomeDlg(DirectFrame):
    """Yes/no confirmation dialog asking whether to go home.

    Fires `doneEvent` with 1 for yes, 0 for no.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('GoHomeDlg')

    def __init__(self, doneEvent):
        DirectFrame.__init__(self, pos=(0.0, 0.0, 0.0), image_color=ToontownGlobals.GlobalDialogColor, image_scale=(1.0, 1.0, 0.6), text='', text_wordwrap=13.5, text_scale=0.06, text_pos=(0.0, 0.13))
        self['image'] = DGG.getDefaultDialogGeom()
        self['text'] = TTLocalizer.PetshopGoHomeText
        buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
        gui = loader.loadModel('phase_3.5/models/gui/avatar_panel_gui')
        # Yes -> doneEvent(1); No -> doneEvent(0).
        self.bYes = DirectButton(self, image=(buttons.find('**/ChtBx_OKBtn_UP'), buttons.find('**/ChtBx_OKBtn_DN'), buttons.find('**/ChtBx_OKBtn_Rllvr')), relief=None, text=TTLocalizer.TutorialYes, text_scale=0.05, text_pos=(0.0, -0.1), pos=(-0.15, 0.0, -0.1), command=lambda : messenger.send(doneEvent, [1]))
        self.bNo = DirectButton(self, image=(buttons.find('**/CloseBtn_UP'), buttons.find('**/CloseBtn_DN'), buttons.find('**/CloseBtn_Rllvr')), relief=None, text=TTLocalizer.TutorialNo, text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.15, 0.0, -0.1), command=lambda : messenger.send(doneEvent, [0]))
        # Release the source model nodes once the buttons hold the geometry.
        buttons.removeNode()
        gui.removeNode()
        return
class NamePicker(DirectFrame):
    """Pet-name selection panel.

    Shows a scrollable letter list, a name list filtered by the selected
    leading letters, a random-name button, a pet head preview and
    submit/cancel buttons.  Fires `doneEvent` with the chosen name's
    unique id, or -1 on cancel.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGUI.NamePicker')

    def __init__(self, doneEvent, petSeed, gender):
        # Derive the pet's DNA from its seed for the preview model.
        zoneId = ZoneUtil.getCanonicalSafeZoneId(base.localAvatar.getZoneId())
        name, dna, traitSeed = PetUtil.getPetInfoFromSeed(petSeed, zoneId)
        self.gui = loader.loadModel('phase_4/models/gui/PetNamePanel')
        self.guiScale = 0.09
        DirectFrame.__init__(self, relief=None, geom=self.gui, geom_scale=self.guiScale, state='normal', frameSize=(-1, 1, -1, 1))
        self.initialiseoptions(PetshopGUI.NamePicker)
        # Pet head preview in the corner of the panel.
        self.petView = self.attachNewNode('petView')
        self.petView.setPos(-0.21, 0, -0.04)
        self.petModel = Pet.Pet(forGui=1)
        self.petModel.setDNA(dna)
        self.petModel.fitAndCenterHead(0.435, forGui=1)
        self.petModel.reparentTo(self.petView)
        self.petModel.setH(225)
        self.petModel.enterNeutralHappy()
        # Candidate names: gender-specific first names plus neutral ones.
        self.ng = PetNameGenerator.PetNameGenerator()
        if gender == 1:
            self.allNames = self.ng.boyFirsts
        else:
            self.allNames = self.ng.girlFirsts
        self.allNames += self.ng.neutralFirsts
        self.allNames.sort()
        self.checkNames()
        # Distinct leading-letter groups (PGUIcharLength chars each).
        self.letters = []
        for name in self.allNames:
            if name[0:TTLocalizer.PGUIcharLength] not in self.letters:
                self.letters.append(name[0:TTLocalizer.PGUIcharLength])
        self.curLetter = self.letters[0]
        self.curNames = []
        self.curName = ''
        self.alphabetList = self.makeScrollList(self.gui, (-0.012, 0, -0.075), (1, 0.8, 0.8, 1), self.letters, self.makeLabel, [TextNode.ACenter, 'alphabet'], 6)
        self.nameList = None
        self.rebuildNameList()
        self.randomButton = DirectButton(parent=self, relief=None, image=(self.gui.find('**/RandomUpButton'), self.gui.find('**/RandomDownButton'), self.gui.find('**/RandomRolloverButton')), scale=self.guiScale, text=TTLocalizer.RandomButton, text_pos=(-0.8, -5.7), text_scale=0.8, text_fg=text2Color, pressEffect=False, command=self.randomName)
        self.nameResult = DirectLabel(parent=self, relief=None, scale=self.guiScale, text='', text_align=TextNode.ACenter, text_pos=(-1.85, 2.6), text_fg=text0Color, text_scale=0.6, text_wordwrap=8)
        # Submit reports the chosen name's unique id to the done event.
        self.submitButton = DirectButton(parent=self, relief=None, image=(self.gui.find('**/SubmitUpButton'), self.gui.find('**/SubmitDownButton'), self.gui.find('**/SubmitRolloverButton')), scale=self.guiScale, text=TTLocalizer.PetshopAdopt, text_pos=(3.3, -5.7), text_scale=TTLocalizer.PGUIsubmitButton, text_fg=text0Color, pressEffect=False, command=lambda : messenger.send(doneEvent, [self.ng.returnUniqueID(self.curName)]))
        model = loader.loadModel('phase_4/models/gui/PetShopInterface')
        modelScale = 0.1
        cancelImageList = (model.find('**/CancelButtonUp'), model.find('**/CancelButtonDown'), model.find('**/CancelButtonRollover'))
        cancelIcon = model.find('**/CancelIcon')
        # Cancel reports -1 to the done event.
        self.cancelButton = DirectButton(parent=self, relief=None, pos=(-0.04, 0, -0.47), image=cancelImageList, geom=cancelIcon, scale=modelScale, pressEffect=False, command=lambda : messenger.send(doneEvent, [-1]))
        # Start with a random suggestion pre-selected.
        self.randomName()
        return

    def checkNames(self):
        """Dev-only sanity check that all names are purely alphabetic."""
        if __dev__:
            for name in self.allNames:
                if not name.replace(' ', '').isalpha():
                    self.notify.warning('Bad name:%s' % name)

    def destroy(self):
        # Release the preview model before tearing down the frame.
        self.petModel.delete()
        DirectFrame.destroy(self)

    def rebuildNameList(self):
        """Rebuild the name scroll list for the current letter group."""
        self.curNames = []
        for name in self.allNames:
            if name[0:TTLocalizer.PGUIcharLength] == self.curLetter:
                self.curNames += [name]
        if self.nameList:
            self.nameList.destroy()
        self.nameList = self.makeScrollList(self.gui, (0.277, 0, -0.075), (1, 0.8, 0.8, 1), self.curNames, self.makeLabel, [TextNode.ACenter, 'name'], 5)

    def updateNameText(self):
        # Reflect the currently selected name in the result label.
        self.nameResult['text'] = self.curName

    def nameClickedOn(self, listType, index):
        """Handle a click in either scroll list ('alphabet' or 'name')."""
        if listType == 'alphabet':
            self.curLetter = self.letters[index]
            self.rebuildNameList()
        elif listType == 'name':
            self.curName = self.curNames[index]
            self.updateNameText()

    def makeLabel(self, te, index, others):
        """Item factory for the scroll lists: one clickable text button."""
        alig = others[0]
        listName = others[1]
        if alig == TextNode.ARight:
            newpos = (0.44, 0, 0)
        elif alig == TextNode.ALeft:
            newpos = (0, 0, 0)
        else:
            newpos = (0.2, 0, 0)
        df = DirectButton(parent=self, state='normal', relief=None, text=te, text_scale=0.1, text_pos=(0.2, 0, 0), text_align=alig, textMayChange=0, command=lambda : self.nameClickedOn(listName, index))
        return df

    def makeScrollList(self, gui, ipos, mcolor, nitems, nitemMakeFunction, nitemMakeExtraArgs, nVisibleItems):
        """Build a DirectScrolledList with per-list-type arrow art."""
        decScale = self.guiScale / 0.44
        incScale = (decScale, decScale, -decScale)
        it = nitems[:]
        listType = nitemMakeExtraArgs[1]
        if listType == 'alphabet':
            arrowList = (gui.find('**/ArrowSmUpButton'),
             gui.find('**/ArrowSmUpRollover'),
             gui.find('**/ArrowSmUpRollover'),
             gui.find('**/ArrowSmUpButton'))
            fHeight = 0.09
        elif listType == 'name':
            arrowList = (gui.find('**/ArrowUpBigButton'),
             gui.find('**/ArrowUpBigRollover'),
             gui.find('**/ArrowUpBigRollover'),
             gui.find('**/ArrowUpBigButton'))
            fHeight = 0.119
        ds = DirectScrolledList(parent=self, items=it, itemMakeFunction=nitemMakeFunction, itemMakeExtraArgs=nitemMakeExtraArgs, relief=None, command=None, pos=ipos, scale=0.44, incButton_image=arrowList, incButton_image_pos=(1.015, 0, 3.32), incButton_relief=None, incButton_scale=incScale, incButton_image3_color=Vec4(0.4, 0.4, 0.4, 1), decButton_image=arrowList, decButton_image_pos=(1.015, 0, 1.11), decButton_relief=None, decButton_scale=decScale, decButton_image3_color=Vec4(0.4, 0.4, 0.4, 1), numItemsVisible=nVisibleItems, forceHeight=fHeight)
        return ds

    def randomName(self):
        """Pick a random name, then sync both lists and the result label."""
        numNames = len(self.allNames)
        self.curName = self.allNames[random.randrange(numNames)]
        self.curLetter = self.curName[0:TTLocalizer.PGUIcharLength]
        self.rebuildNameList()
        self.updateNameText()
        # Scroll both lists so the picked name is visible.
        self.alphabetList.scrollTo(self.letters.index(self.curLetter))
        self.nameList.scrollTo(self.curNames.index(self.curName))
class MainMenuDlg(DirectFrame):
    """Pet shop main menu: sell fish, adopt a pet, return a pet, cancel.

    Fires `doneEvent` with 0 (cancel), 1 (sell fish), 2 (adopt) or
    3 (return).  Sell/return buttons are disabled when not applicable.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGUI.MainMenuDlg')

    def __init__(self, doneEvent):
        model = loader.loadModel('phase_4/models/gui/AdoptReturnSell')
        modelPos = (0, 0, -0.3)
        modelScale = 0.055
        DirectFrame.__init__(self, relief=None, state='normal', geom=model, geom_scale=(modelScale, modelScale, modelScale), pos=modelPos, frameSize=(-1, 1, -1, 1))
        self.initialiseoptions(PetshopGUI.MainMenuDlg)
        textScale = TTLocalizer.PGUItextScale
        # Button art: up/down/rollover (and disabled where 4 states given).
        sellFishImageList = (model.find('**/SellButtonUp'),
         model.find('**/SellButtonDown'),
         model.find('**/SellButtonRollover'),
         model.find('**/SellButtonDown'))
        fishLogoImageList = model.find('**/Fish')
        cancelImageList = (model.find('**/CancelButtonUp'), model.find('**/cancelButtonDown'), model.find('**/CancelButtonRollover'))
        XImageList = model.find('**/CancelIcon')
        adoptImageList = (model.find('**/AdoptButtonUp'), model.find('**/AdoptButtonDown'), model.find('**/AdoptButtonRollover'))
        pawLogoAdoptImageList = model.find('**/PawPink')
        returnImageList = (model.find('**/ReturnButtonUp'),
         model.find('**/ReturnButtonDown'),
         model.find('**/ReturnButtonRollover'),
         model.find('**/ReturnButtonDown'))
        pawLogoReturnImageList = model.find('**/PawYellow')
        self.cancelButton = DirectButton(parent=self, relief=None, scale=(modelScale, modelScale, modelScale), geom=XImageList, image=cancelImageList, text=('', TTLocalizer.PetshopCancel), text_pos=TTLocalizer.PGUIcancelButtonPos, text_scale=0.8, pressEffect=False, command=lambda : messenger.send(doneEvent, [0]))
        self.sellFishButton = DirectButton(parent=self, relief=None, image=sellFishImageList, image3_color=disabledImageColor, geom=fishLogoImageList, scale=(modelScale, modelScale, modelScale), text=TTLocalizer.PetshopSell, text_scale=textScale, text_pos=(0, 6), text0_fg=text2Color, text1_fg=text2Color, text2_fg=text0Color, text3_fg=text3Color, pressEffect=False, command=lambda : messenger.send(doneEvent, [1]))
        # Nothing to sell -> disable the sell button.
        fishValue = base.localAvatar.fishTank.getTotalValue()
        if fishValue == 0:
            self.sellFishButton['state'] = DGG.DISABLED
        self.adoptPetButton = DirectButton(parent=self, relief=None, image=adoptImageList, geom=pawLogoAdoptImageList, scale=(modelScale, modelScale, modelScale), text=TTLocalizer.PetshopAdoptAPet, text_scale=textScale, text_pos=(0, 12.5), text0_fg=text0Color, text1_fg=text1Color, text2_fg=text2Color, text3_fg=text3Color, pressEffect=False, command=lambda : messenger.send(doneEvent, [2]))
        self.returnPetButton = DirectButton(parent=self, relief=None, image=returnImageList, geom=pawLogoReturnImageList, image3_color=disabledImageColor, scale=(modelScale, modelScale, modelScale), text=TTLocalizer.PetshopReturnPet, text_scale=textScale, text_pos=(-0.6, 9.2), text0_fg=text2Color, text1_fg=text2Color, text2_fg=text0Color, text3_fg=text3Color, pressEffect=False, command=lambda : messenger.send(doneEvent, [3]))
        # No pet to return -> disable the return button.
        if not base.localAvatar.hasPet():
            self.returnPetButton['state'] = DGG.DISABLED
        model.removeNode()
        return
class AdoptPetDlg(DirectFrame):
notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGUI.AdoptPetDlg')
def __init__(self, doneEvent, petSeed, petNameIndex):
zoneId = ZoneUtil.getCanonicalSafeZoneId(base.localAvatar.getZoneId())
name, dna, traitSeed = PetUtil.getPetInfoFromSeed(petSeed, zoneId)
name = PetNameGenerator.PetNameGenerator().getName(petNameIndex)
cost = PetUtil.getPetCostFromSeed(petSeed, zoneId)
model = loader.loadModel('phase_4/models/gui/AdoptPet')
modelPos = (0, 0, -0.3)
modelScale = 0.055
DirectFrame.__init__(self, relief=None, state='normal', geom=model, geom_color=ToontownGlobals.GlobalDialogColor, geom_scale=modelScale, frameSize=(-1, 1, -1, 1), pos=modelPos, text=TTLocalizer.PetshopAdoptConfirm % (name, cost), text_wordwrap=12, text_scale=0.05, text_pos=(0, 0.55), text_fg=text0Color)
self.initialiseoptions(PetshopGUI.AdoptPetDlg)
self.petView = self.attachNewNode('petView')
self.petView.setPos(-0.13, 0, 0.8)
self.petModel = Pet.Pet(forGui=1)
self.petModel.setDNA(dna)
self.petModel.fitAndCenterHead(0.395, forGui=1)
self.petModel.reparentTo(self.petView)
self.petModel.setH(130)
self.petModel.enterNeutralHappy()
self.moneyDisplay = DirectLabel(parent=self, relief=None, text=str(base.localAvatar.getTotalMoney()), text_scale=0.075, text_fg=(0.95, 0.95, 0, 1), text_shadow=(0, 0, 0, 1), text_pos=(0.225, 0.33), text_font=ToontownGlobals.getSignFont())
self.accept(localAvatar.uniqueName('moneyChange'), self.__moneyChange)
self.accept(localAvatar.uniqueName('bankMoneyChange'), self.__moneyChange)
okImageList = (model.find('**/CheckButtonUp'), model.find('**/CheckButtonDown'), model.find('**/CheckButtonRollover'))
cancelImageList = (model.find('**/CancelButtonUp'), model.find('**/CancelButtonDown'), model.find('**/CancelRollover'))
cancelIcon = model.find('**/CancelIcon')
checkIcon = model.find('**/CheckIcon')
self.cancelButton = DirectButton(parent=self, relief=None, image=cancelImageList, geom=cancelIcon, scale=modelScale, text=('', TTLocalizer.PetshopGoBack), text_pos=(-5.8, 4.4), text_scale=0.7, pressEffect=False, command=lambda : messenger.send(doneEvent, [0]))
self.okButton = DirectButton(parent=self, relief=None, image=okImageList, geom=checkIcon, scale=modelScale, text=('', TTLocalizer.PetshopAdopt), text_pos=(5.8, 4.4), text_scale=0.7, pressEffect=False, command=lambda : messenger.send(doneEvent, [1]))
model.removeNode()
return
def destroy(self):
    """Tear down the adoption dialog: stop money listeners and free the pet preview."""
    for evt in ('moneyChange', 'bankMoneyChange'):
        self.ignore(localAvatar.uniqueName(evt))
    self.petModel.delete()
    DirectFrame.destroy(self)
def __moneyChange(self, money):
    """Event hook: re-read the toon's total money and refresh the readout (the
    *money* argument is ignored; the total is fetched fresh)."""
    total = base.localAvatar.getTotalMoney()
    self.moneyDisplay['text'] = str(total)
class ReturnPetDlg(DirectFrame):
    """Confirmation dialog for returning (giving up) the toon's current pet.

    The pet's data is fetched asynchronously through PetDetail; the frame is
    only actually built once that data arrives, so ``self.initialized`` guards
    all cleanup.  Sends *doneEvent* with 0 (cancel) or 1 (confirm return).
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGUI.ReturnPetDlg')

    def __init__(self, doneEvent):

        def showDialog(avatar):
            # Callback fired by PetDetail once the pet avatar is available.
            model = loader.loadModel('phase_4/models/gui/ReturnPet')
            modelPos = (0, 0, -0.3)
            modelScale = (0.055, 0.055, 0.055)
            base.r = self
            DirectFrame.__init__(self, relief=None, state='normal', geom=model, geom_scale=modelScale, frameSize=(-1, 1, -1, 1), pos=modelPos, text=TTLocalizer.PetshopReturnConfirm % avatar.getName(), text_wordwrap=12, text_scale=TTLocalizer.PGUIreturnConfirm, text_pos=(0, 0.45), text_fg=text2Color)
            self.initialiseoptions(PetshopGUI.ReturnPetDlg)
            okImageList = (model.find('**/CheckButtonUp'), model.find('**/CheckButtonDown'), model.find('**/CheckRollover'))
            cancelImageList = (model.find('**/CancelButtonUp'), model.find('**/CancelButtonDown'), model.find('**/CancelRollover'))
            cancelIcon = model.find('**/CancelIcon')
            checkIcon = model.find('**/CheckIcon')
            self.cancelButton = DirectButton(parent=self, relief=None, image=cancelImageList, geom=cancelIcon, scale=modelScale, text=('', TTLocalizer.PetshopGoBack), text_pos=(-5.8, 4.4), text_scale=0.7, pressEffect=False, command=lambda : messenger.send(doneEvent, [0]))
            self.okButton = DirectButton(parent=self, relief=None, image=okImageList, geom=checkIcon, scale=modelScale, text=('', TTLocalizer.PetshopReturn), text_pos=(5.8, 4.4), text_scale=0.7, pressEffect=False, command=lambda : messenger.send(doneEvent, [1]))
            # Head preview of the pet being returned (sad pose).
            self.petView = self.attachNewNode('petView')
            self.petView.setPos(-0.15, 0, 0.8)
            self.petModel = Pet.Pet(forGui=1)
            self.petModel.setDNA(avatar.getDNA())
            self.petModel.fitAndCenterHead(0.395, forGui=1)
            self.petModel.reparentTo(self.petView)
            self.petModel.setH(130)
            self.petModel.enterNeutralSad()
            model.removeNode()
            self.initialized = True
            return

        self.initialized = False
        self.petPanel = PetDetail.PetDetail(base.localAvatar.getPetId(), showDialog)

    def destroy(self):
        if self.initialized:
            self.petPanel.avatar.disable()
            self.petPanel.avatar.delete()
            self.petPanel.avatar = None
            # BUGFIX: was 'self.PetPanel = None' (capital P), which created a
            # brand-new attribute and left the real PetDetail reference alive.
            self.petPanel = None
            self.petModel.delete()
            DirectFrame.destroy(self)
        return
class ChoosePetDlg(DirectFrame):
    """Adoption browser: page through the pets this shop currently offers.

    Shows one pet at a time (head preview, gender/trait description, cost);
    the Adopt button is disabled while the toon cannot afford the displayed
    pet.  Sends *doneEvent* with the chosen pet index, or -1 on cancel.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('PetshopGUI.ChoosePetDlg')

    def __init__(self, doneEvent, petSeeds):
        model = loader.loadModel('phase_4/models/gui/PetShopInterface')
        modelPos = (0, 0, -0.9)
        modelScale = (0.185, 0.185, 0.185)
        DirectFrame.__init__(self, relief=None, state='normal', geom=model, geom_scale=modelScale, frameSize=(-1, 1, -1, 1), pos=modelPos, text=TTLocalizer.PetshopChooserTitle, text_wordwrap=26, text_scale=TTLocalizer.PGUIchooserTitle, text_fg=Vec4(0.36, 0.94, 0.93, 1), text_pos=(0, 1.58))
        self.initialiseoptions(PetshopGUI.ChoosePetDlg)
        # All button artwork is pulled from the interface model.
        adoptImageList = (model.find('**/AdoptButtonUp'),
         model.find('**/AdoptButtonDown'),
         model.find('**/AdoptButtonRollover'),
         model.find('**/AdoptButtonRollover'))
        cancelImageList = (model.find('**/CancelButtonUp'), model.find('**/CancelButtonDown'), model.find('**/CancelButtonRollover'))
        cancelIcon = model.find('**/CancelIcon')
        pawLImageList = (model.find('**/Paw1Up'), model.find('**/Paw1Down'), model.find('**/Paw1Rollover'))
        pawLArrowImageList = model.find('**/Arrow1')
        pawRImageList = (model.find('**/Paw2Up'), model.find('**/Paw2Down'), model.find('**/Paw2Rollover'))
        pawRArrowImageList = model.find('**/Arrow2')
        self.cancelButton = DirectButton(parent=self, relief=None, image=cancelImageList, geom=cancelIcon, scale=modelScale, pressEffect=False, command=lambda : messenger.send(doneEvent, [-1]))
        # Paw buttons page backward / forward through the pet list.
        self.pawLButton = DirectButton(parent=self, relief=None, image=pawLImageList, geom=pawLArrowImageList, scale=modelScale, pressEffect=False, command=lambda : self.__handlePetChange(-1))
        self.pawRButton = DirectButton(parent=self, relief=None, image=pawRImageList, geom=pawRArrowImageList, scale=modelScale, pressEffect=False, command=lambda : self.__handlePetChange(1))
        # image3_color greys the Adopt button out in its DISABLED state.
        self.okButton = DirectButton(parent=self, relief=None, image=adoptImageList, image3_color=disabledImageColor, scale=modelScale, text=TTLocalizer.PetshopAdopt, text_scale=TTLocalizer.PGUIokButton, text_pos=TTLocalizer.PGUIokButtonPos, text0_fg=text0Color, text1_fg=text1Color, text2_fg=text2Color, text3_fg=text3Color, pressEffect=False, command=lambda : messenger.send(doneEvent, [self.curPet]))
        self.moneyDisplay = DirectLabel(parent=self, relief=None, text=str(base.localAvatar.getTotalMoney()), text_scale=0.1, text_fg=(0.95, 0.95, 0, 1), text_shadow=(0, 0, 0, 1), text_pos=(0.34, 0.12), text_font=ToontownGlobals.getSignFont())
        # Keep the money readout live while this dialog is up.
        self.accept(localAvatar.uniqueName('moneyChange'), self.__moneyChange)
        self.accept(localAvatar.uniqueName('bankMoneyChange'), self.__moneyChange)
        self.petView = self.attachNewNode('petView')
        self.petView.setPos(-0.05, 0, 1.15)
        model.removeNode()
        self.petSeeds = petSeeds
        self.makePetList()
        self.showPet()
        return

    def makePetList(self):
        """Precompute DNA, placeholder name, description text and cost per seed."""
        self.numPets = len(self.petSeeds)
        self.curPet = 0  # index of the pet currently on display
        self.petDNA = []
        self.petName = []
        self.petDesc = []
        self.petCost = []
        for i in range(self.numPets):
            # NOTE(review): seeding the global RNG here suggests PetUtil /
            # PetTraits draw from `random` internally — confirm.
            random.seed(self.petSeeds[i])
            zoneId = ZoneUtil.getCanonicalSafeZoneId(base.localAvatar.getZoneId())
            name, dna, traitSeed = PetUtil.getPetInfoFromSeed(self.petSeeds[i], zoneId)
            cost = PetUtil.getPetCostFromSeed(self.petSeeds[i], zoneId)
            traits = PetTraits.PetTraits(traitSeed, zoneId)
            traitList = traits.getExtremeTraitDescriptions()
            numGenders = len(PetDNA.PetGenders)
            # Alternate genders across the offered pets.
            gender = i % numGenders
            PetDNA.setGender(dna, gender)
            self.petDNA.append(dna)
            self.petName.append(TTLocalizer.PetshopUnknownName)
            descList = []
            descList.append(TTLocalizer.PetshopDescGender % PetDNA.getGenderString(gender=gender))
            if traitList:
                descList.append(TTLocalizer.PetshopDescTrait % traitList[0])
            else:
                descList.append(TTLocalizer.PetshopDescTrait % TTLocalizer.PetshopDescStandard)
            # Pad so the slice below always yields three (possibly empty) lines.
            traitList.extend(['',
             '',
             '',
             ''])
            for trait in traitList[1:4]:
                descList.append('\t%s' % trait)
            descList.append(TTLocalizer.PetshopDescCost % cost)
            self.petDesc.append(string.join(descList, '\n'))
            self.petCost.append(cost)

    def destroy(self):
        # nameLabel/descLabel are children of this frame and go down with it.
        self.ignore(localAvatar.uniqueName('moneyChange'))
        self.ignore(localAvatar.uniqueName('bankMoneyChange'))
        self.petModel.delete()
        DirectFrame.destroy(self)

    def __handlePetChange(self, nDir):
        # Step forward (+1) or backward (-1) through the pet list, wrapping.
        self.curPet = (self.curPet + nDir) % self.numPets
        self.nameLabel.destroy()
        self.petModel.delete()
        self.descLabel.destroy()
        self.showPet()

    def showPet(self):
        """(Re)build the preview widgets for the currently selected pet."""
        self.nameLabel = DirectLabel(parent=self, pos=(0, 0, 1.35), relief=None, text=self.petName[self.curPet], text_fg=Vec4(0.45, 0, 0.61, 1), text_pos=(0, 0), text_scale=0.08, text_shadow=(1, 1, 1, 1))
        self.petModel = Pet.Pet(forGui=1)
        self.petModel.setDNA(self.petDNA[self.curPet])
        self.petModel.fitAndCenterHead(0.57, forGui=1)
        self.petModel.reparentTo(self.petView)
        self.petModel.setH(130)
        self.petModel.enterNeutralHappy()
        self.descLabel = DirectLabel(parent=self, pos=(-0.4, 0, 0.72), relief=None, scale=0.05, text=self.petDesc[self.curPet], text_align=TextNode.ALeft, text_wordwrap=TTLocalizer.PGUIwordwrap, text_scale=TTLocalizer.PGUIdescLabel)
        # Disable adoption when the toon cannot afford this pet.
        if self.petCost[self.curPet] > base.localAvatar.getTotalMoney():
            self.okButton['state'] = DGG.DISABLED
        else:
            self.okButton['state'] = DGG.NORMAL
        return

    def __moneyChange(self, money):
        # Re-read the total rather than trusting the event payload.
        self.moneyDisplay['text'] = str(base.localAvatar.getTotalMoney())
def __init__(self, eventDict, petSeeds):
    """Build the petshop GUI: a countdown timer plus the main-menu dialog.

    *eventDict* maps logical outcomes ('guiDone', 'fishSold', 'petAdopted',
    'petReturned') to messenger event names; *petSeeds* are the seeds of the
    pets offered for adoption in this shop.
    """
    self.eventDict = eventDict
    self.petSeeds = petSeeds
    # Done-event names for every sub-dialog this GUI can show.
    self.mainMenuDoneEvent = 'MainMenuGuiDone'
    self.adoptPetDoneEvent = 'AdoptPetGuiDone'
    self.returnPetDoneEvent = 'ReturnPetGuiDone'
    self.petChooserDoneEvent = 'PetChooserGuiDone'
    self.fishGuiDoneEvent = 'MyFishGuiDone'
    self.namePickerDoneEvent = 'NamePickerGuiDone'
    self.goHomeDlgDoneEvent = 'GoHomeDlgDone'
    self.dialog = None
    self.dialogStack = []
    # Countdown that force-closes the shop when the clerk interaction times out.
    self.timer = ToontownTimer.ToontownTimer()
    self.timer.reparentTo(aspect2d)
    self.timer.posInTopRightCorner()
    self.timer.countdown(PetConstants.PETCLERK_TIMER, self.__timerExpired)
    self.doDialog(Dialog_MainMenu)
def __timerExpired(self):
    """Countdown ran out: tell the shop interaction to shut the GUI down."""
    doneEvent = self.eventDict['guiDone']
    messenger.send(doneEvent, [True])
def destroy(self):
    """Tear everything down: active dialog, timer, and all done-event hooks."""
    self.destroyDialog()
    self.timer.destroy()
    del self.timer
    for doneEvent in (self.mainMenuDoneEvent,
                      self.adoptPetDoneEvent,
                      self.returnPetDoneEvent,
                      self.petChooserDoneEvent,
                      self.fishGuiDoneEvent,
                      self.namePickerDoneEvent,
                      self.goHomeDlgDoneEvent):
        self.ignore(doneEvent)
def destroyDialog(self):
    """Destroy the currently shown dialog, if any, and clear the reference."""
    dialog, self.dialog = self.dialog, None
    if dialog is not None:
        dialog.destroy()
def popDialog(self):
    """Discard the current dialog and re-open the previous one."""
    self.dialogStack.pop()             # drop the dialog being closed
    previous = self.dialogStack.pop()  # doDialog pushes it back on
    self.doDialog(previous)
def doDialog(self, nDialog):
    """Switch to dialog *nDialog*: destroy the current one, push the new one
    onto the stack, hook its done-event once, and construct it."""
    self.destroyDialog()
    self.dialogStack.append(nDialog)
    # nDialog -> (doneEvent, handler, lazy dialog factory).  The factories
    # are lambdas so attributes like adoptPetNum are read at dispatch time.
    dispatch = {
        Dialog_MainMenu: (self.mainMenuDoneEvent, self.__handleMainMenuDlg,
                          lambda ev: self.MainMenuDlg(ev)),
        Dialog_AdoptPet: (self.adoptPetDoneEvent, self.__handleAdoptPetDlg,
                          lambda ev: self.AdoptPetDlg(ev, self.petSeeds[self.adoptPetNum], self.adoptPetNameIndex)),
        Dialog_ChoosePet: (self.petChooserDoneEvent, self.__handleChoosePetDlg,
                           lambda ev: self.ChoosePetDlg(ev, self.petSeeds)),
        Dialog_ReturnPet: (self.returnPetDoneEvent, self.__handleReturnPetDlg,
                           lambda ev: self.ReturnPetDlg(ev)),
        Dialog_SellFish: (self.fishGuiDoneEvent, self.__handleFishSellDlg,
                          lambda ev: FishSellGUI.FishSellGUI(ev)),
        Dialog_NamePicker: (self.namePickerDoneEvent, self.__handleNamePickerDlg,
                            lambda ev: self.NamePicker(ev, self.petSeeds[self.adoptPetNum],
                                                       gender=self.adoptPetNum % 2)),
        Dialog_GoHome: (self.goHomeDlgDoneEvent, self.__handleGoHomeDlg,
                        lambda ev: self.GoHomeDlg(ev)),
    }
    if nDialog in dispatch:
        doneEvent, handler, makeDialog = dispatch[nDialog]
        self.acceptOnce(doneEvent, handler)
        self.dialog = makeDialog(doneEvent)
def __handleMainMenuDlg(self, exitVal):
    """Main menu closed: 0 = leave shop, 1 = sell fish, 2 = adopt, 3 = return."""
    if exitVal == 0:
        messenger.send(self.eventDict['guiDone'])
        return
    nextDialog = {1: Dialog_SellFish,
                  2: Dialog_ChoosePet,
                  3: Dialog_ReturnPet}.get(exitVal)
    if nextDialog is not None:
        self.doDialog(nextDialog)
def __handleFishSellDlg(self, exitVal):
    """Fish-sale dialog closed: 1 = fish sold, 0 = back to the main menu."""
    if exitVal == 1:
        self.destroyDialog()
        messenger.send(self.eventDict['fishSold'])
    elif exitVal == 0:
        self.popDialog()
def __handleChoosePetDlg(self, exitVal):
    """Pet chooser closed: -1 = cancel, otherwise the chosen pet's index."""
    if exitVal == -1:
        self.popDialog()
        return
    self.adoptPetNum = exitVal
    self.doDialog(Dialog_NamePicker)
def __handleNamePickerDlg(self, exitVal):
    """Name picker closed: -1 = cancel, otherwise the chosen name index.
    A toon that already owns a pet must first confirm returning it."""
    if exitVal == -1:
        self.popDialog()
        return
    self.adoptPetNameIndex = exitVal
    nextDialog = Dialog_ReturnPet if base.localAvatar.hasPet() else Dialog_AdoptPet
    self.doDialog(nextDialog)
def __handleAdoptPetDlg(self, exitVal):
    """Adoption confirm closed: 0 = go back, 1 = adopt and close the GUI."""
    if exitVal == 0:
        self.popDialog()
        return
    if exitVal == 1:
        self.destroyDialog()
        messenger.send(self.eventDict['petAdopted'], [self.adoptPetNum, self.adoptPetNameIndex])
        messenger.send(self.eventDict['guiDone'])
def __handleGoHomeDlg(self, exitVal):
    """Go-home prompt closed.  Both answers close the GUI; 1 also teleports
    the toon to its estate."""
    if exitVal not in (0, 1):
        return
    messenger.send(self.eventDict['guiDone'])
    if exitVal == 1:
        place = base.cr.playGame.getPlace()
        if place is None:
            self.notify.warning('Tried to go home, but place is None.')
            return
        place.goHomeNow(base.localAvatar.lastHood)
def __handleReturnPetDlg(self, exitVal):
    """Return-pet confirm closed: 0 = back, 1 = return the pet.  When we got
    here from the name picker, returning flows straight into adoption."""
    if exitVal == 0:
        self.popDialog()
        return
    if exitVal != 1:
        return
    cameFromNamePicker = self.dialogStack[len(self.dialogStack) - 2] == Dialog_NamePicker
    if cameFromNamePicker:
        self.doDialog(Dialog_AdoptPet)
    else:
        self.destroyDialog()
        messenger.send(self.eventDict['petReturned'])
        messenger.send(self.eventDict['guiDone'])
|
ksmit799/Toontown-Source
|
toontown/pets/PetshopGUI.py
|
Python
|
mit
| 30,460
|
""" main user entry point for mooshimeter bluetooth multimeter """
import ui
import dialogs
from pymooshi import Mooshimeter
from pymooshi.genlibpy import multimeter as mm
from pymooshi.genlibpy import tls
import time
print(tls.set_logger())
def update_results(results):
    """Push a fresh pair of channel readings into the results-page widgets.

    *results* holds one value per channel; the bar is normalised against the
    channel's configured target range.
    """
    for chan in (1, 2):
        reading = results[chan - 1]
        vrslt['rsltVal%d' % chan].text = '%4f' % reading
        vrslt['rsltBar%d' % chan].value = reading / meter.ch_targets[chan - 1]
def show_page(page_idx, pages):
    """Make page *page_idx* of *pages* the visible one.

    On the very first call (actPage is None) every page is mounted hidden
    below the page selector and both channels' widgets are initialised.
    """
    print('page %s/%d' % (page_idx, len(vw.subviews)))
    global actPage
    if actPage is None:
        for pg in pages:
            pg.hidden = True
            pg.y = 60
            pg.flex = 'WH'
            vw.add_subview(pg)
        fill_function(1)
        fill_function(2)
    else:
        actPage.hidden = True
        #vw.remove_subview(actPage)
    actPage = pages[page_idx]
    actPage.hidden = False
def fill_function(chan, mmFunction=None):
    """Refresh the function button, unit label and target field for channel *chan*.

    When *mmFunction* is None the currently active function is queried from
    the meter.
    """
    if mmFunction is None:
        mmFunction = meter.get_mmFunction(chan)
    funcName = mm.mmFunctions[mmFunction][0]
    funcUnit = mm.mmFunctions[mmFunction][1]
    vset['function%d' % chan].title = funcName
    vrslt['unit%d' % chan].text = funcUnit
    vset['target%d' % chan].text = '%.1f' % meter.ch_targets[chan-1]
    print('u%d:%s' % (chan, vrslt['unit%d' % chan].text))
def set_function(chan):
    """Let the user pick a measurement function for channel *chan* and apply it.

    Returns the chosen function name, or None when the dialog was cancelled.
    """
    funcMap = Mooshimeter.mooshiFunc2 if chan == 2 else Mooshimeter.mooshiFunc1
    keys = funcMap.keys()
    # Preserve mmFunctions ordering while filtering to this channel's options.
    nms = [mm.mmFunctions[k][0] for k in mm.mmFunctions.keys() if k in keys]
    lds = ui.ListDataSource([{'title': tm} for tm in nms])
    print([d['title'] for d in lds.items])
    sel = dialogs.list_dialog('select function', lds.items)
    if sel:
        fnc = mm.mmFunction(sel['title'])
        trg = float(vset['target%d' % chan].text)
        print('mmfunc:%s(%d) trg:%f' % (sel, fnc, trg))
        meter.set_function(chan, fnc, trg)
        fill_function(chan, fnc)
        return sel['title']
def func1act(sender):
    # Button callback: pick a new function for channel 1.
    # NOTE(review): set_function returns None when the picker is cancelled,
    # which would blank the button title — confirm intended.
    sender.title = set_function(1)
def func2act(sender):
    # Button callback: pick a new function for channel 2 (see func1act note).
    sender.title = set_function(2)
def selPageAct(sender):
    # Segmented-control callback: switch between settings (0) and results (1).
    page = sender.selected_index
    show_page(page,(vset,vrslt))
class vwMultimeter(ui.View):
    """Root view of the app; shuts the meter down when the window closes."""
    def did_load(self):
        pass
    def will_close(self):
        print('closing')
        meter.trigger(0)  # stop sampling before disconnecting
        meter.close()
class vwSettings(ui.View):
    """Settings page (loaded from mm_settings.pyui); no custom behaviour yet."""
    def did_load(self):
        pass
class vwResults(ui.View):
    """Results page (loaded from mm_results.pyui); no custom behaviour yet."""
    def did_load(self):
        pass
class vwGraph(ui.View):
    """Graph page placeholder; no custom behaviour yet."""
    def did_load(self):
        pass
# --- application wiring: connect the meter, build the UI, show it ---
meter = Mooshimeter.Mooshimeter() # 'FB55' is my meter
# NOTE(review): fixed delay presumably lets the BLE connection settle — confirm.
time.sleep(2)
meter.set_results_callback(update_results)
meter.meter.print_command_tree()
# Start free-running measurements.
meter.trigger(mm.trModes['continuous'])
actPage=None
vw = ui.load_view()
#vw.flex = 'WH'
vw['selPage'].action = selPageAct
vset = ui.load_view('mm_settings.pyui')
vrslt = ui.load_view('mm_results.pyui')
show_page(0,(vset,vrslt))
if min(ui.get_screen_size()) >= 768:
    # iPad
    vw.frame = (0, 0, 500, 600)
    vw.present('sheet')
else:
    # iPhone
    vw.present(orientations=['portrait'])
print('bye')
|
HJvA/Mooshimeter-Pythonista-IOS
|
uiMeter.py
|
Python
|
mit
| 2,849
|
import argparse
import subprocess
import os
import collections
import pylev
from seqtools.utils import revcomp,fileOpen
from seqtools.fastq import Fastq
def isIndexRevComp(indexfile, indexes, n=500000):
    """Determine if the indexes are reverse complemented or not.

    Samples *n* reads from the index FASTQ file, tallies how many observed
    index sequences fall within edit distance 1 of a supplied index versus
    its reverse complement, and returns True when the reverse-complement
    orientation wins.

    :param indexfile: filename of the Fastq index file
    :param indexes: list or tuple of index strings
    :param n: integer number of reads to sample
    """
    ifile = Fastq(indexfile)
    ilength = len(indexes[0])
    # Tally of observed index sequences, truncated to the expected length.
    indexreads = collections.defaultdict(int)
    # NOTE(review): `.next()` is the Py2 iterator protocol; confirm Fastq
    # exposes it under Py3 (builtin next() would need __next__).
    for i in range(n):
        indexreads[ifile.next().sequence[:ilength]] += 1
    counts = {'normal': 0,
              'revcomp': 0}
    # FIX: removed leftover debug prints ("HERE", ilength, and one line per
    # distinct observed index — potentially hundreds of thousands of lines).
    for k, v in list(indexreads.items()):
        for i in indexes:
            if pylev.levenshtein(k, i) <= 1:
                counts['normal'] += v
                # NOTE(review): 'continue' only skips the revcomp test for
                # this candidate index; the read is still compared against
                # the remaining indexes — confirm intended (vs. 'break').
                continue
            if pylev.levenshtein(k, revcomp(i)) <= 1:
                counts['revcomp'] += v
    if counts['revcomp'] > counts['normal']:
        print('using revcomp')
    else:
        print('NOT revcomp')
    return (counts['revcomp'] > counts['normal'])
def demultiplex(readfile,
                indexfile,
                indexes,
                readfile2=None,
                indexfile2=None):
    """Demultiplex reads from separate FASTQ files into per-index output files.

    All FASTQ files can be gzipped (with suffix .gz).  For each requested
    index an output file named ``<index>_<read filename>`` is created next to
    the corresponding input file; indexes whose output files already exist
    are skipped.  Reads whose index sequence matches none of the requested
    barcodes are silently dropped.

    :param readfile: The filename of the first fastq file
    :param indexfile: The filename of the first index fastq file
    :param indexes: An iterable of indexes. If dual-barcoding is used, the
        indexes should be comma-separated strings, one string for each
        barcode pair.
    :param readfile2: The filename of the second fastq file [optional]
    :param indexfile2: The filename of the second index fastq file. If this
        parameter is included, then the indexes parameter should be a set of
        comma-separated pairs of indexes.
    """
    # --- single readfile, single indexfile ---
    if (readfile2 is None) and (indexfile2 is None):
        rfile1 = Fastq(readfile)
        (rpath, rname) = os.path.split(readfile)
        ifile = Fastq(indexfile)
        indexRevComp = isIndexRevComp(indexfile, indexes)
        ofile1 = {}  # BUGFIX: dict was never created in this branch (NameError)
        existingIndexes = []
        for i in indexes:
            ofname1 = os.path.join(rpath, i + "_" + rname)
            if not os.path.exists(ofname1):
                ofile1[i] = fileOpen(ofname1, 'w')
            else:
                print(ofname1, " already exists, skipping")
                existingIndexes.append(i)
        for i in existingIndexes:
            indexes.remove(i)
        if len(indexes) == 0:
            exit(0)
        indexlen = len(indexes[0])  # BUGFIX: was never defined in this branch (NameError)
        for (r1, i) in zip(rfile1, ifile):
            try:
                if indexRevComp:
                    i2 = revcomp(i.sequence[:indexlen])
                else:
                    i2 = i.sequence[:indexlen]
                ofile1[i2].write(str(r1))
            except KeyError:
                # index not among the requested barcodes; drop the read
                pass
        rfile1.close()
        ifile.close()
        for ofile in list(ofile1.values()):
            ofile.close()
    # --- two readfiles, single indexfile ---
    if (readfile2 is not None) and (indexfile2 is None):
        rfile1 = Fastq(readfile)
        rfile2 = Fastq(readfile2)
        (rpath, rname) = os.path.split(readfile)
        (rpath2, rname2) = os.path.split(readfile2)
        ifile = Fastq(indexfile)
        indexRevComp = isIndexRevComp(indexfile, indexes)
        ofile1 = {}
        ofile2 = {}
        existingIndexes = []
        for i in indexes:
            ofname1 = os.path.join(rpath, i + "_" + rname)
            ofname2 = os.path.join(rpath2, i + "_" + rname2)
            if os.path.exists(ofname1) and os.path.exists(ofname2):
                print(ofname1, ofname2, " already exist, skipping")
                existingIndexes.append(i)
            else:
                ofile1[i] = fileOpen(ofname1, 'w')
                ofile2[i] = fileOpen(ofname2, 'w')
        for i in existingIndexes:
            indexes.remove(i)
        if len(indexes) == 0:
            exit(0)
        indexlen = len(indexes[0])
        for (r1, r2, i) in zip(rfile1, rfile2, ifile):
            try:
                if indexRevComp:
                    i2 = revcomp(i.sequence[:indexlen])
                else:
                    i2 = i.sequence[:indexlen]
                ofile1[i2].write(str(r1))
                ofile2[i2].write(str(r2))
            except KeyError:
                pass
        rfile1.close()
        rfile2.close()
        ifile.close()
        for ofile in list(ofile1.values()):
            ofile.close()
        for ofile in list(ofile2.values()):
            ofile.close()
    # --- two readfiles, two indexfiles (dual barcodes) ---
    if (readfile2 is not None) and (indexfile2 is not None):
        rfile1 = Fastq(readfile)
        rfile2 = Fastq(readfile2)
        (rpath, rname) = os.path.split(readfile)
        (rpath2, rname2) = os.path.split(readfile2)
        ifile = Fastq(indexfile)
        ifile2 = Fastq(indexfile2)
        # 'AAA,CCC' -> ('AAA', 'CCC')
        indexes = [tuple(x.split(',')) for x in indexes]
        indexRevComp = isIndexRevComp(indexfile, [i[0] for i in indexes])
        ofile1 = {}
        ofile2 = {}
        existingIndexes = []
        for j in indexes:
            i = ''.join(j)
            ofname1 = os.path.join(rpath, i + "_" + rname)
            ofname2 = os.path.join(rpath2, i + "_" + rname2)
            if os.path.exists(ofname1) and os.path.exists(ofname2):
                print(ofname1, ofname2, " already exist, skipping")
                # BUGFIX: remember the tuple, not the joined string --
                # 'indexes' holds tuples, so remove(str) raised ValueError.
                existingIndexes.append(j)
            else:
                ofile1[i] = fileOpen(ofname1, 'w')
                ofile2[i] = fileOpen(ofname2, 'w')
        for j in existingIndexes:
            indexes.remove(j)
        if len(indexes) == 0:
            exit(0)
        indexlen = len(indexes[0][0])
        for (r1, r2, i, i2) in zip(rfile1, rfile2, ifile, ifile2):
            try:
                if indexRevComp:
                    ir = revcomp(i.sequence[:indexlen])
                    ir2 = revcomp(i2.sequence[:indexlen])
                else:
                    ir = i.sequence[:indexlen]
                    ir2 = i2.sequence[:indexlen]
                istr = ir + ir2
                ofile1[istr].write(str(r1))
                ofile2[istr].write(str(r2))
            except KeyError:
                pass
        rfile1.close()
        rfile2.close()
        ifile.close()
        ifile2.close()
        for ofile in list(ofile1.values()):
            ofile.close()
        for ofile in list(ofile2.values()):
            ofile.close()
|
lowks/SDST
|
seqtools/demultiplexer.py
|
Python
|
mit
| 7,927
|
import wx

# Conventional gettext-style alias: wrap user-visible strings in _() so
# wxWidgets can look up translations at runtime.
_ = wx.GetTranslation
|
conradoplg/navi
|
appcommon/i18n/__init__.py
|
Python
|
mit
| 32
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from django.contrib import admin
from django.http import JsonResponse
from cmsplugin_cascade.models import CascadePage
@admin.register(CascadePage)
class CascadePageAdmin(admin.ModelAdmin):
    """Hidden admin for CascadePage that only serves the page-sections endpoint."""

    def get_model_perms(self, request):
        """Return empty perms dict to hide the model from admin index."""
        return {}

    def get_urls(self):
        # NOTE(review): `patterns()` was removed in Django 1.10 — this file
        # targets an older Django; confirm before upgrading.
        extra_urls = patterns('',
            url(r'^get_page_sections/$', lambda: None, name='get_page_sections'),  # just to reverse
            url(r'^get_page_sections/(?P<page_pk>\d+)$',
                self.admin_site.admin_view(self.get_page_sections)),
        )
        return extra_urls + super(CascadePageAdmin, self).get_urls()

    def get_page_sections(self, request, page_pk=None):
        """Return the page's section element ids as JSON (empty when unknown)."""
        try:
            element_ids = self.model.objects.get(extended_object_id=page_pk).glossary['element_ids']
        except (self.model.DoesNotExist, KeyError):
            element_ids = {}
        choices = [(key, val) for key, val in element_ids.items()]
        return JsonResponse({'element_ids': choices})
|
rfleschenberg/djangocms-cascade
|
cmsplugin_cascade/admin.py
|
Python
|
mit
| 1,152
|
import numpy as np
import inspect
from cStringIO import StringIO
from _to_native_converter import to_native_class_converter
from _inference_parameter_injector import \
_injectGenericInferenceParameterInterface
from _inference_injector import _injectGenericInferenceInterface
from _misc import defaultAccumulator
import sys
from opengmcore import index_type,value_type,label_type
from abc import ABCMeta, abstractmethod, abstractproperty
from optparse import OptionParser
import inspect
class InferenceBase:
    """Abstract interface implemented by every high-level inference wrapper."""
    # Py2-style ABC declaration (this module predates Py3 metaclass syntax).
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, gm, accumulator, parameter):
        """Set up a solver for *gm* with the given accumulator and parameter."""
        pass

    @abstractmethod
    def infer(self, visitor):
        """Run inference, optionally reporting progress to *visitor*."""
        pass

    @abstractmethod
    def arg(self, out=None):
        """Return the (approximate) optimal labeling."""
        pass
class ImplementationPack(object):
    """Bundle of concrete inference implementations for one algorithm.

    ``implDict`` maps a tuple of hyper-parameter values (or "__NONE__") to a
    dict that maps an (operator, accumulator) semi-ring pair to the
    (inferenceClass, parameterClass) pair implementing it.
    """

    def __init__(self):
        self.implDict = {}

    def __hash__(self):
        return self.implDict.__hash__()

    def _check_consistency(self):
        """Verify all implementations agree on their hyper-parameter metadata.

        Raises RuntimeError when hyper parameters, keywords or help strings
        differ between semi-rings / algorithms of this pack.
        """
        hyperParamsKeywords = None  # as ['minStCut']
        hyperParamsHelp = None  # as ['minStCut implementation for graphcut']
        allowedHyperParams = set()  # as {('push-relabel',), ('komolgorov',)}
        hasInterchangeableParameter = None
        implDict = self.implDict
        # BUGFIX: iterate the dict *values* (the per-hyper-parameter semi-ring
        # dicts); iterating the dict itself walked the key tuples and broke
        # the unpacking below.
        for semiRingDict in implDict.values():
            hyperParameters = None
            # loop over all semi rings
            for algClass, paramClass in semiRingDict.values():
                # BUGFIX: '__hyperParameters' was name-mangled inside this
                # class and could never resolve; the accessor is
                # '_hyperParameters' (as used by the properties below).
                hp = algClass._hyperParameters()
                # the hyper parameter (as push-relabel) must be identical for
                # all semi-rings.
                # BUGFIX: only raise when the values actually differ (the old
                # code raised unconditionally on the second semi-ring).
                if hyperParameters is not None and hp != hyperParameters:
                    raise RuntimeError("inconsistency in hyperParameters of %s"
                                       % algClass._algNames())
                hyperParameters = hp
                allowedHyperParams.add(hyperParameters)
                hpK = algClass._hyperParameterKeywords()
                hpH = algClass._hyperParametersHelp()
                icp = algClass._hasInterchangeableParameter()
                if hasInterchangeableParameter is not None:
                    assert (icp == hasInterchangeableParameter)
                else:
                    hasInterchangeableParameter = icp
                # keyword list and help list must match across every
                # algorithm within the implementation pack
                if (hyperParamsKeywords is not None
                        and hyperParamsHelp is not None):
                    if hpK != hyperParamsKeywords:
                        raise RuntimeError("inconsistency in hyperParamsKeywords of %s"
                                           % algClass._algNames())
                    if hpH != hyperParamsHelp:
                        raise RuntimeError("inconsistency in hyperParamsHelp of %s"
                                           % algClass._algNames())
                else:
                    hyperParamsKeywords = hpK
                    hyperParamsHelp = hpH
                    if len(hyperParamsKeywords) != len(hyperParamsHelp):
                        raise RuntimeError("inconsistency in hyperParamsHelp and "
                                           "hyperParamsKeywords of %s"
                                           % algClass._algNames())

    @property
    def allowedHyperParameters(self):
        """Set of hyper-parameter tuples this pack implements."""
        allowedHyperParams = set()
        for hyperParameters in self.implDict.keys():
            allowedHyperParams.add(hyperParameters)
        return allowedHyperParams

    @property
    def hasHyperParameters(self):
        return len(self.hyperParameterKeywords) != 0

    @property
    def hyperParameterKeywords(self):
        try:
            return dictDictElement(self.implDict)[0]._hyperParameterKeywords()
        except:
            raise RuntimeError(dictDictElement(self.implDict))

    @property
    def hyperParametersDoc(self):
        return dictDictElement(self.implDict)[0]._hyperParametersDoc()

    @property
    def hyperParameters(self):
        return dictDictElement(self.implDict)[0]._hyperParameters()

    @property
    def hasInterchangeableParameter(self):
        return dictDictElement(self.implDict)[0]._hasInterchangeableParameter()

    @property
    def anyParameterClass(self):
        return dictDictElement(self.implDict)[1]
def classGenerator(
classname,
inferenceClasses,
defaultHyperParams,
exampleClass,
):
""" generates a high level class for each BASIC inference algorithm:
There will be One class For Bp regardless what the operator
and accumulator is .
Also all classes with addidional templates lie
GraphCut<PushRelabel> and GraphCut<komolgorov> will glued
together to one class GraphCut
"""
#print "className ",classname
members = inspect.getmembers(exampleClass, predicate=inspect.ismethod)
def inference_init(self, gm, accumulator=None, parameter=None):
    """Constructor shared by every generated high-level inference class.

    Resolves the concrete implementation (and its parameter class) from the
    graphical model's operator, the chosen accumulator and any hyper
    parameters hidden inside *parameter*, then instantiates the solver as
    ``self.inference``.
    """
    # self._old_init()
    # set up basic properties
    self.gm = gm
    self.operator = gm.operator
    if accumulator is None:
        self.accumulator = defaultAccumulator(gm)
    else:
        self.accumulator = accumulator
    self._meta_parameter = parameter
    # get hyper parameter (as minStCut for graphcut, or the subsolver for
    # dualdec.)
    hyperParamKeywords = self._infClasses.hyperParameterKeywords
    numHyperParams = len(hyperParamKeywords)
    userHyperParams = [None]*numHyperParams
    collectedHyperParameters = 0
    # get the users hyper parameter ( if given) -- popping them so they are
    # not passed on to the native parameter object
    if(self._meta_parameter is not None):
        for hpIndex, hyperParamKeyword in enumerate(hyperParamKeywords):
            if hyperParamKeyword in self._meta_parameter.kwargs:
                userHyperParams[hpIndex] = self._meta_parameter.kwargs.pop(
                    hyperParamKeyword)
                collectedHyperParameters += 1
    # check if ZERO or ALL hyperParamerts have been collected
    if collectedHyperParameters != 0 and collectedHyperParameters != numHyperParams:
        raise RuntimeError("All or none hyper-parameter must be given")
    # check if the WHOLE tuple of hyperParameters is allowed
    if collectedHyperParameters != 0:
        if tuple(str(x) for x in userHyperParams) not in inferenceClasses.implDict:
            raise RuntimeError("%s is not an allowed hyperParameter\nAllowed hyperParameters are %s" % (
                repr(userHyperParams), repr(inferenceClasses.implDict.keys())))
    else:
        userHyperParams = defaultHyperParams
    try:
        # get the selected inference class and the parameter
        if(numHyperParams == 0):
            self._selectedInfClass, self._selectedInfParamClass = inferenceClasses.implDict[
                "__NONE__"][(self.operator, self.accumulator)]
        else:
            hp = tuple(str(x) for x in userHyperParams)
            self._selectedInfClass, self._selectedInfParamClass = inferenceClasses.implDict[
                hp][(self.operator, self.accumulator)]
    except:
        dictStr = str(inferenceClasses.implDict)
        raise RuntimeError("given seminring (operator = %s ,accumulator = %s) is not implemented for this solver\n %s" % \
            (self.operator, self.accumulator, dictStr))
    if self._meta_parameter is None:
        # no user parameter: fall back to the implementation's defaults
        self.parameter = self._selectedInfClass._parameter()
        self.parameter.set()
    else:
        # convert the generic meta parameter into the native parameter class
        self.parameter = to_native_class_converter(
            givenValue=self._meta_parameter, nativeClass=self._selectedInfParamClass)
        assert self.parameter is not None
    self.inference = self._selectedInfClass(self.gm, self.parameter)
def verboseVisitor(self, printNth=1, multiline=True):
""" factory function to get a verboseVisitor:
A verboseVisitor will print some information while inference is running
**Args**:
printNth : call the visitor in each nth visit (default : ``1``)
multiline : print the information in multiple lines or in one line (default: ``True``)
**Notes**:
The usage of a verboseVisitor can slow down inference a bit
"""
return self.inference.verboseVisitor(printNth, multiline)
def timingVisitor(self, visitNth=1,reserve=0,verbose=True, multiline=True,timeLimit=float('inf')):
""" factory function to get a verboseVisitor:
A verboseVisitor will print some information while inference is running
**Args**:
visitNth : call the python visitor in each nth visit (default : ``1``)
reserve : reserve space for bounds,values,times, and iteratios (default: ``0``)
verbose : print information (default ``True``)
multiline : print the information in multiple lines or in one line (default: ``True``)
**Notes**:
The usage of a timingVisitor can slow down inference a bit
"""
return self.inference.timingVisitor(visitNth=visitNth,reserve=reserve,verbose=verbose, multiline=multiline,timeLimit=timeLimit)
def pythonVisitor(self, callbackObject, visitNth):
""" factory function to get a pythonVisitor:
A python visitor can callback to pure python within the c++ inference
**Args**:
callbackObject : python function ( or class with implemented ``__call__`` function)
visitNth : call the python function in each nth visit (default : 1)
**Notes**:
The usage of a pythonVisitor can slow down inference
"""
return self.inference.pythonVisitor(callbackObject, visitNth)
def infer(self, visitor=None, releaseGil=True):
""" start the inference
**Args**:
visitor : run inference with an optional visitor (default : None)
**Notes**:
a call of infer will unlock the GIL
"""
assert self.inference is not None
return self.inference.infer(visitor=visitor, releaseGil=releaseGil)
def arg(self, returnAsVector=False, out=None):
""" get the result of the inference
**Args**:
returnAsVector : return the result as ``opengm.LabelVector`` (default : ``False``)
To get a numpy ndarray ignore this argument or set it to ``False``
out : ``if returnAsVector==True`` a preallocated ``opengm.LabelVector`` can be passed to this function
"""
return self.inference.arg(out=out, returnAsVector=returnAsVector)
    def partialOptimality(self):
        """Return a numpy array of booleans which are true where the
        variable's label is provably optimal (solver-dependent certificate).
        """
        return self.inference.partialOptimality()
    def setStartingPoint(self, labels):
        """Set a starting point / start labeling for the solver.

        **Args**:
            labels : starting point labeling (converted to a numpy array
                of ``opengm.label_type`` if necessary)
        """
        numpyLabels=np.require(labels,dtype=label_type)
        self.inference.setStartingPoint(numpyLabels)
    def bound(self):
        """Return the current bound of the solver."""
        return self.inference.bound()
    def value(self):
        """Return the value of the current solution.

        The same as ``gm.evaluate(inf.arg())``.
        """
        return self.inference.value()
    def reset(self):
        """Reset the inference solver (the structure of the gm must not change)."""
        return self.inference.reset()
    def marginals(self,vis):
        """Return the marginals for a subset of variable indices.

        Args:
            vis : variable indices (for highest performance use a
                numpy.ndarray with ``opengm.index_type`` as dtype)
        Returns :
            a 2d numpy.ndarray where the first axis iterates over the
            variables passed by ``vis``
        Notes :
            All variables in ``vis`` must have the same number of labels
        """
        return self.inference.marginals(vis)
    def factorMarginals(self,fis):
        """Return the marginals for a subset of factor indices.

        Args:
            fis : factor indices (for highest performance use a
                numpy.ndarray with ``opengm.index_type`` as dtype)
        Returns :
            a N-d numpy.ndarray where the first axis iterates over the
            factors passed by ``fis``
        Notes :
            All factors in ``fis`` must have the same number of variables
            and shape
        """
        return self.inference.factorMarginals(fis)
    def addConstraint(self, lpVariableIndices, coefficients, lowerBound, upperBound):
        """
        Add a single constraint to the lp.

        **Args** :
            lpVariableIndices : variable indices w.r.t. the lp
            coefficients : coefficients of the constraint
            lowerBound : lowerBound of the constraint
            upperBound : upperBound of the constraint
        """
        self.inference.addConstraint(
            lpVariableIndices, coefficients, lowerBound, upperBound)
    def addConstraints(self, lpVariableIndices, coefficients, lowerBounds, upperBounds):
        """
        Add multiple constraints to the lp.

        **Args** :
            lpVariableIndices : variable indices w.r.t. the lp
            coefficients : coefficients of the constraints
            lowerBounds : lowerBounds of the constraints
            upperBounds : upperBounds of the constraints
        """
        self.inference.addConstraints(
            lpVariableIndices, coefficients, lowerBounds, upperBounds)
    def getEdgeLabeling(self):
        """Return the edge labeling of the solver (solver specific)."""
        return self.inference.getEdgeLabeling()
    def lpNodeVariableIndex(self, variableIndex, label):
        """
        Get the lp variable index from a gm variable index and the label.

        **Args**:
            variableIndex : variable index w.r.t. the graphical model
            label : label of the variable
        **Returns**:
            variableIndex w.r.t. the lp
        """
        return self.inference.lpNodeVariableIndex(variableIndex, label)
    def lpFactorVariableIndex(self, factorIndex, labels):
        """
        Get the lp factor index from a gm factor index and the labeling
        (or the scalar index of the labeling).

        **Args**:
            factorIndex : factor index w.r.t. the graphical model
            labels : labeling of the factor (or a scalar index of the labeling)
        **Returns**:
            variableIndex w.r.t. the lp of the factor (and it's labeling )
        """
        return self.inference.lpFactorVariableIndex(factorIndex, labels)
    def generateParamHelp():
        """Build the parameter help text for this implementation pack.

        Closure over ``inferenceClasses``; the returned string is embedded
        into the generated class docstring.  Uses Python-2 print statements
        redirected into a StringIO to assemble the text.
        """
        # simple parameter (no hyper parameters at all)
        if not inferenceClasses.hasHyperParameters:
            # get any parameter of this impl pack
            exampleParam = inferenceClasses.anyParameterClass()
            exampleParam.set()
            paramHelp = exampleParam._str_spaced_()
            return paramHelp
        # with hyper parameter(s)
        else:
            # the C++ parameter does NOT CHANGE if hyper parameters change
            if inferenceClasses.hasInterchangeableParameter:
                # get any parameter of this impl pack
                exampleParam = inferenceClasses.anyParameterClass()
                exampleParam.set()
                paramHelp = exampleParam._str_spaced_()
                # append hyper parameter(s)
                # print to string (capture stdout)!!!
                old_stdout = sys.stdout
                sys.stdout = mystdout = StringIO()
                # loop over all hp Keywords (usually there is max. 1 hyper parameter)
                # (should it be allowed to use more than 1 hp??? right now it is!)
                assert len(inferenceClasses.hyperParameterKeywords) == 1
                hyperParameterKeyword = inferenceClasses.hyperParameterKeywords[0]
                hyperParameterDoc = inferenceClasses.hyperParametersDoc[0]
                print " * %s : %s" % (hyperParameterKeyword, hyperParameterDoc)
                # loop over all hyperparameters
                for hyperParameters in inferenceClasses.implDict.keys():
                    hyperParameter = hyperParameters[0]
                    # get an example for this hyperparameter class
                    classes = inferenceClasses.implDict[hyperParameters]
                    # get any semi ring solver
                    [solverC, paramC] = dictElement(classes)
                    assert len(hyperParameters) == 1
                    if(solverC._isDefault()):
                        print " - ``'%s'`` (default)\n" % (hyperParameter,)
                    else:
                        print " - ``'%s'``\n" % (hyperParameter,)
                sys.stdout = old_stdout
                hyperParamHelp = mystdout.getvalue()
                return paramHelp + "\n\n" + hyperParamHelp
            # the C++ parameter DOES CHANGE if hyper parameters change
            else:
                # print to string (capture stdout)!!!
                old_stdout = sys.stdout
                sys.stdout = mystdout = StringIO()
                print "The parameter object of has internal dependencies:\n\n"
                assert len(inferenceClasses.hyperParameterKeywords) == 1
                hyperParameterKeyword = \
                    inferenceClasses.hyperParameterKeywords[0]
                hyperParameterDoc = inferenceClasses.hyperParametersDoc[0]
                print(" * %s : %s"
                      % (hyperParameterKeyword, hyperParameterDoc))
                # loop over all hyperparameters
                for hyperParameters in inferenceClasses.implDict.keys():
                    hyperParameter = hyperParameters[0]
                    # get an example for this hyperparameter class
                    classes = inferenceClasses.implDict[hyperParameters]
                    # get any semi ring solver
                    [solverC, paramC] = dictElement(classes)
                    assert len(hyperParameters) == 1
                    if(solverC._isDefault()):
                        print(" - ``'%s'`` (default)\n"
                              % (hyperParameter,))
                    else:
                        print(" - ``'%s'``\n"
                              % (hyperParameter,))
                # second pass: document the concrete parameter object of
                # each hyper-parameter variant
                for hyperParameters in inferenceClasses.implDict.keys():
                    hyperParameter = hyperParameters[0]
                    # get an example for this hyperparameter class
                    classes = inferenceClasses.implDict[hyperParameters]
                    # get any semi ring solver
                    [solverC, paramC] = dictElement(classes)
                    hyperParameterKeywords = solverC._hyperParameterKeywords()
                    hyperParameters = solverC._hyperParameters()
                    assert len(hyperParameterKeywords) == 1
                    assert len(hyperParameters) == 1
                    hyperParameterKeyword = hyperParameterKeywords[0]
                    hyperParameter = hyperParameters[0]
                    print(" ``if %s == %s`` : \n\n"
                          % (hyperParameterKeyword, hyperParameter))
                    exampleParam = paramC()
                    exampleParam.set()
                    print exampleParam._str_spaced_('      ')
                sys.stdout = old_stdout
                return mystdout.getvalue()
    # assemble the __dict__ of the generated inference class
    memberDict = {
        # public members
        '__init__': inference_init,
        'infer': infer,
        'arg': arg,
        'bound': bound,
        'value': value,
        'setStartingPoint': setStartingPoint,
        # instance attributes filled in by inference_init
        'gm': None,
        'operator': None,
        'accumulator': None,
        'inference': None,
        'parameter': None,
        # 'protected' members
        '_meta_parameter': None,
        '_infClasses': inferenceClasses,
        '_selectedInfClass': None,
        '_selectedInfParamClass': None
    }
    def _generateFunction_(function):
        # wrap a raw exported member function so that it is invoked on
        # ``self.inference`` while keeping the original docstring
        def _f_(self,*args,**kwargs):
            #""" %s """ % str(function.__doc__)
            return function(self.inference,*args,**kwargs)
        #print "\n\n DOCSTRING",function.__doc__
        _f_.__doc__=function.__doc__
        return _f_
    # forward every public raw member of the example class
    for m in members:
        if m[0].startswith('_') or m[0].endswith('_') :
            pass
        else :
            #print m[0]
            memberDict[m[0]]=_generateFunction_(m[1])
    """
    if hasattr(exampleClass, "reset"):
        memberDict['reset'] = reset
    if hasattr(exampleClass, "verboseVisitor"):
        memberDict['verboseVisitor'] = verboseVisitor
    if hasattr(exampleClass, "timingVisitor"):
        memberDict['timingVisitor'] = timingVisitor
    if hasattr(exampleClass, "pythonVisitor"):
        memberDict['pythonVisitor'] = pythonVisitor
    if hasattr(exampleClass, "marginals") and hasattr(exampleClass, "factorMarginals"):
        memberDict['marginals'] = marginals
        memberDict['factorMarginals'] = factorMarginals
    if hasattr(exampleClass, "addConstraint") and hasattr(exampleClass, "addConstraints"):
        memberDict['addConstraints'] = addConstraints
        memberDict['addConstraint'] = addConstraint
        memberDict['lpNodeVariableIndex'] = lpNodeVariableIndex
        memberDict['lpFactorVariableIndex'] = lpFactorVariableIndex
    if hasattr(exampleClass, "partialOptimality") :
        memberDict['partialOptimality'] = partialOptimality
    if hasattr(exampleClass, "getEdgeLabeling") :
        memberDict['getEdgeLabeling'] = getEdgeLabeling
    """
    # create the new class dynamically
    infClass = type(classname, (InferenceBase,), memberDict)
    infClass.__init__ = inference_init
    # print to string (capture stdout to build the class docstring)!!!
    old_stdout = sys.stdout
    sys.stdout = mystdout = StringIO()
    print """ %s is a %s inference algorithm
    **Args** :
      gm : the graphical model to infere / optimize
      accumulator : accumulator used for inference can be:
        -``'minimizer'`` (default : ``if gm.operator is 'adder'==True:``)
        -``'maximizer'`` (default : ``if gm.operator is 'multiplier'==True:``)
        -``'integrator'``
        Not any accmulator can be used for any solver.
        Which accumulator can be used will be in the documentation soon.
      parameter : parameter object of the solver
    """ % (exampleClass._algName(), exampleClass._algType())
    print """
    **Parameter** :
      %s
    """ % (generateParamHelp(),)
    if(exampleClass._examples() != ''):
        print """ **Examples**: ::
    %s
    """ % (exampleClass._examples() .replace("\n", "\n    "),)
    if(exampleClass._guarantees() != ''):
        print """ **Guarantees** :
    %s
    """ % (exampleClass._guarantees(),)
    if(exampleClass._limitations() != ''):
        print """ **Limitations** :
    %s
    """ % (exampleClass._limitations(),)
    if(exampleClass._cite() != ''):
        print """ **Cite** :
    %s
    """ % (exampleClass._cite().replace("\n\n", "\n\n    "),)
    if(exampleClass._dependencies() != ''):
        print """ **Dependencies** :
    %s
    """ % (exampleClass._dependencies(),)
    if(exampleClass._notes() != ''):
        print """ **Notes** :
    %s
    """ % (exampleClass._notes().replace("\n\n", "\n\n    "),)
    sys.stdout = old_stdout
    # install the captured text as the constructor docstring
    infClass.__dict__['__init__'].__doc__ = mystdout.getvalue()
    return infClass, classname
def dictElement(aDict):
    """Return an arbitrary element (the "first" value) of *aDict*.

    Uses the portable ``next(iter(...))`` idiom instead of the
    Python-2-only ``aDict.itervalues().next()``; behavior is identical
    (raises ``StopIteration`` on an empty dict) and it also works on
    Python 3.
    """
    return next(iter(aDict.values()))
def dictDictElement(dictDict):
    # Return an arbitrary element of a dict-of-dicts (the "first" value of
    # the "first" inner dict); delegates to the sibling ``dictElement``.
    return dictElement(dictElement(dictDict))
def _inject_interface(solverDicts):
    """Build the high-level inference interface from the raw C++ exports.

    **Args** :
        solverDicts : iterable of ``(moduleDict, operatorName, accumulatorName)``
            tuples, one per semi-ring specific extension module.
    **Returns** :
        list of ``(class, classname)`` pairs as produced by ``classGenerator``.

    Fixes vs. the previous version: the bare ``except:`` around the
    parameter lookup is narrowed to ``except Exception:`` (so
    ``KeyboardInterrupt``/``SystemExit`` are no longer swallowed), and a
    redundant duplicated assignment of ``metaAlgs`` was removed.
    """
    algs = dict()
    algDefaultHyperParams = dict()
    exampleClasses = dict()
    for solverDict, op, acc in solverDicts:
        semiRing = (op, acc)
        # inject raw interface to parameters and subparameters
        try:
            paramDict = solverDict['parameter'].__dict__
        except Exception:
            raise RuntimeError(repr(solverDict))
        for key in paramDict:
            paramClass = paramDict[key]
            if inspect.isclass(paramClass):
                _injectGenericInferenceParameterInterface(
                    paramClass, infParam=not key.startswith('_SubParameter'),
                    subInfParam=key.startswith('_SubParameter'))
        for key in solverDict:
            elementInDict = solverDict[key]
            if (inspect.isclass(elementInDict) and not key.endswith('Visitor')
                    and hasattr(elementInDict, '_algName')
                    and hasattr(elementInDict, '_parameter')):
                solverClass = elementInDict
                param = solverClass._parameter()
                paramClass = param.__class__
                # inject raw interface to inference
                _injectGenericInferenceInterface(solverClass)
                # Get Properties to group algorithm
                algName = solverClass._algName()
                hyperParamKeywords = [str(
                    x) for x in solverClass._hyperParameterKeywords()]
                hyperParameters = tuple(str(
                    x) for x in solverClass._hyperParameters())
                assert hyperParamKeywords is not None
                exampleClasses[algName] = solverClass
                # group implementations by algorithm name, e.g. algs['GraphCut']
                if algName in algs:
                    metaAlgs = algs[algName]
                else:
                    implPack = ImplementationPack()
                    algs[algName] = implPack
                    metaAlgs = algs[algName]
                if(len(hyperParameters) == 0):
                    if '__NONE__' in metaAlgs.implDict:
                        semiRingAlgs = metaAlgs.implDict["__NONE__"]
                    else:
                        metaAlgs.implDict["__NONE__"] = dict()
                        semiRingAlgs = metaAlgs.implDict["__NONE__"]
                else:
                    if hyperParameters in metaAlgs.implDict:
                        # NOTE(review): this is the only access that goes
                        # through asDict() instead of implDict -- presumably
                        # equivalent; verify against ImplementationPack.
                        semiRingAlgs = metaAlgs.asDict()[hyperParameters]
                    else:
                        metaAlgs.implDict[hyperParameters] = dict()
                        semiRingAlgs = metaAlgs.implDict[hyperParameters]
                semiRingAlgs[semiRing] = (solverClass, paramClass)
                if(len(hyperParameters) == 0):
                    metaAlgs.implDict["__NONE__"] = semiRingAlgs
                else:
                    metaAlgs.implDict[hyperParameters] = semiRingAlgs
                algs[algName] = metaAlgs
                # check if this implementation is the default
                if solverClass._isDefault():
                    algDefaultHyperParams[algName] = hyperParameters
    result = []
    # generate high level interface
    # (assumes every algName has a default implementation -- otherwise
    # algDefaultHyperParams[algName] raises KeyError, as before)
    for algName in algs.keys():
        a = algs[algName]
        adhp = algDefaultHyperParams[algName]
        ec = exampleClasses[algName]
        result.append(classGenerator(algName, a, adhp, ec))
    return result
|
yanlend/opengm
|
src/interfaces/python/opengm/_inference_interface_generator.py
|
Python
|
mit
| 27,300
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, print_function
import os
import re
import sys
import uuid
import codecs
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from pip.req import parse_requirements
class Tox(TestCommand):
    """setuptools ``test`` command that delegates the whole run to tox."""
    def finalize_options(self):
        TestCommand.finalize_options(self)
        # no extra args; let tox drive test discovery itself
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # import here, because outside the command the eggs aren't loaded yet
        import tox
        errno = tox.cmdline(self.test_args)
        # propagate tox's exit status to the shell
        sys.exit(errno)
def read(*parts):
    """Return the UTF-8 decoded contents of a file addressed by path
    components relative to this script's directory."""
    target = os.path.join(os.path.dirname(__file__), *parts)
    with codecs.open(target, encoding='utf-8') as handle:
        return handle.read()
def find_version(*file_paths):
    """Extract the ``__version__`` string from the given file.

    Raises ``RuntimeError`` when no version assignment is found.
    """
    contents = read(*file_paths)
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if match is None:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
pypi_readme_note = """\
.. note::
For the latest source, discussions, etc., please visit the
`GitHub repository <https://github.com/OohlaLabs/uniauth>`_
"""
setup(
name='uniauth',
version=find_version('uniauth', '__init__.py'),
author='OohlaLabs Limited',
author_email='packages@oohlalabs.com',
maintainer='Thierry Jossermoz',
maintainer_email='thierry.jossermoz@oohlalabs.com',
url="https://github.com/OohlaLabs/uniauth",
description="Minimalist and framework independent OAuth(1 & 2) consumers",
long_description="\n\n".join([pypi_readme_note, read('README.rst')]),
install_requires=[str(ir.req) for ir in parse_requirements('requirements.txt', session=uuid.uuid1())],
packages=find_packages(),
tests_require=["tox"],
cmdclass={"test": Tox},
license='MIT',
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
)
|
jthi3rry/uniauth
|
setup.py
|
Python
|
mit
| 2,280
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-05-19 19:58
#
# Filename: batfile.py
#
# Description: All Rights Are Reserved
#
"""
class PyColor(object):
    """ANSI escape-sequence helper for colored printing in the Python
    interpreter.  ("F3" calls the Addpy() function to insert this class,
    as defined in the .vimrc for the vim editor.)"""
    def __init__(self):
        # Verbatim reference table describing the ANSI color escapes.
        self.self_doc = r"""
        STYLE: \033['display model';'foreground';'background'm
        DETAILS:
        FOREGROUND BACKGOUND COLOR
        ---------------------------------------
        30 40 black
        31 41 red
        32 42 green
        33 43 yellow
        34 44 blue
        35 45 purple
        36 46 cyan
        37 47 white
        DISPLAY MODEL DETAILS
        -------------------------
        0 default
        1 highlight
        4 underline
        5 flicker
        7 reverse
        8 non-visiable
        e.g:
        \033[1;31;40m <!--1-highlight;31-foreground red;40-background black-->
        \033[0m <!--set all into default-->
        """
        # Preset sequences: white-on-red warnings, red-on-green tips,
        # plus the reset sequence; _newcolor holds a user-defined one.
        self.warningcolor = '\033[0;37;41m'
        self.tipcolor = '\033[0;31;42m'
        self.endcolor = '\033[0m'
        self._newcolor = ''
    @property
    def new(self):
        """The user-defined custom color escape sequence."""
        return self._newcolor
    @new.setter
    def new(self, color_str):
        """Install *color_str* as the custom color escape sequence."""
        self._newcolor = color_str
    def disable(self):
        """Blank the warning and reset sequences so output is uncolored."""
        self.warningcolor = self.endcolor = ''
#import scipy as sp
#import math as m
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D as Ax3
#from scipy import stats as st
#from matplotlib import cm
#import numpy as np
import os
from packages.filesline.filesline import FileLine
class BatchFileMerge(FileLine):
    """Merge the files of two sibling directories via an external diff/merge
    shell script; directory scanning is inherited from ``FileLine``
    (assumes FileLine provides ``alldir``, ``filelist()`` and ``files`` --
    TODO confirm)."""
    def __init__(self, dir1, dir2, dir3):
        # dir1: merge-into dir, dir2: merge-from dir, dir3: project root
        FileLine.__init__(self, dir1)
        self.mergeinto_direction = dir1
        self.mergefrom_direction = dir2
        self.main_direction = dir3
        # files present in only one directory
        self.new_files = []
        # paired files present in both directories
        self.mergeinto_files = []
        self.mergefrom_files = []
    def find_file_list(self, file_dir):
        # populate self.files[file_dir] via the inherited filelist()
        self.alldir = [file_dir]
        self.filelist()
    def match_file_name(self):
        # partition files into pairs present in both dirs vs. new files
        self.find_file_list(self.mergeinto_direction)
        self.find_file_list(self.mergefrom_direction)
        for file in self.files[self.mergeinto_direction]:
            if file in self.files[self.mergefrom_direction]:
                self.mergeinto_files.append(self.mergeinto_direction + "/" + file)
            else:
                self.new_files.append(self.mergeinto_direction + "/" + file)
        for file in self.files[self.mergefrom_direction]:
            if file in self.files[self.mergeinto_direction]:
                self.mergefrom_files.append(self.mergefrom_direction + "/" + file)
            else:
                self.new_files.append(self.mergefrom_direction + "/" + file)
        # the two paired lists must line up one-to-one
        if len(self.mergefrom_files) != len(self.mergeinto_files):
            raise Exception("WRONG FILES")
    def backupfiles(self, backup_dir):
        # copy every file in backup_dir to <name>_bak via shell ``cp``
        os.chdir(backup_dir)
        for file in self.files[backup_dir]:
            command = "cp "
            command += file + " " + file + "_bak"
            print command
            val = os.system(command)
    def mergefiles(self):
        # run the external merge script on each paired file; record files
        # whose diff is non-trivial into difffile.txt
        self.mergeinto_files = sorted(self.mergeinto_files)
        self.mergefrom_files = sorted(self.mergefrom_files)
        file_counter = 0
        os.chdir(self.main_direction)
        file_diff = open('difffile.txt', 'w')
        for file in self.mergeinto_files:
            merge_command = "/home/edony/code/github/toolkitem/mergefile/analysefile.sh "
            merge_command += "." + file.split(self.main_direction)[1]
            merge_command += " " + "."
            merge_command += self.mergefrom_files[file_counter].split(self.main_direction)[1]
            print merge_command
            os.system(merge_command)
            # the helper script presumably writes its diff to buf.diff;
            # more than 2 lines is treated as a real difference -- confirm
            file_tmp = open('buf.diff')
            if len(file_tmp.readlines()) > 2:
                file_diff.write(file)
                file_diff.write('\n')
            file_tmp.close()
            file_counter += 1
        file_diff.close()
if __name__ == "__main__":
    # ad-hoc manual test with hard-coded local paths
    dir1 = "/home/edony/code/github/toolkitem/mergefile/f1"
    dir2 = "/home/edony/code/github/toolkitem/mergefile/f2"
    dir3 = "/home/edony/code/github/toolkitem/mergefile"
    test = BatchFileMerge(dir1, dir2, dir3)
    test.match_file_name()
    test.backupfiles(dir1)
    print test.mergeinto_files
    print test.mergefrom_files
    test.mergefiles()
|
edonyM/toolkitem
|
fileprocess/mergefile/batfile.py
|
Python
|
mit
| 5,919
|
from core.himesis import Himesis
import uuid
class HSon2Man(Himesis):
    def __init__(self):
        """
        Creates the himesis graph representing the DSLTrans rule Son2Man.
        Auto-generated: nodes 0/1 are the match/apply models, node 2 the
        paired_with relation, nodes 3-5 the classes, nodes 6-7 the match
        associations.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HSon2Man, self).__init__(name='HSon2Man', num_nodes=0, edges=[])
        # Set the graph attributes
        self["mm__"] = ['HimesisMM']
        self["name"] = """Son2Man"""
        self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Son2Man')
        # match model. We only support one match model
        self.add_node()
        self.vs[0]["mm__"] = """MatchModel"""
        # apply model node
        self.add_node()
        self.vs[1]["mm__"] = """ApplyModel"""
        # paired with relation between match and apply models
        self.add_node()
        self.vs[2]["mm__"] = """paired_with"""
        self.vs[2]["attr1"] = """Son2Man"""
        # match class Child() node ("+" presumably a multiplicity marker -- confirm)
        self.add_node()
        self.vs[3]["mm__"] = """Child"""
        self.vs[3]["attr1"] = """+"""
        # match class Family() node
        self.add_node()
        self.vs[4]["mm__"] = """Family"""
        self.vs[4]["attr1"] = """1"""
        # apply class Man() node
        self.add_node()
        self.vs[5]["mm__"] = """Man"""
        self.vs[5]["attr1"] = """1"""
        # match association Child--family-->Family node
        self.add_node()
        self.vs[6]["attr1"] = """family"""
        self.vs[6]["mm__"] = """directLink_S"""
        # match association Family--sons-->Child node
        self.add_node()
        self.vs[7]["attr1"] = """sons"""
        self.vs[7]["mm__"] = """directLink_S"""
        # Add the edges
        self.add_edges([
            (0,3), # matchmodel -> match_class Child()
            (0,4), # matchmodel -> match_class Family()
            (1,5), # applymodel -> -> apply_class Man()
            (3,6), # match_class Child() -> association family
            (6,4), # association family -> match_class Family()
            (4,7), # match_class Family() -> association sons
            (7,3), # association sons -> match_class Child()
            (0,2), # matchmodel -> pairedwith
            (2,1) # pairedwith -> applyModel
        ])
        # Add the attribute equations
        # Man.fullName := concat(Child.firstName, Family.lastName)
        self["equations"] = [((5,'fullName'),('concat',((3,'firstName'),(4,'lastName')))), ]
|
levilucio/SyVOLT
|
ExFamToPerson/transformation/no_contains/HSon2Man.py
|
Python
|
mit
| 2,616
|
import targets
import testlib
import spike64 # pylint: disable=import-error
class spike64_2_rtos(targets.Target):
    """Debug-test target: two identical 64-bit Spike harts driven through a
    single RTOS-style OpenOCD configuration."""
    harts = [spike64.spike64_hart(), spike64.spike64_hart()]
    openocd_config_path = "spike-rtos.cfg"
    # generous timeout: two harts behind one OpenOCD session are slow
    timeout_sec = 60
    def create(self):
        """Launch the Spike simulator instance backing this target."""
        return testlib.Spike(self)
|
timtian090/Playground
|
RISC-V/riscv-tests/debug/targets/RISC-V/spike64-2-rtos.py
|
Python
|
mit
| 300
|
"""empty message
Revision ID: 52cf5973de2
Revises: 2ecc9f25f6
Create Date: 2015-01-06 11:14:08.980499
"""
# revision identifiers, used by Alembic.
revision = '52cf5973de2'       # this migration's unique id
down_revision = '2ecc9f25f6'   # migration that directly precedes this one
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply this migration: add the nullable boolean ``superuser`` column
    to the ``user`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('superuser', sa.Boolean(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert this migration: drop the ``superuser`` column from ``user``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'superuser')
    ### end Alembic commands ###
|
hreeder/WHAuth
|
migrations/versions/52cf5973de2_.py
|
Python
|
mit
| 601
|
# coding=utf-8
from __future__ import unicode_literals
import unittest
import pykka
from mopidy_soundcloud import actor, SoundCloudExtension
from mopidy_soundcloud.soundcloud import safe_url
from mopidy_soundcloud.library import SoundCloudLibraryProvider, new_folder, \
simplify_search_query
class ApiTest(unittest.TestCase):
    """Tests for the SoundCloud library provider (browsing, search-query
    simplification and URL sanitizing).

    Fix: the deprecated ``assertEquals`` alias is replaced by
    ``assertEqual`` throughout (same semantics, no deprecation warning).
    """
    def setUp(self):
        config = SoundCloudExtension().get_config_schema()
        config['auth_token'] = '1-35204-61921957-55796ebef403996'
        # using this user http://maildrop.cc/inbox/mopidytestuser
        self.backend = actor.SoundCloudBackend.start(
            config={'soundcloud': config},
            audio=None
        ).proxy()
        self.library = SoundCloudLibraryProvider(backend=self.backend)
    def tearDown(self):
        # stop the pykka actor started in setUp
        pykka.ActorRegistry.stop_all()
    def test_add_folder(self):
        try:
            from mopidy.models import Ref
        except ImportError as e:
            self.skipTest(e.message)
        self.assertEqual(
            new_folder('Test', ['test']),
            Ref(name='Test', type='directory',
                uri='soundcloud:directory:test')
        )
    def test_mpc_search(self):
        self.assertEqual(
            simplify_search_query({u'any': [u'explosions in the sky']}),
            'explosions in the sky'
        )
    def test_moped_search(self):
        self.assertEqual(
            simplify_search_query(
                {
                    u'track_name': [u'explosions in the sky'],
                    u'any': [u'explosions in the sky']
                }
            ),
            'explosions in the sky explosions in the sky'
        )
    def test_simple_search(self):
        self.assertEqual(
            simplify_search_query('explosions in the sky'),
            'explosions in the sky'
        )
    def test_aria_search(self):
        self.assertEqual(
            simplify_search_query(['explosions', 'in the sky']),
            'explosions in the sky'
        )
    def test_only_resolves_soundcloud_uris(self):
        self.assertIsNone(self.library.search(
            {'uri': 'http://www.youtube.com/watch?v=wD6H6Yhluo8'}))
    def test_returns_url_safe_string(self):
        self.assertEqual(
            safe_url('Alternative/Indie/rock/pop '),
            'Alternative%2FIndie%2Frock%2Fpop+')
        self.assertEqual(
            safe_url('D∃∃P Hau⑀ iNDiE DᴬNCE | №➊ ²⁰¹⁴'),
            'DP+Hau+iNDiE+DANCE+%7C+No+2014')
    def test_default_folders(self):
        try:
            from mopidy.models import Ref
        except ImportError as e:
            self.skipTest(e.message)
        self.assertEqual(
            self.library.browse('soundcloud:directory'),
            [
                Ref(name='Explore', type='directory',
                    uri='soundcloud:directory:explore'),
                Ref(name='Following', type='directory',
                    uri='soundcloud:directory:following'),
                Ref(name='Groups', type='directory',
                    uri='soundcloud:directory:groups'),
                Ref(name='Liked', type='directory',
                    uri='soundcloud:directory:liked'),
                Ref(name='Sets', type='directory',
                    uri='soundcloud:directory:sets'),
                Ref(name='Stream', type='directory',
                    uri='soundcloud:directory:stream')
            ]
        )
|
yakumaa/mopidy-soundcloud
|
tests/test_library.py
|
Python
|
mit
| 3,430
|
# Read three integers and report whether they were entered in strictly
# ascending order.
x = int(input('Insira o primeiro número inteiro, por favor: '))
y = int(input('Insira o segundo número inteiro, por favor: '))
z = int(input('Insira o terceiro número inteiro, por favor: '))
# Chained comparison: x < y < z is equivalent to (z > y and y > x).
if x < y < z:
    print('crescente')
else:
    print('não está em ordem crescente')
|
marcelomiky/PythonCodes
|
Coursera/CICCP1/ordenacao.py
|
Python
|
mit
| 288
|
import L2
def test_L2_result():
    # presumably a weighted L2 norm: sqrt(sum((w*x)^2)) with unit weights
    # equals sqrt(55) ~= 7.4161984... -- confirm against L2.vector_len
    assert L2.vector_len([1,1,1,1,1],[1,2,3,4,5])==7.416198487095663
def test_L2_no_weight():
    # norm of five ones with the weight argument omitted
    assert L2.vector_len([1,1,1,1,1])==2.2360679774997898
def test_L2_dimensions():
    """Mismatched vector/weight lengths must raise ValueError.

    The previous version passed silently when no exception was raised at
    all; the ``else`` branch now turns that case into an explicit failure.
    """
    try:
        L2.vector_len([1,1,1,1,1,1],[1,2,3])
    except ValueError as err:
        assert(type(err)==ValueError)
    else:
        raise AssertionError(
            "vector_len did not raise ValueError for mismatched dimensions")
|
crystalzhaizhai/cs207_yi_zhai
|
lectures/L7/test_L2.py
|
Python
|
mit
| 339
|