# Greedy change-making: given the amount paid out of a 1000-won note, count
# the coins/bills returned using denominations 500/100/50/10/5/1.
money = int(input())
money = 1000 - money
fh = money // 500           # 500s
money = money - fh * 500
oh = money // 100           # 100s
money = money - oh * 100
fy = money // 50            # 50s
money = money - fy * 50
ten = money // 10           # 10s
money = money - ten * 10
five = money // 5           # 5s
money = money - five * 5
one = money                 # 1s (whatever remains)
print(fh + oh + fy + ten + five + one)
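# Worked example: input 370 -> change 630 ->
# 1*500 + 1*100 + 0*50 + 3*10 + 0*5 + 0*1 = 5 coins/bills in total.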
|
import torch
from PIL import Image
from torchvision import transforms as T
import cv2
from torchvision.models.detection import *
from loguru import logger
import numpy as np
import pickle
COCO_INSTANCE_CATEGORY_NAMES = [
'__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'N/A', 'stop sign',
'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
'elephant', 'bear', 'zebra', 'giraffe', 'N/A', 'backpack', 'umbrella', 'N/A', 'N/A',
'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', 'tennis racket',
'bottle', 'N/A', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl',
'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed', 'N/A', 'dining table',
'N/A', 'N/A', 'toilet', 'N/A', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'N/A', 'book',
'clock', 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush'
]
def get_prediction(img, threshold, model):
img = Image.fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
transform = T.Compose([T.ToTensor()])
img = transform(img)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
img = img.to(device)
    with torch.no_grad():
        pred = model([img])
    # .detach().cpu() works whether the tensors live on GPU or CPU, so no
    # device-dependent branching is needed
    pred_class = [COCO_INSTANCE_CATEGORY_NAMES[i] for i in list(pred[0]['labels'].detach().cpu().numpy())]
    pred_boxes = [[(i[0], i[1]), (i[2], i[3])] for i in list(pred[0]['boxes'].detach().cpu().numpy())]
    pred_score = list(pred[0]['scores'].detach().cpu().numpy())
try:
pred_t = [pred_score.index(x) for x in pred_score if x > threshold][-1]
except IndexError:
return None, None
else:
pred_boxes = pred_boxes[:pred_t+1]
pred_class = pred_class[:pred_t+1]
return pred_boxes, pred_class
def object_detection_api(img, model, rect_th=15, text_th=7, text_size=5, threshold=0.8):
boxes, pred_cls = get_prediction(img, threshold, model)
if boxes is None and pred_cls is None:
return img
    for i in range(len(boxes)):
        # OpenCV expects integer pixel coordinates
        pt1 = tuple(int(v) for v in boxes[i][0])
        pt2 = tuple(int(v) for v in boxes[i][1])
        # Draw rectangle with the coordinates
        cv2.rectangle(img, pt1, pt2, color=(0, 255, 0), thickness=rect_th)
        # Write the prediction class
        cv2.putText(img, pred_cls[i], pt1, cv2.FONT_HERSHEY_SIMPLEX, text_size, (0, 255, 0), thickness=text_th)
return img
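# Minimal usage sketch (assumes a local "example.jpg"; fasterrcnn_resnet50_fpn
# comes from the wildcard torchvision.models.detection import above, and its
# weights are downloaded on first use):
if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # newer torchvision uses weights="DEFAULT"; older releases take pretrained=True
    detector = fasterrcnn_resnet50_fpn(weights="DEFAULT").to(device).eval()
    frame = cv2.imread("example.jpg")
    annotated = object_detection_api(frame, detector, rect_th=2, text_th=2, text_size=1)
    cv2.imwrite("example_annotated.jpg", annotated)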
|
# -*- coding: utf-8 -*-
import torch as th
import torch.nn as nn
from leibniz.nn.layer.hyperbolic import BasicBlock, Bottleneck
class HyperBasic(nn.Module):
extension = 1
least_required_dim = 1
def __init__(self, dim, step, ix, tx, relu, conv, reduction=16):
super(HyperBasic, self).__init__()
self.dim = dim
self.step = step
self.ix = ix
self.tx = tx
self.input = BasicBlock(dim, 2 * dim, step, relu, conv, reduction=reduction)
self.output = BasicBlock(4 * dim, dim, step, relu, conv, reduction=reduction)
def forward(self, x):
        out = self.input(x)
        velo = out[:, :self.dim]
        theta = out[:, self.dim:]
step = self.step * velo
y1 = (x + th.tanh(theta)) * th.exp(step * th.sin(theta)) - th.tanh(theta)
y2 = (x + th.tanh(theta)) * th.exp(- step * th.cos(theta)) - th.tanh(theta)
y3 = (x - th.tanh(theta)) * th.exp(step * th.sin(theta)) + th.tanh(theta)
y4 = (x - th.tanh(theta)) * th.exp(- step * th.cos(theta)) + th.tanh(theta)
ys = th.cat((y1, y2, y3, y4), dim=1)
return x + self.output(ys)
class HyperBottleneck(nn.Module):
extension = 4
least_required_dim = 1
def __init__(self, dim, step, ix, tx, relu, conv, reduction=16):
super(HyperBottleneck, self).__init__()
self.dim = dim
self.step = step
self.ix = ix
self.tx = tx
self.input = Bottleneck(dim, 2 * dim, step, relu, conv, reduction=reduction)
self.output = Bottleneck(4 * dim, dim, step, relu, conv, reduction=reduction)
def forward(self, x):
        out = self.input(x)
        velo = out[:, :self.dim]
        theta = out[:, self.dim:]
step = self.step * velo
y1 = (x + th.tanh(theta)) * th.exp(step * th.sin(theta)) - th.tanh(theta)
y2 = (x + th.tanh(theta)) * th.exp(- step * th.cos(theta)) - th.tanh(theta)
y3 = (x - th.tanh(theta)) * th.exp(step * th.sin(theta)) + th.tanh(theta)
y4 = (x - th.tanh(theta)) * th.exp(- step * th.cos(theta)) + th.tanh(theta)
ys = th.cat((y1, y2, y3, y4), dim=1)
return x + self.output(ys)
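# Shape sketch (assuming leibniz's blocks preserve spatial dims): for input x
# of shape (N, dim, H, W), self.input yields (N, 2*dim, H, W), split into
# velo/theta with dim channels each; the four hyperbolic branches y1..y4 are
# concatenated to (N, 4*dim, H, W) and self.output maps back to (N, dim, H, W)
# for the residual connection.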
|
"""The proxy component."""
|
"""
# lint-amnesty, pylint: disable=django-not-configured
Mobile API
"""
|
import turtle
filename = "decoded_wavepattern.txt"
#filename = "all about you cutted - decoded.txt"
drawStamps = True
drawRightLine = False
def min(a, b):
if (a < b):
return a
return b
def drawgrid():
for i in range(int(len_vert / step_mark * 2)):
a.forward(step_mark)
a.left(90)
a.forward(len_mark)
a.left(180)
a.forward(len_mark * 2)
a.left(180)
a.forward(len_mark)
a.right(90)
a = turtle.Turtle()
a.shape("arrow")
turtle.screensize(1500, 1000)
turtle.getscreen().bgcolor("black")
a.pencolor("white")
len_vert = 490
len_hor = 1900
len_mark = 5
step_mark = 20
#draw coordinate system
a.speed(100)
a.pensize(1)
a.penup()
a.setpos(0 - len_hor / 2, 0)
a.pendown()
a.forward(len_hor)
a.stamp()
a.setpos(0 - len_hor / 2, 0)
a.left(90)
a.forward(len_vert)
a.stamp()
a.back(len_vert * 2)
#drawgrid()
a.setpos(0 - len_hor / 2, 0)
a.right(90)
#drawgrid()
file = open(filename, "r")
l = turtle.Turtle()
r = turtle.Turtle()
l.pencolor("white")
r.pencolor("white")
l.setpos(0 - len_hor / 2, 0)
r.setpos(0 - len_hor / 2, 0)
l.speed(200)
r.speed(200)
l.pencolor("red")
r.pencolor("green")
l.shape("triangle")
r.shape("triangle")
l.shapesize(0.3, 0.3, 0.3)
r.shapesize(0.3, 0.3, 0.3)
valueGap = 5
valuesPerLine = 8
numValues = int(len_hor / valueGap)
sizeAmplitudeFactor = 0.01
text = ""
for i in range(int(numValues / valuesPerLine + 1)):
text += file.readline()
endidx = 0
for i in range(numValues):
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
try:
l.setpos(i * valueGap - len_hor / 2, int(text[startidx:endidx]) * sizeAmplitudeFactor)
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
if (drawRightLine):
r.setpos(i * valueGap - len_hor / 2, int(text[startidx:endidx]) * sizeAmplitudeFactor)
if (drawStamps):
l.stamp()
r.stamp()
    except (ValueError, IndexError):
        # stop once no more values can be parsed from the buffer
        break
file.close()
#mainloop()
print("drawing 440hz line")
##draw 440hz wave
a.penup()
a.setpos(0 - len_hor / 2, 0)
a.speed(200)
a.pencolor("blue")
a.pendown()
file = open("wavepattern_440hz.txt", "r")
text = ""
for i in range(int(numValues / valuesPerLine + 1)):
text += file.readline()
endidx = 0
for i in range(numValues):
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
try:
a.setpos(i * valueGap - len_hor / 2, int(text[startidx:endidx]) * sizeAmplitudeFactor)
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
#r.setpos(i * valueGap - len_hor / 2, - int(text[startidx:endidx]) * sizeAmplitudeFactor)
    except (ValueError, IndexError):
        # stop once no more values can be parsed from the buffer
        break
file.close()
#mainloop()
#draw 880hz line
print("drawing 880hz line")
a.penup()
a.setpos(0 - len_hor / 2, 0)
a.speed(200)
a.pencolor("yellow")
a.pendown()
file = open("wavepattern_880hz.txt", "r")
text = ""
for i in range(int(numValues / valuesPerLine + 1)):
text += file.readline()
endidx = 0
for i in range(numValues):
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
try:
a.setpos(i * valueGap - len_hor / 2, int(text[startidx:endidx]) * sizeAmplitudeFactor)
startidx = min(text.find("+", endidx), text.find("-", endidx))
endidx = text.find("\t", startidx)
#r.setpos(i * valueGap - len_hor / 2, - int(text[startidx:endidx]) * sizeAmplitudeFactor)
    except (ValueError, IndexError):
        # stop once no more values can be parsed from the buffer
        break
file.close()
turtle.mainloop()
input()
|
import os
import numpy as np
from PIL import Image
import torch
from torch import nn
from torch.nn.modules.conv import _ConvNd
from torch.nn.modules.batchnorm import _BatchNorm
import torch.nn.init as initer
def knn(x, k=20):
    # x: (batch, channels, num_points). Computes negative squared pairwise
    # distances, so topk selects the k nearest neighbors of each point.
    inner = -2*torch.matmul(x.transpose(2, 1), x)
    xx = torch.sum(x**2, dim=1, keepdim=True)
    pairwise_distance = -xx - inner - xx.transpose(2, 1)
    idx = pairwise_distance.topk(k=k, dim=-1)[1]   # (batch_size, num_points, k)
    return idx
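# e.g. for a batch of point clouds x of shape (B, C, N) = (2, 3, 1024),
# knn(x, k=20) returns neighbor indices of shape (2, 1024, 20).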
def pairwise_distance(x):
inner = -2*torch.matmul(x.transpose(2, 1), x)
xx = torch.sum(x**2, dim=1, keepdim=True)
pdist = -xx - inner - xx.transpose(2, 1)
pdist_softmax = torch.nn.functional.softmax(pdist, dim=2)
return pdist_softmax
def pairwise_distance_mask(x, k=20):
# print(x.size())
bs, ch, nump = x.size()
inner = -2*torch.matmul(x.transpose(2, 1), x)
xx = torch.sum(x**2, dim=1, keepdim=True)
pdist = -xx - inner - xx.transpose(2, 1)
topk, indices = pdist.topk(k=k, dim=-1)
res = torch.autograd.Variable(torch.zeros(bs, nump, nump)).cuda()
    res = res.scatter(2, indices, topk)
    # NOTE: `res` (the top-k sparsified scores) is computed but never used;
    # the dense softmax over all pairwise scores is what gets returned.
    pdist_softmax = torch.nn.functional.softmax(pdist, dim=2)
    return pdist_softmax
def pairwise_distance_mask1(x, k=20):
# print(x.size())
bs, ch, nump = x.size()
inner = -2*torch.matmul(x.transpose(2, 1), x)
xx = torch.sum(x**2, dim=1, keepdim=True)
pdist = -xx - inner - xx.transpose(2, 1)
topk, indices = pdist.topk(k=k, dim=-2)
res = torch.autograd.Variable(torch.ones(bs, nump, nump)).cuda()
res = res.scatter(1, indices, topk)
res = res < 0.00001
res = res.float()
# pdist_softmax = torch.nn.functional.softmax(pdist, dim=2)
return res
def pairwise_distance_mask1_dilate(x, k=20):
# print(x.size())
bs, ch, nump = x.size()
inner = -2*torch.matmul(x.transpose(2, 1), x)
xx = torch.sum(x**2, dim=1, keepdim=True)
pdist = -xx - inner - xx.transpose(2, 1)
ek = k + k
topk, indices = pdist.topk(k=ek, dim=-2) # indices: BxekXN
idx_ek = np.array([i for i in range(ek)])
np.random.shuffle(idx_ek)
idx_k = idx_ek[:k]
indices = indices[:, idx_k, :]
res = torch.autograd.Variable(torch.ones(bs, nump, nump)).cuda()
res = res.scatter(1, indices, topk)
res = res < 0.00001
res = res.float()
# pdist_softmax = torch.nn.functional.softmax(pdist, dim=2)
return res
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
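# Typical usage: meter = AverageMeter(); inside the loop call
# meter.update(loss.item(), n=batch_size); read meter.avg for the running mean.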
def step_learning_rate(optimizer, base_lr, epoch, step_epoch, multiplier=0.1, clip=1e-6):
"""Sets the learning rate to the base LR decayed by 10 every step epochs"""
lr = max(base_lr * (multiplier ** (epoch // step_epoch)), clip)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def poly_learning_rate(optimizer, base_lr, curr_iter, max_iter, power=0.9):
"""poly learning rate policy"""
lr = base_lr * (1 - float(curr_iter) / max_iter) ** power
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def intersectionAndUnion(output, target, K, ignore_index=255):
# 'K' classes, output and target sizes are N or N * L or N * H * W, each value in range 0 to K - 1.
assert (output.ndim in [1, 2, 3])
assert output.shape == target.shape
output = output.reshape(output.size).copy()
target = target.reshape(target.size)
    output[np.where(target == ignore_index)[0]] = ignore_index
intersection = output[np.where(output == target)[0]]
area_intersection, _ = np.histogram(intersection, bins=np.arange(K+1))
area_output, _ = np.histogram(output, bins=np.arange(K+1))
area_target, _ = np.histogram(target, bins=np.arange(K+1))
area_union = area_output + area_target - area_intersection
return area_intersection, area_union, area_target
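# A common way to turn these into mIoU (accumulated over a dataset):
# iou = area_intersection / (area_union + 1e-10); miou = np.mean(iou)
# (the epsilon guards classes that never appear).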
def intersectionAndUnionGPU(output, target, K, ignore_index=255):
# 'K' classes, output and target sizes are N or N * L or N * H * W, each value in range 0 to K - 1.
assert (output.dim() in [1, 2, 3])
assert output.shape == target.shape
output = output.view(-1)
target = target.view(-1)
output[target == ignore_index] = ignore_index
intersection = output[output == target]
# https://github.com/pytorch/pytorch/issues/1382
area_intersection = torch.histc(intersection.float().cpu(), bins=K, min=0, max=K-1)
area_output = torch.histc(output.float().cpu(), bins=K, min=0, max=K-1)
area_target = torch.histc(target.float().cpu(), bins=K, min=0, max=K-1)
area_union = area_output + area_target - area_intersection
return area_intersection.cuda(), area_union.cuda(), area_target.cuda()
def check_mkdir(dir_name):
if not os.path.exists(dir_name):
os.mkdir(dir_name)
def check_makedirs(dir_name):
if not os.path.exists(dir_name):
os.makedirs(dir_name)
def init_weights(model, conv='kaiming', batchnorm='normal', linear='kaiming', lstm='kaiming'):
"""
:param model: Pytorch Model which is nn.Module
:param conv: 'kaiming' or 'xavier'
:param batchnorm: 'normal' or 'constant'
:param linear: 'kaiming' or 'xavier'
:param lstm: 'kaiming' or 'xavier'
"""
for m in model.modules():
if isinstance(m, (_ConvNd)):
if conv == 'kaiming':
initer.kaiming_normal_(m.weight)
elif conv == 'xavier':
initer.xavier_normal_(m.weight)
else:
raise ValueError("init type of conv error.\n")
if m.bias is not None:
initer.constant_(m.bias, 0)
elif isinstance(m, _BatchNorm):
if batchnorm == 'normal':
initer.normal_(m.weight, 1.0, 0.02)
elif batchnorm == 'constant':
initer.constant_(m.weight, 1.0)
else:
raise ValueError("init type of batchnorm error.\n")
initer.constant_(m.bias, 0.0)
elif isinstance(m, nn.Linear):
if linear == 'kaiming':
initer.kaiming_normal_(m.weight)
elif linear == 'xavier':
initer.xavier_normal_(m.weight)
else:
raise ValueError("init type of linear error.\n")
if m.bias is not None:
initer.constant_(m.bias, 0)
elif isinstance(m, nn.LSTM):
for name, param in m.named_parameters():
if 'weight' in name:
if lstm == 'kaiming':
initer.kaiming_normal_(param)
elif lstm == 'xavier':
initer.xavier_normal_(param)
else:
raise ValueError("init type of lstm error.\n")
elif 'bias' in name:
initer.constant_(param, 0)
def convert_to_syncbn(model):
def recursive_set(cur_module, name, module):
if len(name.split('.')) > 1:
recursive_set(getattr(cur_module, name[:name.find('.')]), name[name.find('.')+1:], module)
else:
setattr(cur_module, name, module)
from lib.sync_bn import SynchronizedBatchNorm1d, SynchronizedBatchNorm2d, SynchronizedBatchNorm3d
for name, m in model.named_modules():
if isinstance(m, nn.BatchNorm1d):
recursive_set(model, name, SynchronizedBatchNorm1d(m.num_features, m.eps, m.momentum, m.affine))
elif isinstance(m, nn.BatchNorm2d):
recursive_set(model, name, SynchronizedBatchNorm2d(m.num_features, m.eps, m.momentum, m.affine))
elif isinstance(m, nn.BatchNorm3d):
recursive_set(model, name, SynchronizedBatchNorm3d(m.num_features, m.eps, m.momentum, m.affine))
def colorize(gray, palette):
# gray: numpy array of the label and 1*3N size list palette
color = Image.fromarray(gray.astype(np.uint8)).convert('P')
color.putpalette(palette)
return color
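# e.g. colorize(pred, palette).save('pred.png'), where `palette` is a flat
# [R0, G0, B0, R1, G1, B1, ...] list with 3 entries per class index.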
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
NAME_LIST = [
'Dian', 'Nese', 'Falledrick', 'Mae', 'Valhein', 'Dol', 'Earl', 'Cedria', 'Azulei', 'Yun', 'Cybel',
'Ina', 'Foolly', 'Skili', 'Juddol', 'Janver', 'Viska', 'Hirschendy', 'Silka', 'Hellsturn', 'Essa',
'Mykonos', 'Fenton', 'Tyrena', 'Inqoul', 'Mankov', 'Derilia', 'Hexema', 'Wyton', 'Kaedum', 'Gouram',
'Libertia', 'Berasailles', 'Juxta', 'Tae’hr', 'Comtol', 'Gherak', 'Hest', 'Qony', 'Masamka', 'Twyll',
'Tenos', 'Axim', 'Westrynda', 'Saphros', 'Olkham', 'Handok', 'Kemetra', 'Yos', 'Wentingle', 'Ames',
'Molosh', 'Inkov', 'Phasasia', 'Ziedinghal', 'Bregul', 'Eishvack', 'Lora', 'Krenting', 'Symbole',
'Elignoir', 'Keligkrul', 'Qwey', 'Vindinglag', 'Kusakira', 'Weme', 'Fayd', 'Rushvita', 'Vulkor',
'Amers', 'Ortos', 'Vanius', 'Chandellia', 'Lilikol', 'Catca', 'Cormus', 'Yuela', 'Ariban', 'Tryton',
'Fesscha', 'Opalul', 'Zakzos', 'Hortimer', 'Anklos', 'Dushasiez', 'Polop', 'Mektal', 'Orinphus',
'Denatra', 'Elkazzi', 'Dyne', 'Domos', 'Letryal', 'Manniv', 'Sylestia', 'Esnol', 'Fasafuros',
'Ghanfer', 'Kahnite', 'Sweyda', 'Uylis', 'Retenia', 'Bassos', 'Arkensval', 'Impelos', 'Grandius',
'Fulcrux', 'Lassahein', 'Edsveda', 'Earakun', 'Fous', 'Maas', 'Basenphal', 'Jubidya', 'Divya',
'Kosunten', 'Ordayius', 'Dozzer', 'Gangher', 'Escha', 'Manchul', 'Kempos', 'Kulo', 'Urtench',
'Kesta', 'Helahona', 'Ryte', 'Falcia', 'Umannos', 'Urkensvall', 'Fedra', 'Bulkensar', 'Comia',
'Tyul', 'Lasendarl'
]
class Race:
"""Basic characteristics for race"""
race = 'Unknown'
speed = 30
strength_bonus = 0
dexterity_bonus = 0
constitution_bonus = 0
intelligence_bonus = 0
wisdom_bonus = 0
charisma_bonus = 0
hp_bonus = 0
class Dwarf(Race):
""""""
race = 'Dwarf'
constitution_bonus = 2
class Dragonborn(Race):
""""""
race = 'Dragonborn'
strength_bonus = 2
charisma_bonus = 1
class Elf(Race):
""""""
race = 'Elf'
dexterity_bonus = 2
class Gnome(Race):
""""""
race = 'Gnome'
intelligence_bonus = 2
class HalfElf(Race):
""""""
race = 'Half-Elf'
charisma_bonus = 2
intelligence_bonus = 1
wisdom_bonus = 1
class Halfling(Race):
""""""
race = 'Halfling'
dexterity_bonus = 2
class HalfOrc(Race):
""""""
race = 'Half-Orc'
strength_bonus = 2
constitution_bonus = 1
class Human(Race):
""""""
race = 'Human'
strength_bonus = 1
constitution_bonus = 1
intelligence_bonus = 1
dexterity_bonus = 1
wisdom_bonus = 1
charisma_bonus = 1
class Tiefling(Race):
""""""
race = 'Tiefling'
charisma_bonus = 2
intelligence_bonus = 1
race_list = [
Dwarf,
Dragonborn,
Elf,
Gnome,
HalfElf,
Halfling,
HalfOrc,
Human,
Tiefling
]
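# A minimal sketch of how these tables might be combined for quick NPC
# generation (an assumption; not part of the original module):
# import random
# race = random.choice(race_list)
# print(random.choice(NAME_LIST), race.race, 'speed', race.speed)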
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-02-06 22:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('docker_registry', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='dockerfile',
name='cpushare',
field=models.IntegerField(blank=True, help_text='CPU shares (relative weight)', null=True),
),
migrations.AlterField(
model_name='dockerfile',
name='memorylimit',
field=models.IntegerField(blank=True, help_text='maximum memory usage allowed in KBs', null=True),
),
]
|
"""
Module for processing Rss.
Note: The main purpose of this module is to provide support for the
RssSpider, its API is subject to change without notice.
"""
import lxml.etree
from six.moves.urllib.parse import urljoin
class Rss(object):
"""Class to parse Rss (type=urlset) and Rss Index
(type=rssindex) files"""
def __init__(self, xmltext):
xmlp = lxml.etree.XMLParser(recover=True, remove_comments=True, resolve_entities=False)
self._root = lxml.etree.fromstring(xmltext, parser=xmlp)
rt = self._root.tag
self.type = self._root.tag.split('}', 1)[1] if '}' in rt else rt
    def __iter__(self):
        for elem in self._root:
            d = {}
            for el in elem:
tag = el.tag
name = tag.split('}', 1)[1] if '}' in tag else tag
if name == 'link':
if 'href' in el.attrib:
d.setdefault('alternate', []).append(el.get('href'))
else:
d[name] = el.text.strip() if el.text else ''
if 'loc' in d:
yield d
|
from datetime import datetime
from enum import Enum
from typing import Iterable, Optional
from httpnet._core import Element, Service
class SpamFilter(Element):
banned_files_checks: Optional[bool]
delete_spam: Optional[bool]
header_checks: Optional[bool]
malware_checks: Optional[bool]
modify_subject_on_spam: Optional[bool]
spam_checks: Optional[bool]
spam_level: Optional[str]
use_greylisting: Optional[bool]
class MailboxType(Enum):
IMAP = 'ImapMailbox'
EXCHANGE = 'ExchangeMailbox'
FORWARDER = 'Forwarder'
def __repr__(self):
return f'{self.__class__.__qualname__}.{self.name}'
def __str__(self):
return self.value
class ForwarderType(Enum):
INTERNAL = 'internalForwarder'
EXTERNAL = 'externalForwarder'
def __repr__(self):
return f'{self.__class__.__qualname__}.{self.name}'
def __str__(self):
return self.value
class Mailbox(Element):
id: Optional[str]
account_id: Optional[str]
email_address: str
email_address_unicode: Optional[str]
domain_name: Optional[str]
domain_name_unicode: Optional[str]
status: Optional[str]
spam_filter: Optional[SpamFilter]
type: Optional[MailboxType]
product_code: Optional[str]
forwarder_targets: Optional[Iterable[str]] # only IMAP and Forwarder
smtp_forwarder_target: Optional[str] # only IMAP
is_admin: Optional[bool] # only IMAP
first_name: Optional[str] # only Exchange
last_name: Optional[str] # only Exchange
exchange_guid: Optional[str] # only Exchange
organization_id: Optional[str] # only Exchange
forwarder_type: Optional[ForwarderType] # only Forwarder
password: Optional[str]
storage_quota: int
storage_quota_used: Optional[int]
paid_until: Optional[datetime]
renew_on: Optional[datetime]
deletion_scheduled_for: Optional[datetime]
restorable_until: Optional[datetime]
add_date: Optional[datetime]
last_change_date: Optional[datetime]
def __init__(self, **kwargs):
super().__init__(**kwargs)
if self.type == MailboxType.IMAP:
if self.forwarder_targets is None:
raise ValueError('List of forwarder targets is required for IMAP mailboxes.')
elif self.type == MailboxType.EXCHANGE:
if self.first_name is None:
raise ValueError('First name is required for Exchange mailboxes.')
if self.last_name is None:
raise ValueError('Last name is required for Exchange mailboxes.')
elif self.type == MailboxType.FORWARDER:
if self.forwarder_targets is None:
raise ValueError('List of forwarder targets is required for Forwarder mailboxes.')
class MailboxService(Service[Mailbox]):
_find_method_name = 'mailboxesFind'
def delete(self, mailbox_id: Optional[str] = None, email_address: Optional[str] = None,
exec_date: Optional[datetime] = None) -> Mailbox:
parameters = {}
if mailbox_id:
parameters['mailboxId'] = mailbox_id
elif email_address:
parameters['emailAddress'] = email_address
else:
            raise ValueError('Either mailbox id or email address is required.')
if exec_date is not None:
parameters['execDate'] = exec_date.isoformat()
response = self._call(
method='mailboxDelete',
parameters=parameters
)
return Mailbox.from_json(response.get('response', {}))
def cancel_deletion(self, mailbox_id: Optional[str] = None, email_address: Optional[str] = None) -> Mailbox:
parameters = {}
if mailbox_id:
parameters['mailboxId'] = mailbox_id
elif email_address:
parameters['emailAddress'] = email_address
else:
            raise ValueError('Either mailbox id or email address is required.')
response = self._call(
method='mailboxDeletionCancel',
parameters=parameters
)
return Mailbox.from_json(response.get('response', {}))
def restore(self, mailbox_id: Optional[str] = None, email_address: Optional[str] = None) -> Mailbox:
parameters = {}
if mailbox_id:
parameters['mailboxId'] = mailbox_id
elif email_address:
parameters['emailAddress'] = email_address
else:
            raise ValueError('Either mailbox id or email address is required.')
response = self._call(
method='mailboxRestore',
parameters=parameters
)
return Mailbox.from_json(response.get('response', {}))
def purge_restorable(self, mailbox_id: Optional[str] = None, email_address: Optional[str] = None) -> None:
parameters = {}
if mailbox_id:
parameters['mailboxId'] = mailbox_id
elif email_address:
parameters['emailAddress'] = email_address
else:
            raise ValueError('Either mailbox id or email address is required.')
self._call(
method='mailboxPurgeRestorable',
parameters=parameters
)
class Organization(Element):
id: Optional[str]
account_id: Optional[str]
comment: Optional[str]
name: str
status: Optional[str]
member_domains: Optional[Iterable[str]]
add_date: Optional[datetime]
last_change_date: Optional[datetime]
class OrganizationService(Service[Organization]):
pass
class DomainSettings(Element):
domainName: str
domainNameUnicode: Optional[str]
storageQuota: Optional[int]
storageQuotaAllocated: Optional[int]
mailboxQuota: Optional[int]
exchangeMailboxQuota: Optional[int]
exchangeStorageQuotaAllocated: Optional[int]
exchangeStorageQuota: Optional[int]
addDate: Optional[datetime]
lastChangeDate: Optional[datetime]
class DomainSettingsService(Service[DomainSettings]):
pass
|
class Solution(object):
def detectCycle(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
slow = fast = head
while fast and fast.next:
slow = slow.next
fast = fast.next.next
if slow == fast:
break
else:
return None
while head != slow:
head = head.next
slow = slow.next
return head
# Approach:
# This is not the kind of problem whose solution is obvious at a glance.
# The key fact is that when fast and slow meet inside the loop, the distance
# from the meeting point forward to the loop's entry equals the distance from
# head to the loop's entry, so walking one pointer from head and one from the
# meeting point, a step at a time, lands them on the entry node together.
# Alternative solution:
# Store visited nodes in a set and return the first node seen twice; that
# takes O(N) extra space, which violates the problem's constraint.
|
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
"""
This module defines a versionless model for ``TList``.
"""
from __future__ import absolute_import
import struct
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
import uproot
_tlist_format1 = struct.Struct(">i")
class Model_TList(uproot.model.Model, Sequence):
"""
A versionless :doc:`uproot.model.Model` for ``TList``.
"""
def read_members(self, chunk, cursor, context, file):
if self.is_memberwise:
raise NotImplementedError(
"""memberwise serialization of {0}
in file {1}""".format(
type(self).__name__, self.file.file_path
)
)
self._bases.append(
uproot.models.TObject.Model_TObject.read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
self._members["fName"] = cursor.string(chunk, context)
self._members["fSize"] = cursor.field(chunk, _tlist_format1, context)
self._starts = []
self._data = []
self._options = []
self._stops = []
for _ in uproot._util.range(self._members["fSize"]):
self._starts.append(cursor.index)
item = uproot.deserialization.read_object_any(
chunk, cursor, context, file, self._file, self._parent
)
self._data.append(item)
self._options.append(cursor.bytestring(chunk, context))
self._stops.append(cursor.index)
def __repr__(self):
if self.class_version is None:
version = ""
else:
version = " (version {0})".format(self.class_version)
return "<{0}{1} of {2} items at 0x{3:012x}>".format(
self.classname,
version,
len(self),
id(self),
)
def __getitem__(self, where):
return self._data[where]
def __len__(self):
return len(self._data)
@property
def byte_ranges(self):
return zip(self._starts, self._stops)
def tojson(self):
return {
"_typename": "TList",
"name": "TList",
"arr": [x.tojson() for x in self._data],
"opt": [],
}
writable = True
def _to_writable_postprocess(self, original):
self._data = original._data
self._options = original._options
def _serialize(self, out, header, name, tobject_flags):
import uproot._writing
where = len(out)
for x in self._bases:
x._serialize(out, True, None, tobject_flags)
out.append(uproot._writing.serialize_string(self._members["fName"]))
out.append(_tlist_format1.pack(self._members["fSize"]))
for datum, option in zip(self._data, self._options):
uproot.serialization._serialize_object_any(out, datum, None)
out.append(option)
if header:
num_bytes = sum(len(x) for x in out[where:])
version = 5
out.insert(where, uproot.serialization.numbytes_version(num_bytes, version))
uproot.classes["TList"] = Model_TList
|
from django.shortcuts import render, HttpResponseRedirect, HttpResponse
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from django.db.models.deletion import ProtectedError
from guifw.models.host import Host, FormHost
# Create your views here.
def multipleDelete(request):
    # TODO: implement a better way to delete multiple records,
    # add validation checks, and avoid insecure patterns
    hostlist = request.GET.getlist('hosts[]')
    if hostlist:
        #Host.objects.filter(id__in=hostlist).delete()
        print("Deleting " + str(hostlist))
    return HttpResponseRedirect('/guifw/host/list')
def usedBy(self):
used = []
for use in self.filter_source.all():
used.append(["filter", "Source", use.order, str(use.name), use.id])
for use in self.filter_destiny.all():
used.append(["filter", "Destiny", use.order, str(use.name), use.id])
for use in self.nat_source.all():
used.append(["nat", "Source", use.order, str(use.name), use.id])
for use in self.nat_destiny.all():
used.append(["nat:", "Destiny", use.order, str(use.name), use.id])
for use in self.nat_toip.all():
used.append(["nat", "To", use.order, str(use.name), use.id])
for use in self.shapp_source.all():
used.append(["shapping", "Source", use.order, str(use.name), use.id])
for use in self.shapp_destiny.all():
used.append(["shapping", "Destiny", use.order, str(use.name), use.id])
for use in self.hostset_address.all():
used.append(["hostset","---", "---", str(use.name), use.id])
return used
class HostList(ListView):
model = Host
template_name = 'host_list.html'
class HostDetail(DetailView):
model = Host
template_name = 'host_detail.html'
class HostCreate(CreateView):
model = Host
form_class = FormHost
template_name = 'host_form.html'
success_url = '/guifw/host/list'
class HostUpdate(UpdateView):
model = Host
form_class = FormHost
template_name = 'host_form.html'
success_url = '/guifw/host/list'
class HostDelete(DeleteView):
model = Host
success_url = '/guifw/host/list'
template_name = 'host_delete.html'
def get_context_data(self, **kwargs):
        context = super(HostDelete, self).get_context_data(**kwargs)
context['used'] = usedBy(self.object)
return context
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
try:
self.object.delete()
return HttpResponseRedirect('/guifw/host/list')
except ProtectedError as e:
result = {'error': str(e)}
return render(request,'error.html',result)
def post(self, request, *args, **kwargs):
if "cancel" in request.POST:
self.object = self.get_object()
url = self.get_success_url()
return HttpResponseRedirect(url)
else:
return super(HostDelete, self).post(request, *args, **kwargs)
|
from dataclasses import dataclass
from .node_worker_target_type import NodeWorkerTargetType
@dataclass
class NodeWorkerTarget:
targetCount: float
targetType: NodeWorkerTargetType
# KEEP
@staticmethod
def per_node(target_count: int) -> 'NodeWorkerTarget':
return NodeWorkerTarget(target_count, NodeWorkerTargetType.PER_NODE)
@staticmethod
def per_vcpus(target_count: float) -> 'NodeWorkerTarget':
return NodeWorkerTarget(target_count, NodeWorkerTargetType.PER_VCPU)
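# e.g. NodeWorkerTarget.per_vcpus(0.5) describes one worker per two vCPUs:
# NodeWorkerTarget(targetCount=0.5, targetType=NodeWorkerTargetType.PER_VCPU)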
|
import os
import sys
import unittest
from pyshex import ShExEvaluator
from CFGraph import CFGraph
class ShexEvalTestCase(unittest.TestCase):
def test_biolink_shexeval(self) -> None:
base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data'))
g = CFGraph()
g.load(os.path.join(base_dir, 'validation', 'biolink-model.ttl'), format="turtle")
evaluator = ShExEvaluator(g,
os.path.join(base_dir, 'schemas', 'meta.shex'),
"https://biolink.github.io/biolink-model/ontology/biolink.ttl",
"http://bioentity.io/vocab/SchemaDefinition")
result = evaluator.evaluate(debug=False)
for rslt in result:
if not rslt.result:
print(f"Error: {rslt.reason}")
self.assertTrue(all(r.result for r in result))
if __name__ == '__main__':
unittest.main()
|
import matplotlib.pyplot as plt
import numpy as np
from optmize import *
fig, axes = plt.subplots(nrows=1,ncols=2, figsize=(12,5))
all_data = loadCsv('./test.20.log')
# NOTE: the loaded data is immediately overwritten with random demo data below
all_data = [np.random.normal(0, std, 100) for std in range(6, 10)]
#fig = plt.figure(figsize=(8,6))
axes[0].violinplot(all_data,
showmeans=False,
showmedians=True
)
axes[0].set_title('violin plot')
axes[1].boxplot(all_data,
)
axes[1].set_title('box plot')
# adding horizontal grid lines
for ax in axes:
ax.yaxis.grid(True)
ax.set_xticks([y+1 for y in range(len(all_data))], )
ax.set_xlabel('xlabel')
ax.set_ylabel('ylabel')
plt.setp(axes, xticks=[y+1 for y in range(len(all_data))],
xticklabels=['abc', 'pso', 'pso-basic', 'tsfcm'],
)
plt.show()
|
import os
import re
import numpy as np
import pandas as pd
def make_features(data, data_size, max_chars_per_sentence, max_length_vocab):
    # One-hot encode each sentence at the character level; only lowercase
    # 'a'-'z' are encoded (index ord(c) - 97), so max_length_vocab should be >= 26.
    char_level_features = np.zeros((data_size, max_chars_per_sentence, max_length_vocab))
    for i in range(data_size):
        count = 0
        sent_features = np.zeros((max_chars_per_sentence, max_length_vocab))
chars = list(data[i])
for c in chars:
if(count>=max_chars_per_sentence):
break
elif(c>='a' and c<='z'):
feature = np.zeros(max_length_vocab)
feature[ord(c)-97] = 1
sent_features[count,:] = feature
count+=1
char_level_features[i,:,:] = sent_features
return char_level_features
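# e.g. make_features(["abc"], 1, 10, 26) returns a (1, 10, 26) array whose
# first three rows one-hot encode 'a', 'b', 'c'; the remaining rows stay zero.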
|
"""
Generic recording functionality
"""
from copy import deepcopy
from operator import attrgetter
from collections import defaultdict
from typing import Optional, Type
from .system import SystemInterface
class Recorder:
"""Generic recorder functor.
Can be used as an `observer` in conjunction with a `Simulation`.
"""
def __init__(self):
self._results = defaultdict(lambda: defaultdict(list))
self._stores = defaultdict(list)
@property
def results(self) -> dict:
"""The recorded results
Returns:
dict: The recorded results as a dict
"""
return dict(self._results)
def store(
self, system: Type[SystemInterface], attribute: str, alias: Optional[str] = None
) -> None:
"""Register a variable/parameter of a system for storing.
Args:
system (Type[SystemInterface]): System that owns this variable/parameter
attribute (str): A string of the form `x.y.z` pointing to the
variable/parameter to be stored
alias (Optional[str], optional): A string under which the stored
attribute will be available at. Defaults to None.
Raises:
AttributeError: If the provided `attribute` does not exist on `system`
"""
attrgetter(attribute)(
system
) # Try to access attribute, raises AttributeError if non-existing
self._stores[system].append((attribute, alias or attribute))
def __call__(self, time, _):
for sys, store_vars in self._stores.items():
self._results[sys]["time"].append(time)
for attr_str, key_str in store_vars:
val = deepcopy(attrgetter(attr_str)(sys))
self._results[sys][key_str].append(
val
) # TODO: Find a better way to handle `sys`
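# Usage sketch (hypothetical `my_system` implementing SystemInterface with an
# attribute chain `state.position`):
# rec = Recorder()
# rec.store(my_system, "state.position", alias="pos")
# ... pass `rec` as an observer to a Simulation; it is invoked as rec(time, _)
# rec.results[my_system]["pos"]   # list of deep-copied values, one per step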
|
from io import StringIO
from pathlib import Path
import pytest
from .._yaml_api import yaml_dumps, yaml_loads, read_yaml, write_yaml
from .._yaml_api import is_yaml_serializable
from ..ruamel_yaml.comments import CommentedMap
from .util import make_tempdir
def test_yaml_dumps():
data = {"a": [1, "hello"], "b": {"foo": "bar", "baz": [10.5, 120]}}
result = yaml_dumps(data)
expected = "a:\n - 1\n - hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
assert result == expected
def test_yaml_dumps_indent():
data = {"a": [1, "hello"], "b": {"foo": "bar", "baz": [10.5, 120]}}
result = yaml_dumps(data, indent_mapping=2, indent_sequence=2, indent_offset=0)
expected = "a:\n- 1\n- hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
assert result == expected
def test_yaml_loads():
data = "a:\n- 1\n- hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
result = yaml_loads(data)
# Check that correct loader is used and result is regular dict, not the
# custom ruamel.yaml "ordereddict" class
assert not isinstance(result, CommentedMap)
assert result == {"a": [1, "hello"], "b": {"foo": "bar", "baz": [10.5, 120]}}
def test_read_yaml_file():
file_contents = "a:\n- 1\n- hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
with make_tempdir({"tmp.yaml": file_contents}) as temp_dir:
file_path = temp_dir / "tmp.yaml"
assert file_path.exists()
data = read_yaml(file_path)
assert len(data) == 2
assert data["a"] == [1, "hello"]
def test_read_yaml_file_invalid():
file_contents = "a: - 1\n- hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
with make_tempdir({"tmp.yaml": file_contents}) as temp_dir:
file_path = temp_dir / "tmp.yaml"
assert file_path.exists()
with pytest.raises(ValueError):
read_yaml(file_path)
def test_read_yaml_stdin(monkeypatch):
input_data = "a:\n - 1\n - hello\nb:\n foo: bar\n baz:\n - 10.5\n - 120\n"
monkeypatch.setattr("sys.stdin", StringIO(input_data))
data = read_yaml("-")
assert len(data) == 2
assert data["a"] == [1, "hello"]
def test_write_yaml_file():
data = {"hello": "world", "test": [123, 456]}
expected = "hello: world\ntest:\n - 123\n - 456\n"
with make_tempdir() as temp_dir:
file_path = temp_dir / "tmp.yaml"
write_yaml(file_path, data)
with Path(file_path).open("r", encoding="utf8") as f:
assert f.read() == expected
def test_write_yaml_stdout(capsys):
data = {"hello": "world", "test": [123, 456]}
expected = "hello: world\ntest:\n - 123\n - 456\n\n"
write_yaml("-", data)
captured = capsys.readouterr()
assert captured.out == expected
@pytest.mark.parametrize(
"obj,expected",
[
(["a", "b", 1, 2], True),
({"a": "b", "c": 123}, True),
("hello", True),
(lambda x: x, False),
({"a": lambda x: x}, False),
],
)
def test_is_yaml_serializable(obj, expected):
assert is_yaml_serializable(obj) == expected
# Check again to be sure it's consistent
assert is_yaml_serializable(obj) == expected
|
from __future__ import annotations
import io
import struct
import zipfile
from dataclasses import dataclass
from typing import Optional
@dataclass()
class SimpleLauncher:
launcher: Optional[bytes]
shebang: Optional[bytes]
data: bytes # Note: zip
def parse_simple_launcher(all_data: bytes, verbose: bool=False) -> SimpleLauncher:
"""
Parse binary data of simple_launcher to SimpleLauncher dataclass.
The simple_launcer is used in 'pip' to make console_script in Windows and the
project is developped on https://bitbucket.org/vinay.sajip/simple_launcher
The 'launcher' and 'shebang' attribute of the result may be None, indicating that
the given data is not a valid simple_launcher binary.
Note that most of code in this function has forked from pyzzer via Vinay Sajip.
https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112
"""
# The MIT License (MIT)
#
# Copyright (c) 2013 Vinay Sajip.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# https://bitbucket.org/vinay.sajip/simple_launcher/issues/3/relocatability-of-launchers
    launcher = shebang = data = None
    pos = all_data.rfind(b'PK\x05\x06')  # zip end-of-central-directory signature
    if pos == -1:
        return SimpleLauncher(None, None, all_data)
try:
end_cdr = all_data[pos + 12:pos + 20]
cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
arc_pos = pos - cdr_size - cdr_offset
data = all_data[arc_pos:]
if arc_pos > 0:
pos = all_data.rfind(b'#!', 0, arc_pos)
if pos >= 0:
shebang = all_data[pos:arc_pos]
if pos > 0:
launcher = all_data[:pos]
except Exception as e:
if verbose:
print(e)
return SimpleLauncher(launcher, shebang, data)
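# Usage sketch (hypothetical path to a pip-generated console script on
# Windows): split it into stub, shebang, and zip payload, then inspect the zip.
# with open("black.exe", "rb") as f:
#     parts = parse_simple_launcher(f.read())
# print(parts.shebang)
# print(zipfile.ZipFile(io.BytesIO(parts.data)).namelist())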
|
def let_to_num(letter):
    # Map 'a'..'d' to 1..4; anything else maps to 0.
    # (Parameter renamed from `input`, which shadowed the builtin; the snippet
    # is also updated from Python 2's raw_input/print statement.)
    result = 0
    if letter == 'a':
        result = 1
    elif letter == 'b':
        result = 2
    elif letter == 'c':
        result = 3
    elif letter == 'd':
        result = 4
    return result
while True:
    rotors = ['b', 'c', 'd', 'e']
    init = input()
    we = let_to_num(letter=init) - 1
    print(rotors[we])
|
# Convert "arbitrary" image files to rgb files (SGI's image format).
# Input may be compressed.
# The uncompressed file type may be PBM, PGM, PPM, GIF, TIFF, or Sun raster.
# An exception is raised if the file is not of a recognized type.
# Returned filename is either the input filename or a temporary filename;
# in the latter case the caller must ensure that it is removed.
# Other temporary files used are removed by the function.
import os
import tempfile
import pipes
import imghdr
table = {}
t = pipes.Template()
t.append('fromppm $IN $OUT', 'ff')
table['ppm'] = t
t = pipes.Template()
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['pnm'] = t
table['pgm'] = t
table['pbm'] = t
t = pipes.Template()
t.append('fromgif $IN $OUT', 'ff')
table['gif'] = t
t = pipes.Template()
t.append('tifftopnm', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['tiff'] = t
t = pipes.Template()
t.append('rasttopnm', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['rast'] = t
t = pipes.Template()
t.append('djpeg', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['jpeg'] = t
uncompress = pipes.Template()
uncompress.append('uncompress', '--')
class error(Exception):
pass
def torgb(filename):
temps = []
ret = None
try:
ret = _torgb(filename, temps)
finally:
for temp in temps[:]:
if temp != ret:
try:
os.unlink(temp)
except os.error:
pass
temps.remove(temp)
return ret
def _torgb(filename, temps):
if filename[-2:] == '.Z':
fname = tempfile.mktemp()
temps.append(fname)
sts = uncompress.copy(filename, fname)
if sts:
raise error, filename + ': uncompress failed'
else:
fname = filename
try:
ftype = imghdr.what(fname)
except IOError, msg:
if type(msg) == type(()) and len(msg) == 2 and \
type(msg[0]) == type(0) and type(msg[1]) == type(''):
msg = msg[1]
if type(msg) is not type(''):
msg = `msg`
raise error, filename + ': ' + msg
if ftype == 'rgb':
return fname
if ftype is None or not table.has_key(ftype):
raise error, \
filename + ': unsupported image file type ' + `ftype`
temp = tempfile.mktemp()
sts = table[ftype].copy(fname, temp)
if sts:
raise error, filename + ': conversion to rgb failed'
return temp
|
# -*- coding: utf-8 -*-
from .FileserveCom import FileserveCom
class FilejungleCom(FileserveCom):
__name__ = "FilejungleCom"
__type__ = "downloader"
__version__ = "0.57"
__status__ = "testing"
__pyload_version__ = "0.5"
__pattern__ = r"http://(?:www\.)?filejungle\.com/f/(?P<ID>[^/]+)"
__config__ = [("enabled", "bool", "Activated", True)]
__description__ = """Filejungle.com downloader plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
URLS = [
"http://www.filejungle.com/f/",
"http://www.filejungle.com/check_links.php",
"http://www.filejungle.com/checkReCaptcha.php",
]
LINKCHECK_TR = r'<li>\s*(<div class="col1">.*?)</li>'
LINKCHECK_TD = r'<div class="(?:col )?col\d">(?:<.*?>| )*([^<]*)'
LONG_WAIT_PATTERN = (
r"<h1>Please wait for (\d+) (\w+)\s*to download the next file\.</h1>"
)
|
import pytest
from gaphor import UML
from gaphor.diagram.copypaste import copy, paste_link
from gaphor.diagram.group import group
from gaphor.diagram.tests.fixtures import copy_clear_and_paste_link
from gaphor.UML.deployments import ArtifactItem, NodeItem
@pytest.fixture
def node_with_artifact(diagram, element_factory):
node = element_factory.create(UML.Node)
artifact = element_factory.create(UML.Artifact)
node_item = diagram.create(NodeItem, subject=node)
artifact_item = diagram.create(ArtifactItem, subject=artifact)
group(node_item, artifact_item)
artifact_item.change_parent(node_item)
assert artifact_item.parent is node_item
return node_item, artifact_item
def test_copy_paste_of_nested_item(diagram, element_factory, node_with_artifact):
node_item, artifact_item = node_with_artifact
buffer = copy({artifact_item})
(new_comp_item,) = paste_link(buffer, diagram, element_factory.lookup)
assert new_comp_item.parent is node_item
def test_copy_paste_of_item_with_nested_item(
diagram, element_factory, node_with_artifact
):
node_item, artifact_item = node_with_artifact
buffer = copy(set(node_with_artifact))
new_items = paste_link(buffer, diagram, element_factory.lookup)
new_node_item = next(i for i in new_items if isinstance(i, NodeItem))
new_comp_item = next(i for i in new_items if isinstance(i, ArtifactItem))
assert new_comp_item.parent is new_node_item
def test_copy_remove_paste_of_item_with_nested_item(
diagram, element_factory, node_with_artifact
):
new_items = copy_clear_and_paste_link(
set(node_with_artifact), diagram, element_factory
)
new_node_item = next(i for i in new_items if isinstance(i, NodeItem))
new_comp_item = next(i for i in new_items if isinstance(i, ArtifactItem))
assert new_comp_item.parent is new_node_item
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.status import builder as build_results
from buildbot.status.base import StatusReceiverMultiService
from twisted.internet.defer import inlineCallbacks, returnValue
from . import common
from .buildbot_gateway import BuildbotGateway
BUILD_STATUS_NAMES = {
build_results.EXCEPTION: 'EXCEPTION',
build_results.FAILURE: 'FAILURE',
build_results.RETRY: 'RETRY',
build_results.SKIPPED: 'SKIPPED',
build_results.SUCCESS: 'SUCCESS',
build_results.WARNINGS: 'SUCCESS', # Treat warnings as SUCCESS.
}
class BuildBucketStatus(StatusReceiverMultiService):
"""Updates build status on buildbucket."""
def __init__(self, integrator, buildbucket_service, dry_run):
"""Creates a new BuildBucketStatus.
Args:
integrator (BuildBucketIntegrator): integrator to notify about status
changes.
buildbucket_service (DeferredResource): buildbucket API client.
dry_run (bool): if True, do not start integrator.
"""
StatusReceiverMultiService.__init__(self)
self.integrator = integrator
self.buildbucket_service = buildbucket_service
self.dry_run = dry_run
self.integrator_starting = None
def startService(self):
StatusReceiverMultiService.startService(self)
if self.dry_run:
return
buildbot = BuildbotGateway(self.parent)
self.integrator.start(buildbot, self.buildbucket_service)
self.integrator.poll_builds()
self.parent.getStatus().subscribe(self)
def stopService(self):
self.integrator.stop()
StatusReceiverMultiService.stopService(self)
# pylint: disable=W0613
def builderAdded(self, name, builder):
# Subscribe to this builder.
return self
def buildStarted(self, builder_name, build):
if self.dry_run:
return
self.integrator.on_build_started(build)
def buildFinished(self, builder_name, build, result):
if self.dry_run:
return
assert result in BUILD_STATUS_NAMES
status = BUILD_STATUS_NAMES[result]
self.integrator.on_build_finished(build, status)
|
# -*- coding: utf-8 -*-
import os
import json
import csv
import argparse
import pandas as pd
import numpy as np
from math import ceil
from tqdm import tqdm
import pickle
import shutil
import torch
import torch.nn as nn
from torch.autograd import Variable
from torch.nn import CrossEntropyLoss
from torchvision import datasets, models
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import cv2
from transforms import transforms
from models.LoadModel import MainModel
from utils.dataset_DCL import collate_fn4train, collate_fn4test, collate_fn4val, dataset
from config import LoadConfig, load_data_transformers
# from utils.test_tool import set_text, save_multi_img, cls_base_acc
# if int(torch.__version__.split('.')[0])< 1 and int(torch.__version__.split('.')[1])< 41:
from tensorboardX import SummaryWriter
# else:
# from torch.utils.tensorboard import SummaryWriter
import pdb
import time
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
def parse_args():
parser = argparse.ArgumentParser(description='dcl parameters')
parser.add_argument('--data', dest='dataset',
default='ItargeCar', type=str)
parser.add_argument('--backbone', dest='backbone',
default='resnet50', type=str)
parser.add_argument('--b', dest='batch_size',
default=8, type=int)
parser.add_argument('--nw', dest='num_workers',
default=0, type=int)
parser.add_argument('--ver', dest='version',
default='test', type=str)
    parser.add_argument('--detail', dest='describe',
                        default=None, type=str)
parser.add_argument('--save', dest='resume',
default="/NAS/shenjintong/DCL/net_model/training_descibe_41123_ItargeCar/model_best.pth", type=str)
parser.add_argument('--anno', dest='anno',
default=None, type=str)
parser.add_argument('--result_path', dest='result_path',
default="/NAS/shenjintong/Dataset/ItargeCar/Result/DCL/raw_result/", type=str)
parser.add_argument('--size', dest='resize_resolution',
default=512, type=int)
parser.add_argument('--crop', dest='crop_resolution',
default=448, type=int)
parser.add_argument('--ss', dest='save_suffix',
default=None, type=str)
parser.add_argument('--acc_report', dest='acc_report',
action='store_true')
parser.add_argument('--swap_num', default=[7, 7],
nargs=2, metavar=('swap1', 'swap2'),
type=int, help='specify a range')
parser.add_argument('--use_backbone', dest='use_backbone',
action='store_false')
parser.add_argument('--CAM', dest='CAM',
action='store_true')
parser.add_argument('--no_bbox', dest='no_bbox',
action='store_true')
    parser.add_argument('--graph', dest='add_structure_graph',
                        action='store_true')
parser.add_argument('--no_loc', dest='no_loc',
action='store_true')
parser.add_argument('--cv', dest='opencv_save',
action='store_true')
parser.add_argument('--log_dir', dest='log_dir',
default=None, type=str)
parser.add_argument('--feature', dest='feature',
action='store_true')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
# args.dataset='ItargeCar_0520'
# args.backbone='resnet50'
# args.batch_size=1
# args.num_workers=1
# args.version='test'
# args.resume="/NAS/shenjintong/DCL/net_model/DCL_0520data_147_129_582_ItargeCar_0520/model_best.pth"
# args.detail='feature'
# args.resize_resolution=147
# args.crop_resolution=129
# args.anno="/NAS/shenjintong/Tools/mmdnn/pytorch2caffe/inference_set.csv"
# args.result_path="/NAS/shenjintong/Tools/mmdnn/pytorch2caffe/"
# args.feature=True
print(args)
print(args.anno)
# # todo: debug
# args.anno = "/NAS/shenjintong/Dataset/ItargeCar/class_originbox/test_info.csv"
# args.resume= "/NAS/shenjintong/DCL/net_model/DCL_512_448_41123_ItargeCar/model_best.pth"
# args.CAM=True
# args.opencv_save=True
Config = LoadConfig(args, args.version)
Config.cls_2xmul = True
Config.cls_2 = False
Config.no_loc = args.no_loc
# sw define
Config.size=(args.crop_resolution,args.crop_resolution)
if args.log_dir:
sw_log = args.log_dir
sw = SummaryWriter(log_dir=sw_log)
transformers = load_data_transformers(args.resize_resolution, args.crop_resolution, args.swap_num)
    # args.version only selects which annotation file is read automatically;
    # bypass it here and read straight from the file path given via --anno.
if args.anno:
dataset_pd = pd.read_csv(args.anno)
else:
dataset_pd = Config.val_anno if args.version == 'val' else Config.test_anno
data_set = dataset(Config,\
anno=dataset_pd,\
swap=transformers["None"],\
totensor=transformers['test_totensor'],\
test=True)
dataloader = torch.utils.data.DataLoader(data_set,\
batch_size=args.batch_size,\
shuffle=False,\
num_workers=args.num_workers,\
collate_fn=collate_fn4test)
setattr(dataloader, 'total_item_len', len(data_set))
cudnn.benchmark = True
model = MainModel(Config)
model_dict=model.state_dict()
pretrained_dict=torch.load(args.resume).state_dict()
# pretrained_dict=torch.load(args.resume)
pretrained_dict = {k[7:]: v for k, v in pretrained_dict.items() if k[7:] in model_dict}
model_dict.update(pretrained_dict)
model.load_state_dict(model_dict)
model.cuda()
model.train(False)
if args.feature:
feature = pd.DataFrame(columns=range(len(data_set)))
with torch.no_grad():
result=[]
val_size = ceil(len(data_set) / dataloader.batch_size)
result_gather = {}
count_bar = tqdm(total=dataloader.__len__())
for batch_cnt_val, data_val in enumerate(dataloader):
args.batch_cnt_val=batch_cnt_val
count_bar.update(1)
inputs, labels, img_name = data_val
inputs = Variable(inputs.cuda())
outputs = model(inputs)
outputs_pred = outputs[0]
            outputs_pred_soft = F.softmax(outputs_pred, dim=1)
# feature
# outputs_confidence, outputs_predicted = torch.max(outputs_pred_soft, 1)
outputs_confidence, outputs_predicted = torch.max(outputs_pred, 1)
result.append(outputs_confidence.cpu().numpy()[0].tolist())
result.append(outputs_predicted.cpu().numpy()[0].tolist())
m_index = pd.MultiIndex.from_product([['cv'], range(10), ['feature', 'index']],
names=["me", "image_index", "predicted"])
predicted = pd.DataFrame(result, index=m_index)
predicted.columns.names = ['Top1-5']
predicted.to_csv("/NAS/shenjintong/Tools/mmdnn/pytorch2caffe/predicted0.41.csv")
|
import tqdm
import time
import os.path as p
from itertools import chain
import torch
import numpy as np
import torch.nn.functional as F
from datasets import load_from_disk, load_dataset
from transformers import TrainingArguments
from transformers import AdamW, get_linear_schedule_with_warmup
from torch.utils.data import TensorDataset, DataLoader, RandomSampler
from retrieval.dense import DenseRetrieval
def get_retriever_dataset(args):
if args.retriever.dense_train_dataset not in [
"train_dataset",
"squad_kor_v1",
"bm25_document_questions",
"bm25_question_documents",
]:
        raise FileNotFoundError(f"{args.retriever.dense_train_dataset} is not a DenseRetrieval dataset.")
if args.retriever.dense_train_dataset == "squad_kor_v1":
train_dataset = load_dataset(args.retriever.dense_train_dataset)
else:
dataset_path = p.join(args.path.train_data_dir, args.retriever.dense_train_dataset)
        assert p.exists(dataset_path), f"{args.retriever.dense_train_dataset} does not exist at the given path."
train_dataset = load_from_disk(dataset_path)
return train_dataset
def epoch_time(start_time, end_time):
elapsed_time = end_time - start_time
elapsed_mins = int(elapsed_time / 60)
elapsed_secs = int(elapsed_time - (elapsed_mins * 60))
return elapsed_mins, elapsed_secs
class DprRetrieval(DenseRetrieval):
def _exec_embedding(self):
p_encoder, q_encoder = self._load_model()
train_dataset, eval_dataset = self._load_dataset(eval=True)
args = TrainingArguments(
output_dir="dense_retrieval",
evaluation_strategy="epoch",
learning_rate=self.args.retriever.learning_rate,
per_device_train_batch_size=self.args.retriever.per_device_train_batch_size,
per_device_eval_batch_size=self.args.retriever.per_device_eval_batch_size,
num_train_epochs=self.args.retriever.num_train_epochs,
weight_decay=self.args.retriever.weight_decay,
gradient_accumulation_steps=self.args.retriever.gradient_accumulation_steps,
)
p_encoder, q_encoder = self._train(args, train_dataset, p_encoder, q_encoder, eval_dataset)
p_embedding = []
for passage in tqdm.tqdm(self.contexts): # wiki
passage = self.tokenizer(
passage, padding="max_length", truncation=True, max_length=512, return_tensors="pt"
).to("cuda")
p_emb = p_encoder(**passage).to("cpu").detach().numpy()
p_embedding.append(p_emb)
        p_embedding = np.array(p_embedding).squeeze()  # (num_passages, hidden)
return p_embedding, q_encoder
class BaseTrainMixin:
def _load_dataset(self, eval=False):
# dataset.features : ['question', 'context', 'answers', ...]
datasets = get_retriever_dataset(self.args)
# tokenizer_input = self.tokenizer(datasets["train"][1]["context"], padding="max_length", max_length=512, truncation=True)
# print("tokenizer:", self.tokenizer.convert_ids_to_tokens(tokenizer_input["input_ids"]))
train_dataset = datasets["train"]
q_seqs = self.tokenizer(
train_dataset["question"], padding="longest", truncation=True, max_length=512, return_tensors="pt"
)
p_seqs = self.tokenizer(
train_dataset["context"], padding="max_length", truncation=True, max_length=512, return_tensors="pt"
)
train_dataset = TensorDataset(
p_seqs["input_ids"],
p_seqs["attention_mask"],
p_seqs["token_type_ids"],
q_seqs["input_ids"],
q_seqs["attention_mask"],
q_seqs["token_type_ids"],
)
eval_dataset = None
if eval:
eval_dataset = datasets["validation"]
q_seqs = self.tokenizer(
eval_dataset["question"], padding="longest", truncation=True, max_length=512, return_tensors="pt"
)
p_seqs = self.tokenizer(
eval_dataset["context"], padding="max_length", truncation=True, max_length=512, return_tensors="pt"
)
eval_dataset = TensorDataset(
p_seqs["input_ids"],
p_seqs["attention_mask"],
p_seqs["token_type_ids"],
q_seqs["input_ids"],
q_seqs["attention_mask"],
q_seqs["token_type_ids"],
)
return train_dataset, eval_dataset
def _train(self, training_args, train_dataset, p_model, q_model, eval_dataset):
print("TRAINING IN BASE TRAIN MIXIN")
train_sampler = RandomSampler(train_dataset)
train_dataloader = DataLoader(
train_dataset, sampler=train_sampler, batch_size=training_args.per_device_train_batch_size, drop_last=True
)
if eval_dataset:
eval_sampler = RandomSampler(eval_dataset)
eval_dataloader = DataLoader(
eval_dataset, sampler=eval_sampler, batch_size=training_args.per_device_eval_batch_size
)
optimizer_grouped_parameters = [{"params": p_model.parameters()}, {"params": q_model.parameters()}]
optimizer = AdamW(optimizer_grouped_parameters, lr=training_args.learning_rate, eps=training_args.adam_epsilon)
t_total = len(train_dataloader) // training_args.gradient_accumulation_steps * training_args.num_train_epochs
scheduler = get_linear_schedule_with_warmup(
optimizer, num_warmup_steps=training_args.warmup_steps, num_training_steps=t_total
)
global_step = 0
p_model.train()
q_model.train()
p_model.zero_grad()
q_model.zero_grad()
torch.cuda.empty_cache()
        for epoch in range(int(training_args.num_train_epochs)):  # num_train_epochs may be a float in TrainingArguments
train_loss = 0.0
start_time = time.time()
for step, batch in enumerate(train_dataloader):
if torch.cuda.is_available():
batch = tuple(t.cuda() for t in batch)
p_inputs = {"input_ids": batch[0], "attention_mask": batch[1], "token_type_ids": batch[2]}
q_inputs = {"input_ids": batch[3], "attention_mask": batch[4], "token_type_ids": batch[5]}
p_outputs = p_model(**p_inputs)
q_outputs = q_model(**q_inputs)
sim_scores = torch.matmul(q_outputs, torch.transpose(p_outputs, 0, 1))
targets = torch.arange(0, training_args.per_device_train_batch_size).long()
if torch.cuda.is_available():
targets = targets.to("cuda")
sim_scores = F.log_softmax(sim_scores, dim=1)
loss = F.nll_loss(sim_scores, targets)
loss = loss / training_args.gradient_accumulation_steps
print(f"epoch: {epoch + 1:02} step: {step:02} loss: {loss}", end="\r")
train_loss += loss.item()
loss.backward()
if ((step + 1) % training_args.gradient_accumulation_steps) == 0:
optimizer.step()
scheduler.step()
p_model.zero_grad()
q_model.zero_grad()
global_step += 1
torch.cuda.empty_cache()
end_time = time.time()
epoch_mins, epoch_secs = epoch_time(start_time, end_time)
print(f"Epoch: {epoch + 1:02} | Time: {epoch_mins}m {epoch_secs}s")
print(f"\tTrain Loss: {train_loss / len(train_dataloader):.4f}")
if eval_dataset:
eval_loss = 0
correct = 0
total = 0
p_model.eval()
q_model.eval()
with torch.no_grad():
for idx, batch in enumerate(eval_dataloader):
if torch.cuda.is_available():
batch = tuple(t.cuda() for t in batch)
p_inputs = {"input_ids": batch[0], "attention_mask": batch[1], "token_type_ids": batch[2]}
q_inputs = {"input_ids": batch[3], "attention_mask": batch[4], "token_type_ids": batch[5]}
p_outputs = p_model(**p_inputs)
q_outputs = q_model(**q_inputs)
sim_scores = torch.matmul(q_outputs, torch.transpose(p_outputs, 0, 1))
                        targets = torch.arange(0, q_outputs.size(0)).long()  # the last eval batch may be smaller (no drop_last)
if torch.cuda.is_available():
targets = targets.to("cuda")
sim_scores = F.log_softmax(sim_scores, dim=1)
loss = F.nll_loss(sim_scores, targets)
loss = loss / training_args.gradient_accumulation_steps
predicts = np.argmax(sim_scores.cpu(), axis=1)
                        for pos, predict in enumerate(predicts):  # avoid shadowing the dataloader's idx
                            total += 1
                            if predict == pos:
                                correct += 1
eval_loss += loss.item()
print(
f"Epoch: {epoch + 1:02}\tEval Loss: {eval_loss / len(eval_dataloader):.4f}\tAccuracy: {correct/total:.4f}"
)
p_model.train()
q_model.train()
return p_model, q_model
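# Illustrative sketch (not used by the training pipeline above): the in-batch
# negative loss treats every other passage in the batch as a negative, so for
# a batch of size B the targets are simply the diagonal indices 0..B-1.
def _in_batch_negative_loss_demo(batch_size=4, hidden=8):
    q = torch.randn(batch_size, hidden)  # stand-ins for q_encoder outputs
    p = torch.randn(batch_size, hidden)  # stand-ins for p_encoder outputs
    sim_scores = torch.matmul(q, p.T)  # (B, B) similarity matrix
    targets = torch.arange(batch_size)  # the positives sit on the diagonal
    return F.nll_loss(F.log_softmax(sim_scores, dim=1), targets)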
class Bm25TrainMixin:
def _load_dataset(self):
# dataset.features : ['query', 'negative_samples', 'label']
dataset = get_retriever_dataset(self.args)
corpus_size = len(dataset["negative_samples"][0])
negative_samples = list(chain(*dataset["negative_samples"]))
# query
q_seqs = self.tokenizer(
dataset["query"], padding="longest", truncation=True, max_length=512, return_tensors="pt"
)
print("query tokenized:", self.tokenizer.convert_ids_to_tokens(q_seqs["input_ids"][0]))
# negative_samples
p_seqs = self.tokenizer(
negative_samples, padding="longest", truncation=True, max_length=512, return_tensors="pt"
)
print("negative_sample tokenized:", self.tokenizer.convert_ids_to_tokens(p_seqs["input_ids"][0]))
embedding_size = p_seqs["input_ids"].shape[-1]
for k in p_seqs.keys():
p_seqs[k] = p_seqs[k].reshape(-1, corpus_size, embedding_size)
train_dataset = TensorDataset(
p_seqs["input_ids"],
p_seqs["attention_mask"],
p_seqs["token_type_ids"],
q_seqs["input_ids"],
q_seqs["attention_mask"],
q_seqs["token_type_ids"],
torch.tensor(dataset["label"]),
)
return train_dataset
def _train(self, training_args, dataset, p_model, q_model):
""" Sampling된 데이터 셋으로 학습 """
print("TRAINING IN BM25 TRAIN MIXIN")
train_sampler = RandomSampler(dataset)
train_dataloader = DataLoader(
dataset, sampler=train_sampler, batch_size=training_args.per_device_train_batch_size
)
optimizer_grouped_parameters = [{"params": p_model.parameters()}, {"params": q_model.parameters()}]
optimizer = AdamW(optimizer_grouped_parameters, lr=training_args.learning_rate, eps=training_args.adam_epsilon)
t_total = len(train_dataloader) // training_args.gradient_accumulation_steps * training_args.num_train_epochs
scheduler = get_linear_schedule_with_warmup(
optimizer, num_warmup_steps=training_args.warmup_steps, num_training_steps=t_total
)
global_step = 0
p_model.train()
q_model.train()
p_model.zero_grad()
q_model.zero_grad()
torch.cuda.empty_cache()
        for epoch in range(int(training_args.num_train_epochs)):  # num_train_epochs may be a float in TrainingArguments
train_loss = 0.0
start_time = time.time()
for step, batch in enumerate(train_dataloader):
if torch.cuda.is_available():
batch = tuple(t.cuda() for t in batch)
            # negative samples (candidate passages): batch[0..2] come from p_seqs
p_inputs = {
"input_ids": batch[0].squeeze(),
"attention_mask": batch[1].squeeze(),
"token_type_ids": batch[2].squeeze(),
}
            # query: batch[3..5] come from q_seqs
q_inputs = {"input_ids": batch[3], "attention_mask": batch[4], "token_type_ids": batch[5]}
label = batch[6]
p_outputs = p_model(**p_inputs)
q_outputs = q_model(**q_inputs)
sim_scores = torch.matmul(q_outputs, torch.transpose(p_outputs, 0, 1))
sim_scores = F.log_softmax(sim_scores, dim=1)
loss = F.nll_loss(sim_scores, label) / training_args.gradient_accumulation_steps
print(f"epoch: {epoch:02} step: {step:02} loss: {loss}", end="\r")
train_loss += loss.item()
loss.backward()
if ((step + 1) % training_args.gradient_accumulation_steps) == 0:
optimizer.step()
scheduler.step()
p_model.zero_grad()
q_model.zero_grad()
global_step += 1
torch.cuda.empty_cache()
end_time = time.time()
epoch_mins, epoch_secs = epoch_time(start_time, end_time)
print(f"Epoch: {epoch + 1:02} | Time: {epoch_mins}m {epoch_secs}s")
print(f"\tTrain Loss: {train_loss / len(train_dataloader):.4f}")
        # for bm25_document_questions the q_model encoded the documents, so swap the encoders
if self.args.retriever.dense_train_dataset == "bm25_document_questions":
return q_model, p_model
return p_model, q_model
|
import discord
from discord.ext import commands
import random
# PREFIX is referenced by the avatar help text below but was never defined in
# this module; "!" here is an assumed placeholder for the bot's actual prefix.
PREFIX = "!"
class administracion(commands.Cog):
def __init__(self, bot):
self.bot = bot
    @commands.command(name='info', brief="[informacion del servidor]")  # "alises" was a typo; an alias equal to the name is redundant
async def info(self, ctx):
embed = discord.Embed(title=f"{ctx.guild.name}",
description="La casa de los giles",
color=discord.Color.blue())
embed.add_field(name="servidor creado", value=f"{ctx.guild.created_at}")
embed.add_field(name="Amo del servidor", value=f"{ctx.guild.owner}")
embed.add_field(name="Region del servidor", value=f"{ctx.guild.region}")
embed.add_field(name="ID del servidor", value=f"{ctx.guild.id}")
#experimental embed.set_thumbnail(url=f"{ctx.guild.icon}")
await ctx.send(embed=embed)
    @commands.command(name='say', brief="[comando solo para admins]")
async def say(self, ctx, *, text):
message = ctx.message
await message.delete()
await ctx.send(f"{text}")
    @commands.command(name='avatar',
                      brief="[ve el avatar tuyo o de los demas]")
async def avatar(self, ctx):
args = ctx.message.content.split(" ")[1:]
embed = discord.Embed()
embed.colour = discord.Color.from_rgb(0, 255, 255)
if len(args) == 0:
embed.title = ctx.author.name
embed.set_image(url=ctx.author.avatar_url)
await ctx.send(embed=embed)
elif len(ctx.message.mentions) > 0:
for member in ctx.message.mentions:
embed.title = member.name
embed.set_image(url=member.avatar_url)
await ctx.send(embed=embed)
elif args[0] in ("server", "guild"):
embed.title = ctx.guild.name
embed.set_image(url=ctx.guild.icon_url)
await ctx.send(embed=embed)
else:
embed.title = "avatar"
embed.description = f"Muestra tu avatar, de los usuarios mencionados o del servidor."
embed.add_field(
name="Uso:",
value=
f"{PREFIX}avatar\n{PREFIX}avatar @user1, @user2, ...\n{PREFIX}avatar server",
inline=False)
await ctx.send(embed=embed)
@commands.command(aliases=['8ball'], brief="[¿quieres saber tu futuro?]")
async def _8ball(self, ctx, *, question):
response = [
'En mi opinión, sí', 'Es cierto', 'Es decididamente así',
'Probablemente', 'Buen pronóstico', 'Todo apunta a que sí', 'Sin duda',
'Sí', 'Sí - definitivamente', 'Debes confiar en ello',
'Respuesta vaga, vuelve a intentarlo', 'Pregunta en otro momento',
'Será mejor que no te lo diga ahora', 'No puedo predecirlo ahora',
'Concéntrate y vuelve a preguntar', 'Puede ser', 'No cuentes con ello',
'Mi respuesta es no', 'Mis fuentes me dicen que no',
'Las perspectivas no son buenas', 'Muy dudoso'
]
        _8ball_embed = discord.Embed(title=' ',
                                     description=' ',
                                     color=discord.Color.blue())
_8ball_embed.add_field(
name="Comando 8ball | :8ball:",
value=
f"**Pregunta:** {question}\n**Respuesta:** {random.choice(response)}"
        )  # format the answer however you like
await ctx.send(embed=_8ball_embed)
@commands.command(name="l",
help="Muestra un porcentaje de lo L que eres",
brief="[Que tan L eres?]")
async def calculeL(self, ctx):
try:
            # the original hand-typed list omitted 22 and 66 (presumably by
            # accident) and included 200 as an easter egg; this keeps the intent
            random_porcentaje = random.choice([*range(1, 101), 200])
await ctx.send(f'{ctx.author} es {random_porcentaje}% L ')
        except Exception as error:  # NameError alone would miss most runtime errors
await ctx.send(f"Algun error ocurrio ...{error}")
finally:
print(f"{ctx.author}")
@commands.command(name="user",
help="Mira la informacion de un miembro",
brief="[Mira la informacion de un miembro]")
    async def user_info(self, ctx, member: discord.Member):
try:
user_info_embed = discord.Embed(colour=0xff00ff).set_author(
name=f"{member}", icon_url=member.avatar_url)
user_info_embed.add_field(name=f"Entro a el servidor {ctx.guild}",
value=member.joined_at)
user_info_embed.add_field(name="Rol mas alto", value=member.top_role)
user_info_embed.add_field(name="Creo su cuenta de discord",
value=member.created_at)
user_info_embed.set_thumbnail(url=member.avatar_url)
await ctx.send(embed=user_info_embed)
        except Exception:
await ctx.send("Una excepcion ocurrio...")
finally:
await ctx.send(
"informacion obtenida con exito, si no es asi, puede que haya ocurrido un error."
)
@commands.command(name="sabias", alises="sabias", brief="[quieres un dato interesante]")
async def sabias(self, ctx):
dato = [
'Los flamencos doblan las piernas en el tobillo no la rodilla',
'La rueda de la fortuna fue inventada para alejar a los americanos del pecado',
'Los perezosos pueden aguantar más tiempo el aliento que los delfines',
'Los Froot Loops son todos del mismo sabor', 'Las manzanas en el supermercado pueden tener hasta un año',
'Los pulpos tienen 3 corazones', 'En las Filipinas, McDonald´s vende spaghetti',
'Hitler fue nominado a un Nobel de la paz', 'Las langostas saborean con los pies',
'El Empire State tiene su propio código postal',
'Las sombras son más oscuras en la luna', 'La Estatua de la Libertad solía ser un faro',
'Las ManhattAnts son una especie de hormigas únicas de Nueva York',
'Los tanques británicos están equipados para hacer té',
'Los elefantes son los únicos animales que no pueden saltar.',
'La palabra [cementerio] significa dormitorio en griego antiguo.',
' Los ojos hacen más ejercicio que las piernas',
'Nuestro aroma es tan único como nuestras huellas digitales', 'Puedes ver un óvulo a simple vista',
'El corazón podría mover un coche', 'Nada es tan inútil como parece',
'Eres el responsable de todo el polvo que se junta en tu casa',
' El calor corporal es más de lo que imaginas',
'La lengua nunca descansa', 'Los hombres y mujeres escuchan de manera diferente',
'Los bebés pueden curar a sus madres en el vientre',
' Para hacer un kilogramo de miel, una abeja debe recorrer 4 millones de flores, pero la miel es un alimento que no se pudre',
'Las primeras almohadas eran de piedra', 'Los primeros despertadores… ¡Eran personas!',
'El cuerpo humano contiene en promedio unos 37 litros de agua',
'Una canilla que gotea desperdicia más de 75 litros de agua por día',
'Se necesitan 200 litros de agua para producir un solo litro de gaseosa cola',
'Una persona puede sobrevivir un mes sin alimentarse, pero puede estar como máximo siete días sin beber agua.',
'sabias que naci por idea de mi novia'
]
        dato_embed = discord.Embed(title=' ',
                                   description=' ',
                                   color=discord.Color.blue())
        dato_embed.add_field(name="sabias que", value=random.choice(dato))  # the original wrapped the value in a set literal
await ctx.send(embed=dato_embed)
@commands.command(name="nivel",
help="Muestra un nivel de pvp",
brief="[Que tan pro eres?]")
async def nivel(self, ctx):
try:
            random_porcentaje = random.choice([*range(1, 101), 200])  # see note in calculeL
await ctx.send(f'{ctx.author} eres {random_porcentaje}% bueno en pvp ')
        except Exception as error:
await ctx.send(f"Algun error ocurrio ...{error}")
finally:
print(f"{ctx.author}")
def setup(bot):
bot.add_cog(administracion(bot))
|
print('2020-04-09')
class Person:
    name = '34'  # class attribute, shared by all instances
    __age = 0  # name-mangled "private" attribute
    def __init__(self, name):
        self.name = name  # instance attribute shadows the class attribute
    @staticmethod
    def getname():
        print(Person.name)  # always reads the class attribute
    def setAge(self, age):
        self.__age = age
Person.getname()  # prints '34'
zhang = Person('张三')
print(zhang.name)  # prints '张三' (the instance attribute)
Person.getname()  # still prints '34'; the class attribute is untouched
# p 156
|
from . import util
def get_numbers(lines):
return [int(line) for line in lines]
def find_invalid(numbers, plen):
    # slide a window over the previous `plen` numbers; the first number that
    # is not the sum of two distinct numbers in the window is the answer
    window = set(numbers[:plen])
    for idx, target in enumerate(numbers[plen:]):
if not any(target - n != n and target - n in window for n in window):
return target
window.remove(numbers[idx])
window.add(numbers[plen + idx])
def find_sequence(numbers, target):
    # two-pointer scan for a contiguous run summing to `target`
    # (all inputs are positive, so the window sum moves monotonically)
    head = 0
    foot = 0
total = 0
while total != target:
if total < target:
total += numbers[head]
head += 1
else:
total -= numbers[foot]
foot += 1
return numbers[foot:head]
def run():
inputlines = util.get_input_lines("09.txt")
numbers = get_numbers(inputlines)
invalid = find_invalid(numbers, 25)
sequence = find_sequence(numbers, invalid)
return invalid, min(sequence) + max(sequence)
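# Worked example (the sample from the puzzle statement), kept as a comment so
# importing this module stays side-effect free:
#   sample = [35, 20, 15, 25, 47, 40, 62, 55, 65, 95,
#             102, 117, 150, 182, 127, 219, 299, 277, 309, 576]
#   find_invalid(sample, 5) == 127          # no pair of the previous 5 sums to 127
#   seq = find_sequence(sample, 127)        # -> [15, 25, 47, 40]
#   min(seq) + max(seq) == 62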
|
from datetime import datetime
class UtcClock:
    def get_time(self):
        # use UTC, as the class name promises (datetime.now() is local time)
        now = datetime.utcnow()
        return now.second + 60 * now.minute + 3600 * now.hour
utc_clock = UtcClock()
print("Current clock: %d" % (utc_clock.get_time()))
|
from typing import TYPE_CHECKING, Any, Optional, Type, Union, cast
from pypika import Case, Table, functions
from pypika.functions import DistinctOptionFunction
from pypika.terms import ArithmeticExpression
from pypika.terms import Function as BaseFunction
from tortoise.exceptions import ConfigurationError
from tortoise.expressions import F
from tortoise.fields.relational import BackwardFKRelation, ForeignKeyFieldInstance, RelationalField
from tortoise.query_utils import Q, QueryModifier
if TYPE_CHECKING: # pragma: nocoverage
from tortoise.models import Model
from tortoise.fields.base import Field
##############################################################################
# Base
##############################################################################
class Function:
"""
Function/Aggregate base.
:param field: Field name
:param default_values: Extra parameters to the function.
.. attribute:: database_func
:annotation: pypika.terms.Function
The pypika function this represents.
.. attribute:: populate_field_object
:annotation: bool = False
Enable populate_field_object where we want to try and preserve the field type.
"""
__slots__ = ("field", "field_object", "default_values")
database_func = BaseFunction
# Enable populate_field_object where we want to try and preserve the field type.
populate_field_object = False
def __init__(self, field: Union[str, F, ArithmeticExpression], *default_values: Any) -> None:
self.field = field
self.field_object: "Optional[Field]" = None
self.default_values = default_values
def _get_function_field(
self, field: "Union[ArithmeticExpression, Field, str]", *default_values
):
return self.database_func(field, *default_values)
def _resolve_field_for_model(self, model: "Type[Model]", table: Table, field: str) -> dict:
joins = []
fields = field.split("__")
for iter_field in fields[:-1]:
if iter_field not in model._meta.fetch_fields:
raise ConfigurationError(f"{field} not resolvable")
related_field = cast(RelationalField, model._meta.fields_map[iter_field])
joins.append((table, iter_field, related_field))
model = related_field.related_model
related_table: Table = related_field.related_model._meta.basetable
if isinstance(related_field, ForeignKeyFieldInstance):
# Only FK's can be to same table, so we only auto-alias FK join tables
related_table = related_table.as_(f"{table.get_table_name()}__{iter_field}")
table = related_table
last_field = fields[-1]
if last_field in model._meta.fetch_fields:
related_field = cast(RelationalField, model._meta.fields_map[last_field])
related_field_meta = related_field.related_model._meta
joins.append((table, last_field, related_field))
related_table = related_field_meta.basetable
if isinstance(related_field, BackwardFKRelation):
if table == related_table:
related_table = related_table.as_(f"{table.get_table_name()}__{last_field}")
field = related_table[related_field_meta.db_pk_column]
else:
field_object = model._meta.fields_map[last_field]
if field_object.source_field:
field = table[field_object.source_field]
else:
field = table[last_field]
if self.populate_field_object:
self.field_object = model._meta.fields_map.get(last_field, None)
if self.field_object: # pragma: nobranch
func = self.field_object.get_for_dialect(
model._meta.db.capabilities.dialect, "function_cast"
)
if func:
field = func(self.field_object, field)
return {"joins": joins, "field": field}
def resolve(self, model: "Type[Model]", table: Table) -> dict:
"""
Used to resolve the Function statement for SQL generation.
:param model: Model the function is applied on to.
:param table: ``pypika.Table`` to keep track of the virtual SQL table
(to allow self referential joins)
        :return: Dict with keys ``"joins"`` and ``"field"``
"""
if isinstance(self.field, str):
function = self._resolve_field_for_model(model, table, self.field)
function["field"] = self._get_function_field(function["field"], *self.default_values)
return function
field, field_object = F.resolver_arithmetic_expression(model, self.field)
if self.populate_field_object:
self.field_object = field_object
return {"joins": [], "field": self._get_function_field(field, *self.default_values)}
class Aggregate(Function):
"""
Base for SQL Aggregates.
:param field: Field name
:param default_values: Extra parameters to the function.
    :param distinct: Flag for aggregate with distinction
"""
database_func = DistinctOptionFunction
def __init__(
self,
field: Union[str, F, ArithmeticExpression],
*default_values: Any,
distinct=False,
_filter: Optional[Q] = None,
) -> None:
super().__init__(field, *default_values)
self.distinct = distinct
self.filter = _filter
def _get_function_field(
self, field: "Union[ArithmeticExpression, Field, str]", *default_values
):
if self.distinct:
return self.database_func(field, *default_values).distinct()
return self.database_func(field, *default_values)
def _resolve_field_for_model(self, model: "Type[Model]", table: Table, field: str) -> dict:
ret = super()._resolve_field_for_model(model, table, field)
if self.filter:
modifier = QueryModifier()
modifier &= self.filter.resolve(model, {}, {}, model._meta.basetable)
where_criterion, joins, having_criterion = modifier.get_query_modifiers()
ret["field"] = Case().when(where_criterion, ret["field"]).else_(None)
return ret
##############################################################################
# Standard functions
##############################################################################
class Trim(Function):
"""
Trims whitespace off edges of text.
:samp:`Trim("{FIELD_NAME}")`
"""
database_func = functions.Trim
class Length(Function):
"""
Returns length of text/blob.
:samp:`Length("{FIELD_NAME}")`
"""
database_func = functions.Length
class Coalesce(Function):
"""
Provides a default value if field is null.
:samp:`Coalesce("{FIELD_NAME}", {DEFAULT_VALUE})`
"""
database_func = functions.Coalesce
class Lower(Function):
"""
Converts text to lower case.
:samp:`Lower("{FIELD_NAME}")`
"""
database_func = functions.Lower
class Upper(Function):
"""
Converts text to upper case.
:samp:`Upper("{FIELD_NAME}")`
"""
database_func = functions.Upper
##############################################################################
# Aggregate functions
##############################################################################
class Count(Aggregate):
"""
    Counts the number of entries for that column.
:samp:`Count("{FIELD_NAME}")`
"""
database_func = functions.Count
class Sum(Aggregate):
"""
Adds up all the values for that column.
:samp:`Sum("{FIELD_NAME}")`
"""
database_func = functions.Sum
populate_field_object = True
class Max(Aggregate):
"""
Returns largest value in the column.
:samp:`Max("{FIELD_NAME}")`
"""
database_func = functions.Max
populate_field_object = True
class Min(Aggregate):
"""
Returns smallest value in the column.
:samp:`Min("{FIELD_NAME}")`
"""
database_func = functions.Min
populate_field_object = True
class Avg(Aggregate):
"""
Returns average (mean) of all values in the column.
:samp:`Avg("{FIELD_NAME}")`
"""
database_func = functions.Avg
populate_field_object = True
|
import unittest
import os
from deterministic_encryption_utils.encryption.Encryption import Encryption, EncryptionException
import tempfile
import shutil
from deterministic_encryption_utils.encryption.VirtualFile import VirtualFile
class TestEncryption(unittest.TestCase):
class _SaltProviderMock(object):
def getSaltFor(self, absoluteFilePath):
if not os.path.exists(absoluteFilePath):
raise ValueError('The given path {0} cannot be used to determine a salt.'.format(absoluteFilePath))
return '42'
def setUp(self):
saltProviderMock = TestEncryption._SaltProviderMock()
self.subject = Encryption('abc', saltProviderMock, saltProviderMock)
def testEncryptAndDecryptFilename(self):
tmpFile = tempfile.NamedTemporaryFile()
rootPath = tmpFile.name
encryptedFileName = self.subject.encryptFileName(rootPath, os.path.basename(rootPath))
self.assertEqual(os.path.basename(rootPath), self.subject.decryptFileName(encryptedFileName))
def testErrorOnEncryptingNotAbsoluteFilePath(self):
with self.assertRaises(EncryptionException) as _:
self.subject.encryptFileName(tempfile.mktemp(), 'abc123')
def testEncryptAndDecryptFilenameWithUnicode(self):
tmpFile = tempfile.NamedTemporaryFile(prefix='ÄÖÜ')
rootPath = tmpFile.name
encryptedFileName = self.subject.encryptFileName(rootPath, os.path.basename(rootPath))
self.assertEqual(os.path.basename(rootPath), self.subject.decryptFileName(encryptedFileName))
def testEncryptAndDecryptPath(self):
rootDir = tempfile.mkdtemp()
aFolder = tempfile.mkdtemp(dir=rootDir)
aFile = tempfile.NamedTemporaryFile(dir=aFolder, delete=False)
path = aFile.name[len(rootDir):]
try:
encryptedPath = self.subject.encryptPath(rootDir, path)
self.assertEqual(len(os.path.split(path)), len(os.path.split(encryptedPath)))
self.assertEqual(path, self.subject.decryptPath(encryptedPath))
finally:
shutil.rmtree(rootDir)
def testEncryptAndDecryptPathWithSymlinks(self):
rootDir = tempfile.mkdtemp()
aFolder = tempfile.mkdtemp(dir=rootDir)
aFile = tempfile.NamedTemporaryFile(dir=aFolder, delete=False)
symLink = tempfile.mktemp(dir=aFolder)
        os.symlink(aFile.name, symLink)  # os.link would create a hard link, not the symlink this test is named for
path = symLink[len(rootDir):]
try:
encryptedPath = self.subject.encryptPath(rootDir, path)
self.assertEqual(len(os.path.split(path)), len(os.path.split(encryptedPath)))
self.assertEqual(path, self.subject.decryptPath(encryptedPath))
finally:
shutil.rmtree(rootDir)
def testEncryptAndDecryptContent(self):
plainFile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
encryptedFile = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
try:
# fill plain text file
with plainFile as f:
f.write('abc')
plainFileVirtual = VirtualFile(plainFile.name)
# fill encrypted file
encryptedContent = self.subject.encryptedContent(plainFileVirtual, 0, 4096)
with encryptedFile as f:
f.write(encryptedContent)
# ensure that encrypted file size matches the real size
self.assertEqual(os.path.getsize(encryptedFile.name), self.subject.encryptedFileSize(os.path.getsize(plainFile.name)))
# ensure that decrypted file size matches the real size
            encryptedFileVirtual = VirtualFile(encryptedFile.name)
self.assertEqual(os.path.getsize(plainFile.name), self.subject.decryptedFileSize(encryptedFileVirtual))
# decrypt content and compare to plaintext
decryptedContent = self.subject.decryptedContent(encryptedFileVirtual, 0, 4096)
self.assertEqual('abc', decryptedContent.decode())
finally:
os.remove(plainFile.name)
os.remove(encryptedFile.name)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'EncryptionTest.testName']
unittest.main()
|
#!/usr/bin/env python
#
# Copyright 2011 Rodrigo Ancavil del Pino
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Are incorporated the primitive datatypes defined by XML.
Array is defined for the use of array of elements and his respective datatype.
"""
import inspect
from tornadows import complextypes
def createElementXML(name,type,prefix='xsd'):
""" Function used for the creation of xml elements. """
return '<%s:element name="%s" type="%s:%s"/>'%(prefix,name,prefix,type)
def createArrayXML(name,type,prefix='xsd',maxoccurs=None):
""" Function used for the creation of xml complexElements """
complexType = '<%s:complexType name="%sParams">\n'%(prefix,name)
complexType += '<%s:sequence>\n'%prefix
if maxoccurs == None:
complexType += '<%s:element name="value" type="%s:%s" maxOccurs="unbounded"/>\n'%(prefix,prefix,type)
else:
complexType += '<%s:element name="value" type="%s:%s" maxOccurs="%d"/>\n'%(prefix,prefix,type,maxoccurs)
complexType += '</%s:sequence>\n'%prefix
complexType += '</%s:complexType>\n'%prefix
complexType += '<%s:element name="%s" type="tns:%sParams"/>\n'%(prefix,name,name)
return complexType
class Array:
""" Create arrays of xml elements.
Here an example:
@webservices(_params=xmltypes.Array(xmltypes.Integer),_returns=xmltypes.Integer)
        def function(self, list_of_elements):
for e in list_of_elements:
# Do something with the element
return len(list_of_elements)
    xmltypes.Array(xmltypes.Integer) generates an xml element in the schema definition:
    <xsd:element name="arrayOfElement" type="xsd:integer" maxOccurs="unbounded"/>
    which makes the function parameter list_of_elements a python list.
    If you specify xmltypes.Array(xmltypes.Integer,10), it generates:
<xsd:element name="arrayOfElement" type="xsd:integer" maxOccurs="10"/>
"""
def __init__(self,type,maxOccurs=None):
self._type = type
self._n = maxOccurs
def createArray(self,name):
type = None
if inspect.isclass(self._type) and not issubclass(self._type,PrimitiveType):
type = complextypes.createPythonType2XMLType(self._type.__name__)
else:
type = self._type.getType(self._type)
return createArrayXML(name,type,'xsd',self._n)
def createType(self,name):
prefix = 'xsd'
type = None
if inspect.isclass(self._type) and not issubclass(self._type,PrimitiveType):
type = complextypes.createPythonType2XMLType(self._type.__name__)
else:
type = self._type.getType(self._type)
maxoccurs = self._n
complexType = ''
if self._n == None:
complexType += '<%s:element name="%s" type="%s:%s" maxOccurs="unbounded"/>\n'%(prefix,name,prefix,type)
else:
complexType += '<%s:element name="%s" type="%s:%s" maxOccurs="%d"/>\n'%(prefix,name,prefix,type,maxoccurs)
return complexType
def genType(self,v):
value = None
if inspect.isclass(self._type) and issubclass(self._type,PrimitiveType):
value = self._type.genType(v)
elif hasattr(self._type,'__name__'):
value = complextypes.convert(self._type.__name__,v)
# Convert str to bool
if value == 'true':
value = True
elif value == 'false':
value = False
return value
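# Illustrative sketch of the schema text the helpers above generate
# (Integer is defined further below in this module):
#
#   Array(Integer).createArray('ids')
#
# yields:
#
#   <xsd:complexType name="idsParams">
#   <xsd:sequence>
#   <xsd:element name="value" type="xsd:integer" maxOccurs="unbounded"/>
#   </xsd:sequence>
#   </xsd:complexType>
#   <xsd:element name="ids" type="tns:idsParams"/>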
class PrimitiveType:
""" Class father for all derived types. """
pass
class Integer(PrimitiveType):
""" 1. XML primitive type : integer """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'integer',prefix)
@staticmethod
def getType(self):
return 'integer'
@classmethod
def genType(self,v):
return int(v)
class Decimal(PrimitiveType):
""" 2. XML primitive type : decimal """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'decimal',prefix)
@staticmethod
def getType(self):
return 'decimal'
@classmethod
def genType(self,v):
return float(v)
class Double(PrimitiveType):
""" 3. XML primitive type : double """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'double',prefix)
@staticmethod
def getType(self):
return 'double'
@classmethod
def genType(self,v):
return float(v)
class Float(PrimitiveType):
""" 4. XML primitive type : float """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'float',prefix)
@staticmethod
def getType(self):
return 'float'
@classmethod
def genType(self,v):
return float(v)
class Duration(PrimitiveType):
""" 5. XML primitive type : duration """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'duration',prefix)
@staticmethod
def getType(self):
return 'duration'
@classmethod
def genType(self,v):
return str(v)
class Date(PrimitiveType):
""" 6. XML primitive type : date """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'date',prefix)
@staticmethod
def getType(self):
return 'date'
@classmethod
def genType(self,v):
return str(v)
class Time(PrimitiveType):
""" 7. XML primitive type : time """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'time',prefix)
@staticmethod
def getType(self):
return 'time'
@classmethod
def genType(self,v):
return str(v)
class DateTime(PrimitiveType):
""" 8. XML primitive type : dateTime """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'dateTime',prefix)
@staticmethod
def getType(self):
return 'dateTime'
@classmethod
def genType(self,v):
return str(v)
class String(PrimitiveType):
""" 9. XML primitive type : string """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'string',prefix)
@staticmethod
def getType(self):
return 'string'
@classmethod
def genType(self,v):
return str(v)
class Boolean(PrimitiveType):
""" 10. XML primitive type : boolean """
@staticmethod
def createElement(name,prefix='xsd'):
        return createElementXML(name,'boolean',prefix)
@staticmethod
def getType(self):
return 'boolean'
@classmethod
def genType(self,v):
return str(v).lower()
|
import click
import os
from tabulate import tabulate
import myhacks as myh
@click.command()
@click.argument("rootdir", required=False)
@click.option("--all/--changes", default=False)
@click.option("--fetch/--no-fetch", default=False)
@click.option("--outputformat", type=click.Choice(myh.OUTPUTS), default="simple")
def run_checkGit(rootdir, all, fetch, outputformat):
"""Output the status of git repos in the projs directory."""
if not rootdir:
rootdir = myh.PROJS_DIR
repos = myh.find_repos(rootdir)
print(f"Found {len(repos)} repos in {rootdir}.")
repo_info = myh.compile_repo_info(repos, all=all, fetch=fetch)
    if len(repo_info) < 1:
        print("Found no repos that matched specified settings.")
        return
    print(tabulate(repo_info, headers="keys", tablefmt=outputformat))
if __name__ == "__main__":
run_checkGit()
|
"""
This is stochastic_gradient_descent algorithm
"""
import numpy as np
import matplotlib.pyplot as plt
"""
Creating the data for model and adding Gaussian Noise
"""
plt.style.use(['ggplot'])
X = 2 * np.random.rand(100,1)
y = 4 +3 * X+np.random.randn(100,1)
plt.plot(X,y,'b.')
plt.xlabel("$x$", fontsize=18)
plt.ylabel("$y$", rotation=0, fontsize=18)
_ =plt.axis([0,2,0,15])
X_b = np.c_[np.ones((100,1)),X]
theta_best = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y)
print(theta_best)
X_new = np.array([[0],[2]])
X_new_b = np.c_[np.ones((2,1)),X_new]
y_predict = X_new_b.dot(theta_best)
y_predict
plt.plot(X_new,y_predict,'r-')
plt.plot(X,y,'b.')
plt.xlabel("$x_1$", fontsize=18)
plt.ylabel("$y$", rotation=0, fontsize=18)
plt.axis([0,2,0,15])
"""
Cost Function and Gradients
"""
def cal_cost(theta,X,y):
    '''
    Calculates the cost for given X and y. The following shows an example for a single dimensional X
    theta = Vector of thetas
    X = Rows of X's np.zeros((m,j))
    y = Actual y's np.zeros((m,1))
    where:
        j is the no of features
    '''
    m = len(y)
    predictions = X.dot(theta)
    cost = (1/(2*m)) * np.sum(np.square(predictions-y))  # the original (1/2*m) multiplied by m instead of dividing
    return cost
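# Quick sanity check of cal_cost (a hedged illustration, separate from the
# model fit below): with a perfect-fit theta the cost is exactly zero.
#   X_demo = np.array([[1., 0.], [1., 2.]])
#   y_demo = np.array([[4.], [10.]])
#   theta_demo = np.array([[4.], [3.]])   # exactly y = 4 + 3x
#   cal_cost(theta_demo, X_demo, y_demo)  # -> 0.0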
"""
The stochastic_gradient_descent method
"""
def stochastic_gradient_descent(X,y,theta,learning_rate=0.01,iterations=10):
'''
X = Matrix of X with added bias units
y = Vector of Y
theta=Vector of thetas np.random.randn(j,1)
learning_rate
iterations = no of iterations
Returns the final theta vector and array of cost history over no of iterations
'''
m = len(y)
cost_history = np.zeros(iterations)
for it in range(iterations):
cost =0.0
for i in range(m):
rand_ind = np.random.randint(0,m)
X_i = X[rand_ind,:].reshape(1,X.shape[1])
y_i = y[rand_ind].reshape(1,1)
prediction = np.dot(X_i,theta)
            theta = theta - (1/m)*learning_rate*(X_i.T.dot((prediction - y_i)))  # the extra 1/m further scales the step; plain per-sample SGD would omit it
cost += cal_cost(theta,X_i,y_i)
cost_history[it] = cost
return theta, cost_history
lr =0.5
n_iter = 50
theta = np.random.randn(2,1)
X_b = np.c_[np.ones((len(X),1)),X]
theta,cost_history = stochastic_gradient_descent(X_b,y,theta,lr,n_iter)
print('Theta0: {:0.3f},\nTheta1: {:0.3f}'.format(theta[0][0],theta[1][0]))
print('Final cost/MSE: {:0.3f}'.format(cost_history[-1]))
fig,ax = plt.subplots(figsize=(10,8))
ax.set_ylabel(r'$J(\theta)$',rotation=0)
ax.set_xlabel('Iterations')
_=ax.plot(range(n_iter),cost_history,'b.')
|
# MINLP written by GAMS Convert at 04/21/18 13:51:24
#
# Equation counts
# Total E G L N X C B
# 33 1 0 32 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 63 48 0 15 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 125 94 31 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.i1 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i2 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i3 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i4 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i5 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i6 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i7 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i8 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i9 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i10 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i11 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i12 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i13 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i14 = Var(within=Integers,bounds=(1,10),initialize=1)
m.i15 = Var(within=Integers,bounds=(1,10),initialize=1)
m.x16 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x17 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x18 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x19 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x20 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x21 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x22 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x23 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x24 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x25 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x26 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x27 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x28 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x29 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x30 = Var(within=Reals,bounds=(1,10),initialize=1)
m.x32 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x36 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x40 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x44 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x46 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x47 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x50 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x53 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x56 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x57 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x58 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x60 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x61 = Var(within=Reals,bounds=(None,0),initialize=0)
m.x62 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(1E-10,None),initialize=1E-10)
m.obj = Objective(expr= m.i1 + m.i2 + m.i3 + m.i4 + m.i5 + m.i6 + m.i7 + m.i8 + m.i9 + m.i10 + m.i11 + m.i12 + m.i13
+ m.i14 + m.i15 + m.x16 + m.x17 + m.x18 + m.x19 + m.x20 + m.x21 + m.x22 + m.x23 + m.x24 + m.x25
+ m.x26 + m.x27 + m.x28 + m.x29 + m.x30 + 30000*m.x63, sense=minimize)
m.c2 = Constraint(expr= m.x32 + m.x33 + m.x34 + m.x35 + m.x36 + m.x37 + m.x38 + m.x39 + m.x40 + m.x41 + m.x42 + m.x43
+ m.x44 + m.x45 + m.x46 + m.x47 + m.x48 + m.x49 + m.x50 + m.x51 + m.x52 + m.x53 + m.x54 + m.x55
+ m.x56 + m.x57 + m.x58 + m.x59 + m.x60 + m.x61 + m.x62 <= 0)
m.c3 = Constraint(expr=-0.48*log(m.i1) - m.x32 <= 0)
m.c4 = Constraint(expr=-0.275*log(m.i2) - m.x33 <= 0)
m.c5 = Constraint(expr=-0.26*log(m.i3) - m.x34 <= 0)
m.c6 = Constraint(expr=-0.215*log(m.i4) - m.x35 <= 0)
m.c7 = Constraint(expr=-0.245*log(m.i5) - m.x36 <= 0)
m.c8 = Constraint(expr=-0.31*log(m.i6) - m.x37 <= 0)
m.c9 = Constraint(expr=-0.34*log(m.i7) - m.x38 <= 0)
m.c10 = Constraint(expr=-0.2*log(m.i8) - m.x39 <= 0)
m.c11 = Constraint(expr=-0.185*log(m.i9) - m.x40 <= 0)
m.c12 = Constraint(expr=-0.495*log(m.i10) - m.x41 <= 0)
m.c13 = Constraint(expr=-0.02*log(m.i11) - m.x42 <= 0)
m.c14 = Constraint(expr=-0.445*log(m.i12) - m.x43 <= 0)
m.c15 = Constraint(expr=-0.455*log(m.i13) - m.x44 <= 0)
m.c16 = Constraint(expr=-0.4*log(m.i14) - m.x45 <= 0)
m.c17 = Constraint(expr=-0.05*log(m.i15) - m.x46 <= 0)
m.c18 = Constraint(expr=-0.13*log(m.x16) - m.x47 <= 0)
m.c19 = Constraint(expr=-0.17*log(m.x17) - m.x48 <= 0)
m.c20 = Constraint(expr=-0.34*log(m.x18) - m.x49 <= 0)
m.c21 = Constraint(expr=-0.07*log(m.x19) - m.x50 <= 0)
m.c22 = Constraint(expr=-0.36*log(m.x20) - m.x51 <= 0)
m.c23 = Constraint(expr=-0.05*log(m.x21) - m.x52 <= 0)
m.c24 = Constraint(expr=-0.325*log(m.x22) - m.x53 <= 0)
m.c25 = Constraint(expr=-0.245*log(m.x23) - m.x54 <= 0)
m.c26 = Constraint(expr=-0.39*log(m.x24) - m.x55 <= 0)
m.c27 = Constraint(expr=-0.36*log(m.x25) - m.x56 <= 0)
m.c28 = Constraint(expr=-0.45*log(m.x26) - m.x57 <= 0)
m.c29 = Constraint(expr=-0.445*log(m.x27) - m.x58 <= 0)
m.c30 = Constraint(expr=-0.165*log(m.x28) - m.x59 <= 0)
m.c31 = Constraint(expr=-0.35*log(m.x29) - m.x60 <= 0)
m.c32 = Constraint(expr=-0.1*log(m.x30) - m.x61 <= 0)
m.c33 = Constraint(expr=-log(m.x63) - m.x62 <= 0)
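# A hedged solve sketch (assumes a MINLP-capable solver such as BONMIN is
# installed and on the PATH; the solver choice is an assumption, not part of
# the generated model):
#
#   opt = SolverFactory('bonmin')
#   results = opt.solve(m, tee=True)
#   print(value(m.obj))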
|
from rlgym.utils.state_setters import StateSetter
from rlgym.utils.state_setters import StateWrapper
import random
import numpy as np
class DefaultState(StateSetter):
SPAWN_BLUE_POS = [[-2048, -2560, 17], [2048, -2560, 17],
[-256, -3840, 17], [256, -3840, 17], [0, -4608, 17]]
SPAWN_BLUE_YAW = [0.25 * np.pi, 0.75 * np.pi,
0.5 * np.pi, 0.5 * np.pi, 0.5 * np.pi]
SPAWN_ORANGE_POS = [[2048, 2560, 17], [-2048, 2560, 17],
[256, 3840, 17], [-256, 3840, 17], [0, 4608, 17]]
SPAWN_ORANGE_YAW = [-0.75 * np.pi, -0.25 *
np.pi, -0.5 * np.pi, -0.5 * np.pi, -0.5 * np.pi]
def __init__(self):
super().__init__()
def reset(self, state_wrapper: StateWrapper):
"""
Modifies state_wrapper values to emulate a randomly selected default kickoff.
:param state_wrapper: StateWrapper object to be modified with desired state values.
"""
# possible kickoff indices are shuffled
spawn_inds = [0, 1, 2, 3, 4]
random.shuffle(spawn_inds)
blue_count = 0
orange_count = 0
for car in state_wrapper.cars:
pos = [0,0,0]
yaw = 0
# team_num = 0 = blue team
if car.team_num == 0:
# select a unique spawn state from pre-determined values
pos = self.SPAWN_BLUE_POS[spawn_inds[blue_count]]
yaw = self.SPAWN_BLUE_YAW[spawn_inds[blue_count]]
blue_count += 1
# team_num = 1 = orange team
elif car.team_num == 1:
# select a unique spawn state from pre-determined values
pos = self.SPAWN_ORANGE_POS[spawn_inds[orange_count]]
yaw = self.SPAWN_ORANGE_YAW[spawn_inds[orange_count]]
orange_count += 1
# set car state values
car.set_pos(*pos)
car.set_rot(yaw=yaw)
car.boost = 0.33
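# A hedged usage sketch (the state_setter keyword is assumed from the rlgym
# make() API; this module only defines the setter itself):
#
#   import rlgym
#   env = rlgym.make(state_setter=DefaultState())
#   obs = env.reset()  # cars start from one of the standard kickoff spawns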
|
from flask_restful import Api  # assumed import; the original snippet used Api without importing it
def init_app(app):
    Api(app)
|
#
# adventure module
#
# vim: et sw=2 ts=2 sts=2
# for Python3, use:
# import urllib.request as urllib2
import urllib2
import random
import string
import textwrap
import time
# "directions" are all the ways you can describe going some way;
# they are code-visible names for directions for adventure authors
direction_names = ["NORTH","SOUTH","EAST","WEST","UP","DOWN","RIGHT","LEFT",
"IN","OUT","FORWARD","BACK",
"NORTHWEST","NORTHEAST","SOUTHWEST","SOUTHEAST"]
direction_list = [ NORTH, SOUTH, EAST, WEST, UP, DOWN, RIGHT, LEFT,
IN, OUT, FORWARD, BACK,
NORTHWEST, NORTHEAST, SOUTHWEST, SOUTHEAST] = \
range(len(direction_names))
NOT_DIRECTION = None
# some old names, for backwards compatibility
(NORTH_WEST, NORTH_EAST, SOUTH_WEST, SOUTH_EAST) = \
(NORTHWEST, NORTHEAST, SOUTHWEST, SOUTHEAST)
directions = dir_by_name = dict(zip(direction_names, direction_list))
def define_direction (number, name):
if name in dir_by_name:
exit("%s is already defined as %d" % (name, dir_by_name[name]))
dir_by_name[name] = number
def lookup_dir (name):
return dir_by_name.get(name, NOT_DIRECTION)
# add lower-case versions of all names in direction_names
for name in direction_names:
define_direction(dir_by_name[name], name.lower())
# add common aliases:
# maybe the alias mechanism should be a more general
# (text-based?) mechanism that works for any command?!!!
common_aliases = [
(NORTH, "n"),
(SOUTH, "s"),
(EAST, "e"),
(WEST, "w"),
(UP, "u"),
(DOWN, "d"),
(FORWARD, "fd"),
(FORWARD, "fwd"),
(FORWARD, "f"),
(BACK, "bk"),
(BACK, "b"),
(NORTHWEST,"nw"),
(NORTHEAST,"ne"),
(SOUTHWEST,"sw"),
(SOUTHEAST, "se")
]
for (k,v) in common_aliases:
define_direction(k,v)
# define the pairs of opposite directions
opposite_by_dir = {}
def define_opposite_dirs (d1, d2):
for dir in (d1, d2):
opposite = opposite_by_dir.get(dir)
if opposite is not None:
exit("opposite for %s is already defined as %s" % (dir, opposite))
opposite_by_dir[d1] = d2
opposite_by_dir[d2] = d1
opposites = [(NORTH, SOUTH),
(EAST, WEST),
(UP, DOWN),
(LEFT, RIGHT),
(IN, OUT),
(FORWARD, BACK),
(NORTHWEST, SOUTHEAST),
(NORTHEAST, SOUTHWEST)]
for (d1,d2) in opposites:
define_opposite_dirs(d1,d2)
def opposite_direction (dir):
return opposite_by_dir[dir]
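# a couple of concrete examples of the mapping defined above:
#   opposite_direction(NORTH) == SOUTH
#   opposite_direction(SOUTHEAST) == NORTHWEST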
# registered games
registered_games = {}
FEEDBACK = 0
TITLE = 1
DESCRIPTION = 2
CONTENTS = 3
DEBUG = 4
class Colors:
  '''
  Colors class:
  reset all colors with Colors.reset.
  two subclasses, FG for foreground and BG for background;
  use as Colors.subclass.colorname,
  e.g. Colors.FG.red or Colors.BG.green.
  also, the generic bold, disable, underline, reverse, strikethrough,
  and invisible work on the main class,
  e.g. Colors.bold
  '''
reset='\033[0m'
bold='\033[01m'
disable='\033[02m'
underline='\033[04m'
reverse='\033[07m'
strikethrough='\033[09m'
invisible='\033[08m'
class FG:
black='\033[30m'
red='\033[31m'
green='\033[32m'
orange='\033[33m'
blue='\033[34m'
purple='\033[35m'
cyan='\033[36m'
lightgrey='\033[37m'
darkgrey='\033[90m'
lightred='\033[91m'
lightgreen='\033[92m'
yellow='\033[93m'
lightblue='\033[94m'
pink='\033[95m'
lightcyan='\033[96m'
class BG:
black='\033[40m'
red='\033[41m'
green='\033[42m'
orange='\033[43m'
blue='\033[44m'
purple='\033[45m'
cyan='\033[46m'
lightgrey='\033[47m'
articles = ['a', 'an', 'the']
# some prepositions to recognize indirect objects in prepositional phrases
prepositions = ['aboard', 'about', 'above', 'across', 'after', 'against', 'along',
  'among', 'around', 'at', 'atop', 'before', 'behind', 'below', 'beneath',
  'beside', 'besides', 'between', 'beyond', 'by', 'for', 'from', 'in', 'including',
  'inside', 'into', 'on', 'onto', 'outside', 'over', 'past', 'than', 'through', 'to',
  'toward', 'under', 'underneath', 'upon', 'with', 'within']
# changes "lock" to "a lock", "apple" to "an apple", etc.
# note that no article should be added to proper names;
# For now we'll just assume
# anything starting with upper case is proper.
# Do not add an article to plural nouns.
def add_article (name):
  # proper names are assumed to start with upper case and get no article
  if name and name[0].isupper():
    return name
  # simple plural test
  if len(name) > 1 and name[-1] == 's' and name[-2] != 's':
    return name
# check if there is already an article on the string
if name.split()[0] in articles:
return name
consonants = "bcdfghjklmnpqrstvwxyz"
vowels = "aeiou"
if name and (name[0] in vowels):
article = "an "
elif name and (name[0] in consonants):
article = "a "
else:
article = ""
return "%s%s" % (article, name)
def normalize_input(text):
superfluous = articles + ['and']
rest = []
for word in text.split():
word = "".join(l for l in word if l not in string.punctuation)
if word not in superfluous:
rest.append(word)
return ' '.join(rest)
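# for example: normalize_input("take the lamp, and go!") == "take lamp go"
# (punctuation is stripped per word, then articles and "and" are dropped)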
def proper_list_from_dict(d):
names = d.keys()
buf = []
name_count = len(names)
for (i,name) in enumerate(names):
if i != 0:
buf.append(", " if name_count > 2 else " ")
if i == name_count-1 and name_count > 1:
buf.append("and ")
buf.append(add_article(name))
return "".join(buf)
# Base is a place to put default implementations of methods that everything
# in the game should support (eg save/restore, how to respond to verbs etc)
class Base(object):
def __init__(self, name):
self.game = None
self.name = name
self.verbs = {}
self.phrases = {}
self.vars = {}
def flag(self, f):
if f in self.vars:
return self.vars[f]
else:
return False
def set_flag(self, f):
self.vars[f] = True
def unset_flag(self, f):
if f in self.vars:
del self.vars[f]
def var(self, var):
if var in self.vars:
return self.vars[var]
else:
return None
def set_var(self, var, val):
self.vars[var] = val
def unset_var(self, var):
if var in self.vars:
del self.vars[var]
def add_verb(self, v):
self.verbs[' '.join(v.name.split())] = v
v.bind_to(self)
return v
def get_verb(self, verb):
c = ' '.join(verb.split())
if c in self.verbs:
return self.verbs[c]
else:
return None
def add_phrase(self, phrase, f, requirements = []):
if isinstance(f, BaseVerb):
f.bind_to(self)
self.phrases[' '.join(phrase.split())] = (f, set(requirements))
def get_phrase(self, phrase, things_present):
phrase = phrase.strip()
things_present = set(things_present)
if not phrase in self.phrases:
return None
p = self.phrases[phrase]
if things_present.issuperset(p[1]):
return p[0]
return None
def output(self, text, message_type = 0):
self.game.output(text, message_type)
class BaseVerb(Base):
def __init__(self, function, name):
Base.__init__(self, name)
self.function = function
self.bound_to = None
def bind_to(self, obj):
self.bound_to = obj
def act(self, actor, noun, words):
result = True
if not self.function(actor, noun, None):
result = False
# treat 'verb noun1 and noun2..' as 'verb noun1' then 'verb noun2'
# treat 'verb noun1, noun2...' as 'verb noun1' then 'verb noun2'
# if any of the nouns work on the verb consider the command successful,
# even if some of them don't
if words:
for noun in words:
if self.function(actor, noun, None):
result = True
return result
class Die(BaseVerb):
def __init__(self, string, name = ""):
BaseVerb.__init__(self, None, name)
self.string = string
def act(self, actor, noun, words):
self.bound_to.game.output("%s %s %s" % (actor.name.capitalize(),
actor.isare, self.string), FEEDBACK)
self.bound_to.game.output("%s %s dead." % (actor.name.capitalize(),
actor.isare), FEEDBACK)
actor.terminate()
return True
class Say(BaseVerb):
def __init__(self, string, name = ""):
BaseVerb.__init__(self, None, name)
self.string = string
def act(self, actor, noun, words):
self.bound_to.game.output(self.string, FEEDBACK)
return True
class SayOnNoun(Say):
def __init__(self, string, noun, name = ""):
Say.__init__(self, string, name)
self.noun = noun
def act(self, actor, noun, words):
if self.noun != noun:
return False
self.bound_to.game.output(self.string, FEEDBACK)
return True
class SayOnSelf(SayOnNoun):
def __init__(self, string, name = ""):
SayOnNoun.__init__(self, string, None, name)
# Verb is used for passing in an unbound global function to the constructor
class Verb(BaseVerb):
def __init__(self, function, name = ""):
BaseVerb.__init__(self, function, name)
# explicitly pass in self to the unbound function
def act(self, actor, noun, words):
return self.function(self.bound_to, actor, noun, words)
def list_prefix(a, b): # is a a prefix of b
if not a:
return True
if not b:
return False
if a[0] != b[0]:
return False
return list_prefix(a[1:], b[1:])
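# for example: list_prefix(["go"], ["go", "north"]) is True, while
# list_prefix(["go", "south"], ["go", "north"]) is False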
def get_noun(words, things):
if words[0] in articles:
if len(words) > 1:
done = False
for t in things:
n = t.name.split()
if list_prefix(n, words[1:]):
noun = t.name
words = words[len(n)+1:]
done = True
break
if not done:
noun = words[1]
words = words[2:]
else:
done = False
for t in things:
n = t.name.split()
if list_prefix(n, words):
noun = t.name
words = words[len(n):]
done = True
break
if not done:
noun = words[0]
words = words[1:]
return (noun, words)
# A class to hold utility methods useful during game development, but
# not needed for normal game play. Import the advent_devtools module
# to get the full version of the tools.
class DevToolsBase(object):
def __init__(self):
self.game = None
def set_game(self, game):
self.game = game
def debug_output(self, text, level):
return
def start(self):
return
global _devtools
_devtools = DevToolsBase()
def register_devtools(devtools):
global _devtools
_devtools = devtools
# The Game: container for hero, locations, robots, animals etc.
class Game(Base):
def __init__(self, name="bwx-adventure"):
Base.__init__(self, name)
self.objects = {}
self.fresh_location = False
self.player = None
self.current_actor = None
self.location_list = []
self.robots = {}
self.animals = {}
global _devtools
self.devtools = _devtools
self.devtools.set_game(self)
self.http_output = False
self.http_text = ""
self.done = False
def set_name(self, name):
self.name = name
# add a bidirectional connection between points A and B
def add_connection(self, connection):
connection.game = self
if isinstance(connection.way_ab, (list, tuple)):
for way in connection.way_ab:
connection.point_a.add_exit(connection, way)
else:
connection.point_a.add_exit(connection, connection.way_ab)
# this is messy, need a better way to do this
reverse_connection = Connection(connection.name,
connection.point_b,
connection.point_a,
connection.way_ba,
connection.way_ab)
reverse_connection.game = self
if isinstance(connection.way_ba, (list, tuple)):
for way in connection.way_ba:
connection.point_b.add_exit(reverse_connection, way)
else:
connection.point_b.add_exit(reverse_connection, connection.way_ba)
return connection
def new_connection(self, *args):
return self.add_connection(Connection(*args))
def connect(self, place_a, place_b, way_ab, way_ba=None):
"""An easier-to use version of new_connection. It generates a
connection name automatically from the two location names and also
allows the second direction argument to be omitted. If the second
direction is omitted, it defaults to the opposite of the first
direction."""
name = place_a.name + "_to_" + place_b.name
return self.new_connection(name, place_a, place_b, way_ab, way_ba)
# add another location to the game
def add_location(self, location):
location.game = self
self.location_list.append(location)
return location
def new_location(self, *args):
return self.add_location(Location(*args))
# add an actor to the game
def add_actor(self, actor):
actor.game = self
if isinstance(actor, Player):
self.player = actor
if isinstance(actor, Animal):
self.animals[actor.name] = actor
if isinstance(actor, Robot):
self.robots[actor.name] = actor
return actor
def new_player(self, location):
self.player = Player()
self.add_actor(self.player)
self.player.set_location(location)
return self.player
def if_flag(self, flag, s_true, s_false, location = None):
return lambda loc: (s_false, s_true)[flag in (location or loc).vars]
def if_var(self, v, value, s_true, s_false, location = None):
return lambda loc: (s_false, s_true)[v in (location or loc).vars and (location or loc).vars[v] == value]
def output(self, text, message_type = 0):
if message_type != DEBUG:
self.current_actor.set_next_script_response(text)
self.print_output(text, message_type)
def style_text(self, text, message_type):
if False: # trinket.io
return text
if self.http_output:
if (message_type == FEEDBACK):
text = "<font color='red'>" + text + '</font>'
if (message_type == TITLE):
text = "<font color='blue'>" + text + '</font>'
if (message_type == DESCRIPTION):
pass
if (message_type == CONTENTS):
text = "<font color='green'>" + text + '</font>'
if (message_type == DEBUG):
text = "<font color='orange'>" + text + '</font>'
return text
if (message_type == FEEDBACK):
text = Colors.FG.pink + text + Colors.reset
if (message_type == TITLE):
text = Colors.FG.yellow + Colors.BG.blue + "\n" + text + Colors.reset
if (message_type == DESCRIPTION):
text = Colors.reset + text
if (message_type == CONTENTS):
text = Colors.FG.green + text + Colors.reset
if (message_type == DEBUG):
text = Colors.bold + Colors.FG.black + Colors.BG.orange + "\n" + text + Colors.reset
return text
# overload this for HTTP output
def print_output(self, text, message_type = 0):
if self.http_output:
self.http_text += self.style_text(text, message_type) + "\n"
else:
print self.style_text(text, message_type)
# checks to see if the inventory in the items list is in the user's inventory
def inventory_contains(self, items):
if set(items).issubset(set(self.player.inventory.values())):
return True
return False
def entering_location(self, location):
if (self.player.location == location and self.fresh_location):
return True
return False
def say(self, s):
return lambda game: game.output(s)
@staticmethod
def register(name, fn):
global registered_games
registered_games[name] = fn
@staticmethod
def get_registered_games():
global registered_games
return registered_games
def run_init(self, update_func = None):
# reset this every loop so we don't trigger things more than once
self.fresh_location = False
self.update_func = update_func
self.current_actor = self.player
self.devtools.start()
def init_scripts(self):
actor = self.current_actor
script_name = self.var('script_name')
if script_name != None:
self.devtools.debug_output("script_name: " + script_name, 3)
actor.act_load_file(actor, script_name, None)
if self.flag('check'):
actor.act_check_script(actor, script_name, None)
else:
actor.act_run_script(actor, script_name, None)
recording_name = self.var('start_recording')
if recording_name != None:
self.devtools.debug_output("recording_name: " + recording_name, 3)
actor.act_start_recording(actor, recording_name, None)
def run_room(self):
actor = self.current_actor
if actor == self.player or actor.flag('verbose'):
# if the actor moved, describe the room
if actor.check_if_moved():
self.output(actor.location.title(actor), TITLE)
# cache this as we need to know it for the query to entering_location()
self.fresh_location = actor.location.first_time
where = actor.location.describe(actor, actor.flag('verbose'))
if where:
self.output("")
self.output(where)
self.output("")
# See if the animals want to do anything
for animal in self.animals.values():
# first check that it is not dead
if animal.health >= 0:
animal.act_autonomously(actor.location)
def run_step(self, cmd = None):
self.http_text = ""
actor = self.current_actor
# has the developer supplied an update function?
if self.update_func:
self.update_func() # call the update function
# check if we're currently running a script
user_input = actor.get_next_script_command()
if user_input == None:
if cmd != None:
user_input = cmd
else:
# get input from the user
try:
self.output("") # add a blank line
user_input = raw_input("> ")
except EOFError:
return False
# see if the command is for a robot
if ':' in user_input:
robot_name, command = user_input.split(':', 1)
try:
actor = self.robots[robot_name]
except KeyError:
self.output("I don't know anybot named %s" % robot_name, FEEDBACK)
return True
else:
actor = self.player
command = user_input
self.current_actor = actor
# strip punctuation and superfluous words such as articles
command = normalize_input(command)
# see if we want to quit
if command == 'q' or command == 'quit':
return False
# give the input to the actor in case it's recording a script
if not actor.set_next_script_command(command):
return True
words = command.split()
if not words:
return True
# Following the Infocom convention, commands are decomposed into
# VERB(verb), OBJECT(noun), INDIRECT_OBJECT(indirect).
# For example: "hit zombie with hammer" = HIT(verb) ZOMBIE(noun) WITH HAMMER(indirect).
# (A standalone sketch of this decomposition appears after the Game class.)
# handle 'tell XXX ... "
target_name = ""
if words[0].lower() == 'tell' and len(words) > 2:
(target_name, words) = get_noun(words[1:], actor.location.actors.values())
things = actor.inventory.values() + \
actor.location.contents.values() + \
actor.location.exits.values() + \
list(actor.location.actors.values()) + \
[actor.location] + \
[actor]
for c in actor.location.contents.values():
if isinstance(c, Container) and c.is_open():
things += c.contents.values()
potential_verbs = []
for t in things:
potential_verbs += t.verbs.keys()
# extract the VERB
verb = None
potential_verbs.sort(key=lambda key : -len(key))
for v in potential_verbs:
vv = v.split()
if list_prefix(vv, words):
verb = v
words = words[len(vv):]
if not verb:
verb = words[0]
words = words[1:]
# extract the OBJECT
noun = None
if words:
(noun, words) = get_noun(words, things)
# extract INDIRECT (object) in phrase of the form VERB OBJECT PREPOSITION INDIRECT
indirect = None
if len(words) > 1 and words[0].lower() in prepositions:
(indirect, words) = get_noun(words[1:], things)
# first check phrases
for thing in things:
f = thing.get_phrase(command, things)
if f:
if isinstance(f, BaseVerb):
if f.act(actor, noun, words):
return True
else:
f(self, thing)
return True
# if we have an explicit target of the VERB, do that.
# e.g. "tell cat eat foo" -> cat.eat(cat, 'food', [])
if target_name:
for a in actor.location.actors.values():
if a.name != target_name:
continue
v = a.get_verb(verb)
if v:
if v.act(a, noun, words):
return True
self.output("Huh? %s %s?" % (target_name, verb), FEEDBACK)
return True
# if we have an INDIRECT object, try its handler first
# e.g. "hit cat with hammer" -> hammer.hit(actor, 'cat', [])
if indirect:
# try inventory and room contents
things = actor.inventory.values() + actor.location.contents.values()
for thing in things:
if indirect == thing.name:
v = thing.get_verb(verb)
if v:
if v.act(actor, noun, words):
return True
for a in actor.location.actors.values():
if indirect == a.name:
v = a.get_verb(verb)
if v:
if v.act(a, noun, words):
return True
# if we have a NOUN, try its handler next
if noun:
for thing in things:
if noun == thing.name:
v = thing.get_verb(verb)
if v:
if v.act(actor, None, words):
return True
for a in actor.location.actors.values():
if noun == a.name:
v = a.get_verb(verb)
if v:
if v.act(a, None, words):
return True
# location specific VERB
v = actor.location.get_verb(verb)
if v:
if v.act(actor, noun, words):
return True
# handle directional moves of the actor
if not noun:
if verb in directions:
actor.act_go1(actor, verb, None)
return True
# general actor VERB
v = actor.get_verb(verb)
if v:
if v.act(actor, noun, words):
return True
# not understood
self.output("Huh?", FEEDBACK)
return True
def run(self , update_func = None):
self.run_init(update_func)
self.run_room() # just set the stage before we do any scripting
self.init_scripts() # now we can set up scripts
while True:
if self.done:
return
self.run_room()
if self.player.health < 0:
self.output ("Better luck next time!")
break
if not self.run_step():
break
self.output("\ngoodbye!\n", FEEDBACK)
class Object(Base):
# name: short name of this thing
# description: full description
# fixed: is it stuck or can it be taken
def __init__(self, name, desc, fixed=False):
Base.__init__(self, name)
self.description = desc
self.fixed = fixed
def describe(self, observer):
if isinstance(self.description, str):
return self.description
else:
return self.description(self)
class Consumable(Object):
def __init__(self, name, desc, verb, replacement = None):
Object.__init__(self, name, desc)
self.verb = verb
verb.bind_to(self)
self.consume_term = "consume"
self.replacement = replacement
def consume(self, actor, noun, words):
if not actor.location.replace_object(actor, self.name, self.replacement):
return False
self.output("%s %s%s %s." % (actor.name.capitalize(), self.consume_term,
actor.verborverbs, self.description))
self.verb.act(actor, noun, words)
return True
class Food(Consumable):
def __init__(self, name, desc, verb, replacement = None):
Consumable.__init__(self, name, desc, verb, replacement)
self.consume_term = "eat"
class Drink(Consumable):
def __init__(self, name, desc, verb, replacement = None):
Consumable.__init__(self, name, desc, verb, replacement)
self.consume_term = "drink"
class Lockable(Base):
def __init__(self, name):
Base.__init__(self, name)
self.requirements = {}
def make_requirement(self, thing):
self.requirements[thing.name] = thing
self.lock()
def lock(self):
self.set_flag('locked')
def unlock(self):
self.unset_flag('locked')
def is_locked(self):
return self.flag('locked')
def try_unlock(self, actor):
# first see if the actor is whitelisted
if isinstance(self, Location) and actor.allowed_locs:
if not self in actor.allowed_locs:
return False
# now check if we're locked
if not self.flag('locked'):
return True
# check if there are any implicit requirements for this object
if len(self.requirements) == 0:
self.output("It's locked!")
return False
# check to see if the requirements are in the inventory
if set(self.requirements).issubset(set(actor.inventory)):
self.output("You use the %s, the %s unlocks" % \
(proper_list_from_dict(self.requirements),
self.name), FEEDBACK)
self.unlock()
return True
self.output("It's locked! You will need %s." % \
proper_list_from_dict(self.requirements), FEEDBACK)
return False
class Container(Lockable):
def __init__(self, name, description):
Lockable.__init__(self, name)
self.description = description
self.first_time = True
self.contents = {}
self.close()
def add_object(self, obj):
self.contents[obj.name] = obj
obj.game = self.game
return obj
def new_object(self, name, desc, fixed=False):
return self.add_object(Object(name, desc, fixed))
def describe(self, observer, force=False):
desc = "" # start with a blank string
# add the description
if self.first_time or force:
desc += self.description
self.first_time = False
else:
desc += add_article(self.name)
if not self.is_open():
desc += " The %s is closed." % self.name
else:
desc += " The %s is open." % self.name
# it's open so describe the contents
desc += self.describe_contents()
return desc
def describe_contents(self):
desc = ""
if not self.contents:
return desc
# try to make a readable list of the things
contents_description = proper_list_from_dict(self.contents)
# is it just one thing?
if len(self.contents) == 1:
desc += self.game.style_text("\nThere is %s in the %s." % \
(contents_description, self.name), CONTENTS)
else:
desc += self.game.style_text("\nThere are a few things in the %s: %s." % \
(self.name, contents_description), CONTENTS)
return desc
def open(self, actor):
if self.is_open():
self.output("The %s is already open." % self.name)
return True
if not self.try_unlock(actor):
return False
self.output("The %s opens." % self.name, FEEDBACK)
self.output(self.describe_contents(), CONTENTS)
self.unset_flag('closed')
def close(self):
self.set_flag('closed')
def is_open(self):
return not self.flag('closed')
# A "location" is a place in the game.
class Location(Lockable):
# name: short name of this location
# description: full description
# contents: things that are in a location
# exits: ways to get out of a location
# first_time: is it the first time here?
# actors: other actors in the location
def __init__(self, name, description, inonat="in"):
Lockable.__init__(self, name)
self.description = description
self.inonat = inonat
self.contents = {}
self.exits = {}
self.first_time = True
self.actors = {}
def title(self, actor):
preamble = ""
if (actor != self.game.player):
preamble = "%s %s %s the " % (actor.name.capitalize(), actor.isare, self.inonat)
return " --=( %s%s )=-- " % (preamble, self.name)
def add_object(self, obj):
self.contents[obj.name] = obj
obj.game = self.game
return obj
def add_actor(self, actor):
actor.set_location(self)
return actor
def new_object(self, name, desc, fixed=False):
return self.add_object(Object(name, desc, fixed))
def description_str(self, d):
if isinstance(d, (list, tuple)):
desc = ""
for dd in d:
desc += self.description_str(dd)
return desc
else:
if isinstance(d, str):
return self.game.style_text(d, DESCRIPTION)
else:
return self.description_str(d(self))
def describe(self, observer, force=False):
desc = "" # start with a blank string
# add the description
if self.first_time or force:
desc += self.description_str(self.description)
self.first_time = False
if self.contents:
# try to make a readable list of the things
contents_description = proper_list_from_dict(self.contents)
# is it just one thing?
if len(self.contents) == 1:
desc += self.game.style_text("\nThere is %s here." % \
contents_description, CONTENTS)
else:
desc += self.game.style_text("\nThere are a few things here: %s." % \
contents_description, CONTENTS)
for k in sorted(self.contents.keys()):
c = self.contents[k]
if isinstance(c, Container) and c.is_open():
desc += c.describe_contents()
if self.actors:
for k in sorted(self.actors.keys()):
a = self.actors[k]
if a.health < 0:
deadornot = "lying here dead as a doornail"
else:
deadornot = "here"
if a != observer:
desc += self.game.style_text("\n" + add_article(a.describe(a)).capitalize() + \
" " + a.isare + " " + deadornot + ".", CONTENTS)
return desc
def find_object(self, actor, name):
if not name:
return None
if self.contents:
if name in self.contents.keys():
return self.contents
for c in self.contents.values():
if isinstance(c, Container) and c.is_open() and name in c.contents.keys():
return c.contents
if name in actor.inventory:
return actor.inventory
return None
def replace_object(self, actor, old_name, new_obj):
d = self.find_object(actor, old_name)
if d == None:
return None
if not old_name in d.keys():
return None
old_obj = d[old_name]
del d[old_name]
if new_obj:
d[new_obj.name] = new_obj
return old_obj
def add_exit(self, con, way):
self.exits[ way ] = con
def go(self, actor, way):
if not way in self.exits:
return None
c = self.exits[ way ]
# first check if the connection is locked
if not c.try_unlock(actor):
return None
# check if the room on the other side is locked
if not c.point_b.try_unlock(actor):
return None
return c.point_b
def debug(self):
for key in self.exits:
print "exit: %s" % key
# A "connection" connects point A to point B. Connections are
# always described from the point of view of point A.
class Connection(Lockable):
# name
# point_a
# point_b
def __init__(self, name, pa, pb, way_ab, way_ba=None):
Lockable.__init__(self, name)
# way_ba defaults to the opposite of way_ab
if way_ba is None:
way_ba = ([opposite_direction(way) for way in way_ab]
if isinstance(way_ab, (list, tuple))
else opposite_direction(way_ab))
self.point_a = pa
self.point_b = pb
self.way_ab = way_ab
self.way_ba = way_ba
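
# A small usage sketch for Location/Connection (illustrative only; the
# "north"/"south" tokens are assumptions about what this module's
# `directions`/`opposite_direction` helpers accept). One Connection
# serves both rooms because way_ba defaults to the opposite of way_ab.
def _demo_connect_rooms():
    kitchen = Location("kitchen", "A tidy kitchen.")
    hall = Location("hall", "A long hall.")
    door = Connection("door", kitchen, hall, "north")  # way_ba -> "south"
    kitchen.add_exit(door, "north")
    hall.add_exit(door, "south")
    return kitchen, hall, door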
# An actor in the game
class Actor(Base):
# location
# inventory
# moved
# verbs
def __init__(self, name):
Base.__init__(self, name)
self.health = 0
self.location = None
self.allowed_locs = None
self.inventory = {}
self.cap_name = name.capitalize()
self.player = False
self.isare = "is"
self.verborverbs = "s"
self.trades = {}
# associate each of the known actions with functions
self.add_verb(BaseVerb(self.act_take1, 'take'))
self.add_verb(BaseVerb(self.act_take1, 'get'))
self.add_verb(BaseVerb(self.act_drop1, 'drop'))
self.add_verb(BaseVerb(self.act_give, 'give'))
self.add_verb(BaseVerb(self.act_inventory, 'inventory'))
self.add_verb(BaseVerb(self.act_inventory, 'i'))
self.add_verb(BaseVerb(self.act_look, 'look'))
self.add_verb(BaseVerb(self.act_examine1, 'examine'))
self.add_verb(BaseVerb(self.act_examine1, 'look at'))
self.add_verb(BaseVerb(self.act_look, 'l'))
self.add_verb(BaseVerb(self.act_go1, 'go'))
self.add_verb(BaseVerb(self.act_eat, 'eat'))
self.add_verb(BaseVerb(self.act_drink, 'drink'))
self.add_verb(BaseVerb(self.act_open, 'open'))
self.add_verb(BaseVerb(self.act_list_verbs, 'verbs'))
self.add_verb(BaseVerb(self.act_list_verbs, 'commands'))
# terminate
def terminate(self):
self.health = -1
# describe ourselves
def describe(self, observer):
return self.name
# establish where we are "now"
def set_location(self, loc):
self.game = loc.game # XXX this is a hack; do this better
if not self.player and self.location:
del self.location.actors[self.name]
self.location = loc
self.moved = True
if not self.player:
self.location.actors[self.name] = self
# confine this actor to a list of locations
def set_allowed_locations(self, locs):
self.allowed_locs = locs
# add something to our inventory
def add_to_inventory(self, thing):
self.inventory[thing.name] = thing
return thing
# remove something from our inventory
def remove_from_inventory(self, thing):
return self.inventory.pop(thing.name, None)
# set up a trade
def add_trade(self, received_obj, returned_obj, verb):
verb.bind_to(self)
self.trades[received_obj] = (returned_obj, verb)
# receive a given object
def receive_item(self, giver, thing):
self.add_to_inventory(thing)
if thing in self.trades.keys():
(obj, verb) = self.trades[thing]
verb.act(giver, thing.name, None)
self.location.contents[obj.name] = obj
self.remove_from_inventory(obj)
# give something to another actor
def act_give(self, actor, noun, words):
d = actor.location.find_object(actor, noun)
if not d:
return False
thing = d[noun]
receiver = self
if words:
for w in words:
if w in self.location.actors.keys():
receiver = self.location.actors[w]
break
if not receiver:
return False
receiver.receive_item(actor, thing)
del d[thing.name]
return True
# move a thing from the current location to our inventory
def act_take1(self, actor, noun, words):
if not noun:
return False
t = self.location.contents.pop(noun, None)
if not t:
for c in self.location.contents.values():
if isinstance(c, Container) and c.is_open():
t = c.contents.pop(noun, None)
if t:
self.inventory[noun] = t
self.output("%s take%s the %s." % (actor.cap_name,
actor.verborverbs,
t.name))
return True
else:
self.output("%s can't take the %s." % (actor.cap_name, noun))
return False
# move a thing from our inventory to the current location
def act_drop1(self, actor, noun, words):
if not noun:
return False
t = self.inventory.pop(noun, None)
if t:
self.location.contents[noun] = t
return True
else:
self.output("%s %s not carrying %s." % (self.cap_name, self.isare, add_article(noun)), FEEDBACK)
return False
def act_look(self, actor, noun, words):
self.output(self.location.describe(actor, True))
return True
# examine a thing in our inventory or location
def act_examine1(self, actor, noun, words):
if not noun:
return False
n = None
if noun in self.inventory:
n = self.inventory[noun]
if noun in self.location.contents:
n = self.location.contents[noun]
for c in self.location.contents.values():
if isinstance(c, Container) and c.is_open():
if noun in c.contents:
n = c.contents[noun]
if not n:
return False
self.output("You see " + n.describe(self) + ".")
return True
# list the things we're carrying
def act_inventory(self, actor, noun, words):
msg = '%s %s carrying ' % (self.cap_name, self.isare)
if self.inventory.keys():
msg += proper_list_from_dict(self.inventory)
else:
msg += 'nothing'
msg += '.'
self.output(msg, FEEDBACK)
return True
# check/clear moved status
def check_if_moved(self):
status = self.moved
self.moved = False
return status
# try to go in a given direction
def act_go1(self, actor, noun, words):
if not noun in directions:
self.output("Don't know how to go '%s'." % noun, FEEDBACK)
return False
loc = self.location.go(actor, directions[noun])
if loc == None:
self.output("Bonk! %s can't seem to go that way." % self.name, FEEDBACK)
return False
else:
# update where we are
self.set_location(loc)
return True
# eat something
def act_eat(self, actor, noun, words):
d = actor.location.find_object(actor, noun)
if not d:
return False
t = d[noun]
if isinstance(t, Food):
t.consume(actor, noun, words)
else:
self.output("%s can't eat the %s." % (actor.name.capitalize(), noun))
return True
# drink something
def act_drink(self, actor, noun, words):
d = actor.location.find_object(actor, noun)
if not d:
return False
t = d[noun]
if isinstance(t, Drink):
t.consume(actor, noun, words)
else:
self.output("%s can't drink the %s." % (actor.name.capitalize(), noun))
return True
# open a Container
def act_open(self, actor, noun, words):
if not noun:
return False
if not noun in actor.location.contents:
return False
t = self.location.contents[noun]
if isinstance(t, Container):
t.open(actor)
else:
self.output("%s can't open the %s." % (actor.name.capitalize(), noun))
return True
def act_list_verbs(self, actor, noun, words):
things = (actor.inventory.values() + actor.location.contents.values() +
list(actor.location.actors.values()) + [actor.location] + [actor])
result = set()
for t in things:
for v in t.verbs.keys():
if len(v.split()) > 1:
result.add('"' + v + '"')
else:
result.add(v)
for v in t.phrases.keys():
if len(v.split()) > 1:
result.add('"' + v + '"')
else:
result.add(v)
self.output(textwrap.fill(" ".join(sorted(result))), FEEDBACK)
return True
# support for scriptable actors, override these to implement
def get_next_script_command(self):
return None
def set_next_script_command(self, line):
return True
def set_next_script_response(self, response):
return True
# Scripts are sequences of instructions for Robots to execute
class Script(Base):
def __init__(self, name, lines=None, game=None):
Base.__init__(self, name)
self.game = game
self.commands = list()
self.responses = list()
self.current_response = None
self.check_responses = False
self.mismatched_responses = -1
self.current_command = -1
self.recording = False
self.running = False
self.start_time = None
self.finish_time = None
self.response_errors = 0
self.parse_lines(lines)
def parse_lines(self, lines):
if lines != None:
self.start_recording()
for line in lines.split("\n"):
if line.startswith("> "):
# save the new command, and any accumulated response from previous
self.set_next_command(line.strip("> \n"))
elif self.current_response != None:
# accumulate response lines until the next command
self.current_response += line + "\n"
else:
self.current_response = line + "\n"
# if we didn't manage to get "end" go ahead and stop things brute force
if self.recording:
self.stop_recording()
def start_recording(self):
assert not self.running
assert not self.recording
self.current_response = None
self.responses = list()
self.commands = list()
self.recording = True
def stop_recording(self):
assert self.recording
assert not self.running
self.current_response = None
self.recording = False
def start_running(self):
assert not self.running
assert not self.recording
self.current_response = None
self.check_responses = False
self.running = True
self.current_command = 0
self.mismatched_responses = 0
self.start_time = time.time()
def start_checking(self):
assert self.running
assert not self.recording
print "check_responses on"
self.check_responses = True
self.current_response = ""
def stop_running(self):
assert self.running
assert not self.recording
self.stop_time = time.time()
self.game.devtools.debug_output(
"script \"%s\":\n\tcommands: %d\n\tmismatched responses: %d\n\truntime: %f %s\n" % (
self.name, self.current_command, self.mismatched_responses,
(self.stop_time - self.start_time) * 1000, "milliseconds"), 0)
self.current_response = None
self.check_responses = False
self.running = False
self.current_command = -1
if self.mismatched_responses != 0:
assert(not self.game.flag('fail_on_mismatch'))
def get_next_command(self):
# if we're running a checker, examine the current response vs what's expected
if self.current_command >= 1:
self.check_response_match(self.current_response,
self.responses[self.current_command - 1])
self.current_response = ""
if not self.commands:
return None
while True:
line = self.commands[self.current_command].strip()
self.current_command += 1
# support comments and/or blank lines within the script
line = line.split("#")[0]
if line != "":
break
if line == "end":
self.stop_running()
return None
return line
def check_response_match(self, response, expected_response):
if self.check_responses:
match = "match"
level = 2
if response != expected_response:
match = "mismatch"
level = 0
self.mismatched_responses += 1
self.game.devtools.debug_output(
"response %s:\n>>>\n%s\n===\n%s\n<<<\n" % (match,
response,
expected_response),
level)
def set_next_command(self, command):
if not self.recording:
return True
# save the accumulated response from the previous command
if self.current_response != None:
# append the response, trimming the final newline that preceded this command
self.responses.append(self.current_response[:-1])
self.current_response = ""
# save the current command
self.commands.append(command)
if command.strip() == "end":
self.stop_recording()
return False
self.current_command += 1
return True
def set_next_response(self, response):
if self.current_response != None:
# strip out color changing chars which may be in there
control_chars = False
for c in response:
if c == '\33':
control_chars = True
if control_chars:
if c == 'm':
control_chars = False
continue
self.current_response += c
self.current_response += "\n"
def print_script(self):
i = 0
for command in self.commands:
print "> " + command
if command == "end":
break
print self.responses[i]
i = i + 1
def save_file(self):
f = open(self.name + ".script", "w")
i = 0
for command in self.commands:
f.write('> ' + command + '\n')
if command != "end":
response_lines = self.responses[i].split('\n')
for line in response_lines:
f.write(line + '\n')
i = i + 1
f.close()
def load_file(self):
f = open(self.name + ".script", "r")
self.parse_lines(f.read())
f.close()
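
# A minimal sketch of building a Script from a recorded transcript in the
# "> command" / response format that parse_lines expects (illustrative;
# normally Robots record these during play or load them from .script files).
def _demo_script():
    s = Script("demo", "> look\nYou see a room.\n> end\n")
    # s.commands  -> ['look', 'end']
    # s.responses -> ['You see a room.']
    return s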
# Robots are actors which accept commands to perform actions.
# They can also record and run scripts.
class Robot(Actor):
def __init__(self, name):
#super(Robot, self).__init__(name )
Actor.__init__(self, name)
self.name = name
self.scripts = {}
self.current_script = None
self.script_think_time = 0
self.add_verb(BaseVerb(self.act_start_recording, 'record'))
self.add_verb(BaseVerb(self.act_run_script, 'run'))
self.add_verb(BaseVerb(self.act_check_script, 'check'))
self.add_verb(BaseVerb(self.act_print_script, 'print'))
self.add_verb(BaseVerb(self.act_save_file, 'save'))
self.add_verb(BaseVerb(self.act_load_file, 'load'))
self.add_verb(BaseVerb(self.set_think_time, 'think'))
self.add_verb(BaseVerb(self.toggle_verbosity, 'verbose'))
self.leader = None
self.add_verb(BaseVerb(self.act_follow, 'heel'))
self.add_verb(BaseVerb(self.act_follow, 'follow'))
self.add_verb(BaseVerb(self.act_stay, 'stay'))
def act_follow(self, actor, noun, words=None):
if noun == None or noun == "" or noun == "me":
self.leader = self.game.player
elif noun in self.game.robots:
self.leader = self.game.robots[noun]
elif noun in self.game.animals:
    self.leader = self.game.animals[noun]
else:
    self.output("%s doesn't know %s." % (self.name, noun), FEEDBACK)
    return True
self.output("%s obediently begins following %s" % \
            (self.name, self.leader.name), FEEDBACK)
return True
def act_stay(self, actor, noun, words=None):
if self.leader:
self.output("%s obediently stops following %s" % \
(self.name, self.leader.name) , FEEDBACK)
self.leader = None
return True
def toggle_verbosity(self, actor, noun, words):
if self.flag('verbose'):
self.unset_flag('verbose')
self.output("minimal verbosity")
else:
self.set_flag('verbose')
self.output("maximum verbosity")
return True
def parse_script_name(self, noun):
if not noun:
script_name = "default"
else:
script_name = noun
return script_name
def act_start_recording(self, actor, noun, words):
script_name = self.parse_script_name(noun)
self.set_flag('verbose')
self.game.devtools.debug_output("start recording %s" % script_name, 2)
script = Script(script_name, None, self.game)
self.scripts[script_name] = script
script.start_recording()
self.current_script = script
return True
def act_run_script(self, actor, noun, words):
    if self.current_script:
        print "You must stop \"%s\" first." % (self.current_script.name)
        return True
    script_name = self.parse_script_name(noun)
if not script_name in self.scripts:
print "%s can't find script \"%s\" in its memory." % (self.name,
script_name)
return True
self.game.devtools.debug_output("start running %s" % script_name, 2)
script = self.scripts[script_name]
self.current_script = script
script.start_running()
return True
def act_check_script(self, actor, noun, words):
if self.act_run_script(actor, noun, words) and self.current_script:
self.set_flag('verbose')
self.current_script.start_checking()
self.game.devtools.debug_output("start checking", 2)
return True
return False
def act_print_script(self, actor, noun, words):
script_name = self.parse_script_name(noun)
if not script_name in self.scripts:
print "%s can't find script \"%s\" in its memory." % (self.name,
script_name)
return True
print "----------------------8<-------------------------\n"
print "# Paste the following into your game code in order"
print "# to be able to run this script in the game:"
print "%s_script = Script(\"%s\"," % (script_name, script_name)
print "\"\"\""
self.scripts[script_name].print_script()
print "\"\"\")"
print "\n# Then add the script to a player, or a robot"
print "# with code like the following:"
print "player.add_script(%s_script)" % script_name
print "\n# Now you can run the script from within the game"
print "# by typing \"run %s\"" % script_name
print "\n---------------------->8-------------------------"
return True
def act_save_file(self, actor, noun, words):
script_name = self.parse_script_name(noun)
if not script_name in self.scripts:
print "%s can't find script \"%s\" in its memory." % (self.name,
script_name)
return True
self.scripts[script_name].save_file()
return True
def act_load_file(self, actor, noun, words):
script_name = self.parse_script_name(noun)
self.scripts[script_name] = Script(script_name, None, self.game)
self.scripts[script_name].load_file()
return True
def add_script(self, script):
script.game = self.game
self.scripts[script.name] = script
def set_think_time(self, actor, noun, words):
if noun:
t = float(noun)
if t >= 0 and t <= 60:
self.script_think_time = t
return True
print "\"think\" requires a number of seconds (0.0000-60.0000) as an argument"
return True
def get_next_script_command(self):
if not self.current_script or not self.current_script.running:
return None
line = self.current_script.get_next_command()
if not line:
print "%s %s done running script \"%s\"." % (self.name,
self.isare,
self.current_script.name)
self.current_script = None
return None
if self.script_think_time > 0:
time.sleep(self.script_think_time)
line = self.name + ": " + line
print "> %s" % line
return line
def set_next_script_command(self, command):
if not self.current_script:
return True
if not self.current_script.set_next_command(command):
print "%s finished recording script \"%s\"." % (self.name,
self.current_script.name)
self.current_script = None
return False
return True
def set_next_script_response(self, response):
if not self.current_script:
return True
self.current_script.set_next_response(response)
return True
# Player derives from Robot so that we can record and run scripts as the player
class Player(Robot):
def __init__(self):
# super(Player, self).__init__("you")
Robot.__init__(self, "you")
self.player = True
self.isare = "are"
self.verborverbs = ""
# Animals are actors which may act autonomously each turn
class Animal(Actor):
def __init__(self, name):
#super(Animal, self).__init__(name )
Actor.__init__(self, name)
def act_autonomously(self, observer_loc):
self.random_move(observer_loc)
def random_move(self, observer_loc):
if random.random() > 0.2: # only move 1 in 5 times
return
# filter out any locked or forbidden locations
exits = list()
for (d, c) in self.location.exits.items():
if c.is_locked():
continue
if c.point_b.is_locked():
continue
if self.allowed_locs and not c.point_b in self.allowed_locs:
continue
exits.append((d ,c))
if not exits:
return
(exitDir, exitConn) = random.choice(exits)
quiet = True
if self.game.current_actor == self.game.player:
quiet = False
if self.game.current_actor.flag('verbose'):
quiet = False
if not quiet and self.location == observer_loc:
self.output("%s leaves the %s, heading %s." % \
(add_article(self.name).capitalize(),
observer_loc.name,
direction_names[exitDir].lower()), FEEDBACK)
self.act_go1(self, direction_names[exitDir], None)
if not quiet and self.location == observer_loc:
self.output("%s enters the %s via the %s." % (add_article(self.name).capitalize(),
observer_loc.name,
exitConn.name), FEEDBACK)
# A pet is an actor with free will (Animal) that you can also command to do things (Robot)
class Pet(Robot, Animal):
def __init__(self, name):
#super(Pet, self).__init__(name )
Robot.__init__(self, name)
def act_autonomously(self, observer_loc):
if self.leader:
self.set_location(self.leader.location)
else:
self.random_move(observer_loc)
class Share(object):
def __init__(self):
self.hostname = None
self.port = None
self.username = None
self.password = None
self.GLOBAL = 1
self.ADVENTURE = 2
self.PLAYER = 3
self.SESSION = 4
self.adventure = ""
self.player = ""
self.session = ""
self.key_fns = {
self.GLOBAL: self.global_key,
self.ADVENTURE: self.adventure_key,
self.PLAYER: self.player_key,
self.SESSION: self.session_key,
}
try:
f = open("share.info", "r")
self.hostname = f.readline().strip()
self.port = f.readline().strip()
self.username = f.readline().strip()
self.password = f.readline().strip()
except IOError:
pass
def set_host(self, hostname, port, username, password):
self.hostname = hostname
self.port = port
self.username = username
self.password = password
def set_adventure(self, adventure):
self.adventure = adventure
def set_player(self, player):
self.player = player
def set_session(self, session):
self.session = session
def is_available(self):
return self.hostname != None
def start(self):
if not self.is_available():
return
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
webdis_url = "http://%s:%s/" % (self.hostname, self.port)
password_mgr.add_password(None, webdis_url, self.username, self.password)
self.opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr))
def global_key(self, key):
return 'g.' + key
def adventure_key(self, key):
return 'a.' + self.adventure + '.' + key
def player_key(self, key):
return 'p.' + self.adventure + '.' + self.player + '.' + key
def session_key(self, key):
return 's.' + self.adventure + '.' + self.player + '.' + self.session + '.' + key
def _do(self, domain, cmd, key):
assert(domain in self.key_fns)
if not self.is_available():
return None
k = self.key_fns[domain](key)
net_f = self.opener.open('http://%s:%s/%s/%s.raw' % (self.hostname, self.port, cmd, k))
v = net_f.read().split('\n')
if len(v) > 1:
return v[1].strip()
return None
def _do1(self, domain, cmd, key, arg1):
assert(domain in self.key_fns)
if not self.is_available():
return None
k = self.key_fns[domain](key)
net_f = self.opener.open('http://%s:%s/%s/%s/%s.raw' % (self.hostname, self.port, cmd, k, arg1))
v = net_f.read().split('\n')
if len(v) > 1:
return v[1] # should be ""
return None
def _do2(self, domain, cmd, key, arg1, arg2):
assert(domain in self.key_fns)
if not self.is_available():
return None
k = self.key_fns[domain](key)
net_f = self.opener.open('http://%s:%s/%s/%s/%s/%s.raw' % (self.hostname, self.port, cmd, k, arg1, arg2))
v = net_f.read().split('\n')
if len(v) > 1:
return v[1] # should be ""
return None
# return a list
def _do2l(self, domain, cmd, key, arg1, arg2):
assert(domain in self.key_fns)
if not self.is_available():
return []
k = self.key_fns[domain](key)
net_f = self.opener.open('http://%s:%s/%s/%s/%s/%s.raw' % (self.hostname, self.port, cmd, k, arg1, arg2))
v = net_f.read().split('\n')
return v
# return a list
def _do3l(self, domain, cmd, key, arg1, arg2, arg3):
assert(domain in self.key_fns)
if not self.is_available():
return []
k = self.key_fns[domain](key)
net_f = self.opener.open('http://%s:%s/%s/%s/%s/%s/%s.raw' % (self.hostname, self.port, cmd, k, arg1, arg2, arg3))
v = net_f.read().split('\n')
return v
def delete(self, domain, key):
return self._do(domain, "DEL", key)
def get(self, domain, key):
return self._do(domain, "GET", key)
def put(self, domain, key, value):
return self._do1(domain, "SET", key, value)
def increment(self, domain, key):
return self._do(domain, "INCR", key)
def decrement(self, domain, key):
return self._do(domain, "DECR", key)
def push(self, domain, key, value):
return self._do1(domain, "LPUSH", key, value)
def pop(self, domain, key):
return self._do(domain, "LPOP", key)
def zadd(self, domain, key, value, score):
return self._do2(domain, "ZADD", key, score, value)
def zscore(self, domain, key, value):
    return self._do1(domain, "ZSCORE", key, value)
def zdelete_over_rank(self, domain, key, rank):
return self._do2(domain, "ZREMRANGEBYRANK", key, rank, "-1")
def ztop(self, domain, key, rank):
v = self._do2l(domain, "ZREVRANGE", key, "0", rank)
v = [x.strip() for x in v[1:]]
result = []
for x in xrange(0, len(v)):
if x % 2 == 1:
result.append(v[x])
return result
def ztop_with_scores(self, domain, key, rank):
v = self._do3l(domain, "ZREVRANGE", key, "0", rank, "WITHSCORES")
v = [x.strip() for x in v[1:]]
result = []
for x in xrange(0, len(v)):
if x % 4 == 1:
p = [v[x]]
elif x % 4 == 3:
p.append(v[x])
result.append(p)
return result
def zdelete(self, domain, key, value):
    return self._do1(domain, "ZREM", key, value)
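
# A quick sketch of the key namespacing Share uses (illustrative; without a
# share.info file the network calls are no-ops because is_available() is False).
def _demo_share_keys():
    share = Share()
    share.set_adventure("cave")   # 'cave' is a made-up adventure name
    share.set_player("alice")     # 'alice' is a made-up player name
    # e.g. share.put(share.PLAYER, "score", 42) would SET this key via webdis
    return share.player_key("score")  # -> 'p.cave.alice.score'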
|
from typing import Iterable
def gen_content_block(s: str) -> str:
return f"""<div>{s}</div>"""
def report(content: Iterable[str]) -> str:
result = f"""
<!DOCTYPE html>
<html>
<body>
{ "".join(gen_content_block(s) for s in content) }
</body>
</html>
"""
return result
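
if __name__ == "__main__":
    # Example usage: each string becomes one <div> block in the page body.
    print(report(["hello", "world"]))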
|
"""
Module containing route for zookeeper journal
"""
import http.client
import json
import logging
from flask import request, current_app, make_response
from journal.main import MAIN
# W0611: Unused import
# This module import is needed for journal_obj.write
from journal import mjournal # pylint: disable=W0611
from journal.main import errors
_LOG = logging.getLogger(__name__)
@MAIN.route('/<string:txid>/<string:step>', methods=['POST'])
def journalview(txid, step):
"""
Handler for journal write
"""
payload = request.get_json()
journal_obj = current_app.config['journal']
rc = journal_obj.write(txid, step, payload)
if rc == 0:
return('', http.client.CREATED)
else:
_LOG.critical('Unsaved journal entry %s:%s', txid, step)
raise errors.APIError(
'{0} -- {1}##{2}'.format(
'Unsaved Journal entry',
txid,
step),
status_code=http.client.INTERNAL_SERVER_ERROR)
@MAIN.route('/status/<string:txid>', methods=['GET'])
def journalstatus(txid):
"""
Handler for journal status
"""
journal_obj = current_app.config['journal']
(resp, status_code) = journal_obj.status(txid)
if resp is not None:
resp = json.dumps(resp)
output = make_response(resp, status_code)
output.headers['Content-Type'] = 'application/json'
return output
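
# A hedged test-client sketch (an assumption, not part of this module): it
# presumes MAIN is registered at the root of a Flask app whose config carries
# a 'journal' object exposing write()/status() as used above.
def _demo_journal_write(app, payload):
    with app.test_client() as client:
        # POST /<txid>/<step> with a JSON body reaches journalview()
        return client.post('/tx-1/step-1', json=payload)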
|
import random
import numpy as np
import torch
import torch.distributed as dist
from torch.utils.data.sampler import Sampler
class SubsetSampler(Sampler):
"""Samples elements sequentially or randomly from a given list of indices, without replacement.
Args:
indices (list): a list of indices
"""
def __init__(self, indices, shuffle=True):
self.indices = indices
self.shuffle = shuffle
def __iter__(self):
return (self.indices[i] for i in torch.randperm(len(self.indices))) if self.shuffle else iter(self.indices)
def __len__(self):
return len(self.indices)
class DistributedNoExtraSampler(Sampler):
r"""Very similar to regular DistributedSampler from pytorch
but does not add extra samples if the dataset is not divisible by replicas
"""
def __init__(self, dataset, num_replicas = None,
rank = None, shuffle= True,
seed= 0):
if num_replicas is None:
if not dist.is_available():
raise RuntimeError("Requires distributed package to be available")
num_replicas = dist.get_world_size()
if rank is None:
if not dist.is_available():
raise RuntimeError("Requires distributed package to be available")
rank = dist.get_rank()
if rank >= num_replicas or rank < 0:
raise ValueError(
"Invalid rank {}, rank should be in the interval"
" [0, {}]".format(rank, num_replicas - 1))
self.dataset = dataset
self.num_replicas = num_replicas
self.rank = rank
self.epoch = 0
self.total_size = len(self.dataset)
indices = list(range(self.total_size))
indices = indices[self.rank:self.total_size:self.num_replicas]
self.num_samples = len(indices)
self.shuffle = shuffle
self.seed = seed
def __iter__(self):
if self.shuffle:
# deterministically shuffle based on epoch and seed
g = torch.Generator()
g.manual_seed(self.seed + self.epoch)
indices = torch.randperm(len(self.dataset), generator=g).tolist()
else:
indices = list(range(len(self.dataset)))
# subsample
indices = indices[self.rank:self.total_size:self.num_replicas]
assert len(indices) == self.num_samples
return iter(indices)
def __len__(self):
return self.num_samples
def set_epoch(self, epoch):
r"""
Sets the epoch for this sampler. When :attr:`shuffle=True`, this ensures all replicas
use a different random ordering for each epoch. Otherwise, the next iteration of this
sampler will yield the same ordering.
Args:
epoch (int): Epoch number.
"""
self.epoch = epoch
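
# A quick check of the no-padding behavior (illustrative): with 10 samples
# and 3 replicas, the shards hold 4, 3 and 3 indices instead of being padded
# up to a common length of 12.
def _demo_no_extra_sampler():
    samplers = [DistributedNoExtraSampler(range(10), num_replicas=3,
                                          rank=r, shuffle=False)
                for r in range(3)]
    return [len(s) for s in samplers]  # -> [4, 3, 3]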
class MultilabelBalancedRandomSampler(Sampler):
"""
From: https://github.com/issamemari/pytorch-multilabel-balanced-sampler/blob/master/sampler.py
MultilabelBalancedRandomSampler: Given a multilabel dataset of length n_samples and
number of classes n_classes, samples from the data with equal probability per class
effectively oversampling minority classes and undersampling majority classes at the
same time. Note that using this sampler does not guarantee that the distribution of
classes in the output samples will be uniform, since the dataset is multilabel and
sampling is based on a single class. This does however guarantee that all classes
will have at least batch_size / n_classes samples as batch_size approaches infinity
"""
def __init__(self, labels, indices=None, class_choice="least_sampled"):
"""
Parameters:
-----------
labels: a multi-hot encoding numpy array of shape (n_samples, n_classes)
indices: an arbitrary-length 1-dimensional numpy array representing a list
of indices to sample only from
class_choice: a string indicating how class will be selected for every
sample:
"least_sampled": class with the least number of sampled labels so far
"random": class is chosen uniformly at random
"cycle": the sampler cycles through the classes sequentially
"""
self.labels = labels
self.indices = indices
if self.indices is None:
self.indices = range(len(labels))
self.num_classes = self.labels.shape[1]
# List of lists of example indices per class
self.class_indices = []
for class_ in range(self.num_classes):
lst = np.where(self.labels[:, class_] == 1)[0]
lst = lst[np.isin(lst, self.indices)]
self.class_indices.append(lst)
self.counts = [0] * self.num_classes
assert class_choice in ["least_sampled", "random", "cycle"]
self.class_choice = class_choice
self.current_class = 0
def __iter__(self):
self.count = 0
return self
def __next__(self):
if self.count >= len(self.indices):
raise StopIteration
self.count += 1
return self.sample()
def sample(self):
class_ = self.get_class()
class_indices = self.class_indices[class_]
chosen_index = np.random.choice(class_indices)
if self.class_choice == "least_sampled":
for class_, indicator in enumerate(self.labels[chosen_index]):
if indicator == 1:
self.counts[class_] += 1
return chosen_index
def get_class(self):
if self.class_choice == "random":
class_ = random.randint(0, self.labels.shape[1] - 1)
elif self.class_choice == "cycle":
class_ = self.current_class
self.current_class = (self.current_class + 1) % self.labels.shape[1]
elif self.class_choice == "least_sampled":
min_count = self.counts[0]
min_classes = [0]
for class_ in range(1, self.num_classes):
if self.counts[class_] < min_count:
min_count = self.counts[class_]
min_classes = [class_]
if self.counts[class_] == min_count:
min_classes.append(class_)
class_ = np.random.choice(min_classes)
return class_
def __len__(self):
return len(self.indices)
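
# A tiny illustration: class 1 is rare in these labels, but the sampler first
# picks a class (here: cycling) and then an example of that class, so the rare
# class is drawn far more often than its frequency alone would suggest.
def _demo_multilabel_sampler():
    labels = np.array([[1, 0], [1, 0], [1, 0], [0, 1]])
    sampler = MultilabelBalancedRandomSampler(labels, class_choice="cycle")
    return [idx for idx in sampler]  # 4 draws; class 1 chosen on every 2nd draw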
|
"""Test functions for i_square.py.
"""
from datetime import datetime
import os
import pytest
from squaredown.i_square import SquareInterface
@pytest.fixture(name='square_if')
def fixture_square_iface():
"""Pytest fixture to initialize and return the SquareInterface object.
"""
# logger.debug(f'using fixture "{name}"')
return SquareInterface()
def test_init_square(square_if):
"""Tests Square Interface initialization.
"""
# test authentication
square = square_if.square_client
api_locations = square.locations
result = api_locations.list_locations()
assert result.is_success()
# test results
square_locations = os.environ.get('SQUARE_LOCATIONS').split(',')
found = False
for loc in result.body['locations']:
if loc['id'] in square_locations:
found = True
break
assert found
def test_decode_order(square_if):
"""Tests order decoding.
"""
order = {
'id': '110DevtQKzovAih4SVcVphyeV',
'created_at': '2016-09-04T23:59:33.123Z',
'updated_at': '2016-09-04T23:59:33.123Z',
}
square_if.decode_order(order)
assert isinstance(order['created_at'], datetime)
assert isinstance(order['updated_at'], datetime)
def test_decode_datetime(square_if):
"""Tests datetime decoding.
"""
ref_dt_str = '2016-09-04T23:59:33.123Z'
ref_dt = square_if.decode_datetime(ref_dt_str)
assert isinstance(ref_dt, datetime)
assert ref_dt.isoformat(timespec='milliseconds')[0:23] == ref_dt_str[0:23]
assert ref_dt.isoformat(timespec='milliseconds')[-6:] == '+00:00'
|
# -*- coding: utf-8 -*-
"""
Astro functions.
"""
import os
import astropy.io.fits as fits
import operator
from scipy import special
from numpy import (log10, array, sqrt, sin,
                   exp, log, average,
                   arange, meshgrid, std)
from numpy.random import random_sample
import numpy as np
def CC(z, H0=67.3, WM=0.315, WV=0.685, v = 0):
""" Cosmo calculator given the cosmological parameters z, H0, WM, WV returns
the luminosity distance, angular seperation, and possibly many more. Based on
http://www.astro.ucla.edu/~wright/CC.python
"""
c = 299792.458 # velocity of light in km/sec
Tyr = 977.8 # coefficient for converting 1/H into Gyr
h = H0/100.
WR = 4.165E-5/(h*h) # includes 3 massless neutrino species, T0 = 2.72528
WK = 1-WM-WR-WV
n = 5000
i = arange(n)
if not hasattr(z, '__iter__'):
z = np.array([float(z)])
zage_Gyra = np.array([])
for zs in z:
az = 1.0 / (1 + zs)
a = az * (i + 0.5) / n
adot = sqrt(WK+(WM/a)+(WR/(a*a))+(WV*a*a))
age = sum(1./adot)
zage = az*age/n
zage_Gyr = (Tyr/H0)*zage
zage_Gyra = np.append(zage_Gyra, zage_Gyr)
if v == 'age':
return zage_Gyra
DTT, DCMR = 0.0, 0.0
# do integral over a=1/(1+z) from az to 1 in n steps, midpoint rule
a = az+(1-az)*(i+0.5)/n
adot = sqrt(WK+(WM/a)+(WR/(a*a))+(WV*a*a))
DTT = sum(1./adot)
DCMR = sum(1./(a*adot))
DTT = (1.-az)*DTT/n
DCMR = (1.-az)*DCMR/n
age = DTT+zage
age_Gyr = age*(Tyr/H0)
DTT_Gyr = (Tyr/H0)*DTT
DCMR_Gyr = (Tyr/H0)*DCMR
DCMR_Mpc = (c/H0)*DCMR
# tangential comoving distance
ratio = 1.00
x = sqrt(abs(WK))*DCMR
if x > 0.1:
if WK > 0: ratio = 0.5*(exp(x)-exp(-x))/x
else: ratio = sin(x)/x
else:
y = x*x
if WK < 0: y = -y
ratio = 1. + y/6. + y*y/120.
DCMT = ratio*DCMR
DA = az*DCMT
DA_Mpc = (c/H0)*DA
kpc_DA = DA_Mpc/206.264806
DA_Gyr = (Tyr/H0)*DA
DL = DA/(az*az)
DL_Mpc = (c/H0)*DL
DL_Gyr = (Tyr/H0)*DL
# comoving volume computation
ratio = 1.00
x = sqrt(abs(WK))*DCMR
if x > 0.1:
if WK > 0: ratio = (0.125*(np.exp(2.*x)-np.exp(-2.*x))-x/2.)/(x*x*x/3.)
else: ratio = (x/2. - np.sin(2.*x)/4.)/(x*x*x/3.)
else:
y = x*x
if WK < 0: y = -y
ratio = 1. + y/5. + (2./105.)*y*y
VCM = ratio*DCMR*DCMR*DCMR/3.
V_Gpc = 4.*np.pi*((0.001*c/H0)**3)*VCM
DL_cm = DL_Mpc * 3.08568E24
if v == 1:
print ('\tH_0 = %1.1f' % H0 + ', Omega_M = ' + '%1.2f' % WM + ', Omega_vac = %1.2f' % WV + ', z = ' + '%1.3f' % z)
print ('\tIt is now %1.3f' % age_Gyr + ' Gyr since the Big Bang.')
print ('\tAge at redshift z was %1.3f' % zage_Gyr + ' Gyr.')
print ('\tLight travel time was %1.3f' % DTT_Gyr + ' Gyr.')
print ('\tComoving radial distance is \t%1.1f' % DCMR_Mpc + ' Mpc or ' + '%1.1f' % DCMR_Gyr + ' Gly.')
print ('\tComoving volume within redshift z ' + '%1.1f' % V_Gpc + ' Gpc^3.')
print ('\tAngular size distance D_A is ' + '%1.1f' % DA_Mpc + ' Mpc or %1.1f' % DA_Gyr + ' Gly.')
print ('\tAngular scale of %.2f' % kpc_DA + ' kpc/".')
print ('\tLuminosity distance D_L is %1.1f' % DL_Mpc + ' Mpc or ' + '%1.4e' % DL_cm + ' cm.')
print ('\tDistance modulus, m-M, is %1.2f mag' % (5*log10(DL_Mpc*1e6)-5))
print ('\tK-correction for equal effective wavelength %1.2f' %(-2.5*log10(1+z)))
return(DL_Mpc, kpc_DA)
elif v == 2:
return(DL_Mpc, kpc_DA)
else:
return(DL_Mpc)
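
# Example usage (illustrative): with the default v=0 only the luminosity
# distance in Mpc is returned; v=2 also returns the angular scale.
def _demo_cc():
    dl_mpc = CC(1.0)                   # luminosity distance at z = 1
    dl, kpc_per_arcsec = CC(1.0, v=2)  # plus kpc per arcsecond
    return dl_mpc, dl, kpc_per_arcsec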
|
import os
import cv2
import numpy as np
import time
from tools.mytracking_sort import MyTrackingSort
if __name__ == '__main__':
## -----------------------------------------
## Init
## -----------------------------------------
# a. Model
mt = MyTrackingSort(lenRecord=30, distance_th=0.005, time_th=0.5)
mt.init_sort(max_age=1, min_hits=2)
# Video (vidSrc is a placeholder: set it to a video file path or camera index)
vidSrc = 0  # assumed default: first camera
cap = cv2.VideoCapture(vidSrc)
while cap.isOpened():
ret, frame = cap.read()
if not ret:
    break
ts = time.time()
## ---
## 1 Detection (assumes a `detection` model object with a
## predict(frame) -> (boxes, scores) method is created elsewhere)
boxes, scores = detection.predict(frame)
## ---
## 2 Tracking
ts = time.time()
tracker = mt.update(boxes)
mt.get_tracker(tracker, ts)
trackDict = mt.get_array()
## -----------------------------------------
## Visualization
## -----------------------------------------
# Visualization Detection
for box in boxes:
cv2.rectangle(frame, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])),(255,255,0), 2)
# Visualization Tracking
for objectId in list(trackDict):
try:
    bbox = np.array(trackDict[objectId])[-1,1]
except IndexError:
    bbox = np.array(trackDict[objectId])[0,1]
# vis tracker bbox
cv2.rectangle(frame, (int(bbox[0]), int(bbox[1])), (int(bbox[2]), int(bbox[3])),(255,255,255), 2)
# vis id
cv2.putText(frame, str(objectId),(int(bbox[0]), int(bbox[1])),0, 5e-3 * 100, (0,255,0),2)
arrCentroid = np.array(trackDict[objectId])[:,2]
# vis line tracker obj
for i,v in enumerate(arrCentroid):
if i>1:
cv2.line(frame, (int(arrCentroid[i][0]), int(arrCentroid[i][1])), (int(arrCentroid[i-1][0]),int(arrCentroid[i-1][1])), (0, 255, 0), 2)
cv2.imshow("Frame", frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cv2.destroyAllWindows()
|
import unittest
from ansiblelint import RulesCollection, Runner
from ansiblelints.rules.HardCodePassword import HardCodePassword
class TestHardCodePassword(unittest.TestCase):
collection = RulesCollection()
def setUp(self):
self.collection.register(HardCodePassword())
def test_file(self):
success = 'testResources/ansible-smell/hardcodepassword.yml'
good_runner = Runner(self.collection, success, [], [], [])
results = good_runner.run()
print(results)
print(type(results))
print(len(results))
self.assertEqual(0, len(results))
|
import csv
import datetime
class Backtest(object):
def __init__(self, data, brain):
self.d = data
self.b = brain
self.backtest_data = []
self.backtest_data_file = self.d.datadir + '\\' + self.d.tradingpair + str(self.d.timeframe) + '_BACKTEST.csv'
self.trade_data = []
self.trade_data_file = self.d.datadir + '\\' + self.d.tradingpair + str(self.d.timeframe) + '_SIMTRADES.csv'
# Simulate a single moving average combination. Data object needs to be updated with the
# required SMA calcs before calling.
def ma_sim(self, type, ma_1, ma_2):
firstrun = True
profit = 0.0
last_trade = 0.0
last_id = self.d.ohlc_last_id
# Calculate buy and hold profit / loss using first and last OHLC data frames.
buy_hold = self.d.ohlc_data[-1][4] - self.d.ohlc_data[0][4]
# Number of days in the OHLC data
start_date = datetime.date.fromtimestamp(self.d.ohlc_data[0][0])
end_date = datetime.date.fromtimestamp(self.d.ohlc_data[-1][0])
delta = end_date - start_date
if type == 'SMA':
print('Backtesting SMA using values: ' + str(ma_1) + '/' + str(ma_2))
else:
print('Backtesting EMA using values: ' + str(ma_1) + '/' + str(ma_2))
for frame in self.d.ohlc_data:
if firstrun:
# Don't do anything for the first frame, since we don't have a prev value.
firstrun = False
elif frame[0] == last_id:
# Stop if there are no further records.
break
else:
decision = self.b.ma_decide_sim(type, ma_1, ma_2, frame[0])
price = decision[2]
if decision[0] == 'BUY':
last_trade = price
self.trade_data.append(decision)
elif decision[0] == 'SELL':
profit += (price - last_trade)
self.trade_data.append(decision)
# Calculate profit in excess of buy & hold.
vs_buy_hold = profit - buy_hold
# Calculate profit per day
profit_pday = profit / delta.days
result = [str(ma_1) + '/' + str(ma_2), profit, buy_hold, vs_buy_hold, profit_pday]
self.backtest_data.append(result)
print('Profit: ' + str(profit))
print('Buy & hold: ' + str(buy_hold))
print('vs Buy & hold: ' + str(vs_buy_hold))
return result
# Export monte carlo results
def export(self):
print('Exporting backtest data to CSV.')
# Export sim data (overwrite existing data)
with open(self.backtest_data_file, 'w', newline='') as f:
w = csv.writer(f)
w.writerows(self.backtest_data)
return
# Export detailed trades data, should be used with single SMA pair test
def export_trades(self):
print('Exporting sim trade data to CSV.')
with open(self.trade_data_file, 'w', newline='') as f:
w = csv.writer(f)
w.writerows(self.trade_data)
return
# Simulate SMA / EMA strategy for a given data object from ma_min to ma_max (monte carlo sim)
def run_ma_sim(self, type, ma_min, ma_max):
best_profit = 0.0
best_ma = ''
best_profit_pday = 0.0
# Calculate SMA / EMA values up to ma_max
if type == 'SMA':
for x in range (ma_max):
self.d.update_sma(x+1)
else:
for x in range(ma_max):
self.d.update_ema(x+1)
for ma_1 in range(ma_min, ma_max):
for ma_2 in range(ma_min, ma_max):
if ma_1 < ma_2:
self.b.sim_open_position = False # Reset open position after each iteration
result = self.ma_sim(type, ma_1, ma_2)
profit = result[1]
if profit > best_profit:
best_profit = profit
best_ma = str(ma_1) + '/' + str(ma_2)
best_profit_pday = result[4]
print('Best combo is: ' + best_ma + ' with ' + str(best_profit) + ' profit.')
print('..Averaging ' + str(best_profit_pday) + ' profit per day.')
self.export()
return
|
from setuptools import setup
setup(
name="document",
version="0.1",
packages=["document"],
)
|
'''
Dice-rolling game
'''
import tkinter as tk
import random as rd
def main():
'''
Main game loop
'''
def rolling_dice():
roll.configure(text=rd.choice(dice))
roll.pack()
window = tk.Tk()
edge = int(window.winfo_screenheight()/2)
window.geometry(str(edge) + 'x' + str(edge))
window.title('Dicer')
dice = ['⚀', '⚁', '⚂', '⚃', '⚄', '⚅']
roll = tk.Label(window,
text=rd.choice(dice),
font=('Arial', int(edge/2)))
play = tk.Button(window,
text='Roll',
command=rolling_dice)
close = tk.Button(window,
text='Close',
command=window.destroy)
roll.pack(expand=True)
play.pack(fill='x')
close.pack(fill='x')
window.mainloop()
if __name__ == '__main__':
main()
|
import datetime
import json
import socket
import redis as redis
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from djim.models import UserSession, channel, UserList
from djim.serializers import usersession_json, user_list_json
from utils import const, ipHelper
class ChatConsumer(AsyncJsonWebsocketConsumer):
USER_SESSION = 'UserSession'
pool = redis.ConnectionPool(host='127.0.0.1', port=6379)
r = redis.Redis(connection_pool=pool)
# Establish the connection
async def connect(self):
self.group_name = 'common'
await self.channel_layer.group_add(self.group_name, self.channel_name)
# Add the user info to Redis
try:
print('222')
userid = self.scope['url_route']['kwargs']['group_name']
usersession = UserSession()
print('333')
usersession.name = userid
print(userid)
usersession.channel_name = self.channel_name
print(self.channel_name)
usersession.subIP = '127.0.0.1'
#print(self.getLocalIP())
print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
usersession.activeTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
print(json.dumps(usersession, default=usersession_json))
self.r.hset(self.USER_SESSION, self.channel_name, json.dumps(usersession, default=usersession_json))
await self.accept()
except Exception as e:
print(e)
# Called when the connection is created
#await self.accept()
# Connection closed
async def disconnect(self, close_code):
# Called when the connection closes
# Remove the closed connection from the group
await self.channel_layer.group_discard(self.group_name, self.channel_name)
# Delete the current user from the Redis UserSession hash
try:
self.r.hdel(self.USER_SESSION, self.channel_name)
except:
pass
await self.close()
# Receive message from WebSocket
async def receive(self, text_data=None, bytes_data=None):
print(text_data)
print(bytes_data)
print(channel.user_list)
if(text_data != None and bytes_data == None):
json_text_data = json.loads(text_data)
print(text_data)
# Broadcast a message to the whole group
if json_text_data['type'] == str(channel.chat_all):
print(self.group_name)
content = json_text_data['message']
await self.channel_layer.group_send(
self.group_name, {
'sender': json_text_data['sender'],
'message': content,
'accepter': json_text_data['accepter'],
'type': json_text_data['type'],
}
)
await self.send_json({
'type': json_text_data['type'],
'message': content
})
# Fetch the user list
if json_text_data['type'] == str(channel.user_list):
print('user_list')
allvalues = self.r.hvals(self.USER_SESSION)
lists = []
for item in allvalues:
try:
json_allvalues_data = json.loads(item)
listitem = UserList()
listitem.uid = json_allvalues_data['name']
listitem.session = json_allvalues_data['channel_name']
l = json.dumps(listitem, default=user_list_json)
print(l)
lists.append(l)
await self.send_json({
'type': 'userlist',
'message': lists
})
except Exception as e:
print(e)
# Send a single message to a specific ID
if json_text_data['type'] == str(channel.chat_solo):
content = json_text_data['message']
channelname = json_text_data['accepter']
if(text_data == None and bytes_data != None):
pass
if(text_data != None and bytes_data != None):
pass
def getLocalIP(self):
myname = socket.getfqdn(socket.gethostname())
myaddr = socket.gethostbyname(myname)
return myaddr
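
# A hedged routing sketch (an assumption, not taken from this project):
# connect() reads scope['url_route']['kwargs']['group_name'], so the URL
# kwarg must be named group_name. With channels 3.x this might look like:
#
# from django.urls import re_path
# websocket_urlpatterns = [
#     re_path(r'^ws/chat/(?P<group_name>\w+)/$', ChatConsumer.as_asgi()),
# ]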
|
import uuid
from itertools import product
from functools import partial
import numpy as np
import zarr
from numcodecs import Blosc
from skimage import feature, filters
import multiprocessing
from scipy.optimize import minimize, basinhopping, differential_evolution
from scipy.ndimage import map_coordinates
from scipy import spatial
from scipy.interpolate import Rbf, RegularGridInterpolator
try:
import tifffile
except ImportError:
from skimage.external import tifffile
from skimage.filters import threshold_otsu
from sklearn.neighbors import NearestNeighbors
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
import tqdm
import time
from phathom import utils
from phathom.registration import pcloud
#from phathom.io import conversion
def chunk_coordinates(shape, chunks):
"""Calculate the global coordaintes for each chunk's starting position
Parameters
----------
shape : tuple
shape of the image to chunk
chunks : tuple
shape of each chunk
Returns
-------
start : list
the starting indices of each chunk
"""
nb_chunks = utils.chunk_dims(shape, chunks)
start = []
for indices in product(*tuple(range(n) for n in nb_chunks)):
start.append(tuple(i*c for i, c in zip(indices, chunks)))
return start
def chunk_bboxes(shape, chunks, overlap):
"""Calculates the bounding box coordinates for each overlapped chunk
Parameters
----------
shape : tuple
overall shape
chunks : tuple
tuple containing the shape of each chunk
overlap : int
int indicating number of voxels to overlap adjacent chunks
Returns
-------
chunk_coords : list
starting indices for each chunk wrt the top-upper-left
start : list
starting indices for each bbox with overlap
stop : list
stopping indices for each bbox with overlap
"""
chunk_coords = chunk_coordinates(shape, chunks)
start = []
stop = []
for coord in chunk_coords:
start.append(tuple(max(0, s-overlap) for s in coord))
stop.append(tuple(min(e, s+c+overlap) for s,c,e in zip(coord, chunks, shape)))
return chunk_coords, start, stop
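# Illustrative sketch: tiling a small volume with chunk_coordinates and
# chunk_bboxes (assumes utils.chunk_dims does per-axis ceiling division).
def _example_chunking():
    shape = (4, 4, 4)
    chunks = (2, 2, 2)
    starts = chunk_coordinates(shape, chunks)
    # 2 chunks per axis -> 8 chunks, starting at every combination of 0 and 2
    assert len(starts) == 8 and starts[0] == (0, 0, 0)
    coords, lo, hi = chunk_bboxes(shape, chunks, overlap=1)
    # each bbox is padded by one voxel per side, clipped at the array boundary
    assert lo[0] == (0, 0, 0) and hi[0] == (3, 3, 3)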
def chunk_generator(z_arr, overlap):
"""Reads the chunks from a 3D on-disk zarr array
Parameters
----------
z_arr : zarr
input zarr array
overlap : int
int indicating number of voxels to overlap adjacent chunks
Yields
------
start : tuple
starting index of the next chunk in the `z_arr`
stop : tuple
stopping index of the next chunk in the `z_arr`
"""
_, starts, stops = chunk_bboxes(z_arr.shape, z_arr.chunks, overlap)
for start, stop in zip(starts, stops):
yield start, stop
def detect_blobs(bbox, z_arr, sigma, min_distance, min_intensity):
"""Detects blobs in an image using local maxima
Parameters
----------
bbox : tuple
tuple of two tuples with start-stop indices of chunk
z_arr : zarr
reference to persistent zarr array
    sigma : float
        standard deviation for gaussian blurring
    min_distance : float
        minimum distance in voxels allowed between blobs
    min_intensity : float
        minimum gray-level intensity allowed for blobs
Returns
-------
array
(N,3) ndarray of blob coordinates
"""
start, stop = bbox
z0, y0, x0 = start
z1, y1, x1 = stop
img = z_arr[z0:z1, y0:y1, x0:x1]
smoothed = filters.gaussian(img, sigma=sigma, preserve_range=True)
if np.any(smoothed > 0):
thresh = threshold_otsu(smoothed)
else:
thresh = 1.0 # Otsu fails if all voxels are black
peaks = feature.peak_local_max(smoothed,
min_distance=min_distance,
threshold_abs=max(min_intensity, thresh))
    # Note: deriving the threshold per chunk can introduce chunk-boundary artifacts
return peaks
def detect_blobs_parallel(z_arr, sigma, min_distance, min_intensity, overlap, nb_workers):
"""Detects blobs in a chunked zarr array in parallel using local maxima
Parameters
----------
z_arr : zarr
input zarr array
sigma : float
float for gaussian blurring
min_distance : float
minimum distance in voxels allowed between blobs
min_intensity : float
minimum gray-level intensity allowed for blobs
    overlap : int
        number of voxels of overlap between adjacent chunks
nb_workers : int
number of parallel processes to use
Returns
-------
array
(N,3) ndarray of blob coordinates
"""
chunk_coords, starts, _ = chunk_bboxes(z_arr.shape, z_arr.chunks, overlap)
detect_blobs_in_chunk = partial(detect_blobs,
z_arr=z_arr,
sigma=sigma,
min_distance=min_distance,
min_intensity=min_intensity)
chunks = list(chunk_generator(z_arr, overlap))
pts_list = []
with multiprocessing.Pool(nb_workers) as pool:
r = list(tqdm.tqdm(pool.imap(detect_blobs_in_chunk, chunks), total=len(chunks)))
for i, pts_local in enumerate(r):
if len(pts_local) == 0:
continue
chunk_coord = np.asarray(chunk_coords[i])
start = np.asarray(starts[i])
local_start = chunk_coord - start
local_stop = local_start + np.array(z_arr.chunks)
idx = np.all(np.logical_and(local_start <= pts_local, pts_local < local_stop), axis=1)
pts_trim = pts_local[idx]
pts_list.append(pts_trim + start)
if len(pts_list) == 0:
return np.zeros((0, 3))
else:
return np.concatenate(pts_list)
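# Usage sketch for detect_blobs_parallel on a small in-memory zarr array;
# the parameter values here are illustrative only.
def _example_detect_blobs():
    arr = zarr.zeros((64, 64, 64), chunks=(32, 32, 32), dtype='float32')
    arr[32, 32, 32] = 100.0  # a single bright voxel
    pts = detect_blobs_parallel(arr, sigma=1.0, min_distance=3,
                                min_intensity=1.0, overlap=8, nb_workers=1)
    print(pts)  # (N, 3) array of (z, y, x) blob coordinates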
def pts_to_img(pts, shape, path):
"""Saves a set of points into a binary image
Parameters
----------
pts: an (N, D) array with N D-dimensional points
shape: a tuple describing the overall shape of the image
path: path to save the output tiff image
Returns
-------
img : array
An 8-bit image array
"""
img = np.zeros(shape, dtype='uint8')
img[tuple(pts.T)] = 255
tifffile.imsave(path, img)
return img
def mark_pts(arr, pts, cval=None):
"""Mark a list of points in an array using 3-voxel cubes
Parameters
----------
arr : array
Input array to modify
pts : array
Points to mark
cval : int, optional
fill value, defaults to unique labels
Returns
-------
arr : array
Original array with the blob marked
"""
for i, pt in enumerate(pts):
if cval is None:
label = i+1
else:
label = cval
arr[pt[0], pt[1], pt[2]] = label
if 1 < pt[0] < arr.shape[0]-2:
if 1 < pt[1] < arr.shape[1]-2:
if 1 < pt[2] < arr.shape[2]-2:
                    arr[pt[0]-2:pt[0]+2, pt[1]-2:pt[1]+2, pt[2]-2:pt[2]+2] = label
return arr
def estimate_rigid(fixed_inliers, moving_inliers):
"""Estimate a rigid transformation from fixed to moving points using SVD
Parameters
----------
fixed_inliers : array
array (N, 3) of fixed coordinates
moving_inliers: array
array (N, 3) of corresponding moving coordinates
Returns
-------
t : array
translation vector
r : array
rotation matrix
"""
# Find centroids
fixed_centroid = np.mean(fixed_inliers, axis=0)
moving_centroid = np.mean(moving_inliers, axis=0)
# Find covariance matrix
M = np.zeros((3, 3))
for f, m in zip(fixed_inliers, moving_inliers):
M += np.outer(f - fixed_centroid, m - moving_centroid)
# Get rigid transformation
u, s, vh = np.linalg.svd(M)
r = vh.T.dot(u.T)
t = moving_centroid - r.dot(fixed_centroid)
return t, r
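# Sanity-check sketch: estimate_rigid recovers a known rotation and
# translation from exact synthetic correspondences.
def _example_estimate_rigid():
    rng = np.random.RandomState(0)
    fixed = rng.rand(50, 3)
    theta = np.pi / 6
    r_true = np.array([[1, 0, 0],
                       [0, np.cos(theta), -np.sin(theta)],
                       [0, np.sin(theta), np.cos(theta)]])
    t_true = np.array([5.0, -2.0, 1.0])
    moving = fixed.dot(r_true.T) + t_true
    t, r = estimate_rigid(fixed, moving)
    assert np.allclose(r, r_true) and np.allclose(t, t_true)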
def indices_to_um(pts, voxel_dimensions):
"""Convert indicies to micron units wrt the top-upper-left
Parameters
----------
pts : array
2D array (N, D) of ints to convert
voxel_dimensions : array
1D array (D,) of floats representing voxel shape
"""
return np.array([d*pts[:, i] for d, i in zip(voxel_dimensions, range(len(voxel_dimensions)))]).T
def um_to_indices(pts_um, voxel_dimensions):
"""Convert micron units wtf top-upper-left to indices
Parameters
----------
pts_um : array
2D array (N, D) of floats in micron to convert
voxel_dimensions: array
1D array (D,) of floats representing voxel shape
"""
return np.array([pts_um[:, i]/d for d, i in zip(voxel_dimensions, range(len(voxel_dimensions)))]).T
def rigid_transformation(t, r, pts):
"""Apply rotation and translation (rigid transformtion) to a set of points
Parameters
----------
t : array
1D array representing the translation vector
r : array
2D array representing the rotation matrix
"""
return r.dot(pts.T).T + t
def rigid_residuals(t, r, fixed_pts, moving_pts):
"""Compute the residuals for all points after the rigid transformation
Parameters
----------
t : array
1D array (D,) of the translation
r : array
2D array (D, D) of the rotation matrix
fixed_pts : array
2D array (N, D) of points to transform
moving_pts : array
2D array (N, D) of target points
"""
return moving_pts - rigid_transformation(t, r, fixed_pts)
def residuals_to_distances(residuals):
"""Compute the Euclidean distances given residuals in each dimension
Parameters
----------
residuals : array
2D array (N, D) of residuals
"""
return np.linalg.norm(residuals, axis=-1)
def average_distance(distances):
"""Compute the average Euclidean distance over a sequence of distances
Parameters
----------
distances : array
1D array (N,) of distances
"""
return np.mean(distances)
def shape_to_coordinates(shape):
"""Build an array containing all array indices for a given shape
Parameters
----------
shape : array-like
array-like containing 3 ints representing the array shape
"""
indices = np.indices(shape)
coords = indices.reshape((indices.shape[0], np.prod(indices.shape[1:]))).T
return coords
def interpolate(image, coordinates, order=3):
"""Interpolate an image at a list of coordinates
Parameters
----------
image : array
array to interpolate
coordinates : array
2D array (N, D) of N coordinates to be interpolated
order : int
polynomial order of the interpolation (default: 3, cubic)
"""
output = map_coordinates(image,
coordinates.T,
output=None,
order=order,
mode='constant',
cval=0.0,
prefilter=True)
return output
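# Illustrative sketch: sampling a volume at all of its own integer
# coordinates with order=1 reproduces the input exactly.
def _example_interpolate():
    vol = np.arange(27, dtype='float32').reshape((3, 3, 3))
    coords = shape_to_coordinates(vol.shape)
    values = interpolate(vol, coords, order=1)
    assert np.allclose(values.reshape(vol.shape), vol)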
def mean_square_error(fixed, transformed):
"""Calculate the nmean squared error between two images
Parameters
----------
fixed : array
array of first image to be compared
transformed : array
array of second image to be compared
"""
idx = np.where(transformed > 0)
a = fixed[idx]
b = transformed[idx]
    # mean of the squared residuals over the overlapping (nonzero) region
    return np.mean((a - b) ** 2)
transformation = None
def register_slice(moving_img, zslice, output_shape, transformation, batch_size=None, padding=4):
"""Apply transformation and interpolate for a single z slice in the output
Parameters
----------
moving_img : zarr array
input image to be interpolated
zslice : int
index of the z-slice to compute
output_shape : tuple
shape of the output image
transformation : callable
mapping from output image coordinates to moving image coordinates
batch_size : int, optional
number of points to transform at a time. Default, all coordinates
padding : int, optional
amount of padding to use when extracting pixels for interpolation
Returns
-------
registered_img : ndarray
registered slice from the moving image
"""
img_shape = np.array(output_shape)
local_coords = shape_to_coordinates(img_shape)
global_coords = np.hstack((zslice*np.ones((local_coords.shape[0], 1)), local_coords))
if batch_size is None:
moving_coords = transformation(pts=global_coords)
else:
moving_coords = np.empty_like(global_coords)
nb_pts = len(global_coords)
nb_batches = int(np.ceil(nb_pts/batch_size))
for i in tqdm.tqdm(range(nb_batches)):
batch_start = i*batch_size
if i == nb_batches-1:
batch_stop = nb_pts
else:
batch_stop = batch_start + batch_size
moving_coords[batch_start:batch_stop] = transformation(pts=global_coords[batch_start:batch_stop])
# Find the padded bounding box of the warped chunk coordinates
transformed_start = tuple(np.floor(moving_coords.min(axis=0) - padding).astype('int'))
transformed_stop = tuple(np.ceil(moving_coords.max(axis=0) + padding).astype('int'))
    # Read in the available portion of the data (without indexing outside the moving image boundary)
moving_start = tuple(max(0, s) for s in transformed_start)
moving_stop = tuple(min(e, s) for e, s in zip(moving_img.shape, transformed_stop))
moving_coords_local = moving_coords - np.array(moving_start)
print(moving_start, moving_stop)
moving_data = moving_img[moving_start[0]:moving_stop[0],
moving_start[1]:moving_stop[1],
moving_start[2]:moving_stop[2]]
# interpolate the moving data
interp_values = interpolate(moving_data, moving_coords_local, order=1)
registered_img = np.reshape(interp_values, output_shape)
return registered_img
def register_chunk(moving_img, chunks, output_img, start, fixed_img, batch_size=None, padding=4):
"""Apply transformation and interpolate for a single chunk in the output
Parameters
----------
moving_img : zarr array
zarr array with read access to interpolate
chunks : tuple
chunk size of the output image (and ideally the fixed image too)
output_img : zarr array
zarr array with write access for output
    fixed_img : zarr array
        zarr array with read access, used to skip chunks that are empty in
        the fixed image; the coordinate mapping itself is read from the
        module-level `transformation` global set by `register`
start : tuple
starting index of the chunk to write
batch_size : int, optional
number of points to apply the transformation on at once. Default, whole chunk.
padding : int, optional
number of pixels to borrow from adjacent chunks in `fixed_img`. Default, 4.
"""
global transformation
# zarr.blosc.use_threads = True
# Get dimensions
chunks = np.array(chunks)
img_shape = np.array(output_img.shape)
# Find the appropriate global stop coordinate and chunk shape accounting for boundary cases
stop = np.minimum(start + chunks, img_shape)
chunk_shape = np.array([b-a for a, b in zip(start, stop)])
# Check the target to see if we need to do anything
fixed_data = fixed_img[start[0]:stop[0],
start[1]:stop[1],
start[2]:stop[2]]
if not np.any(fixed_data):
output_img[start[0]:stop[0], start[1]:stop[1], start[2]:stop[2]] = np.zeros(chunk_shape, output_img.dtype)
return
# Find all global coordinates in the fixed image for this chunk
local_coords = shape_to_coordinates(chunk_shape)
global_coords = start + local_coords
# Find the coordinates on the moving image to be interpolated
if batch_size is None:
moving_coords = transformation(pts=global_coords) # This is using multiple cores
else:
moving_coords = np.empty_like(global_coords)
nb_pts = len(global_coords)
nb_batches = int(np.ceil(nb_pts/batch_size))
for i in range(nb_batches):
batch_start = i*batch_size
if i == nb_batches-1:
batch_stop = nb_pts
else:
batch_stop = batch_start + batch_size
moving_coords[batch_start:batch_stop] = transformation(pts=global_coords[batch_start:batch_stop])
# Find the padded bounding box of the warped chunk coordinates
transformed_start = tuple(np.floor(moving_coords.min(axis=0)-padding).astype('int'))
transformed_stop = tuple(np.ceil(moving_coords.max(axis=0)+padding).astype('int'))
if np.any(np.asarray(transformed_stop) < 0): # Chunk is outside for some dimension
interp_chunk = np.zeros(chunk_shape, output_img.dtype)
elif np.any(np.greater(np.asarray(transformed_start), np.asarray(output_img.shape))): # Chunk is outside
interp_chunk = np.zeros(chunk_shape, output_img.dtype)
else:
        # Read in the available portion of the data (without indexing outside the moving image boundary)
moving_start = tuple(max(0, s) for s in transformed_start)
moving_stop = tuple(min(e, s) for e, s in zip(moving_img.shape, transformed_stop))
moving_coords_local = moving_coords - np.array(moving_start)
moving_data = moving_img[moving_start[0]:moving_stop[0],
moving_start[1]:moving_stop[1],
moving_start[2]:moving_stop[2]]
if not np.any(moving_data): # No need to interpolate if moving image is just zeros
interp_chunk = np.zeros(chunk_shape, dtype=output_img.dtype)
else:
# interpolate the moving data
interp_values = interpolate(moving_data, moving_coords_local, order=1)
interp_chunk = np.reshape(interp_values, chunk_shape)
# write results to disk
output_img[start[0]:stop[0], start[1]:stop[1], start[2]:stop[2]] = interp_chunk
def _register_chunk(args):
arr, start_coord, chunks, moving_img, fixed_img, batch_size, padding = args
register_chunk(moving_img, chunks, arr, start_coord, fixed_img, batch_size, padding)
def register(moving_img, output_img, fixed_img, transform_path, nb_workers, batch_size=None, padding=4):
"""Transform a moving zarr array for registration
Parameters
----------
moving_img: zarr array
zarr array with read access to be interpolated
output_img : zarr array
zarr array with write access for the output
    fixed_img : zarr array
        zarr array with read access, used to skip chunks that are empty
        in the fixed image
    transform_path : str
        path to a pickled transformation that takes one "pts" argument
        and warps them
nb_workers : int
number of processes to work on separate chunks
batch_size : int, optional
number of points to apply the transformation on at once. Default, whole chunk.
padding : int, optional
number of pixels to borrow from adjacent chunks in `fixed_img`. Default, 4.
"""
global transformation
# Get transformation
transformation = utils.pickle_load(transform_path)
start_coords = chunk_coordinates(output_img.shape, output_img.chunks)
args_list = []
for i, start_coord in tqdm.tqdm(enumerate(start_coords)):
start = np.asarray(start_coord)
# args = (moving_img, output_img.chunks, output_img, start, batch_size, padding)
args = (output_img, start, output_img.chunks, moving_img, fixed_img, batch_size, padding)
args_list.append(args)
# register_chunk(*args)
with multiprocessing.Pool(processes=nb_workers) as pool:
list(tqdm.tqdm(pool.imap_unordered(_register_chunk, args_list), total=len(args_list)))
# f = partial(_register_chunk,
# moving_img=moving_img,
# batch_size=batch_size,
# padding=padding)
#
# utils.pmap_chunks(f, output_img, output_img.chunks, nb_workers)
def coherence(n_neighbors, fixed_pts_um, moving_pts_um):
"""Calculate the cosine similarity between displacement vectors using `n_neighbors`
"""
nbrs = NearestNeighbors(n_neighbors=n_neighbors+1, algorithm='kd_tree', n_jobs=-1)
nbrs.fit(fixed_pts_um)
distances, indices = nbrs.kneighbors(fixed_pts_um)
cosine_similarity = np.zeros((fixed_pts_um.shape[0], n_neighbors))
for i, idxs in enumerate(indices):
displacement = moving_pts_um[i] - fixed_pts_um[i]
neighbor_idxs = idxs[1:]
fixed_neighbors = fixed_pts_um[neighbor_idxs]
moving_neighbors = moving_pts_um[neighbor_idxs]
displacement_neighbors = moving_neighbors - fixed_neighbors
for j, d in enumerate(displacement_neighbors):
cosine_similarity[i, j] = 1 - spatial.distance.cosine(displacement, d)
return cosine_similarity.mean(axis=-1)
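# Illustrative sketch: a uniform displacement field is perfectly coherent,
# so every point scores a cosine similarity of 1.
def _example_coherence():
    rng = np.random.RandomState(0)
    fixed = rng.rand(30, 3)
    moving = fixed + np.array([1.0, 0.0, 0.0])
    scores = coherence(3, fixed, moving)
    assert np.allclose(scores, 1.0)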
def match_distance(pts1, pts2):
"""Calculate the distance between matches points"""
return np.linalg.norm(pts1-pts2, axis=-1)
def fit_polynomial_transform(fixed_keypts, moving_keypts, degree):
"""Fit a low-order polynomial mapping from fixed to moving keypoints"""
fixed_poly = PolynomialFeatures(degree=degree).fit_transform(fixed_keypts)
model_z = LinearRegression(fit_intercept=False).fit(fixed_poly,
moving_keypts[:, 0])
model_y = LinearRegression(fit_intercept=False).fit(fixed_poly,
moving_keypts[:, 1])
model_x = LinearRegression(fit_intercept=False).fit(fixed_poly,
moving_keypts[:, 2])
return model_z, model_y, model_x
def polynomial_transform(pts, degree, model_z, model_y, model_x):
"""Apply a low-order polynomial transformation to pts"""
poly = PolynomialFeatures(degree=degree).fit_transform(pts)
transformed_keypts = np.empty_like(pts)
transformed_keypts[:, 0] = model_z.predict(poly)
transformed_keypts[:, 1] = model_y.predict(poly)
transformed_keypts[:, 2] = model_x.predict(poly)
return transformed_keypts
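# Round-trip sketch: a degree-1 polynomial fit recovers a pure translation
# between fixed and moving keypoints exactly.
def _example_polynomial_transform():
    rng = np.random.RandomState(0)
    fixed = rng.rand(100, 3)
    moving = fixed + np.array([1.0, -2.0, 0.5])
    model_z, model_y, model_x = fit_polynomial_transform(fixed, moving, degree=1)
    mapped = polynomial_transform(fixed, 1, model_z, model_y, model_x)
    assert np.allclose(mapped, moving)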
def fit_rbf(affine_pts, moving_pts, smooth=0, mode='thin_plate'):
rbf_z = Rbf(affine_pts[:, 0], affine_pts[:, 1], affine_pts[:, 2], moving_pts[:, 0], smooth=smooth, function=mode)
rbf_y = Rbf(affine_pts[:, 0], affine_pts[:, 1], affine_pts[:, 2], moving_pts[:, 1], smooth=smooth, function=mode)
rbf_x = Rbf(affine_pts[:, 0], affine_pts[:, 1], affine_pts[:, 2], moving_pts[:, 2], smooth=smooth, function=mode)
return rbf_z, rbf_y, rbf_x
def rbf_transform(pts, rbf_z, rbf_y, rbf_x):
zi = rbf_z(pts[:, 0], pts[:, 1], pts[:, 2])
yi = rbf_y(pts[:, 0], pts[:, 1], pts[:, 2])
xi = rbf_x(pts[:, 0], pts[:, 1], pts[:, 2])
return np.column_stack([zi, yi, xi])
def nonrigid_transform(pts, affine_transform, rbf_z, rbf_y, rbf_x):
affine_pts = affine_transform(pts)
return rbf_transform(affine_pts, rbf_z, rbf_y, rbf_x)
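# Usage sketch: with smooth=0 the thin-plate RBF interpolates exactly at its
# control points; the identity lambda stands in for a fitted affine model.
def _example_nonrigid_transform():
    rng = np.random.RandomState(0)
    fixed = rng.rand(20, 3) * 100
    moving = fixed + 3.0  # a simple known displacement
    rbf_z, rbf_y, rbf_x = fit_rbf(fixed, moving)
    warped = nonrigid_transform(fixed, lambda pts: pts, rbf_z, rbf_y, rbf_x)
    assert np.allclose(warped, moving)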
TRANSFORMS = {}
GRIDS = {}
def wrg_transform(my_uuid, start, end):
transform = TRANSFORMS[my_uuid]
grid = GRIDS[my_uuid]
return transform(grid[start:end])
def warp_regular_grid(np_pts, z, y, x, transform, n_processes=1,
chunk_size=250):
Z, Y, X = np.meshgrid(z, y, x, indexing='ij')
grid = np.column_stack([Z.ravel(), Y.ravel(), X.ravel()])
if n_processes == 1:
values = transform(grid)
else:
my_uuid = uuid.uuid4()
TRANSFORMS[my_uuid] = transform
GRIDS[my_uuid] = grid
try:
n_grid = len(grid)
my_chunk_size = min((n_grid + n_processes - 1) // n_processes,
chunk_size)
starts = np.arange(0, n_grid, my_chunk_size)
ends = np.concatenate((starts[1:], [n_grid]))
with multiprocessing.Pool(n_processes) as pool:
futures = []
for start, end in zip(starts, ends):
future = pool.apply_async(transform, (grid[start:end],))
futures.append(future)
values = []
for future in tqdm.tqdm(futures):
values.append(future.get())
finally:
del TRANSFORMS[my_uuid]
del GRIDS[my_uuid]
values = np.concatenate(values)
    grid_shape = values.shape[-1] * (np_pts,)  # assumes the same number of grid points along each axis
values_z = np.reshape(values[:, 0], grid_shape)
values_y = np.reshape(values[:, 1], grid_shape)
values_x = np.reshape(values[:, 2], grid_shape)
return values_z, values_y, values_x
def fit_grid_interpolator(z, y, x, values):
interp_z = RegularGridInterpolator((z, y, x), values[0]) # Could be useful to use map_coordinates here instead
interp_y = RegularGridInterpolator((z, y, x), values[1])
interp_x = RegularGridInterpolator((z, y, x), values[2])
return interp_z, interp_y, interp_x
def interpolator(pts, interp):
interp_z, interp_y, interp_x = interp
values_z = interp_z(pts)
values_y = interp_y(pts)
values_x = interp_x(pts)
return np.column_stack([values_z, values_y, values_x])
class MapCoordinatesInterpolator:
def __init__(self, values, shape, order=1):
self.values = values
self.shape = shape
self.order = order
def __call__(self, pts):
# pts must be (n, 3)
# scale pixel coordinates to grid coordinates
coords = tuple(pts[:, i]/(self.shape[i]-1)*(self.values.shape[i]-1) for i in range(pts.shape[-1]))
coords = np.asarray(coords)
results = map_coordinates(self.values, coords, order=self.order)
return results
def fit_map_interpolator(values, shape, order=1):
interp_z = MapCoordinatesInterpolator(values[0], shape, order)
interp_y = MapCoordinatesInterpolator(values[1], shape, order)
interp_x = MapCoordinatesInterpolator(values[2], shape, order)
return interp_z, interp_y, interp_x
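# Usage sketch: wrap a coarse displacement grid so it can be evaluated at
# arbitrary pixel coordinates of a full-size image (values here are random).
def _example_map_interpolator():
    values = np.random.rand(3, 5, 5, 5)  # coarse per-axis coordinate grids
    shape = (100, 100, 100)              # full image shape the grids span
    interp = fit_map_interpolator(values, shape, order=1)
    pts = np.array([[0.0, 0.0, 0.0], [99.0, 99.0, 99.0]])
    print(interpolator(pts, interp))     # (N, 3) warped coordinates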
def main2():
import os
import zarr
from precomputed_tif.zarr_stack import ZarrStack
from phathom import io
from phathom.utils import pickle_load
working_dir = '/home/jswaney/coregistration'
# Open images
fixed_zarr_path = 'fixed/zarr_stack/1_1_1'
moving_zarr_path = 'moving/zarr_stack/1_1_1'
fixed_img = io.zarr.open(os.path.join(working_dir, fixed_zarr_path), mode='r')
moving_img = io.zarr.open(os.path.join(working_dir, moving_zarr_path), mode='r')
# Load the coordinate interpolator
interpolator_path = 'map_interpolator.pkl'
interpolator = pickle_load(os.path.join(working_dir, interpolator_path))
# Create a new zarr array for the registered image
nonrigid_zarr_path = 'moving/registered/1_1_1'
nonrigid_img = io.zarr.new_zarr(os.path.join(working_dir, nonrigid_zarr_path),
fixed_img.shape,
fixed_img.chunks,
fixed_img.dtype)
# Warp the entire moving image
nb_workers = 1
batch_size = None
padding = 2
register(moving_img,
nonrigid_img,
fixed_img,
os.path.join(working_dir, interpolator_path),
nb_workers,
batch_size=batch_size,
padding=padding)
def main():
# Working directory
# project_path = '/media/jswaney/Drive/Justin/coregistration/whole_brain/'
project_path = '/home/jswaney/coregistration/'
# Input images
voxel_dimensions = (2.0, 1.6, 1.6)
fixed_zarr_path = project_path + 'fixed/zarr_stack/1_1_1'
moving_zarr_path = project_path + 'moving/zarr_stack/1_1_1'
# registered_zarr_path = project_path + 'registered_affine.zarr'
# preview_zarr_path = project_path + 'registered_preview.zarr'
# preview_tif_path = project_path + 'registered_preview.tif'
# Caching intermediate data
fixed_pts_path = project_path + 'fixed_blobs.npy'
moving_pts_path = project_path + 'moving_blobs_1200.npy'
# fixed_pts_img_path = project_path + 'fixed_pts.tif'
# moving_pts_img_path = project_path + 'moving_pts.tif'
# fixed_matches_img_path = project_path + 'fixed_matches.tif'
# moving_matches_img_path = project_path + 'moving_matches.tif'
fixed_features_path = project_path + 'fixed_features.npy'
moving_features_path = project_path + 'moving_features.npy'
# fixed_idx_path = project_path + 'fixed_idx.npy'
# moving_idx_path = project_path + 'moving_idx.npy'
# Processing
nb_workers = 48
overlap = 8
# Keypoints
sigma = (1.2, 2.0, 2.0)
min_distance = 3
min_intensity = 600
# Coarse registration
niter = 100
# Nuclei matching
prominence_thresh = 0.2
max_distance = 300
max_feat_dist = 1.0
dist_thresh = None
# Transform estimation (RANSAC)
min_samples = 12
residual_threshold = 2
# Interpolation
batch_size = 10000
# Output
compression = 1
# ---------------------------------- #
t0 = time.time()
# print('opening input images')
# fixed_store = zarr.NestedDirectoryStore(fixed_zarr_path)
# moving_store = zarr.NestedDirectoryStore(moving_zarr_path)
# fixed_img = zarr.open(fixed_store, mode='r')
# moving_img = zarr.open(moving_store, mode='r')
# print('detecting keypoints')
# t1 = time.time()
# fixed_pts = detect_blobs_parallel(fixed_img, sigma, min_distance, min_intensity, nb_workers, overlap)
# moving_pts = detect_blobs_parallel(moving_img, sigma, min_distance, min_intensity, nb_workers, overlap)
# t2 = time.time()
# print(' found {} keypoints in fixed image'.format(len(fixed_pts)))
# print(' found {} keypoints in moving image'.format(len(moving_pts)))
# print(' Took {0:.2f} seconds'.format(t2-t1))
#
# print('saving blob locations')
# np.save(fixed_pts_path, fixed_pts)
# np.save(moving_pts_path, moving_pts)
# print('saving blob images')
# fixed_blob_arr = mark_pts(np.zeros(fixed_img.shape, dtype='uint8'), fixed_pts)
# moving_blob_arr = mark_pts(np.zeros(moving_img.shape, dtype='uint8'), moving_pts)
# conversion.imsave(fixed_pts_img_path, fixed_blob_arr, compress=1)
# conversion.imsave(moving_pts_img_path, moving_blob_arr, compress=1)
print('loading precalculated keypoints')
fixed_pts = np.load(fixed_pts_path)
moving_pts = np.load(moving_pts_path)
fixed_pts_um = np.asarray(voxel_dimensions) * fixed_pts
moving_pts_um = np.asarray(voxel_dimensions) * moving_pts
print('extracting features')
t1 = time.time()
fixed_features = pcloud.geometric_features(fixed_pts_um, nb_workers)
moving_features = pcloud.geometric_features(moving_pts_um, nb_workers)
t2 = time.time()
print(' Took {0:.2f} seconds'.format(t2 - t1))
print('saving features')
np.save(fixed_features_path, fixed_features)
np.save(moving_features_path, moving_features)
# print('loading precalculated features')
# fixed_features = np.load(fixed_features_path)
# moving_features = np.load(moving_features_path)
# print('performing coarse registration')
#
# print(fixed_img.shape, fixed_pts.shape, fixed_features.shape)
# print(moving_img.shape, moving_pts.shape, moving_features.shape)
# print('transforming the fixed point cloud')
# t1 = time.time()
#
# def transform_pts(theta, pts_um):
# r = rotation_matrix(theta)
# fixed_coords_um_zeroed = pts_um - pts_um.mean(axis=0)
# rotated_coords_um_zeroed = rigid_transformation(np.zeros(3), r, fixed_coords_um_zeroed)
# transformed_coords_um = rotated_coords_um_zeroed + moving_centroid_um
# return transformed_coords_um
#
# transformed_pts_um = transform_pts(thetas, fixed_pts_um)
# t2 = time.time()
# print(' Took {0:.2f} seconds'.format(t2-t1))
# print('matching points')
# t1 = time.time()
# fixed_idx, moving_idx = pcloud.match_pts(transformed_pts_um,
# moving_pts_um,
# fixed_features,
# moving_features,
# max_feat_dist,
# prominence_thresh,
# max_distance,
# nb_workers)
# print(' found {} matches for {} cells'.format(len(fixed_idx), len(fixed_pts)))
#
# print('saving matching indices')
# np.save(fixed_idx_path, fixed_idx)
# np.save(moving_idx_path, moving_idx)
# print('Loading cached matches')
# fixed_idx = np.load(fixed_idx_path)
# moving_idx = np.load(moving_idx_path)
# print('Extracting matched points...')
# fixed_matches = fixed_pts[fixed_idx]
# moving_matches = moving_pts[moving_idx]
# coarse_error = np.linalg.norm(moving_pts_um[moving_idx]-transformed_pts_um[fixed_idx], axis=-1).mean()
# print('Coarse registration error: {} voxels'.format(coarse_error))
# print('Saving match images')
# fixed_matches_img = label_pts(np.zeros(fixed_img.shape, dtype='uint16'), fixed_matches)
# moving_matches_img = label_pts(np.zeros(moving_img.shape, dtype='uint16'), moving_matches)
# conversion.imsave(fixed_matches_img_path, fixed_matches_img, compress=1)
# conversion.imsave(moving_matches_img_path, moving_matches_img, compress=1)
# pcloud.plot_pts(fixed_pts, moving_pts, candid1=fixed_matches, candid2=moving_matches)
# print('estimating affine transformation with RANSAC')
# t1 = time.time()
# ransac, inlier_idx = pcloud.estimate_affine(fixed_matches,
# moving_matches,
# min_samples=min_samples,
# residual_threshold=residual_threshold)
# transformed_matches = pcloud.register_pts(fixed_matches, ransac)
# average_residual = np.linalg.norm(transformed_matches - moving_matches, axis=-1).mean()
# t2 = time.time()
# print(' {} matches before RANSAC'.format(len(fixed_matches)))
# print(' {} matches remain after RANSAC'.format(len(inlier_idx)))
# print(' Ave. residual: {0:.1f} voxels'.format(average_residual))
# print(' Took {0:.2f} seconds'.format(t2 - t1))
# print('estimating non-rigid transformation with RBFs')
# nb_samples = 1000
#
# sample_idx = np.random.choice(len(fixed_matches), nb_samples, replace=False)
# fixed_sample = fixed_matches[sample_idx]
# moving_sample = moving_matches[sample_idx]
#
# correspondence_sample = np.hstack((fixed_sample, moving_sample))
#
# from scipy.interpolate import Rbf
#
# rbf_z = Rbf(correspondence_sample[:, 0], # fixed z
# correspondence_sample[:, 1], # fixed y
# correspondence_sample[:, 2], # fixed x
# correspondence_sample[:, 3], # moving z (output)
    # function='thin_plate',
# epsilon=None,
# smooth=0)
#
# rbf_y = Rbf(correspondence_sample[:, 0], # fixed z
# correspondence_sample[:, 1], # fixed y
# correspondence_sample[:, 2], # fixed x
# correspondence_sample[:, 4], # moving y (output)
    # function='thin_plate',
# epsilon=None,
# smooth=0)
#
# rbf_x = Rbf(correspondence_sample[:, 0], # fixed z
# correspondence_sample[:, 1], # fixed y
# correspondence_sample[:, 2], # fixed x
# correspondence_sample[:, 5], # moving x (output)
    # function='thin_plate',
# epsilon=None,
# smooth=0)
#
# zm = rbf_z(correspondence_sample[:, 0], correspondence_sample[:, 1], correspondence_sample[:, 2])
# ym = rbf_y(correspondence_sample[:, 0], correspondence_sample[:, 1], correspondence_sample[:, 2])
# xm = rbf_x(correspondence_sample[:, 0], correspondence_sample[:, 1], correspondence_sample[:, 2])
# ave_keypt_resid = np.linalg.norm(np.vstack([zm, ym, xm]).T - moving_sample, axis=-1).mean()
# print('RBF average residual at keypoints: {0:.1f} voxels'.format(ave_keypt_resid))
#
# zm = rbf_z(fixed_matches[:,0], fixed_matches[:,1], fixed_matches[:,2])
# ym = rbf_y(fixed_matches[:,0], fixed_matches[:,1], fixed_matches[:,2])
# xm = rbf_x(fixed_matches[:,0], fixed_matches[:,1], fixed_matches[:,2])
# ave_test_resid = np.linalg.norm(np.vstack([zm, ym, xm]).T - moving_matches, axis=-1).mean()
# print('RBF average residual on test set: {0:.1f} voxels'.format(ave_test_resid))
# z = np.linspace(0, fixed_img.shape[0], 10)
# y = np.linspace(0, fixed_img.shape[1], 10)
# x = np.linspace(0, fixed_img.shape[2], 10)
# X, Y, Z = np.meshgrid(x, y, z)
#
# zm = rbf_z(Z, Y, X)
# ym = rbf_y(Z, Y, X)
# xm = rbf_x(Z, Y, X)
#
# print(X.shape, xm.shape)
# print(zm.max(), ym.max(), xm.max())
#
# fig = plt.figure()
# ax = fig.gca(projection='3d')
# ax.view_init(0, 0)
# ax.quiver(X, Y, Z, xm, ym, zm, length=0.1)
# plt.show()
# fixed_inliers = fixed_matches[inlier_idx]
# moving_inliers = moving_matches[inlier_idx]
# pcloud.plot_pts(fixed_pts, moving_pts, candid1=fixed_inliers, candid2=moving_inliers)
# print('registering the moving image')
# t1 = time.time()
# output_img = zarr.open(registered_zarr_path,
# mode='w',
# shape=fixed_img.shape,
# chunks=fixed_img.chunks,
# dtype=fixed_img.dtype,
# compressor=Blosc(cname='zstd', clevel=1, shuffle=Blosc.BITSHUFFLE))
# transformation = partial(pcloud.register_pts, linear_model=ransac)
# register(moving_img, fixed_img, output_img, transformation, nb_workers, batch_size)
# t2 = time.time()
# print(' Took {0:.2f} seconds'.format(t2 - t1))
#
    # print('downsampling the registered zarr array')
# t1 = time.time()
# conversion.downsample_zarr(output_img, (4, 4, 4), preview_zarr_path, 44)
# t2 = time.time()
# print(' Took {0:.2f} seconds'.format(t2 - t1))
#
# print('converting zarr to tiffs')
# t1 = time.time()
# conversion.zarr_to_tifs(preview_zarr_path, preview_tif_path, nb_workers, compression)
# t2 = time.time()
# print(' Took {0:.2f} seconds'.format(t2 - t1))
#
# t3 = time.time()
# print('Total time: {0:.2f} seconds'.format(t3-t0))
if __name__ == '__main__':
main2()
|
from django.conf.urls import patterns, url
urlpatterns = patterns(
'api.views',
url(r'^get_All_SugRec/$', 'get_All_SugRec', name='get_All_SugRec'),
)
|
import sys
check_type = int(sys.argv[1])
input_file = sys.argv[2]
output_file = sys.argv[3]
if check_type == 0:
integer_file = sys.argv[4]
# Check insertion code
input_arr = []
integers = []
with open(input_file) as f:
content = f.readlines()
content = [x.strip() for x in content]
for c in content:
if c != "page over" and c != "file over":
input_arr.append(int(c))
if c == "file over":
break
with open(integer_file) as f:
content = f.readlines()
integers = [int(x.strip()) for x in content]
integers.sort()
    integer_pointer = 0
    for p in range(len(input_arr)):
        # guard against running past the end of the integers list
        if integer_pointer < len(integers) and integers[integer_pointer] <= input_arr[p]:
            input_arr.insert(p, integers[integer_pointer])
            integer_pointer += 1
# Pages now holds inserted array
# print ("Desired File: ")
# print (input_arr)
output_arr = []
with open(output_file) as f:
content = f.readlines()
content = [x.strip() for x in content]
for c in content:
if c != "page over" and c != "file over":
output_arr.append(int(c))
if c == "file over":
break
# print ("Output File: ")
# print (output_arr)
problem = False
problem_index = 0
for index in range(len(input_arr)):
if input_arr[index] != output_arr[index]:
print ("{0}".format(index))
problem = True
problem_index = index
if not problem:
print ("Insertion worked good")
else:
print ("Problem in insertion at index {0}".format(problem_index))
else:
# Check merge sort code
print ("Checking merge sort {0} with {1}".format(input_file, output_file))
array = []
with open(input_file) as f:
content = f.readlines()
content = [x.strip() for x in content]
for c in content:
if c != "page over" and c != "file over":
array.append(int(c))
array.sort()
print (array)
with open(output_file) as f:
content = f.readlines()
content = [x.strip() for x in content]
output_array = []
for c in content:
if c != "page over" and c != "file over":
output_array.append(int(c))
if c == "file over":
break
print (output_array)
problem = False
for index in range(len(array)):
if array[index] != output_array[index]:
problem = True
print ("Expected {0}, found {1}".format(array[index], output_array[index]))
if not problem:
print ("Success!")
|
from gpiozero import Servo
from time import sleep
servo = Servo(11)
while True:
servo.mid()
sleep(0.5)
servo.min()
sleep(0.5)
servo.max()
sleep(0.5)
|
import pytest
from wowDB import WowDB
from wowapi.exceptions import *
from dbConnect import config
from PIL import Image, ImageChops
import io
locale = 'en_US'
region = 'us'
realm = 'Arathor'
bnetcred = config('settings.ini', 'bnetcred')
client_id = bnetcred['client_id']
client_secret = bnetcred['client_secret']
class TestwowDB():
def test_init(self):
'''Test init constructor with existing inputs'''
wow = WowDB(locale, region, realm, client_id, client_secret)
assert wow.locale == locale
assert wow.region == region
assert wow.realm == realm
assert wow.realm_slug == 'arathor'
assert wow.connected_realm_id == 1138
@pytest.mark.parametrize(
"locale,region,realm,client_id,client_secret,expected",
[
(locale,region,realm,'',client_secret,pytest.raises(WowApiOauthException)),
(locale,region,realm,client_id,'',pytest.raises(WowApiOauthException)),
('',region,realm,client_id,client_secret,pytest.raises(ValueError)),
(locale,'',realm,client_id,client_secret,pytest.raises(ValueError)),
(locale,region,'',client_id,client_secret,pytest.raises(ValueError))
])
def test_init_missing_server_info_client_cred(self, locale, region, realm, client_id, client_secret, expected):
        '''Test init constructor for missing locale, region, realm or client credentials.'''
with expected:
assert WowDB(locale, region, realm, client_id, client_secret) is not None
@pytest.mark.parametrize(
"locale,region,realm,client_id,client_secret,expected",
[
(locale,region,realm,'blah',client_secret,pytest.raises(WowApiOauthException)),
(locale,region,realm,client_id,'blah',pytest.raises(WowApiOauthException)),
('blah',region,realm,client_id,client_secret,pytest.raises(WowApiException)),
(locale,'blah',realm,client_id,client_secret,pytest.raises(WowApiException)),
(locale,region,'blah',client_id,client_secret,pytest.raises(WowApiException))
])
def test_init_bad_server_info_client_cred(self, locale, region, realm, client_id, client_secret, expected):
        '''Test init constructor for invalid locale, region, realm or client credentials.'''
with expected:
assert WowDB(locale, region, realm, client_id, client_secret) is not None
def test_findItemName(self):
'''Test getting the item name corresponding to a given item ID'''
wow = WowDB(locale, region, realm, client_id, client_secret)
item_name = wow.findItemName(19019)
assert item_name == 'Thunderfury, Blessed Blade of the Windseeker'
@pytest.mark.parametrize(
"item_id,expected",
[
(10000000,pytest.raises(WowApiException)),
('blah',pytest.raises(WowApiException))
])
def test_bad_findItemName(self, item_id, expected):
'''Test getting the item name with an invalid item ID'''
wow = WowDB(locale, region, realm, client_id, client_secret)
with expected:
assert wow.findItemName(item_id) is not None
def test_findItemPic(self):
        '''Test getting the item pic corresponding to a given item ID'''
wow = WowDB(locale, region, realm, client_id, client_secret)
ba = wow.findItemPic(19019)
image1 = Image.open(io.BytesIO(ba))
image2 = Image.open('test_picture.jpg')
diff = ImageChops.difference(image1, image2)
assert not diff.getbbox()
@pytest.mark.parametrize(
"item_id,expected",
[
(10000000,pytest.raises(WowApiException)),
('blah',pytest.raises(WowApiException))
])
def test_bad_findItemPic(self, item_id, expected):
'''Test getting the item pic with an invalid item ID'''
wow = WowDB(locale, region, realm, client_id, client_secret)
with expected:
assert wow.findItemPic(item_id) is not None
def test_findAuctions(self):
'''Test getting auction house results'''
wow = WowDB(locale, region, realm, client_id, client_secret)
data = wow.findAuctions()
assert 'id' in data[0].keys()
# def test_sortListings(self):
# '''Test sorting auction house results'''
# wow = WowDB(locale, region, realm, client_id, client_secret)
# data = wow.findAuctions()
# sorted_list = wow.sortListings(data)
# # assert all (k in sorted_list[0] for k in (
# # 'item_id',
# # 'quantity',
# # 'avg_unit_price',
# # 'std_dev',
# # 'high_price',
# # 'low_price',
# # 'num'
# # ))
# for i in range(len(sorted_list) - 1):
# assert sorted_list[i]['item_id'] < sorted_list[i+1]['item_id']
|
# Made by Mr. Have fun! Version 0.2
import sys
from com.l2jserver import Config
from com.l2jserver.gameserver.model.quest import State
from com.l2jserver.gameserver.model.quest import QuestState
from com.l2jserver.gameserver.model.quest.jython import QuestJython as JQuest
qn = "292_CrushBrigands"
GOBLIN_NECKLACE = 1483
GOBLIN_PENDANT = 1484
GOBLIN_LORD_PENDANT = 1485
SUSPICIOUS_MEMO = 1486
SUSPICIOUS_CONTRACT = 1487
class Quest (JQuest) :
def __init__(self,id,name,descr):
JQuest.__init__(self,id,name,descr)
self.questItemIds = [GOBLIN_NECKLACE, GOBLIN_PENDANT, GOBLIN_LORD_PENDANT, SUSPICIOUS_CONTRACT, SUSPICIOUS_MEMO]
def onAdvEvent (self,event,npc, player) :
htmltext = event
st = player.getQuestState(qn)
if not st : return
if event == "30532-03.htm" :
st.set("cond","1")
st.setState(State.STARTED)
st.playSound("ItemSound.quest_accept")
elif event == "30532-06.htm" :
st.takeItems(SUSPICIOUS_MEMO,-1)
st.exitQuest(1)
st.playSound("ItemSound.quest_finish")
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not on a quest that involves this NPC, or you don't meet this NPC's minimum quest requirements.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
if npcId != 30532 and id != State.STARTED : return htmltext
if id == State.CREATED :
st.set("cond","0")
if npcId == 30532 :
if st.getInt("cond")==0 :
if player.getRace().ordinal() != 4 :
htmltext = "30532-00.htm"
st.exitQuest(1)
elif player.getLevel() >= 5 :
htmltext = "30532-02.htm"
return htmltext
else:
htmltext = "30532-01.htm"
st.exitQuest(1)
else :
neckl=st.getQuestItemsCount(GOBLIN_NECKLACE)
penda=st.getQuestItemsCount(GOBLIN_PENDANT)
lordp=st.getQuestItemsCount(GOBLIN_LORD_PENDANT)
smemo=st.getQuestItemsCount(SUSPICIOUS_MEMO)
scont=st.getQuestItemsCount(SUSPICIOUS_CONTRACT)
if neckl==penda==lordp==smemo==scont==0 :
htmltext = "30532-04.htm"
else :
st.takeItems(GOBLIN_NECKLACE,-1)
st.takeItems(GOBLIN_PENDANT,-1)
st.takeItems(GOBLIN_LORD_PENDANT,-1)
if scont == 0 :
if smemo == 1 :
htmltext = "30532-08.htm"
elif smemo >= 2 :
htmltext = "30532-09.htm"
else :
htmltext = "30532-05.htm"
else :
htmltext = "30532-10.htm"
st.takeItems(SUSPICIOUS_CONTRACT,-1)
st.giveItems(57,12*neckl+36*penda+33*lordp+100*scont)
elif npcId == 30533 :
if st.getQuestItemsCount(SUSPICIOUS_CONTRACT)==0 :
htmltext = "30533-01.htm"
else :
htmltext = "30533-02.htm"
st.giveItems(57,st.getQuestItemsCount(SUSPICIOUS_CONTRACT)*120)
st.takeItems(SUSPICIOUS_CONTRACT,-1)
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != State.STARTED : return
npcId = npc.getNpcId()
if npcId in [20322, 20323]: item = GOBLIN_NECKLACE
if npcId in [20324, 20327]: item = GOBLIN_PENDANT
if npcId == 20528 : item = GOBLIN_LORD_PENDANT
if st.getInt("cond") :
n = st.getRandom(10)
if n > 5 :
st.giveItems(item,1)
st.playSound("ItemSound.quest_itemget")
elif n > 4 :
if st.getQuestItemsCount(SUSPICIOUS_CONTRACT) == 0 :
if st.getQuestItemsCount(SUSPICIOUS_MEMO) < 3 :
st.giveItems(SUSPICIOUS_MEMO,1)
st.playSound("ItemSound.quest_itemget")
else :
st.giveItems(SUSPICIOUS_CONTRACT,1)
st.takeItems(SUSPICIOUS_MEMO,-1)
st.playSound("ItemSound.quest_middle")
st.set("cond","2")
return
QUEST = Quest(292,qn,"Crush Brigands")
QUEST.addStartNpc(30532)
QUEST.addTalkId(30532)
QUEST.addTalkId(30533)
QUEST.addKillId(20322)
QUEST.addKillId(20323)
QUEST.addKillId(20324)
QUEST.addKillId(20327)
QUEST.addKillId(20528)
|
import logic.game as game
import os
import asyncio
async def main(print_foo=print, input_foo=input):
print_foo('Welcome to Macau Game!')
how_many_players = int(input_foo('How many players will play?: '))
if how_many_players < 2:
raise Exception('Wrong number of players entered!')
how_many_cards = int(input_foo('How many cards on start?: '))
if how_many_cards < 3:
raise Exception('Wrong number of starting cards entered!')
    # use enough 52-card decks to cover twice the initially dealt cards (round up)
    how_many_deck = round(0.5 + ((how_many_players * how_many_cards) * 2) / 52)
print_foo(f'Game will be played with {how_many_deck} decks.')
names = []
for index in range(1, how_many_players + 1):
name = input_foo(f'Enter name for player#{index} : ')
names.append(name)
game_state = game.GameState()
game_state.deck, game_state.table, game_state.players = game.prepare_game(names, how_many_deck, how_many_cards)
os.system('cls||clear')
await game.play_game(game_state)
if __name__ == '__main__':
asyncio.run(main())
|
import torch
import torch.nn as nn
from . import common
from .ResNet import ResNet
def build_model(args):
return MSResNet(args)
class conv_end(nn.Module):
def __init__(self, in_channels=3, out_channels=3, kernel_size=5, ratio=2):
super(conv_end, self).__init__()
modules = [
common.default_conv(in_channels, out_channels, kernel_size),
nn.PixelShuffle(ratio)
]
self.uppath = nn.Sequential(*modules)
def forward(self, x):
return self.uppath(x)
class MSResNet(nn.Module):
def __init__(self, args):
super(MSResNet, self).__init__()
self.rgb_range = args.rgb_range
self.mean = self.rgb_range / 2
self.n_resblocks = args.n_resblocks
self.n_feats = args.n_feats
self.kernel_size = args.kernel_size
self.n_scales = args.n_scales
self.body_models = nn.ModuleList([
ResNet(args, 3, 3),
])
for _ in range(1, self.n_scales):
# self.body_models += [ResNet(args, 6, 3)]
self.body_models.insert(0, ResNet(args, 6, 3))
self.conv_end_models = nn.ModuleList([None])
for _ in range(1, self.n_scales):
self.conv_end_models += [conv_end(3, 12)]
def forward(self, input_pyramid):
scales = range(self.n_scales-1, -1, -1) # 0: fine, 2: coarse
for s in scales:
input_pyramid[s] = input_pyramid[s] - self.mean
output_pyramid = [None] * self.n_scales
input_s = input_pyramid[-1]
for s in scales: # [2, 1, 0]
output_pyramid[s] = self.body_models[s](input_s)
if s > 0:
up_feat = self.conv_end_models[s](output_pyramid[s])
input_s = torch.cat((input_pyramid[s-1], up_feat), 1)
for s in scales:
output_pyramid[s] = output_pyramid[s] + self.mean
return output_pyramid
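# Usage sketch: forward() expects input_pyramid ordered fine-to-coarse
# (index 0 = full resolution). The original dataloader is not shown here,
# so bilinear 2x downsampling is only an assumption.
def build_input_pyramid(images, n_scales):
    import torch.nn.functional as F
    pyramid = [images]
    for _ in range(1, n_scales):
        pyramid.append(F.interpolate(pyramid[-1], scale_factor=0.5,
                                     mode='bilinear', align_corners=False))
    return pyramid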
|
#!/usr/bin/env python
import random
import dijkstra
import goal
import math
import pygame
class PRMGenerator:
"""
Class used to hold methods and variables that are important
for the global path planning problem. This class generates
    the roadmap and finds the shortest path to the goals by
determining intermediate goals for the boids to be attracted to
"""
def __init__(
self,
_startPos,
_endPos,
_obstacleList,
_xSize,
_ySize,
_subGoalNumber,
_screen
):
"""
Creates a new instance of the PRMGenerator. Intializes key variables
used in the generation of the global planner.
@param _startPos The starting position of the boids
@param _endPos The final goal position of the boids
@param _obstacleList The list of obstacles that the global planner needs to avoid
@param _xSize The size of the x component of the screen
@param _ySize The size of the y component of the screen
@param _subGoalNumber The initial number of sample points for the global planner
@param _screen The PyGame screen that the PRMGenerator will draw to
"""
## List of obstacles
self.obstacleList = _obstacleList
## Position of the first goal
self.startPos = _startPos
## Position of the last goal
self.endPos = _endPos
        ## PyGame screen that will be drawn on
self.screen = _screen
## Horizontal size of the PyGame screen
self.xSize = _xSize
## Vertical size of the PyGame screen
self.ySize = _ySize
## Distance that the PRM is willing to check when
## connecting sample points
self.adjacentThresh = 80
## Maximum number of sample points that can be connected
self.numNext = 20
## Number of initial sample points
self.subGoalNumber = _subGoalNumber
## Initial positions of the sample points
self.subGoalPositionList = [self.startPos] + \
self.generatePositionList(self.subGoalNumber) + \
[self.endPos]
## The global roadmap. It will be a graph
## represented as a dictionary
self.roadmap = dict()
## Holds the positions of the intermediate goals that were selected
## by the global path planner
self.gPosList = list()
## Indexes of the goal positions
self.goalNodes = list()
## Dictionary (for easy access) that holds the weights for the nodes
self.omegaDict = dict()
self.filterSubGoal()
self.initOmega(self.subGoalPositionList)
def norm(self, p1, p2):
"""
Gets the distance between p1 and p2
@param p1 The first point
@param p2 The second point
        @return The Euclidean distance from p1 to p2
"""
        return math.sqrt(pow(p1[0] - p2[0], 2) + pow(p1[1] - p2[1], 2))
def generatePositionList(self, num):
"""
Generates the random positions for the sample points
@param num The number of points to generate
@return A list of random subgoals (sample points)
"""
return [
(
self.getRandom(0, self.xSize),
self.getRandom(0, self.ySize)
) for _ in range(num)
]
def initOmega(self, posList):
"""
        Initializes the omega function, which holds the node weights
@param posList The list of positions for the sample points
"""
omega = lambda p: (
sum(
map(
lambda ob: self.norm(p, ob.getPoint(p)),
self.obstacleList
)
)
)
for p in posList:
self.omegaDict[p] = omega(p)
def filterSubGoal(self):
"""
Filters out sample points that are inside of obstacles
or otherwise inadequate
"""
delList = list()
for i in range(len(self.subGoalPositionList)):
for obst in self.obstacleList:
if obst.dynamic is False:
dist = self.norm(
obst.getPoint(self.subGoalPositionList[i]),
self.subGoalPositionList[i]
)
if (dist < 10):
delList += [i]
self.subGoalPositionList = [
self.subGoalPositionList[j] for j in range(
len(self.subGoalPositionList)
) if not j in delList
]
def findNeighbors(self, point):
"""
Finds suitable neighbours for a sample point
"""
minList = list()
obList = filter(
lambda ob: self.norm(
point,
ob.avgPoint
) < self.adjacentThresh + ob.maxDist,
self.obstacleList
)
sGoalList = filter(
lambda g: self.norm(point, g[1]) < self.adjacentThresh,
enumerate(self.subGoalPositionList)
)
searchList = filter(
lambda p: not any(
filter(
lambda ob: ob.detectCollision(
point,
p[1]
),
obList
)
),
sGoalList
)
normList = dict()
maxVal = self.xSize * self.ySize
for i, j in searchList:
normList[i] = self.norm(point, j)
for _ in range(self.numNext):
try:
minPos = [
(i, j) for i, j in searchList if normList[i] == min(normList.values())
][0]
minList += [minPos]
normList[minPos[0]] = maxVal
except IndexError:
                print 'findNeighbors: no candidate found'
return minList
def getRandom(self, p, q):
"""
        Gets a random number and catches the ValueError
        raised when the bounds are invalid (p > q)
        @param p Lower bound for the random number
        @param q Upper bound for the random number
@return A random number
"""
try:
return random.randint(int(p), int(q))
except ValueError:
return int(p)
def generate(self, subGoalRadius):
"""
Generates a series of random points that will become the
roadmap and connects them and weights them into a graph.
If the goal and the starting point are not connected, more
points are added. The roadmap is then searched for the shortest
weighted distance which become the intermediate goals.
@param subGoalRadius The radius of the intermediate goals
@return A list of sub goals from the roadmap connecting
the starting point and the end goal
"""
self.roadmap = dict()
currentPos = 0
self.dontDraw = list()
while len(self.gPosList) <= 1:
for i, j in enumerate(self.subGoalPositionList):
                # add the neighbours of this vertex to its sub-dictionary;
                # neighbours are chosen by Euclidean distance
if i >= currentPos:
self.roadmap[i] = dict()
for p, q in self.findNeighbors(j):
self.roadmap[i][p] = (
1000 * self.norm(j, q) /
min(
self.omegaDict[j],
self.omegaDict[q]
)
)
try:
self.roadmap[p][i] = self.roadmap[i][p]
except KeyError:
pass
self.screen.fill(
(255, 255, 255)
)
self.draw()
map(
lambda o: o.draw(),
self.obstacleList
)
pygame.display.flip()
for e in pygame.event.get():
if e.type is pygame.QUIT:
exit()
self.goalNodes = dijkstra.shortestPath(
self.roadmap,
0,
len(self.subGoalPositionList) - 1
)
self.gPosList = map(
lambda k: self.subGoalPositionList[k],
self.goalNodes
)
if len(self.gPosList) == 1:
currentPos = len(self.subGoalPositionList) - 1
self.dontDraw += [
currentPos,
currentPos - 1,
currentPos + 1,
]
newPosList = self.generatePositionList(
int(self.subGoalNumber / 2) + 1
)
self.initOmega(newPosList)
self.subGoalPositionList[1: -1] += newPosList
self.filterSubGoal()
#print self.roadmap
retList = map(
lambda p: goal.CircleGoal(
subGoalRadius,
p,
self.screen
),
self.gPosList
)
retList[-1].radius = 1 * subGoalRadius
return retList
def getShortestPath(self, roadmap, fromNode, toNode):
return dijkstra.shortestPath(
roadmap,
fromNode,
toNode
)
def draw(self):
"""
Draws the graph
"""
map(
lambda circ: pygame.draw.circle(
self.screen,
(100, 100, 100),
circ,
5
),
self.subGoalPositionList
)
for k in self.roadmap.keys():
for p in self.roadmap[k].keys():
if not k in self.dontDraw and not p in self.dontDraw:
pygame.draw.line(
self.screen,
(0, 0, 0),
self.subGoalPositionList[k],
self.subGoalPositionList[p]
)
def drawPath(self):
"""
Draws the selected shortest path
"""
pygame.draw.lines(
self.screen,
# (255, 255, 255),
(0, 255, 0),
False,
self.gPosList,
2
)
"""
for gPos in self.gPosList:
pygame.draw.circle(
self.screen,
(255, 0, 255),
gPos,
20,
2
)
"""
|
import argparse
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--key', required=True,
help='Slack webhook key')
parser.add_argument('--cpu', required=True, type=float,
help='Limitation for cpu use percentage')
parser.add_argument('--ram', required=True, type=float,
help='Limitation for ram use percentage')
parser.add_argument('--interval', required=True, type=float,
help='Checking loop time')
parser.add_argument('--url', required=False, type=str,
help='check state target url')
args = parser.parse_args()
return args
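# Example invocation (hypothetical script name and values):
#   python monitor.py --key XXXX --cpu 80.0 --ram 90.0 --interval 5
if __name__ == '__main__':
    print(get_args())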
|
# I took this very simple app from the gunicorn website.
# It's how they suggested getting started
def app(environ, start_response):
data = b"Hello, World!\n"
start_response("200 OK", [("Content-Type", "text/plain"), ("Content-Length", str(len(data)))])
return iter([data])
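# To serve this app with gunicorn (assuming this file is saved as myapp.py):
#   gunicorn myapp:app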
|
# --------------------------------------------------------
# Domain adaptation training
# Copyright (c) 2019 valeo.ai
#
# Written by Tuan-Hung Vu
# --------------------------------------------------------
import os
import sys
from pathlib import Path
import os.path as osp
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import torch.optim as optim
from tensorboardX import SummaryWriter
from torch import nn
from torchvision.utils import make_grid
from tqdm import tqdm
import copy
from advent.model.discriminator import get_fc_discriminator
from advent.model.conv_abstract import get_conv_abstract
from advent.utils.func import adjust_learning_rate, adjust_learning_rate_discriminator
from advent.utils.func import loss_calc, bce_loss, mse_loss, reg_loss_calc_ign
from advent.utils.loss import entropy_loss
from advent.utils.simclr_loss import NTXentLoss
from advent.utils.func import prob_2_entropy
from advent.utils.viz_segmask import colorize_mask
import random
def train_advent(model, trainloader, targetloader, cfg):
''' UDA training with advent
'''
# Create the model and start the training.
input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
device = cfg.GPU_ID
num_classes = cfg.NUM_CLASSES
viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
if viz_tensorboard:
writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMENTATION NETWORK
model.train()
model.to(device)
cudnn.benchmark = True
cudnn.enabled = True
# DISCRIMINATOR NETWORK
# feature-level
d_aux = get_fc_discriminator(num_classes=num_classes)
d_aux.train()
d_aux.to(device)
# seg maps, i.e. output, level
d_main = get_fc_discriminator(num_classes=num_classes)
d_main.train()
d_main.to(device)
# OPTIMIZERS
# segnet's optimizer
optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
lr=cfg.TRAIN.LEARNING_RATE,
momentum=cfg.TRAIN.MOMENTUM,
weight_decay=cfg.TRAIN.WEIGHT_DECAY)
# discriminators' optimizers
optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
# interpolate output segmaps
interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
align_corners=True)
interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
align_corners=True)
# labels for adversarial training
source_label = 0
target_label = 1
trainloader_iter = enumerate(trainloader)
targetloader_iter = enumerate(targetloader)
for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
# reset optimizers
optimizer.zero_grad()
optimizer_d_aux.zero_grad()
optimizer_d_main.zero_grad()
# adapt LR if needed
adjust_learning_rate(optimizer, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
# UDA Training
        # only train segnet. Don't accumulate grads in discriminators
for param in d_aux.parameters():
param.requires_grad = False
for param in d_main.parameters():
param.requires_grad = False
# train on source
_, batch = trainloader_iter.__next__()
images_source, labels, _, _ = batch
pred_src_aux, pred_src_main = model(images_source.cuda(device))
if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = interp(pred_src_aux)
loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
else:
loss_seg_src_aux = 0
pred_src_main = interp(pred_src_main)
loss_seg_src_main = loss_calc(pred_src_main, labels, device)
loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
+ cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
loss.backward()
        # adversarial training to fool the discriminator
_, batch = targetloader_iter.__next__()
images, _, _, _ = batch
pred_trg_aux, pred_trg_main = model(images.cuda(device))
if cfg.TRAIN.MULTI_LEVEL:
pred_trg_aux = interp_target(pred_trg_aux)
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_trg_aux, dim=1)))
loss_adv_trg_aux = bce_loss(d_out_aux, source_label)
else:
loss_adv_trg_aux = 0
pred_trg_main = interp_target(pred_trg_main)
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_trg_main, dim=1)))
loss_adv_trg_main = bce_loss(d_out_main, source_label)
loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
+ cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
loss.backward()
# Train discriminator networks
# enable training mode on discriminator networks
for param in d_aux.parameters():
param.requires_grad = True
for param in d_main.parameters():
param.requires_grad = True
# train with source
if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = pred_src_aux.detach()
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_src_aux, dim=1)))
loss_d_aux = bce_loss(d_out_aux, source_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
pred_src_main = pred_src_main.detach()
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_src_main, dim=1)))
loss_d_main = bce_loss(d_out_main, source_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
# train with target
if cfg.TRAIN.MULTI_LEVEL:
pred_trg_aux = pred_trg_aux.detach()
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_trg_aux, dim=1)))
loss_d_aux = bce_loss(d_out_aux, target_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
else:
loss_d_aux = 0
pred_trg_main = pred_trg_main.detach()
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_trg_main, dim=1)))
loss_d_main = bce_loss(d_out_main, target_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
optimizer.step()
if cfg.TRAIN.MULTI_LEVEL:
optimizer_d_aux.step()
optimizer_d_main.step()
current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
'loss_seg_src_main': loss_seg_src_main,
'loss_adv_trg_aux': loss_adv_trg_aux,
'loss_adv_trg_main': loss_adv_trg_main,
'loss_d_aux': loss_d_aux,
'loss_d_main': loss_d_main}
print_losses(current_losses, i_iter)
if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
print('taking snapshot ...')
print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
break
sys.stdout.flush()
# Visualize with tensorboard
if viz_tensorboard:
log_losses_tensorboard(writer, current_losses, i_iter)
if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == cfg.TRAIN.TENSORBOARD_VIZRATE - 1:
draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def train_adaptseg(model, trainloader, targetloader, cfg):
    ''' UDA training with AdaptSeg-style output-space adversarial loss
    '''
# Create the model and start the training.
input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
device = cfg.GPU_ID
num_classes = cfg.NUM_CLASSES
viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
if viz_tensorboard:
writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMENTATION NETWORK
model.train()
model.to(device)
cudnn.benchmark = True
cudnn.enabled = True
# DISCRIMINATOR NETWORK
# feature-level
d_aux = get_fc_discriminator(num_classes=num_classes)
d_aux.train()
d_aux.to(device)
# seg maps, i.e. output, level
d_main = get_fc_discriminator(num_classes=num_classes)
d_main.train()
d_main.to(device)
# OPTIMIZERS
# segnet's optimizer
optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
lr=cfg.TRAIN.LEARNING_RATE,
momentum=cfg.TRAIN.MOMENTUM,
weight_decay=cfg.TRAIN.WEIGHT_DECAY)
# discriminators' optimizers
optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
# interpolate output segmaps
interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
align_corners=True)
interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
align_corners=True)
# labels for adversarial training
source_label = 0
target_label = 1
trainloader_iter = enumerate(trainloader)
targetloader_iter = enumerate(targetloader)
for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
# reset optimizers
optimizer.zero_grad()
optimizer_d_aux.zero_grad()
optimizer_d_main.zero_grad()
# adapt LR if needed
adjust_learning_rate(optimizer, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
# UDA Training
        # only train segnet. Don't accumulate grads in discriminators
for param in d_aux.parameters():
param.requires_grad = False
for param in d_main.parameters():
param.requires_grad = False
# train on source
_, batch = trainloader_iter.__next__()
images_source, labels, _, _ = batch
        # The segmentation net is assumed to return (aux, main) predictions, as in
        # train_advent above; pred_src_aux is required when MULTI_LEVEL is enabled.
        pred_src_aux, pred_src_main = model(images_source.cuda(device))
if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = interp(pred_src_aux)
loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
else:
loss_seg_src_aux = 0
pred_src_main = interp(pred_src_main)
loss_seg_src_main = loss_calc(pred_src_main, labels, device)
loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
+ cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
loss.backward()
        # adversarial training to fool the discriminator
_, batch = targetloader_iter.__next__()
images, _, _, _ = batch
        pred_trg_aux, pred_trg_main = model(images.cuda(device))
if cfg.TRAIN.MULTI_LEVEL:
pred_trg_aux = interp_target(pred_trg_aux)
d_out_aux = d_aux(F.softmax(pred_trg_aux))
loss_adv_trg_aux = mse_loss(d_out_aux, source_label)
else:
loss_adv_trg_aux = 0
pred_trg_main = interp_target(pred_trg_main)
d_out_main = d_main(F.softmax(pred_trg_main))
loss_adv_trg_main = mse_loss(d_out_main, source_label)
loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
+ cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
loss.backward()
# Train discriminator networks
# enable training mode on discriminator networks
for param in d_aux.parameters():
param.requires_grad = True
for param in d_main.parameters():
param.requires_grad = True
# train with source
if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = pred_src_aux.detach()
d_out_aux = d_aux(F.softmax(pred_src_aux))
loss_d_aux = mse_loss(d_out_aux, source_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
pred_src_main = pred_src_main.detach()
d_out_main = d_main(F.softmax(pred_src_main))
loss_d_main = mse_loss(d_out_main, source_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
# train with target
if cfg.TRAIN.MULTI_LEVEL:
pred_trg_aux = pred_trg_aux.detach()
d_out_aux = d_aux(F.softmax(pred_trg_aux))
loss_d_aux = mse_loss(d_out_aux, target_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
else:
loss_d_aux = 0
pred_trg_main = pred_trg_main.detach()
d_out_main = d_main(F.softmax(pred_trg_main))
loss_d_main = mse_loss(d_out_main, target_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
optimizer.step()
if cfg.TRAIN.MULTI_LEVEL:
optimizer_d_aux.step()
optimizer_d_main.step()
current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
'loss_seg_src_main': loss_seg_src_main,
'loss_adv_trg_aux': loss_adv_trg_aux,
'loss_adv_trg_main': loss_adv_trg_main,
'loss_d_aux': loss_d_aux,
'loss_d_main': loss_d_main}
print_losses(current_losses, i_iter)
if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
print('taking snapshot ...')
print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
break
sys.stdout.flush()
# Visualize with tensorboard
if viz_tensorboard:
log_losses_tensorboard(writer, current_losses, i_iter)
if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == cfg.TRAIN.TENSORBOARD_VIZRATE - 1:
draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def train_adaptseg_w_trans(model, trainloader, targetloader, cfg):
    ''' UDA training with AdaptSeg plus a transform-consistency loss
    '''
# Create the model and start the training.
input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
device = cfg.GPU_ID
num_classes = cfg.NUM_CLASSES
viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
if viz_tensorboard:
writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMENTATION NETWORK
model.train()
model.to(device)
cudnn.benchmark = True
cudnn.enabled = True
# DISCRIMINATOR NETWORK
# feature-level
d_aux = get_fc_discriminator(num_classes=num_classes)
d_aux.train()
d_aux.to(device)
# seg maps, i.e. output, level
d_main = get_fc_discriminator(num_classes=num_classes)
d_main.train()
d_main.to(device)
# OPTIMIZERS
# segnet's optimizer
optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
lr=cfg.TRAIN.LEARNING_RATE,
momentum=cfg.TRAIN.MOMENTUM,
weight_decay=cfg.TRAIN.WEIGHT_DECAY)
# discriminators' optimizers
optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
# interpolate output segmaps
interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
align_corners=True)
interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
align_corners=True)
# labels for adversarial training
source_label = 0
target_label = 1
trainloader_iter = enumerate(trainloader)
targetloader_iter = enumerate(targetloader)
criterion = nn.MSELoss()
for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
# reset optimizers
optimizer.zero_grad()
optimizer_d_aux.zero_grad()
optimizer_d_main.zero_grad()
# adapt LR if needed
adjust_learning_rate(optimizer, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
# UDA Training
        # only train segnet. Don't accumulate grads in discriminators
for param in d_aux.parameters():
param.requires_grad = False
for param in d_main.parameters():
param.requires_grad = False
# train on source
_, batch = trainloader_iter.__next__()
images_source, labels, _, _ = batch
pred_src_main, _ = model(images_source.cuda(device))
        # NOTE: this model variant returns (pred, feat) only; pred_src_aux is never
        # assigned, so cfg.TRAIN.MULTI_LEVEL is expected to be False here.
        if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = interp(pred_src_aux)
loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
else:
loss_seg_src_aux = 0
pred_src_main = interp(pred_src_main)
loss_seg_src_main = loss_calc(pred_src_main, labels, device)
loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
+ cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
loss.backward()
        # adversarial training to fool the discriminator
_, batch = targetloader_iter.__next__()
images, images_aug, _, _, _ = batch
pred_trg_main, _ = model(images_aug.cuda(device))
pred_trg_main_real, _ = model(images.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:  # see note above: needs an aux head this model lacks
pred_trg_aux = interp_target(pred_trg_aux)
d_out_aux = d_aux(F.softmax(pred_trg_aux))
loss_adv_trg_aux = mse_loss(d_out_aux, source_label)
else:
loss_adv_trg_aux = 0
pred_trg_main = interp_target(pred_trg_main)
pred_trg_main_real = interp_target(pred_trg_main_real)
d_out_main = d_main(F.softmax(pred_trg_main))
loss_adv_trg_main = mse_loss(d_out_main, source_label)
loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
+ cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
loss.backward()
# Train discriminator networks
# enable training mode on discriminator networks
for param in d_aux.parameters():
param.requires_grad = True
for param in d_main.parameters():
param.requires_grad = True
# train with source
if cfg.TRAIN.MULTI_LEVEL:
pred_src_aux = pred_src_aux.detach()
d_out_aux = d_aux(F.softmax(pred_src_aux))
loss_d_aux = mse_loss(d_out_aux, source_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
pred_src_main = pred_src_main.detach()
d_out_main = d_main(F.softmax(pred_src_main))
loss_d_main = mse_loss(d_out_main, source_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
# train with target
if cfg.TRAIN.MULTI_LEVEL:
pred_trg_aux = pred_trg_aux.detach()
d_out_aux = d_aux(F.softmax(pred_trg_aux))
loss_d_aux = mse_loss(d_out_aux, target_label)
loss_d_aux = loss_d_aux / 2
loss_d_aux.backward()
else:
loss_d_aux = 0
pred_trg_main = pred_trg_main.detach()
d_out_main = d_main(F.softmax(pred_trg_main))
loss_d_main = mse_loss(d_out_main, target_label)
loss_d_main = loss_d_main / 2
loss_d_main.backward()
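        # Consistency regulariser: the prediction on the clean target image is
        # pulled (MSE between softmax maps) towards the detached prediction on
        # the augmented view; the factor 10 is the loss weight.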
loss_consistency = 10 * criterion(F.softmax(pred_trg_main_real), F.softmax(pred_trg_main).detach())
loss_consistency.backward()
optimizer.step()
if cfg.TRAIN.MULTI_LEVEL:
optimizer_d_aux.step()
optimizer_d_main.step()
current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
'loss_seg_src_main': loss_seg_src_main,
'loss_adv_trg_aux': loss_adv_trg_aux,
'loss_adv_trg_main': loss_adv_trg_main,
'loss_d_aux': loss_d_aux,
'loss_d_main': loss_d_main,
'loss_consistency': loss_consistency}
print_losses(current_losses, i_iter)
if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
print('taking snapshot ...')
print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
break
sys.stdout.flush()
# Visualize with tensorboard
if viz_tensorboard:
log_losses_tensorboard(writer, current_losses, i_iter)
if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
draw_in_tensorboard_trans(writer, images_aug, images, i_iter, pred_trg_main, pred_trg_main_real, num_classes, 'T')
draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def label_generator(pred_trg, cls_thresh_tot, cfg, i_iter, tot_iter):
import math
device = cfg.GPU_ID
output_main = F.softmax(pred_trg,dim=1)
amax_output = torch.argmax(output_main, dim=1).type(torch.uint8)
pred_label_trainIDs = amax_output.clone()
pred_label = amax_output.clone()
conf, _ = torch.max(output_main, dim=1)
conf_dict = {k:[] for k in range(cfg.NUM_CLASSES)}
pred_cls_num = torch.zeros(cfg.NUM_CLASSES)
for idx_cls in range(cfg.NUM_CLASSES):
idx_temp = pred_label == idx_cls
pred_cls_num[idx_cls] = pred_cls_num[idx_cls] + torch.sum(idx_temp)
if idx_temp.any():
conf_cls_temp = conf[idx_temp].type(torch.float32)
len_cls_temp = len(conf_cls_temp)
conf_cls = conf_cls_temp[0:len_cls_temp:16]
conf_dict[idx_cls].extend(conf_cls)
cls_thresh = torch.ones(cfg.NUM_CLASSES).type(torch.float32)
cls_sel_size = torch.zeros(cfg.NUM_CLASSES).type(torch.float32)
tgt_dict_tot = {}
for idx_cls in range(cfg.NUM_CLASSES):
        if conf_dict[idx_cls] is not None:
# conf_dict[idx_cls].sort(reverse=True) # sort in descending order
conf_dict[idx_cls], _ = torch.sort(torch.FloatTensor(conf_dict[idx_cls]), descending=True)
len_cls = len(conf_dict[idx_cls])
iter_ratio = 1.0-float(i_iter / (tot_iter+1))
coeff = 0.2 * (iter_ratio ** 0.5)
cls_sel_size[idx_cls] = int(math.floor(len_cls * coeff))
len_cls_thresh = int(cls_sel_size[idx_cls])
if len_cls_thresh != 0:
cls_thresh[idx_cls] = conf_dict[idx_cls][len_cls_thresh-1]
conf_dict[idx_cls] = None
cls_thresh_tot_ = torch.where(cls_thresh_tot==1.0, cls_thresh, 0.9 * cls_thresh_tot + 0.1 * cls_thresh)
cls_thresh_mask = (cls_thresh == 1.0) * (cls_thresh_tot!=1.0)
cls_thresh_tot = torch.where(cls_thresh_mask==1.0, cls_thresh_tot, cls_thresh_tot_)
weighted_prob = output_main / cls_thresh_tot.to(device).unsqueeze(0).unsqueeze(2).unsqueeze(3)
weighted_pred_trainIDs = torch.argmax(weighted_prob, dim=1).type(torch.uint8)
weighted_conf, _ = torch.max(weighted_prob, dim=1)
weighted_pred_trainIDs[weighted_conf < 1] = 255
return weighted_pred_trainIDs, cls_thresh_tot
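# Usage sketch for label_generator (hypothetical shapes; cfg is assumed to
# provide NUM_CLASSES and GPU_ID). The returned pseudo-label map uses 255 as
# the ignore index:
#   pred_trg = torch.randn(1, cfg.NUM_CLASSES, 65, 129)  # raw target logits
#   cls_thresh = torch.ones(cfg.NUM_CLASSES)             # running thresholds
#   pseudo, cls_thresh = label_generator(pred_trg, cls_thresh, cfg,
#                                        i_iter=0, tot_iter=1000)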
def train_selfself(model, trainloader, targetloader, cfg):
    ''' UDA training with self-training, a class-contrastive loss and an adversarial branch
    '''
# Create the model and start the training.
input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
device = cfg.GPU_ID
num_classes = cfg.NUM_CLASSES
viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
if viz_tensorboard:
writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMENTATION NETWORK
model.train()
model.to(device)
# Model clone
model_runner = copy.deepcopy(model)
model_runner.eval()
model_runner.to(device)
conv3x3_tgt = get_conv_abstract(cfg)
conv3x3_tgt.train()
conv3x3_tgt.to(device)
d_main = get_fc_discriminator(num_classes=num_classes)
d_main.train()
d_main.to(device)
tgt_dict_tot = {}
cudnn.benchmark = True
cudnn.enabled = True
# OPTIMIZERS
params = list(model.parameters()) + list(conv3x3_tgt.parameters())
optimizer = optim.SGD(params,
lr=cfg.TRAIN.LEARNING_RATE,
momentum=cfg.TRAIN.MOMENTUM,
weight_decay=cfg.TRAIN.WEIGHT_DECAY)
# interpolate output segmaps
interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
align_corners=True)
interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
align_corners=True)
cls_thresh = torch.ones(num_classes).type(torch.float32)
optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
betas=(0.9, 0.99))
# for round in range(3):
trainloader_iter = enumerate(trainloader)
targetloader_iter = enumerate(targetloader)
source_label = 0
target_label = 1
    tot_iter = len(targetloader)
    for i_iter in tqdm(range(tot_iter)):
# reset optimizers
optimizer.zero_grad()
optimizer_d_main.zero_grad()
# adapt LR if needed
adjust_learning_rate(optimizer, i_iter, cfg)
adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
# train on source
_, batch = trainloader_iter.__next__()
images_source, labels, _, _ = batch
pred_src_main, _ = model(images_source.cuda(device))
pred_src_main = interp(pred_src_main)
loss_seg_src_main = loss_calc(pred_src_main, labels, device)
loss = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
loss.backward()
        # adversarial training to fool the discriminator
_, batch = targetloader_iter.__next__()
images, images_rev, _, _, name, name_next = batch
pred_trg_main, feat_trg_main = model(images.cuda(device))
pred_trg_main = interp_target(pred_trg_main)
with torch.no_grad():
pred_trg_main_run, feat_trg_main_run = model_runner(images.cuda(device))
pred_trg_main_run = interp_target(pred_trg_main_run)
##### Label generator for target #####
        label_trg, cls_thresh = label_generator(pred_trg_main_run, cls_thresh, cfg, i_iter, tot_iter)
##### CE loss for trg
# MRKLD + Ign Region
loss_seg_trg_main = reg_loss_calc_ign(pred_trg_main, label_trg, device)
loss_tgt_seg = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_main
loss_tgt_selfsup, tgt_dict_tot = classSimCLR(feat_trg_main, label_trg, conv3x3_tgt, tgt_dict_tot, device)
loss = loss_tgt_seg + 0.1 * loss_tgt_selfsup
loss.backward()
##### Discriminator #####
for param in d_main.parameters():
param.requires_grad = False
pred_trg_main_rev, _ = model(images_rev.cuda(device))
pred_trg_main_rev = interp_target(pred_trg_main_rev)
d_out_main = d_main(F.softmax(pred_trg_main_rev))
loss_adv_trg_main = mse_loss(d_out_main, source_label)
loss = cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
loss.backward()
for param in d_main.parameters():
param.requires_grad = True
pred_src_main = pred_src_main.detach()
d_out_main = d_main(F.softmax(pred_src_main))
loss_d_main_src = mse_loss(d_out_main, source_label)
loss_d_main = loss_d_main_src / 2
loss_d_main.backward()
pred_trg_main = pred_trg_main.detach()
d_out_main = d_main(F.softmax(pred_trg_main))
loss_d_main_trg = mse_loss(d_out_main, source_label)
loss_d_main = loss_d_main_trg / 2
loss_d_main.backward()
pred_trg_main_rev = pred_trg_main_rev.detach()
d_out_main = d_main(F.softmax(pred_trg_main_rev))
loss_d_main_trg_rev = mse_loss(d_out_main, target_label)
loss_d_main = loss_d_main_trg_rev / 2
loss_d_main.backward()
optimizer.step()
optimizer_d_main.step()
        # parenthesised: i_iter+1 % 500 parses as i_iter + (1 % 500) and never fires
        if (i_iter + 1) % 500 == 0:
            # periodically refresh the frozen pseudo-label runner with current weights
            model_runner = copy.deepcopy(model)
            model_runner.eval()
# for param_fol, param_run in zip(model.parameters(), model_runner.parameters()):
# param_run.data = param_fol.data
current_losses = {'loss_seg_trg_main': loss_seg_trg_main,
'loss_seg_src_main': loss_seg_src_main,
'loss_tgt_selfsup': loss_tgt_selfsup,
'loss_adv_trg_main': loss_adv_trg_main,
'loss_d_main_src': loss_d_main_src,
'loss_d_main_trg': loss_d_main_trg,
'loss_d_main_trg_rev': loss_d_main_trg_rev
}
print_losses(current_losses, i_iter)
if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
print('taking snapshot ...')
print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
torch.save(model_runner.state_dict(), snapshot_dir / f'model_{i_iter}_run.pth')
torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D.pth')
if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
break
sys.stdout.flush()
# Visualize with tensorboard
if viz_tensorboard:
log_losses_tensorboard(writer, current_losses, i_iter)
if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
# draw_in_tensorboard_trg(writer, images, images_rev, label_trg, i_iter, pred_trg_main, pred_trg_main_rev, num_classes, 'T')
draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
# draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
#TODO: self-training here !!!
def train_self_domain_swarp(model, trainloader, targetloader, cfg):
    ''' UDA training with self-training and domain swarping
    '''
# Create the model and start the training.
input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
device = cfg.GPU_ID
num_classes = cfg.NUM_CLASSES
viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
if viz_tensorboard:
writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMENTATION NETWORK
model.train()
model.to(device)
# Model clone
model_runner = copy.deepcopy(model)
model_runner.eval()
model_runner.to(device)
# conv3x3_tgt = get_conv_abstract(cfg)
# conv3x3_tgt.train()
# conv3x3_tgt.to(device)
# d_main = get_fc_discriminator(num_classes=num_classes)
# d_main.train()
# d_main.to(device)
tgt_dict_tot = {}
cudnn.benchmark = True
cudnn.enabled = True
# OPTIMIZERS
# params = list(model.parameters()) + list(conv3x3_tgt.parameters())
optimizer = optim.SGD(model.parameters(),
lr=cfg.TRAIN.LEARNING_RATE,
momentum=cfg.TRAIN.MOMENTUM,
weight_decay=cfg.TRAIN.WEIGHT_DECAY)
# interpolate output segmaps
interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
align_corners=True)
interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
align_corners=True)
cls_thresh = torch.ones(num_classes).type(torch.float32)
# optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
# betas=(0.9, 0.99))
# for round in range(3):
trainloader_iter = enumerate(trainloader)
targetloader_iter = enumerate(targetloader)
source_label = 0
target_label = 1
tot_iter = len(targetloader)
for i_iter in tqdm(range(tot_iter)):
# reset optimizers
optimizer.zero_grad()
# optimizer_d_main.zero_grad()
# adapt LR if needed
adjust_learning_rate(optimizer, i_iter, cfg)
# adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
# train on source
_, batch = trainloader_iter.__next__()
images_source, labels, _, _ = batch
pred_src_main, _ = model(images_source.cuda(device))
pred_src_main = interp(pred_src_main)
loss_seg_src_main = loss_calc(pred_src_main, labels, device)
loss = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
loss.backward()
        # adversarial training to fool the discriminator
_, batch = targetloader_iter.__next__()
images, images_rev, _, _, name, name_next = batch
pred_trg_main, feat_trg_main = model(images.cuda(device))
pred_trg_main = interp_target(pred_trg_main)
with torch.no_grad():
pred_trg_main_run, feat_trg_main_run = model_runner(images.cuda(device))
pred_trg_main_run = interp_target(pred_trg_main_run)
##### Label generator for target #####
label_trg, cls_thresh = label_generator(pred_trg_main_run, cls_thresh, cfg, i_iter, tot_iter)
##### CE loss for trg
# MRKLD + Ign Region
loss_seg_trg_main = reg_loss_calc_ign(pred_trg_main, label_trg, device)
loss_tgt_seg = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_main
##### Domain swarping ####
feat_tgt_swarped, tgt_dict_tot, tgt_label = DomainSwarping(feat_trg_main, label_trg, tgt_dict_tot, device)
ignore_mask = tgt_label == 255
feat_tgt_swarped = ~ignore_mask*feat_tgt_swarped + ignore_mask*feat_trg_main
pred_tgt_swarped = model.classifier_(feat_tgt_swarped)
pred_tgt_swarped = interp_target(pred_tgt_swarped)
loss_seg_trg_swarped = reg_loss_calc_ign(pred_tgt_swarped, label_trg, device)
loss_tgt_seg_swarped = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_swarped
loss_tgt = loss_tgt_seg + loss_tgt_seg_swarped
loss_tgt.backward()
optimizer.step()
current_losses = {'loss_seg_trg_main': loss_seg_trg_main,
'loss_seg_src_main': loss_seg_src_main,
'loss_seg_trg_swarped': loss_seg_trg_swarped
}
print_losses(current_losses, i_iter)
if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
print('taking snapshot ...')
print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
torch.save(model_runner.state_dict(), snapshot_dir / f'model_{i_iter}_run.pth')
if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
break
sys.stdout.flush()
# Visualize with tensorboard
if viz_tensorboard:
log_losses_tensorboard(writer, current_losses, i_iter)
if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
# draw_in_tensorboard_trg(writer, images, images_rev, label_trg, i_iter, pred_trg_main, pred_trg_main_rev, num_classes, 'T')
draw_in_tensorboard(writer, images, label_trg, i_iter, pred_trg_main, pred_tgt_swarped, num_classes, 'T')
# draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def classSimCLR(tgt_feat_warped_cat, tgt_label, conv3x3_tgt, tgt_dict_tot, device):
tgt_feat_warped_cat_abs = conv3x3_tgt(tgt_feat_warped_cat)
# fnt_tgt_feat_warped_cat_abs = tgt_feat_warped_cat_abs[:,:tgt_feat_warped_cat_abs.size(1)//2,:,:]
##### class-wise Simclr #####
tgt_label = F.interpolate(tgt_label.unsqueeze(0).float(), (tgt_feat_warped_cat.size(2), tgt_feat_warped_cat.size(3)), mode='nearest')
tgt_label = tgt_label.long()
tgt_unique = torch.unique(tgt_label)
tgt_dict = {}
tgt_dict_tot_temp = {}
m = nn.AdaptiveAvgPool2d(1)
for label_ele in tgt_unique.tolist():
        if label_ele != 255:
cls_mask = tgt_label == label_ele
masked_tgt = cls_mask * tgt_feat_warped_cat_abs
avg_masked_tgt = m(masked_tgt) * (cls_mask.size(2) * cls_mask.size(3) / cls_mask.sum())
tgt_dict[label_ele] = avg_masked_tgt
# if label_ele in tgt_dict_tot:
# tgt_dict_tot[label_ele] = 0.99 * tgt_dict_tot[label_ele] + 0.01 * tgt_dict[label_ele]
# else:
# tgt_dict_tot[label_ele] = tgt_dict[label_ele]
            if label_ele not in tgt_dict_tot:
                tgt_dict_tot[label_ele] = tgt_dict[label_ele]
            tgt_dict_tot_temp[label_ele] = tgt_dict_tot[label_ele]
            # exponential moving average of the per-class prototype
            tgt_dict_tot[label_ele] = 0.99 * tgt_dict_tot[label_ele] + 0.01 * tgt_dict[label_ele]
tgt_dict = dict(sorted(tgt_dict.items()))
tgt_list = []
for key, value in tgt_dict.items():
tgt_list.append(value)
try:
tgt_cat = torch.cat(tgt_list,dim=0).squeeze().to(device)
tgt_cat = F.normalize(tgt_cat, dim=1)
tgt_dict_tot_temp = dict(sorted(tgt_dict_tot_temp.items()))
tgt_tot_temp_list = []
for key, value in tgt_dict_tot_temp.items():
tgt_tot_temp_list.append(value)
tgt_dict_temp_cat = torch.cat(tgt_tot_temp_list,dim=0).squeeze().to(device)
tgt_dict_temp_cat = F.normalize(tgt_dict_temp_cat, dim=1)
        batch_size = tgt_dict_temp_cat.size(0)
        simloss_xent = NTXentLoss(device, batch_size=batch_size, temperature=0.5, use_cosine_similarity=True)
        cls_sim_loss = simloss_xent(tgt_dict_temp_cat.detach(), tgt_cat)
    except Exception:
        # e.g. torch.cat over an empty list when no valid class is present
        cls_sim_loss = 0
# return src_feat_embedding_loss, tgt_feat_embedding_loss, cls_sim_loss
return cls_sim_loss, tgt_dict_tot
def DomainSwarping(tgt_feat_warped_cat, tgt_label, tgt_dict_tot, device):
alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
alpha = random.choice(alpha_list)
tgt_label = F.interpolate(tgt_label.unsqueeze(0).float(), (tgt_feat_warped_cat.size(2), tgt_feat_warped_cat.size(3)), mode='nearest')
tgt_label = tgt_label.long()
tgt_unique = torch.unique(tgt_label)
tgt_dict = {}
tgt_dict_tot_temp = {}
m = nn.AdaptiveAvgPool2d(1)
new_masked_tgt_init = 0
for label_ele in tgt_unique.tolist():
        if label_ele != 255:
cls_mask = tgt_label == label_ele
masked_tgt = cls_mask * tgt_feat_warped_cat
avg_masked_tgt = m(masked_tgt) * (cls_mask.size(2) * cls_mask.size(3) / cls_mask.sum())
tgt_dict[label_ele] = avg_masked_tgt
            if label_ele not in tgt_dict_tot:
print('new class info inserted')
tgt_dict_tot[label_ele] = tgt_dict[label_ele]
# new_masked_tgt = alpha * tgt_dict_tot[label_ele] + (1-alpha) * masked_tgt
new_masked_tgt = tgt_dict_tot[label_ele]
new_masked_tgt_init += cls_mask * new_masked_tgt
tgt_dict_tot[label_ele] = alpha * tgt_dict_tot[label_ele] + (1-alpha) * tgt_dict[label_ele]
tgt_dict_tot[label_ele] = tgt_dict_tot[label_ele].detach()
return new_masked_tgt_init, tgt_dict_tot, tgt_label
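# DomainSwarping keeps one running prototype per class (an EMA over the
# spatially averaged, label-masked features) and returns a feature map in
# which every labelled pixel is replaced by its class prototype; alpha,
# drawn uniformly from {0.1, ..., 0.9}, weights the stored prototype against
# the current batch average in the EMA update.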
def draw_in_tensorboard_trg(writer, images, images_rev, label_trg, i_iter, pred_main, pred_trg_main_rev, num_classes, type_):
grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
writer.add_image(f'Image - {type_}', grid_image, i_iter)
grid_image = make_grid(images_rev[:3].clone().cpu().data, 3, normalize=True)
writer.add_image(f'images_rev - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
np.argmax(F.softmax(pred_main).cpu().data[0].numpy().transpose(1, 2, 0),
axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Prediction - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
np.argmax(F.softmax(pred_trg_main_rev).cpu().data[0].numpy().transpose(1, 2, 0),
axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Prediction_rev - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(label_trg.cpu().squeeze(), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Labels_IAST - {type_}', grid_image, i_iter)
def draw_in_tensorboard(writer, images, label_trg, i_iter, pred_main, pred_main_swarp, num_classes, type_):
grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
writer.add_image(f'Image - {type_}', grid_image, i_iter)
pred_main_cat = torch.cat((pred_main, pred_main_swarp), dim=-1)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
np.argmax(F.softmax(pred_main_cat).cpu().data[0].numpy().transpose(1, 2, 0),
axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Prediction_main_swarp - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(label_trg.cpu().squeeze(), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Labels_IAST - {type_}', grid_image, i_iter)
# grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
# np.argmax(F.softmax(pred_main_tgt).cpu().data[0].numpy().transpose(1, 2, 0),
# axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
# normalize=False, range=(0, 255))
# writer.add_image(f'Prediction_swarped - {type_}', grid_image, i_iter)
# output_sm = F.softmax(pred_main).cpu().data[0].numpy().transpose(1, 2, 0)
# output_ent = np.sum(-np.multiply(output_sm, np.log2(output_sm)), axis=2,
# keepdims=False)
# grid_image = make_grid(torch.from_numpy(output_ent), 3, normalize=True,
# range=(0, np.log2(num_classes)))
# writer.add_image(f'Entropy - {type_}', grid_image, i_iter)
def draw_in_tensorboard_trans(writer, images, images_real, i_iter, pred_main, pred_main_real, num_classes, type_):
grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
writer.add_image(f'Image - {type_}', grid_image, i_iter)
grid_image = make_grid(images_real[:3].clone().cpu().data, 3, normalize=True)
writer.add_image(f'Image_real - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
np.argmax(F.softmax(pred_main).cpu().data[0].numpy().transpose(1, 2, 0),
axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Prediction - {type_}', grid_image, i_iter)
grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
np.argmax(F.softmax(pred_main_real).cpu().data[0].numpy().transpose(1, 2, 0),
axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
normalize=False, range=(0, 255))
writer.add_image(f'Prediction_real - {type_}', grid_image, i_iter)
# output_sm = F.softmax(pred_main).cpu().data[0].numpy().transpose(1, 2, 0)
# output_ent = np.sum(-np.multiply(output_sm, np.log2(output_sm)), axis=2,
# keepdims=False)
# grid_image = make_grid(torch.from_numpy(output_ent), 3, normalize=True,
# range=(0, np.log2(num_classes)))
# writer.add_image(f'Entropy - {type_}', grid_image, i_iter)
def print_losses(current_losses, i_iter):
list_strings = []
for loss_name, loss_value in current_losses.items():
list_strings.append(f'{loss_name} = {to_numpy(loss_value):.3f} ')
full_string = ' '.join(list_strings)
tqdm.write(f'iter = {i_iter} {full_string}')
def log_losses_tensorboard(writer, current_losses, i_iter):
for loss_name, loss_value in current_losses.items():
writer.add_scalar(f'data/{loss_name}', to_numpy(loss_value), i_iter)
def to_numpy(tensor):
if isinstance(tensor, (int, float)):
return tensor
else:
return tensor.data.cpu().numpy()
def train_domain_adaptation(model, trainloader, targetloader, cfg):
if cfg.TRAIN.DA_METHOD == 'AdvEnt':
train_advent(model, trainloader, targetloader, cfg)
elif cfg.TRAIN.DA_METHOD == 'AdaptSeg':
train_adaptseg(model, trainloader, targetloader, cfg)
elif cfg.TRAIN.DA_METHOD == 'AdaptSeg_w_trans':
train_adaptseg_w_trans(model, trainloader, targetloader, cfg)
elif cfg.TRAIN.DA_METHOD == 'self_domain_swarp':
train_self_domain_swarp(model, trainloader, targetloader, cfg)
else:
        raise NotImplementedError(f"DA method '{cfg.TRAIN.DA_METHOD}' is not yet supported")
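# Entry-point sketch (assumes a yacs-style cfg and prebuilt dataloaders;
# get_deeplab_v2 is a hypothetical constructor, not defined in this file):
#   model = get_deeplab_v2(num_classes=cfg.NUM_CLASSES)
#   train_domain_adaptation(model, source_loader, target_loader, cfg)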
|
#!/usr/bin/env python
"""
Functions and objects to deal with meteoroid orbits
"""
__author__ = "Hadrien A.R. Devillepoix, Trent Jansen-Sturgeon "
__copyright__ = "Copyright 2016-2017, Desert Fireball Network"
__license__ = "MIT"
__version__ = "1.0"
import numpy as np
from numpy.linalg import norm
import matplotlib.pyplot as plt
from astropy import units as u
from astropy.time import Time
from astropy.coordinates import HCRS, ITRS, GCRS
from astropy.utils.iers import IERS_A, IERS_A_URL, IERS
from astropy.utils.data import download_file
from trajectory_utilities import ECEF2LLH, \
    EarthPosition, HCRS2HCI, HCI2HCRS, HCI2ECI_pos, \
    OrbitalElements2PosVel, ECI2ECEF_pos
try:
    iers_a_file = download_file(IERS_A_URL, cache=True)
    iers_a = IERS_A.open(iers_a_file)
    IERS.iers_table = iers_a
except Exception:
    print('IERS_A table is temporarily unavailable')
AU = 1*u.au.to(u.m)
SMA_JUPITER = 5.20336301 * u.au
def tisserand_wrt_jupiter(a, e, i):
'''
    Calculate the Tisserand criterion with respect to Jupiter
'''
T_j = (SMA_JUPITER / a +
2 * np.cos(i) *
np.sqrt(a / SMA_JUPITER * (1 - e**2)))
return T_j
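# Worked example: an Earth-like orbit (a = 1 AU, e = 0.017, i = 0 deg) gives
# T_j = 5.203/1 + 2*cos(0)*sqrt((1/5.203)*(1 - 0.017**2)) ~ 6.08.
# Conventionally, T_j < 3 is taken as comet-like and T_j > 3 as asteroidal.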
# Conversion vector
AU_Deg2m_Rad = np.vstack((AU, 1, np.pi / 180 * np.ones((4, 1))))
Planets = {'Mercury': np.vstack((0.387099, 0.205636, 7.004979, 29.127030, 48.330766, 252.250324)),
'Venus': np.vstack((0.723336, 0.006777, 3.394676, 54.922625, 76.679843, 181.979100)),
'Earth': np.vstack((1.000003, 0.016711, -0.000015, 102.937682, 0.000000, 100.464572)),
'Mars': np.vstack((1.523710, 0.093394, 1.849691, -73.503169, 49.559539, -4.553432)),
'Jupiter': np.vstack((5.202887, 0.048386, 1.304397, -85.745429, 100.473909, 34.396441)),
'Saturn': np.vstack((9.536676,0.053862,2.485992,-21.063546,113.662424,49.954244)),
'Uranus': np.vstack((19.189165,0.047257,0.772638,96.937351,74.016925,313.238105)),
'Neptune': np.vstack((30.069923,0.008590,1.770043,-86.819463,131.784226,-55.120030))}
class OrbitObject(object):
"""
Solar system object osculating orbit
"""
def __init__(self,
orbit_type,
a, e, i, omega, Omega, theta,
ra_corr=np.nan*u.rad, dec_corr=np.nan*u.rad,
v_g=np.nan*u.m/u.second):
self.semi_major_axis = a.to(u.au)
self.eccentricity = e
self.inclination = i.to(u.deg)
self.argument_periapsis = omega.to(u.deg)
self.longitude_ascending_node = Omega.to(u.deg)
self.longitude_perihelion = (self.longitude_ascending_node + self.argument_periapsis) % (360 * u.deg)
self.true_anomaly = theta.to(u.deg)
self.orbit_type = orbit_type
self.perihelion = (1 - self.eccentricity) * self.semi_major_axis
self.aphelion = (1 + self.eccentricity) * self.semi_major_axis
self.corr_radiant_ra = (ra_corr.to(u.deg)) % (360 * u.deg)
self.corr_radiant_dec = dec_corr.to(u.deg)
radiant = HCRS(ra=self.corr_radiant_ra, dec=self.corr_radiant_dec, distance=1.0*u.au)
        ecliptic_radiant = HCRS2HCI(np.vstack(radiant.cartesian.xyz.value))
        self.ecliptic_latitude = np.rad2deg(np.arcsin(ecliptic_radiant[2] / norm(ecliptic_radiant)))*u.deg
self.velocity_g = v_g.to(u.m / u.second)
self.T_j = self.tisserand_criterion_wrt_jupiter()
def tisserand_criterion_wrt_jupiter(self):
'''
        Calculate the Tisserand criterion with respect to Jupiter
'''
return tisserand_wrt_jupiter(self.semi_major_axis, self.eccentricity, self.inclination)
def __str__(self):
return str("Semi-major axis: " + str(self.semi_major_axis) + "\n" +
"Eccentricity: " + str(self.eccentricity) + "\n" +
"Inclination: " + str(self.inclination) + "\n" +
"Argument of Periapsis: " + str(self.argument_periapsis) + "\n" +
"Longitude of Ascending Node: " + str(self.longitude_ascending_node) + "\n" +
"True Anomaly: " + str(self.true_anomaly) + "\n\n" +
"Ra_corrected: " + str(self.corr_radiant_ra) + "\n" +
"Dec_corrected: " + str(self.corr_radiant_dec) + "\n" +
"Vel_g: " + str(self.velocity_g))
'''
Function deliberately outside the native StateVector class to allow multithreaded calls
'''
def random_compute_orbit_ceplecha(sv):
sv.randomize_velocity_vector()
sv.computeOrbit(orbit_computation_method='Ceplecha')
return sv
def random_compute_orbit_integration_EOE(sv):
sv.randomize_velocity_vector()
sv.computeOrbit(orbit_computation_method='integrate_EOE')
return sv
def random_compute_orbit_integration_posvel(sv):
sv.randomize_velocity_vector()
sv.computeOrbit(orbit_computation_method='integrate_posvel')
return sv
def PlotOrbitalElements(COE, t_jd, t_soi, Sol):
Colour = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
    i = 2  # FIXME: colour index is hard-coded
plt.figure()
plt.subplot(321)
plt.plot(t_jd, COE[0] / AU, Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("Semi-major Axis (AU)")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
plt.subplot(322)
plt.plot(t_jd, COE[1], Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("Eccentricity")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
plt.subplot(323)
plt.plot(t_jd, COE[2] * 180 / np.pi, Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("Inclination (deg)")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
plt.subplot(324)
plt.plot(t_jd, COE[3] * 180 / np.pi, Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("Argument of Periapsis (deg)")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
plt.subplot(325)
plt.plot(t_jd, COE[4] * 180 / np.pi, Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("Longitude of the Ascending Node (deg)")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
plt.subplot(326)
plt.plot(t_jd, COE[5] * 180 / np.pi, Colour[i])
plt.axvline(x=t_soi[0], color='b'); plt.grid()
plt.xlabel("Time (JD)"); plt.ylabel("True Anomaly (deg)")
# plt.axvline(x=t_soi[1], color='k')
# plt.axvline(x=t_soi[2], color='c')
if Sol != 'NoSol':
plt.subplot(321)
plt.axhline(Sol.semi_major_axis.value, color='g')
plt.subplot(322)
plt.axhline(Sol.eccentricity, color='g')
plt.subplot(323)
plt.axhline(Sol.inclination.value, color='g')
plt.subplot(324)
plt.axhline(Sol.argument_periapsis.value, color='g')
plt.subplot(325)
plt.axhline(Sol.longitude_ascending_node.value, color='g')
plt.subplot(326)
plt.axhline(Sol.true_anomaly.value, color='g')
plt.show()
def PlotOrbit3D(OrbObjList, t0=2457535.0, Sol='NoSol'):
    ''' 3D Orbit Plot '''
    from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
for OrbObj in OrbObjList:
COE = np.vstack((OrbObj.semi_major_axis.value,
OrbObj.eccentricity,
OrbObj.inclination.value,
OrbObj.argument_periapsis.value,
OrbObj.longitude_ascending_node.value,
OrbObj.true_anomaly.value)) * AU_Deg2m_Rad
COE = COE + np.vstack((np.zeros((5, 100)), np.linspace(0, 2 * np.pi, 100)))
[Pos_HCI, Vel_HCI] = OrbitalElements2PosVel(COE, 'Sun', 'Classical')
ax.plot(Pos_HCI[0]/AU, Pos_HCI[1]/AU, Pos_HCI[2]/AU, color='r', label='Determined Orbit')
''' Plot the planets'''
for Planet in Planets:
COE = Planets[Planet] * AU_Deg2m_Rad
COEs = COE + np.vstack((np.zeros((5, 200)), np.linspace(0, 2 * np.pi, 200)))
[pos, vel] = OrbitalElements2PosVel(COEs, 'Sun', 'Classical')
ax.plot(pos[0]/AU, pos[1]/AU, pos[2]/AU, color='b')
# t_yr = t0 + np.linspace(0, 365.25, 100)
# pos_earth = EarthPosition(t_yr)
# ax.plot(pos_earth[0]/AU, pos_earth[1]/AU, pos_earth[2]/AU,
# color='b', linewidth=2.0, label='Earth')
''' Plot the solution (if given) '''
if Sol != 'NoSol':
Sol_oe = np.vstack((Sol.semi_major_axis.value,
Sol.eccentricity,
Sol.inclination.value,
Sol.argument_periapsis.value,
Sol.longitude_ascending_node.value,
Sol.true_anomaly.value)) * AU_Deg2m_Rad
Sol_oe = Sol_oe + np.vstack((np.zeros((5, 100)), np.linspace(0, 2 * np.pi, 100)))
[pos, vel] = OrbitalElements2PosVel(Sol_oe, 'Sun', 'Classical')
ax.plot(pos[0]/AU, pos[1]/AU, pos[2]/AU, color='g', label='Published Orbit')
plt.legend()
ax.set_xlim([-5, 5])
ax.set_ylim([-5, 5])
ax.set_zlim([-5, 5])
plt.show()
def PlotPerts(Pert):
PPert = np.vstack(Pert).T; t = PPert[0]
plt.figure(figsize=(16,9))
t_rel = t - np.max(t) # Days
plt.plot(t_rel, PPert[1], '-b', linewidth=3.0, label='Earth')
plt.plot(t_rel, PPert[2], '--k', linewidth=3.0, label='Moon')
plt.plot(t_rel, PPert[3], '-.r', linewidth=3.0, label='Sun')
PertJ2 = PPert[4][~np.isnan(PPert[4])]
plt.plot(t_rel[~np.isnan(PPert[4])], PertJ2, ':g', linewidth=3.0, label='J2')
PertDrag = PPert[5][~np.isnan(PPert[5])]
plt.plot(t_rel[~np.isnan(PPert[5])], PertDrag, '-.c', linewidth=3.0, label='Drag')
plt.yscale('log'); plt.grid(True); plt.legend(loc='best')
plt.xlabel('Relative Time [days]'); plt.ylabel('Perturbation Acceleration [m/s^2]')
plt.show()
def PlotIntStep(t):
dt=[]
for k in range(len(t)-1):
dt.append((t[k+1] - t[k]) * 24*60*60)
plt.figure(figsize=(16,9))
t_rel = t - np.max(t) # Days
plt.plot(t_rel[1:], abs(np.array(dt)))
plt.yscale('log'); plt.grid(True)#; plt.legend()
plt.xlabel('Relative Time [days]'); plt.ylabel('Timestep [sec]')
plt.show()
def ThirdBodyPerturbation(Pos, rho, mu):
'''
Pos is the position of the meteoroid (m)
rho is the position of the third body (m)
mu is the standard gravitational parameter of the third body (m3/s2)
'''
# Battin's scalar formula for vector difference
q = np.dot(Pos.T, (Pos - 2 * rho) / (np.dot(rho.T, rho)))
f = (3 * q + 3 * q**2 + q**3) / (1 + (1 + q)**1.5)
# Third body perturbation acceleration (with indirect term)
u = -mu * (Pos + f * rho) / ((norm(Pos - rho))**3)
return u
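# The q/f formulation above is Battin's cancellation-free way of evaluating
# the difference between the third body's pull on the meteoroid and its pull
# on the central body; a naive difference of the two nearly equal
# accelerations loses precision when |Pos| << |rho| (e.g. the Sun as third
# body in a geocentric frame).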
def NRLMSISE_00(pos, time, pos_type='eci'):
    '''
    Courtesy of Ellie Sansom.
    Inputs: position (eci, ecef or llh frame) and time
    Outputs: [temp, atm_pres, atm_density, sos, dyn_vis]
    '''
from nrlmsise_00_header import nrlmsise_input, nrlmsise_output, nrlmsise_flags
from nrlmsise_00 import gtd7
time = Time(time, format='jd', scale='utc')
# Convert ECI to LLH coordinates
if pos_type == 'eci':
Pos_LLH = ECEF2LLH(ECI2ECEF_pos(pos, time))
elif pos_type == 'ecef':
Pos_LLH = ECEF2LLH(pos)
elif pos_type == 'llh':
Pos_LLH = pos
    else:
        raise ValueError(f"NRLMSISE_00 error: invalid pos_type '{pos_type}'")
g_lat = np.rad2deg(Pos_LLH[0][0])
g_long = np.rad2deg(Pos_LLH[1][0])
alt = Pos_LLH[2][0]
# Break up time into year, day of year, and seconds of the day
yDay = time.yday.split(':'); yr = float(yDay[0]); doy = float(yDay[1])
sec = float(yDay[2]) * 60*60 + float(yDay[3]) * 60 + float(yDay[4])
# Assign our variables into the nrmsise inputs
Input = nrlmsise_input(yr, doy, sec, alt/1000, g_lat, g_long)
Output = nrlmsise_output(); Flags = nrlmsise_flags()
# Switches
for i in range(1, 24):
Flags.switches[i]=1
# GTD7 atmospheric model subroutine
gtd7(Input, Flags, Output)
# Temperature at alt [deg K]
T = Output.t[1]
# Molecular number densities [m-3]
He = Output.d[0] # He
O = Output.d[1] # O
N2 = Output.d[2] # N2
O2 = Output.d[3] # O2
Ar = Output.d[4] # Ar
H = Output.d[6] # H
N = Output.d[7] # N
# ano_O = Output.d[8] # Anomalous oxygen
sum_mass = He + O + N2 + O2 + Ar + H + N
# Molar mass
He_mass = 4.0026 # g/mol
O_mass = 15.9994 # g/mol
N2_mass = 28.013 # g/mol
O2_mass = 31.998 # g/mol
Ar_mass = 39.948 # g/mol
H_mass = 1.0079 # g/mol
N_mass = 14.0067 # g/mol
# Molecular weight of air [kg/mol]
mol_mass_air = (He_mass * He + O_mass * O + N2_mass * N2 + O2_mass * O2
+ Ar_mass * Ar + H_mass * H + N_mass * N) / (1000 * sum_mass)
# Total mass density [kg*m-3]
po = Output.d[5] * 1000
Ru = 8.3144621 # Universal gas constant [J/(K*mol)]
R = Ru / mol_mass_air # Individual gas constant [J/(kg*K)] #287.058
# Ideal gas law
atm_pres = po * T * R
# Speed of sound in atm
sos = 331.3 * np.sqrt(1 + T / 273.15)
# Dynamic viscosity (http://en.wikipedia.org/wiki/Viscosity)
C = 120 #Sutherland's constant for air [deg K]
mu_ref = 18.27e-6 # Reference viscosity [[mu_Pa s] * e-6]
T_ref = 291.15 # Reference temperature [deg K]
dyn_vis = mu_ref * (T_ref + C) / (T + C) * (T / T_ref)**1.5
return T, atm_pres, po, sos, dyn_vis
# def compute_infinity_radiant(stateVec):
# ''' This method computing the apparent radiant, it doesn't consider the zenith attraction '''
# Pos_geo = stateVec.position
# Vel_geo = stateVec.vel_xyz
# t0 = stateVec.epoch
# # Compute radiant (apparent ORIGIN of meteoroid)
# Vel_eci = ECEF2ECI(Pos_geo, Vel_geo, t0)[1]
# ra_eci = np.arctan2(-Vel_eci[1], -Vel_eci[0])
# dec_eci = np.arcsin(-Vel_eci[2] / norm(Vel_eci))
# # ^-- redundant information. Already have it in metadata
# return ra_eci, dec_eci
def compute_cartesian_velocities_from_radiant(stateVec):
'''
Turn apparent ecef radiant and velocity into cartesian velocity component
'''
vel_geo = -(stateVec.velocity_inf *
np.vstack((np.cos(np.deg2rad(stateVec.ra_ecef_inf)) * np.cos(np.deg2rad(stateVec.dec_ecef_inf)),
np.sin(np.deg2rad(stateVec.ra_ecef_inf)) * np.cos(np.deg2rad(stateVec.dec_ecef_inf)),
np.sin(np.deg2rad(stateVec.dec_ecef_inf)))))
return vel_geo
def SimilarityCriterion(COE1, COE2, method='SH'):
'''
Southworth & Hawkins similarity criterion (1963); or
Drummond's similarity criterion (1981); or
Jopek's similarity criterion (1993).
'''
if type(COE1) == np.ndarray:
a1 = COE1[0]/AU; a2 = COE2[0]/AU # [AU]
e1 = COE1[1]; e2 = COE2[1] # []
i1 = COE1[2]; i2 = COE2[2] # [rad]
w1 = COE1[3]; w2 = COE2[3] # [rad]
W1 = COE1[4]; W2 = COE2[4] # [rad]
else:
a1 = COE1.semi_major_axis.value; a2 = COE2.semi_major_axis.value # [AU]
e1 = COE1.eccentricity; e2 = COE2.eccentricity # []
i1 = COE1.inclination.to(u.rad).value; i2 = COE2.inclination.to(u.rad).value # [rad]
w1 = COE1.argument_periapsis.to(u.rad).value; w2 = COE2.argument_periapsis.to(u.rad).value # [rad]
W1 = COE1.longitude_ascending_node.to(u.rad).value; W2 = COE2.longitude_ascending_node.to(u.rad).value # [rad]
q1 = a1 * (1 - e1) # [AU]
q2 = a2 * (1 - e2) # [AU]
# Angle between the orbital planes (I21)
var = (2 * np.sin((i2 - i1) / 2))**2 + np.sin(i1) * np.sin(i2) * (2 * np.sin((W2 - W1) / 2))**2
I21 = 2 * np.arcsin(np.sqrt(var) / 2)
if method == 'SH':
# Difference between orbits longitude of perihelion (pi21)
pi21 = w2 - w1 + 2 * np.arcsin(np.cos((i2 + i1) / 2) * np.sin((W2 - W1) / 2) / np.cos(I21 / 2))
Similarity2 = (e2 - e1)**2 + (q2 - q1)**2 + var + (((e2 + e1) / 2) * (2 * np.sin(pi21 / 2)))**2
Similarity = np.sqrt(Similarity2)
elif method == 'D':
# Angle between the orbital lines of apsides (theta21)
# l1 = W1 + np.arcsin(np.cos(i1) * np.tan(w1)); b1 = np.arcsin(np.sin(i1) * np.sin(w1))
# l2 = W2 + np.arcsin(np.cos(i2) * np.tan(w2)); b2 = np.arcsin(np.sin(i2) * np.sin(w2))
l1 = W1 + np.arctan(np.cos(i1) * np.tan(w1)); b1 = np.arcsin(np.sin(i1) * np.sin(w1))
l2 = W2 + np.arctan(np.cos(i2) * np.tan(w2)); b2 = np.arcsin(np.sin(i2) * np.sin(w2))
theta21 = np.arccos(np.sin(b1) * np.sin(b2) + np.cos(b1) * np.cos(b2) * np.cos(l2 - l1))
Similarity2 = ((e2 - e1) / (e2 + e1))**2 + ((q2 - q1) / (q2 + q1))**2 + \
(I21 / np.pi)**2 + ((e2 + e1) / 2)**2 * (theta21 / np.pi)**2
Similarity = np.sqrt(Similarity2)
elif method == 'H':
# Difference between orbits longitude of perihelion (pi21)
pi21 = w2 - w1 + 2 * np.arcsin(np.cos((i2 + i1) / 2) * np.sin((W2 - W1) / 2) / np.cos(I21 / 2))
Similarity2 = (e2 - e1)**2 + ((q2 - q1) / (q2 + q1))**2 + var + \
(((e2 + e1) / 2) * (2 * np.sin(pi21 / 2)))**2
Similarity = np.sqrt(Similarity2)
return Similarity
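# Interpretation sketch: smaller D means more similar orbits. For meteoroid
# stream association, cutoffs of roughly D_SH < 0.1-0.2 are commonly quoted,
# though the appropriate threshold depends on the sample and the criterion.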
def generate_ephemeris(pos_hci, t_jd):
    # Save the datetime (build the Time object once)
    t = Time(t_jd, format='jd', scale='utc')
    ephem_dict = {'datetime': t.isot, 'MJD': t.mjd}
# distance to sun
ephem_dict['distance_to_sun'] = norm(pos_hci, axis=0) / 1000 #km
# Convert to eci coordinates
pos_eci = HCI2ECI_pos(pos_hci, t_jd)
ephem_dict['pos_eci_x'] = pos_eci[0]
ephem_dict['pos_eci_y'] = pos_eci[1]
ephem_dict['pos_eci_z'] = pos_eci[2]
pos_hcrs = HCI2HCRS(pos_hci)
# Calculate phase angle
ephem_dict['phase_angle'] = np.rad2deg(np.arccos(np.sum(pos_hcrs * pos_eci, axis=0)
/ (norm(pos_hcrs, axis=0) * norm(pos_eci, axis=0))))
# Calculate elongation angle
pos_sun = pos_eci - pos_hcrs
ephem_dict['elongation_angle'] = np.rad2deg(np.arccos(np.sum(pos_sun * pos_eci, axis=0)
/ (norm(pos_sun, axis=0) * norm(pos_eci, axis=0))))
# Calculate ephemeris
dist = norm(pos_eci, axis=0) #m
ephem_dict['ra'] = np.rad2deg(np.arctan2(pos_eci[1], pos_eci[0]))%360 #deg
ephem_dict['dec'] = np.rad2deg(np.arcsin(pos_eci[2] / dist)) #deg
ephem_dict['distance_to_earth'] = norm(pos_eci, axis=0) / 1000 #km
return ephem_dict
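# Usage sketch: pos_hci is a 3xN array of heliocentric positions in metres
# and t_jd the matching Julian dates; each value in the returned dict is a
# length-N array.
#   ephem = generate_ephemeris(pos_hci, t_jd)
#   print(ephem['ra'], ephem['dec'], ephem['distance_to_earth'])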
|
user = {
'basket': [1, 2, 3],
'greet': 'hello',
'age': 20
}
user2 = {
'basket': [10, 20, 30],
'greet': 'hello',
'age': 30
}
print(user['basket'])
print(user2['basket'])
print(user.get('name')) # None
print(user.get('name', 'Elena')) # default = 'Elena'; print: Elena
checker = 'basket' in user2.keys()
print(checker) # True
print(30 in user2.values()) # True
# dict_items([('basket', [1, 2, 3]), ('greet', 'hello'), ('age', 20)])
print(user.items())
user3 = user2 # user3 points to the same dict object as user2
user3.clear()
print(user3) # {}
print(user2) # {}
user4 = user.copy()
user4.clear()
print(user4) # {}
print(user) # {'basket': [1, 2, 3], 'greet': 'hello', 'age': 20}
print(user.pop('age')) # 20
print(user) # {'basket': [1, 2, 3], 'greet': 'hello'}
user.popitem()
print(user) # {'basket': [1, 2, 3]}
user.update({'age': 90})
print(user) # {'basket': [1, 2, 3], 'age': 90}
user.update({'name': 'Elena'})
print(user) # {'basket': [1, 2, 3], 'age': 90, 'name': 'Elena'}
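# Merging dicts: {**a, **b} builds a new dict; right-most wins on duplicate
# keys. (user2 was cleared above, so the merge just copies user here.)
merged = {**user, **user2}
print(merged) # {'basket': [1, 2, 3], 'age': 90, 'name': 'Elena'}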
|
import requests
def get_quotes():
    url = 'http://quotes.stormconsultancy.co.uk/random.json'
    response = requests.get(url)
    quote = response.json()
return quote
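# Example usage (requires network access; prints the parsed JSON object):
if __name__ == '__main__':
    print(get_quotes())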
|
# -*- coding: utf-8 -*-
"""create cities table
Revision ID: 333c6c0afa8f
Revises: 3741581c7fc4
Create Date: 2017-10-08 15:29:44.416140
"""
# revision identifiers, used by Alembic.
revision = '333c6c0afa8f'
down_revision = '3741581c7fc4'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from sqlalchemy import String, Integer
def downgrade():
op.drop_table('cities')
def upgrade():
op.create_table(
'cities',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('symbol_code', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('search_heb', sa.String(length=100), nullable=False),
sa.Column('search_eng', sa.String(length=100), nullable=True),
sa.Column('search_priority', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('search_heb')
)
cities_table = table('cities',
column('search_priority', Integer()),
column('search_heb', String()),
column('name', String()),
column('symbol_code', Integer()),
)
op.bulk_insert(cities_table,
[{"search_priority": 1, "search_heb": u"לא רשום", "symbol_code": 0, "name": u"לא רשום"},
{"search_priority": 0, "search_heb": u"שחר", "symbol_code": 7, "name": u"שחר"},
{"search_priority": 0, "search_heb": u"תירוש", "symbol_code": 10, "name": u"תירוש"},
{"search_priority": 1, "search_heb": u"ניר ח\"ן", "symbol_code": 11, "name": u"ניר ח\"ן"},
{"search_priority": 0, "search_heb": u"חצבה", "symbol_code": 13, "name": u"חצבה"},
{"search_priority": 0, "search_heb": u"נועם", "symbol_code": 15, "name": u"נועם"},
{"search_priority": 1, "search_heb": u"בית ניר", "symbol_code": 16, "name": u"בית ניר"},
{"search_priority": 1, "search_heb": u"שדה משה", "symbol_code": 18, "name": u"שדה משה"},
{"search_priority": 1, "search_heb": u"באר אורה", "symbol_code": 21, "name": u"באר אורה"},
{"search_priority": 1, "search_heb": u"מקווה ישראל", "symbol_code": 22, "name": u"מקווה ישראל"},
{"search_priority": 0, "search_heb": u"אמציה", "symbol_code": 23, "name": u"אמציה"},
{"search_priority": 0, "search_heb": u"לכיש", "symbol_code": 24, "name": u"לכיש"},
{"search_priority": 1, "search_heb": u"ראש פינה", "symbol_code": 26, "name": u"ראש פינה"},
{"search_priority": 1, "search_heb": u"שדות מיכה", "symbol_code": 27, "name": u"שדות מיכה"},
{"search_priority": 1, "search_heb": u"מזכרת בתיה", "symbol_code": 28, "name": u"מזכרת בתיה"},
{"search_priority": 1, "search_heb": u"יסוד המעלה", "symbol_code": 29, "name": u"יסוד המעלה"},
{"search_priority": 0, "search_heb": u"אופקים", "symbol_code": 31, "name": u"אופקים"},
{"search_priority": 0, "search_heb": u"עוצם", "symbol_code": 32, "name": u"עוצם"},
{"search_priority": 1, "search_heb": u"בת שלמה", "symbol_code": 33, "name": u"בת שלמה"},
{"search_priority": 0, "search_heb": u"גדות", "symbol_code": 35, "name": u"גדות"},
{"search_priority": 1, "search_heb": u"שדה דוד", "symbol_code": 36, "name": u"שדה דוד"},
{"search_priority": 0, "search_heb": u"איתן", "symbol_code": 37, "name": u"איתן"},
{"search_priority": 1, "search_heb": u"כרי דשא", "symbol_code": 38, "name": u"כרי דשא"},
{"search_priority": 0, "search_heb": u"גפן", "symbol_code": 39, "name": u"גפן"},
{"search_priority": 0, "search_heb": u"אליכין", "symbol_code": 41, "name": u"אליכין"},
{"search_priority": 0, "search_heb": u"מטולה", "symbol_code": 43, "name": u"מטולה"},
{"search_priority": 0, "search_heb": u"זוהר", "symbol_code": 44, "name": u"זוהר"},
{"search_priority": 0, "search_heb": u"יבנאל", "symbol_code": 46, "name": u"יבנאל"},
{"search_priority": 1, "search_heb": u"כפר תבור", "symbol_code": 47, "name": u"כפר תבור"},
{"search_priority": 0, "search_heb": u"מנחמיה", "symbol_code": 48, "name": u"מנחמיה"},
{"search_priority": 0, "search_heb": u"אילניה", "symbol_code": 49, "name": u"אילניה"},
{"search_priority": 0, "search_heb": u"לוזית", "symbol_code": 52, "name": u"לוזית"},
{"search_priority": 0, "search_heb": u"עתלית", "symbol_code": 53, "name": u"עתלית"},
{"search_priority": 0, "search_heb": u"נוגה", "symbol_code": 55, "name": u"נוגה"},
{"search_priority": 1, "search_heb": u"כנרת )קבוצה(", "symbol_code": 57, "name": u"כנרת )קבוצה("},
{"search_priority": 0, "search_heb": u"מצפה", "symbol_code": 58, "name": u"מצפה"},
{"search_priority": 0, "search_heb": u"נחושה", "symbol_code": 59, "name": u"נחושה"},
{"search_priority": 1, "search_heb": u"דגניה א'", "symbol_code": 62, "name": u"דגניה א'"},
{"search_priority": 1, "search_heb": u"כנרת )מושבה(", "symbol_code": 63, "name": u"כנרת )מושבה("},
{"search_priority": 1, "search_heb": u"יד רמב\"ם", "symbol_code": 64, "name": u"יד רמב\"ם"},
{"search_priority": 0, "search_heb": u"מגדל", "symbol_code": 65, "name": u"מגדל"},
{"search_priority": 1, "search_heb": u"מרחביה )קיבוץ(", "symbol_code": 66, "name": u"מרחביה )קיבוץ("},
{"search_priority": 1, "search_heb": u"אור הנר", "symbol_code": 67, "name": u"אור הנר"},
{"search_priority": 1, "search_heb": u"ניר עוז", "symbol_code": 69, "name": u"ניר עוז"},
{"search_priority": 0, "search_heb": u"אשדוד", "symbol_code": 70, "name": u"אשדוד"},
{"search_priority": 0, "search_heb": u"אשבול", "symbol_code": 71, "name": u"אשבול"},
{"search_priority": 1, "search_heb": u"גן שמואל", "symbol_code": 72, "name": u"גן שמואל"},
{"search_priority": 1, "search_heb": u"עין הוד", "symbol_code": 74, "name": u"עין הוד"},
{"search_priority": 1, "search_heb": u"כפר גלעדי", "symbol_code": 76, "name": u"כפר גלעדי"},
{"search_priority": 1, "search_heb": u"איילת השחר", "symbol_code": 77, "name": u"איילת השחר"},
{"search_priority": 1, "search_heb": u"קרית ענבים", "symbol_code": 78, "name": u"קרית ענבים"},
{"search_priority": 1, "search_heb": u"דגניה ב'", "symbol_code": 79, "name": u"דגניה ב'"},
{"search_priority": 0, "search_heb": u"נהלל", "symbol_code": 80, "name": u"נהלל"},
{"search_priority": 2, "search_heb": u"עין חרוד )מאוחד(", "symbol_code": 82,
"name": u"עין חרוד )מאוחד("},
{"search_priority": 1, "search_heb": u"תל יוסף", "symbol_code": 84, "name": u"תל יוסף"},
{"search_priority": 1, "search_heb": u"כפר יחזקאל", "symbol_code": 85, "name": u"כפר יחזקאל"},
{"search_priority": 0, "search_heb": u"גבע", "symbol_code": 86, "name": u"גבע"},
{"search_priority": 2, "search_heb": u"כרם בן שמן", "symbol_code": 88, "name": u"כרם בן שמן"},
{"search_priority": 2, "search_heb": u"עין חרוד )איחוד(", "symbol_code": 89,
"name": u"עין חרוד )איחוד("},
{"search_priority": 0, "search_heb": u"חפצי-בה", "symbol_code": 90, "name": u"חפצי-בה"},
{"search_priority": 0, "search_heb": u"גיניגר", "symbol_code": 92, "name": u"גיניגר"},
{"search_priority": 0, "search_heb": u"בלפוריה", "symbol_code": 94, "name": u"בלפוריה"},
{"search_priority": 1, "search_heb": u"בית אלפא", "symbol_code": 95, "name": u"בית אלפא"},
{"search_priority": 0, "search_heb": u"יגור", "symbol_code": 96, "name": u"יגור"},
{"search_priority": 1, "search_heb": u"מרחביה )מושב(", "symbol_code": 97, "name": u"מרחביה )מושב("},
{"search_priority": 1, "search_heb": u"כפר מל\"ל", "symbol_code": 98, "name": u"כפר מל\"ל"},
{"search_priority": 1, "search_heb": u"מצפה רמון", "symbol_code": 99, "name": u"מצפה רמון"},
{"search_priority": 1, "search_heb": u"מאיר שפיה", "symbol_code": 102, "name": u"מאיר שפיה"},
{"search_priority": 1, "search_heb": u"תל עדשים", "symbol_code": 103, "name": u"תל עדשים"},
{"search_priority": 0, "search_heb": u"מזרע", "symbol_code": 104, "name": u"מזרע"},
{"search_priority": 1, "search_heb": u"כפר גדעון", "symbol_code": 106, "name": u"כפר גדעון"},
{"search_priority": 1, "search_heb": u"כפר סילבר", "symbol_code": 107, "name": u"כפר סילבר"},
{"search_priority": 2, "search_heb": u"כפר חסידים א'", "symbol_code": 112, "name": u"כפר חסידים א'"},
{"search_priority": 0, "search_heb": u"אדירים", "symbol_code": 113, "name": u"אדירים"},
{"search_priority": 0, "search_heb": u"חופית", "symbol_code": 115, "name": u"חופית"},
{"search_priority": 1, "search_heb": u"רמת ישי", "symbol_code": 122, "name": u"רמת ישי"},
{"search_priority": 0, "search_heb": u"שריד", "symbol_code": 126, "name": u"שריד"},
{"search_priority": 1, "search_heb": u"רמת רחל", "symbol_code": 127, "name": u"רמת רחל"},
{"search_priority": 1, "search_heb": u"גת רימון", "symbol_code": 128, "name": u"גת רימון"},
{"search_priority": 1, "search_heb": u"משמר העמק", "symbol_code": 130, "name": u"משמר העמק"},
{"search_priority": 1, "search_heb": u"כפר ברוך", "symbol_code": 132, "name": u"כפר ברוך"},
{"search_priority": 0, "search_heb": u"גבת", "symbol_code": 133, "name": u"גבת"},
{"search_priority": 0, "search_heb": u"יפעת", "symbol_code": 134, "name": u"יפעת"},
{"search_priority": 1, "search_heb": u"רמת דוד", "symbol_code": 135, "name": u"רמת דוד"},
{"search_priority": 1, "search_heb": u"עין שמר", "symbol_code": 139, "name": u"עין שמר"},
{"search_priority": 1, "search_heb": u"כפר יהושע", "symbol_code": 140, "name": u"כפר יהושע"},
{"search_priority": 0, "search_heb": u"ברק", "symbol_code": 141, "name": u"ברק"},
{"search_priority": 1, "search_heb": u"שדה יעקב", "symbol_code": 142, "name": u"שדה יעקב"},
{"search_priority": 1, "search_heb": u"בית זרע", "symbol_code": 143, "name": u"בית זרע"},
{"search_priority": 1, "search_heb": u"גן שלמה", "symbol_code": 144, "name": u"גן שלמה"},
{"search_priority": 0, "search_heb": u"גדיש", "symbol_code": 145, "name": u"גדיש"},
{"search_priority": 0, "search_heb": u"דבורה", "symbol_code": 146, "name": u"דבורה"},
{"search_priority": 1, "search_heb": u"גבעת ברנר", "symbol_code": 147, "name": u"גבעת ברנר"},
{"search_priority": 1, "search_heb": u"תל מונד", "symbol_code": 154, "name": u"תל מונד"},
{"search_priority": 1, "search_heb": u"באר טוביה", "symbol_code": 155, "name": u"באר טוביה"},
{"search_priority": 0, "search_heb": u"עיינות", "symbol_code": 156, "name": u"עיינות"},
{"search_priority": 1, "search_heb": u"עין ורד", "symbol_code": 157, "name": u"עין ורד"},
{"search_priority": 0, "search_heb": u"נען", "symbol_code": 158, "name": u"נען"},
{"search_priority": 1, "search_heb": u"בית חנן", "symbol_code": 159, "name": u"בית חנן"},
{"search_priority": 0, "search_heb": u"חולדה", "symbol_code": 160, "name": u"חולדה"},
{"search_priority": 0, "search_heb": u"חירות", "symbol_code": 162, "name": u"חירות"},
{"search_priority": 0, "search_heb": u"תימורים", "symbol_code": 163, "name": u"תימורים"},
{"search_priority": 0, "search_heb": u"מלאה", "symbol_code": 164, "name": u"מלאה"},
{"search_priority": 1, "search_heb": u"ניר יפה", "symbol_code": 165, "name": u"ניר יפה"},
{"search_priority": 1, "search_heb": u"גן יבנה", "symbol_code": 166, "name": u"גן יבנה"},
{"search_priority": 1, "search_heb": u"עין החורש", "symbol_code": 167, "name": u"עין החורש"},
{"search_priority": 1, "search_heb": u"כפר יונה", "symbol_code": 168, "name": u"כפר יונה"},
{"search_priority": 1, "search_heb": u"כפר יעבץ", "symbol_code": 170, "name": u"כפר יעבץ"},
{"search_priority": 0, "search_heb": u"פרדסיה", "symbol_code": 171, "name": u"פרדסיה"},
{"search_priority": 2, "search_heb": u"גבעת חיים )מאוחד(", "symbol_code": 173,
"name": u"גבעת חיים )מאוחד("},
{"search_priority": 0, "search_heb": u"נטעים", "symbol_code": 174, "name": u"נטעים"},
{"search_priority": 0, "search_heb": u"אביחיל", "symbol_code": 175, "name": u"אביחיל"},
{"search_priority": 0, "search_heb": u"אפיקים", "symbol_code": 176, "name": u"אפיקים"},
{"search_priority": 1, "search_heb": u"כפר ביל\"ו", "symbol_code": 177, "name": u"כפר ביל\"ו"},
{"search_priority": 1, "search_heb": u"רמת יוחנן", "symbol_code": 178, "name": u"רמת יוחנן"},
{"search_priority": 1, "search_heb": u"אבן יהודה", "symbol_code": 182, "name": u"אבן יהודה"},
{"search_priority": 0, "search_heb": u"ירקונה", "symbol_code": 183, "name": u"ירקונה"},
{"search_priority": 1, "search_heb": u"רמת הכובש", "symbol_code": 184, "name": u"רמת הכובש"},
{"search_priority": 0, "search_heb": u"נעורים", "symbol_code": 186, "name": u"נעורים"},
{"search_priority": 1, "search_heb": u"כפר הס", "symbol_code": 187, "name": u"כפר הס"},
{"search_priority": 3, "search_heb": u"אשדות יעקב )מאוחד(", "symbol_code": 188,
"name": u"אשדות יעקב )מאוחד("},
{"search_priority": 1, "search_heb": u"כפר פינס", "symbol_code": 189, "name": u"כפר פינס"},
{"search_priority": 1, "search_heb": u"כפר ויתקין", "symbol_code": 190, "name": u"כפר ויתקין"},
{"search_priority": 1, "search_heb": u"הדר עם", "symbol_code": 191, "name": u"הדר עם"},
{"search_priority": 1, "search_heb": u"כפר החורש", "symbol_code": 192, "name": u"כפר החורש"},
{"search_priority": 1, "search_heb": u"כפר חיים", "symbol_code": 193, "name": u"כפר חיים"},
{"search_priority": 1, "search_heb": u"משמר השרון", "symbol_code": 194, "name": u"משמר השרון"},
{"search_priority": 0, "search_heb": u"קדימה-צורן", "symbol_code": 195, "name": u"קדימה-צורן"},
{"search_priority": 0, "search_heb": u"גיבתון", "symbol_code": 196, "name": u"גיבתון"},
{"search_priority": 0, "search_heb": u"מעברות", "symbol_code": 197, "name": u"מעברות"},
{"search_priority": 0, "search_heb": u"צופית", "symbol_code": 198, "name": u"צופית"},
{"search_priority": 3, "search_heb": u"אשדות יעקב )איחוד(", "symbol_code": 199,
"name": u"אשדות יעקב )איחוד("},
{"search_priority": 1, "search_heb": u"בית ינאי", "symbol_code": 200, "name": u"בית ינאי"},
{"search_priority": 1, "search_heb": u"בית עובד", "symbol_code": 202, "name": u"בית עובד"},
{"search_priority": 0, "search_heb": u"אלישיב", "symbol_code": 204, "name": u"אלישיב"},
{"search_priority": 0, "search_heb": u"חגלה", "symbol_code": 205, "name": u"חגלה"},
{"search_priority": 1, "search_heb": u"רמות השבים", "symbol_code": 206, "name": u"רמות השבים"},
{"search_priority": 1, "search_heb": u"גבעת ח\"ן", "symbol_code": 207, "name": u"גבעת ח\"ן"},
{"search_priority": 1, "search_heb": u"מוצא עילית", "symbol_code": 208, "name": u"מוצא עילית"},
{"search_priority": 1, "search_heb": u"בית צבי", "symbol_code": 212, "name": u"בית צבי"},
{"search_priority": 0, "search_heb": u"משמרות", "symbol_code": 213, "name": u"משמרות"},
{"search_priority": 1, "search_heb": u"כפר הרא\"ה", "symbol_code": 217, "name": u"כפר הרא\"ה"},
{"search_priority": 1, "search_heb": u"גני עם", "symbol_code": 218, "name": u"גני עם"},
{"search_priority": 1, "search_heb": u"חיבת ציון", "symbol_code": 219, "name": u"חיבת ציון"},
{"search_priority": 1, "search_heb": u"כפר ביאליק", "symbol_code": 220, "name": u"כפר ביאליק"},
{"search_priority": 1, "search_heb": u"עין עירון", "symbol_code": 223, "name": u"עין עירון"},
{"search_priority": 1, "search_heb": u"שושנת העמקים", "symbol_code": 224, "name": u"שושנת העמקים"},
{"search_priority": 1, "search_heb": u"גן השומרון", "symbol_code": 225, "name": u"גן השומרון"},
{"search_priority": 1, "search_heb": u"גני תקווה", "symbol_code": 229, "name": u"גני תקווה"},
{"search_priority": 0, "search_heb": u"מעש", "symbol_code": 230, "name": u"מעש"},
{"search_priority": 0, "search_heb": u"שפיים", "symbol_code": 232, "name": u"שפיים"},
{"search_priority": 1, "search_heb": u"כפר ידידיה", "symbol_code": 233, "name": u"כפר ידידיה"},
{"search_priority": 0, "search_heb": u"ביצרון", "symbol_code": 234, "name": u"ביצרון"},
{"search_priority": 1, "search_heb": u"חבצלת השרון", "symbol_code": 235, "name": u"חבצלת השרון"},
{"search_priority": 1, "search_heb": u"שער העמקים", "symbol_code": 237, "name": u"שער העמקים"},
{"search_priority": 1, "search_heb": u"גן חיים", "symbol_code": 239, "name": u"גן חיים"},
{"search_priority": 1, "search_heb": u"יקנעם עילית", "symbol_code": 240, "name": u"יקנעם עילית"},
{"search_priority": 1, "search_heb": u"יקנעם )מושבה(", "symbol_code": 241, "name": u"יקנעם )מושבה("},
{"search_priority": 1, "search_heb": u"בית השיטה", "symbol_code": 242, "name": u"בית השיטה"},
{"search_priority": 0, "search_heb": u"נתיבות", "symbol_code": 246, "name": u"נתיבות"},
{"search_priority": 0, "search_heb": u"רשפון", "symbol_code": 247, "name": u"רשפון"},
{"search_priority": 1, "search_heb": u"בית שערים", "symbol_code": 248, "name": u"בית שערים"},
{"search_priority": 1, "search_heb": u"כפר סירקין", "symbol_code": 249, "name": u"כפר סירקין"},
{"search_priority": 0, "search_heb": u"הזורע", "symbol_code": 250, "name": u"הזורע"},
{"search_priority": 1, "search_heb": u"ביתן אהרן", "symbol_code": 252, "name": u"ביתן אהרן"},
{"search_priority": 0, "search_heb": u"חולתה", "symbol_code": 253, "name": u"חולתה"},
{"search_priority": 1, "search_heb": u"כפר המכבי", "symbol_code": 254, "name": u"כפר המכבי"},
{"search_priority": 1, "search_heb": u"כפר חיטים", "symbol_code": 255, "name": u"כפר חיטים"},
{"search_priority": 3, "search_heb": u"ניר דוד )תל עמל(", "symbol_code": 256,
"name": u"ניר דוד )תל עמל("},
{"search_priority": 0, "search_heb": u"נופך", "symbol_code": 257, "name": u"נופך"},
{"search_priority": 1, "search_heb": u"שדה נחום", "symbol_code": 259, "name": u"שדה נחום"},
{"search_priority": 0, "search_heb": u"גינוסר", "symbol_code": 262, "name": u"גינוסר"},
{"search_priority": 0, "search_heb": u"מסדה", "symbol_code": 263, "name": u"מסדה"},
{"search_priority": 1, "search_heb": u"שער הגולן", "symbol_code": 264, "name": u"שער הגולן"},
{"search_priority": 1, "search_heb": u"בית יוסף", "symbol_code": 265, "name": u"בית יוסף"},
{"search_priority": 1, "search_heb": u"כפר שמריהו", "symbol_code": 267, "name": u"כפר שמריהו"},
{"search_priority": 1, "search_heb": u"טירת צבי", "symbol_code": 268, "name": u"טירת צבי"},
{"search_priority": 0, "search_heb": u"מולדת", "symbol_code": 269, "name": u"מולדת"},
{"search_priority": 1, "search_heb": u"עין השופט", "symbol_code": 270, "name": u"עין השופט"},
{"search_priority": 1, "search_heb": u"מעוז חיים", "symbol_code": 272, "name": u"מעוז חיים"},
{"search_priority": 1, "search_heb": u"עין גב", "symbol_code": 273, "name": u"עין גב"},
{"search_priority": 1, "search_heb": u"כפר מנחם", "symbol_code": 274, "name": u"כפר מנחם"},
{"search_priority": 1, "search_heb": u"צור משה", "symbol_code": 276, "name": u"צור משה"},
{"search_priority": 0, "search_heb": u"אושה", "symbol_code": 278, "name": u"אושה"},
{"search_priority": 0, "search_heb": u"חניתה", "symbol_code": 280, "name": u"חניתה"},
{"search_priority": 1, "search_heb": u"פקיעין חדשה", "symbol_code": 281, "name": u"פקיעין חדשה"},
{"search_priority": 1, "search_heb": u"שבי ציון", "symbol_code": 282, "name": u"שבי ציון"},
{"search_priority": 1, "search_heb": u"שדה ורבורג", "symbol_code": 284, "name": u"שדה ורבורג"},
{"search_priority": 0, "search_heb": u"אלונים", "symbol_code": 285, "name": u"אלונים"},
{"search_priority": 1, "search_heb": u"מעלה החמישה", "symbol_code": 286, "name": u"מעלה החמישה"},
{"search_priority": 1, "search_heb": u"תל יצחק", "symbol_code": 287, "name": u"תל יצחק"},
{"search_priority": 1, "search_heb": u"בית יהושע", "symbol_code": 288, "name": u"בית יהושע"},
{"search_priority": 1, "search_heb": u"עין המפרץ", "symbol_code": 289, "name": u"עין המפרץ"},
{"search_priority": 1, "search_heb": u"מעין צבי", "symbol_code": 290, "name": u"מעין צבי"},
{"search_priority": 0, "search_heb": u"שרונה", "symbol_code": 292, "name": u"שרונה"},
{"search_priority": 1, "search_heb": u"שדה יואב", "symbol_code": 293, "name": u"שדה יואב"},
{"search_priority": 0, "search_heb": u"אילון", "symbol_code": 294, "name": u"אילון"},
{"search_priority": 1, "search_heb": u"כפר רופין", "symbol_code": 295, "name": u"כפר רופין"},
{"search_priority": 1, "search_heb": u"נווה איתן", "symbol_code": 296, "name": u"נווה איתן"},
{"search_priority": 1, "search_heb": u"כפר מסריק", "symbol_code": 297, "name": u"כפר מסריק"},
{"search_priority": 0, "search_heb": u"מסילות", "symbol_code": 298, "name": u"מסילות"},
{"search_priority": 0, "search_heb": u"דליה", "symbol_code": 300, "name": u"דליה"},
{"search_priority": 1, "search_heb": u"בית עוזיאל", "symbol_code": 301, "name": u"בית עוזיאל"},
{"search_priority": 0, "search_heb": u"דפנה", "symbol_code": 302, "name": u"דפנה"},
{"search_priority": 0, "search_heb": u"דן", "symbol_code": 303, "name": u"דן"},
{"search_priority": 1, "search_heb": u"שדה אליהו", "symbol_code": 304, "name": u"שדה אליהו"},
{"search_priority": 0, "search_heb": u"גשר", "symbol_code": 305, "name": u"גשר"},
{"search_priority": 1, "search_heb": u"שדמות דבורה", "symbol_code": 306, "name": u"שדמות דבורה"},
{"search_priority": 0, "search_heb": u"הזורעים", "symbol_code": 307, "name": u"הזורעים"},
{"search_priority": 0, "search_heb": u"מחניים", "symbol_code": 308, "name": u"מחניים"},
{"search_priority": 0, "search_heb": u"נהורה", "symbol_code": 309, "name": u"נהורה"},
{"search_priority": 1, "search_heb": u"כפר גליקסון", "symbol_code": 310, "name": u"כפר גליקסון"},
{"search_priority": 1, "search_heb": u"גן שורק", "symbol_code": 311, "name": u"גן שורק"},
{"search_priority": 1, "search_heb": u"נווה ים", "symbol_code": 312, "name": u"נווה ים"},
{"search_priority": 0, "search_heb": u"אפק", "symbol_code": 313, "name": u"אפק"},
{"search_priority": 0, "search_heb": u"נגבה", "symbol_code": 315, "name": u"נגבה"},
{"search_priority": 1, "search_heb": u"כפר נטר", "symbol_code": 316, "name": u"כפר נטר"},
{"search_priority": 1, "search_heb": u"בית אורן", "symbol_code": 317, "name": u"בית אורן"},
{"search_priority": 0, "search_heb": u"עמיעוז", "symbol_code": 318, "name": u"עמיעוז"},
{"search_priority": 0, "search_heb": u"עמיר", "symbol_code": 319, "name": u"עמיר"},
{"search_priority": 1, "search_heb": u"כפר ורבורג", "symbol_code": 320, "name": u"כפר ורבורג"},
{"search_priority": 1, "search_heb": u"בית הלל", "symbol_code": 322, "name": u"בית הלל"},
{"search_priority": 1, "search_heb": u"שאר ישוב", "symbol_code": 324, "name": u"שאר ישוב"},
{"search_priority": 0, "search_heb": u"מצובה", "symbol_code": 325, "name": u"מצובה"},
{"search_priority": 2, "search_heb": u"בית יצחק-שער חפר", "symbol_code": 326,
"name": u"בית יצחק-שער חפר"},
{"search_priority": 1, "search_heb": u"שדות ים", "symbol_code": 327, "name": u"שדות ים"},
{"search_priority": 0, "search_heb": u"עזוז", "symbol_code": 328, "name": u"עזוז"},
{"search_priority": 1, "search_heb": u"שדה נחמיה", "symbol_code": 329, "name": u"שדה נחמיה"},
{"search_priority": 0, "search_heb": u"אלומות", "symbol_code": 330, "name": u"אלומות"},
{"search_priority": 1, "search_heb": u"ניר צבי", "symbol_code": 331, "name": u"ניר צבי"},
{"search_priority": 1, "search_heb": u"קבוצת יבנה", "symbol_code": 334, "name": u"קבוצת יבנה"},
{"search_priority": 1, "search_heb": u"רמת השופט", "symbol_code": 335, "name": u"רמת השופט"},
{"search_priority": 0, "search_heb": u"דורות", "symbol_code": 336, "name": u"דורות"},
{"search_priority": 0, "search_heb": u"איבים", "symbol_code": 338, "name": u"איבים"},
{"search_priority": 1, "search_heb": u"רמת צבי", "symbol_code": 339, "name": u"רמת צבי"},
{"search_priority": 1, "search_heb": u"גת )קיבוץ(", "symbol_code": 340, "name": u"גת )קיבוץ("},
{"search_priority": 0, "search_heb": u"גברעם", "symbol_code": 342, "name": u"גברעם"},
{"search_priority": 0, "search_heb": u"חמדיה", "symbol_code": 343, "name": u"חמדיה"},
{"search_priority": 0, "search_heb": u"מענית", "symbol_code": 344, "name": u"מענית"},
{"search_priority": 1, "search_heb": u"כפר סאלד", "symbol_code": 345, "name": u"כפר סאלד"},
{"search_priority": 1, "search_heb": u"גליל ים", "symbol_code": 346, "name": u"גליל ים"},
{"search_priority": 0, "search_heb": u"מנרה", "symbol_code": 347, "name": u"מנרה"},
{"search_priority": 1, "search_heb": u"ניר עם", "symbol_code": 348, "name": u"ניר עם"},
{"search_priority": 0, "search_heb": u"ניצן", "symbol_code": 351, "name": u"ניצן"},
{"search_priority": 0, "search_heb": u"גבולות", "symbol_code": 352, "name": u"גבולות"},
{"search_priority": 1, "search_heb": u"בית זיד", "symbol_code": 353, "name": u"בית זיד"},
{"search_priority": 0, "search_heb": u"רביבים", "symbol_code": 354, "name": u"רביבים"},
{"search_priority": 0, "search_heb": u"חורשים", "symbol_code": 355, "name": u"חורשים"},
{"search_priority": 0, "search_heb": u"הגושרים", "symbol_code": 356, "name": u"הגושרים"},
{"search_priority": 1, "search_heb": u"כפר בלום", "symbol_code": 357, "name": u"כפר בלום"},
{"search_priority": 1, "search_heb": u"יד מרדכי", "symbol_code": 358, "name": u"יד מרדכי"},
{"search_priority": 0, "search_heb": u"ניצנים", "symbol_code": 359, "name": u"ניצנים"},
{"search_priority": 1, "search_heb": u"גבעת ניל\"י", "symbol_code": 360, "name": u"גבעת ניל\"י"},
{"search_priority": 0, "search_heb": u"רוחמה", "symbol_code": 362, "name": u"רוחמה"},
{"search_priority": 1, "search_heb": u"חפץ חיים", "symbol_code": 363, "name": u"חפץ חיים"},
{"search_priority": 1, "search_heb": u"כפר אוריה", "symbol_code": 364, "name": u"כפר אוריה"},
{"search_priority": 1, "search_heb": u"בית קשת", "symbol_code": 365, "name": u"בית קשת"},
{"search_priority": 0, "search_heb": u"שמיר", "symbol_code": 366, "name": u"שמיר"},
{"search_priority": 1, "search_heb": u"עין העמק", "symbol_code": 367, "name": u"עין העמק"},
{"search_priority": 0, "search_heb": u"ביריה", "symbol_code": 368, "name": u"ביריה"},
{"search_priority": 2, "search_heb": u"גלעד )אבן יצחק(", "symbol_code": 369,
"name": u"גלעד )אבן יצחק("},
{"search_priority": 0, "search_heb": u"גזר", "symbol_code": 370, "name": u"גזר"},
{"search_priority": 0, "search_heb": u"כדורי", "symbol_code": 371, "name": u"כדורי"},
{"search_priority": 1, "search_heb": u"רמות נפתלי", "symbol_code": 372, "name": u"רמות נפתלי"},
{"search_priority": 1, "search_heb": u"בית הלוי", "symbol_code": 373, "name": u"בית הלוי"},
{"search_priority": 0, "search_heb": u"חוקוק", "symbol_code": 374, "name": u"חוקוק"},
{"search_priority": 0, "search_heb": u"מגל", "symbol_code": 375, "name": u"מגל"},
{"search_priority": 0, "search_heb": u"עברון", "symbol_code": 376, "name": u"עברון"},
{"search_priority": 0, "search_heb": u"המעפיל", "symbol_code": 377, "name": u"המעפיל"},
{"search_priority": 1, "search_heb": u"משגב עם", "symbol_code": 378, "name": u"משגב עם"},
{"search_priority": 0, "search_heb": u"גאולים", "symbol_code": 379, "name": u"גאולים"},
{"search_priority": 1, "search_heb": u"להבות הבשן", "symbol_code": 380, "name": u"להבות הבשן"},
{"search_priority": 0, "search_heb": u"מכמורת", "symbol_code": 382, "name": u"מכמורת"},
{"search_priority": 1, "search_heb": u"עין הנצי\"ב", "symbol_code": 383, "name": u"עין הנצי\"ב"},
{"search_priority": 0, "search_heb": u"עמיעד", "symbol_code": 385, "name": u"עמיעד"},
{"search_priority": 1, "search_heb": u"בני דרור", "symbol_code": 386, "name": u"בני דרור"},
{"search_priority": 1, "search_heb": u"כפר מונש", "symbol_code": 387, "name": u"כפר מונש"},
{"search_priority": 1, "search_heb": u"כפר קיש", "symbol_code": 388, "name": u"כפר קיש"},
{"search_priority": 0, "search_heb": u"בצרה", "symbol_code": 389, "name": u"בצרה"},
{"search_priority": 0, "search_heb": u"רגבה", "symbol_code": 390, "name": u"רגבה"},
{"search_priority": 0, "search_heb": u"קדמה", "symbol_code": 392, "name": u"קדמה"},
{"search_priority": 0, "search_heb": u"גלאון", "symbol_code": 393, "name": u"גלאון"},
{"search_priority": 0, "search_heb": u"שובל", "symbol_code": 394, "name": u"שובל"},
{"search_priority": 1, "search_heb": u"משמר הנגב", "symbol_code": 395, "name": u"משמר הנגב"},
{"search_priority": 0, "search_heb": u"נבטים", "symbol_code": 396, "name": u"נבטים"},
{"search_priority": 0, "search_heb": u"חצרים", "symbol_code": 397, "name": u"חצרים"},
{"search_priority": 0, "search_heb": u"שרשרת", "symbol_code": 398, "name": u"שרשרת"},
{"search_priority": 0, "search_heb": u"בארי", "symbol_code": 399, "name": u"בארי"},
{"search_priority": 1, "search_heb": u"אבן שמואל", "symbol_code": 400, "name": u"אבן שמואל"},
{"search_priority": 1, "search_heb": u"ניר יצחק", "symbol_code": 402, "name": u"ניר יצחק"},
{"search_priority": 0, "search_heb": u"אורים", "symbol_code": 403, "name": u"אורים"},
{"search_priority": 1, "search_heb": u"נווה אילן", "symbol_code": 405, "name": u"נווה אילן"},
{"search_priority": 0, "search_heb": u"חצור-אשדוד", "symbol_code": 406, "name": u"חצור-אשדוד"},
{"search_priority": 0, "search_heb": u"דברת", "symbol_code": 407, "name": u"דברת"},
{"search_priority": 1, "search_heb": u"נאות מרדכי", "symbol_code": 408, "name": u"נאות מרדכי"},
{"search_priority": 0, "search_heb": u"יחיעם", "symbol_code": 409, "name": u"יחיעם"},
{"search_priority": 1, "search_heb": u"קרית שלמה", "symbol_code": 412, "name": u"קרית שלמה"},
{"search_priority": 0, "search_heb": u"צאלים", "symbol_code": 413, "name": u"צאלים"},
{"search_priority": 0, "search_heb": u"קלחים", "symbol_code": 414, "name": u"קלחים"},
{"search_priority": 0, "search_heb": u"שוקדה", "symbol_code": 415, "name": u"שוקדה"},
{"search_priority": 1, "search_heb": u"מעין ברוך", "symbol_code": 416, "name": u"מעין ברוך"},
{"search_priority": 0, "search_heb": u"יקום", "symbol_code": 417, "name": u"יקום"},
{"search_priority": 1, "search_heb": u"בני ציון", "symbol_code": 418, "name": u"בני ציון"},
{"search_priority": 0, "search_heb": u"סעד", "symbol_code": 419, "name": u"סעד"},
{"search_priority": 1, "search_heb": u"משאבי שדה", "symbol_code": 421, "name": u"משאבי שדה"},
{"search_priority": 1, "search_heb": u"חרב לאת", "symbol_code": 422, "name": u"חרב לאת"},
{"search_priority": 0, "search_heb": u"העוגן", "symbol_code": 423, "name": u"העוגן"},
{"search_priority": 0, "search_heb": u"גבים", "symbol_code": 424, "name": u"גבים"},
{"search_priority": 0, "search_heb": u"משמרת", "symbol_code": 425, "name": u"משמרת"},
{"search_priority": 1, "search_heb": u"עין כרמל", "symbol_code": 426, "name": u"עין כרמל"},
{"search_priority": 1, "search_heb": u"כפר גלים", "symbol_code": 427, "name": u"כפר גלים"},
{"search_priority": 1, "search_heb": u"ברור חיל", "symbol_code": 428, "name": u"ברור חיל"},
{"search_priority": 1, "search_heb": u"אלוני אבא", "symbol_code": 429, "name": u"אלוני אבא"},
{"search_priority": 2, "search_heb": u"בית לחם הגלילית", "symbol_code": 430,
"name": u"בית לחם הגלילית"},
{"search_priority": 0, "search_heb": u"דלתון", "symbol_code": 431, "name": u"דלתון"},
{"search_priority": 0, "search_heb": u"שמרת", "symbol_code": 432, "name": u"שמרת"},
{"search_priority": 0, "search_heb": u"נחשולים", "symbol_code": 433, "name": u"נחשולים"},
{"search_priority": 0, "search_heb": u"החותרים", "symbol_code": 434, "name": u"החותרים"},
{"search_priority": 1, "search_heb": u"נצר סרני", "symbol_code": 435, "name": u"נצר סרני"},
{"search_priority": 1, "search_heb": u"עין דור", "symbol_code": 436, "name": u"עין דור"},
{"search_priority": 0, "search_heb": u"רשפים", "symbol_code": 437, "name": u"רשפים"},
{"search_priority": 0, "search_heb": u"שלוחות", "symbol_code": 439, "name": u"שלוחות"},
{"search_priority": 0, "search_heb": u"יסודות", "symbol_code": 440, "name": u"יסודות"},
{"search_priority": 0, "search_heb": u"גדעונה", "symbol_code": 442, "name": u"גדעונה"},
{"search_priority": 1, "search_heb": u"כפר הנשיא", "symbol_code": 443, "name": u"כפר הנשיא"},
{"search_priority": 0, "search_heb": u"רגבים", "symbol_code": 444, "name": u"רגבים"},
{"search_priority": 1, "search_heb": u"רמות מנשה", "symbol_code": 445, "name": u"רמות מנשה"},
{"search_priority": 0, "search_heb": u"אודים", "symbol_code": 446, "name": u"אודים"},
{"search_priority": 0, "search_heb": u"נורדיה", "symbol_code": 447, "name": u"נורדיה"},
{"search_priority": 1, "search_heb": u"בני עטרות", "symbol_code": 448, "name": u"בני עטרות"},
{"search_priority": 0, "search_heb": u"נחלים", "symbol_code": 449, "name": u"נחלים"},
{"search_priority": 1, "search_heb": u"בארות יצחק", "symbol_code": 450, "name": u"בארות יצחק"},
{"search_priority": 0, "search_heb": u"יזרעאל", "symbol_code": 452, "name": u"יזרעאל"},
{"search_priority": 0, "search_heb": u"יפתח", "symbol_code": 453, "name": u"יפתח"},
{"search_priority": 0, "search_heb": u"סער", "symbol_code": 454, "name": u"סער"},
{"search_priority": 0, "search_heb": u"שורש", "symbol_code": 456, "name": u"שורש"},
{"search_priority": 0, "search_heb": u"גזית", "symbol_code": 457, "name": u"גזית"},
{"search_priority": 1, "search_heb": u"רמת רזיאל", "symbol_code": 460, "name": u"רמת רזיאל"},
{"search_priority": 1, "search_heb": u"טל שחר", "symbol_code": 462, "name": u"טל שחר"},
{"search_priority": 0, "search_heb": u"געתון", "symbol_code": 463, "name": u"געתון"},
{"search_priority": 0, "search_heb": u"הראל", "symbol_code": 464, "name": u"הראל"},
{"search_priority": 0, "search_heb": u"צובה", "symbol_code": 465, "name": u"צובה"},
{"search_priority": 1, "search_heb": u"בית דגן", "symbol_code": 466, "name": u"בית דגן"},
{"search_priority": 1, "search_heb": u"קרית עקרון", "symbol_code": 469, "name": u"קרית עקרון"},
{"search_priority": 1, "search_heb": u"אבו גוש", "symbol_code": 472, "name": u"אבו גוש"},
{"search_priority": 1, "search_heb": u"אבו סנאן", "symbol_code": 473, "name": u"אבו סנאן"},
{"search_priority": 0, "search_heb": u"דחי", "symbol_code": 475, "name": u"דחי"},
{"search_priority": 0, "search_heb": u"אכסאל", "symbol_code": 478, "name": u"אכסאל"},
{"search_priority": 1, "search_heb": u"בית ג'ן", "symbol_code": 480, "name": u"בית ג'ן"},
{"search_priority": 0, "search_heb": u"מגאר", "symbol_code": 481, "name": u"מגאר"},
{"search_priority": 0, "search_heb": u"בועיינה-נוג'ידאת", "symbol_code": 482,
"name": u"בועיינה-נוג'ידאת"},
{"search_priority": 0, "search_heb": u"בענה", "symbol_code": 483, "name": u"בענה"},
{"search_priority": 0, "search_heb": u"ג'ולס", "symbol_code": 485, "name": u"ג'ולס"},
{"search_priority": 2, "search_heb": u"ג'ש )גוש חלב(", "symbol_code": 487, "name": u"ג'ש )גוש חלב("},
{"search_priority": 0, "search_heb": u"דבוריה", "symbol_code": 489, "name": u"דבוריה"},
{"search_priority": 1, "search_heb": u"דייר אל-אסד", "symbol_code": 490, "name": u"דייר אל-אסד"},
{"search_priority": 1, "search_heb": u"דייר חנא", "symbol_code": 492, "name": u"דייר חנא"},
{"search_priority": 1, "search_heb": u"דייר ראפאת", "symbol_code": 493, "name": u"דייר ראפאת"},
{"search_priority": 1, "search_heb": u"דאלית אל-כרמל", "symbol_code": 494, "name": u"דאלית אל-כרמל"},
{"search_priority": 0, "search_heb": u"חורפיש", "symbol_code": 496, "name": u"חורפיש"},
{"search_priority": 1, "search_heb": u"טייבה )בעמק(", "symbol_code": 497, "name": u"טייבה )בעמק("},
{"search_priority": 0, "search_heb": u"טורעאן", "symbol_code": 498, "name": u"טורעאן"},
{"search_priority": 0, "search_heb": u"יפיע", "symbol_code": 499, "name": u"יפיע"},
{"search_priority": 0, "search_heb": u"ירכא", "symbol_code": 502, "name": u"ירכא"},
{"search_priority": 0, "search_heb": u"כאבול", "symbol_code": 504, "name": u"כאבול"},
{"search_priority": 2, "search_heb": u"כאוכב אבו אל-היג'א", "symbol_code": 505,
"name": u"כאוכב אבו אל-היג'א"},
{"search_priority": 1, "search_heb": u"כפר יאסיף", "symbol_code": 507, "name": u"כפר יאסיף"},
{"search_priority": 1, "search_heb": u"כפר כמא", "symbol_code": 508, "name": u"כפר כמא"},
{"search_priority": 1, "search_heb": u"כפר כנא", "symbol_code": 509, "name": u"כפר כנא"},
{"search_priority": 1, "search_heb": u"כפר מנדא", "symbol_code": 510, "name": u"כפר מנדא"},
{"search_priority": 0, "search_heb": u"עילוט", "symbol_code": 511, "name": u"עילוט"},
{"search_priority": 1, "search_heb": u"כפר מצר", "symbol_code": 512, "name": u"כפר מצר"},
{"search_priority": 1, "search_heb": u"עין ראפה", "symbol_code": 514, "name": u"עין ראפה"},
{"search_priority": 1, "search_heb": u"מג'ד אל-כרום", "symbol_code": 516, "name": u"מג'ד אל-כרום"},
{"search_priority": 0, "search_heb": u"מזרעה", "symbol_code": 517, "name": u"מזרעה"},
{"search_priority": 0, "search_heb": u"מעיליא", "symbol_code": 518, "name": u"מעיליא"},
{"search_priority": 0, "search_heb": u"משהד", "symbol_code": 520, "name": u"משהד"},
{"search_priority": 1, "search_heb": u"עין נקובא", "symbol_code": 521, "name": u"עין נקובא"},
{"search_priority": 0, "search_heb": u"נחף", "symbol_code": 522, "name": u"נחף"},
{"search_priority": 0, "search_heb": u"ניין", "symbol_code": 523, "name": u"ניין"},
{"search_priority": 0, "search_heb": u"נאעורה", "symbol_code": 524, "name": u"נאעורה"},
{"search_priority": 0, "search_heb": u"סאג'ור", "symbol_code": 525, "name": u"סאג'ור"},
{"search_priority": 0, "search_heb": u"סולם", "symbol_code": 526, "name": u"סולם"},
{"search_priority": 0, "search_heb": u"שזור", "symbol_code": 527, "name": u"שזור"},
{"search_priority": 0, "search_heb": u"עוזייר", "symbol_code": 528, "name": u"עוזייר"},
{"search_priority": 0, "search_heb": u"אעבלין", "symbol_code": 529, "name": u"אעבלין"},
{"search_priority": 0, "search_heb": u"עיילבון", "symbol_code": 530, "name": u"עיילבון"},
{"search_priority": 0, "search_heb": u"עראבה", "symbol_code": 531, "name": u"עראבה"},
{"search_priority": 1, "search_heb": u"עין מאהל", "symbol_code": 532, "name": u"עין מאהל"},
{"search_priority": 0, "search_heb": u"עספיא", "symbol_code": 534, "name": u"עספיא"},
{"search_priority": 0, "search_heb": u"פסוטה", "symbol_code": 535, "name": u"פסוטה"},
{"search_priority": 1, "search_heb": u"פקיעין )בוקייעה(", "symbol_code": 536,
"name": u"פקיעין )בוקייעה("},
{"search_priority": 0, "search_heb": u"פוריידיס", "symbol_code": 537, "name": u"פוריידיס"},
{"search_priority": 0, "search_heb": u"שעב", "symbol_code": 538, "name": u"שעב"},
{"search_priority": 0, "search_heb": u"רומאנה", "symbol_code": 539, "name": u"רומאנה"},
{"search_priority": 0, "search_heb": u"ריחאניה", "symbol_code": 540, "name": u"ריחאניה"},
{"search_priority": 1, "search_heb": u"ג'סר א-זרקא", "symbol_code": 541, "name": u"ג'סר א-זרקא"},
{"search_priority": 0, "search_heb": u"ריינה", "symbol_code": 542, "name": u"ריינה"},
{"search_priority": 0, "search_heb": u"ראמה", "symbol_code": 543, "name": u"ראמה"},
{"search_priority": 1, "search_heb": u"עין אל-אסד", "symbol_code": 546, "name": u"עין אל-אסד"},
{"search_priority": 1, "search_heb": u"טמרה )יזרעאל(", "symbol_code": 547, "name": u"טמרה )יזרעאל("},
{"search_priority": 1, "search_heb": u"גנות הדר", "symbol_code": 549, "name": u"גנות הדר"},
{"search_priority": 1, "search_heb": u"ניר בנים", "symbol_code": 553, "name": u"ניר בנים"},
{"search_priority": 0, "search_heb": u"שדמה", "symbol_code": 555, "name": u"שדמה"},
{"search_priority": 1, "search_heb": u"בוסתן הגליל", "symbol_code": 559, "name": u"בוסתן הגליל"},
{"search_priority": 1, "search_heb": u"בית אלעזרי", "symbol_code": 562, "name": u"בית אלעזרי"},
{"search_priority": 1, "search_heb": u"משמר דוד", "symbol_code": 563, "name": u"משמר דוד"},
{"search_priority": 0, "search_heb": u"רבדים", "symbol_code": 564, "name": u"רבדים"},
{"search_priority": 0, "search_heb": u"אזור", "symbol_code": 565, "name": u"אזור"},
{"search_priority": 1, "search_heb": u"גבעת שמש", "symbol_code": 566, "name": u"גבעת שמש"},
{"search_priority": 0, "search_heb": u"צרעה", "symbol_code": 567, "name": u"צרעה"},
{"search_priority": 0, "search_heb": u"מעונה", "symbol_code": 570, "name": u"מעונה"},
{"search_priority": 1, "search_heb": u"בית גמליאל", "symbol_code": 571, "name": u"בית גמליאל"},
{"search_priority": 1, "search_heb": u"בית העמק", "symbol_code": 572, "name": u"בית העמק"},
{"search_priority": 0, "search_heb": u"מבקיעים", "symbol_code": 573, "name": u"מבקיעים"},
{"search_priority": 1, "search_heb": u"גשר הזיו", "symbol_code": 574, "name": u"גשר הזיו"},
{"search_priority": 0, "search_heb": u"יסעור", "symbol_code": 575, "name": u"יסעור"},
{"search_priority": 0, "search_heb": u"כברי", "symbol_code": 576, "name": u"כברי"},
{"search_priority": 1, "search_heb": u"יד בנימין", "symbol_code": 577, "name": u"יד בנימין"},
{"search_priority": 0, "search_heb": u"סאסא", "symbol_code": 578, "name": u"סאסא"},
{"search_priority": 2, "search_heb": u"כפר ראש הנקרה", "symbol_code": 579, "name": u"כפר ראש הנקרה"},
{"search_priority": 1, "search_heb": u"כרם מהר\"ל", "symbol_code": 580, "name": u"כרם מהר\"ל"},
{"search_priority": 1, "search_heb": u"כפר הנגיד", "symbol_code": 582, "name": u"כפר הנגיד"},
{"search_priority": 0, "search_heb": u"זיקים", "symbol_code": 584, "name": u"זיקים"},
{"search_priority": 0, "search_heb": u"לביא", "symbol_code": 585, "name": u"לביא"},
{"search_priority": 0, "search_heb": u"מגידו", "symbol_code": 586, "name": u"מגידו"},
{"search_priority": 0, "search_heb": u"סביון", "symbol_code": 587, "name": u"סביון"},
{"search_priority": 1, "search_heb": u"בני ראם", "symbol_code": 588, "name": u"בני ראם"},
{"search_priority": 0, "search_heb": u"בצת", "symbol_code": 589, "name": u"בצת"},
{"search_priority": 1, "search_heb": u"נווה אור", "symbol_code": 590, "name": u"נווה אור"},
{"search_priority": 0, "search_heb": u"עשרת", "symbol_code": 591, "name": u"עשרת"},
{"search_priority": 1, "search_heb": u"בני דרום", "symbol_code": 592, "name": u"בני דרום"},
{"search_priority": 0, "search_heb": u"ערוגות", "symbol_code": 593, "name": u"ערוגות"},
{"search_priority": 0, "search_heb": u"צפריה", "symbol_code": 594, "name": u"צפריה"},
{"search_priority": 1, "search_heb": u"לוחמי הגיטאות", "symbol_code": 595, "name": u"לוחמי הגיטאות"},
{"search_priority": 0, "search_heb": u"מלכיה", "symbol_code": 596, "name": u"מלכיה"},
{"search_priority": 0, "search_heb": u"פלמחים", "symbol_code": 597, "name": u"פלמחים"},
{"search_priority": 1, "search_heb": u"בית קמה", "symbol_code": 598, "name": u"בית קמה"},
{"search_priority": 0, "search_heb": u"פרוד", "symbol_code": 599, "name": u"פרוד"},
{"search_priority": 0, "search_heb": u"נירים", "symbol_code": 602, "name": u"נירים"},
{"search_priority": 0, "search_heb": u"אלקוש", "symbol_code": 603, "name": u"אלקוש"},
{"search_priority": 1, "search_heb": u"בית עריף", "symbol_code": 604, "name": u"בית עריף"},
{"search_priority": 1, "search_heb": u"כפר שמאי", "symbol_code": 605, "name": u"כפר שמאי"},
{"search_priority": 0, "search_heb": u"מזור", "symbol_code": 606, "name": u"מזור"},
{"search_priority": 0, "search_heb": u"מירון", "symbol_code": 607, "name": u"מירון"},
{"search_priority": 1, "search_heb": u"כפר חושן", "symbol_code": 609, "name": u"כפר חושן"},
{"search_priority": 0, "search_heb": u"סתריה", "symbol_code": 610, "name": u"סתריה"},
{"search_priority": 0, "search_heb": u"צרופה", "symbol_code": 612, "name": u"צרופה"},
{"search_priority": 0, "search_heb": u"ציפורי", "symbol_code": 613, "name": u"ציפורי"},
{"search_priority": 0, "search_heb": u"שומרה", "symbol_code": 614, "name": u"שומרה"},
{"search_priority": 0, "search_heb": u"קדרון", "symbol_code": 615, "name": u"קדרון"},
{"search_priority": 0, "search_heb": u"רינתיה", "symbol_code": 616, "name": u"רינתיה"},
{"search_priority": 0, "search_heb": u"ברקאי", "symbol_code": 617, "name": u"ברקאי"},
{"search_priority": 0, "search_heb": u"חדיד", "symbol_code": 618, "name": u"חדיד"},
{"search_priority": 1, "search_heb": u"בית גוברין", "symbol_code": 619, "name": u"בית גוברין"},
{"search_priority": 1, "search_heb": u"משואות יצחק", "symbol_code": 620, "name": u"משואות יצחק"},
{"search_priority": 1, "search_heb": u"עין צורים", "symbol_code": 622, "name": u"עין צורים"},
{"search_priority": 0, "search_heb": u"יראון", "symbol_code": 623, "name": u"יראון"},
{"search_priority": 0, "search_heb": u"ג'לג'וליה", "symbol_code": 627, "name": u"ג'לג'וליה"},
{"search_priority": 0, "search_heb": u"ג'ת", "symbol_code": 628, "name": u"ג'ת"},
{"search_priority": 1, "search_heb": u"כפר ברא", "symbol_code": 633, "name": u"כפר ברא"},
{"search_priority": 1, "search_heb": u"כפר קאסם", "symbol_code": 634, "name": u"כפר קאסם"},
{"search_priority": 0, "search_heb": u"מוקייבלה", "symbol_code": 635, "name": u"מוקייבלה"},
{"search_priority": 0, "search_heb": u"צנדלה", "symbol_code": 636, "name": u"צנדלה"},
{"search_priority": 0, "search_heb": u"ערערה", "symbol_code": 637, "name": u"ערערה"},
{"search_priority": 0, "search_heb": u"קלנסווה", "symbol_code": 638, "name": u"קלנסווה"},
{"search_priority": 0, "search_heb": u"מצר", "symbol_code": 648, "name": u"מצר"},
{"search_priority": 0, "search_heb": u"מייסר", "symbol_code": 649, "name": u"מייסר"},
{"search_priority": 0, "search_heb": u"אבטין", "symbol_code": 652, "name": u"אבטין"},
{"search_priority": 1, "search_heb": u"כפר קרע", "symbol_code": 654, "name": u"כפר קרע"},
{"search_priority": 1, "search_heb": u"שייח' דנון", "symbol_code": 658, "name": u"שייח' דנון"},
{"search_priority": 1, "search_heb": u"שער אפרים", "symbol_code": 661, "name": u"שער אפרים"},
{"search_priority": 0, "search_heb": u"חוסן", "symbol_code": 662, "name": u"חוסן"},
{"search_priority": 1, "search_heb": u"טירת יהודה", "symbol_code": 663, "name": u"טירת יהודה"},
{"search_priority": 2, "search_heb": u"כרם בן זמרה", "symbol_code": 664, "name": u"כרם בן זמרה"},
{"search_priority": 0, "search_heb": u"תקומה", "symbol_code": 665, "name": u"תקומה"},
{"search_priority": 0, "search_heb": u"עומר", "symbol_code": 666, "name": u"עומר"},
{"search_priority": 0, "search_heb": u"ברעם", "symbol_code": 667, "name": u"ברעם"},
{"search_priority": 0, "search_heb": u"מפלסים", "symbol_code": 668, "name": u"מפלסים"},
{"search_priority": 1, "search_heb": u"משמר איילון", "symbol_code": 670, "name": u"משמר איילון"},
{"search_priority": 1, "search_heb": u"בית נקופה", "symbol_code": 672, "name": u"בית נקופה"},
{"search_priority": 1, "search_heb": u"כפר טרומן", "symbol_code": 673, "name": u"כפר טרומן"},
{"search_priority": 0, "search_heb": u"לימן", "symbol_code": 674, "name": u"לימן"},
{"search_priority": 0, "search_heb": u"הבונים", "symbol_code": 675, "name": u"הבונים"},
{"search_priority": 1, "search_heb": u"עין השלושה", "symbol_code": 676, "name": u"עין השלושה"},
{"search_priority": 0, "search_heb": u"הסוללים", "symbol_code": 677, "name": u"הסוללים"},
{"search_priority": 0, "search_heb": u"מעגן", "symbol_code": 678, "name": u"מעגן"},
{"search_priority": 0, "search_heb": u"אביאל", "symbol_code": 679, "name": u"אביאל"},
{"search_priority": 0, "search_heb": u"אומץ", "symbol_code": 680, "name": u"אומץ"},
{"search_priority": 1, "search_heb": u"גבעת שמואל", "symbol_code": 681, "name": u"גבעת שמואל"},
{"search_priority": 0, "search_heb": u"אליקים", "symbol_code": 682, "name": u"אליקים"},
{"search_priority": 1, "search_heb": u"גבע כרמל", "symbol_code": 683, "name": u"גבע כרמל"},
{"search_priority": 0, "search_heb": u"היוגב", "symbol_code": 684, "name": u"היוגב"},
{"search_priority": 0, "search_heb": u"בניה", "symbol_code": 685, "name": u"בניה"},
{"search_priority": 1, "search_heb": u"נווה ימין", "symbol_code": 686, "name": u"נווה ימין"},
{"search_priority": 1, "search_heb": u"עין איילה", "symbol_code": 687, "name": u"עין איילה"},
{"search_priority": 0, "search_heb": u"עלמה", "symbol_code": 688, "name": u"עלמה"},
{"search_priority": 0, "search_heb": u"מגדים", "symbol_code": 689, "name": u"מגדים"},
{"search_priority": 1, "search_heb": u"כפר אחים", "symbol_code": 690, "name": u"כפר אחים"},
{"search_priority": 0, "search_heb": u"שפיר", "symbol_code": 692, "name": u"שפיר"},
{"search_priority": 1, "search_heb": u"נתיב הל\"ה", "symbol_code": 693, "name": u"נתיב הל\"ה"},
{"search_priority": 1, "search_heb": u"מעגן מיכאל", "symbol_code": 694, "name": u"מעגן מיכאל"},
{"search_priority": 0, "search_heb": u"מגן", "symbol_code": 695, "name": u"מגן"},
{"search_priority": 1, "search_heb": u"כפר חב\"ד", "symbol_code": 696, "name": u"כפר חב\"ד"},
{"search_priority": 0, "search_heb": u"בארותיים", "symbol_code": 697, "name": u"בארותיים"},
{"search_priority": 0, "search_heb": u"בורגתה", "symbol_code": 698, "name": u"בורגתה"},
{"search_priority": 1, "search_heb": u"ניר ישראל", "symbol_code": 699, "name": u"ניר ישראל"},
{"search_priority": 0, "search_heb": u"חצב", "symbol_code": 700, "name": u"חצב"},
{"search_priority": 0, "search_heb": u"ארבל", "symbol_code": 701, "name": u"ארבל"},
{"search_priority": 0, "search_heb": u"האון", "symbol_code": 702, "name": u"האון"},
{"search_priority": 1, "search_heb": u"גבעת עוז", "symbol_code": 703, "name": u"גבעת עוז"},
{"search_priority": 0, "search_heb": u"נחשונים", "symbol_code": 705, "name": u"נחשונים"},
{"search_priority": 0, "search_heb": u"גיאה", "symbol_code": 706, "name": u"גיאה"},
{"search_priority": 1, "search_heb": u"כפר דניאל", "symbol_code": 707, "name": u"כפר דניאל"},
{"search_priority": 0, "search_heb": u"עמקה", "symbol_code": 708, "name": u"עמקה"},
{"search_priority": 0, "search_heb": u"תפרח", "symbol_code": 709, "name": u"תפרח"},
{"search_priority": 1, "search_heb": u"בית זית", "symbol_code": 710, "name": u"בית זית"},
{"search_priority": 0, "search_heb": u"עזריה", "symbol_code": 711, "name": u"עזריה"},
{"search_priority": 1, "search_heb": u"בן עמי", "symbol_code": 712, "name": u"בן עמי"},
{"search_priority": 0, "search_heb": u"רעים", "symbol_code": 713, "name": u"רעים"},
{"search_priority": 0, "search_heb": u"ארז", "symbol_code": 714, "name": u"ארז"},
{"search_priority": 1, "search_heb": u"להבות חביבה", "symbol_code": 715, "name": u"להבות חביבה"},
{"search_priority": 0, "search_heb": u"אייל", "symbol_code": 716, "name": u"אייל"},
{"search_priority": 0, "search_heb": u"חגור", "symbol_code": 717, "name": u"חגור"},
{"search_priority": 0, "search_heb": u"ירחיב", "symbol_code": 718, "name": u"ירחיב"},
{"search_priority": 1, "search_heb": u"תל קציר", "symbol_code": 719, "name": u"תל קציר"},
{"search_priority": 1, "search_heb": u"ניר גלים", "symbol_code": 720, "name": u"ניר גלים"},
{"search_priority": 1, "search_heb": u"שדה אילן", "symbol_code": 721, "name": u"שדה אילן"},
{"search_priority": 0, "search_heb": u"מגשימים", "symbol_code": 722, "name": u"מגשימים"},
{"search_priority": 1, "search_heb": u"בית הגדי", "symbol_code": 723, "name": u"בית הגדי"},
{"search_priority": 0, "search_heb": u"הודיה", "symbol_code": 726, "name": u"הודיה"},
{"search_priority": 1, "search_heb": u"תלמי יחיאל", "symbol_code": 727, "name": u"תלמי יחיאל"},
{"search_priority": 1, "search_heb": u"משמר השבעה", "symbol_code": 729, "name": u"משמר השבעה"},
{"search_priority": 0, "search_heb": u"אליפלט", "symbol_code": 730, "name": u"אליפלט"},
{"search_priority": 0, "search_heb": u"מישר", "symbol_code": 731, "name": u"מישר"},
{"search_priority": 1, "search_heb": u"משמר הירדן", "symbol_code": 732, "name": u"משמר הירדן"},
{"search_priority": 1, "search_heb": u"גן יאשיה", "symbol_code": 734, "name": u"גן יאשיה"},
{"search_priority": 1, "search_heb": u"רמות מאיר", "symbol_code": 735, "name": u"רמות מאיר"},
{"search_priority": 0, "search_heb": u"גילת", "symbol_code": 736, "name": u"גילת"},
{"search_priority": 0, "search_heb": u"עולש", "symbol_code": 737, "name": u"עולש"},
{"search_priority": 0, "search_heb": u"דור", "symbol_code": 738, "name": u"דור"},
{"search_priority": 1, "search_heb": u"שדה עוזיהו", "symbol_code": 739, "name": u"שדה עוזיהו"},
{"search_priority": 0, "search_heb": u"אשתאול", "symbol_code": 740, "name": u"אשתאול"},
{"search_priority": 0, "search_heb": u"שואבה", "symbol_code": 741, "name": u"שואבה"},
{"search_priority": 1, "search_heb": u"מסילת ציון", "symbol_code": 742, "name": u"מסילת ציון"},
{"search_priority": 1, "search_heb": u"כפר שמואל", "symbol_code": 743, "name": u"כפר שמואל"},
{"search_priority": 1, "search_heb": u"תלמי יפה", "symbol_code": 744, "name": u"תלמי יפה"},
{"search_priority": 0, "search_heb": u"גמזו", "symbol_code": 745, "name": u"גמזו"},
{"search_priority": 0, "search_heb": u"ברכיה", "symbol_code": 746, "name": u"ברכיה"},
{"search_priority": 1, "search_heb": u"בית שקמה", "symbol_code": 747, "name": u"בית שקמה"},
{"search_priority": 0, "search_heb": u"מסלול", "symbol_code": 748, "name": u"מסלול"},
{"search_priority": 0, "search_heb": u"פטיש", "symbol_code": 749, "name": u"פטיש"},
{"search_priority": 0, "search_heb": u"פדויים", "symbol_code": 750, "name": u"פדויים"},
{"search_priority": 1, "search_heb": u"בית מאיר", "symbol_code": 751, "name": u"בית מאיר"},
{"search_priority": 0, "search_heb": u"תעוז", "symbol_code": 752, "name": u"תעוז"},
{"search_priority": 0, "search_heb": u"ינוב", "symbol_code": 753, "name": u"ינוב"},
{"search_priority": 0, "search_heb": u"גורן", "symbol_code": 755, "name": u"גורן"},
{"search_priority": 1, "search_heb": u"בית עזרא", "symbol_code": 756, "name": u"בית עזרא"},
{"search_priority": 0, "search_heb": u"מצליח", "symbol_code": 757, "name": u"מצליח"},
{"search_priority": 1, "search_heb": u"יד חנה", "symbol_code": 758, "name": u"יד חנה"},
{"search_priority": 0, "search_heb": u"יציץ", "symbol_code": 759, "name": u"יציץ"},
{"search_priority": 1, "search_heb": u"בן זכאי", "symbol_code": 760, "name": u"בן זכאי"},
{"search_priority": 0, "search_heb": u"שובה", "symbol_code": 761, "name": u"שובה"},
{"search_priority": 0, "search_heb": u"בטחה", "symbol_code": 762, "name": u"בטחה"},
{"search_priority": 0, "search_heb": u"שתולים", "symbol_code": 763, "name": u"שתולים"},
{"search_priority": 1, "search_heb": u"כפר מרדכי", "symbol_code": 764, "name": u"כפר מרדכי"},
{"search_priority": 1, "search_heb": u"משגב דב", "symbol_code": 765, "name": u"משגב דב"},
{"search_priority": 0, "search_heb": u"קוממיות", "symbol_code": 766, "name": u"קוממיות"},
{"search_priority": 0, "search_heb": u"פורת", "symbol_code": 767, "name": u"פורת"},
{"search_priority": 0, "search_heb": u"כרמיה", "symbol_code": 768, "name": u"כרמיה"},
{"search_priority": 1, "search_heb": u"ניר עציון", "symbol_code": 769, "name": u"ניר עציון"},
{"search_priority": 1, "search_heb": u"מבוא ביתר", "symbol_code": 771, "name": u"מבוא ביתר"},
{"search_priority": 0, "search_heb": u"אמונים", "symbol_code": 772, "name": u"אמונים"},
{"search_priority": 0, "search_heb": u"עמיקם", "symbol_code": 773, "name": u"עמיקם"},
{"search_priority": 0, "search_heb": u"צוריאל", "symbol_code": 774, "name": u"צוריאל"},
{"search_priority": 1, "search_heb": u"יד נתן", "symbol_code": 775, "name": u"יד נתן"},
{"search_priority": 0, "search_heb": u"מחסיה", "symbol_code": 776, "name": u"מחסיה"},
{"search_priority": 0, "search_heb": u"נחשון", "symbol_code": 777, "name": u"נחשון"},
{"search_priority": 0, "search_heb": u"תרום", "symbol_code": 778, "name": u"תרום"},
{"search_priority": 0, "search_heb": u"עמינדב", "symbol_code": 779, "name": u"עמינדב"},
{"search_priority": 0, "search_heb": u"אורה", "symbol_code": 780, "name": u"אורה"},
{"search_priority": 1, "search_heb": u"אבן ספיר", "symbol_code": 783, "name": u"אבן ספיר"},
{"search_priority": 1, "search_heb": u"בית נחמיה", "symbol_code": 784, "name": u"בית נחמיה"},
{"search_priority": 0, "search_heb": u"אחיהוד", "symbol_code": 785, "name": u"אחיהוד"},
{"search_priority": 1, "search_heb": u"כפר זיתים", "symbol_code": 786, "name": u"כפר זיתים"},
{"search_priority": 1, "search_heb": u"גבעת יערים", "symbol_code": 787, "name": u"גבעת יערים"},
{"search_priority": 0, "search_heb": u"זיתן", "symbol_code": 788, "name": u"זיתן"},
{"search_priority": 0, "search_heb": u"רנן", "symbol_code": 789, "name": u"רנן"},
{"search_priority": 0, "search_heb": u"משען", "symbol_code": 791, "name": u"משען"},
{"search_priority": 1, "search_heb": u"נתיב השיירה", "symbol_code": 792, "name": u"נתיב השיירה"},
{"search_priority": 0, "search_heb": u"גבעתי", "symbol_code": 793, "name": u"גבעתי"},
{"search_priority": 0, "search_heb": u"עגור", "symbol_code": 794, "name": u"עגור"},
{"search_priority": 0, "search_heb": u"יערה", "symbol_code": 795, "name": u"יערה"},
{"search_priority": 0, "search_heb": u"צלפון", "symbol_code": 796, "name": u"צלפון"},
{"search_priority": 0, "search_heb": u"אחיעזר", "symbol_code": 797, "name": u"אחיעזר"},
{"search_priority": 0, "search_heb": u"יגל", "symbol_code": 798, "name": u"יגל"},
{"search_priority": 0, "search_heb": u"זכריה", "symbol_code": 799, "name": u"זכריה"},
{"search_priority": 1, "search_heb": u"בית חנניה", "symbol_code": 800, "name": u"בית חנניה"},
{"search_priority": 0, "search_heb": u"חמד", "symbol_code": 801, "name": u"חמד"},
{"search_priority": 1, "search_heb": u"גבעת כ\"ח", "symbol_code": 802, "name": u"גבעת כ\"ח"},
{"search_priority": 0, "search_heb": u"יושיביה", "symbol_code": 803, "name": u"יושיביה"},
{"search_priority": 0, "search_heb": u"אחיסמך", "symbol_code": 804, "name": u"אחיסמך"},
{"search_priority": 0, "search_heb": u"ישעי", "symbol_code": 805, "name": u"ישעי"},
{"search_priority": 1, "search_heb": u"עין יהב", "symbol_code": 806, "name": u"עין יהב"},
{"search_priority": 0, "search_heb": u"חניאל", "symbol_code": 807, "name": u"חניאל"},
{"search_priority": 1, "search_heb": u"ניר אליהו", "symbol_code": 808, "name": u"ניר אליהו"},
{"search_priority": 0, "search_heb": u"נחם", "symbol_code": 809, "name": u"נחם"},
{"search_priority": 0, "search_heb": u"עופר", "symbol_code": 810, "name": u"עופר"},
{"search_priority": 0, "search_heb": u"יכיני", "symbol_code": 811, "name": u"יכיני"},
{"search_priority": 0, "search_heb": u"שלומי", "symbol_code": 812, "name": u"שלומי"},
{"search_priority": 1, "search_heb": u"עין יעקב", "symbol_code": 813, "name": u"עין יעקב"},
{"search_priority": 0, "search_heb": u"תלמים", "symbol_code": 814, "name": u"תלמים"},
{"search_priority": 0, "search_heb": u"זבדיאל", "symbol_code": 815, "name": u"זבדיאל"},
{"search_priority": 0, "search_heb": u"זנוח", "symbol_code": 816, "name": u"זנוח"},
{"search_priority": 0, "search_heb": u"עזריקם", "symbol_code": 817, "name": u"עזריקם"},
{"search_priority": 0, "search_heb": u"זרחיה", "symbol_code": 818, "name": u"זרחיה"},
{"search_priority": 0, "search_heb": u"אביגדור", "symbol_code": 819, "name": u"אביגדור"},
{"search_priority": 0, "search_heb": u"חלץ", "symbol_code": 820, "name": u"חלץ"},
{"search_priority": 0, "search_heb": u"אחוזם", "symbol_code": 821, "name": u"אחוזם"},
{"search_priority": 0, "search_heb": u"מטע", "symbol_code": 822, "name": u"מטע"},
{"search_priority": 1, "search_heb": u"בר גיורא", "symbol_code": 823, "name": u"בר גיורא"},
{"search_priority": 1, "search_heb": u"כוכב מיכאל", "symbol_code": 824, "name": u"כוכב מיכאל"},
{"search_priority": 1, "search_heb": u"נס הרים", "symbol_code": 825, "name": u"נס הרים"},
{"search_priority": 0, "search_heb": u"עוזה", "symbol_code": 826, "name": u"עוזה"},
{"search_priority": 1, "search_heb": u"נווה מבטח", "symbol_code": 827, "name": u"נווה מבטח"},
{"search_priority": 0, "search_heb": u"ישרש", "symbol_code": 828, "name": u"ישרש"},
{"search_priority": 0, "search_heb": u"מבטחים", "symbol_code": 829, "name": u"מבטחים"},
{"search_priority": 0, "search_heb": u"ירוחם", "symbol_code": 831, "name": u"ירוחם"},
{"search_priority": 0, "search_heb": u"נורית", "symbol_code": 833, "name": u"נורית"},
{"search_priority": 0, "search_heb": u"גנות", "symbol_code": 836, "name": u"גנות"},
{"search_priority": 0, "search_heb": u"עזריאל", "symbol_code": 837, "name": u"עזריאל"},
{"search_priority": 0, "search_heb": u"פדיה", "symbol_code": 838, "name": u"פדיה"},
{"search_priority": 0, "search_heb": u"פתחיה", "symbol_code": 839, "name": u"פתחיה"},
{"search_priority": 0, "search_heb": u"כיסופים", "symbol_code": 840, "name": u"כיסופים"},
{"search_priority": 0, "search_heb": u"אלישמע", "symbol_code": 841, "name": u"אלישמע"},
{"search_priority": 0, "search_heb": u"געש", "symbol_code": 842, "name": u"געש"},
{"search_priority": 0, "search_heb": u"מרגליות", "symbol_code": 843, "name": u"מרגליות"},
{"search_priority": 1, "search_heb": u"נחל עוז", "symbol_code": 844, "name": u"נחל עוז"},
{"search_priority": 1, "search_heb": u"כפר עזה", "symbol_code": 845, "name": u"כפר עזה"},
{"search_priority": 0, "search_heb": u"שפר", "symbol_code": 846, "name": u"שפר"},
{"search_priority": 1, "search_heb": u"בית רבן", "symbol_code": 848, "name": u"בית רבן"},
{"search_priority": 0, "search_heb": u"דבירה", "symbol_code": 849, "name": u"דבירה"},
{"search_priority": 0, "search_heb": u"אחיטוב", "symbol_code": 850, "name": u"אחיטוב"},
{"search_priority": 1, "search_heb": u"ניצני עוז", "symbol_code": 851, "name": u"ניצני עוז"},
{"search_priority": 0, "search_heb": u"גונן", "symbol_code": 852, "name": u"גונן"},
{"search_priority": 0, "search_heb": u"גאליה", "symbol_code": 853, "name": u"גאליה"},
{"search_priority": 0, "search_heb": u"רחוב", "symbol_code": 854, "name": u"רחוב"},
{"search_priority": 0, "search_heb": u"שעלבים", "symbol_code": 856, "name": u"שעלבים"},
{"search_priority": 1, "search_heb": u"כפר אביב", "symbol_code": 857, "name": u"כפר אביב"},
{"search_priority": 1, "search_heb": u"נווה ירק", "symbol_code": 858, "name": u"נווה ירק"},
{"search_priority": 0, "search_heb": u"כסלון", "symbol_code": 859, "name": u"כסלון"},
{"search_priority": 1, "search_heb": u"שדה אליעזר", "symbol_code": 861, "name": u"שדה אליעזר"},
{"search_priority": 1, "search_heb": u"גני יוחנן", "symbol_code": 862, "name": u"גני יוחנן"},
{"search_priority": 0, "search_heb": u"גינתון", "symbol_code": 863, "name": u"גינתון"},
{"search_priority": 0, "search_heb": u"בקוע", "symbol_code": 864, "name": u"בקוע"},
{"search_priority": 0, "search_heb": u"שיבולים", "symbol_code": 865, "name": u"שיבולים"},
{"search_priority": 0, "search_heb": u"יטבתה", "symbol_code": 866, "name": u"יטבתה"},
{"search_priority": 1, "search_heb": u"אלוני יצחק", "symbol_code": 868, "name": u"אלוני יצחק"},
{"search_priority": 1, "search_heb": u"גבעת השלושה", "symbol_code": 870, "name": u"גבעת השלושה"},
{"search_priority": 0, "search_heb": u"עינת", "symbol_code": 871, "name": u"עינת"},
{"search_priority": 1, "search_heb": u"גאולי תימן", "symbol_code": 872, "name": u"גאולי תימן"},
{"search_priority": 0, "search_heb": u"שלווה", "symbol_code": 873, "name": u"שלווה"},
{"search_priority": 1, "search_heb": u"מגדל העמק", "symbol_code": 874, "name": u"מגדל העמק"},
{"search_priority": 1, "search_heb": u"בית חירות", "symbol_code": 877, "name": u"בית חירות"},
{"search_priority": 1, "search_heb": u"עין שריד", "symbol_code": 880, "name": u"עין שריד"},
{"search_priority": 0, "search_heb": u"אורנים", "symbol_code": 882, "name": u"אורנים"},
{"search_priority": 1, "search_heb": u"שדה בוקר", "symbol_code": 885, "name": u"שדה בוקר"},
{"search_priority": 0, "search_heb": u"איתנים", "symbol_code": 886, "name": u"איתנים"},
{"search_priority": 1, "search_heb": u"כפר הרי\"ף", "symbol_code": 888, "name": u"כפר הרי\"ף"},
{"search_priority": 2, "search_heb": u"כפר חסידים ב'", "symbol_code": 889, "name": u"כפר חסידים ב'"},
{"search_priority": 2, "search_heb": u"כפר הנוער הדתי", "symbol_code": 890,
"name": u"כפר הנוער הדתי"},
{"search_priority": 0, "search_heb": u"עבדון", "symbol_code": 892, "name": u"עבדון"},
{"search_priority": 1, "search_heb": u"מדרשת רופין", "symbol_code": 897, "name": u"מדרשת רופין"},
{"search_priority": 3, "search_heb": u"שבלי - אום אל-גנם", "symbol_code": 913,
"name": u"שבלי - אום אל-גנם"},
{"search_priority": 0, "search_heb": u"ישע", "symbol_code": 916, "name": u"ישע"},
{"search_priority": 1, "search_heb": u"עצמון שגב", "symbol_code": 917, "name": u"עצמון שגב"},
{"search_priority": 1, "search_heb": u"גבעת ישעיהו", "symbol_code": 919, "name": u"גבעת ישעיהו"},
{"search_priority": 1, "search_heb": u"שער מנשה", "symbol_code": 921, "name": u"שער מנשה"},
{"search_priority": 0, "search_heb": u"רכסים", "symbol_code": 922, "name": u"רכסים"},
{"search_priority": 1, "search_heb": u"נווה אבות", "symbol_code": 926, "name": u"נווה אבות"},
{"search_priority": 2, "search_heb": u"אבו עמרה )שבט(", "symbol_code": 932,
"name": u"אבו עמרה )שבט("},
{"search_priority": 2, "search_heb": u"אבו סריחאן )שבט(", "symbol_code": 935,
"name": u"אבו סריחאן )שבט("},
{"search_priority": 1, "search_heb": u"מסעודין אל-עזאזמה", "symbol_code": 939,
"name": u"מסעודין אל-עזאזמה"},
{"search_priority": 1, "search_heb": u"סואעד )חמרייה(", "symbol_code": 942,
"name": u"סואעד )חמרייה("},
{"search_priority": 1, "search_heb": u"בסמת טבעון", "symbol_code": 944, "name": u"בסמת טבעון"},
{"search_priority": 1, "search_heb": u"חוג'ייראת )ד'הרה(", "symbol_code": 948,
"name": u"חוג'ייראת )ד'הרה("},
{"search_priority": 1, "search_heb": u"הוזייל )שבט(", "symbol_code": 956, "name": u"הוזייל )שבט("},
{"search_priority": 2, "search_heb": u"עוקבי )בנו עוקבה(", "symbol_code": 957,
"name": u"עוקבי )בנו עוקבה("},
{"search_priority": 2, "search_heb": u"אבו עבדון )שבט(", "symbol_code": 958,
"name": u"אבו עבדון )שבט("},
{"search_priority": 1, "search_heb": u"אפיניש )שבט(", "symbol_code": 959, "name": u"אפיניש )שבט("},
{"search_priority": 1, "search_heb": u"אסד )שבט(", "symbol_code": 960, "name": u"אסד )שבט("},
{"search_priority": 2, "search_heb": u"אבו רוקייק )שבט(", "symbol_code": 961,
"name": u"אבו רוקייק )שבט("},
{"search_priority": 0, "search_heb": u"טובא-זנגריה", "symbol_code": 962, "name": u"טובא-זנגריה"},
{"search_priority": 1, "search_heb": u"אעצם )שבט(", "symbol_code": 963, "name": u"אעצם )שבט("},
{"search_priority": 1, "search_heb": u"קודייראת א-צאנע)שבט(", "symbol_code": 964,
"name": u"קודייראת א-צאנע)שבט("},
{"search_priority": 1, "search_heb": u"אטרש )שבט(", "symbol_code": 965, "name": u"אטרש )שבט("},
{"search_priority": 2, "search_heb": u"אבו רובייעה )שבט(", "symbol_code": 966,
"name": u"אבו רובייעה )שבט("},
{"search_priority": 2, "search_heb": u"אבו ג'ווייעד )שבט(", "symbol_code": 967,
"name": u"אבו ג'ווייעד )שבט("},
{"search_priority": 2, "search_heb": u"אבו קורינאת )שבט(", "symbol_code": 968,
"name": u"אבו קורינאת )שבט("},
{"search_priority": 1, "search_heb": u"עטאוונה )שבט(", "symbol_code": 969, "name": u"עטאוונה )שבט("},
{"search_priority": 2, "search_heb": u"תראבין א-צאנע )שבט(", "symbol_code": 970,
"name": u"תראבין א-צאנע )שבט("},
{"search_priority": 1, "search_heb": u"קוואעין )שבט(", "symbol_code": 972, "name": u"קוואעין )שבט("},
{"search_priority": 0, "search_heb": u"זרזיר", "symbol_code": 975, "name": u"זרזיר"},
{"search_priority": 1, "search_heb": u"ג'נאביב )שבט(", "symbol_code": 976, "name": u"ג'נאביב )שבט("},
{"search_priority": 0, "search_heb": u"כעביה-טבאש-חג'אג'רה", "symbol_code": 978,
"name": u"כעביה-טבאש-חג'אג'רה"},
{"search_priority": 1, "search_heb": u"ח'ואלד )שבט(", "symbol_code": 986, "name": u"ח'ואלד )שבט("},
{"search_priority": 2, "search_heb": u"סואעד )כמאנה( )שבט(", "symbol_code": 989,
"name": u"סואעד )כמאנה( )שבט("},
{"search_priority": 1, "search_heb": u"ראס עלי", "symbol_code": 990, "name": u"ראס עלי"},
{"search_priority": 0, "search_heb": u"חמאם", "symbol_code": 993, "name": u"חמאם"},
{"search_priority": 1, "search_heb": u"מנשית זבדה", "symbol_code": 994, "name": u"מנשית זבדה"},
{"search_priority": 1, "search_heb": u"רומת הייב", "symbol_code": 997, "name": u"רומת הייב"},
{"search_priority": 1, "search_heb": u"ביר אל-מכסור", "symbol_code": 998, "name": u"ביר אל-מכסור"},
{"search_priority": 1, "search_heb": u"מבשרת ציון", "symbol_code": 1015, "name": u"מבשרת ציון"},
{"search_priority": 1, "search_heb": u"אור עקיבא", "symbol_code": 1020, "name": u"אור עקיבא"},
{"search_priority": 0, "search_heb": u"חרוצים", "symbol_code": 1024, "name": u"חרוצים"},
{"search_priority": 0, "search_heb": u"שדרות", "symbol_code": 1031, "name": u"שדרות"},
{"search_priority": 1, "search_heb": u"קרית מלאכי", "symbol_code": 1034, "name": u"קרית מלאכי"},
{"search_priority": 1, "search_heb": u"נצאצרה )שבט(", "symbol_code": 1041, "name": u"נצאצרה )שבט("},
{"search_priority": 2, "search_heb": u"אבו עמאר )שבט(", "symbol_code": 1042,
"name": u"אבו עמאר )שבט("},
{"search_priority": 0, "search_heb": u"גיזו", "symbol_code": 1043, "name": u"גיזו"},
{"search_priority": 0, "search_heb": u"יעף", "symbol_code": 1044, "name": u"יעף"},
{"search_priority": 0, "search_heb": u"שתולה", "symbol_code": 1045, "name": u"שתולה"},
{"search_priority": 0, "search_heb": u"אוהד", "symbol_code": 1046, "name": u"אוהד"},
{"search_priority": 0, "search_heb": u"חזון", "symbol_code": 1047, "name": u"חזון"},
{"search_priority": 1, "search_heb": u"בית חשמונאי", "symbol_code": 1050, "name": u"בית חשמונאי"},
{"search_priority": 1, "search_heb": u"תלמי אליהו", "symbol_code": 1051, "name": u"תלמי אליהו"},
{"search_priority": 0, "search_heb": u"קטורה", "symbol_code": 1052, "name": u"קטורה"},
{"search_priority": 1, "search_heb": u"עין חצבה", "symbol_code": 1053, "name": u"עין חצבה"},
{"search_priority": 1, "search_heb": u"תל שבע", "symbol_code": 1054, "name": u"תל שבע"},
{"search_priority": 2, "search_heb": u"עין כרם-בי\"ס חקלאי", "symbol_code": 1056,
"name": u"עין כרם-בי\"ס חקלאי"},
{"search_priority": 1, "search_heb": u"נווה זוהר", "symbol_code": 1057, "name": u"נווה זוהר"},
{"search_priority": 1, "search_heb": u"שדה ניצן", "symbol_code": 1058, "name": u"שדה ניצן"},
{"search_priority": 0, "search_heb": u"כסיפה", "symbol_code": 1059, "name": u"כסיפה"},
{"search_priority": 0, "search_heb": u"לקיה", "symbol_code": 1060, "name": u"לקיה"},
{"search_priority": 1, "search_heb": u"נצרת עילית", "symbol_code": 1061, "name": u"נצרת עילית"},
{"search_priority": 0, "search_heb": u"מעלות-תרשיחא", "symbol_code": 1063, "name": u"מעלות-תרשיחא"},
{"search_priority": 0, "search_heb": u"אמירים", "symbol_code": 1064, "name": u"אמירים"},
{"search_priority": 0, "search_heb": u"זמרת", "symbol_code": 1065, "name": u"זמרת"},
{"search_priority": 1, "search_heb": u"בני עי\"ש", "symbol_code": 1066, "name": u"בני עי\"ש"},
{"search_priority": 0, "search_heb": u"דוב\"ב", "symbol_code": 1067, "name": u"דוב\"ב"},
{"search_priority": 0, "search_heb": u"אדמית", "symbol_code": 1068, "name": u"אדמית"},
{"search_priority": 0, "search_heb": u"רם-און", "symbol_code": 1069, "name": u"רם-און"},
{"search_priority": 0, "search_heb": u"אביעזר", "symbol_code": 1070, "name": u"אביעזר"},
{"search_priority": 1, "search_heb": u"נווה מיכאל", "symbol_code": 1071, "name": u"נווה מיכאל"},
{"search_priority": 1, "search_heb": u"גן הדרום", "symbol_code": 1072, "name": u"גן הדרום"},
{"search_priority": 1, "search_heb": u"בית ברל", "symbol_code": 1076, "name": u"בית ברל"},
{"search_priority": 1, "search_heb": u"גבעת שפירא", "symbol_code": 1077, "name": u"גבעת שפירא"},
{"search_priority": 0, "search_heb": u"צפרירים", "symbol_code": 1079, "name": u"צפרירים"},
{"search_priority": 0, "search_heb": u"מבועים", "symbol_code": 1080, "name": u"מבועים"},
{"search_priority": 1, "search_heb": u"אבן מנחם", "symbol_code": 1081, "name": u"אבן מנחם"},
{"search_priority": 0, "search_heb": u"מעגלים", "symbol_code": 1082, "name": u"מעגלים"},
{"search_priority": 0, "search_heb": u"תושיה", "symbol_code": 1083, "name": u"תושיה"},
{"search_priority": 3, "search_heb": u"בן שמן )כפר נוער(", "symbol_code": 1084,
"name": u"בן שמן )כפר נוער("},
{"search_priority": 1, "search_heb": u"כרם שלום", "symbol_code": 1085, "name": u"כרם שלום"},
{"search_priority": 2, "search_heb": u"כרם יבנה )ישיבה(", "symbol_code": 1094,
"name": u"כרם יבנה )ישיבה("},
{"search_priority": 1, "search_heb": u"כפר מימון", "symbol_code": 1095, "name": u"כפר מימון"},
{"search_priority": 1, "search_heb": u"מרכז שפירא", "symbol_code": 1098, "name": u"מרכז שפירא"},
{"search_priority": 1, "search_heb": u"צוקי ים", "symbol_code": 1102, "name": u"צוקי ים"},
{"search_priority": 1, "search_heb": u"גני הדר", "symbol_code": 1103, "name": u"גני הדר"},
{"search_priority": 3, "search_heb": u"פוריה - כפר עבודה", "symbol_code": 1104,
"name": u"פוריה - כפר עבודה"},
{"search_priority": 3, "search_heb": u"פוריה - נווה עובד", "symbol_code": 1105,
"name": u"פוריה - נווה עובד"},
{"search_priority": 0, "search_heb": u"אומן", "symbol_code": 1108, "name": u"אומן"},
{"search_priority": 0, "search_heb": u"חבר", "symbol_code": 1110, "name": u"חבר"},
{"search_priority": 0, "search_heb": u"יודפת", "symbol_code": 1112, "name": u"יודפת"},
{"search_priority": 1, "search_heb": u"צור הדסה", "symbol_code": 1113, "name": u"צור הדסה"},
{"search_priority": 1, "search_heb": u"שריגים )לי-און(", "symbol_code": 1114,
"name": u"שריגים )לי-און("},
{"search_priority": 0, "search_heb": u"אביבים", "symbol_code": 1115, "name": u"אביבים"},
{"search_priority": 0, "search_heb": u"יעל", "symbol_code": 1117, "name": u"יעל"},
{"search_priority": 0, "search_heb": u"אדרת", "symbol_code": 1123, "name": u"אדרת"},
{"search_priority": 1, "search_heb": u"נאות הכיכר", "symbol_code": 1124, "name": u"נאות הכיכר"},
{"search_priority": 0, "search_heb": u"אלמגור", "symbol_code": 1125, "name": u"אלמגור"},
{"search_priority": 0, "search_heb": u"אילות", "symbol_code": 1126, "name": u"אילות"},
{"search_priority": 1, "search_heb": u"מעלה גלבוע", "symbol_code": 1127, "name": u"מעלה גלבוע"},
{"search_priority": 1, "search_heb": u"מי עמי", "symbol_code": 1128, "name": u"מי עמי"},
{"search_priority": 0, "search_heb": u"גרופית", "symbol_code": 1129, "name": u"גרופית"},
{"search_priority": 2, "search_heb": u"כפר רוזנואלד )זרעית(", "symbol_code": 1130,
"name": u"כפר רוזנואלד )זרעית("},
{"search_priority": 0, "search_heb": u"שניר", "symbol_code": 1132, "name": u"שניר"},
{"search_priority": 0, "search_heb": u"ורדון", "symbol_code": 1133, "name": u"ורדון"},
{"search_priority": 1, "search_heb": u"יד השמונה", "symbol_code": 1134, "name": u"יד השמונה"},
{"search_priority": 0, "search_heb": u"צוחר", "symbol_code": 1136, "name": u"צוחר"},
{"search_priority": 1, "search_heb": u"קרית יערים", "symbol_code": 1137, "name": u"קרית יערים"},
{"search_priority": 0, "search_heb": u"יעד", "symbol_code": 1138, "name": u"יעד"},
{"search_priority": 0, "search_heb": u"כרמיאל", "symbol_code": 1139, "name": u"כרמיאל"},
{"search_priority": 2, "search_heb": u"מדרשת בן גוריון", "symbol_code": 1140,
"name": u"מדרשת בן גוריון"},
{"search_priority": 1, "search_heb": u"מבוא מודיעים", "symbol_code": 1141, "name": u"מבוא מודיעים"},
{"search_priority": 1, "search_heb": u"נס עמים", "symbol_code": 1143, "name": u"נס עמים"},
{"search_priority": 0, "search_heb": u"אלומה", "symbol_code": 1145, "name": u"אלומה"},
{"search_priority": 0, "search_heb": u"עלומים", "symbol_code": 1146, "name": u"עלומים"},
{"search_priority": 0, "search_heb": u"נטועה", "symbol_code": 1147, "name": u"נטועה"},
{"search_priority": 1, "search_heb": u"צור נתן", "symbol_code": 1148, "name": u"צור נתן"},
{"search_priority": 0, "search_heb": u"עזר", "symbol_code": 1149, "name": u"עזר"},
{"search_priority": 0, "search_heb": u"צופר", "symbol_code": 1150, "name": u"צופר"},
{"search_priority": 0, "search_heb": u"פארן", "symbol_code": 1151, "name": u"פארן"},
{"search_priority": 0, "search_heb": u"אשלים", "symbol_code": 1152, "name": u"אשלים"},
{"search_priority": 0, "search_heb": u"כישור", "symbol_code": 1153, "name": u"כישור"},
{"search_priority": 0, "search_heb": u"מלכישוע", "symbol_code": 1154, "name": u"מלכישוע"},
{"search_priority": 1, "search_heb": u"מגן שאול", "symbol_code": 1155, "name": u"מגן שאול"},
{"search_priority": 0, "search_heb": u"סמר", "symbol_code": 1156, "name": u"סמר"},
{"search_priority": 0, "search_heb": u"אחווה", "symbol_code": 1157, "name": u"אחווה"},
{"search_priority": 0, "search_heb": u"יהל", "symbol_code": 1158, "name": u"יהל"},
{"search_priority": 0, "search_heb": u"שכניה", "symbol_code": 1160, "name": u"שכניה"},
{"search_priority": 0, "search_heb": u"רהט", "symbol_code": 1161, "name": u"רהט"},
{"search_priority": 1, "search_heb": u"בית רימון", "symbol_code": 1162, "name": u"בית רימון"},
{"search_priority": 0, "search_heb": u"מורן", "symbol_code": 1163, "name": u"מורן"},
{"search_priority": 0, "search_heb": u"שילת", "symbol_code": 1165, "name": u"שילת"},
{"search_priority": 1, "search_heb": u"כפר רות", "symbol_code": 1166, "name": u"כפר רות"},
{"search_priority": 0, "search_heb": u"קיסריה", "symbol_code": 1167, "name": u"קיסריה"},
{"search_priority": 1, "search_heb": u"הוואשלה )שבט(", "symbol_code": 1169, "name": u"הוואשלה )שבט("},
{"search_priority": 1, "search_heb": u"סייד )שבט(", "symbol_code": 1170, "name": u"סייד )שבט("},
{"search_priority": 0, "search_heb": u"לוטם", "symbol_code": 1171, "name": u"לוטם"},
{"search_priority": 0, "search_heb": u"תובל", "symbol_code": 1172, "name": u"תובל"},
{"search_priority": 0, "search_heb": u"לפידות", "symbol_code": 1173, "name": u"לפידות"},
{"search_priority": 0, "search_heb": u"מנוף", "symbol_code": 1174, "name": u"מנוף"},
{"search_priority": 0, "search_heb": u"עידן", "symbol_code": 1175, "name": u"עידן"},
{"search_priority": 0, "search_heb": u"ספיר", "symbol_code": 1176, "name": u"ספיר"},
{"search_priority": 0, "search_heb": u"טללים", "symbol_code": 1177, "name": u"טללים"},
{"search_priority": 0, "search_heb": u"מורשת", "symbol_code": 1178, "name": u"מורשת"},
{"search_priority": 0, "search_heb": u"קורנית", "symbol_code": 1179, "name": u"קורנית"},
{"search_priority": 0, "search_heb": u"צביה", "symbol_code": 1180, "name": u"צביה"},
{"search_priority": 0, "search_heb": u"טל-אל", "symbol_code": 1181, "name": u"טל-אל"},
{"search_priority": 1, "search_heb": u"אלון הגליל", "symbol_code": 1182, "name": u"אלון הגליל"},
{"search_priority": 0, "search_heb": u"כליל", "symbol_code": 1183, "name": u"כליל"},
{"search_priority": 0, "search_heb": u"מתת", "symbol_code": 1184, "name": u"מתת"},
{"search_priority": 0, "search_heb": u"פלך", "symbol_code": 1185, "name": u"פלך"},
{"search_priority": 0, "search_heb": u"הושעיה", "symbol_code": 1186, "name": u"הושעיה"},
{"search_priority": 1, "search_heb": u"עיר אובות", "symbol_code": 1187, "name": u"עיר אובות"},
{"search_priority": 0, "search_heb": u"אשחר", "symbol_code": 1188, "name": u"אשחר"},
{"search_priority": 1, "search_heb": u"מצפה נטופה", "symbol_code": 1190, "name": u"מצפה נטופה"},
{"search_priority": 1, "search_heb": u"בר יוחאי", "symbol_code": 1191, "name": u"בר יוחאי"},
{"search_priority": 0, "search_heb": u"ערערה-בנגב", "symbol_code": 1192, "name": u"ערערה-בנגב"},
{"search_priority": 2, "search_heb": u"ניצנה )קהילת חינוך(", "symbol_code": 1195,
"name": u"ניצנה )קהילת חינוך("},
{"search_priority": 1, "search_heb": u"מחנה יתיר", "symbol_code": 1196, "name": u"מחנה יתיר"},
{"search_priority": 1, "search_heb": u"נאות סמדר", "symbol_code": 1197, "name": u"נאות סמדר"},
{"search_priority": 0, "search_heb": u"כרמים", "symbol_code": 1198, "name": u"כרמים"},
{"search_priority": 0, "search_heb": u"עדי", "symbol_code": 1199, "name": u"עדי"},
{"search_priority": 0, "search_heb": u"מודיעין-מכבים-רעות", "symbol_code": 1200,
"name": u"מודיעין-מכבים-רעות"},
{"search_priority": 0, "search_heb": u"כמון", "symbol_code": 1201, "name": u"כמון"},
{"search_priority": 0, "search_heb": u"מכמנים", "symbol_code": 1202, "name": u"מכמנים"},
{"search_priority": 0, "search_heb": u"הררית", "symbol_code": 1203, "name": u"הררית"},
{"search_priority": 0, "search_heb": u"גילון", "symbol_code": 1204, "name": u"גילון"},
{"search_priority": 0, "search_heb": u"מנות", "symbol_code": 1205, "name": u"מנות"},
{"search_priority": 0, "search_heb": u"גיתה", "symbol_code": 1206, "name": u"גיתה"},
{"search_priority": 0, "search_heb": u"לבון", "symbol_code": 1207, "name": u"לבון"},
{"search_priority": 0, "search_heb": u"הילה", "symbol_code": 1208, "name": u"הילה"},
{"search_priority": 0, "search_heb": u"חרשים", "symbol_code": 1209, "name": u"חרשים"},
{"search_priority": 0, "search_heb": u"כחל", "symbol_code": 1210, "name": u"כחל"},
{"search_priority": 0, "search_heb": u"קדרים", "symbol_code": 1211, "name": u"קדרים"},
{"search_priority": 0, "search_heb": u"עמוקה", "symbol_code": 1212, "name": u"עמוקה"},
{"search_priority": 0, "search_heb": u"צבעון", "symbol_code": 1213, "name": u"צבעון"},
{"search_priority": 0, "search_heb": u"טפחות", "symbol_code": 1214, "name": u"טפחות"},
{"search_priority": 1, "search_heb": u"גורנות הגליל", "symbol_code": 1219, "name": u"גורנות הגליל"},
{"search_priority": 0, "search_heb": u"אבירים", "symbol_code": 1220, "name": u"אבירים"},
{"search_priority": 0, "search_heb": u"צורית", "symbol_code": 1221, "name": u"צורית"},
{"search_priority": 1, "search_heb": u"מצפה אבי\"ב", "symbol_code": 1222, "name": u"מצפה אבי\"ב"},
{"search_priority": 1, "search_heb": u"שדי אברהם", "symbol_code": 1223, "name": u"שדי אברהם"},
{"search_priority": 1, "search_heb": u"כוכב יאיר", "symbol_code": 1224, "name": u"כוכב יאיר"},
{"search_priority": 0, "search_heb": u"רביד", "symbol_code": 1225, "name": u"רביד"},
{"search_priority": 0, "search_heb": u"יובלים", "symbol_code": 1226, "name": u"יובלים"},
{"search_priority": 0, "search_heb": u"יתד", "symbol_code": 1227, "name": u"יתד"},
{"search_priority": 0, "search_heb": u"רקפת", "symbol_code": 1228, "name": u"רקפת"},
{"search_priority": 0, "search_heb": u"כלנית", "symbol_code": 1229, "name": u"כלנית"},
{"search_priority": 0, "search_heb": u"לבנים", "symbol_code": 1230, "name": u"לבנים"},
{"search_priority": 1, "search_heb": u"פרי גן", "symbol_code": 1231, "name": u"פרי גן"},
{"search_priority": 0, "search_heb": u"יבול", "symbol_code": 1232, "name": u"יבול"},
{"search_priority": 0, "search_heb": u"שקף", "symbol_code": 1233, "name": u"שקף"},
{"search_priority": 1, "search_heb": u"קבועה )שבט(", "symbol_code": 1234, "name": u"קבועה )שבט("},
{"search_priority": 0, "search_heb": u"שורשים", "symbol_code": 1235, "name": u"שורשים"},
{"search_priority": 0, "search_heb": u"נירית", "symbol_code": 1236, "name": u"נירית"},
{"search_priority": 1, "search_heb": u"תלמי יוסף", "symbol_code": 1237, "name": u"תלמי יוסף"},
{"search_priority": 0, "search_heb": u"סופה", "symbol_code": 1238, "name": u"סופה"},
{"search_priority": 0, "search_heb": u"חולית", "symbol_code": 1239, "name": u"חולית"},
{"search_priority": 1, "search_heb": u"עין הבשור", "symbol_code": 1240, "name": u"עין הבשור"},
{"search_priority": 0, "search_heb": u"דקל", "symbol_code": 1241, "name": u"דקל"},
{"search_priority": 1, "search_heb": u"נתיב העשרה", "symbol_code": 1242, "name": u"נתיב העשרה"},
{"search_priority": 0, "search_heb": u"קציר", "symbol_code": 1243, "name": u"קציר"},
{"search_priority": 0, "search_heb": u"תמרת", "symbol_code": 1244, "name": u"תמרת"},
{"search_priority": 0, "search_heb": u"סלמה", "symbol_code": 1245, "name": u"סלמה"},
{"search_priority": 0, "search_heb": u"עראמשה", "symbol_code": 1246, "name": u"עראמשה"},
{"search_priority": 0, "search_heb": u"חריש", "symbol_code": 1247, "name": u"חריש"},
{"search_priority": 0, "search_heb": u"אליפז", "symbol_code": 1248, "name": u"אליפז"},
{"search_priority": 0, "search_heb": u"הרדוף", "symbol_code": 1249, "name": u"הרדוף"},
{"search_priority": 1, "search_heb": u"עין תמר", "symbol_code": 1251, "name": u"עין תמר"},
{"search_priority": 0, "search_heb": u"כורזים", "symbol_code": 1252, "name": u"כורזים"},
{"search_priority": 0, "search_heb": u"אמנון", "symbol_code": 1253, "name": u"אמנון"},
{"search_priority": 0, "search_heb": u"נטף", "symbol_code": 1254, "name": u"נטף"},
{"search_priority": 0, "search_heb": u"לוטן", "symbol_code": 1255, "name": u"לוטן"},
{"search_priority": 0, "search_heb": u"אשרת", "symbol_code": 1256, "name": u"אשרת"},
{"search_priority": 0, "search_heb": u"חנתון", "symbol_code": 1257, "name": u"חנתון"},
{"search_priority": 0, "search_heb": u"מסד", "symbol_code": 1258, "name": u"מסד"},
{"search_priority": 1, "search_heb": u"נווה שלום", "symbol_code": 1259, "name": u"נווה שלום"},
{"search_priority": 0, "search_heb": u"רתמים", "symbol_code": 1260, "name": u"רתמים"},
{"search_priority": 1, "search_heb": u"הר עמשא", "symbol_code": 1261, "name": u"הר עמשא"},
{"search_priority": 0, "search_heb": u"צוקים", "symbol_code": 1262, "name": u"צוקים"},
{"search_priority": 1, "search_heb": u"כפר ורדים", "symbol_code": 1263, "name": u"כפר ורדים"},
{"search_priority": 1, "search_heb": u"כרמי יוסף", "symbol_code": 1264, "name": u"כרמי יוסף"},
{"search_priority": 0, "search_heb": u"שומריה", "symbol_code": 1265, "name": u"שומריה"},
{"search_priority": 0, "search_heb": u"שחרות", "symbol_code": 1266, "name": u"שחרות"},
{"search_priority": 0, "search_heb": u"שיטים", "symbol_code": 1267, "name": u"שיטים"},
{"search_priority": 0, "search_heb": u"מיתר", "symbol_code": 1268, "name": u"מיתר"},
{"search_priority": 0, "search_heb": u"להבים", "symbol_code": 1271, "name": u"להבים"},
{"search_priority": 0, "search_heb": u"חלוץ", "symbol_code": 1272, "name": u"חלוץ"},
{"search_priority": 1, "search_heb": u"גן נר", "symbol_code": 1274, "name": u"גן נר"},
{"search_priority": 0, "search_heb": u"אבטליון", "symbol_code": 1275, "name": u"אבטליון"},
{"search_priority": 0, "search_heb": u"אשבל", "symbol_code": 1276, "name": u"אשבל"},
{"search_priority": 1, "search_heb": u"באר מילכה", "symbol_code": 1278, "name": u"באר מילכה"},
{"search_priority": 1, "search_heb": u"נווה חריף", "symbol_code": 1279, "name": u"נווה חריף"},
{"search_priority": 1, "search_heb": u"ניצני סיני", "symbol_code": 1280, "name": u"ניצני סיני"},
{"search_priority": 0, "search_heb": u"מירב", "symbol_code": 1282, "name": u"מירב"},
{"search_priority": 1, "search_heb": u"תל תאומים", "symbol_code": 1283, "name": u"תל תאומים"},
{"search_priority": 0, "search_heb": u"נופית", "symbol_code": 1284, "name": u"נופית"},
{"search_priority": 0, "search_heb": u"כרכום", "symbol_code": 1285, "name": u"כרכום"},
{"search_priority": 0, "search_heb": u"שגב-שלום", "symbol_code": 1286, "name": u"שגב-שלום"},
{"search_priority": 0, "search_heb": u"שני", "symbol_code": 1287, "name": u"שני"},
{"search_priority": 1, "search_heb": u"גבעת אלה", "symbol_code": 1288, "name": u"גבעת אלה"},
{"search_priority": 0, "search_heb": u"זמר", "symbol_code": 1290, "name": u"זמר"},
{"search_priority": 0, "search_heb": u"כמהין", "symbol_code": 1291, "name": u"כמהין"},
{"search_priority": 0, "search_heb": u"ג'דיידה-מכר", "symbol_code": 1292, "name": u"ג'דיידה-מכר"},
{"search_priority": 1, "search_heb": u"גבעת אבני", "symbol_code": 1293, "name": u"גבעת אבני"},
{"search_priority": 1, "search_heb": u"אור הגנוז", "symbol_code": 1294, "name": u"אור הגנוז"},
{"search_priority": 0, "search_heb": u"יאנוח-ג'ת", "symbol_code": 1295, "name": u"יאנוח-ג'ת"},
{"search_priority": 0, "search_heb": u"כסרא-סמיע", "symbol_code": 1296, "name": u"כסרא-סמיע"},
{"search_priority": 1, "search_heb": u"כפר חנניה", "symbol_code": 1297, "name": u"כפר חנניה"},
{"search_priority": 0, "search_heb": u"אתגר", "symbol_code": 1298, "name": u"אתגר"},
{"search_priority": 0, "search_heb": u"שעורים", "symbol_code": 1299, "name": u"שעורים"},
{"search_priority": 0, "search_heb": u"חורה", "symbol_code": 1303, "name": u"חורה"},
{"search_priority": 0, "search_heb": u"שוהם", "symbol_code": 1304, "name": u"שוהם"},
{"search_priority": 0, "search_heb": u"אלעד", "symbol_code": 1309, "name": u"אלעד"},
{"search_priority": 0, "search_heb": u"לפיד", "symbol_code": 1310, "name": u"לפיד"},
{"search_priority": 0, "search_heb": u"אבשלום", "symbol_code": 1311, "name": u"אבשלום"},
{"search_priority": 1, "search_heb": u"פוריה עילית", "symbol_code": 1313, "name": u"פוריה עילית"},
{"search_priority": 1, "search_heb": u"נווה זיו", "symbol_code": 1314, "name": u"נווה זיו"},
{"search_priority": 0, "search_heb": u"מתן", "symbol_code": 1315, "name": u"מתן"},
{"search_priority": 0, "search_heb": u"אל-עריאן", "symbol_code": 1316, "name": u"אל-עריאן"},
{"search_priority": 0, "search_heb": u"דמיידה", "symbol_code": 1317, "name": u"דמיידה"},
{"search_priority": 1, "search_heb": u"מבואות ים", "symbol_code": 1318, "name": u"מבואות ים"},
{"search_priority": 1, "search_heb": u"בת חפר", "symbol_code": 1319, "name": u"בת חפר"},
{"search_priority": 1, "search_heb": u"עין חוד", "symbol_code": 1320, "name": u"עין חוד"},
{"search_priority": 0, "search_heb": u"ח'ואלד", "symbol_code": 1321, "name": u"ח'ואלד"},
{"search_priority": 0, "search_heb": u"הודיות", "symbol_code": 1322, "name": u"הודיות"},
{"search_priority": 1, "search_heb": u"בת הדר", "symbol_code": 1323, "name": u"בת הדר"},
{"search_priority": 0, "search_heb": u"ארסוף", "symbol_code": 1324, "name": u"ארסוף"},
{"search_priority": 1, "search_heb": u"כפר זוהרים", "symbol_code": 1325, "name": u"כפר זוהרים"},
{"search_priority": 0, "search_heb": u"בסמ\"ה", "symbol_code": 1326, "name": u"בסמ\"ה"},
{"search_priority": 1, "search_heb": u"מעלה עירון", "symbol_code": 1327, "name": u"מעלה עירון"},
{"search_priority": 0, "search_heb": u"יתיר", "symbol_code": 1329, "name": u"יתיר"},
{"search_priority": 1, "search_heb": u"אחוזת ברק", "symbol_code": 1330, "name": u"אחוזת ברק"},
{"search_priority": 0, "search_heb": u"כמאנה", "symbol_code": 1331, "name": u"כמאנה"},
{"search_priority": 0, "search_heb": u"חוסנייה", "symbol_code": 1332, "name": u"חוסנייה"},
{"search_priority": 1, "search_heb": u"נוף איילון", "symbol_code": 1333, "name": u"נוף איילון"},
{"search_priority": 1, "search_heb": u"ראס אל-עין", "symbol_code": 1334, "name": u"ראס אל-עין"},
{"search_priority": 2, "search_heb": u"ערב אל נעים", "symbol_code": 1335, "name": u"ערב אל נעים"},
{"search_priority": 0, "search_heb": u"אירוס", "symbol_code": 1336, "name": u"אירוס"},
{"search_priority": 0, "search_heb": u"שמשית", "symbol_code": 1337, "name": u"שמשית"},
{"search_priority": 0, "search_heb": u"כדיתה", "symbol_code": 1338, "name": u"כדיתה"},
{"search_priority": 0, "search_heb": u"אל-עזי", "symbol_code": 1339, "name": u"אל-עזי"},
{"search_priority": 1, "search_heb": u"מרחב עם", "symbol_code": 1340, "name": u"מרחב עם"},
{"search_priority": 1, "search_heb": u"רוח מדבר", "symbol_code": 1341, "name": u"רוח מדבר"},
{"search_priority": 2, "search_heb": u"אבו קרינאת )יישוב(", "symbol_code": 1342,
"name": u"אבו קרינאת )יישוב("},
{"search_priority": 0, "search_heb": u"מכחול", "symbol_code": 1343, "name": u"מכחול"},
{"search_priority": 1, "search_heb": u"גבעות בר", "symbol_code": 1344, "name": u"גבעות בר"},
{"search_priority": 1, "search_heb": u"צור יצחק", "symbol_code": 1345, "name": u"צור יצחק"},
{"search_priority": 1, "search_heb": u"תראבין א-צאנע)ישוב(", "symbol_code": 1346,
"name": u"תראבין א-צאנע)ישוב("},
{"search_priority": 1, "search_heb": u"קצר א-סר", "symbol_code": 1347, "name": u"קצר א-סר"},
{"search_priority": 1, "search_heb": u"ביר הדאג'", "symbol_code": 1348, "name": u"ביר הדאג'"},
{"search_priority": 0, "search_heb": u"דריג'את", "symbol_code": 1349, "name": u"דריג'את"},
{"search_priority": 1, "search_heb": u"אום בטין", "symbol_code": 1358, "name": u"אום בטין"},
{"search_priority": 1, "search_heb": u"אל סייד", "symbol_code": 1359, "name": u"אל סייד"},
{"search_priority": 0, "search_heb": u"מולדה", "symbol_code": 1360, "name": u"מולדה"},
{"search_priority": 1, "search_heb": u"בת חן", "symbol_code": 1361, "name": u"בת חן"},
{"search_priority": 1, "search_heb": u"בני נצרים", "symbol_code": 1363, "name": u"בני נצרים"},
{"search_priority": 0, "search_heb": u"שלומית", "symbol_code": 1364, "name": u"שלומית"},
{"search_priority": 0, "search_heb": u"אליאב", "symbol_code": 1365, "name": u"אליאב"},
{"search_priority": 0, "search_heb": u"נווה", "symbol_code": 1366, "name": u"נווה"},
{"search_priority": 0, "search_heb": u"כחלה", "symbol_code": 1367, "name": u"כחלה"},
{"search_priority": 1, "search_heb": u"בני דקלים", "symbol_code": 1368, "name": u"בני דקלים"},
{"search_priority": 0, "search_heb": u"נטע", "symbol_code": 1369, "name": u"נטע"},
{"search_priority": 1, "search_heb": u"מצפה אילן", "symbol_code": 1370, "name": u"מצפה אילן"},
{"search_priority": 1, "search_heb": u"גני טל", "symbol_code": 1371, "name": u"גני טל"},
{"search_priority": 1, "search_heb": u"נצר חזני", "symbol_code": 1372, "name": u"נצר חזני"},
{"search_priority": 1, "search_heb": u"שלווה במדבר", "symbol_code": 1373, "name": u"שלווה במדבר"},
{"search_priority": 1, "search_heb": u"כרמי קטיף", "symbol_code": 1374, "name": u"כרמי קטיף"},
{"search_priority": 1, "search_heb": u"אבו תלול", "symbol_code": 1375, "name": u"אבו תלול"},
{"search_priority": 1, "search_heb": u"באר גנים", "symbol_code": 1376, "name": u"באר גנים"},
{"search_priority": 1, "search_heb": u"בת חצור", "symbol_code": 1401, "name": u"בת חצור"},
{"search_priority": 1, "search_heb": u"חצרות חולדה", "symbol_code": 1402, "name": u"חצרות חולדה"},
{"search_priority": 1, "search_heb": u"חצרות יסף", "symbol_code": 1404, "name": u"חצרות יסף"},
{"search_priority": 1, "search_heb": u"חצרות כ\"ח", "symbol_code": 1405, "name": u"חצרות כ\"ח"},
{"search_priority": 1, "search_heb": u"חצר בארותיים", "symbol_code": 1409, "name": u"חצר בארותיים"},
{"search_priority": 1, "search_heb": u"מחנה הילה", "symbol_code": 1411, "name": u"מחנה הילה"},
{"search_priority": 2, "search_heb": u"מחנה תל נוף", "symbol_code": 1412, "name": u"מחנה תל נוף"},
{"search_priority": 1, "search_heb": u"מחנה יהודית", "symbol_code": 1413, "name": u"מחנה יהודית"},
{"search_priority": 1, "search_heb": u"מחנה מרים", "symbol_code": 1414, "name": u"מחנה מרים"},
{"search_priority": 1, "search_heb": u"מחנה יפה", "symbol_code": 1415, "name": u"מחנה יפה"},
{"search_priority": 1, "search_heb": u"מחנה יוכבד", "symbol_code": 1416, "name": u"מחנה יוכבד"},
{"search_priority": 1, "search_heb": u"מחנה עדי", "symbol_code": 1417, "name": u"מחנה עדי"},
{"search_priority": 1, "search_heb": u"מחנה טלי", "symbol_code": 1418, "name": u"מחנה טלי"},
{"search_priority": 1, "search_heb": u"ניצן ב'", "symbol_code": 1419, "name": u"ניצן ב'"},
{"search_priority": 0, "search_heb": u"תנובות", "symbol_code": 2002, "name": u"תנובות"},
{"search_priority": 1, "search_heb": u"תלמי אלעזר", "symbol_code": 2003, "name": u"תלמי אלעזר"},
{"search_priority": 0, "search_heb": u"כנות", "symbol_code": 2006, "name": u"כנות"},
{"search_priority": 1, "search_heb": u"שדה יצחק", "symbol_code": 2008, "name": u"שדה יצחק"},
{"search_priority": 0, "search_heb": u"יובל", "symbol_code": 2009, "name": u"יובל"},
{"search_priority": 2, "search_heb": u"כפר בן נון", "symbol_code": 2010, "name": u"כפר בן נון"},
{"search_priority": 0, "search_heb": u"ינון", "symbol_code": 2011, "name": u"ינון"},
{"search_priority": 0, "search_heb": u"אורות", "symbol_code": 2012, "name": u"אורות"},
{"search_priority": 2, "search_heb": u"בן שמן )מושב(", "symbol_code": 2013, "name": u"בן שמן )מושב("},
{"search_priority": 0, "search_heb": u"גבעולים", "symbol_code": 2014, "name": u"גבעולים"},
{"search_priority": 1, "search_heb": u"שדי חמד", "symbol_code": 2015, "name": u"שדי חמד"},
{"search_priority": 0, "search_heb": u"רוויה", "symbol_code": 2016, "name": u"רוויה"},
{"search_priority": 2, "search_heb": u"גבעת חיים )איחוד(", "symbol_code": 2018,
"name": u"גבעת חיים )איחוד("},
{"search_priority": 1, "search_heb": u"אשל הנשיא", "symbol_code": 2021, "name": u"אשל הנשיא"},
{"search_priority": 0, "search_heb": u"להב", "symbol_code": 2023, "name": u"להב"},
{"search_priority": 1, "search_heb": u"אום אל-קוטוף", "symbol_code": 2024, "name": u"אום אל-קוטוף"},
{"search_priority": 0, "search_heb": u"ירדנה", "symbol_code": 2026, "name": u"ירדנה"},
{"search_priority": 1, "search_heb": u"מדרך עוז", "symbol_code": 2029, "name": u"מדרך עוז"},
{"search_priority": 0, "search_heb": u"מנוחה", "symbol_code": 2030, "name": u"מנוחה"},
{"search_priority": 1, "search_heb": u"בית חלקיה", "symbol_code": 2033, "name": u"בית חלקיה"},
{"search_priority": 1, "search_heb": u"חצור הגלילית", "symbol_code": 2034, "name": u"חצור הגלילית"},
{"search_priority": 0, "search_heb": u"עדנים", "symbol_code": 2035, "name": u"עדנים"},
{"search_priority": 0, "search_heb": u"ברקת", "symbol_code": 2038, "name": u"ברקת"},
{"search_priority": 1, "search_heb": u"קרית יערים)מוסד(", "symbol_code": 2039,
"name": u"קרית יערים)מוסד("},
{"search_priority": 1, "search_heb": u"עין גדי", "symbol_code": 2042, "name": u"עין גדי"},
{"search_priority": 0, "search_heb": u"בחן", "symbol_code": 2043, "name": u"בחן"},
{"search_priority": 0, "search_heb": u"מלילות", "symbol_code": 2044, "name": u"מלילות"},
{"search_priority": 0, "search_heb": u"נחלה", "symbol_code": 2045, "name": u"נחלה"},
{"search_priority": 0, "search_heb": u"סגולה", "symbol_code": 2046, "name": u"סגולה"},
{"search_priority": 1, "search_heb": u"ניר משה", "symbol_code": 2047, "name": u"ניר משה"},
{"search_priority": 1, "search_heb": u"ניר עקיבא", "symbol_code": 2048, "name": u"ניר עקיבא"},
{"search_priority": 1, "search_heb": u"שדה צבי", "symbol_code": 2049, "name": u"שדה צבי"},
{"search_priority": 1, "search_heb": u"תלמי ביל\"ו", "symbol_code": 2050, "name": u"תלמי ביל\"ו"},
{"search_priority": 0, "search_heb": u"רווחה", "symbol_code": 2051, "name": u"רווחה"},
{"search_priority": 0, "search_heb": u"אביטל", "symbol_code": 2052, "name": u"אביטל"},
{"search_priority": 0, "search_heb": u"פרזון", "symbol_code": 2053, "name": u"פרזון"},
{"search_priority": 0, "search_heb": u"מיטב", "symbol_code": 2054, "name": u"מיטב"},
{"search_priority": 0, "search_heb": u"מאור", "symbol_code": 2055, "name": u"מאור"},
{"search_priority": 1, "search_heb": u"שדי תרומות", "symbol_code": 2057, "name": u"שדי תרומות"},
{"search_priority": 1, "search_heb": u"פעמי תש\"ז", "symbol_code": 2059, "name": u"פעמי תש\"ז"},
{"search_priority": 0, "search_heb": u"ברוש", "symbol_code": 2060, "name": u"ברוש"},
{"search_priority": 0, "search_heb": u"תדהר", "symbol_code": 2061, "name": u"תדהר"},
{"search_priority": 0, "search_heb": u"תאשור", "symbol_code": 2062, "name": u"תאשור"},
{"search_priority": 0, "search_heb": u"דישון", "symbol_code": 2063, "name": u"דישון"},
{"search_priority": 0, "search_heb": u"זרועה", "symbol_code": 2064, "name": u"זרועה"},
{"search_priority": 1, "search_heb": u"טירת כרמל", "symbol_code": 2100, "name": u"טירת כרמל"},
{"search_priority": 0, "search_heb": u"דימונה", "symbol_code": 2200, "name": u"דימונה"},
{"search_priority": 1, "search_heb": u"קרית טבעון", "symbol_code": 2300, "name": u"קרית טבעון"},
{"search_priority": 1, "search_heb": u"אור יהודה", "symbol_code": 2400, "name": u"אור יהודה"},
{"search_priority": 0, "search_heb": u"נשר", "symbol_code": 2500, "name": u"נשר"},
{"search_priority": 1, "search_heb": u"באר יעקב", "symbol_code": 2530, "name": u"באר יעקב"},
{"search_priority": 0, "search_heb": u"גדרה", "symbol_code": 2550, "name": u"גדרה"},
{"search_priority": 0, "search_heb": u"ערד", "symbol_code": 2560, "name": u"ערד"},
{"search_priority": 0, "search_heb": u"אילת", "symbol_code": 2600, "name": u"אילת"},
{"search_priority": 1, "search_heb": u"בית שמש", "symbol_code": 2610, "name": u"בית שמש"},
{"search_priority": 1, "search_heb": u"קרית אונו", "symbol_code": 2620, "name": u"קרית אונו"},
{"search_priority": 1, "search_heb": u"קרית גת", "symbol_code": 2630, "name": u"קרית גת"},
{"search_priority": 1, "search_heb": u"ראש העין", "symbol_code": 2640, "name": u"ראש העין"},
{"search_priority": 1, "search_heb": u"רמת השרון", "symbol_code": 2650, "name": u"רמת השרון"},
{"search_priority": 0, "search_heb": u"יבנה", "symbol_code": 2660, "name": u"יבנה"},
{"search_priority": 1, "search_heb": u"אום אל-פחם", "symbol_code": 2710, "name": u"אום אל-פחם"},
{"search_priority": 0, "search_heb": u"טירה", "symbol_code": 2720, "name": u"טירה"},
{"search_priority": 0, "search_heb": u"טייבה", "symbol_code": 2730, "name": u"טייבה"},
{"search_priority": 1, "search_heb": u"זבארגה )שבט(", "symbol_code": 2742, "name": u"זבארגה )שבט("},
{"search_priority": 1, "search_heb": u"קרית שמונה", "symbol_code": 2800, "name": u"קרית שמונה"},
{"search_priority": 0, "search_heb": u"ירושלים", "symbol_code": 3000, "name": u"ירושלים"},
{"search_priority": 0, "search_heb": u"חברון", "symbol_code": 3400, "name": u"חברון"},
{"search_priority": 1, "search_heb": u"כפר עציון", "symbol_code": 3488, "name": u"כפר עציון"},
{"search_priority": 1, "search_heb": u"נתיב הגדוד", "symbol_code": 3555, "name": u"נתיב הגדוד"},
{"search_priority": 0, "search_heb": u"אלמוג", "symbol_code": 3556, "name": u"אלמוג"},
{"search_priority": 0, "search_heb": u"קדומים", "symbol_code": 3557, "name": u"קדומים"},
{"search_priority": 0, "search_heb": u"תומר", "symbol_code": 3558, "name": u"תומר"},
{"search_priority": 0, "search_heb": u"אלקנה", "symbol_code": 3560, "name": u"אלקנה"},
{"search_priority": 1, "search_heb": u"מגדל עוז", "symbol_code": 3561, "name": u"מגדל עוז"},
{"search_priority": 0, "search_heb": u"תקוע", "symbol_code": 3563, "name": u"תקוע"},
{"search_priority": 1, "search_heb": u"כוכב השחר", "symbol_code": 3564, "name": u"כוכב השחר"},
{"search_priority": 0, "search_heb": u"רימונים", "symbol_code": 3565, "name": u"רימונים"},
{"search_priority": 0, "search_heb": u"יפית", "symbol_code": 3566, "name": u"יפית"},
{"search_priority": 0, "search_heb": u"סלעית", "symbol_code": 3567, "name": u"סלעית"},
{"search_priority": 0, "search_heb": u"ריחן", "symbol_code": 3568, "name": u"ריחן"},
{"search_priority": 1, "search_heb": u"מבוא דותן", "symbol_code": 3569, "name": u"מבוא דותן"},
{"search_priority": 0, "search_heb": u"אריאל", "symbol_code": 3570, "name": u"אריאל"},
{"search_priority": 1, "search_heb": u"שבי שומרון", "symbol_code": 3571, "name": u"שבי שומרון"},
{"search_priority": 1, "search_heb": u"כפר תפוח", "symbol_code": 3572, "name": u"כפר תפוח"},
{"search_priority": 0, "search_heb": u"חלמיש", "symbol_code": 3573, "name": u"חלמיש"},
{"search_priority": 1, "search_heb": u"בית אל", "symbol_code": 3574, "name": u"בית אל"},
{"search_priority": 1, "search_heb": u"בית חורון", "symbol_code": 3575, "name": u"בית חורון"},
{"search_priority": 1, "search_heb": u"מצפה יריחו", "symbol_code": 3576, "name": u"מצפה יריחו"},
{"search_priority": 1, "search_heb": u"שדמות מחולה", "symbol_code": 3578, "name": u"שדמות מחולה"},
{"search_priority": 1, "search_heb": u"אלון מורה", "symbol_code": 3579, "name": u"אלון מורה"},
{"search_priority": 0, "search_heb": u"ארגמן", "symbol_code": 3598, "name": u"ארגמן"},
{"search_priority": 0, "search_heb": u"מחולה", "symbol_code": 3599, "name": u"מחולה"},
{"search_priority": 0, "search_heb": u"קליה", "symbol_code": 3601, "name": u"קליה"},
{"search_priority": 1, "search_heb": u"ראש צורים", "symbol_code": 3602, "name": u"ראש צורים"},
{"search_priority": 1, "search_heb": u"הר גילה", "symbol_code": 3603, "name": u"הר גילה"},
{"search_priority": 1, "search_heb": u"אלון שבות", "symbol_code": 3604, "name": u"אלון שבות"},
{"search_priority": 0, "search_heb": u"משואה", "symbol_code": 3605, "name": u"משואה"},
{"search_priority": 0, "search_heb": u"גלגל", "symbol_code": 3606, "name": u"גלגל"},
{"search_priority": 0, "search_heb": u"ייט\"ב", "symbol_code": 3607, "name": u"ייט\"ב"},
{"search_priority": 1, "search_heb": u"מעלה אפרים", "symbol_code": 3608, "name": u"מעלה אפרים"},
{"search_priority": 0, "search_heb": u"חמרה", "symbol_code": 3609, "name": u"חמרה"},
{"search_priority": 1, "search_heb": u"מצפה שלם", "symbol_code": 3610, "name": u"מצפה שלם"},
{"search_priority": 1, "search_heb": u"קרית ארבע", "symbol_code": 3611, "name": u"קרית ארבע"},
{"search_priority": 0, "search_heb": u"בקעות", "symbol_code": 3612, "name": u"בקעות"},
{"search_priority": 0, "search_heb": u"גיתית", "symbol_code": 3613, "name": u"גיתית"},
{"search_priority": 0, "search_heb": u"מכורה", "symbol_code": 3614, "name": u"מכורה"},
{"search_priority": 0, "search_heb": u"פצאל", "symbol_code": 3615, "name": u"פצאל"},
{"search_priority": 1, "search_heb": u"מעלה אדומים", "symbol_code": 3616, "name": u"מעלה אדומים"},
{"search_priority": 0, "search_heb": u"עופרה", "symbol_code": 3617, "name": u"עופרה"},
{"search_priority": 0, "search_heb": u"אלעזר", "symbol_code": 3618, "name": u"אלעזר"},
{"search_priority": 0, "search_heb": u"רועי", "symbol_code": 3619, "name": u"רועי"},
{"search_priority": 0, "search_heb": u"נירן", "symbol_code": 3620, "name": u"נירן"},
{"search_priority": 1, "search_heb": u"מעלה שומרון", "symbol_code": 3637, "name": u"מעלה שומרון"},
{"search_priority": 1, "search_heb": u"כפר אדומים", "symbol_code": 3638, "name": u"כפר אדומים"},
{"search_priority": 1, "search_heb": u"ורד יריחו", "symbol_code": 3639, "name": u"ורד יריחו"},
{"search_priority": 1, "search_heb": u"קרני שומרון", "symbol_code": 3640, "name": u"קרני שומרון"},
{"search_priority": 0, "search_heb": u"שילה", "symbol_code": 3641, "name": u"שילה"},
{"search_priority": 0, "search_heb": u"חיננית", "symbol_code": 3643, "name": u"חיננית"},
{"search_priority": 1, "search_heb": u"גבעון החדשה", "symbol_code": 3644, "name": u"גבעון החדשה"},
{"search_priority": 1, "search_heb": u"בית הערבה", "symbol_code": 3645, "name": u"בית הערבה"},
{"search_priority": 0, "search_heb": u"חמדת", "symbol_code": 3646, "name": u"חמדת"},
{"search_priority": 0, "search_heb": u"יקיר", "symbol_code": 3647, "name": u"יקיר"},
{"search_priority": 0, "search_heb": u"מתתיהו", "symbol_code": 3648, "name": u"מתתיהו"},
{"search_priority": 0, "search_heb": u"שקד", "symbol_code": 3649, "name": u"שקד"},
{"search_priority": 0, "search_heb": u"אפרת", "symbol_code": 3650, "name": u"אפרת"},
{"search_priority": 1, "search_heb": u"מעלה מכמש", "symbol_code": 3651, "name": u"מעלה מכמש"},
{"search_priority": 1, "search_heb": u"בית אריה", "symbol_code": 3652, "name": u"בית אריה"},
{"search_priority": 1, "search_heb": u"מעלה עמוס", "symbol_code": 3653, "name": u"מעלה עמוס"},
{"search_priority": 0, "search_heb": u"ברקן", "symbol_code": 3654, "name": u"ברקן"},
{"search_priority": 0, "search_heb": u"ניל\"י", "symbol_code": 3655, "name": u"ניל\"י"},
{"search_priority": 0, "search_heb": u"כרמל", "symbol_code": 3656, "name": u"כרמל"},
{"search_priority": 0, "search_heb": u"מעון", "symbol_code": 3657, "name": u"מעון"},
{"search_priority": 0, "search_heb": u"עטרת", "symbol_code": 3658, "name": u"עטרת"},
{"search_priority": 0, "search_heb": u"פסגות", "symbol_code": 3659, "name": u"פסגות"},
{"search_priority": 0, "search_heb": u"עמנואל", "symbol_code": 3660, "name": u"עמנואל"},
{"search_priority": 1, "search_heb": u"מבוא חורון", "symbol_code": 3709, "name": u"מבוא חורון"},
{"search_priority": 0, "search_heb": u"ברכה", "symbol_code": 3710, "name": u"ברכה"},
{"search_priority": 0, "search_heb": u"ענב", "symbol_code": 3712, "name": u"ענב"},
{"search_priority": 0, "search_heb": u"נעמ\"ה", "symbol_code": 3713, "name": u"נעמ\"ה"},
{"search_priority": 0, "search_heb": u"עלמון", "symbol_code": 3715, "name": u"עלמון"},
{"search_priority": 0, "search_heb": u"חרמש", "symbol_code": 3717, "name": u"חרמש"},
{"search_priority": 0, "search_heb": u"תלם", "symbol_code": 3719, "name": u"תלם"},
{"search_priority": 1, "search_heb": u"שערי תקווה", "symbol_code": 3720, "name": u"שערי תקווה"},
{"search_priority": 0, "search_heb": u"אשכולות", "symbol_code": 3722, "name": u"אשכולות"},
{"search_priority": 1, "search_heb": u"פני חבר", "symbol_code": 3723, "name": u"פני חבר"},
{"search_priority": 0, "search_heb": u"נגוהות", "symbol_code": 3724, "name": u"נגוהות"},
{"search_priority": 1, "search_heb": u"נווה דניאל", "symbol_code": 3725, "name": u"נווה דניאל"},
{"search_priority": 0, "search_heb": u"נוקדים", "symbol_code": 3726, "name": u"נוקדים"},
{"search_priority": 1, "search_heb": u"עלי זהב", "symbol_code": 3727, "name": u"עלי זהב"},
{"search_priority": 1, "search_heb": u"גבעת זאב", "symbol_code": 3730, "name": u"גבעת זאב"},
{"search_priority": 0, "search_heb": u"טנא", "symbol_code": 3743, "name": u"טנא"},
{"search_priority": 0, "search_heb": u"ברוכין", "symbol_code": 3744, "name": u"ברוכין"},
{"search_priority": 1, "search_heb": u"מצדות יהודה", "symbol_code": 3745, "name": u"מצדות יהודה"},
{"search_priority": 1, "search_heb": u"קרית נטפים", "symbol_code": 3746, "name": u"קרית נטפים"},
{"search_priority": 0, "search_heb": u"דולב", "symbol_code": 3747, "name": u"דולב"},
{"search_priority": 0, "search_heb": u"עתניאל", "symbol_code": 3748, "name": u"עתניאל"},
{"search_priority": 0, "search_heb": u"יצהר", "symbol_code": 3749, "name": u"יצהר"},
{"search_priority": 1, "search_heb": u"אלפי מנשה", "symbol_code": 3750, "name": u"אלפי מנשה"},
{"search_priority": 0, "search_heb": u"מגדלים", "symbol_code": 3751, "name": u"מגדלים"},
{"search_priority": 1, "search_heb": u"מעלה לבונה", "symbol_code": 3752, "name": u"מעלה לבונה"},
{"search_priority": 0, "search_heb": u"אספר", "symbol_code": 3754, "name": u"אספר"},
{"search_priority": 0, "search_heb": u"סוסיה", "symbol_code": 3756, "name": u"סוסיה"},
{"search_priority": 0, "search_heb": u"אדורה", "symbol_code": 3759, "name": u"אדורה"},
{"search_priority": 0, "search_heb": u"אורנית", "symbol_code": 3760, "name": u"אורנית"},
{"search_priority": 0, "search_heb": u"איתמר", "symbol_code": 3762, "name": u"איתמר"},
{"search_priority": 1, "search_heb": u"גבע בנימין", "symbol_code": 3763, "name": u"גבע בנימין"},
{"search_priority": 0, "search_heb": u"חגי", "symbol_code": 3764, "name": u"חגי"},
{"search_priority": 0, "search_heb": u"עלי", "symbol_code": 3765, "name": u"עלי"},
{"search_priority": 1, "search_heb": u"כרמי צור", "symbol_code": 3766, "name": u"כרמי צור"},
{"search_priority": 0, "search_heb": u"נחליאל", "symbol_code": 3767, "name": u"נחליאל"},
{"search_priority": 0, "search_heb": u"פדואל", "symbol_code": 3768, "name": u"פדואל"},
{"search_priority": 1, "search_heb": u"הר אדר", "symbol_code": 3769, "name": u"הר אדר"},
{"search_priority": 0, "search_heb": u"חשמונאים", "symbol_code": 3770, "name": u"חשמונאים"},
{"search_priority": 0, "search_heb": u"סנסנה", "symbol_code": 3777, "name": u"סנסנה"},
{"search_priority": 1, "search_heb": u"עץ אפרים", "symbol_code": 3778, "name": u"עץ אפרים"},
{"search_priority": 1, "search_heb": u"כוכב יעקב", "symbol_code": 3779, "name": u"כוכב יעקב"},
{"search_priority": 1, "search_heb": u"ביתר עילית", "symbol_code": 3780, "name": u"ביתר עילית"},
{"search_priority": 0, "search_heb": u"קדר", "symbol_code": 3781, "name": u"קדר"},
{"search_priority": 0, "search_heb": u"רותם", "symbol_code": 3782, "name": u"רותם"},
{"search_priority": 0, "search_heb": u"שמעה", "symbol_code": 3784, "name": u"שמעה"},
{"search_priority": 0, "search_heb": u"משכיות", "symbol_code": 3785, "name": u"משכיות"},
{"search_priority": 0, "search_heb": u"אבנת", "symbol_code": 3786, "name": u"אבנת"},
{"search_priority": 0, "search_heb": u"נעלה", "symbol_code": 3787, "name": u"נעלה"},
{"search_priority": 0, "search_heb": u"טלמון", "symbol_code": 3788, "name": u"טלמון"},
{"search_priority": 0, "search_heb": u"נופים", "symbol_code": 3790, "name": u"נופים"},
{"search_priority": 0, "search_heb": u"צופים", "symbol_code": 3791, "name": u"צופים"},
{"search_priority": 1, "search_heb": u"אבני חפץ", "symbol_code": 3793, "name": u"אבני חפץ"},
{"search_priority": 1, "search_heb": u"בת עין", "symbol_code": 3794, "name": u"בת עין"},
{"search_priority": 0, "search_heb": u"רבבה", "symbol_code": 3795, "name": u"רבבה"},
{"search_priority": 1, "search_heb": u"כפר האורנים", "symbol_code": 3796, "name": u"כפר האורנים"},
{"search_priority": 1, "search_heb": u"מודיעין עילית", "symbol_code": 3797, "name": u"מודיעין עילית"},
{"search_priority": 0, "search_heb": u"רחלים", "symbol_code": 3822, "name": u"רחלים"},
{"search_priority": 1, "search_heb": u"גני מודיעין", "symbol_code": 3823, "name": u"גני מודיעין"},
{"search_priority": 0, "search_heb": u"חיפה", "symbol_code": 4000, "name": u"חיפה"},
{"search_priority": 0, "search_heb": u"בוקעאתא", "symbol_code": 4001, "name": u"בוקעאתא"},
{"search_priority": 0, "search_heb": u"אלי-עד", "symbol_code": 4002, "name": u"אלי-עד"},
{"search_priority": 0, "search_heb": u"אל-רום", "symbol_code": 4003, "name": u"אל-רום"},
{"search_priority": 1, "search_heb": u"כפר חרוב", "symbol_code": 4004, "name": u"כפר חרוב"},
{"search_priority": 0, "search_heb": u"חספין", "symbol_code": 4005, "name": u"חספין"},
{"search_priority": 0, "search_heb": u"קשת", "symbol_code": 4006, "name": u"קשת"},
{"search_priority": 0, "search_heb": u"יונתן", "symbol_code": 4007, "name": u"יונתן"},
{"search_priority": 1, "search_heb": u"מעלה גמלא", "symbol_code": 4008, "name": u"מעלה גמלא"},
{"search_priority": 0, "search_heb": u"שעל", "symbol_code": 4009, "name": u"שעל"},
{"search_priority": 0, "search_heb": u"אודם", "symbol_code": 4010, "name": u"אודם"},
{"search_priority": 1, "search_heb": u"אבני איתן", "symbol_code": 4011, "name": u"אבני איתן"},
{"search_priority": 0, "search_heb": u"אניעם", "symbol_code": 4012, "name": u"אניעם"},
{"search_priority": 0, "search_heb": u"אורטל", "symbol_code": 4013, "name": u"אורטל"},
{"search_priority": 0, "search_heb": u"נטור", "symbol_code": 4014, "name": u"נטור"},
{"search_priority": 1, "search_heb": u"בני יהודה", "symbol_code": 4015, "name": u"בני יהודה"},
{"search_priority": 1, "search_heb": u"אלוני הבשן", "symbol_code": 4017, "name": u"אלוני הבשן"},
{"search_priority": 0, "search_heb": u"מיצר", "symbol_code": 4019, "name": u"מיצר"},
{"search_priority": 1, "search_heb": u"גבעת יואב", "symbol_code": 4021, "name": u"גבעת יואב"},
{"search_priority": 0, "search_heb": u"גשור", "symbol_code": 4022, "name": u"גשור"},
{"search_priority": 0, "search_heb": u"קלע", "symbol_code": 4024, "name": u"קלע"},
{"search_priority": 1, "search_heb": u"קדמת צבי", "symbol_code": 4025, "name": u"קדמת צבי"},
{"search_priority": 0, "search_heb": u"חד-נס", "symbol_code": 4026, "name": u"חד-נס"},
{"search_priority": 0, "search_heb": u"כנף", "symbol_code": 4028, "name": u"כנף"},
{"search_priority": 0, "search_heb": u"נמרוד", "symbol_code": 4035, "name": u"נמרוד"},
{"search_priority": 0, "search_heb": u"קצרין", "symbol_code": 4100, "name": u"קצרין"},
{"search_priority": 1, "search_heb": u"מרום גולן", "symbol_code": 4101, "name": u"מרום גולן"},
{"search_priority": 1, "search_heb": u"מג'דל שמס", "symbol_code": 4201, "name": u"מג'דל שמס"},
{"search_priority": 0, "search_heb": u"מסעדה", "symbol_code": 4203, "name": u"מסעדה"},
{"search_priority": 1, "search_heb": u"מבוא חמה", "symbol_code": 4204, "name": u"מבוא חמה"},
{"search_priority": 0, "search_heb": u"אפיק", "symbol_code": 4301, "name": u"אפיק"},
{"search_priority": 1, "search_heb": u"נווה אטי\"ב", "symbol_code": 4303, "name": u"נווה אטי\"ב"},
{"search_priority": 0, "search_heb": u"נוב", "symbol_code": 4304, "name": u"נוב"},
{"search_priority": 0, "search_heb": u"ע'ג'ר", "symbol_code": 4501, "name": u"ע'ג'ר"},
{"search_priority": 1, "search_heb": u"עין קנייא", "symbol_code": 4502, "name": u"עין קנייא"},
{"search_priority": 1, "search_heb": u"עין זיוון", "symbol_code": 4503, "name": u"עין זיוון"},
{"search_priority": 1, "search_heb": u"נאות גולן", "symbol_code": 4551, "name": u"נאות גולן"},
{"search_priority": 1, "search_heb": u"רמת מגשימים", "symbol_code": 4701, "name": u"רמת מגשימים"},
{"search_priority": 0, "search_heb": u"רמות", "symbol_code": 4702, "name": u"רמות"},
{"search_priority": 2, "search_heb": u"תל אביב -יפו", "symbol_code": 5000, "name": u"תל אביב - יפו"},
{"search_priority": 1, "search_heb": u"באקה אל-גרביה", "symbol_code": 6000, "name": u"באקה אל-גרביה"},
{"search_priority": 1, "search_heb": u"בני ברק", "symbol_code": 6100, "name": u"בני ברק"},
{"search_priority": 1, "search_heb": u"בת ים", "symbol_code": 6200, "name": u"בת ים"},
{"search_priority": 0, "search_heb": u"גבעתיים", "symbol_code": 6300, "name": u"גבעתיים"},
{"search_priority": 0, "search_heb": u"הרצליה", "symbol_code": 6400, "name": u"הרצליה"},
{"search_priority": 0, "search_heb": u"חדרה", "symbol_code": 6500, "name": u"חדרה"},
{"search_priority": 0, "search_heb": u"חולון", "symbol_code": 6600, "name": u"חולון"},
{"search_priority": 0, "search_heb": u"טבריה", "symbol_code": 6700, "name": u"טבריה"},
{"search_priority": 1, "search_heb": u"קרית אתא", "symbol_code": 6800, "name": u"קרית אתא"},
{"search_priority": 1, "search_heb": u"כפר סבא", "symbol_code": 6900, "name": u"כפר סבא"},
{"search_priority": 0, "search_heb": u"לוד", "symbol_code": 7000, "name": u"לוד"},
{"search_priority": 0, "search_heb": u"אשקלון", "symbol_code": 7100, "name": u"אשקלון"},
{"search_priority": 1, "search_heb": u"נס ציונה", "symbol_code": 7200, "name": u"נס ציונה"},
{"search_priority": 0, "search_heb": u"נצרת", "symbol_code": 7300, "name": u"נצרת"},
{"search_priority": 0, "search_heb": u"נתניה", "symbol_code": 7400, "name": u"נתניה"},
{"search_priority": 0, "search_heb": u"סח'נין", "symbol_code": 7500, "name": u"סח'נין"},
{"search_priority": 0, "search_heb": u"עכו", "symbol_code": 7600, "name": u"עכו"},
{"search_priority": 0, "search_heb": u"עפולה", "symbol_code": 7700, "name": u"עפולה"},
{"search_priority": 1, "search_heb": u"פרדס חנה-כרכור", "symbol_code": 7800,
"name": u"פרדס חנה-כרכור"},
{"search_priority": 1, "search_heb": u"פתח תקווה", "symbol_code": 7900, "name": u"פתח תקווה"},
{"search_priority": 0, "search_heb": u"צפת", "symbol_code": 8000, "name": u"צפת"},
{"search_priority": 1, "search_heb": u"קרית מוצקין", "symbol_code": 8200, "name": u"קרית מוצקין"},
{"search_priority": 1, "search_heb": u"ראשון לציון", "symbol_code": 8300, "name": u"ראשון לציון"},
{"search_priority": 0, "search_heb": u"רחובות", "symbol_code": 8400, "name": u"רחובות"},
{"search_priority": 0, "search_heb": u"רמלה", "symbol_code": 8500, "name": u"רמלה"},
{"search_priority": 1, "search_heb": u"רמת גן", "symbol_code": 8600, "name": u"רמת גן"},
{"search_priority": 0, "search_heb": u"רעננה", "symbol_code": 8700, "name": u"רעננה"},
{"search_priority": 0, "search_heb": u"שפרעם", "symbol_code": 8800, "name": u"שפרעם"},
{"search_priority": 0, "search_heb": u"טמרה", "symbol_code": 8900, "name": u"טמרה"},
{"search_priority": 1, "search_heb": u"באר שבע", "symbol_code": 9000, "name": u"באר שבע"},
{"search_priority": 0, "search_heb": u"נהריה", "symbol_code": 9100, "name": u"נהריה"},
{"search_priority": 1, "search_heb": u"בית שאן", "symbol_code": 9200, "name": u"בית שאן"},
{"search_priority": 1, "search_heb": u"זכרון יעקב", "symbol_code": 9300, "name": u"זכרון יעקב"},
{"search_priority": 0, "search_heb": u"יהוד-מונוסון", "symbol_code": 9400, "name": u"יהוד-מונוסון"},
{"search_priority": 1, "search_heb": u"קרית ביאליק", "symbol_code": 9500, "name": u"קרית ביאליק"},
{"search_priority": 1, "search_heb": u"קרית ים", "symbol_code": 9600, "name": u"קרית ים"},
{"search_priority": 1, "search_heb": u"הוד השרון", "symbol_code": 9700, "name": u"הוד השרון"},
{"search_priority": 1, "search_heb": u"בנימינה-גבעת עדה", "symbol_code": 9800,
"name": u"בנימינה-גבעת עדה"}]
, multiinsert=False
)
|
# test_bsddb3_database.py
# Copyright 2019 Roger Marsh
# Licence: See LICENCE (BSD licence)
"""bsddb3_database tests"""
import unittest
import os
try:
from .. import bsddb3_database
except ImportError: # Not ModuleNotFoundError for Pythons earlier than 3.6
bsddb3_database = None
class Bsddb3Database(unittest.TestCase):
def tearDown(self):
logdir = "___memlogs_memory_db"
if os.path.exists(logdir):
for f in os.listdir(logdir):
if f.startswith("log."):
os.remove(os.path.join(logdir, f))
os.rmdir(logdir)
def test__assumptions(self):
msg = "Failure of this test invalidates all other tests"
self.assertRaisesRegex(
TypeError,
"".join(
(
"__init__\(\) missing 1 required positional argument: ",
"'specification'",
)
),
bsddb3_database.Database,
)
self.assertIsInstance(
bsddb3_database.Database({}),
bsddb3_database.Database,
)
def test_open_database(self):
self.assertEqual(bsddb3_database.Database({}).open_database(), None)
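# Run the suite manually below rather than via unittest discovery so that,
# when the bsddb3 import above failed, the module exits without registering
# any tests instead of erroring on the missing name.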
if __name__ == "__main__":
runner = unittest.TextTestRunner
loader = unittest.defaultTestLoader.loadTestsFromTestCase
if bsddb3_database is not None:
runner().run(loader(Bsddb3Database))
|
"""
utils.py
========
Utility functions
Created by Maxim Ziatdinov (email: maxim.ziatdinov@ai4microscopy.com)
"""
from typing import Union, Dict
import jax
import jax.numpy as jnp
import numpy as onp
def enable_x64():
"""Use double (x64) precision for jax arrays"""
jax.config.update("jax_enable_x64", True)
def get_keys(seed: int = 0):
"""
Simple wrapper for jax.random.split to get
rng keys for model inference and prediction
"""
rng_key_1, rng_key_2 = jax.random.split(jax.random.PRNGKey(seed))
return rng_key_1, rng_key_2
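# Minimal usage sketch: the same seed always yields the same pair of keys,
# so any downstream sampling is reproducible.
#   k1, k2 = get_keys(42)
#   x = jax.random.normal(k1, (3,))  # deterministic for a fixed seed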
def split_in_batches(X_new: Union[onp.ndarray, jnp.ndarray],
batch_size: int = 100, dim: int = 0):
"""
Splits array into batches along the first or second dimensions
"""
if dim not in [0, 1]:
raise NotImplementedError("'dim' must be equal to 0 or 1")
    num_batches = X_new.shape[dim] // batch_size
    X_split = []
    for i in range(num_batches):
        if dim == 0:
            X_i = X_new[i*batch_size:(i+1)*batch_size]
        else:
            X_i = X_new[:, i*batch_size:(i+1)*batch_size]
        X_split.append(X_i)
    # Append the remainder batch; indexing from num_batches*batch_size (rather
    # than the loop variable) avoids a NameError when the array is smaller
    # than a single batch and the loop above never runs.
    X_i = X_new[num_batches*batch_size:] if dim == 0 else X_new[:, num_batches*batch_size:]
    if X_i.shape[dim] > 0:
        X_split.append(X_i)
return X_split
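# Example with hypothetical shapes: a (250, 3) array and the default
# batch_size=100 gives two full batches plus a 50-row remainder.
#   X = onp.arange(750).reshape(250, 3)
#   [b.shape for b in split_in_batches(X)]  # [(100, 3), (100, 3), (50, 3)]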
def get_haiku_dict(kernel_params: Dict[str, jnp.ndarray]) -> Dict[str, Dict[str, jnp.ndarray]]:
"""
Extracts weights and biases from viDKL dictionary into a separate
dictionary compatible with haiku's .apply() method
"""
all_weights = {}
all_biases = {}
for key, val in kernel_params.items():
if key.startswith('feature_extractor'):
name_split = key.split('/')
name_new = name_split[1] + '/' + name_split[2][:-2]
if name_split[2][-1] == 'b':
all_biases[name_new] = val
else:
all_weights[name_new] = val
nn_params = {}
for (k, v1), (_, v2) in zip(all_weights.items(), all_biases.items()):
nn_params[k] = {"w": v1, "b": v2}
return nn_params
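# Resulting layout (the module names depend on how viDKL flattens the haiku
# parameters, so 'linear_1' here is a hypothetical example):
#   {'linear_1': {'w': <array>, 'b': <array>}, ...}
# which is the nested dict shape haiku's transformed .apply() expects.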
|
#!/usr/bin/env python3
import serial
import subprocess
import numpy as np
dev = serial.Serial("/dev/ttyUSB0", 115200, timeout=10)
def benchmarkBinary(binary):
print("Flashing {}..".format(binary))
subprocess.run(["st-flash", "write", binary, "0x8000000"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
print("Flashed, now running benchmarks..".format(binary))
state = 'waiting'
marker = b''
# This parses test vector output starting with a number of leading '=',
# and expects a hashtag '#' after the test vector output.
while True:
x = dev.read()
if x == b'' and state == 'waiting':
print("timed out while waiting for the markers")
return benchmarkBinary(binary)
if state == 'waiting':
if x == b'=':
marker += x
continue
            # Once more than five '=' bytes have arrived, assume the test
            # vector output has started
elif marker.count(b'=') > 5:
state = 'beginning'
vector = []
print(" .. found output marker..")
if state == 'beginning':
if x == b'=':
continue
else:
state = 'reading'
elif state == 'reading':
if x == b'#':
break
else:
vector.append(x)
    lines = b''.join(vector).decode("ISO-8859-1")
    lines = lines.split("\n")
    # sometimes the output markers end up as the first line of the output file
    if "=" in lines[0]:
        return [int(lines[2]), int(lines[5]), int(lines[8])]
    else:
        return [int(lines[1]), int(lines[4]), int(lines[7])]
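# The fixed indices (1, 4, 7 or 2, 5, 8) assume three output lines per
# measurement -- keygen, enc, dec in that order -- which is an assumption
# about the benchmark firmware's print format rather than something this
# script verifies.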
def printStats(l):
print("AVG: {:,}, MIN: {:,}, MAX: {:,}".format(int(np.mean(l)), min(l), max(l)))
def printResults(binary, data):
print(f"#{binary}")
print(data)
keygen = [item[0] for item in data]
enc = [item[1] for item in data]
dec = [item[2] for item in data]
print("keygen")
printStats(keygen)
print("enc")
printStats(enc)
print("dec")
printStats(dec)
def doBenchmarks():
binaries = ["benchmark-lightsaber.bin", "benchmark-saber.bin", "benchmark-firesaber.bin",
"stack-lightsaber.bin", "stack-saber.bin", "stack-firesaber.bin"
]
for binary in binaries:
results = []
subprocess.run(["make", binary],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
for i in range(1):
results.append(benchmarkBinary(binary))
printResults(binary, results)
doBenchmarks()
|
import pathlib
import os
import sys
import shutil
import json
from subprocess import check_output, run, CalledProcessError
import time
from tempfile import TemporaryDirectory
os.chdir('..')
repo_dir = pathlib.Path('.')
root_files = list(repo_dir.glob('root/**/*.json'))
translation_files = set(repo_dir.glob('translation/**/*.json'))
git_branch_name = "complete_translations"
threshold = 99
complete_translations = set()
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
def split_name(file):
if not '_' in file.stem:
return (None, None)
return file.stem.split('_')
def calculate_completion(root_file, translation_file):
root_data = json.loads(root_file.read_text())
tr_data = json.loads(translation_file.read_text())
return (len(root_data), len(tr_data))
uid_mapping = {}
long_id_mapping = {}
for file in root_files:
uid, muids = split_name(file)
if not uid:
continue
if uid not in uid_mapping:
uid_mapping[uid] = []
uid_mapping[uid].append(file)
long_id_mapping[file.stem] = file
for file in sorted(translation_files):
uid, muids = split_name(file)
if uid not in uid_mapping:
print(f'Could not find root file for {file.relative_to(repo_dir)}', file=sys.stderr)
continue
if len(uid_mapping[uid]) > 1:
print(f'Multiple potential root files for {file.relative_to(repo_dir)}', file=sys.stderr)
root_file = uid_mapping[uid][0]
    root_count, tr_count = calculate_completion(root_file, file)
    assert root_count > 0
    completion = int(0.5 + 100 * tr_count / root_count)
if completion >= threshold or root_count - tr_count <= 1:
complete_translations.add(file.name)
files = {'meta': [], 'root': []}
files['meta'] = [str(p) for p in repo_dir.glob('*.json')]
for folder in repo_dir.glob('[!.]*'):
files[folder.name] = []
for file in folder.glob('**/*.json'):
if folder.name == 'translation' and file.name not in complete_translations:
continue
if file.stat().st_size <= 4:
continue
files[folder.name].append(str(file.relative_to(repo_dir)))
try:
check_output(['git', 'checkout', 'master'])
check_output(['git', 'stash', 'push', '--include-untracked', '-m', 'preserving master'])
try:
check_output(['git', 'checkout', git_branch_name])
except CalledProcessError:
print('Attempting to create branch')
try:
check_output(f'true | git mktree | xargs git commit-tree | xargs git branch {git_branch_name}', shell=True)
check_output(['git', 'checkout', git_branch_name])
except CalledProcessError:
raise ValueError(f'Could not checkout branch {git_branch_name}')
check_output(['git', 'commit', '--allow-empty', '--only', '-m', f'Created {git_branch_name}'])
for folder, files_to_add in files.items():
for chunk in chunks(files_to_add, 1000):
check_output(' '.join(['git', 'checkout', 'master', '--', *chunk]), shell=True)
time.sleep(0.1)
try:
check_output(['git', 'commit', '-a', '-m', f'Added updated files in {folder}'])
except CalledProcessError:
pass
finally:
check_output(['git', 'checkout', 'master'])
check_output(['git', 'stash', 'pop'])
|
# -*- encoding: UTF-8 -*-
# Standard imports
from os import path, makedirs, remove
from dataclasses import dataclass
# Third party imports
import json
from github import Github, AuthenticatedUser
from github.GithubException import BadCredentialsException, GithubException
from rich.prompt import Prompt
# Application imports
from app.entities.email import Email
from app.logger import console
# Singleton
AUTH_FILE = "tmp/auth.json"
LOGIN_AUTH_METHOD = "login"
TOKEN_AUTH_METHOD = "token"
@dataclass
class GithubService:
github: Github = None
user : AuthenticatedUser = None
def configurated(self):
return path.isfile(AUTH_FILE)
def authenticated(self):
try:
return bool(self.user) and bool(self.user.login)
except BadCredentialsException as exception:
return False
async def configure(self) -> bool:
        directory = path.dirname(AUTH_FILE)
        if directory and not path.isdir(directory):
            makedirs(directory)
# XXX Login authentication method could be used on self-hosted instances
# but this method is no longer an option on github.com
# https://github.com/PyGithub/PyGithub/issues/1851
        method = Prompt.ask("Choose your authentication method",
# choices=[LOGIN_AUTH_METHOD, TOKEN_AUTH_METHOD],
choices=[TOKEN_AUTH_METHOD],
default=TOKEN_AUTH_METHOD)
if LOGIN_AUTH_METHOD == method:
login = Prompt.ask("Enter your login")
password = Prompt.ask("Enter your password", password=True)
auth = { "login": login, "password": password }
else:
token = Prompt.ask("Enter your token", password=True)
auth = { "token": token }
auth["method"] = method
with open(AUTH_FILE, "w") as file:
file.write(json.dumps(auth))
return True
async def authenticate(self) -> bool:
if not self.configurated():
return False
with open(AUTH_FILE) as file:
self.login_or_token = json.loads(file.read())
try:
if LOGIN_AUTH_METHOD == self.login_or_token.get("method"):
self.github = Github(self.login_or_token.get("login"), self.login_or_token.get("password"))
else:
self.github = Github(self.login_or_token.get(self.login_or_token.get("method")))
self.user = self.github.get_user()
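            # Accessing .login forces an API request, so bad credentials are
            # detected here rather than on a later call.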
self.user.login
except BadCredentialsException as exception:
console.print("{:<40s} [red]{:<10s}[/red]".format("Signing in to Github:", "fail"))
console.print("{:<40s} {:<10s}".format("Cleaning the cached authentication file:", "done"))
remove(AUTH_FILE)
return False
except GithubException as exception:
console.print("[red]{:<40s} {:<10s}[/red]".format("Error:", exception.data.get("message")))
remove(AUTH_FILE)
return False
return True
async def search_email(self, email: Email):
if not self.authenticated() and not await self.authenticate():
exit(1)
users = self.github.search_users("{} in:email".format(email.address))
for user in users:
email.user = user
return True
return False
async def create_repository(self, name, *, private=False):
if not self.authenticated() and not await self.authenticate():
exit(1)
return self.user.create_repo(name, private=private)
async def get_repo(self, name: str):
if not self.authenticated() and not await self.authenticate():
exit(1)
return self.user.get_repo(name)
# return self.github.get_repo(name)
|
# PyAlgoTrade
#
# Copyright 2011-2014 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import unittest
import os
from pyalgotrade.barfeed import yahoofeed
from pyalgotrade.barfeed import sqlitefeed
from pyalgotrade import bar
from pyalgotrade import dataseries
from pyalgotrade import marketsession
import common
import feed_test
class TemporarySQLiteFeed:
def __init__(self, dbFilePath, frequency, maxLen=dataseries.DEFAULT_MAX_LEN):
if os.path.exists(dbFilePath):
raise Exception("File exists")
self.__dbFilePath = dbFilePath
self.__frequency = frequency
self.__feed = None
self.__maxLen = maxLen
def __enter__(self):
self.__feed = sqlitefeed.Feed(self.__dbFilePath, self.__frequency, maxLen=self.__maxLen)
def __exit__(self, exc_type, exc_val, exc_tb):
self.__feed.getDatabase().disconnect() # This is for the feed to release the file and get it deleted.
self.__feed = None
os.remove(self.__dbFilePath)
def getFeed(self):
return self.__feed
class SQLiteFeedTestCase(unittest.TestCase):
dbName = "SQLiteFeedTestCase.sqlite"
def testBaseFeedInterface(self):
tmpFeed = TemporarySQLiteFeed(SQLiteFeedTestCase.dbName, bar.Frequency.DAY)
with tmpFeed:
# Load bars using a Yahoo! feed.
yahooFeed = yahoofeed.Feed()
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2000-yahoofinance.csv"), marketsession.USEquities.timezone)
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2001-yahoofinance.csv"), marketsession.USEquities.timezone)
# Fill the database using the bars from the Yahoo! feed.
sqliteFeed = tmpFeed.getFeed()
sqliteFeed.getDatabase().addBarsFromFeed(yahooFeed)
# Load the SQLite feed and process all bars.
sqliteFeed.loadBars("orcl")
feed_test.tstBaseFeedInterface(self, sqliteFeed)
def testLoadDailyBars(self):
tmpFeed = TemporarySQLiteFeed(SQLiteFeedTestCase.dbName, bar.Frequency.DAY)
with tmpFeed:
# Load bars using a Yahoo! feed.
yahooFeed = yahoofeed.Feed()
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2000-yahoofinance.csv"), marketsession.USEquities.timezone)
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2001-yahoofinance.csv"), marketsession.USEquities.timezone)
# Fill the database using the bars from the Yahoo! feed.
sqliteFeed = tmpFeed.getFeed()
sqliteFeed.getDatabase().addBarsFromFeed(yahooFeed)
# Load the SQLite feed and process all bars.
sqliteFeed.loadBars("orcl")
for bars in sqliteFeed:
pass
# Check that both dataseries have the same bars.
yahooDS = yahooFeed["orcl"]
sqliteDS = sqliteFeed["orcl"]
self.assertEqual(len(yahooDS), len(sqliteDS))
            for i in range(len(yahooDS)):
self.assertEqual(yahooDS[i].getDateTime(), sqliteDS[i].getDateTime())
self.assertEqual(yahooDS[i].getOpen(), sqliteDS[i].getOpen())
self.assertEqual(yahooDS[i].getHigh(), sqliteDS[i].getHigh())
self.assertEqual(yahooDS[i].getLow(), sqliteDS[i].getLow())
self.assertEqual(yahooDS[i].getClose(), sqliteDS[i].getClose())
self.assertEqual(yahooDS[i].getAdjClose(), sqliteDS[i].getAdjClose())
def testBounded(self):
tmpFeed = TemporarySQLiteFeed(SQLiteFeedTestCase.dbName, bar.Frequency.DAY, maxLen=2)
with tmpFeed:
# Load bars using a Yahoo! feed.
yahooFeed = yahoofeed.Feed(maxLen=1)
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2000-yahoofinance.csv"), marketsession.USEquities.timezone)
yahooFeed.addBarsFromCSV("orcl", common.get_data_file_path("orcl-2001-yahoofinance.csv"), marketsession.USEquities.timezone)
# Fill the database using the bars from the Yahoo! feed.
sqliteFeed = tmpFeed.getFeed()
sqliteFeed.getDatabase().addBarsFromFeed(yahooFeed)
# Load the SQLite feed and process all bars.
sqliteFeed.loadBars("orcl")
for bars in sqliteFeed:
pass
barDS = sqliteFeed["orcl"]
self.assertEqual(len(barDS), 2)
self.assertEqual(len(barDS.getDateTimes()), 2)
self.assertEqual(len(barDS.getCloseDataSeries()), 2)
self.assertEqual(len(barDS.getCloseDataSeries().getDateTimes()), 2)
self.assertEqual(len(barDS.getOpenDataSeries()), 2)
self.assertEqual(len(barDS.getHighDataSeries()), 2)
self.assertEqual(len(barDS.getLowDataSeries()), 2)
self.assertEqual(len(barDS.getAdjCloseDataSeries()), 2)
|
"""
Pay to delegated puzzle or hidden puzzle
In this puzzle program, the solution must choose either a hidden puzzle or a
delegated puzzle on a given public key.
The given public key is morphed by adding an offset from the hash of the hidden puzzle
and itself, giving a new so-called "synthetic" public key which has the hidden puzzle
hidden inside of it.
If the hidden puzzle path is taken, the hidden puzzle and original public key will be revealed
which proves that it was hidden there in the first place.
This roughly corresponds to bitcoin's taproot.
Note:
p2_delegated_puzzle_or_hidden_puzzle is essentially the "standard coin" in chia.
DEFAULT_HIDDEN_PUZZLE_HASH from this puzzle is used with
calculate_synthetic_secret_key in the wallet's standard pk_to_sk finder.
This is important because it allows sign_coin_spends to function properly via the
following mechanism:
- A 'standard coin' coin exists in the blockchain with some puzzle hash.
- The user's wallet contains a primary sk/pk pair which are used to derive to one
level a set of auxiliary sk/pk pairs which are used for specific coins. These
can be used for signing in AGG_SIG_ME, but the standard coin uses a key further
derived from one of these via calculate_synthetic_secret_key as described in
https://chialisp.com/docs/standard_transaction. Therefore, when a wallet needs
to find a secret key for signing based on a public key, it needs to try repeating
this derivation as well and see if the BLSPublicKey (pk) associated with any of the
derived secret keys matches the pk requested by the coin.
- Python code previously appeared which was written like:
delegated_puzzle_solution = Program.to((1, condition_args))
solutions = Program.to([[], delegated_puzzle_solution, []])
In context, delegated_puzzle_solution here is any *chialisp program*, here one
simply quoting a list of conditions, and the following argument is the arguments
to this program, which here are unused. Secondly, the actual arguments to the
p2_delegated_puzzle_or_hidden_puzzle are given. The first argument determines
whether a hidden or revealed puzzle is used. If the puzzle is hidden, then what
is required is a signature given a specific synthetic key since the key cannot be
derived inline without the puzzle. In that case, the first argument is this key.
In most cases, the puzzle will be revealed, and this argument will be the nil object,
() (represented here by an empty python list).
The second and third arguments are a chialisp program and its corresponding
arguments, which will be run inside the standard coin puzzle. This interacts with
sign_coin_spend in that the AGG_SIG_ME condition added by the inner puzzle asks the
surrounding system to provide a signature over the provided program with a synthetic
key whose derivation is within. Any wallets which intend to use standard coins in
this way must try to resolve a public key to a secret key via this derivation.
"""
import hashlib
from hsms.bls12_381 import BLSPublicKey, BLSSecretExponent
from hsms.streamables import bytes32, Program
from .load_clvm import load_clvm
from .p2_conditions import puzzle_for_conditions
DEFAULT_HIDDEN_PUZZLE = Program.from_bytes(
bytes.fromhex("ff0980")
) # this puzzle `(=)` always fails
DEFAULT_HIDDEN_PUZZLE_HASH = DEFAULT_HIDDEN_PUZZLE.tree_hash()
MOD = load_clvm("p2_delegated_puzzle_or_hidden_puzzle.cl")
SYNTHETIC_MOD = load_clvm("calculate_synthetic_public_key.cl")
def calculate_synthetic_offset(
public_key: BLSPublicKey, hidden_puzzle_hash: bytes32
) -> BLSSecretExponent:
blob = hashlib.sha256(bytes(public_key) + hidden_puzzle_hash).digest()
offset = Program.to(blob).as_int()
assert offset == int(Program.to(blob))
return BLSSecretExponent.from_int(offset)
def calculate_synthetic_public_key(
public_key: BLSPublicKey, hidden_puzzle_hash: bytes32
) -> BLSPublicKey:
r = SYNTHETIC_MOD.run([bytes(public_key), hidden_puzzle_hash])
return BLSPublicKey.from_bytes(r.as_atom())
def calculate_synthetic_secret_key(
secret_key: BLSSecretExponent, hidden_puzzle_hash: bytes32
) -> BLSSecretExponent:
public_key = secret_key.public_key()
synthetic_offset = calculate_synthetic_offset(public_key, hidden_puzzle_hash)
synthetic_secret_key = secret_key + synthetic_offset
return synthetic_secret_key
def puzzle_for_synthetic_public_key(synthetic_public_key: BLSPublicKey) -> Program:
return MOD.curry(bytes(synthetic_public_key))
def puzzle_for_public_key_and_hidden_puzzle_hash(
public_key: BLSPublicKey, hidden_puzzle_hash: bytes32
) -> Program:
synthetic_public_key = calculate_synthetic_public_key(
public_key, hidden_puzzle_hash
)
return puzzle_for_synthetic_public_key(synthetic_public_key)
def puzzle_for_public_key_and_hidden_puzzle(
public_key: BLSPublicKey, hidden_puzzle: Program
) -> Program:
return puzzle_for_public_key_and_hidden_puzzle_hash(
public_key, hidden_puzzle.tree_hash()
)
def puzzle_for_pk(public_key: BLSPublicKey) -> Program:
return puzzle_for_public_key_and_hidden_puzzle_hash(
public_key, DEFAULT_HIDDEN_PUZZLE_HASH
)
def solution_for_delegated_puzzle(
delegated_puzzle: Program, solution: Program
) -> Program:
return Program.to([[], delegated_puzzle, solution])
def solution_for_hidden_puzzle(
hidden_public_key: BLSPublicKey,
hidden_puzzle: Program,
solution_to_hidden_puzzle: Program,
) -> Program:
return Program.to([hidden_public_key, hidden_puzzle, solution_to_hidden_puzzle])
def solution_for_conditions(conditions) -> Program:
delegated_puzzle = puzzle_for_conditions(conditions)
return solution_for_delegated_puzzle(delegated_puzzle, Program.to(0))
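if __name__ == "__main__":
    # Illustrative self-check (an assumption, not part of the original module):
    # deriving the synthetic secret key from a throwaway secret exponent and
    # deriving the synthetic public key from its public key should agree.
    # The exponent value is arbitrary and chosen only for the demo.
    sk = BLSSecretExponent.from_int(0xDEADBEEF)
    synthetic_sk = calculate_synthetic_secret_key(sk, DEFAULT_HIDDEN_PUZZLE_HASH)
    synthetic_pk = calculate_synthetic_public_key(
        sk.public_key(), DEFAULT_HIDDEN_PUZZLE_HASH
    )
    assert bytes(synthetic_sk.public_key()) == bytes(synthetic_pk)
    print("synthetic key derivations agree")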
|
import os
import shutil
from .helpers import build_in_container
def deploy_layer(runtime, env):
print('Beginning deployment...')
os.chdir('.layer')
print('Building layer...')
error = build_in_container(runtime)
    if error:
        print("\033[91mLayer build failed!\033[0m")
        os.chdir('..')
        shutil.rmtree('.layer')
        exit(1)
print('Deploying layer...')
error = os.system(f'sls deploy --stage {env}')
if error:
print("\033[91mDeployment failed!\033[0m")
else:
print(f"\033[92mDeployment to {env} complete!\033[0m")
|
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"sparsify": "neighbors.ipynb",
"hstack": "neighbors.ipynb",
"vstack": "neighbors.ipynb",
"stack": "neighbors.ipynb",
"NMSLibSklearnWrapper": "neighbors.ipynb",
"FastCosineNN": "neighbors.ipynb",
"FastJaccardNN": "neighbors.ipynb",
"FastL2NN": "neighbors.ipynb",
"FastKLDivNN": "neighbors.ipynb"}
modules = ["neighbors.py"]
doc_url = "https://AlanGanem.github.io/NMSLearn/nmslearn/"
git_url = "https://github.com/AlanGanem/nmslearn/tree/master/"
def custom_doc_links(name): return None
|
"""empty message
Revision ID: 623662ea0e7e
Revises: d1edb3cadec8
Create Date: 2020-11-24 16:34:02.327556
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '623662ea0e7e'
down_revision = 'd1edb3cadec8'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_log', sa.Column('mailbox_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'email_log', 'mailbox', ['mailbox_id'], ['id'], ondelete='cascade')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'email_log', type_='foreignkey')
op.drop_column('email_log', 'mailbox_id')
# ### end Alembic commands ###
|
'''Stream data for a cluster backup'''
from logging import getLogger
import click
from hazelsync.cli import with_cluster
log = getLogger('hazelsync')
@click.command()
@click.argument('name')
def stream(name):
'''Pull some data to ease the backup speed'''
with with_cluster(name) as cluster:
log.info("Running hazel stream for %s", name)
cluster.stream()
|
from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional
from PyQt5 import QtCore, QtGui, QtWidgets
from electroncash import address
from electroncash.address import Address, AddressError
from electroncash.avalanche.delegation import (
Delegation,
DelegationBuilder,
WrongDelegatorKeyError,
)
from electroncash.avalanche.primitives import Key, PublicKey
from electroncash.avalanche.proof import LimitedProofId, Proof, ProofBuilder
from electroncash.avalanche.serialize import DeserializationError
from electroncash.bitcoin import is_private_key
from electroncash.constants import PROOF_DUST_THRESHOLD
from electroncash.i18n import _
from electroncash.uint256 import UInt256
from .password_dialog import PasswordDialog
if TYPE_CHECKING:
from electroncash.wallet import Deterministic_Wallet
# We generate a few deterministic private keys to pre-fill some widgets, so the user
# does not need to use an external tool or a dummy wallet to generate keys.
# TODO: don't always use the same keys, increment the index as needed (requires saving
# the index or the generated keys to the wallet file)
_PROOF_MASTER_KEY_INDEX = 0
_DELEGATED_KEY_INDEX = 1
def get_privkey_suggestion(
wallet: Deterministic_Wallet,
key_index: int = 0,
pwd: Optional[str] = None,
) -> str:
"""Get a deterministic private key derived from a BIP44 path that is not used
by the wallet to generate addresses.
Return it in WIF format, or return an empty string on failure (pwd dialog
cancelled).
"""
# Use BIP44 change_index 2, which is not used by any application.
privkey_index = (2, key_index)
if wallet.has_password() and pwd is None:
raise RuntimeError("Wallet password required")
return wallet.export_private_key_for_index(privkey_index, pwd)
class CachedWalletPasswordWidget(QtWidgets.QWidget):
"""A base class for widgets that may prompt the user for a wallet password and
remember that password for later reuse.
The password can also be specified in the constructor. In this case, there is no
need to prompt the user for it.
"""
def __init__(
self,
wallet: Deterministic_Wallet,
pwd: Optional[str] = None,
parent: QtWidgets.QWidget = None,
):
super().__init__(parent)
self._pwd = pwd
self.wallet = wallet
@property
def pwd(self) -> Optional[str]:
"""Return wallet password.
Open a dialog to ask for the wallet password if necessary, and cache it.
Keep asking until the user provides the correct pwd or clicks cancel.
If the password dialog is cancelled, return None.
"""
if self._pwd is not None:
return self._pwd
while self.wallet.has_password():
password = PasswordDialog(parent=self).run()
if password is None:
# dialog cancelled
return
try:
self.wallet.check_password(password)
self._pwd = password
# success
return self._pwd
except Exception as e:
QtWidgets.QMessageBox.critical(self, "Invalid password", str(e))
class Link(QtWidgets.QPushButton):
def __init__(self, text="", parent=None):
super().__init__(text, parent)
stylesheet = """
QPushButton {
color: blue;
border: none;
font-weight: bold;
font-size: 14px;
text-align: center;
}
QPushButton:disabled {
color: gray;
}
"""
self.setStyleSheet(stylesheet)
size_policy = QtWidgets.QSizePolicy()
size_policy.setHorizontalPolicy(QtWidgets.QSizePolicy.Fixed)
self.setSizePolicy(size_policy)
class AvaProofWidget(CachedWalletPasswordWidget):
def __init__(
self,
utxos: List[dict],
wallet: Deterministic_Wallet,
receive_address: Optional[Address] = None,
parent: QtWidgets.QWidget = None,
):
"""
:param utxos: List of UTXOs to be used as stakes
:param parent:
"""
CachedWalletPasswordWidget.__init__(self, wallet, parent=parent)
# This is enough width to show a whole compressed pubkey.
self.setMinimumWidth(750)
# Enough height to show the entire proof without scrolling.
self.setMinimumHeight(680)
self.utxos = utxos
self.excluded_utxos: List[dict] = []
self.wallet = wallet
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
layout.addWidget(QtWidgets.QLabel("Proof sequence"))
self.sequence_sb = QtWidgets.QSpinBox()
self.sequence_sb.setMinimum(0)
layout.addWidget(self.sequence_sb)
layout.addSpacing(10)
expiration_layout = QtWidgets.QHBoxLayout()
layout.addLayout(expiration_layout)
expiration_left_sublayout = QtWidgets.QVBoxLayout()
expiration_layout.addLayout(expiration_left_sublayout)
expiration_left_sublayout.addWidget(QtWidgets.QLabel("Expiration date"))
self.calendar = QtWidgets.QDateTimeEdit()
self.calendar.setToolTip("Date and time at which the proof will expire")
expiration_left_sublayout.addWidget(self.calendar)
expiration_right_sublayout = QtWidgets.QVBoxLayout()
expiration_layout.addLayout(expiration_right_sublayout)
expiration_right_sublayout.addWidget(
QtWidgets.QLabel("Expiration POSIX timestamp")
)
# Use a QDoubleSpinbox with precision set to 0 decimals, because
# QSpinBox is limited to the int32 range (January 19, 2038)
self.timestamp_widget = QtWidgets.QDoubleSpinBox()
self.timestamp_widget.setDecimals(0)
        # date range: genesis block to Wed Jun 09 3554 16:53:20 GMT (5e10 seconds)
        self.timestamp_widget.setRange(1231006505, 50 * 10**9)
self.timestamp_widget.setSingleStep(86400)
self.timestamp_widget.setToolTip(
"POSIX time, seconds since 1970-01-01T00:00:00"
)
expiration_right_sublayout.addWidget(self.timestamp_widget)
layout.addSpacing(10)
layout.addWidget(QtWidgets.QLabel("Master private key (WIF)"))
self.master_key_edit = QtWidgets.QLineEdit()
self.master_key_edit.setToolTip(
"Private key that controls the proof. This is the key that signs the "
"delegation or signs the avalanche votes."
)
        # Suggest a private key to the user, who can replace it if desired.
self.master_key_edit.setText(self._get_privkey_suggestion())
layout.addWidget(self.master_key_edit)
layout.addSpacing(10)
layout.addWidget(
QtWidgets.QLabel("Master public key (computed from master private key)")
)
self.master_pubkey_view = QtWidgets.QLineEdit()
self.master_pubkey_view.setReadOnly(True)
layout.addWidget(self.master_pubkey_view)
layout.addSpacing(10)
layout.addWidget(QtWidgets.QLabel("Payout address"))
self.payout_addr_edit = QtWidgets.QLineEdit()
self.payout_addr_edit.setToolTip(
"Address to which staking rewards could be sent, in the future"
)
if receive_address is not None:
self.payout_addr_edit.setText(receive_address.to_ui_string())
layout.addWidget(self.payout_addr_edit)
layout.addSpacing(10)
self.utxos_wigdet = QtWidgets.QTableWidget(len(utxos), 4)
self.utxos_wigdet.setHorizontalHeaderLabels(
["txid", "vout", "amount (sats)", "block height"]
)
self.utxos_wigdet.verticalHeader().setVisible(False)
self.utxos_wigdet.setSelectionMode(QtWidgets.QTableWidget.NoSelection)
self.utxos_wigdet.horizontalHeader().setSectionResizeMode(
0, QtWidgets.QHeaderView.Stretch
)
layout.addWidget(self.utxos_wigdet)
for i, utxo in enumerate(utxos):
txid_item = QtWidgets.QTableWidgetItem(utxo["prevout_hash"])
self.utxos_wigdet.setItem(i, 0, txid_item)
vout_item = QtWidgets.QTableWidgetItem(str(utxo["prevout_n"]))
self.utxos_wigdet.setItem(i, 1, vout_item)
amount_item = QtWidgets.QTableWidgetItem(str(utxo["value"]))
if utxo["value"] < PROOF_DUST_THRESHOLD:
amount_item.setForeground(QtGui.QColor("red"))
amount_item.setToolTip(
_(
"The minimum threshold for a coin in an avalanche proof is "
"1,000,000 XEC."
)
)
self.utxos_wigdet.setItem(i, 2, amount_item)
height_item = QtWidgets.QTableWidgetItem(str(utxo["height"]))
if utxo["height"] <= 0:
# TODO: make the height cell editable, for users to fill the block
# height manually.
height_item.setForeground(QtGui.QColor("red"))
height_item.setToolTip(
_(
"Unconfirmed coins will not be included because the height of the"
"block for each coin is required to generate the proof."
)
)
self.utxos_wigdet.setItem(i, 3, height_item)
self.generate_button = QtWidgets.QPushButton("Generate proof")
layout.addWidget(self.generate_button)
self.generate_button.clicked.connect(self._on_generate_clicked)
self.proof_display = QtWidgets.QTextEdit()
self.proof_display.setReadOnly(True)
layout.addWidget(self.proof_display)
self.generate_dg_button = Link("Generate a delegation for this proof")
self.generate_dg_button.setEnabled(False)
layout.addWidget(self.generate_dg_button)
# Connect signals
self.calendar.dateTimeChanged.connect(self.on_datetime_changed)
self.timestamp_widget.valueChanged.connect(self.on_timestamp_changed)
self.master_key_edit.textChanged.connect(self.update_master_pubkey)
self.generate_dg_button.clicked.connect(self.open_dg_dialog)
# Init widgets
now = QtCore.QDateTime.currentDateTime()
self.calendar.setDateTime(now.addYears(1))
self.dg_dialog = None
self.update_master_pubkey(self.master_key_edit.text())
def _get_privkey_suggestion(self) -> str:
"""Get a private key to pre-fill the master key field.
Return it in WIF format, or return an empty string on failure (pwd dialog
cancelled).
"""
if not self.wallet.is_deterministic() or not self.wallet.can_export():
return ""
wif_pk = ""
if not self.wallet.has_password() or self.pwd is not None:
wif_pk = get_privkey_suggestion(
self.wallet, key_index=_PROOF_MASTER_KEY_INDEX, pwd=self.pwd
)
return wif_pk
    def on_datetime_changed(self, dt: QtCore.QDateTime):
        """Set the timestamp from a QDateTime"""
        # Block the spinbox's own signals so programmatically setting its value
        # does not re-trigger on_timestamp_changed.
        was_blocked = self.timestamp_widget.blockSignals(True)
        self.timestamp_widget.setValue(dt.toSecsSinceEpoch())
        self.timestamp_widget.blockSignals(was_blocked)
    def on_timestamp_changed(self, timestamp: float):
        """Set the calendar date from POSIX timestamp"""
        timestamp = int(timestamp)
        was_blocked = self.calendar.blockSignals(True)
        self.calendar.setDateTime(QtCore.QDateTime.fromSecsSinceEpoch(timestamp))
        self.calendar.blockSignals(was_blocked)
def update_master_pubkey(self, master_wif: str):
if is_private_key(master_wif):
master_pub = Key.from_wif(master_wif).get_pubkey()
pubkey_str = master_pub.to_hex()
self.master_pubkey_view.setText(pubkey_str)
def _on_generate_clicked(self):
proof = self._build()
if proof is not None:
self.proof_display.setText(f'<p style="color:black;"><b>{proof}</b></p>')
reply = QtWidgets.QMessageBox.question(
self,
"Freeze coins",
"Spending coins that are used as stakes in a proof will invalidate "
"the proof. Do you want to freeze the corresponding coins to avoid "
"accidentally spending them?",
defaultButton=QtWidgets.QMessageBox.Yes,
)
utxos_to_freeze = [u for u in self.utxos if u not in self.excluded_utxos]
if reply == QtWidgets.QMessageBox.Yes:
self.wallet.set_frozen_coin_state(utxos_to_freeze, freeze=True)
self.generate_dg_button.setEnabled(proof is not None)
def _build(self) -> Optional[str]:
master_wif = self.master_key_edit.text()
if not is_private_key(master_wif):
QtWidgets.QMessageBox.critical(
self, "Invalid private key", "Could not parse private key."
)
return
master = Key.from_wif(master_wif)
try:
payout_address = Address.from_string(self.payout_addr_edit.text())
except AddressError as e:
QtWidgets.QMessageBox.critical(self, "Invalid payout address", str(e))
return
payout_script = payout_address.to_script()
if self.wallet.has_password() and self.pwd is None:
self.proof_display.setText(
'<p style="color:red;">Password dialog cancelled!</p>'
)
return
proofbuilder = ProofBuilder(
sequence=self.sequence_sb.value(),
expiration_time=self.calendar.dateTime().toSecsSinceEpoch(),
master=master,
payout_script_pubkey=payout_script,
)
self.excluded_utxos = []
for utxo in self.utxos:
if utxo["height"] <= 0:
# ignore unconfirmed coins
self.excluded_utxos.append(utxo)
continue
address = utxo["address"]
if not isinstance(utxo["address"], Address):
# utxo loaded from JSON file (serialized)
address = Address.from_string(address)
priv_key = self.wallet.export_private_key(address, self.pwd)
proofbuilder.add_utxo(
txid=UInt256.from_hex(utxo["prevout_hash"]),
vout=utxo["prevout_n"],
amount=utxo["value"],
height=utxo["height"],
wif_privkey=priv_key,
is_coinbase=utxo["coinbase"],
)
num_utxos_in_proof = len(self.utxos) - len(self.excluded_utxos)
if num_utxos_in_proof <= 0:
QtWidgets.QMessageBox.critical(
self,
_("No valid stake"),
_("No valid stake left after excluding unconfirmed coins."),
)
return
if len(self.excluded_utxos) > 0:
QtWidgets.QMessageBox.warning(
self,
_("Excluded stakes"),
f"{len(self.excluded_utxos)}"
+ " "
+ _(
"coins have been excluded from the proof because they are "
"unconfirmed or do not have a block height specified."
),
)
return proofbuilder.build().to_hex()
def open_dg_dialog(self):
if self.dg_dialog is None:
self.dg_dialog = AvaDelegationDialog(self.wallet, self.pwd, self)
self.dg_dialog.set_proof(self.proof_display.toPlainText())
self.dg_dialog.set_master(self.master_key_edit.text())
self.dg_dialog.show()
class AvaProofDialog(QtWidgets.QDialog):
def __init__(
self,
utxos: List[dict],
wallet: Deterministic_Wallet,
receive_address: Optional[Address] = None,
parent: Optional[QtWidgets.QWidget] = None,
):
super().__init__(parent)
self.setWindowTitle("Build avalanche proof")
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
self.proof_widget = AvaProofWidget(utxos, wallet, receive_address, self)
layout.addWidget(self.proof_widget)
buttons_layout = QtWidgets.QHBoxLayout()
layout.addLayout(buttons_layout)
self.ok_button = QtWidgets.QPushButton("OK")
buttons_layout.addWidget(self.ok_button)
self.dismiss_button = QtWidgets.QPushButton("Dismiss")
buttons_layout.addWidget(self.dismiss_button)
self.ok_button.clicked.connect(self.accept)
self.dismiss_button.clicked.connect(self.reject)
class AvaDelegationWidget(CachedWalletPasswordWidget):
def __init__(
self,
wallet: Deterministic_Wallet,
pwd: Optional[str] = None,
parent: Optional[QtWidgets.QWidget] = None,
):
super().__init__(wallet, pwd, parent)
self.setMinimumWidth(750)
self.setMinimumHeight(580)
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
self.tab_widget = QtWidgets.QTabWidget()
layout.addWidget(self.tab_widget)
layout.addSpacing(10)
self.proof_edit = QtWidgets.QTextEdit()
self.proof_edit.setAcceptRichText(False)
self.proof_edit.setToolTip(
"Enter a proof in hexadecimal format. A delegation will be generated for "
"this proof. Specify the proof master key as the delegator key below."
)
self.tab_widget.addTab(self.proof_edit, "From a proof")
self.ltd_id_edit = QtWidgets.QLineEdit()
self.ltd_id_edit.setToolTip(
"Enter the proof ID of the proof to be delegated. A delegation will be "
"generated for the proof corresponding to this ID. "
"You need to provide this proof's master key as the delegator key (below)."
)
self.tab_widget.addTab(self.ltd_id_edit, "From a Limited Proof ID")
self.dg_edit = QtWidgets.QTextEdit()
self.dg_edit.setAcceptRichText(False)
self.dg_edit.setToolTip(
"Enter an existing delegation to which you want to add another level. "
"Enter the private key corresponding to this existing delegation's "
"delegated key as the new delegator key, and specify a new delegated key."
)
self.tab_widget.addTab(self.dg_edit, "From an existing delegation")
layout.addWidget(QtWidgets.QLabel("Delegator key (WIF)"))
self.delegator_key_edit = QtWidgets.QLineEdit()
self.delegator_key_edit.setToolTip(
"Master key of the proof, or private key for the last level of an "
"existing delegation."
)
layout.addWidget(self.delegator_key_edit)
layout.addSpacing(10)
layout.addWidget(QtWidgets.QLabel("Delegated public key"))
delegated_key_layout = QtWidgets.QHBoxLayout()
self.pubkey_edit = QtWidgets.QLineEdit()
self.pubkey_edit.setToolTip("The public key to delegate the proof to.")
delegated_key_layout.addWidget(self.pubkey_edit)
generate_key_button = QtWidgets.QPushButton("Generate key")
delegated_key_layout.addWidget(generate_key_button)
layout.addLayout(delegated_key_layout)
layout.addSpacing(10)
self.generate_button = QtWidgets.QPushButton("Generate delegation")
layout.addWidget(self.generate_button)
self.dg_display = QtWidgets.QTextEdit()
self.dg_display.setReadOnly(True)
layout.addWidget(self.dg_display)
# Signals
self.dg_edit.textChanged.connect(self.on_delegation_pasted)
generate_key_button.clicked.connect(self.on_generate_key_clicked)
self.generate_button.clicked.connect(self.on_generate_clicked)
def set_proof(self, proof_hex: str):
self.proof_edit.setText(proof_hex)
def set_master(self, master_wif: str):
self.delegator_key_edit.setText(master_wif)
def on_delegation_pasted(self):
"""Deserialize the delegation to be used as a base delegation to which a level
is to be added. Find the delegated pubkey and check whether this is an auxiliary
key from this wallet. If it is, prefill the Delegator key field with the private
key.
"""
try:
dg = Delegation.from_hex(self.dg_edit.toPlainText())
except DeserializationError:
return
dg_pubkey = dg.get_delegated_public_key()
# Mind the type difference between PublicKey returned by
# Delegation.get_delegated_public_key and PublicKey used by Wallet.
idx = self.wallet.get_auxiliary_pubkey_index(
address.PublicKey.from_pubkey(dg_pubkey.keydata),
self.pwd,
)
if idx is not None:
self.delegator_key_edit.setText(
self.wallet.export_private_key_for_index((2, idx), self.pwd)
)
def on_generate_key_clicked(self):
"""Open a dialog to show a private/public key pair to be used as delegated key.
Fill the delegated public key widget with the resulting public key.
"""
if not self.wallet.is_deterministic() or not self.wallet.can_export():
return
wif_pk = ""
if not self.wallet.has_password() or self.pwd is not None:
wif_pk = get_privkey_suggestion(
self.wallet,
key_index=_DELEGATED_KEY_INDEX,
pwd=self.pwd,
)
if not wif_pk:
# This should only happen if the pwd dialog was cancelled
self.pubkey_edit.setText("")
return
QtWidgets.QMessageBox.information(
self,
"Delegated key",
f"Please save the following private key:<br><b>{wif_pk}</b><br><br>"
f"You will need it to use your delegation with a Bitcoin ABC node.",
)
self.pubkey_edit.setText(Key.from_wif(wif_pk).get_pubkey().to_hex())
def on_generate_clicked(self):
dg_hex = self._build()
if dg_hex is not None:
self.dg_display.setText(f'<p style="color:black;"><b>{dg_hex}</b></p>')
def _build(self) -> Optional[str]:
delegator_wif = self.delegator_key_edit.text()
if not is_private_key(delegator_wif):
QtWidgets.QMessageBox.critical(
self, "Invalid private key", "Could not parse private key."
)
return
delegator = Key.from_wif(delegator_wif)
try:
delegated_pubkey = PublicKey.from_hex(self.pubkey_edit.text())
except DeserializationError:
QtWidgets.QMessageBox.critical(
self,
"Invalid delegated pubkey",
"Could not parse delegated public key.",
)
return
active_tab_widget = self.tab_widget.currentWidget()
if active_tab_widget is self.ltd_id_edit:
try:
ltd_id = LimitedProofId.from_hex(self.ltd_id_edit.text())
except DeserializationError:
QtWidgets.QMessageBox.critical(
self,
"Invalid limited ID",
"Could not parse limited ID (not a 32 bytes hex string).",
)
return
dgb = DelegationBuilder(ltd_id, delegator.get_pubkey())
elif active_tab_widget is self.proof_edit:
try:
proof = Proof.from_hex(self.proof_edit.toPlainText())
except DeserializationError:
QtWidgets.QMessageBox.critical(
self,
"Invalid proof",
"Could not parse proof. Check the format.",
)
return
dgb = DelegationBuilder.from_proof(proof)
elif active_tab_widget is self.dg_edit:
try:
dg = Delegation.from_hex(self.dg_edit.toPlainText())
except DeserializationError:
QtWidgets.QMessageBox.critical(
self,
"Invalid delegation",
"Could not parse delegation. Check the format.",
)
return
dgb = DelegationBuilder.from_delegation(dg)
else:
# This should never happen, so we want to hear about it. Catch fire.
raise RuntimeError("Indeterminate active tab.")
try:
dgb.add_level(delegator, delegated_pubkey)
except WrongDelegatorKeyError:
QtWidgets.QMessageBox.critical(
self,
"Wrong delegator key",
"The provided delegator key does not match the proof master key or "
"the previous delegated public key (if adding a level to an existing "
"delegation).",
)
return
return dgb.build().to_hex()
def get_delegation(self) -> str:
"""Return delegation, as a hexadecimal string.
An empty string means the delegation building failed.
"""
return self.dg_display.toPlainText()
class AvaDelegationDialog(QtWidgets.QDialog):
def __init__(
self,
wallet: Deterministic_Wallet,
pwd: Optional[str] = None,
parent: Optional[QtWidgets.QWidget] = None,
):
super().__init__(parent)
self.setWindowTitle("Build avalanche delegation")
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
self.dg_widget = AvaDelegationWidget(wallet, pwd, parent)
layout.addWidget(self.dg_widget)
buttons_layout = QtWidgets.QHBoxLayout()
layout.addLayout(buttons_layout)
self.ok_button = QtWidgets.QPushButton("OK")
buttons_layout.addWidget(self.ok_button)
self.dismiss_button = QtWidgets.QPushButton("Dismiss")
buttons_layout.addWidget(self.dismiss_button)
self.ok_button.clicked.connect(self.accept)
self.dismiss_button.clicked.connect(self.reject)
def set_proof(self, proof_hex: str):
self.dg_widget.set_proof(proof_hex)
def set_master(self, master_wif: str):
self.dg_widget.set_master(master_wif)
|
# coding: UTF-8
"""
Simple load balancing with pypar
(based on demo3.py from pypar demo package)
Felix Richter <felix.richter2@uni-rostock.de>
"""
import sys
import time
import numpy
import pypar
PYPAR_WORKTAG = 1
PYPAR_DIETAG = 2
def mprint(txt):
"""
Print message txt
with indentation following the node's rank
"""
import pypar
pre = " " * 8 * pypar.rank()
if type(txt) != type('dummy'):
txt = txt.__str__()
pat = "-%d-"
print pre + (pat % pypar.rank()) + txt
class PyparWork(object):
"""Abstract base class for ant work to be balanced"""
def __init__(self):
pass
def uplink(self, balancer, myid, numprocs, node):
self.balancer = balancer
self.pypar_id = myid
self.pypar_numprocs = numprocs
self.pypar_node = node
def getNumWorkItems(self):
pass
def handleWorkResult(self, result, status):
pass
def calcWorkResult(self, worknum):
pass
def masterBeforeWork(self):
"""Master node calls this before sending out the work"""
pass
def slaveBeforeWork(self):
"""Slave nodes call this before receiving work"""
pass
def masterAfterWork(self):
"""Master node calls this after receiving the last work result"""
pass
def slaveAfterWork(self):
"""Slave nodes call this after sending the last work result"""
pass
def msgprint(self, txt):
pre = " " * 8 * self.pypar_id
if type(txt) != type('dummy'):
txt = txt.__str__()
pat = "-%d-"
print pre + (pat % self.pypar_id) + txt
class PyparBalancer(object):
"""The Load Balancer Class
Initialize it with a PyparWork-derived class instance
which describes the actual work to do.
debug == True - more status messages
"""
def __init__(self, work, debug = False):
self.numprocs = pypar.size() # Number of processes as specified by mpirun
self.myid = pypar.rank() # Id of of this process (myid in [0, numproc-1])
self.node = pypar.get_processor_name() # Host name on which current process is running
self.debug= debug
self.work = work
# Added by Ole Nielsen, 15 May 2008
if self.numprocs < 2:
msg = 'PyparBalancer must run on at least 2 processes'
msg += ' for the Master Slave paradigm to make sense.'
raise Exception, msg
self.work.uplink(self, self.myid, self.numprocs, self.node)
self.numworks = self.work.getNumWorkItems()
print "PyparBalancer initialised on proc %d of %d on node %s" %(self.myid, self.numprocs, self.node)
def master(self):
numcompleted = 0
#--- start slaves distributing the first work slot
for i in range(0, min(self.numprocs-1, self.numworks)):
work = i
slave= i+1
pypar.send(work, destination=slave, tag=PYPAR_WORKTAG)
print '[MASTER ]: sent first work "%s" to node %d' %(work, slave)
# dispatch the remaining work slots on dynamic load-balancing policy
# the quicker to do the job, the more jobs it takes
for work in range(self.numprocs-1, self.numworks):
result, status = pypar.receive(source=pypar.any_source, tag=PYPAR_WORKTAG, return_status=True)
print '[MASTER ]: received result from node %d' %(status.source, )
#print result
numcompleted += 1
pypar.send(work, destination=status.source, tag=PYPAR_WORKTAG)
if self.debug: print '[MASTER ]: sent work "%s" to node %d' %(work, status.source)
self.work.handleWorkResult(result, status)
# all works have been dispatched out
print '[MASTER ]: ToDo : %d' %self.numworks
print '[MASTER ]: Done : %d' %numcompleted
# I've still to take into the remaining completions
while (numcompleted < self.numworks):
result, status = pypar.receive(source=pypar.any_source, tag=PYPAR_WORKTAG, return_status=True)
print '[MASTER ]: received (final) result from node %d' % (status.source, )
print result
numcompleted += 1
print '[MASTER ]: %d completed' %numcompleted
self.work.handleWorkResult(result, status)
print '[MASTER ]: about to terminate slaves'
# Tell slaves to stop working
for i in range(1, self.numprocs):
pypar.send('#', destination=i, tag=PYPAR_DIETAG)
if self.debug: print '[MASTER ]: sent DIETAG to node %d' %(i,)
def slave(self):
if self.debug: print '[SLAVE %d]: I am processor %d of %d on node %s' % (self.myid, self.myid, self.numprocs, self.node)
if self.debug: print '[SLAVE %d]: Entering work loop' % (self.myid,)
while True:
result, status = pypar.receive(source=0, tag=pypar.any_tag, return_status=True)
print '[SLAVE %d]: received work with tag %d from node %d'\
%(self.myid, status.tag, status.source)
if (status.tag == PYPAR_DIETAG):
print '[SLAVE %d]: received termination from node %d' % (self.myid, 0)
return
else:
worknum = result
if self.debug: print '[SLAVE %d]: work number is %s' % (self.myid, worknum)
myresult = self.work.calcWorkResult(worknum)
pypar.send(myresult, destination=0)
if self.debug: print '[SLAVE %d]: sent result to node %d' % (self.myid, 0)
def run(self):
if self.myid == 0:
self.work.masterBeforeWork()
self.master()
self.work.masterAfterWork()
else:
self.work.slaveBeforeWork()
self.slave()
self.work.slaveAfterWork()
pypar.finalize()
if self.myid != 0:
sys.exit()
# und schluss.
class PyparDemoWork(PyparWork):
"""Example PyparWork implementation"""
def __init__(self):
import numpy
self.worklist = numpy.arange(0.0,20.0)
self.resultlist = numpy.zeros_like(self.worklist)
def getNumWorkItems(self):
return len(self.worklist)
def calcWorkResult(self, worknum):
return [worknum, self.worklist[worknum] + 1]
def handleWorkResult(self, result, status):
self.resultlist[result[0]] = result[1]
def masterBeforeWork(self):
print self.worklist
def slaveBeforeWork(self):
pass
def masterAfterWork(self):
print self.resultlist
def slaveAfterWork(self):
pass
if __name__ == "__main__":
print "-----------------------"
print "::: PyParBalancer TEST "
print "-----------------------"
# create instance of work class
pyparwork = PyparDemoWork()
# create instance of balancer class,
# initialize with work class
balancer = PyparBalancer(pyparwork, True)
# run it
balancer.run()
|
import os, os.path
import errno
import json
import sys
import traceback
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def load_json(path):
data = {}
try:
with open(path) as f:
data = json.load(f)
print(f'{path} successfully loaded')
    except (OSError, json.JSONDecodeError) as e:
        print(f'Could not load json from {path}: {e}')
return data
def save_json(path, data):
mkdir_p(os.path.dirname(path))
with open(path, 'w') as f:
json.dump(data, f)
def load_cog(bot, cog):
try:
bot.load_extension(f'cogs.{cog}')
print(f'Loaded extension {cog}.')
return True
except Exception as e:
print(f'Failed to load extension {cog}.', file=sys.stderr)
traceback.print_exc()
return False
def unload_cog(bot, cog):
try:
bot.unload_extension(f'cogs.{cog}')
print(f'Unloaded extension {cog}.')
return True
except Exception as e:
print(f'Failed to unload extension {cog}.', file=sys.stderr)
traceback.print_exc()
return False
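if __name__ == "__main__":
    # Small round-trip check (illustrative, not part of the original module):
    # save_json should create any missing directories via mkdir_p, and
    # load_json should read the same payload back.
    import tempfile
    with tempfile.TemporaryDirectory() as tmp:
        p = os.path.join(tmp, "nested", "settings.json")
        save_json(p, {"prefix": "!"})
        assert load_json(p) == {"prefix": "!"}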
|
import numpy as np
import numba as nb
import linoplib
def jacobi(A, v0, f, nu_1=1):
"""If for some reason you want to use the original Jacobi solver (not the weighted version), it's implemented here.
As I understand it, there's no reason you'd want this for solving Av=f.
"""
return weighted_jacobi(A, v0, f, 1, nu_1)
@nb.jit(nopython=True)
def weighted_jacobi(A, v0, f, w, nu_1):
"""Weighted Jacobi solver.
Parameters
----------
A : np.ndarray
Linear differential operator (Av = f).
v0 : np.ndarray
Initial guess for the solution to (Av = f)
f : np.ndarray
Forcing function (Av = f)
w : float
Jacobi weighting factor. Usually set to 2/3.
nu_1 : int, optional
Number of iterations to carry out.
Returns
-------
np.ndarray
1D array containing the solution.
"""
v = v0.copy()
diag = np.diag(A)
D = np.diag(diag)
Q = D-A
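    # Splitting A = D - Q turns Av = f into the fixed point v = D^{-1}(f + Qv);
    # the weighted update below blends that step with the current iterate.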
for _ in range(nu_1):
v = (1-w)*v + w*(f + Q@v)/diag
return v
@nb.jit(nopython=True)
def VMG(A, v0, f, w, nu_1=10, nu_2=10, depth=-1):
"""V-cycle multigrid solver. Solves Av = f.
Parameters
----------
A : np.ndarray
Linear differential operator (Av = f).
v0 : np.ndarray
Initial guess for the solution to (Av = f)
f : np.ndarray
Forcing function (Av = f)
w : float
Jacobi weighting factor. Usually set to 2/3
nu_1 : int, optional
Number of jacobi iterations before each coarsening grid spacing step.
nu_2 : int, optional
Number of jacobi iterations at each de-coarsening grid spacing step.
depth : int, optional
Number of grid coarsening steps to take. Leave as -1 except for debugging.
Returns
-------
np.ndarray
1D array containing the solution.
"""
N = v0.shape[0]
v = weighted_jacobi(A, v0, f, w, nu_1=nu_1)
if depth != 0 and (N-1) % 2 == 0 and (N-1) > 8:
I_h2h = linoplib.prolongation(N)
I_2hh = linoplib.full_weighting(N)
A_2h = I_2hh@A@I_h2h
v_2h = np.zeros(int((N-1)/2))
f_2h = I_2hh@(f-A@v)
v_2h = VMG(A_2h, v_2h, f_2h, w, nu_1, nu_2, depth-1)
v += I_h2h@v_2h
v = weighted_jacobi(A, v, f, w, nu_1=nu_2)
return v
@nb.jit(nopython=True)
def FMG(A, v0, f, w, nu_0=1, nu_1=10, nu_2=10):
"""Full multigrid solver. Solves Av = f.
Parameters
----------
A : np.ndarray
Linear differential operator (Av = f).
v0 : np.ndarray
Initial guess for the solution to (Av = f)
f : np.ndarray
Forcing function (Av = f)
w : float
Jacobi weighting factor. Usually set to 2/3
nu_0 : int, optional
Number of times to call the VMG method at each grid spacing step.
nu_1 : int, optional
Number of jacobi iterations before each coarsening grid spacing step.
nu_2 : int, optional
Number of jacobi iterations at each de-coarsening grid spacing step.
Returns
-------
np.ndarray
1D array containing the solution.
"""
N = v0.shape[0]
if (N-1) % 2 != 0 or (N-1) <= 8:
v = np.zeros(N)
else:
I_h2h = linoplib.prolongation(N)
I_2hh = linoplib.full_weighting(N)
A_2h = I_2hh@A@I_h2h
v_2h = np.zeros(int((N-1)/2))
f_2h = I_2hh@(f-A@v0)
v_2h = FMG(A_2h, v_2h, f_2h, w, nu_0, nu_1, nu_2)
v = v0 + I_h2h@v_2h
for _ in range(nu_0):
v = VMG(A, v, f, w, nu_1, nu_2, depth=-1)
return v
def direct(A, v0, f):
    """Directly solve Av=f by calling numpy.
    The v0 argument is ignored; it is kept so the signature matches the
    iterative solvers. np.linalg.solve is used instead of forming the inverse.
    """
    return np.linalg.solve(A, f)
def error(A, v, f):
"""Compute the error, given A, v, and f.
"""
return f-A@v
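if __name__ == "__main__":
    # Smoke test (illustrative, not part of the original module): build the
    # standard 1D discrete Laplacian, smooth with weighted Jacobi, and compare
    # against the direct solve. N = 65 keeps the dense matrices small.
    N = 65
    h = 1.0 / (N + 1)
    main_diag = -2.0 * np.ones(N) / h**2
    off_diag = np.ones(N - 1) / h**2
    A = np.diag(main_diag) + np.diag(off_diag, 1) + np.diag(off_diag, -1)
    f = np.ones(N)
    v = weighted_jacobi(A, np.zeros(N), f, 2.0 / 3.0, 5000)
    print("max residual:", np.max(np.abs(error(A, v, f))))
    print("max error vs direct:", np.max(np.abs(v - direct(A, None, f))))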
|
# Copyright 2018-present Kensho Technologies, LLC.
import codecs
import datetime
from os import path
import random
import re
import sys
from .animals import get_animal_generation_commands
from .events import get_event_generation_commands
from .species import get_species_generation_commands
# https://packaging.python.org/guides/single-sourcing-package-version/
# #single-sourcing-the-version
def read_file(filename):
"""Read and return text from the file specified by `filename`, in the project root directory."""
# intentionally *not* adding an encoding option to open
# see here:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
top_level_directory = path.dirname(path.dirname(path.dirname(path.abspath(__file__))))
with codecs.open(path.join(top_level_directory, 'graphql_compiler', filename), 'r') as f:
return f.read()
def find_version():
"""Return current version of package."""
version_file = read_file('__init__.py')
version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']', version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
def main():
"""Print a list of SQL commands to generate the testing database."""
random.seed(0)
module_path = path.relpath(__file__)
current_datetime = datetime.datetime.now().isoformat()
log_message = ('# Auto-generated output from `{path}`.\n'
'# Do not edit directly!\n'
'# Generated on {datetime} from compiler version {version}.\n\n')
sys.stdout.write(
log_message.format(path=module_path, datetime=current_datetime, version=find_version()))
sql_command_generators = [
get_event_generation_commands,
get_species_generation_commands,
get_animal_generation_commands,
]
for sql_command_generator in sql_command_generators:
sql_command_list = sql_command_generator()
sys.stdout.write('\n'.join(sql_command_list))
sys.stdout.write('\n')
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
import os
from torch.utils.data import Dataset
import numpy as np
import json
from dataloaders.rawvideo_util import RawVideoExtractor
class DiDeMo_DataLoader(Dataset):
def __init__(
self,
subset,
data_path,
features_path,
tokenizer,
max_words=30,
feature_framerate=1.0,
max_frames=100,
image_resolution=224,
frame_order=0,
slice_framepos=0,
):
self.data_path = data_path
self.features_path = features_path
self.feature_framerate = feature_framerate
self.max_words = max_words
self.max_frames = max_frames
self.tokenizer = tokenizer
# 0: ordinary order; 1: reverse order; 2: random order.
self.frame_order = frame_order
assert self.frame_order in [0, 1, 2]
# 0: cut from head frames; 1: cut from tail frames; 2: extract frames uniformly.
self.slice_framepos = slice_framepos
assert self.slice_framepos in [0, 1, 2]
self.subset = subset
assert self.subset in ["train", "val", "test"]
video_id_path_dict = {}
video_id_path_dict["train"] = os.path.join(self.data_path, "train_list.txt")
video_id_path_dict["val"] = os.path.join(self.data_path, "val_list.txt")
video_id_path_dict["test"] = os.path.join(self.data_path, "test_list.txt")
video_json_path_dict = {}
video_json_path_dict["train"] = os.path.join(self.data_path, "train_data.json")
video_json_path_dict["val"] = os.path.join(self.data_path, "val_data.json")
video_json_path_dict["test"] = os.path.join(self.data_path, "test_data.json")
with open(video_id_path_dict[self.subset], 'r') as fp:
video_ids = [itm.strip() for itm in fp.readlines()]
caption_dict = {}
with open(video_json_path_dict[self.subset], 'r') as f:
json_data = json.load(f)
for itm in json_data:
description = itm["description"]
times = itm["times"]
video = itm["video"]
if video not in video_ids:
continue
# each video is split into 5-second temporal chunks
# average the points from each annotator
start_ = np.mean([t_[0] for t_ in times]) * 5
end_ = (np.mean([t_[1] for t_ in times]) + 1) * 5
if video in caption_dict:
caption_dict[video]["start"].append(start_)
caption_dict[video]["end"].append(end_)
caption_dict[video]["text"].append(description)
else:
caption_dict[video] = {}
caption_dict[video]["start"] = [start_]
caption_dict[video]["end"] = [end_]
caption_dict[video]["text"] = [description]
for k_ in caption_dict.keys():
caption_dict[k_]["start"] = [0]
# trick to save time on obtaining each video length
# [https://github.com/LisaAnne/LocalizingMoments/blob/master/README.md]:
# Some videos are longer than 30 seconds. These videos were truncated to 30 seconds during annotation.
caption_dict[k_]["end"] = [31]
caption_dict[k_]["text"] = [" ".join(caption_dict[k_]["text"])]
video_dict = {}
for root, dub_dir, video_files in os.walk(self.features_path):
for video_file in video_files:
video_id_ = video_file
if video_id_ not in video_ids:
continue
file_path_ = os.path.join(root, video_file)
video_dict[video_id_] = file_path_
self.caption_dict = caption_dict
self.video_dict = video_dict
video_ids = list(set(video_ids) & set(self.caption_dict.keys()) & set(self.video_dict.keys()))
# Get all captions
self.iter2video_pairs_dict = {}
for video_id in self.caption_dict.keys():
if video_id not in video_ids:
continue
caption = self.caption_dict[video_id]
n_caption = len(caption['start'])
for sub_id in range(n_caption):
self.iter2video_pairs_dict[len(self.iter2video_pairs_dict)] = (video_id, sub_id)
self.rawVideoExtractor = RawVideoExtractor(framerate=feature_framerate, size=image_resolution)
self.SPECIAL_TOKEN = {"CLS_TOKEN": "<|startoftext|>", "SEP_TOKEN": "<|endoftext|>",
"MASK_TOKEN": "[MASK]", "UNK_TOKEN": "[UNK]", "PAD_TOKEN": "[PAD]"}
def __len__(self):
return len(self.iter2video_pairs_dict)
def _get_text(self, video_id, sub_id):
caption = self.caption_dict[video_id]
k = 1
r_ind = [sub_id]
        starts = np.zeros(k, dtype=np.int64)
        ends = np.zeros(k, dtype=np.int64)
        pairs_text = np.zeros((k, self.max_words), dtype=np.int64)
        pairs_mask = np.zeros((k, self.max_words), dtype=np.int64)
        pairs_segment = np.zeros((k, self.max_words), dtype=np.int64)
for i in range(k):
ind = r_ind[i]
start_, end_ = caption['start'][ind], caption['end'][ind]
words = self.tokenizer.tokenize(caption['text'][ind])
starts[i], ends[i] = start_, end_
words = [self.SPECIAL_TOKEN["CLS_TOKEN"]] + words
total_length_with_CLS = self.max_words - 1
if len(words) > total_length_with_CLS:
words = words[:total_length_with_CLS]
words = words + [self.SPECIAL_TOKEN["SEP_TOKEN"]]
input_ids = self.tokenizer.convert_tokens_to_ids(words)
input_mask = [1] * len(input_ids)
segment_ids = [0] * len(input_ids)
while len(input_ids) < self.max_words:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == self.max_words
assert len(input_mask) == self.max_words
assert len(segment_ids) == self.max_words
pairs_text[i] = np.array(input_ids)
pairs_mask[i] = np.array(input_mask)
pairs_segment[i] = np.array(segment_ids)
return pairs_text, pairs_mask, pairs_segment, starts, ends
def _get_rawvideo(self, idx, s, e):
        video_mask = np.zeros((len(s), self.max_frames), dtype=np.int64)
max_video_length = [0] * len(s)
# Pair x L x T x 3 x H x W
        video = np.zeros((len(s), self.max_frames, 1, 3,
                          self.rawVideoExtractor.size, self.rawVideoExtractor.size), dtype=np.float64)
video_path = self.video_dict[idx]
try:
for i in range(len(s)):
start_time = int(s[i])
end_time = int(e[i])
start_time = start_time if start_time >= 0. else 0.
end_time = end_time if end_time >= 0. else 0.
if start_time > end_time:
start_time, end_time = end_time, start_time
elif start_time == end_time:
end_time = end_time + 1
cache_id = "{}_{}_{}".format(video_path, start_time, end_time)
# Should be optimized by gathering all asking of this video
raw_video_data = self.rawVideoExtractor.get_video_data(video_path, start_time, end_time)
raw_video_data = raw_video_data['video']
if len(raw_video_data.shape) > 3:
raw_video_data_clip = raw_video_data
# L x T x 3 x H x W
raw_video_slice = self.rawVideoExtractor.process_raw_data(raw_video_data_clip)
if self.max_frames < raw_video_slice.shape[0]:
if self.slice_framepos == 0:
video_slice = raw_video_slice[:self.max_frames, ...]
elif self.slice_framepos == 1:
video_slice = raw_video_slice[-self.max_frames:, ...]
else:
sample_indx = np.linspace(0, raw_video_slice.shape[0] - 1, num=self.max_frames, dtype=int)
video_slice = raw_video_slice[sample_indx, ...]
else:
video_slice = raw_video_slice
video_slice = self.rawVideoExtractor.process_frame_order(video_slice, frame_order=self.frame_order)
slice_len = video_slice.shape[0]
max_video_length[i] = max_video_length[i] if max_video_length[i] > slice_len else slice_len
if slice_len < 1:
pass
else:
video[i][:slice_len, ...] = video_slice
else:
print("video path: {} error. video id: {}, start: {}, end: {}".format(video_path, idx, start_time, end_time))
        except Exception as excep:
            print("video path: {} error. video id: {}, start: {}, end: {}, Error: {}".format(video_path, idx, s, e, excep))
            # raise excep
for i, v_length in enumerate(max_video_length):
video_mask[i][:v_length] = [1] * v_length
return video, video_mask
def __getitem__(self, feature_idx):
video_id, sub_id = self.iter2video_pairs_dict[feature_idx]
pairs_text, pairs_mask, pairs_segment, starts, ends = self._get_text(video_id, sub_id)
video, video_mask = self._get_rawvideo(video_id, starts, ends)
return pairs_text, pairs_mask, pairs_segment, video, video_mask
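# Illustrative sketch (not from the original file): how the flat-index mapping
# built in __init__ pairs each caption with its video. The toy caption_dict
# below is hypothetical; the real one is parsed from the DiDeMo annotations.
if __name__ == '__main__':
    _caption_dict = {'vidA': {'start': [0], 'end': [31], 'text': ['a man runs']}}
    _pairs = {}
    for _vid in _caption_dict:
        for _sub in range(len(_caption_dict[_vid]['start'])):
            _pairs[len(_pairs)] = (_vid, _sub)
    assert _pairs[0] == ('vidA', 0)  # __getitem__(0) would load this pair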
|
'''Write a program that prints:
1
1 2
1 2 3
.....
1 2 3 ... n
for an n given by the user. Use a function that receives an integer n and prints up to the n-th line.'''
print('-----DESAFIO 100-----')
def imprimir(valor):
    if isinstance(valor, int):
        x = 1
        while x <= valor:
            y = 1
            texto = ''
            while y <= x:
                texto += str(y) + ' '
                y += 1
            print(texto)
            x += 1
x = int(input("numero: "))
imprimir(x)
|
# Generated by Django 3.0.5 on 2020-06-14 16:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('submission', '0045_auto_20200607_1710'),
]
operations = [
migrations.AlterField(
model_name='submission',
name='state',
field=models.CharField(default='submitted', max_length=26),
),
]
|
def bubble(lst, size, a=0, b=0):
    # Recursive selection-style sort: position a is fixed while b scans the
    # remainder of the list, swapping whenever lst[a] > lst[b].
    if a == size:
        return lst
    if b == size:
        return bubble(lst, size, a + 1, a + 1)
    if lst[a] > lst[b]:
        lst[a], lst[b] = lst[b], lst[a]
    return bubble(lst, size, a, b + 1)
def findmed(lst):
    # Median: sort a copy, then take the middle element, or the mean of the
    # two middle elements when the length is even.
    tmp = bubble(lst.copy(), len(lst))
    if len(tmp) % 2 == 0:
        return (tmp[len(tmp) // 2 - 1] + tmp[len(tmp) // 2]) / 2
    return tmp[len(tmp) // 2]
l = input("Enter Input : ").split()
if l[0] == 'EX':
Ans = "minHeap and maxHeap"
print("Extra Question : What is a suitable sort algorithm?")
print(" Your Answer : "+Ans)
else:
    lst = list(map(int, l))
print("list = ",lst," : median = ","{:.1f}".format(findmed(lst)),sep="")
|
good_passwords = 0
with open("input", "r") as file:
    for line in file:
        counts, char, word = line.strip().split()
        count_low, count_high = counts.split("-")
        char = char[0]
        num = word.count(char)
        if int(count_low) <= num <= int(count_high):
            good_passwords += 1
print(good_passwords)
|
import os
from typing import (
Any,
Sequence, List
)
def ensure_list(value: Any) -> List:
# if isinstance(value, Sequence) and not isinstance(value, str):
if hasattr(value, '__iter__') and not isinstance(value, str):
return list(value)
else:
return [value]
def files_exist(files: Sequence[str]) -> bool:
return len(files) != 0 and all([os.path.exists(f) for f in files])
def hash_dict(obj: dict):
from hashlib import md5
obj = {k: obj[k] for k in sorted(obj)}
return md5(str(obj).encode()).hexdigest()
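# Illustrative check (not part of the original module): because hash_dict sorts
# the keys before hashing, key insertion order does not affect the digest.
if __name__ == '__main__':
    assert hash_dict({'a': 1, 'b': 2}) == hash_dict({'b': 2, 'a': 1})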
def set_property(obj, name, prop_function):
"""Add property :obj:`prop_function` to :obj:`obj`.
:obj:`prop_function` must be a function taking only one argument, i.e.,
:obj:`obj`.
Args:
obj (object): object on which the property has to be added.
name (str): the name of the property.
prop_function (function): function taking only :obj:`obj` as argument.
"""
class_name = obj.__class__.__name__
new_class = type(class_name, (obj.__class__,),
{name: property(prop_function)})
obj.__class__ = new_class
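# Illustrative usage sketch (not part of the original module): attaching a
# computed ``norm`` property to a single instance. _Point is a hypothetical class.
if __name__ == '__main__':
    class _Point:
        def __init__(self):
            self.x, self.y = 3.0, 4.0
    _p = _Point()
    set_property(_p, 'norm', lambda self: (self.x ** 2 + self.y ** 2) ** 0.5)
    assert _p.norm == 5.0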
|
from typing import Tuple
import math
import torch
import torchaudio
from torch import Tensor
__all__ = [
'get_mel_banks',
'inverse_mel_scale',
'inverse_mel_scale_scalar',
'mel_scale',
'mel_scale_scalar',
'spectrogram',
'fbank',
'mfcc',
'vtln_warp_freq',
'vtln_warp_mel_freq',
'resample_waveform',
]
# numeric_limits<float>::epsilon() 1.1920928955078125e-07
EPSILON = torch.tensor(torch.finfo(torch.float).eps)
# 1 milliseconds = 0.001 seconds
MILLISECONDS_TO_SECONDS = 0.001
# window types
HAMMING = 'hamming'
HANNING = 'hanning'
POVEY = 'povey'
RECTANGULAR = 'rectangular'
BLACKMAN = 'blackman'
WINDOWS = [HAMMING, HANNING, POVEY, RECTANGULAR, BLACKMAN]
def _get_epsilon(device, dtype):
return EPSILON.to(device=device, dtype=dtype)
def _next_power_of_2(x: int) -> int:
r"""Returns the smallest power of 2 that is greater than x
"""
return 1 if x == 0 else 2 ** (x - 1).bit_length()
def _get_strided(waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool) -> Tensor:
r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``)
representing how the window is shifted along the waveform. Each row is a frame.
Args:
waveform (Tensor): Tensor of size ``num_samples``
window_size (int): Frame length
window_shift (int): Frame shift
snip_edges (bool): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends.
Returns:
Tensor: 2D tensor of size (m, ``window_size``) where each row is a frame
"""
assert waveform.dim() == 1
num_samples = waveform.size(0)
strides = (window_shift * waveform.stride(0), waveform.stride(0))
if snip_edges:
if num_samples < window_size:
return torch.empty((0, 0), dtype=waveform.dtype, device=waveform.device)
else:
m = 1 + (num_samples - window_size) // window_shift
else:
reversed_waveform = torch.flip(waveform, [0])
m = (num_samples + (window_shift // 2)) // window_shift
pad = window_size // 2 - window_shift // 2
pad_right = reversed_waveform
if pad > 0:
# torch.nn.functional.pad returns [2,1,0,1,2] for 'reflect'
# but we want [2, 1, 0, 0, 1, 2]
pad_left = reversed_waveform[-pad:]
waveform = torch.cat((pad_left, waveform, pad_right), dim=0)
else:
# pad is negative so we want to trim the waveform at the front
waveform = torch.cat((waveform[-pad:], pad_right), dim=0)
sizes = (m, window_size)
return waveform.as_strided(sizes, strides)
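# Illustrative check (not part of the original module): with snip_edges=True the
# frame count follows m = 1 + (num_samples - window_size) // window_shift. The
# sizes below (16000 samples, 400/160 window) are hypothetical.
if __name__ == '__main__':
    _wave = torch.arange(16000, dtype=torch.float)
    _frames = _get_strided(_wave, window_size=400, window_shift=160, snip_edges=True)
    assert _frames.shape == (1 + (16000 - 400) // 160, 400)  # (98, 400)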
def _feature_window_function(window_type: str,
window_size: int,
blackman_coeff: float,
device: torch.device,
                             dtype: torch.dtype,
) -> Tensor:
r"""Returns a window function with the given type and size
"""
if window_type == HANNING:
return torch.hann_window(window_size, periodic=False, device=device, dtype=dtype)
elif window_type == HAMMING:
return torch.hamming_window(window_size, periodic=False, alpha=0.54, beta=0.46, device=device, dtype=dtype)
elif window_type == POVEY:
# like hanning but goes to zero at edges
return torch.hann_window(window_size, periodic=False, device=device, dtype=dtype).pow(0.85)
elif window_type == RECTANGULAR:
return torch.ones(window_size, device=device, dtype=dtype)
elif window_type == BLACKMAN:
a = 2 * math.pi / (window_size - 1)
window_function = torch.arange(window_size, device=device, dtype=dtype)
# can't use torch.blackman_window as they use different coefficients
return (blackman_coeff - 0.5 * torch.cos(a * window_function) +
(0.5 - blackman_coeff) * torch.cos(2 * a * window_function)).to(device=device, dtype=dtype)
else:
raise Exception('Invalid window type ' + window_type)
def _get_log_energy(strided_input: Tensor,
epsilon: Tensor,
energy_floor: float) -> Tensor:
r"""Returns the log energy of size (m) for a strided_input (m,*)
"""
device, dtype = strided_input.device, strided_input.dtype
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
if energy_floor == 0.0:
return log_energy
return torch.max(
log_energy, torch.tensor(math.log(energy_floor), device=device, dtype=dtype))
def _get_waveform_and_window_properties(waveform: Tensor,
channel: int,
sample_frequency: float,
frame_shift: float,
frame_length: float,
round_to_power_of_two: bool,
preemphasis_coefficient: float) -> Tuple[Tensor, int, int, int]:
r"""Gets the waveform and window properties
"""
channel = max(channel, 0)
assert channel < waveform.size(0), ('Invalid channel %d for size %d' % (channel, waveform.size(0)))
waveform = waveform[channel, :] # size (n)
window_shift = int(sample_frequency * frame_shift * MILLISECONDS_TO_SECONDS)
window_size = int(sample_frequency * frame_length * MILLISECONDS_TO_SECONDS)
padded_window_size = _next_power_of_2(window_size) if round_to_power_of_two else window_size
    assert 2 <= window_size <= len(waveform), ('choose a window size %d that is in [2, %d]' % (window_size, len(waveform)))
assert 0 < window_shift, '`window_shift` must be greater than 0'
assert padded_window_size % 2 == 0, 'the padded `window_size` must be divisible by two.' \
' use `round_to_power_of_two` or change `frame_length`'
assert 0. <= preemphasis_coefficient <= 1.0, '`preemphasis_coefficient` must be between [0,1]'
assert sample_frequency > 0, '`sample_frequency` must be greater than zero'
return waveform, window_shift, window_size, padded_window_size
def _get_window(waveform: Tensor,
padded_window_size: int,
window_size: int,
window_shift: int,
window_type: str,
blackman_coeff: float,
snip_edges: bool,
raw_energy: bool,
energy_floor: float,
dither: float,
remove_dc_offset: bool,
preemphasis_coefficient: float) -> Tuple[Tensor, Tensor]:
r"""Gets a window and its log energy
Returns:
(Tensor, Tensor): strided_input of size (m, ``padded_window_size``) and signal_log_energy of size (m)
"""
device, dtype = waveform.device, waveform.dtype
epsilon = _get_epsilon(device, dtype)
# size (m, window_size)
strided_input = _get_strided(waveform, window_size, window_shift, snip_edges)
if dither != 0.0:
# Returns a random number strictly between 0 and 1
x = torch.max(epsilon, torch.rand(strided_input.shape, device=device, dtype=dtype))
rand_gauss = torch.sqrt(-2 * x.log()) * torch.cos(2 * math.pi * x)
strided_input = strided_input + rand_gauss * dither
if remove_dc_offset:
# Subtract each row/frame by its mean
row_means = torch.mean(strided_input, dim=1).unsqueeze(1) # size (m, 1)
strided_input = strided_input - row_means
if raw_energy:
# Compute the log energy of each row/frame before applying preemphasis and
# window function
signal_log_energy = _get_log_energy(strided_input, epsilon, energy_floor) # size (m)
if preemphasis_coefficient != 0.0:
# strided_input[i,j] -= preemphasis_coefficient * strided_input[i, max(0, j-1)] for all i,j
offset_strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (1, 0), mode='replicate').squeeze(0) # size (m, window_size + 1)
strided_input = strided_input - preemphasis_coefficient * offset_strided_input[:, :-1]
# Apply window_function to each row/frame
window_function = _feature_window_function(
window_type, window_size, blackman_coeff, device, dtype).unsqueeze(0) # size (1, window_size)
strided_input = strided_input * window_function # size (m, window_size)
# Pad columns with zero until we reach size (m, padded_window_size)
if padded_window_size != window_size:
padding_right = padded_window_size - window_size
strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (0, padding_right), mode='constant', value=0).squeeze(0)
# Compute energy after window function (not the raw one)
if not raw_energy:
signal_log_energy = _get_log_energy(strided_input, epsilon, energy_floor) # size (m)
return strided_input, signal_log_energy
def _subtract_column_mean(tensor: Tensor, subtract_mean: bool) -> Tensor:
# subtracts the column mean of the tensor size (m, n) if subtract_mean=True
# it returns size (m, n)
if subtract_mean:
col_means = torch.mean(tensor, dim=0).unsqueeze(0)
tensor = tensor - col_means
return tensor
def spectrogram(waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
min_duration: float = 0.0,
preemphasis_coefficient: float = 0.97,
raw_energy: bool = True,
remove_dc_offset: bool = True,
round_to_power_of_two: bool = True,
sample_frequency: float = 16000.0,
snip_edges: bool = True,
subtract_mean: bool = False,
window_type: str = POVEY) -> Tensor:
r"""Create a spectrogram from a raw audio signal. This matches the input/output of Kaldi's
compute-spectrogram-feats.
Args:
waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2)
blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``)
channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``)
dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set
the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``)
energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution:
this floor is applied to the zeroth component, representing the total signal energy. The floor on the
individual spectrogram elements is fixed at std::numeric_limits<float>::epsilon(). (Default: ``1.0``)
frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``)
frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``)
min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``)
preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``)
raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``)
remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``)
round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input
to FFT. (Default: ``True``)
sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if
specified there) (Default: ``16000.0``)
snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``)
subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do
it this way. (Default: ``False``)
window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman')
(Default: ``'povey'``)
Returns:
Tensor: A spectrogram identical to what Kaldi would output. The shape is
(m, ``padded_window_size // 2 + 1``) where m is calculated in _get_strided
"""
waveform, window_shift, window_size, padded_window_size = _get_waveform_and_window_properties(
waveform, channel, sample_frequency, frame_shift, frame_length, round_to_power_of_two, preemphasis_coefficient)
if len(waveform) < min_duration * sample_frequency:
# signal is too short
return torch.empty(0)
strided_input, signal_log_energy = _get_window(
waveform, padded_window_size, window_size, window_shift, window_type, blackman_coeff,
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
# size (m, padded_window_size // 2 + 1, 2)
fft = torch.rfft(strided_input, 1, normalized=False, onesided=True)
# Convert the FFT into a power spectrum
power_spectrum = torch.max(fft.pow(2).sum(2), EPSILON).log() # size (m, padded_window_size // 2 + 1)
power_spectrum[:, 0] = signal_log_energy
power_spectrum = _subtract_column_mean(power_spectrum, subtract_mean)
return power_spectrum
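# Illustrative usage sketch (not part of the original module). It assumes a torch
# version where ``torch.rfft`` still exists (the API this module is written against)
# and uses a random waveform purely to check output shapes.
if __name__ == '__main__':
    _wave = torch.randn(1, 16000)  # (c, n): mono signal, assumed 16 kHz
    _spec = spectrogram(_wave)
    # 25 ms window at 16 kHz -> 400 samples, padded to 512 -> 512 // 2 + 1 bins
    assert _spec.shape[1] == 512 // 2 + 1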
def inverse_mel_scale_scalar(mel_freq: float) -> float:
return 700.0 * (math.exp(mel_freq / 1127.0) - 1.0)
def inverse_mel_scale(mel_freq: Tensor) -> Tensor:
return 700.0 * ((mel_freq / 1127.0).exp() - 1.0)
def mel_scale_scalar(freq: float) -> float:
return 1127.0 * math.log(1.0 + freq / 700.0)
def mel_scale(freq: Tensor) -> Tensor:
return 1127.0 * (1.0 + freq / 700.0).log()
def vtln_warp_freq(vtln_low_cutoff: float,
vtln_high_cutoff: float,
low_freq: float,
high_freq: float,
vtln_warp_factor: float,
freq: Tensor) -> Tensor:
r"""This computes a VTLN warping function that is not the same as HTK's one,
but has similar inputs (this function has the advantage of never producing
empty bins).
This function computes a warp function F(freq), defined between low_freq
and high_freq inclusive, with the following properties:
F(low_freq) == low_freq
F(high_freq) == high_freq
The function is continuous and piecewise linear with two inflection
points.
The lower inflection point (measured in terms of the unwarped
frequency) is at frequency l, determined as described below.
The higher inflection point is at a frequency h, determined as
described below.
If l <= f <= h, then F(f) = f/vtln_warp_factor.
If the higher inflection point (measured in terms of the unwarped
frequency) is at h, then max(h, F(h)) == vtln_high_cutoff.
Since (by the last point) F(h) == h/vtln_warp_factor, then
max(h, h/vtln_warp_factor) == vtln_high_cutoff, so
h = vtln_high_cutoff / max(1, 1/vtln_warp_factor).
= vtln_high_cutoff * min(1, vtln_warp_factor).
If the lower inflection point (measured in terms of the unwarped
frequency) is at l, then min(l, F(l)) == vtln_low_cutoff
This implies that l = vtln_low_cutoff / min(1, 1/vtln_warp_factor)
= vtln_low_cutoff * max(1, vtln_warp_factor)
Args:
vtln_low_cutoff (float): Lower frequency cutoffs for VTLN
vtln_high_cutoff (float): Upper frequency cutoffs for VTLN
low_freq (float): Lower frequency cutoffs in mel computation
high_freq (float): Upper frequency cutoffs in mel computation
vtln_warp_factor (float): Vtln warp factor
freq (Tensor): given frequency in Hz
Returns:
Tensor: Freq after vtln warp
"""
assert vtln_low_cutoff > low_freq, 'be sure to set the vtln_low option higher than low_freq'
assert vtln_high_cutoff < high_freq, 'be sure to set the vtln_high option lower than high_freq [or negative]'
l = vtln_low_cutoff * max(1.0, vtln_warp_factor)
h = vtln_high_cutoff * min(1.0, vtln_warp_factor)
scale = 1.0 / vtln_warp_factor
Fl = scale * l # F(l)
Fh = scale * h # F(h)
assert l > low_freq and h < high_freq
# slope of left part of the 3-piece linear function
scale_left = (Fl - low_freq) / (l - low_freq)
# [slope of center part is just "scale"]
# slope of right part of the 3-piece linear function
scale_right = (high_freq - Fh) / (high_freq - h)
res = torch.empty_like(freq)
outside_low_high_freq = torch.lt(freq, low_freq) | torch.gt(freq, high_freq) # freq < low_freq || freq > high_freq
before_l = torch.lt(freq, l) # freq < l
before_h = torch.lt(freq, h) # freq < h
after_h = torch.ge(freq, h) # freq >= h
    # the order of operations matters here (since the frequency regions overlap)
res[after_h] = high_freq + scale_right * (freq[after_h] - high_freq)
res[before_h] = scale * freq[before_h]
res[before_l] = low_freq + scale_left * (freq[before_l] - low_freq)
res[outside_low_high_freq] = freq[outside_low_high_freq]
return res
def vtln_warp_mel_freq(vtln_low_cutoff: float,
vtln_high_cutoff: float,
                       low_freq: float,
                       high_freq: float,
vtln_warp_factor: float,
mel_freq: Tensor) -> Tensor:
r"""
Args:
vtln_low_cutoff (float): Lower frequency cutoffs for VTLN
vtln_high_cutoff (float): Upper frequency cutoffs for VTLN
low_freq (float): Lower frequency cutoffs in mel computation
high_freq (float): Upper frequency cutoffs in mel computation
vtln_warp_factor (float): Vtln warp factor
mel_freq (Tensor): Given frequency in Mel
Returns:
Tensor: ``mel_freq`` after vtln warp
"""
return mel_scale(vtln_warp_freq(vtln_low_cutoff, vtln_high_cutoff, low_freq, high_freq,
vtln_warp_factor, inverse_mel_scale(mel_freq)))
def get_mel_banks(num_bins: int,
window_length_padded: int,
sample_freq: float,
low_freq: float,
high_freq: float,
vtln_low: float,
vtln_high: float,
vtln_warp_factor: float) -> Tuple[Tensor, Tensor]:
"""
Returns:
(Tensor, Tensor): The tuple consists of ``bins`` (which is
melbank of size (``num_bins``, ``num_fft_bins``)) and ``center_freqs`` (which is
center frequencies of bins of size (``num_bins``)).
"""
    assert num_bins > 3, 'Must have more than 3 mel bins'
    assert window_length_padded % 2 == 0
    num_fft_bins = window_length_padded // 2
nyquist = 0.5 * sample_freq
if high_freq <= 0.0:
high_freq += nyquist
assert (0.0 <= low_freq < nyquist) and (0.0 < high_freq <= nyquist) and (low_freq < high_freq), \
('Bad values in options: low-freq %f and high-freq %f vs. nyquist %f' % (low_freq, high_freq, nyquist))
# fft-bin width [think of it as Nyquist-freq / half-window-length]
fft_bin_width = sample_freq / window_length_padded
mel_low_freq = mel_scale_scalar(low_freq)
mel_high_freq = mel_scale_scalar(high_freq)
# divide by num_bins+1 in next line because of end-effects where the bins
# spread out to the sides.
mel_freq_delta = (mel_high_freq - mel_low_freq) / (num_bins + 1)
if vtln_high < 0.0:
vtln_high += nyquist
assert vtln_warp_factor == 1.0 or ((low_freq < vtln_low < high_freq) and
(0.0 < vtln_high < high_freq) and (vtln_low < vtln_high)), \
('Bad values in options: vtln-low %f and vtln-high %f, versus low-freq %f and high-freq %f' %
(vtln_low, vtln_high, low_freq, high_freq))
bin = torch.arange(num_bins).unsqueeze(1)
left_mel = mel_low_freq + bin * mel_freq_delta # size(num_bins, 1)
center_mel = mel_low_freq + (bin + 1.0) * mel_freq_delta # size(num_bins, 1)
right_mel = mel_low_freq + (bin + 2.0) * mel_freq_delta # size(num_bins, 1)
if vtln_warp_factor != 1.0:
left_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, left_mel)
center_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, center_mel)
right_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, right_mel)
center_freqs = inverse_mel_scale(center_mel) # size (num_bins)
# size(1, num_fft_bins)
mel = mel_scale(fft_bin_width * torch.arange(num_fft_bins)).unsqueeze(0)
# size (num_bins, num_fft_bins)
up_slope = (mel - left_mel) / (center_mel - left_mel)
down_slope = (right_mel - mel) / (right_mel - center_mel)
if vtln_warp_factor == 1.0:
# left_mel < center_mel < right_mel so we can min the two slopes and clamp negative values
bins = torch.max(torch.zeros(1), torch.min(up_slope, down_slope))
else:
# warping can move the order of left_mel, center_mel, right_mel anywhere
bins = torch.zeros_like(up_slope)
up_idx = torch.gt(mel, left_mel) & torch.le(mel, center_mel) # left_mel < mel <= center_mel
down_idx = torch.gt(mel, center_mel) & torch.lt(mel, right_mel) # center_mel < mel < right_mel
bins[up_idx] = up_slope[up_idx]
bins[down_idx] = down_slope[down_idx]
return bins, center_freqs
def fbank(waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
min_duration: float = 0.0,
num_mel_bins: int = 23,
preemphasis_coefficient: float = 0.97,
raw_energy: bool = True,
remove_dc_offset: bool = True,
round_to_power_of_two: bool = True,
sample_frequency: float = 16000.0,
snip_edges: bool = True,
subtract_mean: bool = False,
use_energy: bool = False,
use_log_fbank: bool = True,
use_power: bool = True,
vtln_high: float = -500.0,
vtln_low: float = 100.0,
vtln_warp: float = 1.0,
window_type: str = POVEY) -> Tensor:
r"""Create a fbank from a raw audio signal. This matches the input/output of Kaldi's
compute-fbank-feats.
Args:
waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2)
blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``)
channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``)
dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set
the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``)
energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution:
this floor is applied to the zeroth component, representing the total signal energy. The floor on the
individual spectrogram elements is fixed at std::numeric_limits<float>::epsilon(). (Default: ``1.0``)
frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``)
frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``)
high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist)
(Default: ``0.0``)
htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible features
(need to change other parameters). (Default: ``False``)
low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``)
min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``)
num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``)
preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``)
raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``)
remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``)
round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input
to FFT. (Default: ``True``)
sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if
specified there) (Default: ``16000.0``)
snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``)
subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do
it this way. (Default: ``False``)
use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``)
        use_log_fbank (bool, optional): If true, produce log-filterbank, else produce linear. (Default: ``True``)
use_power (bool, optional): If true, use power, else use magnitude. (Default: ``True``)
        vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if
            negative, offset from high-mel-freq) (Default: ``-500.0``)
vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``)
vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``)
window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman')
(Default: ``'povey'``)
Returns:
Tensor: A fbank identical to what Kaldi would output. The shape is (m, ``num_mel_bins + use_energy``)
where m is calculated in _get_strided
"""
device, dtype = waveform.device, waveform.dtype
waveform, window_shift, window_size, padded_window_size = _get_waveform_and_window_properties(
waveform, channel, sample_frequency, frame_shift, frame_length, round_to_power_of_two, preemphasis_coefficient)
if len(waveform) < min_duration * sample_frequency:
# signal is too short
return torch.empty(0, device=device, dtype=dtype)
# strided_input, size (m, padded_window_size) and signal_log_energy, size (m)
strided_input, signal_log_energy = _get_window(
waveform, padded_window_size, window_size, window_shift, window_type, blackman_coeff,
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
# size (m, padded_window_size // 2 + 1, 2)
fft = torch.rfft(strided_input, 1, normalized=False, onesided=True)
power_spectrum = fft.pow(2).sum(2).unsqueeze(1) # size (m, 1, padded_window_size // 2 + 1)
if not use_power:
power_spectrum = power_spectrum.pow(0.5)
# size (num_mel_bins, padded_window_size // 2)
mel_energies, _ = get_mel_banks(num_mel_bins, padded_window_size, sample_frequency,
low_freq, high_freq, vtln_low, vtln_high, vtln_warp)
mel_energies = mel_energies.to(device=device, dtype=dtype)
# pad right column with zeros and add dimension, size (1, num_mel_bins, padded_window_size // 2 + 1)
mel_energies = torch.nn.functional.pad(mel_energies, (0, 1), mode='constant', value=0).unsqueeze(0)
    # sum with mel filterbanks over the power spectrum, size (m, num_mel_bins)
mel_energies = (power_spectrum * mel_energies).sum(dim=2)
if use_log_fbank:
# avoid log of zero (which should be prevented anyway by dithering)
mel_energies = torch.max(mel_energies, _get_epsilon(device, dtype)).log()
# if use_energy then add it as the last column for htk_compat == true else first column
if use_energy:
signal_log_energy = signal_log_energy.unsqueeze(1) # size (m, 1)
# returns size (m, num_mel_bins + 1)
if htk_compat:
mel_energies = torch.cat((mel_energies, signal_log_energy), dim=1)
else:
mel_energies = torch.cat((signal_log_energy, mel_energies), dim=1)
mel_energies = _subtract_column_mean(mel_energies, subtract_mean)
return mel_energies
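# Illustrative usage sketch (not part of the original module); same torch.rfft
# caveat as for spectrogram. Shows the (m, num_mel_bins + use_energy) output shape.
if __name__ == '__main__':
    _wave = torch.randn(1, 16000)
    _fb = fbank(_wave, num_mel_bins=40, use_energy=True)
    assert _fb.shape[1] == 40 + 1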
def _get_dct_matrix(num_ceps: int, num_mel_bins: int) -> Tensor:
# returns a dct matrix of size (num_mel_bins, num_ceps)
# size (num_mel_bins, num_mel_bins)
dct_matrix = torchaudio.functional.create_dct(num_mel_bins, num_mel_bins, 'ortho')
# kaldi expects the first cepstral to be weighted sum of factor sqrt(1/num_mel_bins)
# this would be the first column in the dct_matrix for torchaudio as it expects a
# right multiply (which would be the first column of the kaldi's dct_matrix as kaldi
# expects a left multiply e.g. dct_matrix * vector).
dct_matrix[:, 0] = math.sqrt(1 / float(num_mel_bins))
dct_matrix = dct_matrix[:, :num_ceps]
return dct_matrix
def _get_lifter_coeffs(num_ceps: int, cepstral_lifter: float) -> Tensor:
# returns size (num_ceps)
# Compute liftering coefficients (scaling on cepstral coeffs)
# coeffs are numbered slightly differently from HTK: the zeroth index is C0, which is not affected.
i = torch.arange(num_ceps)
return 1.0 + 0.5 * cepstral_lifter * torch.sin(math.pi * i / cepstral_lifter)
def mfcc(
waveform: Tensor,
blackman_coeff: float = 0.42,
cepstral_lifter: float = 22.0,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
num_ceps: int = 13,
min_duration: float = 0.0,
num_mel_bins: int = 23,
preemphasis_coefficient: float = 0.97,
raw_energy: bool = True,
remove_dc_offset: bool = True,
round_to_power_of_two: bool = True,
sample_frequency: float = 16000.0,
snip_edges: bool = True,
subtract_mean: bool = False,
use_energy: bool = False,
vtln_high: float = -500.0,
vtln_low: float = 100.0,
vtln_warp: float = 1.0,
window_type: str = POVEY) -> Tensor:
r"""Create a mfcc from a raw audio signal. This matches the input/output of Kaldi's
compute-mfcc-feats.
Args:
waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2)
blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``)
cepstral_lifter (float, optional): Constant that controls scaling of MFCCs (Default: ``22.0``)
channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``)
dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set
the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``)
energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution:
this floor is applied to the zeroth component, representing the total signal energy. The floor on the
individual spectrogram elements is fixed at std::numeric_limits<float>::epsilon(). (Default: ``1.0``)
frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``)
frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``)
high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist)
(Default: ``0.0``)
htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible
features (need to change other parameters). (Default: ``False``)
low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``)
num_ceps (int, optional): Number of cepstra in MFCC computation (including C0) (Default: ``13``)
min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``)
num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``)
preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``)
raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``)
remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``)
round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input
to FFT. (Default: ``True``)
sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if
specified there) (Default: ``16000.0``)
snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``)
subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do
it this way. (Default: ``False``)
use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``)
        vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if
            negative, offset from high-mel-freq) (Default: ``-500.0``)
vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``)
vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``)
window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman')
(Default: ``"povey"``)
Returns:
Tensor: A mfcc identical to what Kaldi would output. The shape is (m, ``num_ceps``)
where m is calculated in _get_strided
"""
assert num_ceps <= num_mel_bins, 'num_ceps cannot be larger than num_mel_bins: %d vs %d' % (num_ceps, num_mel_bins)
# The mel_energies should not be squared (use_power=True), not have mean subtracted
# (subtract_mean=False), and use log (use_log_fbank=True).
# size (m, num_mel_bins + use_energy)
feature = fbank(waveform=waveform, blackman_coeff=blackman_coeff, channel=channel,
dither=dither, energy_floor=energy_floor, frame_length=frame_length,
frame_shift=frame_shift, high_freq=high_freq, htk_compat=htk_compat,
low_freq=low_freq, min_duration=min_duration, num_mel_bins=num_mel_bins,
preemphasis_coefficient=preemphasis_coefficient, raw_energy=raw_energy,
remove_dc_offset=remove_dc_offset, round_to_power_of_two=round_to_power_of_two,
sample_frequency=sample_frequency, snip_edges=snip_edges, subtract_mean=False,
use_energy=use_energy, use_log_fbank=True, use_power=True,
vtln_high=vtln_high, vtln_low=vtln_low, vtln_warp=vtln_warp, window_type=window_type)
if use_energy:
# size (m)
signal_log_energy = feature[:, num_mel_bins if htk_compat else 0]
# offset is 0 if htk_compat==True else 1
mel_offset = int(not htk_compat)
feature = feature[:, mel_offset:(num_mel_bins + mel_offset)]
# size (num_mel_bins, num_ceps)
dct_matrix = _get_dct_matrix(num_ceps, num_mel_bins)
# size (m, num_ceps)
feature = feature.matmul(dct_matrix)
if cepstral_lifter != 0.0:
# size (1, num_ceps)
lifter_coeffs = _get_lifter_coeffs(num_ceps, cepstral_lifter).unsqueeze(0)
feature *= lifter_coeffs
# if use_energy then replace the last column for htk_compat == true else first column
if use_energy:
feature[:, 0] = signal_log_energy
if htk_compat:
energy = feature[:, 0].unsqueeze(1) # size (m, 1)
feature = feature[:, 1:] # size (m, num_ceps - 1)
if not use_energy:
# scale on C0 (actually removing a scale we previously added that's
# part of one common definition of the cosine transform.)
energy *= math.sqrt(2)
feature = torch.cat((feature, energy), dim=1)
feature = _subtract_column_mean(feature, subtract_mean)
return feature
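# Illustrative usage sketch (not part of the original module); same torch.rfft
# caveat as above. num_ceps must not exceed num_mel_bins (asserted in mfcc).
if __name__ == '__main__':
    _wave = torch.randn(1, 16000)
    _feats = mfcc(_wave, num_ceps=13, num_mel_bins=23)
    assert _feats.shape == (98, 13)  # 98 frames for a 16000-sample input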
def _get_LR_indices_and_weights(orig_freq: float,
new_freq: float,
output_samples_in_unit: int,
window_width: float,
lowpass_cutoff: float,
lowpass_filter_width: int,
device: torch.device,
                                dtype: torch.dtype) -> Tuple[Tensor, Tensor]:
r"""Based on LinearResample::SetIndexesAndWeights where it retrieves the weights for
resampling as well as the indices in which they are valid. LinearResample (LR) means
that the output signal is at linearly spaced intervals (i.e the output signal has a
frequency of ``new_freq``). It uses sinc/bandlimited interpolation to upsample/downsample
the signal.
    The reason the same filter is not reused across convolutions is that the
    sinc function may be sampled at different points in time. For example, suppose
    a signal is sampled at the timestamps (seconds)
        0         16        32
    and we want it to be sampled at the timestamps (seconds)
        0 5 10 15   20 25 30 35
    At the timestamp of 16, the delta timestamps are
        16 11 6 1 4 9 14 19
    and at the timestamp of 32, the delta timestamps are
        32 27 22 17 12 7 2 3
    As the deltas show, the sinc function is sampled at different points in time
    when its center sits at 0, 16, or 32 (compare the deltas [..., 6, 1, 4, ...]
    for 16 with [..., 2, 3] for 32). One special case is when ``orig_freq`` and
    ``new_freq`` are multiples of each other; then a single filter suffices.
    A windowed filter function (i.e. Hanning * sinc) is used because the ideal
    sinc function has infinite support (it is non-zero everywhere), so it is
    truncated and multiplied by a window function, which gives it a
    less-than-perfect rolloff [1].
[1] Chapter 16: Windowed-Sinc Filters, https://www.dspguide.com/ch16/1.htm
Args:
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
output_samples_in_unit (int): The number of output samples in the smallest repeating unit:
num_samp_out = new_freq / Gcd(orig_freq, new_freq)
window_width (float): The width of the window which is nonzero
lowpass_cutoff (float): The filter cutoff in Hz. The filter cutoff needs to be less
than samp_rate_in_hz/2 and less than samp_rate_out_hz/2.
lowpass_filter_width (int): Controls the sharpness of the filter, more == sharper but less
efficient. We suggest around 4 to 10 for normal use
Returns:
(Tensor, Tensor): A tuple of ``min_input_index`` (which is the minimum indices
where the window is valid, size (``output_samples_in_unit``)) and ``weights`` (which is the weights
which correspond with min_input_index, size (``output_samples_in_unit``, ``max_weight_width``)).
"""
assert lowpass_cutoff < min(orig_freq, new_freq) / 2
output_t = torch.arange(0., output_samples_in_unit, device=device, dtype=dtype) / new_freq
min_t = output_t - window_width
max_t = output_t + window_width
min_input_index = torch.ceil(min_t * orig_freq) # size (output_samples_in_unit)
max_input_index = torch.floor(max_t * orig_freq) # size (output_samples_in_unit)
num_indices = max_input_index - min_input_index + 1 # size (output_samples_in_unit)
max_weight_width = num_indices.max()
# create a group of weights of size (output_samples_in_unit, max_weight_width)
j = torch.arange(max_weight_width, device=device, dtype=dtype).unsqueeze(0)
input_index = min_input_index.unsqueeze(1) + j
delta_t = (input_index / orig_freq) - output_t.unsqueeze(1)
weights = torch.zeros_like(delta_t)
inside_window_indices = delta_t.abs().lt(window_width)
# raised-cosine (Hanning) window with width `window_width`
weights[inside_window_indices] = 0.5 * (1 + torch.cos(2 * math.pi * lowpass_cutoff /
lowpass_filter_width * delta_t[inside_window_indices]))
t_eq_zero_indices = delta_t.eq(0.0)
t_not_eq_zero_indices = ~t_eq_zero_indices
# sinc filter function
weights[t_not_eq_zero_indices] *= torch.sin(
2 * math.pi * lowpass_cutoff * delta_t[t_not_eq_zero_indices]) / (math.pi * delta_t[t_not_eq_zero_indices])
# limit of the function at t = 0
weights[t_eq_zero_indices] *= 2 * lowpass_cutoff
weights /= orig_freq # size (output_samples_in_unit, max_weight_width)
return min_input_index, weights
def _lcm(a: int, b: int) -> int:
return abs(a * b) // math.gcd(a, b)
def _get_num_LR_output_samples(input_num_samp: int,
samp_rate_in: float,
samp_rate_out: float) -> int:
r"""Based on LinearResample::GetNumOutputSamples. LinearResample (LR) means that
the output signal is at linearly spaced intervals (i.e the output signal has a
frequency of ``new_freq``). It uses sinc/bandlimited interpolation to upsample/downsample
the signal.
Args:
input_num_samp (int): The number of samples in the input
samp_rate_in (float): The original frequency of the signal
samp_rate_out (float): The desired frequency
Returns:
int: The number of output samples
"""
# For exact computation, we measure time in "ticks" of 1.0 / tick_freq,
# where tick_freq is the least common multiple of samp_rate_in and
# samp_rate_out.
samp_rate_in = int(samp_rate_in)
samp_rate_out = int(samp_rate_out)
tick_freq = _lcm(samp_rate_in, samp_rate_out)
ticks_per_input_period = tick_freq // samp_rate_in
# work out the number of ticks in the time interval
# [ 0, input_num_samp/samp_rate_in ).
interval_length_in_ticks = input_num_samp * ticks_per_input_period
if interval_length_in_ticks <= 0:
return 0
ticks_per_output_period = tick_freq // samp_rate_out
# Get the last output-sample in the closed interval, i.e. replacing [ ) with
# [ ]. Note: integer division rounds down. See
# http://en.wikipedia.org/wiki/Interval_(mathematics) for an explanation of
# the notation.
last_output_samp = interval_length_in_ticks // ticks_per_output_period
# We need the last output-sample in the open interval, so if it takes us to
# the end of the interval exactly, subtract one.
if last_output_samp * ticks_per_output_period == interval_length_in_ticks:
last_output_samp -= 1
# First output-sample index is zero, so the number of output samples
# is the last output-sample plus one.
num_output_samp = last_output_samp + 1
return num_output_samp
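# Worked example (not part of the original module): one second of 16 kHz audio
# resampled to 8 kHz. tick_freq = lcm(16000, 8000) = 16000, the interval spans
# 16000 ticks, and output samples sit every 2 ticks: samples 0..7999, i.e. 8000.
if __name__ == '__main__':
    assert _get_num_LR_output_samples(16000, 16000.0, 8000.0) == 8000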
def resample_waveform(waveform: Tensor,
orig_freq: float,
new_freq: float,
lowpass_filter_width: int = 6) -> Tensor:
r"""Resamples the waveform at the new frequency. This matches Kaldi's OfflineFeatureTpl ResampleWaveform
which uses a LinearResample (resample a signal at linearly spaced intervals to upsample/downsample
a signal). LinearResample (LR) means that the output signal is at linearly spaced intervals (i.e
the output signal has a frequency of ``new_freq``). It uses sinc/bandlimited interpolation to
upsample/downsample the signal.
https://ccrma.stanford.edu/~jos/resample/Theory_Ideal_Bandlimited_Interpolation.html
https://github.com/kaldi-asr/kaldi/blob/master/src/feat/resample.h#L56
Args:
waveform (Tensor): The input signal of size (c, n)
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
lowpass_filter_width (int, optional): Controls the sharpness of the filter, more == sharper
but less efficient. We suggest around 4 to 10 for normal use. (Default: ``6``)
Returns:
Tensor: The waveform at the new frequency
"""
device, dtype = waveform.device, waveform.dtype
assert waveform.dim() == 2
assert orig_freq > 0.0 and new_freq > 0.0
min_freq = min(orig_freq, new_freq)
lowpass_cutoff = 0.99 * 0.5 * min_freq
assert lowpass_cutoff * 2 <= min_freq
base_freq = math.gcd(int(orig_freq), int(new_freq))
input_samples_in_unit = int(orig_freq) // base_freq
output_samples_in_unit = int(new_freq) // base_freq
window_width = lowpass_filter_width / (2.0 * lowpass_cutoff)
first_indices, weights = _get_LR_indices_and_weights(
orig_freq, new_freq, output_samples_in_unit,
window_width, lowpass_cutoff, lowpass_filter_width, device, dtype)
assert first_indices.dim() == 1
    # TODO: figure out a better way to do this. conv1d reaches every element i*stride + padding;
    # all the weights have the same stride but different padding.
    # The current implementation takes the input and applies the appropriate padding before
    # doing a conv1d for that specific weight.
conv_stride = input_samples_in_unit
conv_transpose_stride = output_samples_in_unit
num_channels, wave_len = waveform.size()
window_size = weights.size(1)
tot_output_samp = _get_num_LR_output_samples(wave_len, orig_freq, new_freq)
output = torch.zeros((num_channels, tot_output_samp),
device=device, dtype=dtype)
# eye size: (num_channels, num_channels, 1)
eye = torch.eye(num_channels, device=device, dtype=dtype).unsqueeze(2)
for i in range(first_indices.size(0)):
wave_to_conv = waveform
first_index = int(first_indices[i].item())
if first_index >= 0:
# trim the signal as the filter will not be applied before the first_index
wave_to_conv = wave_to_conv[..., first_index:]
# pad the right of the signal to allow partial convolutions meaning compute
# values for partial windows (e.g. end of the window is outside the signal length)
max_unit_index = (tot_output_samp - 1) // output_samples_in_unit
end_index_of_last_window = max_unit_index * conv_stride + window_size
current_wave_len = wave_len - first_index
right_padding = max(0, end_index_of_last_window + 1 - current_wave_len)
left_padding = max(0, -first_index)
if left_padding != 0 or right_padding != 0:
wave_to_conv = torch.nn.functional.pad(wave_to_conv, (left_padding, right_padding))
conv_wave = torch.nn.functional.conv1d(
wave_to_conv.unsqueeze(0), weights[i].repeat(num_channels, 1, 1),
stride=conv_stride, groups=num_channels)
# we want conv_wave[:, i] to be at output[:, i + n*conv_transpose_stride]
dilated_conv_wave = torch.nn.functional.conv_transpose1d(
conv_wave, eye, stride=conv_transpose_stride).squeeze(0)
# pad dilated_conv_wave so it reaches the output length if needed.
        dilated_conv_wave_len = dilated_conv_wave.size(-1)
        left_padding = i
        right_padding = max(0, tot_output_samp - (left_padding + dilated_conv_wave_len))
dilated_conv_wave = torch.nn.functional.pad(
dilated_conv_wave, (left_padding, right_padding))[..., :tot_output_samp]
output += dilated_conv_wave
return output
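# Illustrative usage sketch (not part of the original module): halving the sample
# rate of one second of random stereo audio; all sizes here are hypothetical.
if __name__ == '__main__':
    _wave = torch.randn(2, 16000)  # (c, n)
    _resampled = resample_waveform(_wave, orig_freq=16000.0, new_freq=8000.0)
    assert _resampled.shape == (2, 8000)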
|
"""Define weapons and their stats"""
class Weapon:
def __init__(self):
self.name = 'Undefined Weapon'
self.description = 'Undefined'
self.damage = 0
self.value = 0
raise NotImplementedError("Not a real Weapon!")
def __str__(self):
return self.name
class Rock(Weapon):
def __init__(self):
self.name = "Rock"
self.description = "A fist-sized rock, suitable for bludgeoning."
self.damage = 5
self.value = 1
class Dagger(Weapon):
def __init__(self):
self.name = "Dagger"
self.description = "A small dagger with some rust. " \
"Somewhat more dangerous than a rock."
self.damage = 10
self.value = 20
class RustySword(Weapon):
def __init__(self):
self.name = "Rusty sword"
self.description = "This sword is showing its age, " \
"but still has some fight in it."
self.damage = 20
self.value = 50
class HeavyAxe(Weapon):
def __init__(self):
self.name = "Heavy Axe"
self.description = "It's an axe, and it's heavy." \
"What more do you want?"
self.damage = 40
self.value = 75
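# Illustrative check (not part of the original module): concrete weapons can be
# created, while the base class deliberately refuses direct instantiation.
if __name__ == '__main__':
    rock = Rock()
    print(rock, rock.damage)  # Rock 5
    try:
        Weapon()
    except NotImplementedError:
        print("Weapon is abstract")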
|
from .client import (
AnsaClient,
get_grpc_connection_options
)
from .AnsaGRPC_pb2_grpc import (
LassoAnsaDriverServicer,
LassoAnsaDriverStub
)
__all__ = [
    'AnsaClient',
    'get_grpc_connection_options',
    'LassoAnsaDriverStub',
    'LassoAnsaDriverServicer',
]
|
import os
from matplotlib import pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
filename = 'part3.npy'
x = np.linspace(-1.2, 0.6, 50)
y = np.linspace(-0.07, 0.07, 50)
X, Y = np.meshgrid(x, y)
if os.path.exists(filename):
    data = np.load(filename)
    ax.plot_wireframe(X, Y, data)
plt.ylim([-0.07, 0.07])
plt.xlim([-1.2, 0.6])
plt.xlabel('position')
plt.ylabel('velocity')
plt.show()
|
from pydataset import data
import time
import sys
sys.path.insert(1, '../')
import fastg3.ncrisp as g3ncrisp
df = data("diamonds")
xparams = {
'carat':{
'type': 'numerical',
'predicate': 'absolute_distance',
'params': [0.05]
},
'x':{
'type': 'numerical',
'predicate': 'absolute_distance',
'params': [0.05]
},
'y':{
'type': 'numerical',
'predicate': 'absolute_distance',
'params': [0.05]
},
'cut':{
'type': 'categorical',
'predicate': 'equality'
},
'color':{
'type': 'categorical',
'predicate': 'equality'
},
'clarity':{
'type': 'categorical',
'predicate': 'equality'
}
}
yparams = {
'price':{
'type': 'numerical',
'predicate': 'absolute_distance',
'params': [10]
}
}
if __name__ == '__main__':
    # Creates an interface with the C++ object; an error will be returned if any parameter is wrong
VPE = g3ncrisp.create_vpe_instance(df,
xparams,
yparams,
blocking=True,
opti_ordering=True,
join_type="auto",
verbose=True)
    # Finds all violating pairs
    start = time.time()
    vps = VPE.enum_vps()
    elapsed = time.time() - start
    print(f'Diamond dataset contains {len(df.index)} rows.')
    print(f'{len(vps)} violating pairs found in {elapsed:.3f}s.')
|
from sklearn import datasets
from sklearn import svm
from sklearn import metrics
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
cancer = datasets.load_breast_cancer()
# Features
print(cancer.feature_names)
# Labels
print(cancer.target_names)
# Splitting Data
x = cancer.data # All of the features
y = cancer.target # All of the labels
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)
# First five instances
print(x_train[:5], y_train[:5])
classes = ['malignant', 'benign']
# Classifier
clf = svm.SVC(kernel="linear", C=2)
# Worse option: accuracy varies a lot depending on n_neighbors
# clf = KNeighborsClassifier(n_neighbors=13)
clf.fit(x_train, y_train)
# Predict
y_pred = clf.predict(x_test)
# Accuracy
acc = metrics.accuracy_score(y_test, y_pred)
print(acc)
|
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils.translation import gettext_lazy as _
from .location import Location
class LocationTracker(models.Model):
class Status(models.TextChoices):
ACTIVE = "AC", _("Active")
INACTIVE = "IN", _("Inactive")
MISSING = "MI", _("Missing")
DECOMMISSIONED = "DE", _("Decommissioned")
bike = models.ForeignKey("Bike", on_delete=models.PROTECT, null=True, blank=True)
device_id = models.CharField(default=None, null=False, blank=True, max_length=255)
last_reported = models.DateTimeField(default=None, null=True, blank=True)
battery_voltage = models.FloatField(default=None, null=True, blank=True)
tracker_type = models.ForeignKey(
"LocationTrackerType", on_delete=models.PROTECT, null=True, blank=True
)
tracker_status = models.CharField(
max_length=2, choices=Status.choices, default=Status.INACTIVE
)
internal = models.BooleanField(
default=False,
help_text="""Internal trackers don't publish their locations to the enduser.
They are useful for backup trackers with lower accuracy e.g. wifi trackers.""",
)
def current_geolocation(self):
if not self.id:
return None
try:
return Location.objects.filter(
tracker=self, reported_at__isnull=False
).latest("reported_at")
except ObjectDoesNotExist:
return None
def __str__(self):
return str(self.device_id)
|
import pathlib
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
from fa2 import ForceAtlas2
from matplotlib.collections import LineCollection
from ccgowl.data.make_synthetic_data import standardize
from ccgowl.evaluation.cluster_metrics import spectral_clustering
from ccgowl.models.ccgowl import CCGOWLModel
from ccgowl.simulations.simulation import Simulation
from ccgowl.simulations.utils import convert_to_df_with_labels, pairs_in_clusters, compute_true_group, normalize_dfs
from ccgowl.visualization.curved_edges import curved_edges
def read_stock_data():
proj_root_path = pathlib.Path.cwd().parent.parent
data_path = 'data/raw/s_&_p/data.csv'
sector_path = 'data/raw/s_&_p/info.csv'
data_full_path = proj_root_path / data_path
sectors_full_path = proj_root_path / sector_path
data = pd.read_csv(data_full_path, index_col=0)
info = pd.read_csv(sectors_full_path)
data = standardize(data)
stock_names = info['V1']
sectors = info['V2']
data.columns = [f'{comp}/{gic}' for comp, gic in zip(stock_names, sectors)]
return data, data.columns
class StockData(Simulation):
def __init__(self, model, model_params):
self.data, self.info = read_stock_data()
_, self.p = self.data.shape
self.sample_cov = np.cov(self.data.T)
self.model = model(self.data.values, *model_params)
self.true_groups = pd.DataFrame()
self.predicted_groups = pd.DataFrame()
def run(self):
np.random.seed(680)
self.model.fit()
theta_hat = self.model.theta_hat
y_true_clusters_df = compute_true_group(theta_hat, self.info)
K = len(np.unique(y_true_clusters_df.values[np.tril_indices(self.p, -1)].tolist()))
theta_clusters = spectral_clustering(theta=theta_hat, K=K)
theta_clusters = [int(cluster) for cluster in theta_clusters]
theta_mat_clusters = np.zeros((self.p, self.p))
theta_mat_clusters[np.tril_indices(self.p, -1)] = theta_clusters
clusters_df = convert_to_df_with_labels(self.info, theta_mat_clusters.copy())
y_true_clusters_df = normalize_dfs(y_true_clusters_df, self.info, self.p)
clusters_df = normalize_dfs(clusters_df, self.info, self.p)
self.true_groups = pairs_in_clusters(y_true_clusters_df, K)
self.predicted_groups = pairs_in_clusters(clusters_df, K)
def plot_results(self):
df_true = self.true_groups
df_pred = self.predicted_groups
carac_dict = dict(zip(['Consumer Discretionary', 'Consumer Staples', 'Energy',
'Financials', 'Health Care', 'Industrials',
'Information Technology', 'Materials',
'Telecommunications Services', 'Utilities', 'No Group'],
['red', 'blue', 'gray', 'yellow', 'black',
'skyblue', 'orange', 'yellowgreen', 'pink', 'cyan', 'purple']))
        df_filtered = df_pred.loc[(df_pred['I'] != df_pred['J']) & (df_pred['Group'] != 10)].copy()
        df_filtered.drop_duplicates(['I', 'J'], inplace=True)
        df_true = df_true.loc[(df_true['I'] != df_true['J']) & (df_true['Group'] != 10)].copy()
        df_true.drop_duplicates(['I', 'J'], inplace=True)
G = nx.from_pandas_edgelist(df_filtered, 'I', 'J', create_using=nx.Graph())
unique_sectors_in_cluster = list(np.unique(list(df_true['I']) + list(df_true['J'])))
carac = pd.DataFrame({
'ID': unique_sectors_in_cluster,
'myvalue': [carac_dict[entry.split('/')[1]] for entry in unique_sectors_in_cluster],
})
carac = carac.set_index('ID')
carac = carac.reindex(G.nodes())
j = 0
for i, row in carac.iterrows():
if pd.isna(row['myvalue']):
carac.iloc[j, 0] = carac_dict[i.split('/')[1]]
j += 1
forceatlas2 = ForceAtlas2( # Behavior alternatives
outboundAttractionDistribution=True, # Dissuade hubs
linLogMode=False, # NOT IMPLEMENTED
adjustSizes=False, # Prevent overlap (NOT IMPLEMENTED)
edgeWeightInfluence=1.0,
# Performance
jitterTolerance=1.0, # Tolerance
barnesHutOptimize=True,
barnesHutTheta=1.2,
multiThreaded=False, # NOT IMPLEMENTED
# Tuning
scalingRatio=2.0,
strongGravityMode=False,
gravity=1.0,
# Log
verbose=True)
pos = forceatlas2.forceatlas2_networkx_layout(G, pos=None, iterations=50)
curves = curved_edges(G, pos)
lc = LineCollection(curves, color=(0.0, 0.0, 0.0), alpha=0.1, linewidths=5.0, linestyles='solid')
plt.figure(figsize=(20, 20))
plt.gca().add_collection(lc)
nx.draw_networkx_nodes(G, pos, node_size=800, alpha=0.9, node_color=list(carac['myvalue']))
plt.tick_params(axis='both', which='both', bottom=False, left=False, labelbottom=False, labelleft=False)
plt.show()
if __name__ == '__main__':
stock_sim = StockData(CCGOWLModel, [0.4, 0.0001])
stock_sim.run()
stock_sim.plot_results()
|
# -*- coding: utf-8 -*-
from pyramid.exceptions import ConfigurationError
from oereb_client import __version__
class Index(object):
def __init__(self, request):
"""Entry point for index rendering.
Args:
request (pyramid.request.Request): The request instance.
"""
self.request_ = request
self.config_ = request.registry.settings.get('oereb_client', {})
self.validate_()
def validate_(self):
if not isinstance(self.config_, dict):
raise ConfigurationError('Configuration needs to be a dictionary, got {0} instead'.format(
type(self.config_)
))
if not isinstance(self.config_.get('application'), dict):
raise ConfigurationError('Missing "application" configuration')
if 'title' not in self.config_.get('application'):
raise ConfigurationError('Missing application title')
if 'logo_canton' not in self.config_.get('application'):
raise ConfigurationError('Missing cantonal logo')
if 'languages' not in self.config_.get('application'):
raise ConfigurationError('Missing available languages')
if 'default_language' not in self.config_.get('application'):
raise ConfigurationError('Missing default language')
if not isinstance(self.config_.get('view'), dict):
raise ConfigurationError('Missing "view" configuration')
if 'map_x' not in self.config_.get('view'):
raise ConfigurationError('Missing map_x in view configuration')
if 'map_y' not in self.config_.get('view'):
raise ConfigurationError('Missing map_y in view configuration')
if 'map_zoom' not in self.config_.get('view'):
raise ConfigurationError('Missing map_zoom in view configuration')
if 'resolutions' not in self.config_.get('view'):
raise ConfigurationError('Missing resolutions in view configuration')
if not isinstance(self.config_.get('base_layer'), dict):
raise ConfigurationError('Missing "base_layer" configuration')
if not isinstance(self.config_.get('availability'), dict):
raise ConfigurationError('Missing "availability" configuration')
if not (
isinstance(self.config_.get('search'), list) or
isinstance(self.config_.get('search'), str)
):
raise ConfigurationError('Missing "search" configuration')
if not isinstance(self.config_.get('support'), dict):
raise ConfigurationError('Missing "support" configuration')
def is_debug_(self):
"""
Returns true if requested in debug mode.
Returns:
bool: True if requested in debug mode.
"""
local = self.request_.application_url.startswith('http://localhost')
debug = self.request_.params.get('debug') == 'true'
return local and debug
def get_google_analytics_(self):
"""
Returns the configuration for Google Analytics.
Returns:
str or None: The Google Analytics configuration.
"""
return self.config_.get('google_analytics', None)
def get_custom_css_url_(self):
"""
Returns the URL of the custom CSS file.
Returns:
str or None: The URL of the custom CSS file.
"""
return self.config_.get('custom_css_url', None)
def get_application_config_(self):
"""
Reads the application configuration and applies default values.
Returns:
dict: The application configuration.
"""
cfg = self.config_.get('application', {})
if 'icon' not in cfg:
cfg.update({
'icon': self.request_.static_url('oereb_client:static/images/favicon.png')
})
if 'logo_oereb' not in cfg:
cfg.update({
'logo_oereb': [
{
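# Note: the 'en' entry reuses the German asset (presumably as fallback; no English logo is referenced).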
'Language': 'en',
'URL': self.request_.static_url('oereb_client:static/images/logo_oereb_de.jpg')
},
{
'Language': 'de',
'URL': self.request_.static_url('oereb_client:static/images/logo_oereb_de.jpg')
},
{
'Language': 'fr',
'URL': self.request_.static_url('oereb_client:static/images/logo_oereb_fr.jpg')
},
{
'Language': 'it',
'URL': self.request_.static_url('oereb_client:static/images/logo_oereb_it.jpg')
},
{
'Language': 'rm',
'URL': self.request_.static_url('oereb_client:static/images/logo_oereb_rm.jpg')
}
]
})
return cfg
def get_service_url_(self):
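"""
Returns the configured service URL, normalized to end with a slash,
or the application's own index route as fallback.
Returns:
    str: The service URL.
"""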
service_url = self.config_.get('service_url', None)
if service_url is not None:
if service_url[-1] != '/':
service_url += '/'
return service_url
return self.request_.route_url(
'{0}/index'.format(self.request_.route_prefix)
)
def get_search_url_(self):
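"""
Returns the configured search URL if defined as a string,
or the application's own search route as fallback.
Returns:
    str: The search URL.
"""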
search = self.config_.get('search', None)
if isinstance(search, str):
return search
return self.request_.route_url(
'{0}/search'.format(self.request_.route_prefix)
)
def get_config(self):
"""
Returns the complete client configuration.
Returns:
    dict: The configuration dictionary.
"""
return {
'test_instance_notice': self.config_.get('test_instance_notice', None),
'application_url': self.request_.route_url(
'{0}/index'.format(self.request_.route_prefix)
),
'service_url': self.get_service_url_(),
'search_url': self.get_search_url_(),
'application': self.get_application_config_(),
'version': __version__,
'view': self.config_.get('view', {}),
'base_layer': self.config_.get('base_layer', {}),
'availability': self.config_.get('availability', {}),
'search': self.config_.get('search', {}),
'support': self.config_.get('support', {}),
'external_viewer': self.config_.get('external_viewer', {})
}
def get_title(self):
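"""
Returns the localized application title, preferring the requested
language, then the default language, then the first configured entry.
Returns:
    str: The application title.
"""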
title = None
if 'lang' in self.request_.params:
for item in self.config_['application']['title']:
if item['Language'] == self.request_.params.get('lang'):
title = item['Text']
break
elif item['Language'] == self.config_['application']['default_language']:
title = item['Text']
if title is None:
title = self.config_['application']['title'][0]['Text']
return title
def render(self):
"""
Returns the dictionary with rendering parameters.
Returns:
dict: Dictionary with rendering parameters.
"""
return {
'debug': self.is_debug_(),
'google_analytics': self.get_google_analytics_(),
'custom_css_url': self.get_custom_css_url_(),
'config': self.get_config(),
'title': self.get_title()
}
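# Minimal wiring sketch (assumption: the route and template names below are
# hypothetical, not taken from this package's actual configuration):
# config.add_route('oereb_client/index', '/')
# config.add_view(
#     lambda request: Index(request).render(),
#     route_name='oereb_client/index',
#     renderer='oereb_client:templates/index.html',
# )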
|
import re
import settings
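# settings must provide: ACCESS_TOKEN (GitHub personal access token),
# ORGANIZATION_NAME, PROJECT_REPOS (repo names to check; empty means all)
# and JIRA_PROJECT (issue key prefix expected in PR titles).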
def main():
try:
from github import Github
from github.GithubException import BadCredentialsException
except ImportError:
print('No github module found, try running pip install pygithub')
return
g = Github(settings.ACCESS_TOKEN)
try:
org = g.get_organization(settings.ORGANIZATION_NAME)
except BadCredentialsException:
print('Bad credentials. Go to https://github.com/settings/tokens ,')
print('generate new token there and put in local_settings.py ACCESS_TOKEN')
return
for repo in org.get_repos():
in_scope = repo.name in settings.PROJECT_REPOS
if settings.PROJECT_REPOS and not in_scope:
continue
for pull in repo.get_pulls():
print('[%s]' % repo.name, pull.title, pull.html_url)
if in_progress(pull):
print('\t', green('in progress'))
continue
mistakes = check(pull)
if not mistakes:
print('\t', green('OK'))
for m in mistakes:
print('\t-', red(m))
def in_progress(pull):
return 'in progress' in pull.body.lower()
def check(pull):
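# Return the list of human-readable policy violations found in the pull request.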
mistakes = []
if settings.JIRA_PROJECT not in pull.title: # TODO check for numbers
mistakes.append('No issue is mentioned in the title')
if len(pull.body) < 20:
mistakes.append('Description of changes made in PR should be present')
if not pull.mergeable:
mistakes.append('Has merge conflicts')
if reviews_approved(pull) < 1:
mistakes.append('PR should be reviewed and approved by at least one team member')
if not last_commit_built_successfully(pull):
mistakes.append('There are unsuccessful Travis checks in the last PR commit')
if not has_test_confirmation(pull):
mistakes.append('Should have comment "Tested [when] on build #[number]"')
return mistakes
def last_commit_built_successfully(pull):
last_commit = list(pull.get_commits())[-1]
status = last_commit.get_combined_status()
return status.state == 'success'
def reviews_approved(pull):
approvals = 0
for review in pull.get_reviews():
if review.state == 'APPROVED':
approvals += 1
return approvals
def has_test_confirmation(pull):
return True  # TODO: implement this properly; the draft below stays disabled until then
# Draft (unreachable while the early return above is in place):
# for c in pull.get_comments():
#     text = c.body.lower()
#     # Stricter variant: also require 'on build #' in the comment.
#     if 'tested' in text:
#         return True
# return False
NONE_COLOR = "\033[0m"
RED_COLOR = "\033[31m"
GREEN_COLOR = "\033[92m"
def red(s):
return RED_COLOR + s + NONE_COLOR
def green(s):
return GREEN_COLOR + s + NONE_COLOR
if __name__ == '__main__':
main()
|