from manga_download import *

download(TOONILY, 'happening', (0, 86), overridePages=False, downloadImages=True, overrideImages=False)
from holder import Holder
import PySimpleGUIQt as sg
import yaml
import multiprocessing

sg.theme('Dark2')

layout = [[sg.Text('HolderMC - your crossbow user')],
          [sg.Text('Delay (in seconds):'), sg.InputText()],
          [sg.Button('Run'), sg.Button('Stop')]]

window = sg.Window('HolderMC', layout, no_titlebar=False, alpha_channel=.7,
                   grab_anywhere=True, keep_on_top=True)

if __name__ == "__main__":
    try:
        with open('options.yml') as f:
            options = yaml.load(f, Loader=yaml.FullLoader)
    except FileNotFoundError:
        with open('options.yml', 'w') as f:
            yaml.dump({'delay': 10, 'draw_time': 0.9}, f)
        with open('options.yml') as f:
            options = yaml.load(f, Loader=yaml.FullLoader)

    while True:
        event, values = window.Read(close=False)

        if event == sg.WINDOW_CLOSED:
            try:
                crossbow.terminate()
            except NameError:
                pass
            break

        if event == 'Run':
            if values[0] != '':
                try:
                    options['delay'] = float(values[0])
                except ValueError:
                    sg.Popup('Input error', f'Please input a float, your input is {values[0]}',
                             keep_on_top=True)
            with open('options.yml', 'w') as f:
                yaml.dump(options, f)
            crossbow = multiprocessing.Process(target=Holder, args=(options,))
            crossbow.start()

        if event == 'Stop':
            try:
                crossbow.terminate()
            except NameError:
                pass

    window.close()
import torch
import torch.nn as nn
import torch.nn.functional as F

__all__ = ['DDSConv']


class route_func(nn.Module):

    def __init__(self, in_channels, out_channels, num_experts=3, reduction=16, mode='out'):
        super().__init__()
        # Global Average Pool
        self.gap1 = nn.AdaptiveAvgPool2d(1)
        self.gap3 = nn.AdaptiveAvgPool2d(3)

        squeeze_channels = max(in_channels // reduction, reduction)

        self.dwise_separable = nn.Sequential(
            nn.Conv2d(2 * in_channels, squeeze_channels, kernel_size=1, stride=1,
                      groups=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(squeeze_channels, squeeze_channels, kernel_size=3, stride=1,
                      groups=squeeze_channels, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(squeeze_channels,
                      num_experts * out_channels if mode == 'out' else num_experts * in_channels,
                      kernel_size=1, stride=1, groups=1, bias=False)
        )
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        b, _, _, _ = x.size()
        a1 = self.gap1(x)
        a3 = self.gap3(x)
        a1 = a1.expand_as(a3)
        attention = torch.cat([a1, a3], dim=1)
        attention = self.sigmoid(self.dwise_separable(attention))
        return attention


class DDSConv(nn.Module):

    def __init__(self, in_channels, out_channels, kernel_size, num_experts=3, stride=1,
                 padding=0, groups=1, reduction=16, deploy=False, mode='in'):
        super().__init__()
        self.deploy = deploy
        self.mode = mode
        self.num_experts = num_experts
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.stride = stride
        self.padding = padding
        self.groups = groups

        # routing function
        self.routing_func = route_func(in_channels, out_channels, num_experts, reduction, mode)

        # convs
        if deploy:
            self.convs = [nn.Parameter(torch.Tensor(out_channels, in_channels, kernel_size, kernel_size))
                          for i in range(num_experts)]
        else:
            self.convs = nn.ModuleList([nn.Conv2d(in_channels, out_channels, kernel_size,
                                                  stride=stride, padding=padding, groups=groups)
                                        for i in range(num_experts)])
            self.bns = nn.ModuleList([nn.BatchNorm2d(out_channels) for i in range(num_experts)])

    def forward(self, x):
        routing_weight = self.routing_func(x)  # N x k*C
        if self.deploy:
            convs = []
            for i in range(self.num_experts):
                route = routing_weight[:, i * self.out_channels: (i + 1) * self.out_channels].squeeze(0).unsqueeze(-1)
                weight = self.convs[i]
                weight = weight * route
                convs.append(weight)
            conv = sum(convs)
            output = F.conv2d(x, weight=conv, stride=self.stride, padding=self.padding,
                              groups=self.groups)
        else:
            outputs = []
            if self.mode == 'out':
                for i in range(self.num_experts):
                    route = routing_weight[:, i * self.out_channels: (i + 1) * self.out_channels]
                    # X * W
                    out = self.convs[i](x)
                    out = self.bns[i](out)
                    out = out * route.expand_as(out)
                    outputs.append(out)
                output = sum(outputs)
            else:
                for i in range(self.num_experts):
                    route = routing_weight[:, i * self.in_channels: (i + 1) * self.in_channels]
                    attention = x * route.expand_as(x)
                    # X * W
                    out = self.convs[i](attention)
                    out = self.bns[i](out)
                    outputs.append(out)
                output = sum(outputs)
        return output


def test():
    x = torch.randn(64, 16, 32, 32)
    conv = DDSConv(16, 64, 3, padding=1, mode='out')
    y = conv(x)
    print(y.shape)
    conv = DDSConv(16, 64, 3, padding=1, mode='in')
    y = conv(x)
    print(y.shape)

# test()
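# A minimal usage sketch of the DDSConv layer above, mirroring its test()
# helper; the batch size of 64 and the 32x32 spatial size are arbitrary.
# 'out' mode routes each expert's output, 'in' mode routes the input before
# each expert convolution.
import torch

x = torch.randn(64, 16, 32, 32)                    # N x C_in x H x W
layer = DDSConv(16, 64, 3, padding=1, mode='out')  # 3 experts by default
y = layer(x)
print(y.shape)                                     # torch.Size([64, 64, 32, 32])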
from ad_api.base import Client, sp_endpoint, fill_query_params, ApiResponse


class BidRecommendations(Client):

    @sp_endpoint('/v2/sp/adGroups/{}/bidRecommendations', method='GET')
    def get_ad_group_bid_recommendations(self, adGroupId, **kwargs) -> ApiResponse:
        r"""
        get_ad_group_bid_recommendations(self, adGroupId, \*\*kwargs) -> ApiResponse

        Gets a bid recommendation for an ad group.

        path **adGroupId**:*number* | Required. The identifier of an existing ad group.

        Returns:
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), adGroupId), params=kwargs)

    @sp_endpoint('/v2/sp/keywords/{}/bidRecommendations', method='GET')
    def get_keyword_bid_recommendations(self, keywordId, **kwargs) -> ApiResponse:
        r"""
        get_keyword_bid_recommendations(self, keywordId, \*\*kwargs) -> ApiResponse

        Gets a bid recommendation for a keyword.

        path **keywordId**:*number* | Required. The identifier of an existing keyword.

        Returns:
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), keywordId), params=kwargs)

    @sp_endpoint('/v2/sp/keywords/bidRecommendations', method='POST')
    def get_keywords_bid_recommendations(self, **kwargs) -> ApiResponse:
        r"""
        get_keywords_bid_recommendations(self, \*\*kwargs) -> ApiResponse:

        Gets bid recommendations for keywords.

        body: | REQUIRED {'description': 'An array of ad groups.'}

            | '**adGroupId**': *number*, {'description': 'The identifier of the ad group.'}
            | keywords {
            | '**keyword**': *string*, {'description': 'The keyword text.'}
            | '**matchType**': *string*, {'description': 'The type of match', 'Enum': '[ exact, phrase, broad ]'}
            | }

        Returns:
            ApiResponse
        """
        return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)

    @sp_endpoint('/v2/sp/targets/bidRecommendations', method='POST')
    def get_targets_bid_recommendations(self, **kwargs) -> ApiResponse:
        r"""
        get_targets_bid_recommendations(self, \*\*kwargs) -> ApiResponse:

        Gets a list of bid recommendations for keyword, product, or auto targeting expressions.

        body: | REQUIRED {'description': 'An array of ad groups.'}

            | '**adGroupId**': *number*, {'description': 'The ad group identifier.'}
            | expressions {
            | '**value**': *string*, {'description': 'The expression value.'}
            | '**type**': *string*, {'description': 'The type of targeting expression', 'Enum': '[ queryBroadMatches, queryPhraseMatches, queryExactMatches, asinCategorySameAs, asinBrandSameAs, asinPriceLessThan, asinPriceBetween, asinPriceGreaterThan, asinReviewRatingLessThan, asinReviewRatingBetween, asinReviewRatingGreaterThan, asinSameAs, queryBroadRelMatches, queryHighRelMatches, asinSubstituteRelated, asinAccessoryRelated, asinAgeRangeSameAs, asinGenreSameAs, asinIsPrimeShippingEligible ]'}
            | }

        Returns:
            ApiResponse
        """
        return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)
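# A hypothetical call sketch for get_keywords_bid_recommendations above. The
# import path and zero-argument client construction are assumptions (they
# depend on how ad_api is packaged and configured in your environment), and
# the adGroupId and keyword values are placeholders.
from ad_api.api.sp import BidRecommendations  # assumed import path

res = BidRecommendations().get_keywords_bid_recommendations(
    body={
        "adGroupId": 1234567890,           # placeholder ad group id
        "keywords": [
            {"keyword": "running shoes",   # placeholder keyword text
             "matchType": "exact"},        # one of: exact, phrase, broad
        ],
    }
)
print(res.payload)  # assumes ApiResponse exposes the parsed body as .payload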
import pytest

from joffrey import CLI, Group


@pytest.fixture
def cli():
    return CLI()


def test_empty_flag_prefix():
    with pytest.raises(ValueError):
        CLI(flag_prefix='')


def test_underscore_kwarg(cli):
    @cli.flag()
    def oh_hi():
        pass
    assert 'oh_hi' in cli.parse('--oh-hi')


def test_change_underscore(cli):
    cli.flag('oh_hi')(lambda: None)
    cli.flag('oh_hello', _='.')(lambda: None)
    assert 'oh_hello' in cli.parse('--oh.hello')
    assert 'oh_hello' in cli.parse('-e')  # next untaken alphanumeric alias
    assert 'oh_hi' in cli.parse('-o')


def test_bad_group_names(cli):
    cli.name_conflict = Group()
    with pytest.raises(ValueError):
        cli.name_conflict = Group()


def test_nonexistents(cli):
    for x in ('remove', 'getarg', 'getflag', 'getcmd'):
        with pytest.raises(KeyError):
            getattr(cli, x)('this does not exist')
    assert not cli.hasany('this also does not exist')


def test__quote_unquote__subnamespace_for_codecov(cli):
    cli.flag('a')(lambda: None)
    cli.parse('-a')._.pretty()
from setuptools import find_packages, setup

with open("README.md", mode="r", encoding="utf-8") as f:
    readme = f.read()

with open("LICENSE", mode="r", encoding="utf-8") as f:
    license_text = f.read()

setup(
    name="diptych",
    version="0.0.1",
    description=(
        "Detect multiple pages in a scanned image, split them, and apply OCR."
    ),
    long_description=readme,
    author="Vincent LE GARREC",
    author_email="github@le-garrec.fr",
    url="https://github.com/bansan85/diptych",
    license=license_text,
    packages=find_packages(exclude=("tests", "docs")),
)
# -*- coding: utf-8 -*-
u"""Utils for SecureTea Auto Server Patcher

Project:
    ╔═╗┌─┐┌─┐┬ ┬┬─┐┌─┐╔╦╗┌─┐┌─┐
    ╚═╗├┤ │  │ │├┬┘├┤  ║ ├┤ ├─┤
    ╚═╝└─┘└─┘└─┘┴└─└─┘ ╩ └─┘┴ ┴

    Author: Abhishek Sharma <abhishek_official@hotmail.com> , Jun 20 2019
    Version: 1.4
    Module: SecureTea

"""

import platform
import os


def check_root():
    """
    Check whether the program is running as root or not.

    Args:
        None

    Raises:
        None

    Returns:
        bool: True if running as root, else False
    """
    user = os.getuid()
    return user == 0


def categorize_os():
    """
    Categorize operating system by its parent distribution.

    Args:
        None

    Raises:
        None

    Returns:
        str or None: Parent distribution name ("debian"), or None if the OS is not in the list
    """
    os_name = get_system_name()
    if os_name in ["ubuntu", "kali", "backtrack", "debian"]:
        return "debian"
    # elif some other OS, add their name
    else:
        # if OS not in list
        return None


def get_system_name():
    """
    Return the name of the operating system.

    Args:
        None

    Raises:
        None

    Returns:
        os_name (str): Name of the operating system
    """
    # NOTE: platform.dist() was deprecated in Python 3.5 and removed in 3.8.
    os_name = platform.dist()[0]
    return os_name.lower()
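# Because platform.dist() was removed in Python 3.8, get_system_name() above
# raises AttributeError on modern interpreters. A hedged fallback sketch using
# the third-party `distro` package (a common replacement, not part of the
# original module):
import platform


def get_system_name_portable():
    """Return the lowercased distribution name on both old and new Pythons."""
    try:
        return platform.dist()[0].lower()  # Python < 3.8 only
    except AttributeError:
        import distro  # pip install distro
        return distro.id().lower()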
import random

pc = random.randint(0, 10)
print('I am your computer... I just thought of a number from 0 to 10.')
print('Can you guess which one it was?')

count = 0
acertou = False
while not acertou:
    guess = int(input('What is your guess? '))
    count = count + 1
    if guess == pc:
        acertou = True
    else:
        if guess < pc:
            print('Higher... try one more time!')
        elif guess > pc:
            print('Lower... try one more time.')
print('You got it! You took {} guesses and the number was {}'.format(count, pc))
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")

from datetime import datetime, timedelta
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Data import *
from QuantConnect.Data.Custom.Robintrack import *

### <summary>
### Looks at users holding the stock AAPL at a given point in time
### and keeps track of changes in retail investor sentiment.
###
### We go long if the sentiment increases by 0.5%, and short if it decreases by 0.5%.
### </summary>
class RobintrackHoldingsAlgorithm(QCAlgorithm):
    def Initialize(self):
        self.lastValue = 0

        self.SetStartDate(2018, 5, 1)
        self.SetEndDate(2020, 5, 5)
        self.SetCash(100000)

        self.aapl = self.AddEquity("AAPL", Resolution.Daily).Symbol
        self.aaplHoldings = self.AddData(RobintrackHoldings, self.aapl).Symbol

        self.isLong = False

    def OnData(self, data):
        for kvp in data.Get(RobintrackHoldings):
            holdings = kvp.Value

            if self.lastValue != 0:
                percentChange = (holdings.UsersHolding - self.lastValue) / self.lastValue
                holdingInfo = f"There are {holdings.UsersHolding} unique users holding {kvp.Key.Underlying} - users holding % of U.S. equities universe: {holdings.UniverseHoldingPercent * 100.0}%"

                if percentChange >= 0.005 and not self.isLong:
                    self.Log(f"{self.UtcTime} - Buying AAPL - {holdingInfo}")
                    self.SetHoldings(self.aapl, 0.5)
                    self.isLong = True

                elif percentChange <= -0.005 and self.isLong:
                    self.Log(f"{self.UtcTime} - Shorting AAPL - {holdingInfo}")
                    self.SetHoldings(self.aapl, -0.5)
                    self.isLong = False

            self.lastValue = holdings.UsersHolding
import numpy as np

import basico_forward


def h(x, pre, c):
    return (x**pre).prod(1) * c


def gillespie(x, c, pre, post, max_t):
    """
    Gillespie simulation

    Parameters
    ----------
    x: 1D array of size n_species
        The initial numbers.
    c: 1D array of size n_reactions
        The reaction rates.
    pre: array of size n_reactions x n_species
        What is to be consumed.
    post: array of size n_reactions x n_species
        What is to be produced
    max_t: int
        Simulate up to time max_t

    Returns
    -------
    t, X: 1d array, 2d array
        t: The time points.
        X: The history of the species.
           ``X.shape == (t.size, x.size)``
    """
    t = 0
    t_store = [t]
    x_store = [x.copy()]
    S = post - pre

    while t < max_t:
        h_vec = h(x, pre, c)
        h0 = h_vec.sum()
        if h0 == 0:
            break
        delta_t = np.random.exponential(1 / h0)
        # no reaction can occur any more
        if not np.isfinite(delta_t):
            t_store.append(max_t)
            x_store.append(x)
            break
        reaction = np.random.choice(c.size, p=h_vec / h0)
        t = t + delta_t
        x = x + S[reaction]
        t_store.append(t)
        x_store.append(x)

    return np.array(t_store), np.array(x_store)


MAX_T = 0.1


class Model1:
    __name__ = "Model 1"
    x0 = np.array([40, 3])  # Initial molecule numbers
    pre = np.array([[1, 1]], dtype=int)
    post = np.array([[0, 2]])

    def __call__(self, par):
        t, X = gillespie(self.x0,
                         np.array([float(par["rate"])]),
                         self.pre, self.post, MAX_T)
        return {"t": t, "X": X}


class Model2(Model1):
    __name__ = "Model 2"
    pre = np.array([[1, 0]], dtype=int)
    post = np.array([[0, 1]])


if __name__ == "__main__":
    import matplotlib.pyplot as plt

    true_rate = 2.3
    observations = [
        Model1()({"rate": true_rate}),
        Model2()({"rate": 30}),
        basico_forward.BasicoModel('./data/abc_example.xml', MAX_T)({"(R1).k1": true_rate, "(R2).k1": true_rate}),
        basico_forward.BasicoModel('./data/abc_example.xml', MAX_T)({"(R1).k1": 30, "(R2).k1": 30}),
    ]

    fig, axes = plt.subplots(ncols=4)
    fig.set_size_inches((12, 4))
    for ax, title, obs in zip(axes,
                              ["Observation", "Competition", "Basico (true)", "Basico (30)"],
                              observations):
        ax.step(obs["t"], obs["X"])
        ax.legend(["Species X", "Species Y"])
        ax.set_xlabel("Time")
        ax.set_ylabel("Concentration")
        ax.set_title(title)
    plt.savefig('./out/out.png')
    print(observations[0])
    print(observations[2])
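# A direct call sketch against Model1's stoichiometry above: one reaction that
# consumes an X and a Y and produces two Y. The seed is only to make this
# sketch reproducible; the rate 2.3 matches the true_rate used in the
# __main__ block above.
import numpy as np

np.random.seed(0)
t, X = gillespie(np.array([40, 3]),              # initial counts of X and Y
                 np.array([2.3]),                # reaction rate c
                 np.array([[1, 1]], dtype=int),  # pre: consumed per reaction
                 np.array([[0, 2]]),             # post: produced per reaction
                 max_t=0.1)
print(t.shape, X.shape)                          # X.shape == (t.size, 2)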
"""Hide-and-Seek Privacy Challenge Codebase. Reference: James Jordon, Daniel Jarrett, Jinsung Yoon, Ari Ercole, Cheng Zhang, Danielle Belgrave, Mihaela van der Schaar, "Hide-and-Seek Privacy Challenge: Synthetic Data Generation vs. Patient Re-identification with Clinical Time-series Data," Neural Information Processing Systems (NeurIPS) Competition, 2020. Link: https://www.vanderschaar-lab.com/announcing-the-neurips-2020-hide-and-seek-privacy-challenge/ Last updated Date: June 21th 2020 Code author: Jinsung Yoon Contact: jsyoon0823@gmail.com ----------------------------- binary_predictor.py Note: Make binary predictor that predict synthetic data from original enlarged data. Then, use the predicted scores as the distance between synthetic and real data """ # Necessary packages import numpy as np import sys from .general_rnn import GeneralRNN def binary_predictor (generated_data, enlarge_data, tensorboard_dir): """Find top gen_no enlarge data whose predicted scores is largest using the trained predictor. Args: - generated_data: generated data points - enlarge_data: train data + remaining data Returns: - reidentified_data: 1 if it is used as train data, 0 otherwise """ # Parameters enl_no, seq_len, dim = enlarge_data.shape gen_no, _, _ = generated_data.shape # Set model parameters model_parameters = {'task': 'classification', 'model_type': 'gru', 'h_dim': dim, 'n_layer': 3, 'batch_size': 128, 'epoch': 20, 'learning_rate': 0.001} # Set training features and labels train_x = np.concatenate((generated_data.copy(), enlarge_data.copy()), axis = 0) train_y = np.concatenate((np.zeros([gen_no, 1]), np.ones([enl_no, 1])), axis = 0) idx = np.random.permutation(enl_no+gen_no) train_x = train_x[idx, :, :] train_y = train_y[idx, :] # Train the binary predictor general_rnn = GeneralRNN(model_parameters, tensorboard_dir) general_rnn.fit(train_x, train_y) # Measure the distance from synthetic data using the trained model distance = general_rnn.predict(enlarge_data) # Check the threshold distance for top gen_no for 1-NN distance thresh = sorted(distance)[gen_no] # Return the decision for reidentified data reidentified_data = 1*(distance <= thresh) return reidentified_data
#
# PySNMP MIB module INTEL-ES480-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/INTEL-ES480-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:54:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, MibIdentifier, ObjectIdentity, IpAddress, ModuleIdentity, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Counter64, enterprises, Unsigned32, NotificationType, iso, Gauge32, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "ObjectIdentity", "IpAddress", "ModuleIdentity", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Counter64", "enterprises", "Unsigned32", "NotificationType", "iso", "Gauge32", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
intel = MibIdentifier((1, 3, 6, 1, 4, 1, 343))
sysProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 5))
switches = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 5, 1))
mib2ext = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 6))
es480tAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 6, 60))
es480t = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 5, 1, 15))
es480tSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 6, 60, 1))
mibBuilder.exportSymbols("INTEL-ES480-MIB", es480tAgent=es480tAgent, es480tSystem=es480tSystem, switches=switches, es480t=es480t, sysProducts=sysProducts, intel=intel, mib2ext=mib2ext)
b = [22, 25, 28, 28, 33, 36, 37, 41, 41, 41, 47, 48, 52, 55, 55, 60, 60]


def new_list(a):
    new_list = []
    for i in a:
        if i not in new_list:
            new_list.append(i)
    print(new_list)


new_list(b)
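# An equivalent one-liner for the loop above: since Python 3.7 dict keys
# preserve insertion order, so dict.fromkeys deduplicates while keeping the
# first occurrence of each value.
deduped = list(dict.fromkeys(b))
print(deduped)  # [22, 25, 28, 33, 36, 37, 41, 47, 48, 52, 55, 60]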
import numpy as np
import cv2
import vispy
import vispy.scene
from vispy.scene import visuals
import tensorflow as tf
import src.config
import sys
from absl import flags
from src.util import image as img_util
from src.RunModel import RunModel
import datetime


def inside(r, q):
    rx, ry, rw, rh = r
    qx, qy, qw, qh = q
    return rx > qx and ry > qy and rx + rw < qx + qw and ry + rh < qy + qh


def cutout_detections(img, rects):
    x, y, w, h = rects
    # the HOG detector returns slightly larger rectangles than the real objects,
    # so we slightly shrink the rectangles to get a nicer output.
    # pad_w, pad_h = int(0.15*w), int(0.05*h)
    # cv2.rectangle(img, (x+pad_w, y+pad_h), (x+w-pad_w, y+h-pad_h), (0, 255, 0), thickness)
    return img[y:y + h, x:x + w]


def preprocess_image(img):
    if np.max(img.shape[:2]) != config.img_size:
        # print('Resizing so the max image size is %d..' % img_size)
        scale = (float(config.img_size) / np.max(img.shape[:2]))
    else:
        scale = 1.
    center = np.round(np.array(img.shape[:2]) / 2).astype(int)
    # image center in (x,y)
    center = center[::-1]

    crop, proc_param = img_util.scale_and_crop(img, scale, center, config.img_size)

    # Normalize image to [-1, 1]
    crop = 2 * ((crop / 255.) - 0.5)

    return crop


def main():
    # Video capture
    cap = cv2.VideoCapture(0)
    cap.set(3, 1280)
    cap.set(4, 1024)

    # People Detection
    hog = cv2.HOGDescriptor()
    hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())

    # Make a canvas and add simple view
    canvas = vispy.scene.SceneCanvas(keys='interactive', show=True)
    view = canvas.central_widget.add_view()

    # create scatter object
    scatter = visuals.Markers()
    # generate data or figure out how to prevent crash without data ^^
    pos = np.random.normal(size=(100000, 3), scale=0.2)
    scatter.set_data(pos, edge_color=None, face_color=(1, 1, 1, .5), size=5)
    view.add(scatter)

    # configure view
    view.camera = 'turntable'  # or try 'arcball'
    axis = visuals.XYZAxis(parent=view.scene)

    # load model
    sess = tf.Session()
    model = RunModel(config, sess=sess)

    while True:
        # Capture frame-by-frame
        ret, frame = cap.read()

        # cutout person
        found, w = hog.detectMultiScale(frame, winStride=(8, 8), padding=(32, 32), scale=1.05)
        found_filtered = []
        for ri, r in enumerate(found):
            for qi, q in enumerate(found):
                if ri != qi and inside(r, q):
                    break
            else:
                found_filtered.append(r)
        print('%d (%d) found' % (len(found_filtered), len(found)))

        if len(found_filtered) > 0:
            person = cutout_detections(frame, found_filtered[0])

            # correct dimensions for detection
            processed = preprocess_image(person)
            # Add batch dimension: 1 x D x D x 3
            input_img = np.expand_dims(processed, 0)

            # Theta is the 85D vector holding [camera, pose, shape]
            # where camera is 3D [s, tx, ty]
            # pose is 72D vector holding the rotation of 24 joints of SMPL in axis angle format
            # shape is 10D shape coefficients of SMPL
            start = datetime.datetime.now()
            joints, verts, cams, joints3d, theta = model.predict(input_img, get_theta=True)
            end = datetime.datetime.now()
            delta = end - start
            print("took:", delta)

            # Display Camera frame
            cv2.imshow('frame', frame)
            cv2.imshow('processed', processed)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

            # Display Plot
            # pos = np.random.normal(size=(100000, 3), scale=0.2)
            scatter.set_data(verts[0], edge_color=None, face_color=(1, 1, 1, .5), size=5)

    # When everything done, release the capture
    cap.release()
    cv2.destroyAllWindows()


if __name__ == '__main__':
    config = flags.FLAGS
    config(sys.argv)
    # Using pre-trained model, change this to use your own.
    config.load_path = src.config.PRETRAINED_MODEL
    config.batch_size = 1

    # renderer = vis_util.SMPLRenderer(face_path=config.smpl_face_path)

    main()
# panxapi.py: the XPath argument after the command must be wrapped in double quotation marks
import subprocess

from lxml import etree

# cmd = "panxapi.py -xs \"/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/address-group\""
cmd = "panxapi.py -xs \"/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/address\""

p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.stdout.read()
err = p.stderr.read()
print(out.decode("gbk"), type(out.decode("gbk")))
# dict = json.loads(out.decode("gbk"))
# print(dict, dict["entry"], type(dict))
# print(out.decode("utf-8") == "")
# print(err.decode("gbk"))
# print(err.decode("utf-8"))

# Parse the output into an element tree that can be queried with XPath.
xml = etree.fromstring(out.decode('utf-8'), etree.XMLParser(remove_blank_text=True))
# result = etree.tostring(xml, encoding='utf-8')  # serialize the parsed tree back to source
# print(type(xml))
# print(type(result))
# print(result.decode('utf-8'))

# Select the @name of every <entry> whose <tag> has a member equal to "paupdate".
b = xml.xpath('//entry/tag[member="paupdate"]/../@name')
print(type(b), b, b[0])
# b = xml.xpath('//entry[@name="tagTestGroup"]//text()')
# print(type(b), b, b[0])
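# To show what the XPath above selects, a self-contained sketch on a made-up
# fragment of the PAN-OS <address> config (entry names are placeholders):
from lxml import etree

sample = b"""
<address>
  <entry name="host-a"><tag><member>paupdate</member></tag></entry>
  <entry name="host-b"><tag><member>other</member></tag></entry>
</address>
"""
tree = etree.fromstring(sample, etree.XMLParser(remove_blank_text=True))

# Match <tag> elements containing a member equal to "paupdate", step up to
# the parent <entry>, and return its name attribute.
print(tree.xpath('//entry/tag[member="paupdate"]/../@name'))  # ['host-a']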
import numpy as np
from math import pi


def binned_profile(y, x, bins=20):
    '''Create a profile of y(x) in the specified bins.

    Parameters
    ----------
    y : array_like
        The y-coordinates of the data points. This should be 1-dimensional.
    x : array_like
        The x-coordinates of the data points. This should be 1-dimensional
        and the same size as `y`.
    bins : array_like or int
        The bin edges of the profile. If this is an integer, `bins` is the
        number of bins that will be equally distributed along the whole range
        of `x`.

    Returns
    -------
    bin_centers : array_like
        The center of each of the bins.
    profile : array_like
        The y-values of the resulting profile.
    std_profile : array_like
        The standard deviation within each bin.
    num_per_bin : array_like
        The number of samples per bin.

    Raises
    ------
    RuntimeError
        If the number of bins is negative or zero.
    '''
    if np.isscalar(bins):
        if bins <= 0:
            raise RuntimeError('The number of bins should be positive.')

        # Equally space the bins.
        bins = np.linspace(np.min(x), np.max(x), bins + 1)

    bin_centers = (bins[1:] + bins[:-1]) / 2
    num_bins = len(bin_centers)

    num_per_bin = np.histogram(x, bins)[0]
    which_bin = np.digitize(x, bins)

    profile = np.array([np.nanmean(y[which_bin == b]) for b in range(1, num_bins + 1)])
    std_profile = np.array([np.nanstd(y[which_bin == b]) for b in range(1, num_bins + 1)])

    return bin_centers, profile, std_profile, num_per_bin


def azimutal_profile(image, num_bins):
    '''Create an azimuthal profile of the image around its center.

    Parameters
    ----------
    image : Field
        The image that we want an azimuthal profile from. This image must be
        two-dimensional.
    num_bins : int
        The number of bins in theta. Bins will be equally distributed in theta.

    Returns
    -------
    bin_centers : array_like
        The center of each of the bins.
    profile : array_like
        The y-values of the resulting azimuthal profile.
    std_profile : array_like
        The standard deviation within each bin.
    num_per_bin : array_like
        The number of samples per bin.
    '''
    theta = image.grid.as_('polar').theta
    bins = np.linspace(-pi, pi, num_bins + 1)

    return binned_profile(image.flat, theta.flat, bins)


def radial_profile(image, bin_size):
    '''Create a radial profile of the image around its center.

    Parameters
    ----------
    image : Field
        The image that we want a radial profile from. This image must be
        two-dimensional.
    bin_size : scalar
        The extent of each bin. Each bin will be a ring from r to r+`bin_size`.

    Returns
    -------
    bin_centers : array_like
        The center of each of the bins.
    profile : array_like
        The y-values of the resulting radial profile.
    std_profile : array_like
        The standard deviation within each bin.
    num_per_bin : array_like
        The number of samples per bin.
    '''
    r = image.grid.as_('polar').r
    n_bins = int(np.ceil(r.max() / bin_size))
    max_bin = n_bins * bin_size
    bins = np.linspace(0, max_bin, n_bins + 1)

    return binned_profile(image, r, bins)
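# A quick check of binned_profile on synthetic data. Plain NumPy arrays are
# used because the Field-based helpers above expect hcipy-style polar grids.
import numpy as np

sample_x = np.random.uniform(0, 2 * np.pi, 10000)
noisy_y = np.sin(sample_x) + np.random.normal(0, 0.1, sample_x.size)

centers, profile, std_profile, num_per_bin = binned_profile(noisy_y, sample_x, bins=20)
# profile should track sin(centers) and std_profile should sit near 0.1.
print(np.abs(profile - np.sin(centers)).max())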
from datetime import datetime

import pytest
from pytz import utc

from api.files.serializers import FileSerializer
from api_tests import utils
from osf_tests.factories import (
    UserFactory,
    NodeFactory,
)
from tests.utils import make_drf_request_with_version


@pytest.fixture()
def user():
    return UserFactory()


@pytest.mark.django_db
class TestFileSerializer:

    @pytest.fixture()
    def node(self, user):
        return NodeFactory(creator=user)

    @pytest.fixture()
    def file_one(self, node, user):
        return utils.create_test_file(node, user)

    def test_file_serializer(self, file_one):
        date_created = file_one.versions.first().date_created
        date_modified = file_one.versions.last().date_created
        date_created_tz_aware = date_created.replace(tzinfo=utc)
        date_modified_tz_aware = date_modified.replace(tzinfo=utc)
        new_format = '%Y-%m-%dT%H:%M:%S.%fZ'

        # test_date_modified_formats_to_old_format
        req = make_drf_request_with_version(version='2.0')
        data = FileSerializer(file_one, context={'request': req}).data['data']
        assert date_modified_tz_aware == data['attributes']['date_modified']

        # test_date_modified_formats_to_new_format
        req = make_drf_request_with_version(version='2.2')
        data = FileSerializer(file_one, context={'request': req}).data['data']
        assert datetime.strftime(date_modified, new_format) == data['attributes']['date_modified']

        # test_date_created_formats_to_old_format
        req = make_drf_request_with_version(version='2.0')
        data = FileSerializer(file_one, context={'request': req}).data['data']
        assert date_created_tz_aware == data['attributes']['date_created']

        # test_date_created_formats_to_new_format
        req = make_drf_request_with_version(version='2.2')
        data = FileSerializer(file_one, context={'request': req}).data['data']
        assert datetime.strftime(date_created, new_format) == data['attributes']['date_created']
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/resources/bidding_strategy.proto

from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.ads.google_ads.v5.proto.common import bidding_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2
from google.ads.google_ads.v5.proto.enums import bidding_strategy_status_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__status__pb2
from google.ads.google_ads.v5.proto.enums import bidding_strategy_type_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__type__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2


DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/ads/googleads_v5/proto/resources/bidding_strategy.proto',
  package='google.ads.googleads.v5.resources',
  syntax='proto3',
  serialized_options=b'\n%com.google.ads.googleads.v5.resourcesB\024BiddingStrategyProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v5/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V5.Resources\312\002!Google\\Ads\\GoogleAds\\V5\\Resources\352\002%Google::Ads::GoogleAds::V5::Resources',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n>google/ads/googleads_v5/proto/resources/bidding_strategy.proto\x12!google.ads.googleads.v5.resources\x1a\x32google/ads/googleads_v5/proto/common/bidding.proto\x1a\x41google/ads/googleads_v5/proto/enums/bidding_strategy_status.proto\x1a?google/ads/googleads_v5/proto/enums/bidding_strategy_type.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto\"\xfc\x07\n\x0f\x42iddingStrategy\x12G\n\rresource_name\x18\x01 \x01(\tB0\xe0\x41\x05\xfa\x41*\n(googleads.googleapis.com/BiddingStrategy\x12\x14\n\x02id\x18\x10 \x01(\x03\x42\x03\xe0\x41\x03H\x01\x88\x01\x01\x12\x11\n\x04name\x18\x11 \x01(\tH\x02\x88\x01\x01\x12\x63\n\x06status\x18\x0f \x01(\x0e\x32N.google.ads.googleads.v5.enums.BiddingStrategyStatusEnum.BiddingStrategyStatusB\x03\xe0\x41\x03\x12]\n\x04type\x18\x05 \x01(\x0e\x32J.google.ads.googleads.v5.enums.BiddingStrategyTypeEnum.BiddingStrategyTypeB\x03\xe0\x41\x03\x12)\n\x17\x65\x66\x66\x65\x63tive_currency_code\x18\x14 \x01(\tB\x03\xe0\x41\x03H\x03\x88\x01\x01\x12 \n\x0e\x63\x61mpaign_count\x18\x12 \x01(\x03\x42\x03\xe0\x41\x03H\x04\x88\x01\x01\x12,\n\x1anon_removed_campaign_count\x18\x13 \x01(\x03\x42\x03\xe0\x41\x03H\x05\x88\x01\x01\x12\x43\n\x0c\x65nhanced_cpc\x18\x07 \x01(\x0b\x32+.google.ads.googleads.v5.common.EnhancedCpcH\x00\x12?\n\ntarget_cpa\x18\t \x01(\x0b\x32).google.ads.googleads.v5.common.TargetCpaH\x00\x12X\n\x17target_impression_share\x18\x30 \x01(\x0b\x32\x35.google.ads.googleads.v5.common.TargetImpressionShareH\x00\x12\x41\n\x0btarget_roas\x18\x0b \x01(\x0b\x32*.google.ads.googleads.v5.common.TargetRoasH\x00\x12\x43\n\x0ctarget_spend\x18\x0c \x01(\x0b\x32+.google.ads.googleads.v5.common.TargetSpendH\x00:h\xea\x41\x65\n(googleads.googleapis.com/BiddingStrategy\x12\x39\x63ustomers/{customer}/biddingStrategies/{bidding_strategy}B\x08\n\x06schemeB\x05\n\x03_idB\x07\n\x05_nameB\x1a\n\x18_effective_currency_codeB\x11\n\x0f_campaign_countB\x1d\n\x1b_non_removed_campaign_countB\x81\x02\n%com.google.ads.googleads.v5.resourcesB\x14\x42iddingStrategyProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v5/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V5.Resources\xca\x02!Google\\Ads\\GoogleAds\\V5\\Resources\xea\x02%Google::Ads::GoogleAds::V5::Resourcesb\x06proto3'
  ,
  dependencies=[google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__type__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])


_BIDDINGSTRATEGY = _descriptor.Descriptor(
  name='BiddingStrategy',
  full_name='google.ads.googleads.v5.resources.BiddingStrategy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='resource_name', full_name='google.ads.googleads.v5.resources.BiddingStrategy.resource_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\005\372A*\n(googleads.googleapis.com/BiddingStrategy', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id', full_name='google.ads.googleads.v5.resources.BiddingStrategy.id', index=1,
      number=16, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='google.ads.googleads.v5.resources.BiddingStrategy.name', index=2,
      number=17, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='status', full_name='google.ads.googleads.v5.resources.BiddingStrategy.status', index=3,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='google.ads.googleads.v5.resources.BiddingStrategy.type', index=4,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='effective_currency_code', full_name='google.ads.googleads.v5.resources.BiddingStrategy.effective_currency_code',
      index=5, number=20, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='campaign_count', full_name='google.ads.googleads.v5.resources.BiddingStrategy.campaign_count', index=6,
      number=18, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='non_removed_campaign_count', full_name='google.ads.googleads.v5.resources.BiddingStrategy.non_removed_campaign_count', index=7,
      number=19, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\340A\003', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='enhanced_cpc', full_name='google.ads.googleads.v5.resources.BiddingStrategy.enhanced_cpc', index=8,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='target_cpa', full_name='google.ads.googleads.v5.resources.BiddingStrategy.target_cpa', index=9,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='target_impression_share', full_name='google.ads.googleads.v5.resources.BiddingStrategy.target_impression_share', index=10,
      number=48, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='target_roas', full_name='google.ads.googleads.v5.resources.BiddingStrategy.target_roas', index=11,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='target_spend', full_name='google.ads.googleads.v5.resources.BiddingStrategy.target_spend', index=12,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'\352Ae\n(googleads.googleapis.com/BiddingStrategy\0229customers/{customer}/biddingStrategies/{bidding_strategy}',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='scheme', full_name='google.ads.googleads.v5.resources.BiddingStrategy.scheme',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_id', full_name='google.ads.googleads.v5.resources.BiddingStrategy._id',
      index=1, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_name', full_name='google.ads.googleads.v5.resources.BiddingStrategy._name',
      index=2, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_effective_currency_code', full_name='google.ads.googleads.v5.resources.BiddingStrategy._effective_currency_code',
      index=3, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_campaign_count', full_name='google.ads.googleads.v5.resources.BiddingStrategy._campaign_count',
      index=4, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_non_removed_campaign_count', full_name='google.ads.googleads.v5.resources.BiddingStrategy._non_removed_campaign_count',
      index=5, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
  ],
  serialized_start=376,
  serialized_end=1396,
)

_BIDDINGSTRATEGY.fields_by_name['status'].enum_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__status__pb2._BIDDINGSTRATEGYSTATUSENUM_BIDDINGSTRATEGYSTATUS
_BIDDINGSTRATEGY.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_bidding__strategy__type__pb2._BIDDINGSTRATEGYTYPEENUM_BIDDINGSTRATEGYTYPE
_BIDDINGSTRATEGY.fields_by_name['enhanced_cpc'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2._ENHANCEDCPC
_BIDDINGSTRATEGY.fields_by_name['target_cpa'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2._TARGETCPA
_BIDDINGSTRATEGY.fields_by_name['target_impression_share'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2._TARGETIMPRESSIONSHARE
_BIDDINGSTRATEGY.fields_by_name['target_roas'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2._TARGETROAS
_BIDDINGSTRATEGY.fields_by_name['target_spend'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_bidding__pb2._TARGETSPEND
_BIDDINGSTRATEGY.oneofs_by_name['scheme'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['enhanced_cpc'])
_BIDDINGSTRATEGY.fields_by_name['enhanced_cpc'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['scheme']
_BIDDINGSTRATEGY.oneofs_by_name['scheme'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['target_cpa'])
_BIDDINGSTRATEGY.fields_by_name['target_cpa'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['scheme']
_BIDDINGSTRATEGY.oneofs_by_name['scheme'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['target_impression_share'])
_BIDDINGSTRATEGY.fields_by_name['target_impression_share'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['scheme']
_BIDDINGSTRATEGY.oneofs_by_name['scheme'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['target_roas'])
_BIDDINGSTRATEGY.fields_by_name['target_roas'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['scheme']
_BIDDINGSTRATEGY.oneofs_by_name['scheme'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['target_spend'])
_BIDDINGSTRATEGY.fields_by_name['target_spend'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['scheme']
_BIDDINGSTRATEGY.oneofs_by_name['_id'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['id'])
_BIDDINGSTRATEGY.fields_by_name['id'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['_id']
_BIDDINGSTRATEGY.oneofs_by_name['_name'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['name'])
_BIDDINGSTRATEGY.fields_by_name['name'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['_name']
_BIDDINGSTRATEGY.oneofs_by_name['_effective_currency_code'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['effective_currency_code'])
_BIDDINGSTRATEGY.fields_by_name['effective_currency_code'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['_effective_currency_code']
_BIDDINGSTRATEGY.oneofs_by_name['_campaign_count'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['campaign_count'])
_BIDDINGSTRATEGY.fields_by_name['campaign_count'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['_campaign_count']
_BIDDINGSTRATEGY.oneofs_by_name['_non_removed_campaign_count'].fields.append(
  _BIDDINGSTRATEGY.fields_by_name['non_removed_campaign_count'])
_BIDDINGSTRATEGY.fields_by_name['non_removed_campaign_count'].containing_oneof = _BIDDINGSTRATEGY.oneofs_by_name['_non_removed_campaign_count']
DESCRIPTOR.message_types_by_name['BiddingStrategy'] = _BIDDINGSTRATEGY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

BiddingStrategy = _reflection.GeneratedProtocolMessageType('BiddingStrategy', (_message.Message,), {
  'DESCRIPTOR' : _BIDDINGSTRATEGY,
  '__module__' : 'google.ads.googleads_v5.proto.resources.bidding_strategy_pb2'
  ,
  '__doc__': """A bidding strategy.

  Attributes:
      resource_name:
          Immutable. The resource name of the bidding strategy. Bidding
          strategy resource names have the form: ``customers/{customer_
          id}/biddingStrategies/{bidding_strategy_id}``
      id:
          Output only. The ID of the bidding strategy.
      name:
          The name of the bidding strategy. All bidding strategies
          within an account must be named distinctly. The length of
          this string should be between 1 and 255, inclusive, in UTF-8
          bytes, (trimmed).
      status:
          Output only. The status of the bidding strategy. This field
          is read-only.
      type:
          Output only. The type of the bidding strategy. Create a
          bidding strategy by setting the bidding scheme. This field is
          read-only.
      effective_currency_code:
          Output only. The currency used by the bidding strategy (ISO
          4217 three-letter code). For bidding strategies in manager
          customers, this is the currency set by the advertiser when
          creating the strategy. For serving customers, this is the
          customer's currency\_code. Bidding strategy metrics are
          reported in this currency. This field is read-only.
      campaign_count:
          Output only. The number of campaigns attached to this bidding
          strategy. This field is read-only.
      non_removed_campaign_count:
          Output only. The number of non-removed campaigns attached to
          this bidding strategy. This field is read-only.
      scheme:
          The bidding scheme. Only one can be set.
      enhanced_cpc:
          A bidding strategy that raises bids for clicks that seem more
          likely to lead to a conversion and lowers them for clicks
          where they seem less likely.
      target_cpa:
          A bidding strategy that sets bids to help get as many
          conversions as possible at the target cost-per-acquisition
          (CPA) you set.
      target_impression_share:
          A bidding strategy that automatically optimizes towards a
          desired percentage of impressions.
      target_roas:
          A bidding strategy that helps you maximize revenue while
          averaging a specific target Return On Ad Spend (ROAS).
      target_spend:
          A bid strategy that sets your bids to help get as many clicks
          as possible within your budget.
  """,
  # @@protoc_insertion_point(class_scope:google.ads.googleads.v5.resources.BiddingStrategy)
  })
_sym_db.RegisterMessage(BiddingStrategy)


DESCRIPTOR._options = None
_BIDDINGSTRATEGY.fields_by_name['resource_name']._options = None
_BIDDINGSTRATEGY.fields_by_name['id']._options = None
_BIDDINGSTRATEGY.fields_by_name['status']._options = None
_BIDDINGSTRATEGY.fields_by_name['type']._options = None
_BIDDINGSTRATEGY.fields_by_name['effective_currency_code']._options = None
_BIDDINGSTRATEGY.fields_by_name['campaign_count']._options = None
_BIDDINGSTRATEGY.fields_by_name['non_removed_campaign_count']._options = None
# @@protoc_insertion_point(module_scope)
from __future__ import absolute_import

import env
import envplus.pathfile
import envplus.helpers

VERSION_TUPLE = (0, 0, 1)
VERSION = ".".join(map(str, VERSION_TUPLE))
# -*- coding: utf-8 -*-
'''

#############################
Acme::MetaSyntactic::pause_id
#############################

****
NAME
****

Acme::MetaSyntactic::pause_id - The PAUSE id theme

***********
DESCRIPTION
***********

This is the list of all PAUSE (Perl Authors Upload SErver) user id
(slightly transmogrified to be valid Perl identifiers).

This list used to require frequent updates, until the data was read
from CPAN local configuration. \ ``;-)``\ 

***********
CONTRIBUTOR
***********

Philippe Bruhat (BooK).

*******
CHANGES
*******

- \* 2013-03-25 - v1.013
  Fixed issues that CPAN was still causing (mostly under Win32).
  Published in Acme-MetaSyntactic-Themes version 1.031.

- \* 2013-02-18 - v1.012
  Made even more resistant to CPAN errors. In case of such errors, the
  module will be quiet, and slow to start.
  Published in Acme-MetaSyntactic-Themes version 1.030.

- \* 2013-01-14 - v1.011
  Made more resistant to CPAN errors.
  Published in Acme-MetaSyntactic-Themes version 1.029.

- \* 2012-11-12 - v1.010
  Updated to use CPAN.pm to obtain the list of PAUSE ID. If CPAN.pm is not
  configured it will fallback to the hardcoded list from version 1.009.
  Published in Acme-MetaSyntactic-Themes version 1.027.

- \* 2012-11-05 - v1.009
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.026.

- \* 2012-10-29 - v1.008
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.025.

- \* 2012-10-22 - v1.007
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.024.

- \* 2012-10-01 - v1.006
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.021.

- \* 2012-09-10 - v1.005
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.018.

- \* 2012-08-27 - v1.004
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.016.

- \* 2012-07-23 - v1.003
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.011.

- \* 2012-06-25 - v1.002
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.007.

- \* 2012-05-28 - v1.001
  Updated from the source web site in Acme-MetaSyntactic-Themes version 1.003.

- \* 2012-05-07 - v1.000
  Updated with additions since November 2006, and received its own version
  number in Acme-MetaSyntactic-Themes version 1.000.

- \* 2006-11-06
  Updated from the source web site in Acme-MetaSyntactic version 0.99.

- \* 2006-10-30
  Updated from the source web site in Acme-MetaSyntactic version 0.98.

- \* 2006-10-23
  Updated from the source web site in Acme-MetaSyntactic version 0.97.

- \* 2006-10-16
  Updated from the source web site in Acme-MetaSyntactic version 0.96.

- \* 2006-10-09
  Updated from the source web site in Acme-MetaSyntactic version 0.95.

- \* 2006-10-02
  Updated from the source web site in Acme-MetaSyntactic version 0.94.

- \* 2006-09-25
  Updated from the source web site in Acme-MetaSyntactic version 0.93.

- \* 2006-09-18
  Updated from the source web site in Acme-MetaSyntactic version 0.92.

- \* 2006-09-11
  Updated from the source web site in Acme-MetaSyntactic version 0.91.

- \* 2006-09-04
  Updated from the source web site in Acme-MetaSyntactic version 0.90.

- \* 2006-08-28
  Introduced in Acme-MetaSyntactic version 0.89.

********
SEE ALSO
********

`Acme::MetaSyntactic <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic&mode=module>`_,
`Acme::MetaSyntactic::List <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic%3a%3aList&mode=module>`_.
''' name = 'pause_id' DATA = '''\ # names AADLER AAKD AAKHTER AALLAN AAMEND AANANDJHA AANKHEN AANOAA AANZLOVAR AAR AARDEN AARDO AARE AARON AARONJJ AARONRP AARONSCA AASPNAS AASSAD AAU AAYARS ABALAMA ABARCLAY ABCABC ABCDEFGH ABCXYZ ABE ABEL ABELEW ABELTJE ABEND ABERGMAN ABERNDT ABEROHAM ABH ABHAS ABHIDHAR ABHIISNOT ABHINAY ABHISHEK ABHISINGH ABIGAIL ABLAKELY ABLUM ABOL ABOUTOV ABRAXXA ABREY ABREZINS ABROSNAN ABS ABUALIGA ABUI ABURKE ABURLISON ABURS ABW ABYPAUL ACAJOU ACALPINI ACAMARI ACANFORA ACARVALHO ACB ACCA ACCARDO ACDALTON ACE ACESAVER ACESTER ACFEREN ACG ACH ACHEN ACHILLES ACHIMRU ACHOUNG ACID ACIDDEATH ACIDLEMON ACK ACKI ACMCMEN ACOBURN ACOTIE ACPGUEDES ACRABB ACRAIG ACRAWFORD ACRUSSELL ACTUAL ADALTON ADAM ADAMBA ADAMBACK ADAMC ADAMCIK ADAMGENT ADAMK ADAMOWSKI ADAMSJ ADAMSON ADAMZ ADAPAY ADARSHTP ADAVIES ADCHEN ADDI ADDINBOX ADDW ADEO ADEOLA ADESC ADESINGH ADIE ADIPALMA ADIRAJ ADITTES ADITYA ADITYADEV ADIZERE ADODGE ADONG ADOPTME ADOROSH ADRABI ADRIAN ADRIANA ADRIANWIT ADRIS ADROFFNER ADTIM ADUITSIS ADULAU ADY ADYE ADZZ AECOOPER AEF AEISNER AELAND AELDER AELFAKIH AELSE AEPAGE AERDAN AERO AERTS AESOP AESPEN AFAN AFARIS AFARIZWAN AFERBER AFERRER AFF AFFC AFIACRE AFIELDS AFINDLAY AFL AFLOTT AFN AFOLEY AFOXSON AFRIKA AFRYER AGALLI AGARAN AGATORANO AGATT AGATTI AGENT AGENTML AGF AGHULOUM AGIERTH AGJ AGNISLAV AGOE AGOLOMSH AGORDON AGORMAN AGREW AGRICOCB AGRIMME AGROLMS AGRUNDMA AGUIMARA AGUL AHALL AHAMM AHARONI AHARRISON AHCHORN AHERNIT AHICOX AHIGUCHI AHIGUTI AHIROSE AHMAD AHORNBY AHOSEY AHOYING AIDAN AIMASS AIMBERT AINAME AIRWAVE AIVATURI AIZVORSKI AJACKSON AJAEKEL AJAYRES AJCT AJDELORE AJDIXON AJFRY AJGB AJGOUGH AJKALD AJOHNSON AJOLMA AJPAGE AJPEACOCK AJUNG AJWANS AJWOOD AKALINUX AKAPLAN AKARGER AKHILA AKHOBOV AKHUETTEL AKIHITO AKIMOV AKIRA AKISSANE AKIYM AKMISHRA AKOBA AKOLB AKR AKRON AKS AKSHAY AKSTE ALAMAZ ALANC ALANCITT ALANSTEP ALANSZ ALASKA ALASLAVIC ALBERS ALBOVA ALD ALDICKEY ALDOBRANT ALEC ALECH ALECS ALEENA ALET ALEVENSO ALEVIN ALEX ALEXB ALEXBIO ALEXBYK ALEXD ALEXE ALEXEVIL ALEXEYT ALEXF ALEXIOB ALEXK ALEXLOMAS ALEXM ALEXMASS ALEXMC ALEXMV ALEXP ALEXPRECH ALEXS ALEXSEA ALEXT ALEXX ALFALPHA ALFIE ALFILLE ALFO ALFRED ALFREDO ALFW ALGDR ALGER ALGERNON ALHA ALI ALIAN ALIC ALIN ALINGNAU ALINKE ALIRAM ALIRZO ALISTAIRC ALIZTA ALJOSCHA ALKNAFF ALKOR ALLEN ALLENCHEN ALLENDAY ALLENS ALLOLEX ALNEWKIRK ALPHAZULU ALPO ALPOW ALSCH ALT ALTITUDE ALTREUS ALUCAS ALVAR ALVAROL ALX ALXPLDEV ALYX AMACKEY AMAHABAL AMALTSEV AMANOKHIN AMAR AMARISAN AMARQUIS AMASHANOV AMBRUS AMBS AMCN AMD AMEDINA AMERZKY AMGARLAND AMIAS AMICHAUER AMIMOTO AMIRF AMIRI AMIRITE AMITSIDES AML AMLING AMMMA AMNESIAC AMOLLOY AMONARCH AMONSEN AMONTERO AMOORE AMORETTE AMOSS AMOXOUS AMR AMRUTA AMS AMUGNOLO AMURREN AMV AMW ANAGHAKK ANAK ANALL ANALOG ANAND ANARION ANATRA ANAZAWA ANBON ANBROWN ANBU ANDALE ANDARA ANDERS ANDI ANDK ANDMEN ANDOR ANDOT ANDOZER ANDRE ANDREFS ANDREGNEX ANDREI ANDREIN ANDREJ ANDREM ANDREMAR ANDREPO ANDREW ANDREWC ANDREWD ANDREWF ANDREWHO ANDREWIK ANDREWJSI ANDREWN ANDREWO ANDREWS ANDREY ANDREYR ANDRI ANDY ANDYA ANDYB ANDYD ANDYDUNC ANDYGLEW ANDYGROM ANDYJ ANDYP ANDYPUR ANDYW ANELSON ANFI ANGERSTEI ANGUS ANGUSLEES ANGUYEN ANH ANIAS ANIMATIFY ANIMATOR ANIO ANIRVAN ANK ANKITJ ANKITS ANNADURAI ANNELI ANNO ANNYRAUL ANONWB ANOUAR ANS ANSGAR ANTHONY ANTHONYU ANTONESCU ANTONFIN ANTONIKO ANTONY ANTRO ANUNES ANURADHA ANWILL ANYDOT ANZUHAN AOCH AOCINAR AORR AOTERRA APALADE APARNA APATSCHE APATWA APEIRON APERROTT APERSAUD APH APHILIPP APILOS APKELLY APLA APLEINER APLONIS APML APNIC APOCAL APOLLO APPEL APRIME APRISCAK 
APTHORPE APTITUZ AQL AQUACADE AQUILINA AQUMSIEH AQUTIV ARAAB ARACKHAEN ARAK ARANDAL ARAVIND ARAVINDDH ARAVINDR ARC ARCANEZ ARCHAELUS ARCHFOOL ARCNON ARCOLF ARDAN AREGGIORI AREIBENS ARENSB ARFREITAS ARGGGH ARGRATH ARICH ARIE ARIELS ARIF ARILOU ARISAWA ARISI ARISTOTLE ARJAY ARJEN ARJONES ARJUNS ARKAGHOSH ARLIN ARMAND ARMSTD ARMSTRONG ARNDT ARNE ARNESOND ARNHEMCR ARNIE ARNSHOLT ARODLAND ARODRIGZ AROTH ARPADF ARPI ARSHAD ARSML ARTHAS ARTHUR ARTICLE ARTO ARTURAZ ARTY ARUL ARUN ARUNBEAR ARUNDEV ARUTEIDO ARVIEGAS ARVINDPR ARYEH ASA ASACKETT ASAKURA ASANDSTRM ASARIAN ASARIH ASAVIGE ASB ASCENT ASCHAMP ASCHERBAU ASCHIG ASCOPE ASDEEVA ASDF ASED ASEIPP ASERBIN ASG ASGEIRN ASH ASHER ASHERROD ASHGENE ASHISHKU ASHISHM ASHLEY ASHLEYP ASHLEYW ASHOOOO ASHTED ASHUTOSH ASHWIN ASIMJALIS ASIMON ASK ASKADNA ASKSH ASLETT ASMOLECK ASNMTAP ASOKO ASOKOLOV ASP ASPA ASPEER ASPIDER ASPIERS ASPINDLER ASPINELLI ASPRILLIA ASSENTCH ASTAINES ASTANLEY ASTARA ASTEAM ASTEI ASTEWART ASTILLER ASTLETRON ASTRB ASTUBBS ASVBR ASW ATANATION ATCROFT ATG ATH ATHOMAS ATHOMASON ATIFK ATISTLER ATODOROV ATOM ATOOMIC ATOURBIN ATRAK ATRICKETT ATRIPPS ATRODO ATTILA ATZ AUBERTG AUBREYJA AUBRYF AUDREYT AUFFLICK AUGER AUGGY AUGIE AUGUSTO AUKJAN AULUSOY AURUM AUSCHUTZ AUSSQUARE AUTRIJUS AVAJADI AVALLABH AVAR AVARUS AVATAR AVB AVCPAN AVENJ AVIAN AVIF AVIKAK AVINASH AVITARNET AVIVSIM AVKHOZOV AVOGE AVOROBEY AVP AVUSEROW AWA AWBAUM AWC AWENDT AWESTHOLM AWGIBBS AWHITE AWIN AWKAY AWKWARD AWNCORP AWOHLD AWOOD AWRIGLEY AWS AWWAIID AXANTHOS AXEMAN AXIAK AXS AYANOKOUZ AYATES AYOUNG AYRNIEU AZAWAWI AZED AZEMGI AZINGER AZJADFTRE AZOLIA AZS AZUL AZZID BABAR BABF BABKIN BACEK BACHMANN BADAJOZ BADEN BADGERSRC BADKINS BADO BAERGAJ BAF BAGARRE BAGET BAGGEND BAGUINGUA BAHOOTYPR BAHUGHES BAK BALAJIRAM BALDOWN BALDR BALDUR BALDWINR BALESHI BALINHA BALTIC BAM BAMBR BANB BANDERSON BANG BANNAN BANNER BANTOWN BARABAS BARBACHAN BARBEE BARBIE BAREFOOT BARGLE BARNEY BARNINGER BARRACODE BARRYP BARRYPJ BARTAZ BARTENDER BARTL BARTLEY BARTLOWA BARTOLIN BARTS BARUCH BAS BASANTK BASELINE BASENJI BASHI BASHINSKY BASHLOV BASKAR BASLOCK BASTIAN BAT BATKINS BATKINSON BATMAN BATZ BAUERB BAUERM BAYASHI BAYNAA BAYSIDE BBACKER BBAXTER BBB BBC BBCIFL BBCPKENT BBCSIMONF BBEAUSEJ BBEELEY BBELDERB BBENNETT BBIKER BBIMBER BBIRTH BBKR BBOKSA BBPATEL BBQSAULS BBRYANT BBUM BBURCH BBUSS BBYRD BCARMER BCARROLL BCBAILEY BCDE BCEVC BCH BCHOATE BCLAWSIE BCMB BCN BCO BCOFFMAN BCONRY BCORNETT BCOSELL BCROWELL BCT BDA BDAGLISH BDARRAH BDD BDFOY BDGREGG BDLILLEY BDONLAN BDOWLING BDR BDRACO BDUGGAN BDULFER BDWELLE BDWOR BEADSLAND BEANS BEANZ BEARSHEN BEAST BEASTVOL BEATNIK BEAU BECKHENG BECS BEDIVERE BEECEE BEGEMOTV BEHANW BEHROOZI BEJ BELCHAM BELDEN BELFRY BEMACE BEN BENAZZO BENBOOTH BENC BENH BENHMM BENIZI BENJAMIN BENL BENLAVERY BENLEKIN BENLEVER BENLI BENLUND BENMEINL BENN BENNIE BENNING BENPAVON BENRUSSEL BENSOMMER BENT BENTIS BENW BEPPU BERA BERGSMAT BERIKV BERLE BERNARD BEROV BERRY BERT BESSARABV BETH BETTELLI BETUL BEVANS BEVO BEWEGEN BEYONDME BFAGA BFAIST BFG BFITCH BFITZ BFOZ BFREE BGARBER BGAY BGEDIK BGEHRICH BGI BGIBBY BGILLS BGILMORE BGINGERY BGPMON BGRAY BGRIMM BGUARRACI BHALLISSY BHANN BHARB BHECKEL BHEISIG BHERWEYER BHILDRED BHILTON BHM BHODGES BHODGINS BHOLSTEN BHOLZMAN BHORAN BHUGHES BHUMPH BHUNTER BHUVAN BIAFRA BIBLIBRE BIESZCZAD BIGBUG BIGGLES BIGHIL BIGJ BIGJOE BIGNOSE BIGPRESH BIGREDS BIJUA BIKER BILBO BILIARDS BILL BILLAUER BILLC BILLH BILLHAILS BILLKAMP BILLN BILLW BILLWALZ BINGOS BINKLEY BINOJNAIR BINOJOHN BIOJETONE BIOPERLML BIOPREM BIRKETT BIRNEY BIRRIE BISHOYH BISSCUITT 
BITJAM BITMAN BITS BITTER BIWILLIA BJAKUBSKI BJDEAN BJEPS BJKUIT BJM BJOERN BJOERNW BJORNARDO BJR BJT BJTURNER BJWHEELER BKA BKANEY BKATZUNG BKB BKCHAPIN BKENDI BKING BKLAAS BKLANG BKNIGHT BKOLERA BKRAMER BKRON BKUHN BKW BLABES BLABOS BLACKAVAR BLACKHAWK BLACKJ BLACKSTAR BLACKWIND BLACRA BLADE BLAIR BLAIZER BLAKEW BLAM BLANTREWI BLASKOV BLAST BLBLACK BLCKSMTH BLGL BLHOTSKY BLILBURNE BLINCHIK BLM BLMARKET BLNOARD BLOGALEX BLOM BLOONIX BLOPEZ BLSRM BLUECURIO BLUEFEET BLUEGUARD BLUELINES BLUET BLUHM BLWOOD BLX BLYNCH BMAMES BMARCOTTE BMARTIN BMAVT BMAYNARD BMC BMCG BMEDWAR BMEEKINGS BMIDD BMILLARES BMILLER BMILLETT BMILLHAM BMORGAN BMORROW BMOYLES BMS BMULLER BMXKRIS BNEGRAO BOADLER BOAG BOATES BOB BOBERNST BOBF BOBG BOBGOLD BOBKARE BOBMATH BOBN BOBNET BOBO BOBP BOBPP BOBSIDE BOBTFISH BOBW BOCAGE BOCHAROV BOCONNOR BODHI BODIN BOESCH BOEWE BOFTX BOGA BOGDAN BOGDANOV BOGDRO BOKUTIN BOLAV BOLDRA BOLEK BOLTE BOMB BOMBARELY BONANZA BONAR BONDFOG BONDURRI BONES BONFOG BONTI BOOK BOOST BOR BORIS BORISD BORISZ BORLIK BORONK BORUP BORWICK BOSBORNE BOSTON BOSU BOTANICA BOUBAKER BOUMENOT BOURDUX BOWEN BOWMANBS BOWTIE BOXPHERE BOXZOU BOYSIE BOZO BOZZIO BPANNIER BPATEL BPAULSEN BPEDERSE BPETERING BPETH BPGN BPHILLIPS BPMEDLEY BPORTER BPOSTLE BPOWERS BPRUDENT BPSCHUCK BQUINN BRAAM BRACETA BRAD BRADAPP BRADC BRADCATER BRADD BRADFITZ BRADH BRADMC BRAINBUZ BRAINTREE BRAMBLE BRANDON BRANDONC BRANSCHA BRASIL BRATNER BRAVEL BREEGSTER BREMNERB BRENDAN BRENNEN BRENO BRENT BRENTDAX BREQUESEN BRESER BRETT BRG BRIAC BRIAN BRIAND BRIANG BRIANKNOX BRIANL BRIANNG BRIANPUGH BRIANS BRIANSKI BRIANSP BRICAS BRICE BRICKER BRINZER BROCK BROCKMD BROCSEIB BROERSE BROMAGE BRONDSEM BRONG BRONSON BRONTO BROOM BROQ BROWSERUK BROZZIS BRTEAM BRTHOMAS BRUC BRUCEK BRUCER BRUGNARA BRUJAH BRUMLEVE BRUMMETT BRUNKHI BRUNO BRUNODIAZ BRUNORC BRUNOV BRX BRYAN BRYCE BRYONICS BRYURECKA BSAGO BSB BSCHMAL BSCHMAU BSCHMITT BSCHMITZ BSDF BSDZ BSHADE BSHANKS BSHENRY BSHOW BSI BSIDES BSINGER BSM BSMITH BSNOOP BSORAHAN BSTILWELL BSTPIERRE BSTURNER BSUGARS BSUNG BTAMILIO BTHOMAS BTIETZ BTMCINNES BTP BTRIBIT BTROTT BTUCKER BUBAFLUB BUCCIA BUCKSTER BUCKY BUDDHA BUDNEY BUDZYNSK BUGGYD BUGONI BUGOV BULB BULKDD BUMBY BUNDACIA BUNTAR BURAK BUREADO BURGERS BURKEY BURL BURNASH BURNERSK BUTILW BUU BUZZ BVA BVIERRA BVINNERD BVINSON BVR BWAGNER BWARDEN BWARFIELD BWATSON BWEBB BWEILER BWIGFIELD BWILLIAM BWISTI BWIUBS BWIZARD BWKECK BWMCADAMS BWSMITH BXBD BYOURTH BYRNE BYRON BYTBOX BYTEROCK BYTETOOLS BZAJAC CAADAMS CABUTLER CACHASOFT CADAMS CADAVIS CADE CADEN CAE CAESCHLI CAFFIEND CAGAO CAIDA CAIDAPERL CAILLTE CAIO CAJ CAJOSS CALDERMAN CALDRIN CALEB CALID CALLAHAN CALVINM CAM CAMELCASE CAMERB CAMERON CAMSTUART CANADA CANECA CANGELL CANID CANONB CANPANIEL CAO CAPOEIRAB CAPTTOFU CARCUS CARL CARLADLER CARLOSFB CARLOSP CARLVINCE CARMAN CARMARK CARNIL CARPENTER CARPKITTY CARROLL CARSON CARSWELL CARTER CARWASH CASAO CASEY CASIANO CASSJ CASTLE CASTOR CASWK CAT CATALIN CATERHAM CATFU CATONE CAUGUSTIN CAUSTIN CAVAC CAVANAUGH CAVASQUEZ CAWHORNPD CAWKA CAZADOR CBAIL CBAKER CBALZ CBARKEY CBARRATT CBERNING CBERRY CBHAMLIN CBHICKS CBINX CBIONDO CBOURNE CBOUVI CBRADFORD CBRANDT CBRINK CBROOKS CBTILDEN CBUREL CCAVNOR CCCP CCELSO CCHAMBER CCHITTLE CCLARK CCLOUTIER CCMELO CCOBB CCOLBOURN CCOLLINS CCPRO CCWALLACE CCWF CCZ CDARROCH CDAWSON CDBROWN CDE CDENT CDFH CDH CDIDEROT CDOLAN CDOMOVOY CDONLEY CDOT CDRAKE CDUNN CDYBED CDZWM CEBJYRE CECALA CEDWARDS CEESHEK CELEBITH CELOGEEK CERATITES CERHA CERNEY CEVANS CEZHANG CFABER CFAERBER CFALLEN CFEDDE CFILIPPE CFORPC CFOUTS CFRANKS CFRETER 
CFUHRMAN CGANESAN CGARCIA CGAUTAM CGERNHAR CGILMORE CGIP CGLASS CGLEE CGRADY CGRAU CGRAY CGUINE CHADMJOHN CHAGN CHAITANYA CHALL CHAMAS CHANG_LIU CHANKEY CHANSEN CHAOS CHAOSLAW CHAOTIC CHARADEUR CHARDEN CHARDIN CHAROVARK CHARTGRP CHAS CHASONHTX CHATEAU CHCHCH CHE CHEAKO CHELLAPPA CHEN CHENGANG CHENRYN CHENXIN CHENXUAN CHENYR CHESHIRE CHESNOKOV CHESSKIT CHESTER CHETANG CHEUNG CHEWTOY CHGEUER CHGOETZE CHI CHIA CHIBA CHICKS CHIGGINS CHILTS CHINTANA CHIPMUNK CHIPS CHIPSOID CHIPT CHIRAG CHIRAGMAT CHISEL CHITOIUP CHLIGE CHM CHOCOLATE CHOGAN CHOLET CHOLLOWAY CHOLT CHORNY CHOROBA CHOUPT CHOWARTH CHRIS CHRISA CHRISARN CHRISC CHRISCHU CHRISFR CHRISJ CHRISJCRA CHRISM CHRISN CHRISRD CHRISS CHRISTIAN CHRISTJ CHRISV CHRISY CHRMASTO CHRMUE CHROMATIC CHRWIN CHRWOLF CHSANCH CHSTROSS CHTHORMAN CHTTRAX CHUHN CHUMP CHUNSHENG CHUNZI CHURCH CHYLLI CIA CIMIL CINDY CINE CINXGLER CIROQUE CITA CITIZEN CITYPIG CJCOLEMAN CJCOLLIER CJFIELDS CJK CJM CJOHNSTON CJONES CJS CJTAYLOR CJUAN CJUKUO CKAISER CKANDOTH CKARG CKEITH CKERNER CKFULTON CKIMBER CKJ CKONG CKRAS CKRUSE CKUELKER CLACO CLADI CLAESJAC CLAIRD CLAIRVY CLAMB CLANE CLARKD CLAUSD CLAUSK CLAW CLB CLBECKER CLCL CLEACH CLEISHMAN CLEMBURG CLEMENSG CLEONTY CLIFF CLIFFORD CLIFFORDJ CLINT CLINTDW CLINTP CLIVE CLKAO CLMS CLMSYJEDI CLOK CLONE CLOOS CLOTHO CLOWN CLP CLSCOTT CLSN CLSUNG CLUNIS CLWOLFE CLZ CMANLEY CMANTITO CMASON CMAUJEAN CMAUSSAN CMBARON CMC CMCKAY CMCOSSE CMERTZ CMEYER CMILLS CML CMLH CMO CMOEWES CMONSON CMOORE CMORRIS CMP CMS CMUNGALL CMYERS CNANDOR CNANGEL CNATION CNG CNHACKTNT CNIGHS CNLAVY CNRDEEIO CNX CODEAH CODEBLACK CODEBREAK CODECHILD CODEFISH CODEHELP CODEPOET CODEREPOS CODESCOUT CODYP COELA COG COGENT COHTAN COKE COLEMAN COLEMINOR COLINCCI COLINCYR COLINFINE COLINK COLINM COLINO COLINSC COLLIN COLLINS COLMODE COMAND COMPLX COMRADE COMSKIL COMSULTIA CONKLIN CONNECTED CONO CONOR CONTEB CONVERTER COOK COOLEDIT COOLMEN COOLS COOLUNIX COOPERCL COPE COPEML COPPERLY CORDATA COREY CORION CORKBALL CORLETTK CORLISS CORMAC CORMANDER CORNELIS CORNELIUS CORRIS CORTI CORUSCATE CORWIN CORYB CORYG CORYKING COSIMO COSMICNET COSMIN COSTELA COTTO COTTON COUDOT COUNTSKM COUNTZERO COWBOY COWDAWG COWENS COWFISH CPANEL CPANIC CPANPERL CPATIL CPB CPELLERIN CPHIL CPIGGOTT CPJL CPK CPKOIS CPLEE CPORTMAN CPWILLMOT CRABTREE CRACKCELL CRACKEL CRAFFI CRAFTWORK CRAIC CRAIG CRAIGK CRAIGM CRAIHA CRAKRJACK CRAMER CRAMIREZ CRAZYDJ CREAMYG CREIN CRENZ CRI CRIACOW CRISB CRITICRE CRITTER CRLYJEP CROMEDOME CROMIS CRONUS CROSSWIRE CROWJA CROX CRUNCHIE CRUSOE CRUX CRZEDPSYC CSA CSALAT CSANDEEP CSARVA CSCHAEFER CSCHORN CSCHWENZ CSCM CSEATON CSELLEG CSELT CSIDE CSJEWELL CSMITH CSOE CSPENCER CSR CSRIDGWAY CSTEPHENE CTAPUK CTBROWN CTCSHAW CTDEAN CTHEUN CTHOM CTHOR CTI CTILMES CTIMMER CTLARSEN CTREPTOW CTRLSOFT CTRONDLP CTWETEN CUACK CUB CUBABIT CUBRID CUNNINGT CURTEVANS CURTIS CURTISF CURTLG CUSTARD CUTHBERT CUTTER CVEGA CVENTERS CVICENTE CVONROES CWELCH CWEST CWEVERITT CWH CWHITE CWIMMER CWINTERS CWRL CWW CXL CXREG CYBAEA CYBER CYBERMATT CYBERSPAC CYCLES CYGA CYING CYK CYLFHX CYMPAK CYPLP CZBSD DAAN DACONTI DADAMK DADERUS DADIS DAEMON DAGOBERT DAGOLDEN DAHILLMA DAHLIA DAHUT DAIBA DAINIX DAIYU DAIZI DAKKAR DALCAZAR DALEAMON DALGL DALILA DALINAUM DALLEN DALTSKI DAM DAMBAR DAMEO DAMI DAMJANP DAMO DAMOG DAMS DAN DANAJ DANAL DANB DANBERR DANBOL DANBOO DANBORN DANDV DANFRU DANHGN DANIEL DANIELM DANIELP DANIELR DANIELTWC DANIL DANISH DANJOU DANK DANKOGAI DANMOORE DANMQ DANMURRAY DANN DANNY DANOCPANO DANPEDER DANSCH DANSMITH DANT DANW DAOT DAOTOAD DAP DAPATRICK DAPI DAPM DARBYR DARCH DAREOLA 
DARIN DARKBLUE DARKNOS DARKSMO DARKTIGER DARNOLD DAROLD DARRELV DARREN DARTH DASHI DASHJR DATA DATABUILT DATCHLEY DATOPDOG DAUNAY DAV DAVAZ DAVE DAVEBAIRD DAVECROSS DAVEDAY DAVEGMX DAVEHODG DAVEHOLL DAVEL DAVEM DAVEO DAVEOLA DAVEROTH DAVEWEBB DAVEWOOD DAVID DAVIDB DAVIDC DAVIDCYL DAVIDE DAVIDEBE DAVIDG DAVIDH DAVIDHJ DAVIDIAM DAVIDIUS DAVIDM DAVIDNICO DAVIDO DAVIDOV DAVIDP DAVIDRA DAVIDROSS DAVIDRW DAVIDSLV DAVIEIRA DAVINCHI DAVOD DAVOOD DAVVID DAWOOD DAXELROD DAXIM DAYANUNE DAZJORZ DBADRAK DBAIRD DBAKER DBALMER DBARTH DBARTLE DBAURAIN DBB DBCM DBDPG DBEAZLEY DBELL DBEN DBENNETT DBIALAC DBIML DBLACKA DBLASZYK DBMAKER DBMOLESTA DBONNER DBOTHAM DBOURGET DBP DBR DBRADFORD DBRESH DBRIAN DBROBINS DBROOKS DBROWNING DBRUNTON DBUCHMAN DBURDICK DBURKE DBWELLS DCANTRELL DCARDWELL DCARRAWAY DCARRIGAN DCARVELL DCASTRO DCEPML DCFLECK DCHESTER DCHRIS DCL DCLARKE DCLEE DCLINTON DCMERTENS DCOLBURN DCOLLINS DCONWAY DCOPPIT DCPETROV DCRAWFORD DCREAGER DCS DCT DDAY DDB DDEBRITO DDEIMEKE DDF DDICK DDOKTER DDOL DDOYLE DDRIBIN DDS DDUMONT DDYER DEADBEEF DEADBODY DEAN DEANF DEANH DEBAJYOTI DEBASHISH DEBENSON DEBONAIR DEC DECASM DECAY DECIUS DEDMEDVED DEDRICK DEEMON DEENEWCUM DEEPAK DEEPAKG DEEPAN DEEPCREEK DEEPFRYED DEFCON DEG DEGE DEGRAW DEIAN DEL DELANEY DELANO DELAREY DELBOY DELDUCRA DELON DELPHINUS DELTA DEMERY DEMIAN DEMIURG DEMMY DENA DENIS DENKINGER DENNEY DENNISL DENNISON DENNY DENWA DENYSV DEORTH DEP DEPESZ DEPP DEPPPP DERAHUL DEREK DEREKB DEREKP DEREKW DERF DERHAAG DERMOT DERMOTH DEROBINS DESIGNATE DESIMINER DESOTO DESPAIR DESROD DESSAILLY DETI DETOX DETZ DETZOLD DEUSX DEVDOS DEVELO DEVEN DEVIS DEVMORFO DEVOGON DEVON DEVONJ DEW DEWEG DEXTER DFAN DFARALDO DFAURE DFAUX DFERREIRA DFETTER DFH DFIELDING DFORKNER DFRANKOW DFREEDMAN DFRENCH DFRETT DFROZ DGARAMOND DGAY DGEHL DGETHINGS DGINEV DGL DGMDAN DGOEHRIG DGOLD DGORLEY DGRAEVEJ DGRAHAM DGRANAU DGRAVES DGRECO DGREGORO DGREGORY DGRIS DGRIZZANT DGROVER DGRYSKI DGSCOPE DGUY DHA DHABLE DHACK DHAGAN DHAGEMAN DHAIVATP DHANISCH DHARD DHARDISON DHARRIS DHASCRUZ DHAVAL DHEINBACH DHICKLING DHINKLE DHORNE DHOSS DHOUSTON DHUDES DHUNT DIABLO DIAMON DIBERRI DICHI DIDO DIEDERICH DIEGO DIEGOK DIESWAYTO DIFRO DIGIMER DILGER DILLO DIMARTINO DIMIKREM DIMMA DIMRUB DINGZEZHU DINO DINOMITE DIOCLES DIOGEN DIONALM DIONYS DIRKDN DIRT DISHMAEL DISSENT DIST DISTLER DITTO DIVEC DIVER DIVERDI DIVIDOR DIZ DIZZY DJACOBS DJASMINE DJBECKETT DJBERG DJBURKE DJCE DJCOLLINS DJCONNEL DJCP DJCURTIS DJERIUS DJGOKU DJH DJHD DJIBEL DJK DJKERNEN DJO DJOHNSTON DJOOS DJPADZ DJR DJTELNIC DJZORT DKAMENOV DKAMHOLZ DKASAK DKASIM DKF DKG DKING DKOCH DKOFFLER DKOGAN DKRAUSE DKS DKU DKUBB DKUEBLER DKWILSON DKYGER DLA DLADNER DLAMBLEY DLAND DLANE DLECONTE DLEE DLEIGH DLIM DLINCOLN DLINK DLKING DLO DLOWE DLUGOSZ DLUNDBERG DLUX DLY DMA DMABE DMAC DMACKS DMAHONEY DMAKI DMALONE DMANURA DMARTIN DMAXWELL DMC DMCBRIDE DMCC DMCCOY DMCS DMEGG DMERRILL DMESSINA DMINUS DMITRI DMITRICDO DMITRY DMITRYNOD DMITRYSOL DMLLOYD DMLOND DMN DMO DMOR DMOW DMPETIT DMR DMUEY DMUSGR DMYTRO DNAD DNARAYAN DNEIL DNICHTER DNORTH DNS DOBER DOCG DOCTORMO DODGER DODYSW DOGAN DOGGY DOHERTY DOKADON DOKSNER DOLIVEIRA DOLMEN DOM DOMINICN DOMINIX DOMIZIO DOMM DOMO DOMQ DON DONANGEL DONATOAZ DONCKERS DONDRAKE DONEILL DONGXU DONGYI DONKS DONM DONOR DONS DONSHANK DOOM DOPACKI DORIAN DORMANDO DORNER DORWARD DOS DOSNIBBLE DOTTK DOUGB DOUGDUDE DOUGL DOUGLEITH DOUGLISH DOUGLUCE DOUGM DOUGRAY DOUGTHUG DOUGW DOUGWEBB DOUMBZH DOVIER DOWEN DOWENS DOWNHOM DOY DOZZIE DPARIS DPATES DPAVLIN DPCHRIST DPETROV DPIROTTE DPISONI DPLINGO DPMEYER DPOINTON DPOKORNY 
DPR DPRANTL DPRICE DQNEO DRAEGTUN DRAGMZ DRAGONFLY DRAGOS DRAKHOS DRAUONER DRAWNBOY DRAXIL DRBEAN DREAD DREADY DREAMG DREBOLO DREDD DREE DREIDY DREINHOLD DRENCH DREWF DREYNOLDS DRFROG DRIEUX DRIN DRINCHEV DRIPLOCK DRLARO DROBERTS DROLSKY DROOGIE DRORR DRPROLIX DRRHO DRSAARON DRSH DRSTEVE DRTECH DRULENSOT DRUOSO DRUXA DRYAJOV DRYMAN DRZIGMAN DSADINOFF DSAMZA DSANDER DSB DSBIKE DSCHAEFER DSCHOEN DSCHWEI DSE DSEWELL DSF DSHANE DSHAO DSHAP DSHEPP DSHERER DSHEROH DSHULTZ DSILVIA DSKOLL DSLEWART DSLLOYD DSNOPEK DSOBON DSOKOL DSOLIMANO DSOUFLIS DSOUZA DSPADEA DSPARLING DSPEICHER DSPENCER DSPIZZ DSTAAL DSTAHLKE DSTALDER DSTEELE DSTERLING DSTH DSTORRS DSTUART DSUGAL DSULLIVAN DTADY DTAYLOR DTHAY DTHOMAS DTHOREAU DTIBERIO DTJANEZIC DTORRES DTOWN DTREDER DTRISCHUK DTURLEY DUANEG DUBEX DUCKBILL DUCKYYAZY DUEBERB DUELAFN DUFF DUFFEE DUG DUKKIE DUL DULING DUMB DUNCAN DUNCAND DUNCS DUNNIGANJ DUPUISARN DURIST DUSHY DUTCHIE DUTCHMAN DUZY DVDPOL DVENABLE DVINCI DVKLEIN DVRYABOY DVSM DVWRIGHT DWARREN DWATSON DWC DWCARDER DWELZEL DWHEELER DWILSON DWINTERS DWMYERS DWOOD DWOODHOW DWP DWRIGHT DWUEPPEL DWUZIU DXIAO DXMAN DYACOB DYEE DYFET DYKER DYLAN DYLUNIO DYNULAX DYOUNG DYPSILON DZEMA DZHARIY DZHUO EAK EALLENIII EAM EARINO EARL EARNESON EARONESTY EASR EAST EASY EASYLIN EAU EAX EAYNG EBARLOW EBASSI EBAUDREZ EBAY EBHANSSEN EBOHLMAN EBOSRUP EBOURGET EBRAGIN EBRUNI EBUSBOOM EBUSTO EBUTLER ECALDER ECARRILLO ECARROLL ECASTILLA ECHETZ ECKARDT ECLARK ECOCODE ECTO EDALY EDAM EDAVIS EDB EDD EDECA EDENC EDF EDIPRETO EDJ EDLIU EDMONSON EDOARDO EDOUGLASS EDP EDPRATOMO EDUARDOW EDUSEGRE EDWARD EDWARDG EDWARDIII EDWARDSON EEDDINGT EEKIM EESTABROO EEWAH EFANCHE EFIFER EFISCHER EFL EFOLLEY EFRAZ EFUNNEKO EGARLAND EGERMAN EGGA EGGL EGILES EGOR EGORSH EGRANLUND EGROSS EGUPTA EGYPTUS EHARTNETT EHASTINGS EHEFFERN EHERSHEY EHOOD EHSAN EHSANE EIDOLA EIDOLON EIJABB EIKEG EILARA EIM EIMAMAGI EINHVERFR EIRO EISEN EITANS EJB EJDRS EJH EJS EKAWAS EKG EKKIP EKLERKS EKMETT EKOS EKPONK ELACOUR ELAGACHE ELBEHO ELBOW ELEMENT ELENAFRIE ELEONORA ELEPHANT ELGAARD ELH ELIAS ELIJAH ELIKA ELISA ELISHEVA ELIXIR ELIZABETH ELLIOTF ELLIOTJS ELLIOTT ELLIPSE ELLIRYC ELMAR ELMEX ELNAPPY ELOHMROW ELPENGUIN ELSE ELTONLIKA ELUBOW ELWARREN ELWINTER ELYNCH EMAHURIN EMARQUES EMARTIN EMAX EMAZEP EMBIX EMCB EMEN EMIL EMILLER EMMANUEL EMODELLER EMORGAN EMORISSE EMOTICON EMPI EMRYS EMURPHY ENACODE ENB ENCRYPTIO ENDA ENDLER ENEGAARD ENELL ENGELS ENGIN ENGMARK ENHERING ENKI ENNO ENOOR ENRYS EPEREZ EPHESUS EPIPE EPSALON ERAGONJ ERANGEL ERANTAPAA EREZ ERGOWOLF ERIAM ERIC ERICA ERICBLUE ERICH ERICJ ERICJH ERICKD ERICM ERICW ERICYE ERIK ERIKE ERIKH ERIKRAY ERIKS ERINSPICE ERMEYERS ERNESTO ERNGUI EROB EROLLINS EROSS ERRR ERSHOV ERUBER ERUBY ERWAN ERWIN ERYQ ESAMMER ESCH ESENTHIL ESH ESM ESP ESPIE ESPIZO ESPOSITO ESR ESSELENS ESSENZ ESSKAR ESTITH ESTRABD ESTRAI ESTRELOW ESUMMERS ETC ETHELIN ETHER ETHUL ETILEM ETJ ETLTCHFIG ETTINGER ETWOECMA EUG EUROBOB EVAL EVALETO EVAN EVANK EVANPRO EVANZS EVDB EVERYBODY EVI EVO EVOZON EWALKER EWATERS EWIJAYA EWILHELM EWINDISCH EXC EXIFTOOL EXOBUZZ EXODIST EXODUS EXPLORER EXUSSUM EYCK EYYJEN EZARKO EZDB EZRA EZRAKILTY FABER FABPOT FABRICODE FABRVEC FAFI FAGLIONIG FAGZAL FAHAD FAICHNEY FAIRLITE FAISAL FAIZ FALCAO FALCONE FALSE FANGLY FANGYUAN FANOS FANSIPANS FANTONSEN FANY FAPG FARBER FARHAD FARHADM FARIS FARO FARROKHI FASTBEAN FASTLY FATCAT FAULSTIC FAXIOMAN FAXMODEM FAYLAND FAYS FBONDURRI FBORSHEV FBRIERE FCECCONI FCH FCO FDALY FDESAR FDULAU FEAMSTER FEDOROV FELICITY FELIPE FELIXL FEN FENNER FERGUSOR FERNANDES FERRANCE FERREIRA FERRENCY 
FERZ FESTY FEUERBACH FFAUTEUX FGA FGALASSI FGERLACH FGLOCK FGOSLICH FHOLTRY FHOXH FIBO FIDDUR FIDUS FIGANIM FIJI FILIN FIMM FINN FIRASZ FIREBBS FIREDRAKE FIREPEAR FIRMICUS FIRZEN FIS FISCH FISH FISHBOT FITZNER FIVE FIXLER FJC FJCABRERA FJH FKALTER FKIORI FKOENEN FKOLODNY FKREIJM FKUO FLAME FLAMEY FLAVIOAM FLAVIODES FLAZAN FLECERF FLEITNER FLETCH FLH FLIGHT FLIP FLIPKIN FLIPTOP FLORA FLORIAN FLORIANBW FLOWERYSO FLUFF FLUFFY FLUFFYNJ FLUOB FLUXOID FLW FLYGOAST FMACHABEE FMARIER FMC FMENCZER FMERGES FNEVER FOBES FOD FOLIVAES FONKIE FOOBARD FOOBLE FOOCHRE FOOF FOOLFISH FOOLISH FOOP FORCERY FORMAN FORMORER FORREST FORRESTC FORS FORTY FORWARD FOTANGO FOX FOXCOOL FOXEY FPAS FPIVARI FPMIP FPORCHER FPREICH FRACOLO FRACTAL FRAG FRAJULAC FRAMM FRAMSTAG FRANC FRANCISCO FRANCKC FRANCOC FRANKC FRANKCOX FRANKG FRANKIE FRANKMAN FRANKS FRANPERL FRASE FRAZER FRAZI FRED FREDERICD FREDERICK FREDY FREEDOM FREEHAHA FREESIDE FREMAN FREQUENCY FRETT FREUND FREW FRIDA FRIED FRIEDBERG FRIEDMAN FRIEDO FRIFFIN FRIGHETTI FRODWITH FROGGS FROLLER FROLOV FROSTY FROTZ FRUIT FRUITSOFT FSAMPEDRO FSEITZ FSG FSORIANO FSPEISER FTASSIN FTI FTL FTOBIN FTW FUJIMURA FUJISAWA FUJIWARA FUKAI FUKATA FUKUDA FUKUMURA FULLERMD FUNGUS FUNKJAMES FUSINV FUTAYA FUZHONG FUZZ FVANDUN FVOX FVULTO FWIE FWILES FWNEPOMUK FWOJCIK FXFX FXN FXZUZ FYALCIN FYNTE GAAL GAAS GABB GABIRU GABOR GABRIEL GABY GAFFER GAFFIE GAGGINO GAGNONM GAISSMAI GALAND GALGIER GALILEO GAM GAMACHE GAMBURGER GAMEHAWK GAND GANGLION GAOCHONG GAOD GAOU GARAFOLA GARDNER GARGAMEL GARIEV GARNACHO GARNADI GARROW GARTH GARTHD GARU GARY GARYAJ GARYK GASOLWU GATLIN GAUDEON GAURAV GAURAVK GAURAVKH GAV GAVIN GAVINC GAWBUL GAWLIK GAZ GAZERRO GBACON GBAILEY GBARCO GBARR GBAUER GBJK GBL GBOSS GBROCK GBROWN GBSHOUSE GBUDD GBURGER GCALIN GCAMPBELL GCARLS GCERRAI GCHESLER GCHIAIA GCHILD GCJ GCLASS GCOHEN GCOULOMB GDAMORE GDAV GDEWIS GDEY GDM GDR GDSL GEBELE GED GEDGE GEEKLADY GEEWIZ GEEZENSLA GEEZMO GEHIC GENE GENECUT GENEHACK GENEPERL GENIE GENJISCH GENSYM GEOFF GEOFFEG GEOFFR GEOFFT GEOJS GEOKEVHAT GEORG GEORGEOP GEOTIGER GERA GERBERRI GERDLER GERHARD GERRIT GERRYSTER GESTEP GETTY GFA GFIREBALL GFK GFLETCHER GFLOHR GFUJI GGAL GGALLONE GGARAND GGOEBEL GGOLDBACH GGONTER GGOUDSMIT GGUREVICH GHALSE GHEALTON GHENRY GHOARE GHUTCHIS GIATORTA GIDON GIEGERICH GIFF GIGAGEEK GILAD GILLMAUS GIMPIT GIMPSON GINGERHOT GIPPOLITI GIRAFFED GIRASQUID GISEBURT GIULIENK GIZMO GJB GJONES GJRUSSEL GKAPUR GKE GKNAUSS GKNOPS GLADIATOR GLAI GLANVILLE GLARKIN GLASSCOCK GLASSER GLAUCO GLEACH GLEDESMA GLENNWOOD GLENSMALL GLIDER GLMEOCCI GLORYBOX GLOVER GLTS GMAMBRO GMAX GMCC GMCCAR GMCH GMCHARLT GMCQUEEN GMDA GMG GMGRD GMILLERD GMLEWIS GMONEY GMONSON GMORTEN GMPASSOS GNALABS GNAT GNATYNA GNAZAREY GNB GNDPR GNG GNO GNOME GNURD GNUSTAVO GNUTOO GOCCY GODE GODSON GOEDICKE GOEVERT GOLD GOLDBB GOLDBIO GOLHARAM GOLM GOMOR GONERI GONZO GOODEVLPR GOODI GOOSEFISH GOOZBACH GOPI GORAXE GORD GORK GORTAN GOSHA GOSSAMER GOURAV GOYALI GOYAX GOZER GPALMER GPANKAJ GPAPILION GPAREDES GPHAT GPOLART GRAF GRAFF GRAHAMC GRAHJENK GRANDPA GRANTG GRANTM GRAOUTS GRATEMYL GRAVALO GRAVATTJ GRAY GRAYKOT GRAZZ GRB GREB GREEN GREENBEAN GREENPAU GREGARYH GREGFAST GREGG GREGHEO GREGOA GREGOR GREGORY GREGT GREIZHANG GRENDELT GRENDELUK GREP GREPBOY GREYCAT GRI GRIAN GRICH GRICHTER GRIFF GRIM GRIMOIRE GRIPE GRISHACE GRISKEY GRIXON GRJONES GRM GRODITI GROMMEL GROMMIER GROOVIS GROSOURS GROSSMANN GROUSSE GROVED GRUBER GRUBERT GRUDZIAR GRUNDEI GRYDUNCAN GRYPHON GSA GSAINIO GSAR GSAVIX GSB GSCHLOSS GSEAMAN GSGANG GSHANK GSIEMS GSIMMONS GSLIN GSLONDON GSM GSPAF 
GSPEAR GSPI GSPIVEY GSTRAUSS GSULLIVAN GTERMARS GTHEALL GTHYNI GUELICH GUEUX GUGOD GUGU GUIDO GUIDOPIAN GUILHEM GUILLAUME GUIMARD GULLY GUNNAR GUNSETT GUNTHER GUNYA GUOJIAN GURD GURETS GURUGEEK GURUGLENN GURUKUL GURUPERL GUS GUSG GUSTRA GUTZMANN GUYARIELI GUYDX GVENKAT GWADEJ GWARD GWARMAN GWELCH GWG GWHITE GWHULBERT GWILLIAMS GWL GWOLF GWORROLL GWS GWYN GYEPI GYPARK GYU HAARG HABE HABEGGER HACHI HACKER HACKMAN HAG HAGANK HAGBARD HAGGAI HAGY HAHAFAHA HAKANARDO HAKESTLER HAKOBE HALKEYE HALLECK HALLEY HALO HALODB HALPOM HAMANO HAMBSTER HAMEZ HAMID HAMLETT HAMMOND HAMPTON HANAXIS HANELYP HANENKAMP HANGIL HANJE HANK HANKIVY HANSHK HANSPOO HANSTO HANXI HAOO HAOSAN HAOYAYOI HAP HAQ HARASTY HARDAKER HARDCODE HARDIK HARDY HARIHARAN HARLEY HARLINH HARMONIE HARPREET HARRY HARRYPAN HARSHAG HARTMAN HARTZELL HASANT HASIOTIS HASSEILY HASTINGSE HATA HATAK HATENA HATHIPERL HATMAKER HATTORI HAVI HAVOC HAWAHAWAI HAWK HAWKALOOG HAWKINSM HAWKMOON HAYAJO HAYASHI HAYTER HBABCOCK HBARTEL HBENGEN HBIERSMA HBSLABS HCAMP HCARVEY HCCHIEN HCKKID HDAICHI HDANAK HDANIEL HDIAS HDM HDOSHI HDP HDS HEBELT HEDWIG HEGEMANP HEIKKI HEIKO HEIKOK HEIKOWU HEINZEBA HEJKI HELEI HELENA HELLERM HELLMAN HELO HEMA HEMAN HEMBREED HEMINGWAY HEMLOCK HENDRIKS HENKE HENRIK HENRIKS HENROID HENRYYKT HENTEKO HERBERTS HERNAN HERRERA HERVE HERVEUS HESCHONG HESCO HESSU HEUEL HEUMANN HEX HEXCODER HEYTRAV HFAHE HFB HFINKEL HFOLLMANN HGDEV HGNENG HGOTTFR HHOLZ HHORIUCHI HHUNTER HIDE HIDEAKIO HIDEDEN HIGHTOWE HIGU HIHIK HIIVE HIKARINE HILLER HIMACHOCO HIMAZU HINDMAN HINDMARSH HINNERK HINRIK HIO HIPHIPPOP HIRAFOO HIRATA HIRATARA HIRO HIROSE HIROYUKI HIROYUKIM HISAJU HISELDL HISSO HITAPEX HITHERTO HITSU HJANSEN HJB HJHELGE HJP HKAREKAR HKASHYAP HKCLARK HKOBA HKOCH HLARONS HLHAMILT HLIN HLP HLSANDER HLUBENOW HMA HMATT HMBRAND HMERSCH HMNIELSEN HMOVVA HMUELLER HOBBESTIG HOBBIT HOBBS HOCHSTEN HODEL HOEVE HOFFY HOGGARTH HOKKE HOLBROOK HOLGER HOLGILEIN HOLLI HOLLIE HOLLIFM HOLLY HOLLYKING HOLMBERG HOLMLUND HOLOWAY HOLROY HOLSTEN HOLT HOMSHERD HONEYMAN HONG HOOO HOOWA HOPCROFTS HORD HOREA HORIUCHI HORN HORNBURG HORNET HORROCKS HORSHACK HORST HOSI HOTSPUR HOTZE HOURBACK HOUSEL HOUZUO HOVENKO HOWARD HOWARS HOWEN HOWIE HOYMICH HPA HPALM HPETERS HPOUL HPSMITH HPWERNER HPYHACKER HRAFNKELL HRANICKY HRG HRIDAY HRK HROGERS HROMI HRYK HSLEE HSMYERS HSTEEB HSUREN HSURESH HSW HTCHAPMAN HTHOMAS HTOUG HTR HUANGWEI HUCKE HUDSON HUGHES HUGOCHIEN HUGOCOSTA HUGUEI HUJINPU HUMPH HUNTER HUSKA HUSOFT HUXTONR HVALVERDE HVC HVDS HVOERS HWAT HWEBER HZHOU HZLNT IAIN IAINT IAMB IAMCAL IAN IANB IANC IANCABELL IANCHARD IANDERSO IANK IANPX IANRODDIS IARNELL IAWELCH IBB IBMTORDB2 IBRAUN IBUDAI IBURRELL ICABRERA ICAM ICASIMPAN ICC ICD ICDEV ICENI ICERIDER ICESPIRIT ICHE ICHI ICHLADIL ICHUDOV ICIBIN ICK ICKHABOD ICLARK ICONTACT ICRF ICY ICYDEE IDAS IDEN IDERRICK IDIVISION IDN IDO IDOPEREL IDORU IEFREMOV IFIM IFLAN IFOMICHEV IFROL IFTEKHAR IFUSCHINI IGERLACH IGORM IGREC IGSTAR IGUTHRIE IGVA IHAQUE IHARA IHEFFNER IHOLSMAN IHRD IIZUKA IJLIAO IJONES IKALLEN IKANUSIM IKEBE IKEGAMI IKETRIS IKHRNET IKLUFT IKUTA ILER ILIAL ILION ILJATABAC ILLVILJA ILMARI ILTZU ILV ILYA ILYALIT ILYAM ILYAS ILYAVERL ILYAZ IMACAT IMALPASS IMARSMAN IMATTJN IME IMIALEKM IMIOSGA IMMUTE IMPIOUS INA INDIAMADE INDRASENA INEULANDE INFERNALE INFERNO INFIDEL INFINOID INFOFLEX INFOPAK INFRARED INGHAM INGOMACH INGRIF INGY INIGO INITDOTD INITNINE INOCI INSANEART INSANIAC INSTANTK INTEGURU INTERCAL INTERGURU INTERMINE INTMKTG INTRICA INTRIGERI INTWO INVENTOR INVINITY IOANJ IOANNIS IOANR IOK IONCACHE IPAPONOV IPECHORIN IPENBURG 
IPH IPLOC IPPO IPTOLOC IRC IROBERTS IRONCAMEL IRONS IRQ IRVINED ISAAC ISAACL ISAACSON ISABELLE ISAGE ISHIGAKI ISILLITOE ISJOUNG ISKATA ISLUE ISOYA ISPY ISSM ISTEEL ISTERIN ISTR ITDEV ITEAHAUS ITEGEBO ITGURU ITMFROLOV ITNOMAD ITRIVIZKI ITRUNAEV ITSME ITUB ITWARRIOR ITYNDALL IVAN IVANOFF IVANPAN IVANS IVANTIS IVANWILLS IVANWONG IVEY IVORW IVOZ IVSOKOLOV IWADE IWALZ IWAMATSU IWDW IWIARDA IWOODHEAD IX IXA IXO IZI IZUT JAAN JABBAS JABLKO JABRA JABURO JAC JACEK JACKAL JACKB JACKLU JACKMANEY JACKOHARE JACKS JACKSONM JACKTAO JACM JACO JACOB JACOBROSE JACQUELIN JAD JADAMS JADEDEANE JADEV JAE JAEPSTEIN JAGERMAN JAGS JAINHK JAITKEN JAIV JAK JAKB JAKE JAKEGOLD JAKEX JAKOBI JALDHAR JALLEN JALLWINE JALONSO JALOPEURA JALYCAI JAM JAMACO JAMADAM JAMBE JAMCC JAME JAMES JAMESB JAMESC JAMESGOL JAMESM JAMESODEN JAMESP JAMESPO JAMESR JAMESS JAMESTHOM JAMHED JAMSHAID JAMTUR JAN JANDD JANDRAS JANDREW JANDREWS JANERT JANEZHANG JANL JANNINO JANP JANPAZ JANPOM JANSTARKE JANTMAN JANU JANUS JANW JAOS JAPA JAR JARED JARIAALTO JARICH JARLE JARTYMIAK JARW JASALOMON JASHMENN JASKA JASLONG JASON JASONBIO JASONJAYR JASONK JASONM JASONMAY JASONPOPE JASONS JASONW JASPAX JASPREET JAST JATU JAU JAUER JAVIER JAVIERPB JAW JAWNSY JAX JAYALLEN JAYBEE JAYBONCI JAYBUFF JAYCE JAYJ JAYK JAYWHY JBABBING JBAKER JBARKER JBARRA JBARRATT JBARRETT JBAZIK JBE JBELTON JBENINGER JBERGER JBERT JBEVERLY JBG JBIGGS JBISBEE JBJOHNS JBLAKEY JBM JBNIVOIT JBODNAR JBOHM JBRIGGS JBROWN JBRYAN JBSOLES JBUHACOFF JBURATI JBURNETT JBURNHAM JBW JCAMACHO JCAP JCAREY JCARLING JCARON JCARREIRA JCARTER JCCLEAVER JCDUQUE JCERVAN JCF JCHASSLER JCHEN JCHIN JCHONIG JCHRIS JCIHLAR JCLEMENTS JCLINE JCLYMAN JCM JCMULLER JCMURPHY JCNORTON JCO JCOHEN JCOLE JCONERLY JCOP JCORWIN JCOSTOM JCOUBALL JCP JCREASEY JCRISTY JCROMIE JCTEBBAL JCUA JCUZELLA JCZEUS JDALBERG JDALLMAN JDAV JDAVIDB JDAVIS JDB JDBOYD JDCORRAL JDDIXON JDDPAUSE JDELUISE JDENNIS JDEPERI JDEVLIN JDHEDDEN JDIEPEN JDKOHLER JDLEE JDOUGLAS JDPORTER JDR JDRAGO JDS JDUARTER JDUNCAN JDUTTON JDV JEAANDER JEAGLE JEALLEN JEAN JEB JEDAI JEDWARDS JEDY JEEK JEEN JEESMON JEF JEFF JEFFA JEFFBLACK JEFFERY JEFFH JEFFL JEFFMOCK JEFFO JEFFOBER JEFFW JEFFY JEFPONOT JEGADE JEGAN JELLMAN JELU JENDA JENKINSON JENKSTER JENNY JENS JENSH JENSJ JENSL JEOB JEPRICE JERBOAA JERBROO JEREMIAH JEREMIE JEREMY JEREMYW JERESIG JERF JERI JERICSON JERLBAUM JEROENES JEROMEMCK JERRYK JERRYV JERZYK JESFIVE JESGIM JESSE JESSEG JESSESTAY JESSICAQ JESSP JESSSSST JESTER JESTOCK JESUS JET JETEVE JETTERO JETTRA JEUNICE JEV JEZHANCO JEZRA JFARRELL JFEARN JFENAL JFF JFINGER JFITZ JFLAHERTY JFLOURNOY JFLOWERS JFLUHMANN JFORGET JFRAIRE JFREEMAN JFRIED JFRIEDL JFROEBE JFROSTH JFS JFURNESS JGAMBLE JGARRISON JGARVIN JGATCOMB JGBISHOP JGDA JGEYER JGIBSON JGILB JGITLIN JGK JGL JGLICK JGMYERS JGOFF JGOLDBERG JGOLDSCHR JGOMMERS JGONZALEZ JGOODALL JGORMAN JGOULAH JGRAY JGRELET JGROENVEL JGRUBER JGS JGUENTHER JGULUARTE JHA JHAGG JHALLOCK JHAMLET JHANNAH JHAR JHARDING JHART JHATFIEL JHCLOOS JHELBERG JHELWIG JHERM JHERRERO JHI JHINKLE JHITT JHIVER JHKIM JHOBLITT JHOLCAP JHOOGENR JHORWITZ JHOWELL JHPB JHTHORSEN JHUCKABY JHUDGE JHUG JHUGHES JHUNI JHYLAND JIANGS JIGSO JIK JILA JIM JIMBOB JIMBOX JIMFL JIMI JIMLAMBRT JIMRIE JIMS JIMT JIMW JINGRAM JINKEE JIRA JIRAPL JIRO JIRWIN JJACK JJARVINEN JJCARMAN JJDG JJFUMERO JJGREEN JJHORNER JJMB JJNAPIORK JJOAO JJONES JJORDAN JJORE JJSCHUTZ JJSIMONI JJUDD JKAMPHAUS JKARALIUS JKAST JKEENAN JKEGL JKEISER JKEKS JKELFER JKIM JKISER JKISTER JKLEPEK JKNEPLEY JKNOTT JKOBIE JKODIS JKONDO JKORKIN JKR JKRAJ JKRAMER JKRASNOO JKUTEJ JKVA 
JLABOVITZ JLAPEYRE JLAPOUTRE JLATHAN JLAVALLEE JLAVERY JLAVOLD JLAWRENC JLBEC JLBXKYFJO JLEADER JLEFLER JLENOIR JLENTON JLETUAL JLEV JLEVAN JLGELPI JLHOLT JLISHEV JLK JLKAUS JLLEROY JLLOYD JLMARTIN JLMOREL JLOLOFIE JLOPHTY JLOUDER JLOUISBIZ JLOWREY JLRUSH JLSCHWAB JLSPEARS JMAC JMACC JMACFARLA JMADLER JMAHAN JMAHESH JMAHONEY JMANCZ JMARCH JMASLAK JMASON JMASTROS JMATES JMAY JMBO JMCADA JMCCARV JMCGUIRE JMCNAMARA JMEGERMAN JMEHNLE JMELANSON JMELTZER JMENDLER JMERELO JMFAYARD JMFOUST JMFREEMAN JMGDOC JMGLOV JMH JMHOGLUND JMINIERI JMKHB JMM JMMILLS JMONGAN JMOORE JMOOSMANN JMORA JMORRIS JMPENNEL JMS JMT JMTORRES JMUHLICH JMURPHY JMURRAY JMUSSE JMV JMVILOMET JNAGRA JNBEK JNBROOKES JNEYSTADT JNH JNICHOLS JNIXON JNK JNLIN JNOBLE JNOLAN JNORUSIS JNPR JNQUINTIN JNSIMONN JNW JNWHITLEY JOACHIMDS JOAKIMBS JOANMG JOAO JOAOCOSTA JOAOP JOBA JOBERO JOCASA JOCHEN JOE JOEATZ JOEHIL JOEJIANG JOELH JOELJAC JOENIO JOEOFCLEW JOEP JOEPHAYES JOESUF JOETYM JOEY JOEYATES JOGLA JOHAN JOHANL JOHANNP JOHANNZ JOHANVDB JOHAYEK JOHNA JOHNBAYLY JOHNBOKMA JOHNC JOHND JOHNG JOHNGH JOHNH JOHNHARRI JOHNKOEN JOHNL JOHNMA JOHNP JOHNSCA JOHNW JOHNWRDN JOHNYJH JOKERGOO JOKKE JOLANDE JOMON JON JONADAB JONALLEN JONAS JONASBN JONASBULL JONASO JONATAYAH JONATHAN JONB JONBJ JONBROWN JONE JONFM JONG JONI JONIONS JONJ JONJAY JONMORGAN JONO JONOZZZ JONTAYLOR JONTY JOOON JORDAN JORISVR JOROL JOROURKE JORVIS JOS JOSEBA JOSEF JOSEIBERT JOSEPHW JOSERIJO JOSERODR JOSEVNZ JOSEWEEKS JOSH JOSHHANNA JOSHKUO JOSHR JOSHSTEW JOSHUA JOSHUAG JOSHUAZ JOSHW JOST JOSTEN JOT JOUKE JOVAL JOY JPACE JPAF JPATON JPAVLICK JPB JPC JPDURRIOS JPEACOCK JPEREGR JPETERSON JPFRICKER JPIERCE JPIKUL JPINKHAM JPJEON JPL JPLATTER JPLUMEYER JPNANGLE JPO JPOLLACK JPOWERS JPR JPRANTE JPRAVETZ JPRIT JPVIDAL JQCOFFEY JQUELIN JQUILLAN JQUINLAN JQUIROGA JQYAO JRAFTERY JRANDALL JRED JRENNIE JREPROGLE JREY JRG JRH JRIDEOUT JRLITTELL JRM JROBINSON JROCKWAY JROD JROGERS JROWE JRUBEN JRW JRYAN JSA JSAILOR JSALZ JSANCHEZ JSBARKAN JSCH JSCHNEID JSCHREIB JSCHROER JSEGAL JSET JSHEARER JSHERMAN JSHIMADA JSHIRLEY JSHY JSIME JSIMPSON JSIRACUSA JSLAGEL JSM JSMITH JSMITTY JSMYSER JSN JSNBY JSOBRIER JSOFFIAN JSONNENB JSOVERSON JSPEAKS JSPICAK JSTEBENS JSTELZER JSTENZEL JSTEWART JSTOF JSTOWE JSTRAUSS JSTROM JSWANN JSWARTZ JSWEVAL JSWITZER JTAM JTANG JTARCHIE JTATUM JTAVARES JTAVERNI JTBRAUN JTCLARKE JTEFD JTGANS JTHARDY JTHIGH JTHOMPSON JTILLMAN JTITUS JTK JTNIX JTOBEY JTOPJIAN JTP JTPALMER JTRAJ JTRAMMELL JTROWE JTT JTURNER JTWEED JUCROUZET JUDD JUDIOO JUDITH JUDOFYR JUERD JUFFI JUGUANG JULES JULY JUM JUMASH JUNICHIRO JUNKER JUNOS JUNYER JUPITER JURACH JURL JURUEN JUSTER JUSTEW JUSTIN JUSTINL JUSTVIT JV JVANASCO JVANDERB JVANNUCCI JVASILE JVB JVBSOFT JVENIER JVZ JWACH JWALGENB JWALT JWANG JWAR JWAT JWAUTHIER JWB JWDOMAIN JWEBSTER JWEIGEL JWEVELAND JWGEO JWHEELER JWHITE JWIED JWIEGLEY JWIELAND JWILEY JWILLIAMS JWJS JWM JWOODYATT JWRIGHT JWU JXH JYOTHISH JZAWODNY JZHANG JZOBEL JZTAM JZUCKER KAARE KABANOID KABLAMO KACCV KADOS KADOUBEK KAELIN KAESEES KAFKA KAGENEKO KAHIRSCH KAHN KAHUNA KAIH KAILI KAIMI KAIW KAKADU KAKE KAKOOCH KAL KALEL KALELE KALEX KALEYCHEV KALGAN KALIGUS KALLEWOOF KALNINS KALOU KALRON KALT KAMAL KAMARA KAMELKEV KAMENSKY KAMILLO KAMIPO KAMO KAN KANE KANEY KANEZENG KANGELOV KANGU KANISHKA KANNA KANTE KAORU KAOSAGNT KAPPA KARASIK KARAVELOV KARDEL KARHU KARINILS KARIR KARJALA KARLON KARLWARD KARMAN KARTHIK KARTHIKK KARTHIKU KARUPA KASEI KASHA KASJENS KASPER KASS KASTNER KATE KATMONKEY KAUFMANN KAVEHMZ KAVITHA KAWABATA KAWAMURAY KAWASAKI KAYSB KAZEBURO KAZIMIROV KAZUHISA KAZUHO 
KAZUKIMA KAZUNORI KBA KBALBI KBARBER KBARTER KBAUCOM KBENSON KBLIN KBOCEK KBORER KBOSAK KBRANDT KBRINT KBROWN KBURDIS KBXXXIEN KCARNUT KCHAITAN KCHAN KCIVEY KCK KCLARK KCODY KCOMKAR KCOTT KCOWGILL KCWU KDAQ KDOULEPOV KDOWNEY KDRANJAN KEEDI KEEKUNG KEICHNER KEISUKE KEJOKI KELLAN KELSEY KEMMONS KEN KENCL KENESH KENFOX KENHOLM KENJIRO KENMACF KENNEDYH KENNETHK KENNYG KENO KENSHAN KENSHIN KENTARO KENTNL KENWU KERBERUS KERISMAN KERMAGO KERNELJ KEROYON KERR KERW KES KESINGER KESTEB KESTER KESZLER KETHGEN KEU KEVANG KEVIN KEVINA KEVINBOUG KEVINFINK KEVINJ KEVINO KEVINR KEVINRICE KEVINZ KEVJSHEA KEVLINDEV KEVORKYAN KEWLPI KEYSPERL KFAUGHNAN KFHICKEL KFLEGAL KFO KFOGEL KGALINSKY KGB KGJERDE KGODBOLD KGOESS KGOLDOV KGREENE KGRENNAN KHAMAR KHAMBHALA KHAMER KHAMPTON KHAREC KHASELY KHEDIN KHEMICALS KHM KHOELZER KHOERLING KHOLZ KHS KHULTMAN KHURT KHUSSEIN KHW KIANWIN KID KIELSTR KILINRAX KILNA KILORK KIM KIMAHL KIMATA KIMBREL KIMHAWTIN KIMMEL KIMMORMH KIMOTO KIMRYAN KIMURA KINDJAL KING KINMAN KINOW KINSAN KINZLER KIO KIOKO KIRILL KIRILLM KIRSLE KIRSTINB KISEKI KISMET KITAMAKI KITANO KITCHEN KITDEKAT KITE KITO KITOMER KITTY KITTYCAT KIWANAMI KIXX KIZ KJACKSON KJALB KJAM KJEKJO KJELLM KJETIL KJETILK KJH KJM KJOHNSON KJPRICE KJS KJWCODE KKRON KKULT KLBLNGMJ KLEINJ KLIMKIN KLLIN KLOHNER KLORTHO KLPTWO KLYNC KMACLEOD KMADHYAN KMAK KMCGRAIL KMCGRATH KMEHTA KMELTZ KMIYAZAKI KMOHARANA KMR KMX KNAGANO KNAIRN KNASSAR KNEW KNI KNIGHT KNJSKYWKR KNM KNOEFEL KNOK KNOPKE KNORR KNOTTY KNOWLES KNSHAUM KNTH KNTONAS KOADMAN KOBAYASHI KOBAYASI KOCEASY KODAK KOEN KOGA KOHA KOHLIS KOHTS KOJIMAGI KOJUN KOKI KOKOGIKO KOLA KOLCON KOLD KOLIBRIE KOLPAK KOLYA KOM KOMAROV KOMORIYA KONBUIZM KONDO KONOBI KONSTANT KOORCHIK KOOS KOPPI KOPUG KORPIQ KORSANI KORSHAK KORTY KOSCIELNY KOSMO KOST KOSTAS KOSTMO KOSTYA KOT KOTEROFF KOTEROV KOTOTAMA KOVACSBV KOVENSKY KOYACHI KOZO KPATTON KPETERS KPFEIFFER KPI KPOL KPOWER KPV KPWATSON KRAEHE KRAGEN KRAIH KRAILEY KRAJARAM KRAKEN KRAL KRAMAN KRBURTON KRENNMAIR KRILL KRIPT KRISHPL KRISSG KRISTI KRISTIAN KRISTINA KRN KROEPKE KROH KROKI KROPP KROS KROSS KROW KRS KRUGGLE KRUSCOE KRYDE KRZAK KRZKRZ KSB KSCHEIBEL KSCRIPT KSDOCTOR KSHANTH KSI KSNORTUM KSPENCER KSTAR KSTEP KSTEPHENS KSTEPME KSURI KSUZUKI KSZOKE KTAKATA KTANG KTAT KTDREYER KTHAKORE KTHOMAS KTMK KTORP KUBIK KUBINA KUBOTA KUDARASP KUDINOV KUDRA KUERBIS KULCHENKO KULDEEP KULP KULTAWAT KUMA KUMAKATSU KUMY KUNGFUFTR KUNIYOSHI KUNWON KURAMAKO KURGAN KURIANJA KURIHARA KURIYAMA KUROWSKI KUUSE KUZUHA KVAIL KVALTONE KVENTIN KVORG KWAKWA KWAPING KWARTIK KWATCH KWHITSEL KWILLIAMS KWILMS KWINDLA KWITKNR KWITTMER KWMAK KWOO KWOOLERY KXJ KYANNY KYLE KYLED KYLEJAMES KYLESCH KYNAN KYO KYOKI KYOKO KYOMAIODP KYORO KYOSHU KYPREOS KYU KYZ KZFM KZTOMITA KZYS LABASTAR LABEKA LABITBOL LABZERONE LACKITA LAFFERTY LAFRAIA LAINMLH LAIRDM LAJANDY LAJJR LALA LALIT LALLIP LAM LAMBRETTA LAMECH LAMMEL LAMP LAMPRECHT LANCET LANCEW LANDMAN LANGENJO LANGMEAD LANGMIC LANTI LANY LAOMOI LAOTSEU LAPIS LAROY LARRYK LARRYL LARRYSH LARSEN LARSGT LARSLUND LARSNYG LARSSKJ LARSTHON LASTRONIN LATTICE LAWALSH LAWGON LAWRENCEH LAXEN LAYE LAZARIDIS LAZYBONE LBAXTER LBAYER LBE LBECCHI LBENDAVID LBORGMAN LBOTTEL LBR LBROCARD LCARMICH LCD LCGUID LCHAVEZ LCONS LCOWLE LDACHARY LDAVIS LDB LDIDRY LDOMKE LDS LEADTIGER LEAKIN LEANDR LEANDRO LECHEE LECSTOR LEEDO LEEJA LEEJO LEEPEN LEEYM LEFEDOR LEFORESJF LEGART LEGATVS LEGLESS LEGO LEIDNER LEIF LEIFHED LEIFJ LEIGUANG LEIJON LEIRA LEITE LEITGEBJ LEKUIN LELIR LEMBARK LEMON LEMP LENDL LENGEL LENIK LENJAFFE LENNERTO LENNY LENZO LEO LEOBALTER LEOCHARRE 
LEOHUA LEON LEONMEDIA LEONOVPA LEONT LEPHT LEPREVOST LEPT LEPTON LERGON LESPAUL LESPEA LESV LETO LEUCOS LEUNGA LEV LEVENGLI LEVIA LEVIATHAN LEVINSE LEVONB LEXICON LEXUS LFAGUNDES LFEISTEL LFINI LGAUTROT LGEHLEN LGJUT LGODDARD LHOSS LHOWARD LHS LIBERTY LIBVENUS LICH LICHTKIND LIEUTAR LIFEGUARD LIFO LIGHTSEY LIKK LIKSU LILSTEVEY LIMAONE LINAS LINC LINCHUS LINCOLNB LINDNER LINK LINKDD LINKE LINMAR LINNIN LINPC LINSALROB LINTDOG LINUS LIOL LION LIOSHA LIRAZ LISANTRA LISCOVIUS LISPYONE LITCHIE LITERALKA LIUER LIUL LIWEI LIYANAGE LJOHNSON LJUBEX LKCJR LKUNDRAK LLAP LLG LLOYDG LLOYDR LLSJK LLT LMASARA LMB LMC LMCMLAL LMETCALF LMEYER LMJM LMOLNAR LMUELLER LOCAL LOCATION LOCHMATT LOCSMIF LODIN LOGCHECK LOGICUS LOGIE LOGVINON LOIC LOICDREUX LOKKJU LOLO LOMBARDO LONERR LONEWOLF LONGJON LOOFORT LOOSIFER LORDLOD LORDO LORDSPACE LORENSEN LORN LORTAS LORY LOSYME LOTTC LOTTZ LOURIDAS LOVEKY LOVELACE LOVEX LOZ LOZARCHER LOZIER LPALMER LPETERS LPETRE LREEVES LROCHER LROMAN LROMERO LRR LRUOSO LSAUNDERS LSBUCHALA LSF LSFISV LSIM LSKATZ LSLOAN LSTAF LSTEVENS LSTGEORGE LTBOOTS LTHARRIS LTHEGLER LTOETSCH LTP LTRIANT LUCARIZZI LUCAS LUCHA LUCS LUFI LUISMUNOZ LUKAST LUKE LUKEC LUKEROSS LUKHNOS LUKKA LUM LUNARTEAR LUNATIC LUNDEEN LUNDO LUNGCHING LUNKER LUOXI LUPE LUPUS LURKER LUSHE LUSOL LUTETIUS LUTHERH LUVANTI LUX LUXOSTEIN LUYANFEI LUYSEYAL LVANELSEN LVREGENT LWA LWALL LWSITU LWWWP LXP LYL LYNX LYOKATO LYTREAN LYUAN LYUMING LZAP LZE MAARD MAB MABAUER MABI MAC MACDEE MACDONSP MACFLY MACGAW MACGYVER MACHINE MACIEJ MACKENNA MACKENZIE MACKERS MACNOD MACPAUL MACROFAGE MADBOSUN MADCAT MADCODER MADDAVE MADFROG MADGHOUL MADKINS MADLINUX MADWOLF MADZ MAESTRO MAFR MAG MAGGIEXYZ MAGH MAGICDRIV MAGICIAN MAGIKMAN MAGNACHEF MAGNEW MAGNUS MAGO MAGOO MAGORACH MAGU MAHATMA MAHEX MAHIRO MAHITO MAHLBERG MAHLON MAHNKONG MAHT MAIDO MAILOMAN MAIN MAINBRAIN MAIO MAIRE MAITKIN MAJA MAJCHER MAJENSEN MAJESTIC MAJLIS MAK MAKAMAKA MAKAROW MAKE MAKIS MAKITA MAKLER MAKOTO MALA MALANDER MALAY MALDUARTE MALETIN MALLEN MALLRED MALLUM MALO MALOHIN MALPOETA MALT MALTEU MALUKU MALVARO MAMATUX MAMAWE MAMOD MAND MANDY MANFREDI MANGARU MANI MANIK MANISH MANIWHEEL MANJUNATH MANNI MANNO MANOJKG MANOWAR MANPREET MANTOVANI MANU MANUEL MANWAR MAOE MARAIST MARAL MARAVAN MARC MARCB MARCC MARCEL MARCELO MARCELSER MARCGREEN MARCIN MARCIO MARCLANG MARCO MARCOG MARCOS MARCP MARCS MARCUS MARCUSSEN MAREKR MARGHI MARIAB MARIAN MARIO MARIOF MARIOROY MARISABT MARIUSLJ MARIUZ MARK MARKB MARKBACH MARKBUSH MARKC MARKD MARKEMER MARKF MARKFI MARKG MARKIM MARKJ MARKJJNSN MARKK MARKLE MARKLEEUW MARKM MARKNG MARKO MARKOV MARKPASC MARKPF MARKPRIOR MARKSMITH MARKSTOS MARKSYMAS MARKUSB MARKW MARKWHI MARKWIN MARKWKM MARMS MARNANEL MARNIX MAROS MARR MARROTTE MARSA MARSAB MARSCHAP MARSENI MARSHALL MARSKO MART MARTIAN MARTIJN MARTIN MARTINB MARTINDT MARTINELL MARTINEZ MARTING MARTINI MARTINK MARTINTO MARTIRE MARTO MARTY MARTYKUBE MARTYLOO MARTYNOFF MARTYNOV MARTYZZ MARVIN MARWATK MARWIL MARYA MARZOA MASACCIO MASAHITO MASAHJI MASAK MASAKI MASANORIH MASAO MASAP MASARTZ MASH MASKLIN MASON MASQUE MASSA MASSYN MASTA MASTERDAM MASTR MASUTARO MAT MATBLACK MATEU MATH MATHEW MATHIASM MATHOMAS MATISSE MATIU MATJA MATKARC MATKIN MATRIA MATSUMOTO MATT MATTBM MATTD MATTDEES MATTDM MATTHEWG MATTHIAS MATTHIASW MATTI MATTIASH MATTIE MATTK MATTLAW MATTMCG MATTMK MATTN MATTOATES MATTP MATTS MATTW MATTWB MAUKE MAUNDER MAURERM MAURICE MAUSER MAVANWIN MAVERICK MAVIT MAX MAXA MAXB MAXDB MAXICO MAXIS MAXM MAXOU MAXS MAXSCHUBE MAXX MAYGILL MAYUR MBAAS MBAILEY MBAIT MBARBON MBARTOSCH MBAS MBASOV MBASTOS 
MBASUNOV MBATISTA MBB MBEAST MBEATO MBEEBE MBERENDS MBERG MBETHKE MBGRAY MBHALL MBITTER MBKODOS MBLAZ MBLYTHE MBOECK MBP MBRADLEY MBRANDON MBRECH MBROOKS MBSTE MBURNS MBUSIGIN MBUSIK MCAFEE MCANANN MCANTONI MCARDWELL MCARR MCARTER MCARTMELL MCASHNER MCAST MCAUDILL MCB MCC MCCARRELL MCCOHY MCEGLOWS MCEWAN MCGILL MCGREGOR MCGRUFF MCHANG MCHAPMAN MCHE MCHING MCKAY MCLANDER MCMAHON MCMIC MCNEWTON MCONNER MCORAZAO MCPL MCRAWFOR MCREENAN MCROSBY MCROSE MCSNOLTE MCT MCURTIS MCVELLA MCWEATHER MCWEHNER MDA MDARWIN MDASH MDBGRIZ MDEHOON MDEWJONES MDI MDIEP MDIEROLF MDIETRICH MDIMEO MDMONSEN MDMS MDOM MDOOTSON MDORMAN MDOWNING MDPERRY MDUNHAM MDUPONT MDVALLEY MDXI MEDINA MEDINED MEDMONDS MEETTYA MEGA MEGAMIC MEH MEHNER MEINTEUCH MEIRM MEKK MELE MELEZHIK MELLERY MELO MELONMAN MELVIN MEMES MEMOWE MENDEL MENDOR MENGARINI MENGEL MENGWONG MENGXR MENTIFEX MEPHIST MERGL MERIJNB MERIXZON MERKER MERLIN MERLYN MERNST MESADAVEY MET METADOO METAL METALIX METZZO MEWILCOX MEWP MEWSOFT MEYEAARD MEYERCJM MEYERING MFAJAR MFENTON MFERRIS MFOLLETT MFONTANI MFOWLER MFRANKL MFREE MFROST MFU MFUHR MFULLER MFURLOTTI MFW MFX MGAMMON MGANNON MGASEK MGH MGI MGILFIX MGLEE MGODINHO MGOULD MGRABNAR MGRAHAM MGREAM MGREGORO MGRIMES MGRIMM MGRUBB MGUTSCH MGVDM MHALLGREN MHAMILTON MHARNISCH MHASCH MHAT MHAZEN MHECKMAN MHEMPEL MHENSON MHEUSSER MHG MHIONESCU MHJACKS MHM MHO MHODGSON MHOOLEHAN MHOOREMAN MHOSKEN MHOWARD MHOYT MHX MHYOUNG MICB MICHAEL MICHAELD MICHAELR MICHAELW MICHEL MICHELEO MICHELEON MICHELS MICHIEL MICHIELB MICKWEISS MICLAR MICOAMS MICROFT MICU MICVU MIDDLETO MIDELLAQ MIDH MIDI MIDNITE MIFOE MIG MIGEN MIGMIR MIGO MIHOBU MIJIT MIKAGE MIKE MIKEB MIKEC MIKEDLR MIKEG MIKEGRB MIKEH MIKEJ MIKEK MIKEKING MIKEM MIKEO MIKEOB MIKER MIKERY MIKESCOTT MIKESTEPH MIKESTOK MIKESZCZ MIKET MIKEWHOO MIKEWONG MIKFIRE MIKHAILK MIKI MIKIHOSHI MIKIO MIKKOI MIKO MIKRA MILA MILAD MILAN MILANO MILES MILLAWAY MILLERADA MILSO MIMER MINAKOV MINCUS MINDHACK MINDOS MINDTRUST MINER MINGYILIU MINGZHANG MINIC MINIMAL MINORU MINTER MIOREL MIRABELLA MIRE MIRELL MIRK MIRKO MIROD MIRROROK MIRRORS MISAKA MISC MISCHKE MISHA MISHIKAL MISHIN MISHOO MIST MISTI MIT MITEL MITHALDU MITHUN MITREHC MITTI MIVKOVIC MIWALTER MIXI MIYAGAWA MIYAMUKO MIYAZAKI MIZZY MJA MJAEG MJAHN MJBRIGHT MJBUDDEN MJCANUP MJCARMAN MJD MJDILLON MJEMMESON MJEVANS MJEWELL MJFLICK MJFO MJFS MJG MJGARDNER MJH MJHARR MJHEWITT MJLEEDS MJM MJONDET MJP MJR MJS MJSR MJW MKAL MKAMM MKANAT MKENNEDY MKHRAPOV MKI MKJELLMAN MKMUELLER MKODERER MKOSSATZ MKOWSIAK MKREBS MKRUSE MKSCHUEL MKUGLER MKUL MKUTTER MLAMBERT MLANDMAN MLANIER MLAWREN MLCASTLE MLEBLANC MLEHMANN MLEVI MLEWINSK MLF MLFISHER MLIGHTNER MLINEEN MLING MLONGTIN MLRU MLS MLUCERO MLUKER MLX MMABRY MMACHADO MMAKAAY MMALLARD MMALONE MMARCO MMASTERS MMATHEWS MMAURICE MMBK MMCCLENN MMCGILLIS MMCLAGAN MMCLERIC MMERTEL MMETTES MMHEIKAL MMIMS MMITCHELL MMJB MMK MMKHAJAH MML MMLANGE MMLEVITT MMML MMORENO MMORGAN MMORRIS MMOYLE MMUELLER MMULLI MMUSGROVE MMV MMWEBER MNAGUIB MNCOPPOLA MNDRIX MNEMONIC MNEYLON MNF MNIKHIL MNILES MNOONING MNUNBERG MOB MOBILEART MOCK MOCONNOR MODULO MOFOGHLU MOGAAL MOHACSI MOHAN MOINEFOU MOLECULES MOLT MONA MOND MONDEJAR MONDONGO MONKEYUK MONMON MONO MONS MONSENHOR MONSIEUR MONTUORI MOO MOOCOW MOOLI MOOLLAZA MOON MOONRANBW MOOSA MOPOKE MOPY MOREGAN MORGENEGG MORGOTHII MORI MORIS MORITZ MORIYA MORNI MORNINDED MORRISNIH MORROW MORTENF MORTIY MORTY MORUNGOS MOSBOY MOSCONI MOSES MOSHEGOOD MOSHEN MOSSAD MOSTAFAVI MOTEMEN MOTIF MOTONAROL MOTT MOUNS MOXFYRE MOXNET MOZNION MOZZERB MPAPP MPASTERN MPB MPCZ MPECK MPELZER MPEREZ 
MPERRY MPETERS MPG MPGUTTA MPIJANKA MPIOTR MPLATTU MPLUS MPMANSELL MPOCOCK MPOURASG MPRESSLY MPREWITT MPRIES MPRUNTY MPULTZ MPUSCH MQSERIES MRA MRAJESH MRAMBERG MRANDALL MRASH MRATA MRDELAYER MRDEVFREE MRDINI MRDVT MREIMANN MRENZ MRF MRFELTON MRG MRGRIPH MRICE MRICHARDS MRIDDLE MRJC MRJONES MRKAE MRKOFFEE MRMALOOF MRMICK MRMIKE MRMONEYC MRMT MRO MROBINSON MROEDEL MROGASKI MROWLANDS MROZ MRPALMER MRPERSON MRSAM MRSCOTTY MRSLX MRTS MRUIZ MRWHIPPLE MRWILL MRWOLF MRX MRXOR MSABRAMO MSANTINHO MSANTOS MSARFY MSCHARRER MSCHILLI MSCHLUE MSCHMITT MSCHOUT MSCHUETT MSCHWARTZ MSCHWERN MSCROGGIN MSERGEANT MSERSKINE MSF MSH MSHELOR MSHIBLA MSHILTONJ MSHIMPI MSHLD MSHOGIN MSHOYHER MSICKEL MSIEB MSILVA MSIMERSON MSIMKINS MSISK MSK MSLAGLE MSMITH MSMOUSE MSOLOMON MSORENS MSOULIER MSOUTH MSOUTHERN MSPENCER MSPERL MSPRICK MSPRING MSREDDY MSROTH MSS MSTEELE MSTEVENS MSTINSON MSTOCK MSTPLBG MSTRAT MSTREEK MSTROUT MSULLAND MSULLIVA MSULLIVAN MTADEL MTHOMAS MTHURN MTINBERG MTIRAMANI MTL MTMAIL MTROWBRI MTRUE MTURK MUCKER MUDDYDIXN MUELLERW MUELLI MUENALAN MUGENKEN MUGUET MUIR MUJI MUKUND MULL MUMIAW MUNCHIE MUNCUS MUNROER MUNSINGER MURATA MURATAYA MURPHY MURRAY MURUGAVEL MURUGU MUSED MUTANT MUTOGUZ MVERB MVICKERS MVIDNER MVOELKER MVORL MVR MVS MVUETS MVZ MWALKER MWALLRAF MWARD MWARDELL MWARREN MWARWICK MWB MWCZANECB MWDHK MWELLS MWENDL MWILSON MWITTE MWRENZ MWS MWX MWYER MWZ MXEY MXF MYAKININ MYB MYBOXEN MYDMNSN MYFINDER MYKL MYNEID MYRTSCHT MYSFITT MYSOCIETY MYSTERYTE MYSZ MYUJI MZEHRER MZIESCHA MZRAHMAN MZSANFORD NAB NABCIF NABESHIMA NABOJ NABU NACHBAUR NACHMORE NADIM NAGARAJU NAGATA NAGGIMAN NAIM NAITO NAKISA NAMOTCO NANARDON NANCHO NANDU NANIS NANOBIT NANTO NANZOU NAOKIURAI NAOYA NAOYAT NAOYUKI NAPAIM NAPHTALI NAR NARAZAKA NARBEY NARIPON NARITOSHI NARKED NARSE NARUSE NASTASSIA NATANAEL NATARAJ NATE NATEK NATELEWIS NATERAJJ NATG NATHANAEL NATHANIEL NATHANL NATON NATTFODD NATTIS NAUGHTON NAUNIDH NAVI NAVIN NAVNEET NAZAROV NAZGUL NAZRI NBAREIL NBEBOUT NBERTRAM NBR NBYRD NCHUCHE NCICB NCLARK NCLEATON NCOWHAM NCPL NCS NDAY NDECLARI NDEES NDHOUSE NEBBISH NEBULOUS NEDKONZ NEELSJ NEELY NEERAJPM NEERI NEGR NEIKON NEIL NEILB NEILBAWD NEILW NEJIGANE NEKOKAK NEKOYA NELHAGE NELIO NELO NELSONC NEMUX NEMWS NEO NEODON NEOLITE NEOPHENIX NEP NEPOS NERDVANA NERVENET NES NESTING NETCOMICS NETDNS NETKEN NETSNAKE NETSRAK NEVDULL NEVES NEVESENIN NEVETS NEWELLC NEWIO NEXEN NEXUSSIX NEYASOV NEZUMI NFALCO NFERRAGU NFERRAZ NFMNUNES NGEREN NGLEDHILL NGLENN NGRUNWALD NGS NHAINER NHARALE NHEINRIC NHORNE NIBZE NICEPERL NICK NICKB NICKBAKER NICKC NICKH NICKOLA NICKS NICO NICOLAE NICOLAW NICOMEN NICS NICSPA NICWOLFF NICZERO NIDHOEGGR NIDS NIELS NIELSEN NIERLEIN NIERMAN NIF NIGAM NIGE NIGELM NIGHTLORD NIHILIAD NIKC NIKHILTAL NIKIP NIKO NIKOLAO NIKOLAS NIKOLASCO NIKOLAY NIKOSV NIKRATIO NILSONSFJ NINE NINJA NINJAZ NINUZZO NIRAVE NIRIX NIRMA NI_S NISHANT NISHIGAYA NISHIKAWA NITO NITRAM NITRO NIX NIXEAGLE NIXUS NJENSEN NJH NJLEON NJO NJS NJWALKER NKH NKNOUF NKUITSE NLEWIS NLIDZ NLNETLABS NMAKAROV NMARCI NMARDEN NMARLEY NMCFARL NMELNICK NMMSEP NMOHORIC NMOLDAV NMONNET NMORRIS NMUELLER NNAG NNEUL NNMEN NNUNLEY NNUTTER NOAH NOBJAS NOBODY NOBULL NODINE NOELBK NOG NOGUCHI NOHA NOKY NOLA NOMAD NOMAS NONZERO NOOBIONT NORBU NORDAAKER NORDIC NORITAKA NOSEYNICK NOTBENH NOTDOCTOR NOUDARD NOVIKOV NOZZZZZ NPADGEN NPEREZ NPERNAS NPESKETT NPF NPLATONOV NPW NRBRT NREICHEN NRG NRH NRINEAU NRODRIGO NRR NSARDO NSHADOW NSHAFER NSHARROCK NSKILLING NSNAKE NSUSHKIN NTHIERY NTHIERYA NTSUTSUMI NUANCE NUBA NUCLON NUCWIN NUDDLEGG NUF NUFFIN NURAB NURK NUTSO NUTTALL NUXWIN 
NVBINDING NVL NVPAT NWALSH NWCLARK NWELLNHOF NWETTERS NWHITEF NWIGER NWILLIAMS NWINT NWRIGHT NWTOUR NXADM NXONEP NYAKNYAN NYARLA NYGEL OALDERS OARCHER OAXLIN OBOGREW OBRADOVIC OBULON OCEANLAB OCHEUNG OCROW OCTO ODDFELLOW ODDRUNE ODED ODEZWART ODIGITY ODO ODYNIEC OEMBRY OESI OESTERHOL OETIKER OEVANS OFER OFEYAIKON OFFERK OFUN OGASAWARA OGAWA OGB OGMONSTER OHA OHOLLMEN OHOUSHYAR OHREALLY OHRT OHWOW OINUME OISHI OKAMOTO OKAMUUU OKKO OKLAS OKLETSGO OKOLOBOV OKOPNIK OKROEGER OKTAL OLAF OLDER OLEG OLEGM OLEGSHER OLEKO OLEKSHY OLEO OLEPR OLESEN OLIBOU OLIMAUL OLIVER OLIVERM OLIVIERT OLKID OLLY OLOF OLORYN OLPA OMAC OMEGA OMKELLOGG OMNI ONDR ONEGRAY ONEIROS ONISHI ONLYJOB ONUR OOGLEK OPENSYS OPERA OPHIUCI OPI OPIATE OPITZ OPPARA ORAKIOPEG ORAOCI ORCHEW ORCLEV ORDOBAN ORENBK ORIEN ORION ORLANDOV ORTALO OSALAUN OSCAR OSFAMERON OSMAN OSMUNDOHW OSTEENBU OSTERMILL OSTRICH OSTROM OSTROVOK OTAKA OTAVIOF OTAYLOR OTISG OTTERBURN OTTERLEY OTTO OTY OUBIWANN OUEST OVAL OVENMAN OVERLAST OVERMARS OVID OWEN OWHITE OWL OXE OYAMA OZAWA P5P PAAL PACKI PACKRATS PACKY PACMAN PAI PAINA PAIXAOP PAJAS PAJOUT PAKALRA PALANT PALIK PALLOTRON PALPATE PALVARO PAMAG PANDICH PANGJ PANIKAL PANKY PANOLEX PANYU PAOLO PAPILLION PAPKALA PARADIS PARAGAPE PARANOID PARASEW PARCHER PARDUS PARI PARIZHE PARKER PARROTRE PARTICLE PARUSSEL PARV PARVESH PARVEZ PASHA PASKY PASP PASSANI PASSOS PASTI PATBAKER PATBHATK PATCH PATL PATLAN PATM PATRICK PATTERNER PAUAMMA PAUL PAULB PAULC PAULDOOM PAULG PAULJUDGE PAULLAM PAULM PAULO PAULOKING PAULPG PAULV PAULW PAUSE PAVEL PAVELH PAVERBUJ PAWA PAWAL PAWAPAWA PAWEL PAWELKROL PAYERLE PBAKER PBARLOW PBEDI PBERLIN PBOETTCH PBOIN PBOWEN PBOYD PBRYANT PBWOLF PCAMPBELL PCANARAN PCASTO PCHINEA PCHINES PCHRISTE PCIMPRICH PCMANTZ PCOLLINS PCSJJ PCZERKAS PDBKANGE PDCAWLEY PDEEGAN PDENIS PDEVLIN PDLMAN PDONELAN PDR PDURBIN PDURDEN PDW PDWARREN PEARCE PEARCEC PEARL PEARSE PEASE PECASTRO PECO PEDERST PEDLAR PEDWARDS PEGI PEICHMAN PEIRY PEISCH PEK PELAGIC PELAM PEM PEN PENFOLD PENGAS PENGUIN PENK PENKIA PENMA PENNYWELL PENTCHEFF PEOPLES PEPE PEPL PEREINAR PEREZ PERFI PERFORB PERFORIN PERFSONAR PERHENRIK PERIGRIN PERL4LIB PERLBOTIX PERLBOY PERLCHINA PERLDL PERLEONE PERLER PERLGIRL PERLISH PERLJ PERLORGPM PERLOVER PERLPIE PERLPROXY PERLR PERLSYNTX PERLWEVIL PERLXPERT PERNST PEROLI PERRAD PERRETTDL PERRIN PERSICOM PETAMEM PETDANCE PETE PETEF PETEK PETER PETERG PETERGAL PETERHI PETERKEEN PETERL PETERM PETERMCD PETERPG PETERW PETERWN PETERZEN PETRIS PETTIT PEVANS PEZI PFARR PFAUT PFEIFFER PFIG PFISCHER PFLEURY PFRANCEUS PFUSIK PGASKILL PGMART PGOLLUCCI PGOR PGPML PGRASSIE PGRECIAN PGRIFFIN PGRIMES PGRINBERG PGUEN PGUNN PGUZIS PHADLEY PHAM PHANTAS PHARVEY PHASEFX PHATWARES PHAYLON PHCOTE PHDYE PHENSON PHF PHIL PHILBURT PHILC PHILCROW PHILIP PHILIPA PHILIPDYE PHILIPH PHILIPM PHILIPP PHILIPS PHILKIME PHILLIPS PHILLTYLR PHILLUP PHILMI PHILOU PHINYX PHIO PHIPS PHISH PHIXCOCO PHLI PHOCUS PHOENIX PHOENIXL PHONYBONE PHOTO PHRED PIA PICZ PID PIED PIER PIERRE PIERS PIGMALION PIJLL PILHAMU PILOO PIN PING PINGALI PINGLEI PINKHASN PINYAN PIOTO PIOTR PIP PIPER PIPPIJN PIROLIX PIRZYK PISCO PITCHLESS PIXEL PIXIE PJA PJACKLAM PJACOBS PJAIN PJB PJC PJCJ PJD PJF PJFL PJIRAK PJJH PJNEWMAN PJOBSON PJONES PJORDAN PJS PKAEDING PKALUSKI PKAROUKIN PKEMMEREN PKENT PKKM PKNOWLES PKRUMINS PKUBANEK PKUMAR PKUTS PKWAN PLAMBERT PLANET PLANETSCP PLANK PLATO PLAVEN PLAZARD PLCGI PLDAP PLEGALL PLEO PLEXUS PLIAM PLICEASE PLISCO PLISTER PLOBBES PLONKA PLU PLURAL PLYTLE PMAGNUS PMAINS PMAK PMAKHOLM PMAREK PMC PMCERLEAN PMCG PMEVZEK PMH PMIC PMICHAUD 
PMICHAUX PMINT PMISON PMKANE PMOONEY PMOORE PMORCH PMQS PMURIAS PMV PNE PNENOV PNEWMAN PNTYPNTY PNU POCKET PODGURSV PODMASTER POHANL POLERA POLETTIX POLGAB POLLUX POLOCKY POM POMEROL POOJANKU POPEL POPIEL POPMXTM POPPEN POPZ PORPOISE PORRIDGE PORTA PORTERJE PORTON POSSUM POTYL POULHS POUM POWERMAN POZNICK PPANT PPATTHAR PPISAR PPUTNAM PRABHUCH PRAGMATIC PRAKASH PRAMAKERS PRANTL PRASAD PRASADJP PRASANNAK PRASHANT PRATH PRATP PRATYEKA PRATZLAFF PRAVEEN PRAVUS PRBRENAN PREACTION PREFECT PREMSHREE PRESSEL PRESTON PRIEZT PRILMEIE PRILUSKYJ PRIMEBASE PRINSTON PRIVI PRL PRLOVER PRO PROFANTER PROFERO PROPANOID PROTALIS PROUTE PROWELL PRRODRIG PRYAN PSANNES PSANTORO PSCHNELL PSCHOO PSCM PSCUST PSD PSEIBEL PSHANGOV PSHARPE PSILVA PSINCLAIR PSINNOTT PSIONIC PSISSON PSLESTANG PSME PSMEJKAL PSMIRNOV PSMITH PSPELTZ PSTADT PSTEWART PSYCHE PSYON PTALACKO PTANDLER PTC PTF PTHOMSEN PTILL PTIMMINS PTIMOF PTINSLEY PTIZOOM PTOMLI PTRS PTS PTULLY PUBNOOP PUCK PUCKERING PUERCOMAL PUFF PUJAR PUMBA PUNG PUNKISH PUNYTAN PURDY PURL PURP PUSHPARAJ PUSHTAEV PVANDE PVANDRY PVERD PVHP PVILETA PVIZELI PWALTON PWBENNETT PWD PWES PWIRDEMO PWO PYARDLEY PYH PYREDITOR PYTHIAN PYVERDON QANTINS QAPS QAZWART QIANGLI QIU QIUHW QJZHOU QMACRO QMIC QOS QSUN QUANACK QUATRIX QUATTRO QUBE QUEEN QUELCOM QUENTUS QUESTATEC QUEVLAR QUINN QUIZDOG QUONG RA RAAB RABUSPA RACEMIC RACHANA RACHEL RADAMS RADCLIFF RADEJONG RADEK RADER RADEV RADFORD RADIANT RADIUS RADKOV RADOS RADPOC RAF RAFAEL RAFALUCAS RAGA RAGEFOUR RAGHURAM RAGOFF RAGRO RAHUNT RAJARAM RAJKPRI RAKADALI RAKESH RALACR RALAMOSM RALF RALPH RAM RAMAN RAMBALDI RAMESH RAMESHG RAMGOPAL RAMKI RAMPAGEAI RAMSDEND RAMTEK RANA RANDERSON RANDIR RANDOMMAN RANDY RANDYM RANDYS RANGER RANGERER RANI RANN RANT RANTCZAK RANXEROX RAP RAPH RAPHAEL RAPHINK RAPMANKIN RAPTNOR RARBOX RASSIE RAT RATAXIS RATCLIFFE RATL RATLCC RATLIFFM RATTLER RATTR RAULMATEI RAVENHALL RAVENOR RAVN RAWAGNER RAWFLUX RAYG RAYGWINN RAYM RAYMOR RAYNERLUC RAYT RAZ RAZINF RAZOR RAZVANM RAZZOR RBA RBACH RBAUER RBDAVISON RBERJON RBINKL RBLACKWE RBLASCH RBO RBOUR RBOW RBOWES RBRAGG RBRASE RBRASIL RBROOM RBS RBUCKER RBUELS RBUSH RCABLE RCALEY RCAPUTO RCASHA RCH RCHANDRAM RCHIBOIS RCL RCLAMP RCONOVER RCORDER RCORVALAN RCREAGER RCROWDER RCS RCSEEGE RCTHOMAS RDB RDESHMUKH RDEYS RDF RDICE RDIETRICH RDJ RDO RDP RDRAKE RDROUSIES RDSMITHAZ RDUARTE REALHOLGI REATMON REBEL REBOS REBX RECKON RECLAW RECORDOND RECSKY RED REDEN REDICAPS REDOX REDS REDTREE REECE REEDFISH REESD REFLOG REFLUX REGGERS REGODON REHSACK REICHERT REID REIFI REITER REITMAN REITMEIE REITTER REIVEUNE RELAIS RELIANCE REMI RENAN RENEEB RENELSON RENNERT RENTOCRON RETOH REUSSERL REUVEN REV REVMISCHA REVOLET REVPREZ REVSBECH REYNOLDS REZAREZA RFAUST RFL RFLENS RFLORENCE RFOLEY RFRANKEL RFREIMUTH RGARCIA RGARTON RGC RGE RGEAKE RGEOFFREY RGIBSON RGIERSIG RGIFFORD RGILIAM RGLAUE RGRAFF RGRAHAM RGRAU RGRAVES RGRJR RGS RGVAL RHAEN RHANDOM RHANSON RHARMAN RHARRIS RHASE RHASH RHAXTON RHENSSEL RHESA RHETTBULL RHICKS RHIZO RHNELSON RHOELZ RHOFER RHONDA RHOOPER RHUNDT RHUNTER RHYTHM RHYTHMAN RIBAMAR RIBASUSHI RICH RICHARD RICHARDC RICHARDJ RICHDAWE RICHIH RICK RICKEST RICKFORD RICKI RICKM RICKYLEVI RIDDLE RIDINGER RIDWAN RIESM RIFFER RIGBYC RIIKI RIJ RIJA RIJK RIK RILEO RINTARO RIOS RIOSCB RIPENCC RIR RISCOSML RIVOTTI RIVY RIYWO RIZAPN RIZEN RIZWANK RJA RJAND RJB RJBS RJCHU RJE RJENKS RJESCHMI RJH RJHARMON RJITLA RJLEE RJOHNSON RJOHNST RJOOP RJP RJRAY RJROOS RJS RJSRI RJT RJUNG RJURNEY RJUYAL RKACKLEY RKADURA RKAPL RKAYE RKEEN RKHILL RKIES RKILGORE RKING RKINYON RKITOVER RKJISM RKOBES 
RKOCHERL RKOWEN RKRIEGER RKRIMEN RKS RLANDRUM RLATHROP RLAUGHLIN RLB RLBJR RLGARRIS RLINDLEY RLOCKE RLOOMANS RLUCAS RLZWART RMAH RMALAFAIA RMANGI RMBARKER RMCFARLA RMCKAY RMENNIE RMF RMGIROUX RMICHAUD RMILLER RMINNER RMITZ RMK RMMHG RMORE RMORIZ RMOSE RMUHLE RMURRI RMV RNAIMA RNAREN RNDLPH RNEWSHAM RNS ROAL ROAM ROASIORIK ROB ROBAU ROBBAT ROBBE ROBBIEBOW ROBCLARKE ROBD ROBERT ROBERTMAY ROBERTSD ROBF ROBIN ROBINBANK ROBINDAR ROBINROWE ROBINS ROBL ROBLEACH ROBM ROBMAC ROBMAN ROBN ROBOTICUS ROBURBAN ROBVANSON ROBWALKER ROBWOLF ROCKY RODIN RODRIGO ROEL ROGER ROGERHALL ROGERS ROHAN ROHANK ROHANPM ROHITM ROHLFSC ROKR ROLAND ROLFYONE ROLIVEIRA ROMAN ROMANF ROMDAV ROMENRG ROMERUN ROMICK ROMKEY ROMM RONALDWS RONAN RONGRW RONRIVERA RONS ROODE ROOK ROONEG ROOT ROOTKWOK ROOTLEVEL ROOTROUTE RORY ROS ROSCH ROSCIO ROSSEY ROSSI ROSULEK ROTH ROTKRAUT ROUGE ROUS ROUZIER ROWAA ROWDOG ROWLESD ROZALLIN ROZARY RPAGITSCH RPANMAN RPAUL RPEASE RPETRE RPETTETT RPHANEY RPICARD RPIKKARA RPKELLY RPLATEL RPLESSL RPOLZER RPORRES RPQS RPRICE RRA RRAWLINGS RRECK RRG RRINDELS RRIVAS RROTTER RRS RRWO RSAINZAJA RSANDBERG RSARAN RSAVAGE RSCHUPP RSD RSE RSHADOW RSHANKLIN RSI RSIDDALL RSIMOES RSMITH RSN RSOD RSOLIV RSPIER RSQUIERS RSRCHBOY RST RSTARR RSTRONG RSYMES RSZEMETI RSZENO RTFIREFLY RTHANGAM RTHOMPSON RTWARD RTXING RUBATTEL RUBINJ RUBLIND RUBYKAT RUDENKO RUDY RUEYCHENG RUFF RUITTENB RULIX RUMCHO RUNE RUR RURBAN RUSAKOV RUSCHER RUSEKD RUSSELLJ RUSSELLP RUSSOZ RUSST RUSSUM RUSTYP RUTSCHLE RUYK RUZ RUZAM RVA RVAIDH RVASICEK RVAZ RVDP RVGOKHALE RVIREDAY RVMINDSTP RVNCERR RVOSA RVR RVSUTHERL RWAHBY RWALKER RWAN RWENNER RWG RWKST RWMJ RWOODARD RWS RWSTAUNER RWTNORTON RYAN RYANC RYANGIES RYANHUNT RYANLOWE RYANM RYANPARR RYANROSE RYBSKEJ RYM RYOCHIN RYOLAND RZILAVEC SAA SAAIB SAB SABECK SABREN SABUJ SACAVILIA SACHINJSK SACIK SACKCLOTH SADAHIRO SADAMS SADASIVAN SADI SADIB SADRAK SAFRONOV SAGIVBA SAHAAB SAHIR SAIFUL SAILFREE SAILSDIG SAILTHRU SAINTMARK SAIPH SAIT SAJITH SAKOHT SAKRU SAL SALTBREEZ SALVA SAM SAMARZONE SAMB SAMBAKKI SAMBAZZI SAMCORL SAMHELLER SAMIRCURY SAMLOWRIE SAMO SAMOD SAMPO SAMSK SAMSON SAMTREGAR SAMUEL SAMUELL SAMV SAMY SANBEG SANDERSON SANFACE SANJIT SANJOGA SANKO SANSBACON SANTERI SANTEX SANTOS SANTU SAPA SAPAPO SAPER SAPERSKI SAR SARAVANAN SARAVASE SARENNER SARFY SARGE SARGIE SARTAK SASAKURE SASAO SASATA SASAZUKA SASHA SASIKALA SASYKES SATALINK SATANAIL SATHIYA SATKINS SATOH SATOSIN SATRAC SATZZ SAUBER SAULIUS SAUMITRA SAURABH SAV SAVA SAXJAZMAN SAYANO SAYMEDIA SBALA SBASHTON SBAZZI SBECK SBEE SBERKHOLZ SBERRY SBHARR SBLANTON SBM SBOLTE SBONDS SBOSS SBR SBURKE SBZ SCAIN SCANNELL SCATO SCESANO SCHAFFTER SCHALLEE SCHERBAUM SCHICKM SCHINDER SCHLAEGEL SCHLUMPF SCHMICKL SCHMUKER SCHNEE SCHNUECK SCHNYBZER SCHOEJO SCHOEN SCHOP SCHRIECK SCHROEER SCHRORG SCHUBIGER SCHUBOE SCHUETT SCHULTE SCHUMACK SCHUMANN SCHUSTER SCHWA SCHWENKE SCHWIGON SCIO SCL SCLOUSE SCM SCOLINET SCOOBY SCOOK SCOOPER SCOOTER SCOP SCOTT SCOTTA SCOTTHOM SCOTTLC SCOTTLEE SCOTTN SCOTTS SCOTTVR SCOTTW SCOTTY SCOTTZED SCOUNDREL SCR SCRAM SCRESTO SCRUBB SCS SCW SCYLDINGA SDAGUE SDAODEN SDAVE SDD SDECASTE SDEKEN SDEN SDERLE SDESEILLE SDETHER SDEZURIK SDIZ SDOWD SDOWIDEIT SDP SDPRICE SDRABBLE SDT SDUBS SEAGIRL SEAHEN SEANBO SEANM SEANO SEANQ SEB SEBA SEBASTIAN SEBDIAZ SEBNOW SECAYFORD SEDMONDS SEESTIETO SEGAN SEGV SEI SEIJ SEJWAL SEKIA SEKIMURA SELCOMB SELENA SELKOVJR SEMANTICO SEMENOVF SEMM SEMUELF SEN SENDU SENGER SENSEI SENTHIL SEOVISUAL SEPA SEPARATOR SEPEHR SEPH SEPP SEPT SER SERA SERG SERGEY SERGEYCHE SETHG SETHJ SETITESUK SEVA SEVEAS SEVVIE SEWI 
SEYHAN SEYN SEZAY SFAISON SFARRELL SFI SFINK SFLEX SFLINT SFOSSE SFRESCHI SFRYER SGEL SGERSTEN SGIKAS SGLADKOV SGMIANO SGODIN SGOELDNER SGOVIND SGP SGRAHAM SGRANTZ SHABBLE SHADEJON SHADINGER SHADOVV SHADOW SHADOWX SHAFTEK SHAILESH SHAMROCK SHANCOCK SHANLG SHANNON SHANO SHANTANU SHARADA SHARAN SHARDIWAL SHARGROVE SHARI SHARIFULN SHARKEY SHARPLESA SHARRIS SHARRISON SHARYANTO SHASHIVAJ SHASSAN SHAUN SHAUNGUTH SHAW SHAWNPW SHAWNVAN SHAY SHAYH SHCOREY SHE SHEENAMS SHELDRAKE SHELLING SHELLTUX SHEMYAK SHENJ SHENSAP SHERLOCK SHERM SHERWIN SHERWOOD SHERZODR SHEVEK SHGUN SHIAR SHIBAZAKI SHIBLON SHIBUYA SHIELDS SHIGETA SHIGIO SHIJIALEE SHIKONO SHILDREY SHIMAZU SHIMI SHINPEI SHINY SHIRAIWA SHIRIRU SHIV SHJ SHL SHLOMIF SHLOMOY SHMEM SHMORIMO SHOEPHONE SHOGE SHOHEIK SHOK SHOLDEN SHOMODJ SHONORIO SHOOP SHOORICK SHOOTNIX SHOT SHOTGUN SHOW SHR SHRADDHA SHRIRAM SHRS SHTATLAND SHUCAO SHUCHO SHUFF SHUGHES SHULL SHUMPHREY SHUQUE SHURD SHURIKO SHUTTON SHV SHY SHYAM SHYOKOU SIC SID SIDD SIDIBE SIEMERSN SIERRA SIFUKURT SIGIN SIGIZ SIGJE SIGURKO SIGZERO SILAS SILASMONK SILENCER SILI SILLYMOOS SILVAN SILVER SILVIOQ SIMATIKA SIMCOP SIMENSEN SIMEON SIMES SIMKIN SIMM SIMMONSA SIMNE SIMON SIMONC SIMONF SIMONFLK SIMONG SIMONHF SIMONIKI SIMONJ SIMONMCC SIMONP SIMONW SIMOTRONE SIMPLEX SIMRAN SINI SINISTER SIRMXE SIRROBERT SISYPHUS SITETECH SITS SIVY SIXAPART SIXTEASE SIZUR SJAVEED SJBAKER SJBURGES SJCARBON SJENKINS SJFD SJH SJHAWTIN SJM SJN SJO SJOHNSTON SJQUINNEY SJSMITH SJSOFT SJSZ SJZASADA SJZHANG SKA SKAMANSAM SKANE SKANGAS SKASHYAP SKATTOOR SKAUFMAN SKEENAN SKI SKIM SKIMO SKINGTON SKIPPY SKIT SKJM SKMACPHE SKNPP SKOLYCHEV SKONNO SKORIKOV SKREUZER SKROVAN SKUBOTA SKUD SKUNZ SKUPSY SKUZN SKX SKYEND SKYFIVE SKYHAWK SLAFF SLAFFAN SLANNING SLAVA SLAZAR SLEDGE SLENK SLEONOV SLEUNG SLICK SLILLEY SLINKY SLMATH SLOBBAN SLOBIN SLOWKOW SLREID SLSTAT SLU SMAK SMALHOTRA SMALLEYD SMALLFISH SMALLPOND SMALYSHEV SMAN SMANROSS SMART SMARTWORK SMARX SMASH SMAXIME SMCCAM SMCKAY SMCMURRAY SMCNABB SMCOLASH SMEE SMEISNER SMERTEN SMICHEL SMIFFY SMILLET SMIRNIOS SMITZ SMJAKOBS SMKZONE SMOLAREK SMONF SMORTON SMPB SMPETERS SMPILL SMRBRTS SMRZ SMS SMTHAMES SMUELLER SMULLIS SMURF SMURUGAN SMUSKIE SMY SMYLERS SNAFUFANS SNARKY SNCK SNEAK SNEEX SNEHASIS SNEMAROV SNEVINE SNEX SNGJOKEN SNI SNIPER SNKWATT SNOWFLY SNOWHARE SNOYBERG SNS SNSTANTON SNUMANO SOCK SOCYNO SODABREW SODASODA SODONNELL SOENKE SOERGEL SOFTDIA SOFTLORD SOKOL SOKOLOV SOLARANT SOLFOPRO SOLIVER SOLO SOLVE SOMIAN SOMMAR SOMMERB SOMU SOMUP SONAM SONDBERG SONDIEK SONGMU SONNEN SONNY SOOZ SOR SORBS SOREAR SORENSO SORHED SORO SORTIZ SOTONA SOTSA SOURCERER SOVA SOZIN SPACEBAT SPACEMAN SPACEY SPADIX SPADKINS SPAI SPALE SPANG SPANNMAN SPANNRING SPARKS SPARKY SPARSONS SPATOCS SPAULGER SPAZM SPDITNER SPECTRE SPECTRUM SPEEVES SPESHAK SPICEMAN SPIDB SPIDERBOY SPINER SPINEY SPIRITFIR SPIRITSKY SPJW SPLECK SPLICE SPLOTCHY SPM SPOINTER SPONHEIM SPOON SPOULSON SPP SPR SPRADEEP SPRAGST SPROCTOR SPROUT SPUD SPUDLY SPUDSOUP SPUG SPURKIS SQUALL SQUEEK SQUIRREL SQUISH SRAMKI SRBU SRC SRCHULO SRDAVIS SREAGLE SREEKANTH SREZIC SRGRN SRHOTON SRI SRIDHARG SRIEHM SRIHA SRINIK SRINIPERL SRINIVAS SRIOPEN SRIRAM SRIRAMM SRL SRMOODY SROHIT SROLLYSON SROMANOV SRPATT SRSHAH SRUSHE SRVANCE SRYLE SRYNOBIO SRZ SSAAMM SSANTY SSC SSCAFFIDI SSCANLON SSCHECK SSCHNEID SSCOTTO SSEI SSEVERIN SSHAH SSHAW SSIMMS SSIMON SSINYAGIN SSKLAR SSM SSNODGRA SSORICHE SSOTKA SSOURABH SSQQ SSWAM STABILE STAJICH STAKASHIO STANM STANS STARKY STAS STASH STATHY STATPERL STAUGAARD STBEY STC STCHER STE STEALTHY STEF STEFAANC STEFAN STEFANOS 
STEFANRAM STEFANT STEFFEN STEFFENW STEINSBO STEJ STELLAR STENNIE STEPANOV STEPHANB STEPHANJ STEPHEN STEPHENC STEPHENCA STEPHENLB STEPHWEN STERLING STERLPERL STEVAN STEVE STEVEAU STEVEB STEVEC STEVECO STEVECOOK STEVEGT STEVEHA STEVEL STEVEMA STEVENC STEVENH STEVENL STEVENSL STEVENV STEVIEO STFN STHEBERT STHOMAS STICKPIN STIG STIGMATA STIGPJE STIGTSP STILLWELL STINKY STIQS STKEVIN STLACY STLLE STOCKS STODGHIL STOLKIN STONE STORULIS STOTZ STOUGARD STOVENOUR STRADER STRAITJAC STRANGE STRAT STRCEK STRIB STRO STRUAN STRUANB STRYTOAST STRZELEC STSANDER STSAREV STSI STU STUARTC STUB STUCOOPER STUDIOB STUIFZAND STULTS STURM STWIGGER STYPNOW SUAVEANT SUBBU SUBHRADIP SUBSTACK SUDIP SUE SUGAR SUGMAK SUGOIK SUGYAN SUHANOV SUHARA SUJAI SUKRIA SULFERIC SULLETF SULLIVAN SULLR SUMMER SUMPFRALL SUMUS SUNCPAN SUNGO SUNILS SUNNAH SUNNAVY SUNTONG SUPAPLEX SUPCIK SUPER SUPERASN SUPERL SURCOUF SURESHG SURESHRAM SURYA SUTCH SUWER SUZUKI SVANZOEST SVED SVEN SVENBAUM SVENH SVENTECH SVERBERN SVIEBROCK SVINTO SVISTUNOV SVITENTI SVOELKEL SVV SWABNER SWAJ SWALTERS SWAMPFOX SWAMPY SWANSUN SWARHURST SWAROOP SWARTIK SWEETBLUD SWEN SWESTRUP SWETH SWFLINT SWHIRSCH SWHITAKER SWILCOX SWILLIAM SWILLS SWINED SWMCC SWMCD SWOLF SWORDSMAN SWSLADE SXW SYAGI SYAMAL SYAZWAN SYBER SYEDMADAR SYM SYMKAT SYMX SYNCLOVER SYNEDRA SYNTAX SYOHEX SYP SYSDEF SYSDEV SYSEK SYSMON SYXANASH SZABGAB SZARATE SZBALINT SZECK TABAC TABAYASHI TABRISNET TAC TACOTOM TADAM TADMC TADOKORO TAFFY TAG TAGESTAD TAGOMORIS TAILRIVER TAIY TAKADONET TAKASH TAKASHI TAKAYUKI TAKEFUMI TAKEO TAKERU TAKESAKO TAKESHI TAKESHIGE TAKESHUN TAKIMO TAKKYUN TAKUJI TAKUMI TALE TALEX TALEXB TALKASAB TALSMACA TAMAKOTO TAMASHIRO TAMBASCOT TANABE TANAMI TANGENT TANIGUCHI TANIMOTO TANTALOR TAO TAPASRB TAPPER TARAO TAREKA TARO TAROK TARSKI TARXVF TARYK TASMIN TATE TATENO TATOUTE TATTOOJP TAULMARIL TAUNGER TAVIN TAWAN TAYERS TAYLOR TBC TBEKEL TBESTE TBOLIOLI TBONE TBONECA TBOUTELL TBR TBRADFUTE TBRAUN TBROWDER TBURGESS TBURKE TBUSCH TCAINE TCHATZI TCHINCHOW TCOGGINS TCOHEN TCOMM TCONST TCOX TCP TCURTIS TDANE TDEITRICH TDESANTIS TDL TDRILLICH TDRUGEON TDS TEAK TEALSEAL TEAM TEAMBOB TECH TECHCODE TECHIE TECHIVIST TECHNIK TED TEDDY TEDDYBER TEDHOPP TEDK TEDKAT TEEJAY TEHSI TEKE TEKFU TELIN TELKINS TELS TEMA TEMPALTE TEMPIRE TEMPLER TENGU TENGULRE TEODOR TEONGKEE TEPES TEQUETER TERCEIRO TERDOEST TERENCEMO TERHECHTE TERJE TERRY TERSKINE TESCHI TETRAGON TEUN TEVERETT TEWK TEX TEXMEC TFHEEN TFJELL TFM TFOUCART TFPBL TFRAYNER TGAETZ TGC TGIMMEL TGJ TGROSE TGUMMELS THALAKAN THALIANA THALJEF THAMUS THANGA THANSON THARDISON THARSCH THARTMAN THAVELICK THB THECRAMPS THEDEVIL THEFISHY THEILING THEOK THEPLER THEREK THEREVMJ THHAMON THILO THINC THINCH THIRTYSVN THIRUMAL THISWALLZ THOC THOGEE THOLEN THOM THOMAS THOMSON THOR THORGIS THORIE THORIKAWA THORSTT THOSPEL THOTH THOWE THOWELL THPFFT THUERRSCH THUNDERA THUNDERS THW TIAGOSOUS TIAO TIBBS TIBI TIEDEMANN TIGERPERL TIGRIS TILFORDC TILLY TIM TIMA TIMB TIMBRODY TIMBU TIMELLO TIMJIA TIMM TIMMY TIMOS TIMPOTTER TIMPX TINCHO TINGTING TINITA TISIMPSON TJAKE TJBYRNE TJC TJENKINS TJENNESS TJHLADISH TJMATHER TJMC TJOHNSON TJORGEN TJORMOLA TJRANDALL TJUGO TJWALKER TJYANG TKEEFER TKEMMER TKHARRIS TKISHEL TKLEIN TKML TKP TKR TKREMER TKURITA TLBDK TLILLEY TLINDEN TLO TLOO TLOUSKY TLOWERY TLP TMA TMAC TMAEK TMAESAKA TMANNERM TMATERNA TMCCUBBIN TMCGLYNN TMCMEEKI TMERRITT TMETRO TMHALL TMHARISH TMOERTEL TMONROE TMR TMTM TMUELLER TMULEVICH TMURRAY TNAGA TNGUYEN TNICKEL TNISH TNISHINO TNY TOAMANO TOBEYA TOBI TOBIAS TOBIASLY TOBIASTAR TOBIWAN TOBIX TOBY TOBYINK TOCIYUKI 
TOD TODA TODBOT TODD TODDAG TODDR TOJO TOKAR TOKAREV TOKUBASS TOKUHIROM TOKUMEI TOLIKL TOM TOMA TOMASZ TOMASZF TOMB TOMC TOMDAAE TOMDEE TOMELIAZ TOMFA TOMFAHLE TOMFEINER TOMH TOMHRR TOMHUGHES TOMHUKINS TOMI TOMITA TOMK TOMMIE TOMMY TOMO TOMPE TOMPIE TOMSON TOMSTD TOMTE TOMTEN TOMUSCHAT TOMYHERO TOMYK TOMZO TONIHER TONKIN TONNERRE TONODERA TONVOON TONYAVRIL TONYC TONYO TOONA TOPHER TOPIA TOREAU TORG TORRANCEW TORU TOSHIFJW TOSHIOITO TOST TOSTI TOWLIEBAN TOY TOYVO TPABA TPARVIAI TPEDERSE TPG TPRESTA TPROCTER TQISJIM TRACKONE TRANSWEBT TRAVAIL TRAVIS TRAVISB TRBC TRCJR TREEKI TRENDELS TRENFRO TREVELYAN TREVORJ TREY TRIAS TRICK TRICKY TRIDDLE TRIEMER TRIPIE TRIZEN TRIZOR TRLORENZ TROCKIJ TROHAU TRON TRONDMM TROTSKEY TROXEL TROYP TRSKI TRUELSEN TRUESDALE TRUEY TRUSTCOM TRWWW TRYM TRZ TSAFSERGE TSANDERS TSANGTC TSBRIGGS TSCANLAN TSCH TSCHULZ TSCHWAND TSHDW TSHINNIC TSHOENFE TSIBLEY TSINGH TSIRKIN TSKHO TSKIRVIN TSL TSMPERL TSPIN TSS TSTANLEY TSTANTON TSTAPFF TSTOCK TSUCCHI TSUKAMOTO TSUNODA TSV TSWAN TTAR TTG TTIGER TTKCIAR TTOD TTUECHLER TTUL TTY TUCKERM TUCKERWR TUCO TULSOFT TUMORIM TUNNUZ TURNERA TURNERJW TURNERMM TURNSTEP TURUGINA TUSHAR TUTOLMIN TVANCURA TVCCONG TVI TVIERLING TVIGNAUD TWEBER TWEGNER TWENRICH TWERNER TWESTWIND TWH TWIBBLER TWID TWILDE TWILLERT TWINKLE TWITTEK TWIX TWRIGHT TWYLIE TXIWASAKI TYANO TYEGAH TYEMQ TYLDUM TYLER TYOSHII TYPESTER TYPO TYRODEN TYRU TZADIKV UARUN UASI UCHIKO UDASSIN UDHAY UEW UFK UGANSERT UGEH UGEN UGEXE UGUTOMO UHANDA UHERBST UJIN UKAUTZ UKOLOFF UKSZA ULAS ULI ULIANOV ULIZAMA ULLAPRA ULPFR ULTRADM UMEMOTO UMIYOSH UMVUE UNCLE UNCLEANDY UNCLELVIS UNDEF UNDERMINE UNDX UNERA UNICOLET UNIEJO UNIFIEDSW UNISOLVE UNIXNOMAD UNIXTOWN UNKNOWNQ UNLEARNED UNOBE UNRTST UNSAVED URI URKLE URSUS USAGIJER USEDHONDA USEOPENID USMANOV USPROCESS USSJOIN USTIANSKY USUALOMA UTAANI UUDEREK UVOELKER UWEH UWES VAD VADIM VADIML VADIMT VADZ VAGNERR VAHAKOTA VAL VALDEZ VALERIE VALIANP VALSALAM VALY VAMSI VAMSIK VANAMBURG VANIX VANY VARUNK VASEK VASEKD VASILUS VASUNDHAR VAUTRIN VAXMAN VAYDE VBALUSU VBAR VBFG VCRINI VDAMIAN VDANJEAN VDG VDV VECTOR VEDGE VEEP VELJKO VELTZER VEN VENKAACT VENOMOUS VENTRAIP VENTZ VERESC VEROLOM VERRENS VESELOSKY VETLER VFOLEY VGIRISH VGOD VHOLER VIALVES VICHOU VICKBOCH VICTOR VICTORF VICTORI VICTORP VIDUL VIDUR VIFO VIGITH VIGREP VIKAS VIKLUND VIKRAMVI VIKTORK VILA VILEDA VILJUN VINAYSKI VINCENT VINIAN VINSWORLD VIORELS VIOSCA VIPERCODE VIPINTM VIPUL VIPULG VITA VITOCO VITROTH VIVEK VIY VIZDOM VKH VKHERA VKON VKOZLOV VKRAMSKIH VLAD VLADB VLADISLAV VLADO VLEERTJE VLYON VMAN VMARCHEN VMIKULIC VMOISEEV VMORAL VMS VMSML VMSPB VNAIPAUL VODKAU VOEGELAS VOICET VOISCHEV VOJ VOLKE VORSPRUNG VORTEX VOVA VOVKASM VPARSEVAL VPIT VPLA VPOROSHIN VPORTON VRK VROBIN VROOM VRUANO VRUOTTI VSANCHEZ VSARKISS VSEGO VSESPB VSPADER VSSANOJ VSTOICAN VTI VTRONE VULCAN VVD VVELOX VVISWAN VVU VXX WAC WADE WADG WAGNER WAGNERCH WAHYUDI WAIDER WAKAPON WALDI WALKER WALLISDS WALLMARI WALSHAM WALSHTP WALTER WALTERH WALTMAN WAMBOLD WANG WANKER WARD WARDY WARMING WARREN WARRENHUA WARRENM WARRINGD WARTHURT WASX WATA WATANABE WATCHDOG WATSON WAYLAND WAYNE WAYNEDAV WAYNEM WAZOOX WAZZUTEKE WBAILEY WBAKER WBASSON WBNIV WBUNTINE WBXV WBY WCATLAN WCN WCOTTAY WCW WDH WDL WDLOWRY WDOBLER WEAV WEBRENE WEBSTER WEBSTERIS WEBY WEHR WEICHONG WEINBERG WEINERK WEIQK WELTYJJ WENAMUN WENJIE WENZ WERDNUM WERRIE WESAL WESJDJ WESLEYYE WESM WESTRATE WETERS WEZ WFRERICHS WGDAVIS WHAMMO WHITCODE WHITEB WHITEHSE WHITEOWL WHITEPAGE WHITFIELD WHITNEY WHIZDOG WHOELSE WHOM WHOPPIX WHUMANN WHYNOT WHYTEWOLF WICKEDX 
WICKLINE WIGGLY WIGODA WIGS WIHAA WILCO WILCOXON WILDCHILD WILDGOOSE WILIAM WILIV WILL WILLBELL WILLEM WILLERT WILLIAM WILLIAMSG WILLIS WILLIW WILLMOJG WILLP WILLRUBEL WILLWOLF WILSON WILSOND WILSONPM WIMDH WIML WIMV WIN32 WINDLEY WINFINIT WINGMAN WINGNUT WINKO WINSTONS WINTER WINTRU WIRELESSD WISSER WITTEN WITTHAUT WITTROCK WIZARD WIZEAZZ WJBLACK WKEENAN WKI WKNIGHT WKSHARE WLVERINE WMARQ WMCKEE WMOHCHI WMORGAN WMORRIS WMSZELIGA WNEESSEN WNGDN WNODOM WOADEV WOHL WOHLFARJ WOLDRICH WOLF WOLFEN WOLFMAN WOLFSAGE WOLFTOWER WOLLMERS WOLS WOLVERIAN WONKO WOODY WOOSTER WOREMACX WORENKD WORLDMIND WORM WORR WORRALL WOSE WOWASURIN WPDESIGN WPMCCORMI WPMOORE WPS WRATH WRATY WREARDON WREIS WREN WRILEY WROG WROSS WRUPPERT WRW WSANNIS WSCHL WSCOT WSDOOKADR WSHELDAHL WSMITH WSNYDER WST WSTEPHENS WSYVINSKI WTGEE WTOMPSON WULEE WUMING WUMPUS WUNSCH WURBLZAP WURST WVALDAR WVARGAS WVDB WWA WWALKER WWILLIS WWOLF WWORKS WYANT WYCHUNG WYLLIE WYRD WYTAN XACHEN XAICRON XANDELA XANDRE XANNI XANT XANTUS XAOC XAOINC XAOS XAV XAVIER XAXXON XCALBET XCEZX XDR XELWARTO XENO XENOCIDE XENU XEONTIME XERHINO XERN XERXES XESTIA XETHAIR XFIRE XGUNNERX XIAODIAN XIAOLAN XIAOMO XING XINMING XINZHENG XIONG XIONGYING XJIANG XLAT XMAN XMATH XMLML XOMINA XOR XOUBIR XPANEL XPIX XROBAU XSAWYERX XTYPE XUDAYE XUERON XUJUNAINI XULUN XWOLF XXPMRIVE XYF XYLOS XYZZY YAAKOV YAIR YAITSKOV YAK YAKEX YAKUBORI YAKWOO YALE YALH YAMAKURA YAMAMOTO YAMATO YANA YANBE YANICK YANKAITUO YANMAR YANN YANNK YANOTHER YAPPO YARBER YARGEVAD YARON YARROW YASHA YASU YASUHIRO YATH YAXU YAYOHEI YBLUSSEAU YDNA YDZHANGPL YEGG YEHEZKIEL YENYA YEWEI YEWENBIN YFLIU YFZY YHA YHHUANG YHPENG YIBE YIFANG YINGLIU YINJIEH YISHINO YISOHOO YISUDONG YKAR YKO YKOSTYLEV YLU YNONP YNOTMGMA YOBERT YOCC YODA YOGESHATE YOHAMED YOKKUNS YONGBIN YONGLI YOREEK YOREN YORHEL YORICK YORKWU YOSEFM YOSHIAKI YOSHIDA YOSHIMI YOSHIOKA YOSTY YOU YOUAM YOUD YOUNG YOURABI YOWCOW YPANDIT YPERL YRCHEN YRESNOB YSAS YSASAKI YSAWANT YSHIBATA YSHTIL YSIMONX YSTH YSUZUKI YSYROTA YTAKEUCH YTURTLE YUDUBAN YUKI YUKINOBU YUKIO YULESHOW YUMATSUMO YUMPY YUNCHING YUNFANG YUPUG YURAN YURY YUSUKE YUSUKEBE YUTA YUUKI YUYAW YUZI YVDHOVE YVES YVESAGO YVESP YWANGPERL YWATASE YXES ZABA ZABEL ZACKSE ZACS ZADYREE ZAEBST ZAF ZAG ZAHORI ZAKAME ZAKZ ZANDET ZANGHIHU ZAPHAR ZAPHER ZARABOZO ZARDOZ ZARQUON ZAXO ZBLAIR ZBODI ZBS ZBUH ZBY ZBYS ZDK ZDM ZEBAZ ZECHIM ZED ZEEK ZEESHAN ZEFONSECA ZEFRAM ZELT ZEMAN ZENIN ZENOG ZENSPIDER ZENTLIM ZENTOOO ZEPHG ZERHASH ZERO ZEROALTI ZERODEUX ZERODOGG ZEROHP ZEROLIU ZETA ZEV ZEYA ZGH ZGRIM ZHANGBO ZHANGHJ ZHANGL ZHANGXIN ZHDA ZHIRSCH ZHLONG ZHOUBO ZHOUXIN ZHR ZHUANGLI ZHUZHU ZIBRI ZIEFLE ZIGDON ZIGOROU ZIGUZAGU ZIM ZIMAGE ZIMMERR ZINCDEV ZIPPO ZIPPY ZITOLOCO ZIYA ZJFDLUT ZJT ZKESSIN ZLIPTON ZMAN ZMIJ ZMUGHAL ZMYRGEL ZOCCAV ZOFFIX ZOGZAPPER ZOHAER ZOMBITHRD ZOOLEIKA ZOOM ZOOT ZOOVY ZORDRAK ZOUL ZOWERS ZOWIE ZOZO ZPMORGAN ZRUSILLA ZSC ZSTEVENS ZTANG ZTK ZTURK ZULF ZUMMO ZUQIF ZURAWSKI ZURBORG ZWON ZZCGUMK ZZZ
'''

from metasyntactic.base import parse_data
from random import choice, shuffle
from six import iteritems


data = parse_data(DATA)


def default():
    try:
        if 'default' in data:
            return data['default'][0]
    except (KeyError, IndexError):
        pass
    return 'en'


def all():
    acc = set()
    for category, names in iteritems(data['names']):
        if names:
            acc |= names
    return acc


def names(category=None):
    if not category:
        category = default()
    if category == ':all':
        return list(all())
    category = category.replace('/', ' ')
    return list(data['names'][category])


def random(n=1, category=None):
    got = names(category)
    if got:
        shuffle(got)
        if n == 1:
            return choice(got)
        return got[:n]


def categories():
    return set(data['names'])
""" Programming for linguists Implementation of the class Triangle """ from math import sqrt from shapes.shape import Shape class Triangle(Shape): """ A class for triangles """ def __init__(self, uid: int, first_edge: int, second_edge: int, third_edge: int): super().__init__(uid) self.first_edge = first_edge self.second_edge = second_edge self.third_edge = third_edge def get_area(self): """ Returns the area of a triangle :return int: the area of a triangle """ semi_per = self.get_perimeter() / 2 return sqrt(semi_per * (semi_per - self.first_edge) * (semi_per - self.second_edge) * (semi_per - self.third_edge)) def get_perimeter(self): """ Returns the perimeter of a triangle :return int: the perimeter of a triangle """ return self.first_edge + self.second_edge + self.third_edge def get_altitude(self): """ Returns the largest altitude of a triangle :return int: the largest altitude of a triangle """ return 2 * self.get_area() / min(self.first_edge, self.second_edge, self.third_edge)
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from numpy.testing import assert_allclose
import astropy.units as u
from gammapy.irf import Background2D, Background3D
from gammapy.utils.testing import requires_data


@pytest.fixture(scope="session")
def bkg_3d():
    """Example with simple values to test evaluate"""
    energy = [0.1, 10, 1000] * u.TeV
    fov_lon = [0, 1, 2, 3] * u.deg
    fov_lat = [0, 1, 2, 3] * u.deg
    data = np.ones((2, 3, 3)) * u.Unit("s-1 MeV-1 sr-1")
    # Axis order is (energy, fov_lon, fov_lat)
    # data.value[1, 0, 0] = 1
    data.value[1, 1, 1] = 100
    return Background3D(
        energy_lo=energy[:-1],
        energy_hi=energy[1:],
        fov_lon_lo=fov_lon[:-1],
        fov_lon_hi=fov_lon[1:],
        fov_lat_lo=fov_lat[:-1],
        fov_lat_hi=fov_lat[1:],
        data=data,
    )


@requires_data()
def test_background_3d_basics(bkg_3d):
    assert "NDDataArray summary info" in str(bkg_3d.data)

    axis = bkg_3d.data.axis("energy")
    assert axis.nbin == 2
    assert axis.unit == "TeV"

    axis = bkg_3d.data.axis("fov_lon")
    assert axis.nbin == 3
    assert axis.unit == "deg"

    axis = bkg_3d.data.axis("fov_lat")
    assert axis.nbin == 3
    assert axis.unit == "deg"

    data = bkg_3d.data.data
    assert data.shape == (2, 3, 3)
    assert data.unit == "s-1 MeV-1 sr-1"

    bkg_2d = bkg_3d.to_2d()
    assert bkg_2d.data.data.shape == (2, 3)


def test_background_3d_read_write(tmp_path, bkg_3d):
    bkg_3d.to_fits().writeto(tmp_path / "bkg3d.fits")
    bkg_3d_2 = Background3D.read(tmp_path / "bkg3d.fits")

    axis = bkg_3d_2.data.axis("energy")
    assert axis.nbin == 2
    assert axis.unit == "TeV"

    axis = bkg_3d_2.data.axis("fov_lon")
    assert axis.nbin == 3
    assert axis.unit == "deg"

    axis = bkg_3d_2.data.axis("fov_lat")
    assert axis.nbin == 3
    assert axis.unit == "deg"

    data = bkg_3d_2.data.data
    assert data.shape == (2, 3, 3)
    assert data.unit == "s-1 MeV-1 sr-1"


def test_background_3d_evaluate(bkg_3d):
    # Evaluate at nodes where we put a non-zero value
    res = bkg_3d.evaluate(
        fov_lon=[0.5, 1.5] * u.deg,
        fov_lat=[0.5, 1.5] * u.deg,
        energy_reco=[100, 100] * u.TeV,
    )
    assert_allclose(res.value, [1, 100])
    assert res.shape == (2,)
    assert res.unit == "s-1 MeV-1 sr-1"

    res = bkg_3d.evaluate(
        fov_lon=[1, 0.5] * u.deg,
        fov_lat=[1, 0.5] * u.deg,
        energy_reco=[100, 100] * u.TeV,
    )
    assert_allclose(res.value, [3.162278, 1], rtol=1e-5)

    res = bkg_3d.evaluate(
        fov_lon=[[1, 0.5], [1, 0.5]] * u.deg,
        fov_lat=[[1, 0.5], [1, 0.5]] * u.deg,
        energy_reco=[[1, 1], [100, 100]] * u.TeV,
    )
    assert_allclose(res.value, [[1, 1], [3.162278, 1]], rtol=1e-5)
    assert res.shape == (2, 2)


def test_background_3d_integrate(bkg_3d):
    # Example has bkg rate = 100 s-1 MeV-1 sr-1 at this node:
    # fov_lon=1.5 deg, fov_lat=1.5 deg, energy=100 TeV
    rate = bkg_3d.evaluate_integrate(
        fov_lon=[1.5, 1.5] * u.deg,
        fov_lat=[1.5, 1.5] * u.deg,
        energy_reco=[100, 100 + 2e-6] * u.TeV,
    )
    assert rate.shape == (1,)
    # Expect approximately `rate * de`
    # with `rate = 100 s-1 sr-1 MeV-1` and `de = 2 MeV`
    assert_allclose(rate.to("s-1 sr-1").value, 200, rtol=1e-5)

    rate = bkg_3d.evaluate_integrate(
        fov_lon=0.5 * u.deg, fov_lat=0.5 * u.deg, energy_reco=[1, 100] * u.TeV
    )
    assert_allclose(rate.to("s-1 sr-1").value, 99000000)

    rate = bkg_3d.evaluate_integrate(
        fov_lon=[[1, 0.5], [1, 0.5]] * u.deg,
        fov_lat=[[1, 1], [0.5, 0.5]] * u.deg,
        energy_reco=[[1, 1], [100, 100]] * u.TeV,
    )
    assert rate.shape == (1, 2)
    assert_allclose(rate.to("s-1 sr-1").value, [[99000000.0, 99000000.0]], rtol=1e-5)


@pytest.fixture(scope="session")
def bkg_2d():
    """A simple Background2D test case"""
    energy = [0.1, 10, 1000] * u.TeV
    offset = [0, 1, 2, 3] * u.deg
    data = np.zeros((2, 3)) * u.Unit("s-1 MeV-1 sr-1")
    data.value[1, 0] = 2
    data.value[1, 1] = 4
    return Background2D(
        energy_lo=energy[:-1],
        energy_hi=energy[1:],
        offset_lo=offset[:-1],
        offset_hi=offset[1:],
        data=data,
    )


def test_background_2d_evaluate(bkg_2d):
    # TODO: the test cases here can probably be improved a bit
    # There's some redundancy, and no case exactly at a node in energy

    # Evaluate at log center between nodes in energy
    res = bkg_2d.evaluate(
        fov_lon=[1, 0.5] * u.deg, fov_lat=0 * u.deg, energy_reco=[1, 1] * u.TeV
    )
    assert_allclose(res.value, [0, 0])
    assert res.shape == (2,)
    assert res.unit == "s-1 MeV-1 sr-1"

    res = bkg_2d.evaluate(
        fov_lon=[1, 0.5] * u.deg, fov_lat=0 * u.deg, energy_reco=[100, 100] * u.TeV
    )
    assert_allclose(res.value, [3, 2])

    res = bkg_2d.evaluate(
        fov_lon=[[1, 0.5], [1, 0.5]] * u.deg,
        fov_lat=0 * u.deg,
        energy_reco=[[1, 1], [100, 100]] * u.TeV,
    )
    assert_allclose(res.value, [[0, 0], [3, 2]])
    assert res.shape == (2, 2)

    res = bkg_2d.evaluate(
        fov_lon=[1, 1] * u.deg, fov_lat=0 * u.deg, energy_reco=[1, 100] * u.TeV
    )
    assert_allclose(res.value, [0, 3])
    assert res.shape == (2,)


def test_background_2d_read_write(tmp_path, bkg_2d):
    bkg_2d.to_fits().writeto(tmp_path / "tmp.fits")
    bkg_2d_2 = Background2D.read(tmp_path / "tmp.fits")

    axis = bkg_2d_2.data.axis("energy")
    assert axis.nbin == 2
    assert axis.unit == "TeV"

    axis = bkg_2d_2.data.axis("offset")
    assert axis.nbin == 3
    assert axis.unit == "deg"

    data = bkg_2d_2.data.data
    assert data.shape == (2, 3)
    assert data.unit == "s-1 MeV-1 sr-1"


def test_background_2d_integrate(bkg_2d):
    # TODO: change test case to something better (with known answer)
    # e.g. constant spectrum or power-law.
    rate = bkg_2d.evaluate_integrate(
        fov_lon=[1, 0.5] * u.deg, fov_lat=[0, 0] * u.deg, energy_reco=[0.1, 0.5] * u.TeV
    )

    assert rate.shape == (1,)
    assert_allclose(rate.to("s-1 sr-1").value[0], [0, 0])

    rate = bkg_2d.evaluate_integrate(
        fov_lon=[1, 0.5] * u.deg, fov_lat=[0, 0] * u.deg, energy_reco=[1, 100] * u.TeV
    )
    assert_allclose(rate.to("s-1 sr-1").value, 0)

    rate = bkg_2d.evaluate_integrate(
        fov_lon=[[1, 0.5], [1, 0.5]] * u.deg,
        fov_lat=0 * u.deg,
        energy_reco=[1, 100] * u.TeV,
    )
    assert rate.shape == (1, 2)
    assert_allclose(rate.value, [[0, 198]])
def sum(arr):
    # Note: this shadows the built-in sum(); kept to match the original example.
    if not arr:  # guard against an empty list, which the original would crash on
        return 0
    if len(arr) == 1:
        return arr[0]
    return arr[0] + sum(arr[1:])


print(sum([2, 2, 4, 6]))  # 14
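# How the recursion above unwinds for the sample call (illustration only):
#
#   sum([2, 2, 4, 6]) = 2 + sum([2, 4, 6])
#                     = 2 + (2 + sum([4, 6]))
#                     = 2 + (2 + (4 + sum([6])))
#                     = 2 + (2 + (4 + 6)) = 14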
#!/usr/bin/env python

# The MIT License (MIT)
#
# Copyright (c) 2016 Andrew Savonichev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import os
import unittest
from subprocess import check_call

import memprof
from memprof import read_alloc_stream

tests_dir = "."


def run_memprof_exe(exe):
    check_call([exe])


class TestBasic(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.test_exe = os.path.join(tests_dir, "test_basic")
        run_memprof_exe(cls.test_exe)

        cls.alloc_file = "test_basic.alloc"
        with open(cls.alloc_file, "rb") as f:
            cls.alloc_stream = [entry for entry in read_alloc_stream(f)]

    @classmethod
    def tearDownClass(cls):
        os.remove(cls.alloc_file)
        os.remove(cls.alloc_file + "_ref")

    def test_id_unique(self):
        ids = {}
        for entry in TestBasic.alloc_stream:
            ids[entry.id] = True
        self.assertEqual(len(ids.keys()), len(TestBasic.alloc_stream))

    def test_memory_ptr(self):
        with open("test_basic.alloc_ref") as f:
            def ref_memptrs():
                for line in f.readlines():
                    yield int(line.split()[0], 16)

            for entry, memptr in zip(TestBasic.alloc_stream, ref_memptrs()):
                self.assertEqual(memptr, entry.memptr)

    def test_alloc_size(self):
        with open("test_basic.alloc_ref") as f:
            def ref_alloc_sizes():
                for line in f.readlines():
                    yield int(line.split()[1])

            for entry, size in zip(TestBasic.alloc_stream, ref_alloc_sizes()):
                self.assertEqual(size, entry.size)

    def test_alloc_types(self):
        ref_types = ["alloc"] * 3 + ["free"] * 3
        self.assertEqual(len(ref_types), len(TestBasic.alloc_stream))
        for entry, ref in zip(TestBasic.alloc_stream, ref_types):
            self.assertEqual(ref, entry.ty)

    def test_symbol_resolve(self):
        ref_stacks = [
            ["a", "main"],
            ["a", "b", "main"],
            ["a", "b", "c", "main"],
            ["cleanup", "main"],
            ["cleanup", "main"],
            ["cleanup", "main"]
        ]

        def resolve_name(frame):
            symbol = memprof.Symbol(frame.rel_addr, TestBasic.test_exe)
            return symbol.name

        def stacks_stream():
            for entry in TestBasic.alloc_stream:
                yield [resolve_name(f) for f in entry.backtrace]

        got_stacks = list(stacks_stream())
        self.assertEqual(len(ref_stacks), len(got_stacks))

        for got, ref in zip(got_stacks, ref_stacks):
            self.assertEqual(got[:len(ref)], ref)


if __name__ == '__main__':
    unittest.main()
"""Common imports for generated deploymentmanager client library.""" # pylint:disable=wildcard-import import pkgutil from googlecloudapis.apitools.base.py import * from googlecloudapis.deploymentmanager.v2beta1.deploymentmanager_v2beta1_client import * from googlecloudapis.deploymentmanager.v2beta1.deploymentmanager_v2beta1_messages import * __path__ = pkgutil.extend_path(__path__, __name__)
from typing import Optional

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from .routers import recommandation

app = FastAPI()

version = "v1"
# Good practice for REST APIs/microservices: a version prefix makes it easy to
# migrate to other versions of the backend later.
route_prefix = "/API/{}".format(version)
# The routes available in routers will be prefixed with /API/{version}/

# @app.get("/")
# def read_root():
#     return {"Hello": "recommendation API :)"}

origins = [
    "http://localhost:8080"
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(recommandation.router, prefix=route_prefix)
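# Illustration (not from the original project) of how the versioned prefix
# composes with a router: assuming `recommandation.router` declared a route
# like the hypothetical one below, it would be served at
# GET /API/v1/recommendations.
#
#   from fastapi import APIRouter
#
#   router = APIRouter()
#
#   @router.get("/recommendations")
#   def list_recommendations():
#       return {"items": []}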
from Task_1 import *


def TestPointLocation():
    point = Point(3, 4)
    center = Point(0, 0)
    circle = Circle(center, 5)
    assert circle.pointLocation(point) == "On Circle"
    circle.radius = 6
    assert circle.pointLocation(point) == "In Circle"
    circle.radius = 2
    assert circle.pointLocation(point) == "Outside Circle"


TestPointLocation()
print("Pass")
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'section_input.ui'
#
# Created by: PyQt5 UI code generator 5.15.6
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.


from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Dialog(object):
    def setupUi(self, Dialog):
        Dialog.setObjectName("Dialog")
        Dialog.resize(255, 96)
        self.formLayout = QtWidgets.QFormLayout(Dialog)
        self.formLayout.setObjectName("formLayout")
        self.sectionLabel = QtWidgets.QLabel(Dialog)
        self.sectionLabel.setObjectName("sectionLabel")
        self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.sectionLabel)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.sectionLineEdit = QtWidgets.QLineEdit(Dialog)
        self.sectionLineEdit.setObjectName("sectionLineEdit")
        self.horizontalLayout.addWidget(self.sectionLineEdit)
        self.sectionComboBox = QtWidgets.QComboBox(Dialog)
        self.sectionComboBox.setEnabled(True)
        self.sectionComboBox.setMaximumSize(QtCore.QSize(32, 32))
        self.sectionComboBox.setObjectName("sectionComboBox")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.sectionComboBox.addItem("")
        self.horizontalLayout.addWidget(self.sectionComboBox)
        self.formLayout.setLayout(0, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout)
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.SpanningRole, self.buttonBox)

        self.retranslateUi(Dialog)
        self.buttonBox.accepted.connect(Dialog.accept) # type: ignore
        self.buttonBox.rejected.connect(Dialog.reject) # type: ignore
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.sectionLabel.setText(_translate("Dialog", "Section"))
        self.sectionComboBox.setItemText(0, _translate("Dialog", "Intro"))
        self.sectionComboBox.setItemText(1, _translate("Dialog", "Verse"))
        self.sectionComboBox.setItemText(2, _translate("Dialog", "Pre-Chorus"))
        self.sectionComboBox.setItemText(3, _translate("Dialog", "Chorus"))
        self.sectionComboBox.setItemText(4, _translate("Dialog", "Bridge"))
        self.sectionComboBox.setItemText(5, _translate("Dialog", "Outro"))
        self.sectionComboBox.setItemText(6, _translate("Dialog", "Custom"))
'''
    Register modules here.
    Module-specific parameters in the config .ini file can be added
    under a section with the same name as the module.

    2019-2020 Benjamin Kellenberger
'''

# set up Celery configuration
import celery_worker

from .LabelUI.app import LabelUI
from .Database.app import Database
from .FileServer.app import FileServer
from .UserHandling.app import UserHandler
from .Reception.app import Reception
from .ProjectAdministration.app import ProjectConfigurator
from .ProjectStatistics.app import ProjectStatistics
from .DataAdministration.app import DataAdministrator
from .StaticFiles.app import StaticFileServer
from .AIDEAdmin.app import AIDEAdmin
from .ModelMarketplace.app import ModelMarketplace
from .TaskCoordinator.app import TaskCoordinator

#TODO
from .AIController.app import AIController
from .AIWorker.app import AIWorker


REGISTERED_MODULES = {
    'LabelUI': LabelUI,
    'AIController': AIController,
    'AIWorker': AIWorker,
    'Database': Database,
    'FileServer': FileServer,
    'UserHandler': UserHandler,
    'Reception': Reception,
    'ProjectConfigurator': ProjectConfigurator,
    'ProjectStatistics': ProjectStatistics,
    'DataAdministrator': DataAdministrator,
    'StaticFileServer': StaticFileServer,
    'AIDEAdmin': AIDEAdmin,
    'ModelMarketplace': ModelMarketplace,
    'TaskCoordinator': TaskCoordinator
}
# Copyright 2019 Ali (@bincyber)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from . import exceptions
from Cryptodome.Cipher import AES
from Cryptodome.Hash import SHA1, SHA256
from Cryptodome.Protocol.KDF import PBKDF2
from Cryptodome.Random import get_random_bytes
from datetime import datetime
from pathlib import Path
from typing import Dict, Tuple
import base64
import distutils.spawn
import uuid


def get_random_string(length: int = 32) -> str:
    """
    This function returns an alphanumeric string of the requested length.

    :param int length: the length of the random string. Max of 32 characters
    :returns: a random string
    :rtype: str
    """
    if length > 32:
        length = 32
    elif length <= 0:
        length = 1

    random_string = uuid.uuid4().hex
    return random_string[:length]


def generate_project_name() -> str:
    """
    This function generates and returns a unique name for the Pulumi Project.

    :returns: a unique project name
    :rtype: str
    """
    random_string = get_random_string(16)
    project_name = f"pitf-project-{random_string}"
    return project_name


def generate_stack_name() -> str:
    """ This function generates and returns a unique name for the Pulumi Stack """
    random_string = get_random_string(16)
    stack_name = f"pitf-stack-{random_string}"
    return stack_name


def get_project_backend_url(path: Path = None) -> Dict[str, str]:
    """
    This function returns the location of the Pulumi state directory.
    By default, the current working directory.

    :param Path path: a path object
    :returns: dictionary containing a file URL pointing to the Pulumi state directory
    :rtype: dict
    """
    if path is None:
        path = Path.cwd()
    return {"url": path.as_uri()}


def generate_aes_encryption_key(password: str, salt: bytes = None) -> Tuple[bytes, bytes]:
    """ uses PBKDF2 with SHA256 HMAC to derive a 32-byte encryption key from the provided password """
    if salt is None:
        salt = get_random_bytes(8)
    return PBKDF2(password, salt, 32, count=1000000, hmac_hash_module=SHA256), salt


def encrypt_with_aes_gcm(key: bytes, plaintext: bytes) -> Tuple[bytes, bytes, bytes]:
    """ encrypts plaintext using 256-bit AES in GCM mode """
    nonce = get_random_bytes(12)
    cipher = AES.new(key=key, nonce=nonce, mode=AES.MODE_GCM, mac_len=16)
    ciphertext, mac = cipher.encrypt_and_digest(plaintext)
    return nonce, ciphertext, mac


def decrypt_with_aes_gcm(key: bytes, nonce: bytes, ciphertext: bytes, mac: bytes) -> bytes:
    """ decrypts 256-bit AES encrypted ciphertext """
    cipher = AES.new(key=key, nonce=nonce, mode=AES.MODE_GCM, mac_len=16)
    plaintext = cipher.decrypt_and_verify(ciphertext, mac)
    return plaintext


def generate_encryptionsalt(password: str) -> Tuple[bytes, str]:
    """ generates the base64 encoded string for the encryptionsalt field in Pulumi stack files """
    plaintext = b'pulumi'

    key, salt = generate_aes_encryption_key(password)
    nonce, ciphertext, mac = encrypt_with_aes_gcm(key, plaintext)

    # 16-byte MAC tag is appended to the ciphertext
    message = ciphertext + mac

    salt_b64 = base64.b64encode(salt).decode('utf-8')
    nonce_b64 = base64.b64encode(nonce).decode('utf-8')
    message_b64 = base64.b64encode(message).decode('utf-8')

    encryptionsalt = f"v1:{salt_b64}:v1:{nonce_b64}:{message_b64}"
    return key, encryptionsalt


def get_encrypted_secret(plaintext: bytes, key: bytes) -> str:
    """ returns a base64 formatted encrypted Pulumi secret """
    nonce, ciphertext, mac = encrypt_with_aes_gcm(key, plaintext)

    # 16-byte MAC tag is appended to the ciphertext
    message = ciphertext + mac

    nonce_b64 = base64.b64encode(nonce).decode('utf-8')
    message_b64 = base64.b64encode(message).decode('utf-8')

    encrypted_secret = f"v1:{nonce_b64}:{message_b64}"
    return encrypted_secret


def get_current_timestamp() -> str:
    """ returns the current date and time in ISO 8601 format """
    return datetime.now().astimezone().isoformat()


def sha1sum(data: bytes) -> str:
    """ returns the SHA1 hash of the provided data """
    h = SHA1.new()
    h.update(data)
    return h.hexdigest()


def sha256sum(data: bytes) -> str:
    """ returns the SHA256 hash of the provided data """
    h = SHA256.new()
    h.update(data)
    return h.hexdigest()


def decode_utf8(data: bytes) -> str:
    return data.decode('utf-8')


def get_directory_abspath(path: Path) -> Path:
    if not path.is_dir():
        path = path.parent
    return path.absolute()


def find_pulumi_binary() -> str:
    location = distutils.spawn.find_executable('pulumi')

    if location is None:
        raise exceptions.PulumiBinaryNotFoundError("Could not find the pulumi binary on the system")

    return location
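# A minimal roundtrip sketch (not part of the original module) tying the
# crypto helpers above together: derive a key from a password, encrypt with
# AES-256-GCM, then re-derive the key from the stored salt and decrypt. The
# password and plaintext below are arbitrary example values; the __main__
# guard keeps the demo out of normal imports (the module itself uses a
# relative import, so it is normally used as part of its package).
if __name__ == "__main__":
    key, salt = generate_aes_encryption_key("example-password")
    nonce, ciphertext, mac = encrypt_with_aes_gcm(key, b"pulumi secret")

    # Re-derive the same key from the stored salt, then decrypt and verify.
    rederived_key, _ = generate_aes_encryption_key("example-password", salt)
    assert decrypt_with_aes_gcm(rederived_key, nonce, ciphertext, mac) == b"pulumi secret"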
from abc import ABC
import json
import logging
import os
import ast
import torch
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    AutoModelForQuestionAnswering,
    AutoModelForTokenClassification,
)

from ts.torch_handler.base_handler import BaseHandler
from captum.attr import LayerIntegratedGradients

logger = logging.getLogger(__name__)


class TransformersSeqClassifierHandler(BaseHandler, ABC):
    """
    Transformers handler class for sequence, token classification and
    question answering.
    """

    def __init__(self):
        super(TransformersSeqClassifierHandler, self).__init__()
        self.initialized = False

    def initialize(self, ctx):
        """In this initialize function, the BERT model is loaded and the Layer
        Integrated Gradients Algorithm for Captum Explanations is initialized here.

        Args:
            ctx (context): It is a JSON Object containing information pertaining
                to the model artefact parameters.
        """
        self.manifest = ctx.manifest
        properties = ctx.system_properties
        model_dir = properties.get("model_dir")
        serialized_file = self.manifest["model"]["serializedFile"]
        model_pt_path = os.path.join(model_dir, serialized_file)
        self.device = torch.device(
            "cuda:" + str(properties.get("gpu_id"))
            if torch.cuda.is_available()
            else "cpu"
        )
        # read configs for the mode, model_name, etc. from setup_config.json
        setup_config_path = os.path.join(model_dir, "setup_config.json")
        if os.path.isfile(setup_config_path):
            with open(setup_config_path) as setup_config_file:
                self.setup_config = json.load(setup_config_file)
        else:
            logger.warning("Missing the setup_config.json file.")

        # Loading the model and tokenizer from checkpoint and config files based
        # on the user's choice of mode; further setup config can be added.
        if self.setup_config["save_mode"] == "torchscript":
            self.model = torch.jit.load(model_pt_path)
        elif self.setup_config["save_mode"] == "pretrained":
            if self.setup_config["mode"] == "sequence_classification":
                self.model = AutoModelForSequenceClassification.from_pretrained(
                    model_dir
                )
            elif self.setup_config["mode"] == "question_answering":
                self.model = AutoModelForQuestionAnswering.from_pretrained(model_dir)
            elif self.setup_config["mode"] == "token_classification":
                self.model = AutoModelForTokenClassification.from_pretrained(model_dir)
            else:
                logger.warning("Missing the operation mode.")
        else:
            logger.warning("Missing the checkpoint or state_dict.")

        if not os.path.isfile(os.path.join(model_dir, "vocab.*")):
            self.tokenizer = AutoTokenizer.from_pretrained(
                self.setup_config["model_name"],
                do_lower_case=self.setup_config["do_lower_case"],
            )
        else:
            self.tokenizer = AutoTokenizer.from_pretrained(
                model_dir, do_lower_case=self.setup_config["do_lower_case"]
            )

        self.model.to(self.device)
        self.model.eval()

        logger.info("Transformer model from path %s loaded successfully", model_dir)

        # Read the mapping file, index to object name
        mapping_file_path = os.path.join(model_dir, "index_to_name.json")
        # Question answering does not need the index_to_name.json file.
        if not self.setup_config["mode"] == "question_answering":
            if os.path.isfile(mapping_file_path):
                with open(mapping_file_path) as f:
                    self.mapping = json.load(f)
            else:
                logger.warning("Missing the index_to_name.json file.")

        # ------------------------------- Captum initialization ----------------------------#
        self.lig = LayerIntegratedGradients(
            captum_sequence_forward, self.model.bert.embeddings
        )
        self.initialized = True

    def preprocess(self, requests):
        """Basic text preprocessing, based on the user's choice of application mode.

        Args:
            requests (str): The Input data in the form of text is passed on to
                the preprocess function.

        Returns:
            list : The preprocess function returns a list of Tensor for the size
                of the word tokens.
        """
        input_batch = None
        for idx, data in enumerate(requests):
            input_text = data.get("data")
            if input_text is None:
                input_text = data.get("body")
            if isinstance(input_text, (bytes, bytearray)):
                input_text = input_text.decode('utf-8')
            max_length = self.setup_config["max_length"]
            logger.info("Received text: '%s'", input_text)
            # preprocessing text for sequence_classification and token_classification.
            if self.setup_config["mode"] == "sequence_classification" or self.setup_config["mode"] == "token_classification":
                inputs = self.tokenizer.encode_plus(
                    input_text,
                    max_length=int(max_length),
                    pad_to_max_length=True,
                    add_special_tokens=True,
                    return_tensors='pt',
                )
            # preprocessing text for question_answering.
            elif self.setup_config["mode"] == "question_answering":
                # TODO Reading the context from a pickled file or other formats
                # that fit the requirements of the task in hand. If this is done
                # then the following preprocessing needs to be modified accordingly.

                # the sample text for question_answering in the current version
                # should be formatted as a dictionary with question and text as
                # keys and related text as values.
                # we use this format here to separate question and text for encoding.
                question_context = ast.literal_eval(input_text)
                question = question_context["question"]
                context = question_context["context"]
                inputs = self.tokenizer.encode_plus(
                    question,
                    context,
                    max_length=int(max_length),
                    pad_to_max_length=True,
                    add_special_tokens=True,
                    return_tensors="pt",
                )

            input_ids = inputs["input_ids"].to(self.device)
            if input_ids.shape is not None:
                if input_batch is None:
                    input_batch = input_ids
                else:
                    input_batch = torch.cat((input_batch, input_ids), 0)
        return input_batch

    def inference(self, input_batch):
        """Predict the class (or classes) of the received text using the
        serialized transformers checkpoint.

        Args:
            input_batch (list): List of Text Tensors from the pre-process
                function is passed here

        Returns:
            list : It returns a list of the predicted value for the input text
        """
        inferences = []
        # Handling inference for sequence_classification.
        if self.setup_config["mode"] == "sequence_classification":
            predictions = self.model(input_batch)
            print("This is the output size from the Seq classification model", predictions[0].size())
            print("This is the output from the Seq classification model", predictions)
            num_rows, num_cols = predictions[0].shape
            for i in range(num_rows):
                out = predictions[0][i].unsqueeze(0)
                y_hat = out.argmax(1).item()
                predicted_idx = str(y_hat)
                inferences.append(self.mapping[predicted_idx])
        # Handling inference for question_answering.
        elif self.setup_config["mode"] == "question_answering":
            # the output should be only answer_start and answer_end
            # we are outputting the words just for demonstration.
            answer_start_scores, answer_end_scores = self.model(input_batch)
            print("This is the output size for answer start scores from the question answering model", answer_start_scores.size())
            print("This is the output for answer start scores from the question answering model", answer_start_scores)
            print("This is the output size for answer end scores from the question answering model", answer_end_scores.size())
            print("This is the output for answer end scores from the question answering model", answer_end_scores)

            num_rows, num_cols = answer_start_scores.shape
            # inferences = []
            for i in range(num_rows):
                answer_start_scores_one_seq = answer_start_scores[i].unsqueeze(0)
                answer_start = torch.argmax(answer_start_scores_one_seq)
                answer_end_scores_one_seq = answer_end_scores[i].unsqueeze(0)
                answer_end = torch.argmax(answer_end_scores_one_seq) + 1
                prediction = self.tokenizer.convert_tokens_to_string(
                    self.tokenizer.convert_ids_to_tokens(
                        input_batch[i].tolist()[answer_start:answer_end]
                    )
                )
                inferences.append(prediction)
                logger.info("Model predicted: '%s'", prediction)
        # Handling inference for token_classification.
        elif self.setup_config["mode"] == "token_classification":
            outputs = self.model(input_batch)[0]
            print("This is the output size from the token classification model", outputs.size())
            print("This is the output from the token classification model", outputs)
            num_rows = outputs.shape[0]
            for i in range(num_rows):
                output = outputs[i].unsqueeze(0)
                predictions = torch.argmax(output, dim=2)
                tokens = self.tokenizer.tokenize(self.tokenizer.decode(input_batch[i]))
                if self.mapping:
                    label_list = self.mapping["label_list"]
                    label_list = label_list.strip('][').split(', ')
                prediction = [
                    (token, label_list[prediction])
                    for token, prediction in zip(tokens, predictions[0].tolist())
                ]
                inferences.append(prediction)
                logger.info("Model predicted: '%s'", prediction)
        return inferences

    def postprocess(self, inference_output):
        """Post Process Function converts the predicted response into Torchserve
        readable format.

        Args:
            inference_output (list): It contains the predicted response of the
                input text.

        Returns:
            (list): Returns a list of the Predictions and Explanations.
        """
        return inference_output

    def get_insights(self, input_batch, text, target):
        """This function calls the layer integrated gradient to get word
        importance of the input text

        Args:
            input_batch (int): Batches of tokens IDs of text
            text (str): The Text specified in the input request
            target (int): The Target can be set to any acceptable label under
                the user's discretion.

        Returns:
            (list): Returns a list of importances and words.
        """
        if isinstance(text, (bytes, bytearray)):
            text = text.decode('utf-8')
        input_ids, ref_input_ids, attention_mask = construct_input_ref(
            text, self.tokenizer, self.device
        )
        all_tokens = get_word_token(input_ids, self.tokenizer)
        attributions, delta = self.lig.attribute(
            inputs=input_ids,
            baselines=ref_input_ids,
            target=target,  # the requested target label passed in by the caller
            additional_forward_args=(attention_mask, 0, self.model),
            return_convergence_delta=True,
        )

        attributions_sum = summarize_attributions(attributions)
        response = {}
        response["importances"] = attributions_sum.tolist()
        response["words"] = all_tokens
        response["delta"] = delta[0].tolist()

        return [response]


def construct_input_ref(text, tokenizer, device):
    """For a given text, this function creates token id, reference id and
    attention mask based on encode which is faster for captum insights

    Args:
        text (str): The text specified in the input request
        tokenizer (AutoTokenizer Class Object): To word tokenize the input text
        device (cpu or gpu): Type of the Environment the server runs on.

    Returns:
        input_id (Tensor): It attributes to the tensor of the input tokenized words
        ref_input_ids (Tensor): Ref Input IDs are used as baseline for the attributions
        attention mask: The attention mask is a binary tensor indicating the
            position of the padded indices so that the model does not attend to them.
    """
    text_ids = tokenizer.encode(text, add_special_tokens=False)
    # construct input token ids
    logger.info("text_ids %s", text_ids)
    logger.info("[tokenizer.cls_token_id] %s", [tokenizer.cls_token_id])
    input_ids = [tokenizer.cls_token_id] + text_ids + [tokenizer.sep_token_id]
    logger.info("input_ids %s", input_ids)

    input_ids = torch.tensor([input_ids], device=device)
    # construct reference token ids
    ref_input_ids = (
        [tokenizer.cls_token_id]
        + [tokenizer.pad_token_id] * len(text_ids)
        + [tokenizer.sep_token_id]
    )
    ref_input_ids = torch.tensor([ref_input_ids], device=device)
    # construct attention mask
    attention_mask = torch.ones_like(input_ids)
    return input_ids, ref_input_ids, attention_mask


def captum_sequence_forward(inputs, attention_mask=None, position=0, model=None):
    """This function is used to get the predictions from the model and this
    function can be used independent of the type of the BERT Task. In case of a
    QnA, there is no need to create two models; one model with different
    positions can be used.

    Args:
        inputs (list): Input for Predictions
        attention_mask (list, optional): The attention mask is a binary tensor
            indicating the position of the padded indices so that the model does
            not attend to them, it defaults to None.
        position (int, optional): Position depends on the BERT Task. If it is a
            QnA, then position is set to 1. Defaults to 0.
        model ([type], optional): Name of the model, it defaults to None.

    Returns:
        list: Prediction Outcome
    """
    model.eval()
    model.zero_grad()
    pred = model(inputs, attention_mask=attention_mask)
    pred = pred[position]
    return pred


def summarize_attributions(attributions):
    """Summarises the attribution across multiple runs

    Args:
        attributions (list): attributions from the Layer Integrated Gradients

    Returns:
        list : Returns the attributions after normalizing them.
    """
    attributions = attributions.sum(dim=-1).squeeze(0)
    attributions = attributions / torch.norm(attributions)
    return attributions


def get_word_token(input_ids, tokenizer):
    """constructs word tokens from token id using the BERT's Auto Tokenizer

    Args:
        input_ids (list): Input IDs from construct_input_ref method
        tokenizer (class): The Auto Tokenizer Pre-Trained model object

    Returns:
        (list): Returns the word tokens
    """
    indices = input_ids[0].detach().tolist()
    tokens = tokenizer.convert_ids_to_tokens(indices)
    # Remove unicode space character from BPE Tokeniser
    tokens = [token.replace("Ġ", "") for token in tokens]
    return tokens
###############################################################################
#
# Tests for libxlsxwriter.
#
# Copyright 2014-2019, John McNamara, jmcnamara@cpan.org
#

import base_test_class


class TestCompareXLSXFiles(base_test_class.XLSXBaseTest):
    """
    Test file created with libxlsxwriter against a file created by Excel.
    """

    def test_chart_chartarea01(self):
        self.run_exe_test('test_chart_chartarea01')

    # chartarea02 is for a deprecated api in Perl.

    def test_chart_chartarea03(self):
        self.run_exe_test('test_chart_chartarea03')

    # chartarea04 is an unsupported stock chart.

    def test_chart_chartarea05(self):
        self.run_exe_test('test_chart_chartarea05')

    def test_chart_chartarea06(self):
        self.run_exe_test('test_chart_chartarea06')
#!/usr/bin/env python
"""
Emulated QPD functionality

Hazen 04/17
"""
import math
import random
import time

from PyQt5 import QtCore

import storm_control.hal4000.halLib.halMessage as halMessage

import storm_control.sc_hardware.baseClasses.hardwareModule as hardwareModule
import storm_control.sc_hardware.baseClasses.lockModule as lockModule


class NoneQPDFunctionality(hardwareModule.BufferedFunctionality, lockModule.QPDFunctionalityMixin):
    qpdUpdate = QtCore.pyqtSignal(dict)

    def __init__(self, noise = 0.0, tilt = 0.0, **kwds):
        super().__init__(**kwds)
        self.first_scan = True
        self.noise = noise
        self.tilt = tilt
        self.xy_stage_fn = None
        self.z_offset = 0.0
        self.z_stage_center = None
        self.z_stage_fn = None
        self.z_stage_max = None
        self.z_stage_min = None

    def getOffset(self):
        self.mustRun(task = self.scan,
                     ret_signal = self.qpdUpdate)

    def scan(self):
        if self.first_scan:
            self.first_scan = False
        else:
            time.sleep(0.1)

        #
        # Determine current z offset. This is the offset of the z stage from
        # its center position adjusted by xy stage tilt (if any).
        #
        z_offset = 0.0
        if (self.xy_stage_fn is not None) and (self.z_stage_fn is not None):
            z_center = self.z_stage_center
            pos_dict = self.xy_stage_fn.getCurrentPosition()
            if pos_dict is not None:
                dx = pos_dict["x"]
                #dy = pos_dict["y"]
                #dd = math.sqrt(dx*dx + dy*dy)
                z_center += self.tilt * dx
                if (z_center > self.z_stage_max):
                    z_center = self.z_stage_max
                elif (z_center < self.z_stage_min):
                    z_center = self.z_stage_min
            z_offset = self.z_stage_fn.getCurrentPosition() - z_center

        if (self.noise > 0.0):
            z_offset += random.gauss(0.0, self.noise)

        power = 600.0 * math.exp(-0.250 * (z_offset * z_offset))
        if (power < (0.5 * self.getParameter("sum_warning_low"))):
            z_offset = 0.0

        return {"is_good" : True,
                "offset" : z_offset,
                "sum" : power,
                "x" : 100.0 * z_offset,
                "y" : 0.0}

    def setFunctionality(self, name, functionality):
        if (name == "xy_stage"):
            self.xy_stage_fn = functionality
        elif (name == "z_stage"):
            self.z_stage_fn = functionality
            self.z_stage_center = self.z_stage_fn.getCenterPosition()
            self.z_stage_max = self.z_stage_fn.getMaximum()
            self.z_stage_min = self.z_stage_fn.getMinimum()
        else:
            print(">> Warning unknown function", name)


class NoneQPDModule(hardwareModule.HardwareModule):

    def __init__(self, module_params = None, qt_settings = None, **kwds):
        super().__init__(**kwds)
        self.qpd_functionality = None

        self.configuration = module_params.get("configuration")
        self.qpd_functionality = NoneQPDFunctionality(parameters = self.configuration.get("parameters"),
                                                      noise = self.configuration.get("noise", 0.0),
                                                      tilt = self.configuration.get("tilt", 0.0),
                                                      units_to_microns = self.configuration.get("units_to_microns"))

    def cleanUp(self, qt_settings):
        if self.qpd_functionality is not None:
            self.qpd_functionality.wait()

    def getFunctionality(self, message):
        if (message.getData()["name"] == self.module_name):
            message.addResponse(halMessage.HalMessageResponse(source = self.module_name,
                                                              data = {"functionality" : self.qpd_functionality}))

    def handleResponse(self, message, response):
        if message.isType("get functionality"):
            self.qpd_functionality.setFunctionality(message.getData()["extra data"],
                                                    response.getData()["functionality"])

    def processMessage(self, message):
        if message.isType("configure2"):
            #
            # The xy and z stage functionalities are used so that the none focus lock
            # can more realistically simulate the behavior of a real focus lock.
            #
            if self.configuration.has("xy_stage_fn"):
                self.sendMessage(halMessage.HalMessage(m_type = "get functionality",
                                                       data = {"name" : self.configuration.get("xy_stage_fn"),
                                                               "extra data" : "xy_stage"}))

            if self.configuration.has("z_stage_fn"):
                self.sendMessage(halMessage.HalMessage(m_type = "get functionality",
                                                       data = {"name" : self.configuration.get("z_stage_fn"),
                                                               "extra data" : "z_stage"}))

        elif message.isType("get functionality"):
            self.getFunctionality(message)
import os
import sys

# import transaction

from sqlalchemy import create_engine

from .models import Idea, Author, Base
from .session import DBSession


def usage(argv):
    cmd = os.path.basename(argv[0])
    print('usage: %s <database_uri>\n'
          '(example: "%s sqlite:///agora.sqlite")\n'
          'to seed the database: "%s <database_uri> seed"\n'
          '(example: "%s sqlite:///agora.sqlite seed")\n' % (cmd, cmd, cmd, cmd))
    sys.exit(1)


def main(argv=sys.argv):
    if not 2 <= len(argv) <= 3:
        usage(argv)
    database_uri = argv[1]
    seed = argv[2] if len(argv) > 2 else False
    engine = create_engine(database_uri)
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    if seed == 'seed':
        author = Author(username='misinformation',
                        fullname='Miss Information',
                        email='misinformation@example.com')
        DBSession.add(author)
        author = DBSession.query(Author).filter_by(
            username='misinformation').one()
        idea = Idea(title='First Idea!',
                    idea='This is my idea.',
                    author=author)
        DBSession.add(idea)
        idea = Idea(title='Another Idea!',
                    idea='This is another idea.',
                    author=author)
        DBSession.add(idea)
# Copyright 2017. Allen Institute. All rights reserved
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import numpy as np
import math
import json
import pandas as pd
import h5py

from neuron import h


def rotation_matrix(axis, theta):
    """Return the rotation matrix associated with counterclockwise rotation about the given axis by theta radians.
    """
    axis = np.asarray(axis)
    theta = np.asarray(theta)
    axis = axis/math.sqrt(np.dot(axis, axis))
    a = math.cos(theta/2.0)
    b, c, d = -axis*math.sin(theta/2.0)
    aa, bb, cc, dd = a*a, b*b, c*c, d*d
    bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d

    return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],
                     [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],
                     [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])


def edge_converter_csv(output_dir, csv_file):
    """Currently being used by BioNetwork.write_connections(), need to refactor

    :param output_dir:
    :param csv_file:
    :return:
    """
    syns_df = pd.read_csv(csv_file, sep=' ')
    for name, group in syns_df.groupby(['trg_network', 'src_network']):
        trg_net, src_net = name
        group_len = len(group.index)
        with h5py.File(os.path.join(output_dir, '{}_{}_edges.h5'.format(trg_net, src_net)), 'w') as conns_h5:
            conns_h5.create_dataset('edges/target_gid', data=group['trg_gid'])
            conns_h5.create_dataset('edges/source_gid', data=group['src_gid'])
            conns_h5.create_dataset('edges/edge_type_id', data=group['edge_type_id'])
            conns_h5.create_dataset('edges/edge_group', data=group['connection_group'])

            group_counters = {group_id: 0 for group_id in group.connection_group.unique()}
            edge_group_indicies = np.zeros(group_len, dtype=np.uint)
            for i, group_id in enumerate(group['connection_group']):
                edge_group_indicies[i] = group_counters[group_id]
                group_counters[group_id] += 1
            conns_h5.create_dataset('edges/edge_group_indicies', data=edge_group_indicies)

            for group_class, sub_group in group.groupby('connection_group'):
                grp = conns_h5.create_group('edges/{}'.format(group_class))
                if group_class == 0:
                    grp.create_dataset('sec_id', data=sub_group['segment'], dtype='int')
                    grp.create_dataset('sec_x', data=sub_group['section'])
                    grp.create_dataset('syn_weight', data=sub_group['weight'])
                    grp.create_dataset('delay', data=sub_group['delay'])
                elif group_class == 1:
                    grp.create_dataset('syn_weight', data=sub_group['weight'])
                    grp.create_dataset('delay', data=sub_group['delay'])
                else:
                    print('Unknown cell group {}'.format(group_class))
from enum import auto

from sqlglot.helper import AutoName


class ErrorLevel(AutoName):
    IGNORE = auto()
    WARN = auto()
    RAISE = auto()


class SqlglotError(Exception):
    pass


class UnsupportedError(SqlglotError):
    pass


class ParseError(SqlglotError):
    pass


class TokenError(SqlglotError):
    pass


class OptimizeError(SqlglotError):
    pass
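# A minimal sketch (not from sqlglot itself) of how a caller might branch on
# ErrorLevel when a problem is encountered; `report` and its message are
# illustrative names, not part of the library.
import logging


def report(level: ErrorLevel, message: str) -> None:
    if level == ErrorLevel.RAISE:
        raise ParseError(message)
    if level == ErrorLevel.WARN:
        logging.warning(message)
    # ErrorLevel.IGNORE: do nothing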
from random import randint

num = randint(0, 5)
print('='*100)
print('Hi! Let us have some fun. I will think of a number from 0 to 5, and you try to guess it...')
print('='*100)
n = int(input('Which number did I think of? '))
if n == num:
    print('Congratulations! You got it, I thought of the number {}, the same as your {}.'.format(num, n))
else:
    print('Not this time. I thought of the number {}, not {}. But do not give up.'.format(num, n))
"""Functions to support MedPhys Taught Module workshop on calibration and tracking """ import math import numpy as np from sksurgerycore.algorithms.procrustes import orthogonal_procrustes from sksurgerycore.algorithms.errors import compute_tre_from_fle, \ compute_fre_from_fle class PointBasedRegistration: """ Does the registration and assoctiated measures """ def __init__(self, target, fixed_fle_esv, moving_fle_esv): """ :params target: 1x3 target point :params fixed_fle_esv: the expected squared value of the fixed image fle :params moving_fle_esv: the expected squared value of the moving image fle """ if not moving_fle_esv == 0.0: raise NotImplementedError("Currently we only support zero" + "fle on moving image ") self.target = None self.fixed_fle_esv = None self.moving_fle_esv = None self.reinit(target, fixed_fle_esv, moving_fle_esv) def reinit(self, target, fixed_fle_esv, moving_fle_esv): """ reinitiatilses the target and errors """ self.target = target self.fixed_fle_esv = fixed_fle_esv self.moving_fle_esv = moving_fle_esv def register(self, fixed_points, moving_points): """ Does the registration """ success = False fre = 0.0 expected_tre_squared = 0.0 expected_fre_sq = 0.0 actual_tre = 0.0 transformed_target = np.zeros(shape=(1, 3), dtype=np.float64) no_fids = fixed_points.shape[0] if no_fids > 2: rotation, translation, fre = orthogonal_procrustes( fixed_points, moving_points) expected_tre_squared = compute_tre_from_fle( moving_points[:, 0:3], self.fixed_fle_esv, self.target[:, 0:3]) expected_fre_sq = compute_fre_from_fle(moving_points[:, 0:3], self.fixed_fle_esv) transformed_target = np.matmul(rotation, self.target.transpose()) + \ translation actual_tre = np.linalg.norm( transformed_target - self.target[:, 0:3].transpose()) success = True return [success, fre, self.fixed_fle_esv, expected_tre_squared, expected_fre_sq, transformed_target[:, 0:3], actual_tre, no_fids] class PlotRegStatistics(): """ writes the registration statistics """ def __init__(self, plot): """ The plot to write on """ self.plot = plot self.fids_text = None self.tre_text = None self.exp_tre_text = None self.fre_text = None self.props = dict(boxstyle='round', facecolor='wheat', alpha=0.8) def update_stats_plot(self, tre, exp_tre, fre, exp_fre): """ Updates the statistics display """ if self.tre_text is not None: self.tre_text.remove() if self.exp_tre_text is not None: self.exp_tre_text.remove() if self.fre_text is not None: self.fre_text.remove() stats_str = ('Expected FRE = {0:.2f}\n'.format(exp_fre) + 'Expected TRE = {0:.2f}'.format(exp_tre)) actual_tre_str = ('Actual TRE = {0:.2f}'.format(tre)) actual_fre_str = ('Actual FRE = {0:.2f}'.format(fre)) self.exp_tre_text = self.plot.text(-0.90, 1.10, stats_str, transform=self.plot.transAxes, fontsize=26, verticalalignment='top', bbox=self.props) self.tre_text = self.plot.text(-0.05, 1.10, actual_tre_str, transform=self.plot.transAxes, fontsize=26, verticalalignment='top', bbox=self.props) self.fre_text = self.plot.text(0.65, 1.10, actual_fre_str, transform=self.plot.transAxes, fontsize=26, verticalalignment='top', bbox=self.props) def update_fids_stats(self, no_fids, mean_fle): """ Updates the fids stats display """ if self.fids_text is not None: self.fids_text.remove() fids_str = ('Number of fids = {0:}\n'.format(no_fids) + 'Expected FLE = {0:.2f}'.format(mean_fle)) self.fids_text = self.plot.text(-1.65, 1.10, fids_str, transform=self.plot.transAxes, fontsize=26, verticalalignment='top', bbox=self.props) class PlotRegistrations(): """ Plots the results of 
registrations """ def __init__(self, fixed_plot, moving_plot): """ :params fixed_plot: the fixed image subplot :params moving_plot: the moving image subplot """ self.fixed_plot = fixed_plot self.moving_plot = moving_plot self.target_scatter = None self.trans_target_plots = [None, None] self.fixed_fids_plots = [None, None] self.moving_fids_plot = None self.stats_plot = PlotRegStatistics(fixed_plot) self.show_actual_positions = True self.target_point = None def initialise_new_reg(self, img, target_point, outline): """ resets the registration """ self.moving_plot.imshow(img) self.fixed_plot.plot(outline[:, 1], outline[:, 0], '-b', lw=3) self.fixed_plot.set_ylim([0, img.shape[0]]) self.fixed_plot.set_xlim([0, img.shape[1]]) self.fixed_plot.axis([0, img.shape[1], img.shape[0], 0]) self.fixed_plot.axis('scaled') self.target_point = target_point if self.target_scatter is not None: self.target_scatter.remove() self.target_scatter = self.moving_plot.scatter(self.target_point[0, 0], self.target_point[0, 1], s=144, c='r') if self.trans_target_plots[0] is not None: self.trans_target_plots[0].remove() self.trans_target_plots[0] = None if self.trans_target_plots[1] is not None: self.trans_target_plots[1].remove() self.trans_target_plots[1] = None self.stats_plot.update_stats_plot(0, 0, 0, 0) self.moving_plot.set_title('Pre-Operative Image', y=-0.10, fontsize=26) self.fixed_plot.set_title('Patient in Theatre', y=-0.10, fontsize=26) def plot_fiducials(self, fixed_points, moving_points, no_fids, mean_fle): """ Updates plot with fiducial data """ if self.fixed_fids_plots[0] is not None: self.fixed_fids_plots[0].remove() if self.moving_fids_plot is not None: self.moving_fids_plot.remove() if self.fixed_fids_plots[1] is not None: self.fixed_fids_plots[1].remove() self.fixed_fids_plots[0] = self.fixed_plot.scatter(fixed_points[:, 0], fixed_points[:, 1], s=64, c='g', marker='o') self.moving_fids_plot = self.moving_plot.scatter(moving_points[:, 0], moving_points[:, 1], s=64, c='g', marker="o") if self.show_actual_positions: self.fixed_fids_plots[1] = self.fixed_plot.scatter( moving_points[:, 0], moving_points[:, 1], s=36, c='black', marker='+') self.stats_plot.update_fids_stats(no_fids, mean_fle) def plot_registration_result(self, actual_tre, expected_tre, fre, expected_fre, transformed_target_2d): """ Plots the results of a registration """ self.stats_plot.update_stats_plot(actual_tre, expected_tre, fre, expected_fre) if self.trans_target_plots[0] is not None: self.trans_target_plots[0].remove() if self.trans_target_plots[1] is not None: self.trans_target_plots[1].remove() self.trans_target_plots[0] = self.fixed_plot.scatter( transformed_target_2d[0], transformed_target_2d[1], s=144, c='r', marker='o') if self.show_actual_positions: self.trans_target_plots[1] = self.fixed_plot.scatter( self.target_point[0, 0], self.target_point[0, 1], s=36, c='black', marker='+') class AddFiducialMarker: """ A class to handle mouse press events, adding a fiducial marker. 
""" def __init__(self, fig, plotter, pbr, logger, fixed_fle_sd, moving_fle_sd): """ :params fig: the matplot lib figure to get mouse events from :params fixed_plot: the fixed image subplot :params moving_plot: the moving image subplot :params target: 1x3 target point :params fixed_fle: the standard deviations of the fixed image fle :params moving_fle: the standard deviations of the moving image fle """ self.pbr = pbr self.plotter = plotter self.fig = fig self.cid = fig.canvas.mpl_connect('button_press_event', self) self.logger = logger self.fixed_points = None self.moving_points = None self.fids_plot = None self.fixed_fle_sd = fixed_fle_sd self.moving_fle_sd = moving_fle_sd self.reset_fiducials(0.0) def __call__(self, event): if event.xdata is not None: fiducial_location = np.zeros((1, 3), dtype=np.float64) fiducial_location[0, 0] = event.xdata fiducial_location[0, 1] = event.ydata if _is_valid_fiducial(fiducial_location): fixed_point = _add_guassian_fle_to_fiducial( fiducial_location, self.fixed_fle_sd) moving_point = _add_guassian_fle_to_fiducial( fiducial_location, self.moving_fle_sd) self.fixed_points = np.concatenate( (self.fixed_points, fixed_point), axis=0) self.moving_points = np.concatenate( (self.moving_points, moving_point), axis=0) [success, fre, mean_fle_sq, expected_tre_sq, expected_fre_sq, transformed_target_2d, actual_tre, no_fids] = self.pbr.register( self.fixed_points, self.moving_points) mean_fle = math.sqrt(mean_fle_sq) self.plotter.plot_fiducials(self.fixed_points, self.moving_points, no_fids, mean_fle) if success: expected_tre = math.sqrt(expected_tre_sq) expected_fre = math.sqrt(expected_fre_sq) self.plotter.plot_registration_result( actual_tre, expected_tre, fre, expected_fre, transformed_target_2d) self.logger.log_result( actual_tre, fre, expected_tre, expected_fre, mean_fle, no_fids) self.fig.canvas.draw() def reset_fiducials(self, mean_fle_sq): """ resets the fiducial markers """ self.fixed_points = np.zeros((0, 3), dtype=np.float64) self.moving_points = np.zeros((0, 3), dtype=np.float64) self.plotter.plot_fiducials(self.fixed_points, self.moving_points, 0, math.sqrt(mean_fle_sq)) def _is_valid_fiducial(_unused_fiducial_location): """ Checks the x, y, and z location of a fiducial :returns: true if a valid fiducial """ return True def _add_guassian_fle_to_fiducial(fiducial, fle_standard_deviation): moved = np.random.normal(fiducial, fle_standard_deviation) return moved def make_target_point(outline, edge_buffer=0.9): """ returns a target point, that should lie within the outline. """ #let's assume the anatomy is a circle with #centre, and radius centre = np.mean(outline, 0) max_radius = np.min((np.max(outline, 0) - np.min(outline, 0))/2)*edge_buffer radius = np.random.uniform(low=0.0, high=max_radius) radius = np.random.uniform(low=0.0, high=max_radius) angle = np.random.uniform(low=0.0, high=math.pi*2.0) x_ord = radius * math.cos(angle) + centre[0] y_ord = radius * math.sin(angle) + centre[1] return np.array([[x_ord, y_ord, 0.0]])
# import libraries
import sys
import re
import pickle

import pandas as pd
import numpy as np
from sqlalchemy import create_engine

import nltk
# punkt and wordnet are needed by word_tokenize and WordNetLemmatizer below
nltk.download('stopwords')
nltk.download('punkt')
nltk.download('wordnet')
from nltk import word_tokenize
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer

from sklearn.pipeline import Pipeline
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.ensemble import RandomForestClassifier
from sklearn.multioutput import MultiOutputClassifier
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer


def load_data(database_filepath):
    '''
    Description: Loads data from the SQLite database
    Input: Database filepath
    Output: Features (X), labels (y) and category_names
    '''
    engine = create_engine('sqlite:///{}'.format(database_filepath))
    df = pd.read_sql_table('DisasterResponseData', engine)
    X = df['message']
    y = df.iloc[:, 4:]
    category_names = y.columns.tolist()
    return X, y, category_names


def tokenize(text):
    '''
    Description: Normalise, lemmatize and tokenize text from messages.
    Input: Text data
    Output: Normalised, lemmatized and tokenized text
    '''
    stop_words = stopwords.words("english")
    lemmatizer = WordNetLemmatizer()

    # normalize case and remove punctuation
    text = re.sub(r"[^a-zA-Z0-9]", " ", text.lower())

    # tokenize text
    tokens = word_tokenize(text)

    # lemmatize and remove stop words
    tokens = [lemmatizer.lemmatize(word) for word in tokens
              if word not in stop_words]

    return tokens


def build_model():
    '''
    Description: Create a text processing and machine learning pipeline that
    uses the custom tokenize function to vectorize and transform text, and a
    MultiOutputClassifier wrapping a RandomForestClassifier to enable
    predictions on all 36 categories.
    Use GridSearchCV to select the best parameters for the classifier.
    Output: Text processing and ML pipeline
    '''
    forest = RandomForestClassifier(n_estimators=10, random_state=1)

    pipeline = Pipeline([
        ('vect', CountVectorizer(tokenizer=tokenize)),
        ('tfidf', TfidfTransformer()),
        ('clf', MultiOutputClassifier(forest))
    ])

    parameters = {'clf__estimator__n_estimators': [5, 10]}

    cv = GridSearchCV(pipeline, param_grid=parameters)
    return cv


def evaluate_model(model, X_test, Y_test, category_names):
    '''
    Description: Use the ML pipeline to predict labels for the test features
    and print a classification report (precision, recall, f1 score) for
    each category.
    Input: ML pipeline, test features, test labels and category_names
    Output: F1 score, precision and recall for each category in the test set
    '''
    Y_pred = model.predict(X_test)
    for i, category in enumerate(category_names):
        print(category)
        print(classification_report(Y_test.iloc[:, i], Y_pred[:, i]))


def save_model(model, model_filepath):
    '''
    Description: Exports the final model as a pickle file
    Input: ML pipeline, name of pickle file
    Output: Pickle file
    '''
    with open(model_filepath, 'wb') as f:
        pickle.dump(model, f)


def main():
    if len(sys.argv) == 3:
        database_filepath, model_filepath = sys.argv[1:]
        print('Loading data...\n    DATABASE: {}'.format(database_filepath))
        X, Y, category_names = load_data(database_filepath)
        X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)

        print('Building model...')
        model = build_model()

        print('Training model...')
        model.fit(X_train, Y_train)

        print('Evaluating model...')
        evaluate_model(model, X_test, Y_test, category_names)

        print('Saving model...\n    MODEL: {}'.format(model_filepath))
        save_model(model, model_filepath)

        print('Trained model saved!')

    else:
        print('Please provide the filepath of the disaster messages database '
              'as the first argument and the filepath of the pickle file to '
              'save the model to as the second argument. \n\nExample: python '
              'train_classifier.py ../data/DisasterResponse.db classifier.pkl')


if __name__ == '__main__':
    main()
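# ---------------------------------------------------------------------------
# Hedged sketch: why the grid key above is 'clf__estimator__n_estimators'.
# Pipeline parameters are addressed as step__attribute__param, so the 'clf'
# step (a MultiOutputClassifier) exposes its wrapped RandomForest's
# n_estimators under that nested name. Illustration only; not called by main.
# ---------------------------------------------------------------------------
def _show_param_path():
    cv = build_model()
    print(sorted(cv.param_grid))  # ['clf__estimator__n_estimators']
    print(cv.estimator.get_params()['clf__estimator__n_estimators'])  # 10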
# -*- coding: utf-8 -*- """ Created on Mon Feb 28 14:18:51 2022 @author: Yang """ """ This is some tools for analyzing the simulation result from AtomECS (SimECS). """ import scipy.constants as cts import numpy as np import pandas as pd import matplotlib.pyplot as plt import matplotlib #matplotlib.rc('text', usetex = True) #plt.rcParams['font.family'] = "Times New Roman" import CONST def get_T_Rho(FileName, cap_r, r_max, dr): #define useful constants m_Rb = CONST.M_RB Kb = CONST.KB # load the file and add labels to each columns of them trj = pd.read_table(FileName, skiprows=9, sep=' ', skipinitialspace=True) trj.columns = ['id','atom','x','y','z','vx','vy','vz','speed','vxy','t'] # define the atom cloud by selecting atom under at certain threshold trj_cloud = trj[(trj.x)**2 + (trj.y)**2 + (trj.z)**2 <= cap_r**2] # getting coordinates vxs, vys, vzs = get_velocities(trj_cloud) # substracting the COM velocity vxs = vxs - np.mean(vxs) vys = vys - np.mean(vys) vzs = vzs - np.mean(vzs) # calculating the temperature of the atom cloud speeds2 = vxs**2 + vys**2 + vzs**2 T = ((0.5*m_Rb*speeds2)/(1.5*Kb)).mean() # calculate rhos R, rho_shell, rho_e, psd_e, psd_mean, psd_max = get_Rho(trj, cap_r, r_max, dr, T) return (len(trj_cloud),T, R, np.array(rho_shell), rho_e, psd_e, psd_mean, psd_max) def get_Rho (trj, cap_r, r_max, dr, T): m_Rb = CONST.M_RB Kb = CONST.KB h = CONST.H # define the atom cloud by selecting atom under at certain threshold trj_core = trj[(trj.x)**2 + (trj.y)**2 + (trj.z)**2 <= cap_r**2] # get the coordinates of each direction xs, ys, zs = get_coord(trj, use_com = True) # calculate the distance of each atoms to the origin r2 = xs**2 + ys**2 + zs**2 # the r value for each atom for calculating the histogram r = np.sqrt(r2) rg = np.sqrt(np.mean(r**2)) r_sphere = np.sqrt(5/3) * rg # calculate the number of bins for the RDF nbins = int(r_max/dr) Natom_shell, R = np.histogram(r, bins = nbins, range = (0, r_max), density = False) R = np.delete(R,0) #calculate the volume of each shell, or, in another words, the normalization factor norm_factors = (4/3)*np.pi*(R**3 - (R-dr)**3) rho_shell = (Natom_shell/norm_factors) # calculate different rhos e_radius = get_eradius(rho_shell, dr) rho_e = len(trj_core) / calc_volume(e_radius) rho_mean = len(trj_core) / calc_volume(r_sphere) rho_max = np.max(rho_shell) # calculate lambda lamb_da = h/np.sqrt(2*np.pi*m_Rb*Kb*T) # calculate the PSD psd_e = rho_e * lamb_da**3 psd_mean = rho_mean * lamb_da**3 psd_max = rho_max * lamb_da**3 return (np.array(R), np.array(rho_shell), rho_mean, psd_e, psd_mean, psd_max) def get_coord(trj, use_com): if use_com == True: comx = np.mean(np.array(trj.iloc[:, 2])) comy = np.mean(np.array(trj.iloc[:, 3])) comz = np.mean(np.array(trj.iloc[:, 4])) else: comx = 0 comy = 0 comz = 0 return (np.array(trj.iloc[:, 2]) - comx , np.array(trj.iloc[:, 3]) - comy, np.array(trj.iloc[:, 4]) - comz) def get_velocities(trj): return (trj.vx, trj.vy, trj.vz) def calc_volume (r): return ((4/3) * np.pi * r**3) def get_eradius (rho_shell, dr): e_rho_max = np.max(rho_shell)/np.e e_r_ndx = min(range(len(rho_shell)), key = lambda i: abs(rho_shell[i] - e_rho_max)) return e_r_ndx*dr def get_instant_laser_intersection(timestep, frequency, lx0 = 0.0, ly0 = 0.0, lz0 = 0.0): lx = 0.0002 * np.sin(frequency*2*np.pi * 1e-6 * timestep) ly = 0.0002 * np.sin(frequency*2*np.pi * 1e-6 * timestep) lz = 0.0002 * np.sin(frequency*2*np.pi * 1e-6 * timestep) return (lx, ly, lz) def get_Ti(FileName): m_Rb = 86.909*cts.value('atomic mass constant') Kb = 
cts.value('Boltzmann constant') trj = pd.read_table(FileName, skiprows=9, sep=' ', skipinitialspace=True) trj.columns = ['id','atom','x','y','z','vx','vy','vz','speed','vxy'] vxi = np.array(trj.iloc[:, 6]) vyi = np.array(trj.iloc[:, 7]) vzi = np.array(trj.iloc[:, 8]) speedsi2 = vxi**2 + vyi**2 + vzi**2 Ti = ((0.5*m_Rb*speedsi2)/(1.5*Kb)).mean() return (round(Ti,3)) def trj_analysis (features, pre_directory, tot_steps, d_step, cap_r, r_max, dr, output_dir, dt, skipfirstframe): # Initiate lists for storing calculated data Tini = [] # initial temperature Nini = [] # initial number TFinals = [] # final temperature NFinals = [] # final number PSDFinalsE = [] # final psd with e_radius determined density PSDFinalsMean = [] # final psd with mean density PSDFinalsMax = [] # final psd wihm max density for feature, tot_step in zip(features, tot_steps): if feature == '': feature = '.' # defining the path directory = pre_directory + feature +'/trjs/' STEP = [] T = [] Natom = [] RHO = [] RHOE = [] PSDE = [] PSDMEAN = [] PSDMAX = [] # initialize the step number based on skipping the first step or not if skipfirstframe == True: step = d_step else: step = 0 if step == 0: filename = '1.trj' # because we don't have "0.trj" else: filename = str(step) + '.trj' counter = 0 #print(step, tot_step) while step <= tot_step: # print(counter) num, temp, R, rho, rho_mean, psd_e, psd_mean, psd_max = get_T_Rho(directory + filename, cap_r, r_max, dr) # accumulating rho values if len(RHO) == 0: RHO = rho else: RHO += rho # append values to the storing lists STEP.append(step) T.append(temp) Natom.append(num) RHOE.append(rho_mean) PSDE.append(psd_e) PSDMEAN.append(psd_mean) PSDMAX.append(psd_max) step += d_step filename = str(step) + '.trj' counter += 1 RHO = RHO/counter NFinals.append(num) TFinals.append(temp) PSDFinalsE.append(psd_e) PSDFinalsMean.append(psd_mean) PSDFinalsMax.append(psd_max) pd.DataFrame(RHO, R*1000).to_csv(output_dir + '/Rho_' + feature + '.csv') pd.DataFrame(T, np.array(STEP)*dt).to_csv(output_dir + '/T_' + feature + '.csv') pd.DataFrame(Natom, np.array(STEP)*dt).to_csv(output_dir + '/N_' + feature + '.csv') pd.DataFrame(np.array(RHOE), np.array(STEP)*dt).to_csv(output_dir + '/rho_mean_' + feature + '.csv') pd.DataFrame(np.array(PSDE), np.array(STEP)*dt).to_csv(output_dir + '/psd_eradius_' + feature + '.csv') pd.DataFrame(np.array(PSDMEAN), np.array(STEP)*dt).to_csv(output_dir + '/psd_mean_' + feature + '.csv') pd.DataFrame(np.array(PSDMAX), np.array(STEP)*dt).to_csv(output_dir + '/psd_max_' + feature + '.csv') print(str(feature) + " : " + str(Natom[-1]) + " atoms left." + " Final T: " + str(T[-1]*1e6) + " uK." + "eradius rho: " + str(rho_mean) + ". eradius psd: " + str(psd_e), ". mean psd: " + str(psd_mean), ". max psd: " + str(psd_max) + ".")
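# ---------------------------------------------------------------------------
# Hedged cross-check of the PSD ingredient used above: the thermal de
# Broglie wavelength lambda = h / sqrt(2*pi*m*kB*T). The 100 uK temperature
# is an illustrative value, not taken from any simulation output.
# ---------------------------------------------------------------------------
def _lambda_db(T):
    m_rb = 86.909 * cts.value('atomic mass constant')
    kb = cts.value('Boltzmann constant')
    return cts.h / np.sqrt(2 * np.pi * m_rb * kb * T)

# _lambda_db(100e-6) is roughly 1.9e-8 m for Rb at 100 uK, so psd = rho *
# lambda**3 stays far below unity at typical MOT densities.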
""" [ #480390 ] main() does not throw exceptions """ import support support.compileJPythonc("test340c.py", core=1, jar="test340.jar", output="test340.err") support.compileJava("test340j.java") rc = support.runJava("test340j", classpath=".", expectError=1) if rc != 42: support.TestError("Did not catch exception correctly %d" % rc)
#!/usr/bin/env python # -*- coding: utf-8 -*- """core/__init__.py """
#Name        :
#Student ID  :
#Date        :
#Description :

#DefSpek
#type PohonBiner : <A: element, L: PohonBiner, R: PohonBiner>
#<A,L,R> is a composite binary-tree type where A is the root, L is the left
#subtree, and R is the right subtree

class PohonBiner:
    def __init__(self, A, L, R):
        self.A = A
        self.L = L
        self.R = R

#DefSpek
#MakePB : element, PohonBiner, PohonBiner ---> PohonBiner
#MakePB(A,L,R) produces a binary tree whose root is A, whose left subtree is
#L, and whose right subtree is R
def MakePB(A, L, R):
    return PohonBiner(A, L, R)

#Akar function
#DefSpek
#Akar : non-empty PohonBiner ---> element
#Akar(P) is the root element of P
def Akar(P):
    return P.A

#Left function
#DefSpek
#Left : non-empty PohonBiner ---> PohonBiner
#Left(P) is the left subtree of P; if P is /L, A, R\ then Left(P) is L
def Left(P):
    return P.L

#Right function
#DefSpek
#Right : non-empty PohonBiner ---> PohonBiner
#Right(P) is the right subtree of P; if P is /L, A, R\ then Right(P) is R
def Right(P):
    return P.R

#IsTreeEmpty function
#DefSpek
#IsTreeEmpty : PohonBiner ---> boolean
#IsTreeEmpty(P) is true if P is empty
def IsTreeEmpty(P):
    if P is None:
        return True
    else:
        return False

#IsOneElmtPB function
#DefSpek
#IsOneElmtPB : PohonBiner ---> boolean
#IsOneElmtPB(P) is true if P has exactly one element, namely /A\
def IsOneElmtPB(P):
    if not IsTreeEmpty(P) and IsTreeEmpty(Right(P)) and IsTreeEmpty(Left(P)):
        return True
    else:
        return False

#IsUnerLeftPB function
#DefSpek
#IsUnerLeftPB : PohonBiner ---> boolean
#IsUnerLeftPB(P) is true if P has a left subtree only
def IsUnerLeftPB(P):
    if not IsTreeEmpty(P) and IsTreeEmpty(Right(P)) and not IsTreeEmpty(Left(P)):
        return True
    else:
        return False

#IsUnerRightPB function
#DefSpek
#IsUnerRightPB : PohonBiner ---> boolean
#IsUnerRightPB(P) is true if P has a right subtree only
def IsUnerRightPB(P):
    if not IsTreeEmpty(P) and not IsTreeEmpty(Right(P)) and IsTreeEmpty(Left(P)):
        return True
    else:
        return False

#IsBinerPB function
#DefSpek
#IsBinerPB : PohonBiner ---> boolean
#IsBinerPB(P) is true if P has both a left and a right subtree
def IsBinerPB(P):
    if not IsTreeEmpty(P) and not IsTreeEmpty(Right(P)) and not IsTreeEmpty(Left(P)):
        return True
    else:
        return False

#IsExistRightPB function
#DefSpek
#IsExistRightPB : non-empty PohonBiner ---> boolean
#IsExistRightPB(P) is true if P has a right subtree
def IsExistRightPB(P):
    if not IsTreeEmpty(P) and not IsTreeEmpty(Right(P)):
        return True
    else:
        return False

#IsExistLeftPB function
#DefSpek
#IsExistLeftPB : non-empty PohonBiner ---> boolean
#IsExistLeftPB(P) is true if P has a left subtree
def IsExistLeftPB(P):
    if not IsTreeEmpty(P) and not IsTreeEmpty(Left(P)):
        return True
    else:
        return False

P1 = MakePB(1,
            MakePB(2, MakePB(4, None, None), MakePB(5, None, None)),
            MakePB(3, None, MakePB(6, MakePB(7, None, None),
                                   MakePB(8, None, None))))
P2 = MakePB(2, MakePB(4, None, None), MakePB(5, None, None))
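#Hedged example (an assumed addition, not part of the original exercise):
#a preorder traversal written only with the selectors above, applied to the
#sample trees P1 and P2.
def Preorder(P):
    if IsTreeEmpty(P):
        return []
    return [Akar(P)] + Preorder(Left(P)) + Preorder(Right(P))

print(Preorder(P1)) #expected: [1, 2, 4, 5, 3, 6, 7, 8]
print(Preorder(P2)) #expected: [2, 4, 5]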
class Dog:
    def __init__(self, name, posx, posy):
        self.name = name
        self.posx = posx
        self.posy = posy
        self.awaken = False
        self.hungry = False
        self.counter = 0

    def awake(self):
        if self.awaken:
            print(self.name + ' is already awake')
        else:
            self.awaken = True
            print(self.name + ' is no longer asleep')

    def move(self, x1, y1):
        if self.hungry:
            print(self.name + ' is hungry')
        elif self.awaken:
            self.posx += x1
            self.posy += y1
            self.counter += 1
        else:
            print(self.name + ' is asleep')
        # after three moves without being fed, the dog gets hungry
        if self.counter >= 3:
            self.hungry = True

    def feed(self):
        self.counter = 0
        self.hungry = False
        print(self.name + ' is no longer hungry')


MyDog = Dog('Lambda', 0, 0)
print(MyDog.posx, MyDog.posy)
MyDog.move(1, 1)
MyDog.awake()
MyDog.move(1, 0)
print(MyDog.posx, MyDog.posy)
MyDog.move(0, 1)
print(MyDog.posx, MyDog.posy)
MyDog.move(1, 1)
print(MyDog.posx, MyDog.posy)
MyDog.move(1, 1)
print(MyDog.posx, MyDog.posy)
MyDog.feed()
MyDog.move(1, 0)
print(MyDog.posx, MyDog.posy)
# mypy: ignore-errors from .. import fixtures from ..assertions import eq_ from ..schema import Column from ..schema import Table from ... import Integer from ... import select from ... import testing from ... import union class DeprecatedCompoundSelectTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("y", Integer), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "x": 1, "y": 2}, {"id": 2, "x": 2, "y": 3}, {"id": 3, "x": 3, "y": 4}, {"id": 4, "x": 4, "y": 5}, ], ) def _assert_result(self, conn, select, result, params=()): eq_(conn.execute(select, params).fetchall(), result) def test_plain_union(self, connection): table = self.tables.some_table s1 = select(table).where(table.c.id == 2) s2 = select(table).where(table.c.id == 3) u1 = union(s1, s2) with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) # note we've had to remove one use case entirely, which is this # one. the Select gets its FROMS from the WHERE clause and the # columns clause, but not the ORDER BY, which means the old ".c" system # allowed you to "order_by(s.c.foo)" to get an unnamed column in the # ORDER BY without adding the SELECT into the FROM and breaking the # query. Users will have to adjust for this use case if they were doing # it before. def _dont_test_select_from_plain_union(self, connection): table = self.tables.some_table s1 = select(table).where(table.c.id == 2) s2 = select(table).where(table.c.id == 3) u1 = union(s1, s2).alias().select() with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) @testing.requires.order_by_col_from_union @testing.requires.parens_in_union_contained_select_w_limit_offset def test_limit_offset_selectable_in_unions(self, connection): table = self.tables.some_table s1 = select(table).where(table.c.id == 2).limit(1).order_by(table.c.id) s2 = select(table).where(table.c.id == 3).limit(1).order_by(table.c.id) u1 = union(s1, s2).limit(2) with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) @testing.requires.parens_in_union_contained_select_wo_limit_offset def test_order_by_selectable_in_unions(self, connection): table = self.tables.some_table s1 = select(table).where(table.c.id == 2).order_by(table.c.id) s2 = select(table).where(table.c.id == 3).order_by(table.c.id) u1 = union(s1, s2).limit(2) with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) def test_distinct_selectable_in_unions(self, connection): table = self.tables.some_table s1 = select(table).where(table.c.id == 2).distinct() s2 = select(table).where(table.c.id == 3).distinct() u1 = union(s1, s2).limit(2) with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) def test_limit_offset_aliased_selectable_in_unions(self, connection): table = self.tables.some_table s1 = ( select(table) 
.where(table.c.id == 2) .limit(1) .order_by(table.c.id) .alias() .select() ) s2 = ( select(table) .where(table.c.id == 3) .limit(1) .order_by(table.c.id) .alias() .select() ) u1 = union(s1, s2).limit(2) with testing.expect_deprecated( "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): self._assert_result( connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] )
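# Hedged sketch (not part of this test suite): the non-deprecated spelling
# wraps the UNION in a subquery and orders by the subquery's exported
# columns, rather than relying on the deprecated SelectBase.c attribute.
def _modern_union_order_by(table):
    s1 = select(table).where(table.c.id == 2)
    s2 = select(table).where(table.c.id == 3)
    u = union(s1, s2).subquery()
    return select(u).order_by(u.c.id)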
# -*- coding: utf-8 -*-
"""
This module is for demonstration purposes only and the integrators here
are not meant for production use. Consider them provisional, i.e., the API
here may break without prior deprecation.
"""

import math
import warnings

import numpy as np

from .util import import_

lu_factor, lu_solve = import_('scipy.linalg', 'lu_factor', 'lu_solve')


class RK4_example_integrator:
    """
    This is an example of how to implement a custom integrator. It uses
    a fixed step size and is usually not useful for real problems.
    """

    with_jacobian = False

    @staticmethod
    def integrate_adaptive(rhs, jac, y0, x0, xend, dx0, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        xspan = xend - x0
        n = int(math.ceil(xspan/dx0))
        yout = [y0[:]]
        xout = [x0]
        k = [np.empty(len(y0)) for _ in range(4)]
        for _ in range(n):  # n capped steps of dx0 reach xend exactly
            x, y = xout[-1], yout[-1]
            h = min(dx0, xend-x)
            rhs(x, y, k[0])
            rhs(x + h/2, y + h/2*k[0], k[1])
            rhs(x + h/2, y + h/2*k[1], k[2])
            rhs(x + h, y + h*k[2], k[3])
            yout.append(y + h/6 * (k[0] + 2*k[1] + 2*k[2] + k[3]))
            xout.append(x+h)
        return np.array(xout), np.array(yout), {'nfev': n*4}

    @staticmethod
    def integrate_predefined(rhs, jac, y0, xout, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        x_old = xout[0]
        yout = [y0[:]]
        k = [np.empty(len(y0)) for _ in range(4)]
        for x in xout[1:]:
            y = yout[-1]
            h = x - x_old
            rhs(x_old, y, k[0])
            rhs(x_old + h/2, y + h/2*k[0], k[1])
            rhs(x_old + h/2, y + h/2*k[1], k[2])
            rhs(x_old + h, y + h*k[2], k[3])
            yout.append(y + h/6 * (k[0] + 2*k[1] + 2*k[2] + k[3]))
            x_old = x
        return np.array(yout), {'nfev': (len(xout)-1)*4}


class EulerForward_example_integrator:

    with_jacobian = False
    integrate_adaptive = None

    @staticmethod
    def integrate_predefined(rhs, jac, y0, xout, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        x_old = xout[0]
        yout = [y0[:]]
        f = np.empty(len(y0))
        for x in xout[1:]:
            y = yout[-1]
            h = x - x_old
            rhs(x_old, y, f)
            yout.append(y + h*f)
            x_old = x
        return np.array(yout), {'nfev': (len(xout)-1)}


class Midpoint_example_integrator:

    with_jacobian = False
    integrate_adaptive = None

    @staticmethod
    def integrate_predefined(rhs, jac, y0, xout, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        x_old = xout[0]
        yout = [y0[:]]
        f = np.empty(len(y0))
        for x in xout[1:]:
            y = yout[-1]
            h = x - x_old
            rhs(x_old, y, f)
            dy_efw = h*f
            rhs(x_old + h/2, y + dy_efw/2, f)
            yout.append(y + h*f)
            x_old = x
        return np.array(yout), {'nfev': (len(xout)-1)}


class EulerBackward_example_integrator:

    with_jacobian = True
    integrate_adaptive = None

    @staticmethod
    def integrate_predefined(rhs, jac, y0, xout, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        x_old = xout[0]
        yout = [y0[:]]
        f = np.empty(len(y0))
        j = np.empty((len(y0), len(y0)))
        I = np.eye(len(y0))
        for x in xout[1:]:
            y = yout[-1]
            h = x - x_old
            jac(x_old, y, j)
            lu_piv = lu_factor(h*j - I)
            rhs(x, y, f)
            ynew = y + f*h
            norm_delta_ynew = float('inf')
            while norm_delta_ynew > 1e-12:
                rhs(x, ynew, f)
                delta_ynew = lu_solve(lu_piv, ynew - y - f*h)
                ynew += delta_ynew
                norm_delta_ynew = np.sqrt(np.sum(np.square(delta_ynew)))
            yout.append(ynew)
            x_old = x
        return np.array(yout), {'nfev': (len(xout)-1)}


class Trapezoidal_example_integrator:

    with_jacobian = True
    integrate_adaptive = None

    @staticmethod
    def integrate_predefined(rhs, jac, y0, xout, **kwargs):
        if kwargs:
            warnings.warn("Ignoring keyword-arguments: %s" %
                          ', '.join(kwargs.keys()))
        x_old = xout[0]
        yout = [y0[:]]
        f = np.empty(len(y0))
        j = np.empty((len(y0), len(y0)))
        I = np.eye(len(y0))
        for x in xout[1:]:
            y = yout[-1]
            h = x - x_old
            jac(x_old, y, j)
            lu_piv = lu_factor(h*j - I)
            rhs(x, y, f)
            euler_fw_dy = f*h
            ynew = y + euler_fw_dy
            norm_delta_ynew = float('inf')
            while norm_delta_ynew > 1e-12:
                rhs(x, ynew, f)
                delta_ynew = lu_solve(lu_piv, ynew - y - f*h)
                ynew += delta_ynew
                norm_delta_ynew = np.sqrt(np.sum(np.square(delta_ynew)))
            yout.append((ynew + y + euler_fw_dy)/2)
            x_old = x
        return np.array(yout), {'nfev': (len(xout)-1)}
from graphene_federation import build_schema from .mutation import Mutation from .query import Query schema = build_schema(query=Query, mutation=Mutation)
import argparse from glob import glob import importlib import hashlib import logging import os from typing import Optional from pydantic import BaseModel import re from sqlalchemy import create_engine, text import sqlalchemy from sqlalchemy import exc from sqlalchemy.exc import InternalError, OperationalError from sqlalchemy.orm import Session import sys import random import time import yaml from migreat import __VERSION__ # Log config logger = logging.getLogger('MiGreat') logger.setLevel(logging.INFO) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter("%(levelname)s: %(asctime)s - %(message)s")) logger.addHandler(ch) class Config(BaseModel): """ Config file schema """ hostname: str port: int = 5432 database: str priv_db_username: str priv_db_password: str service_db_username: str service_db_password: str service_schema: str group: Optional[str] legacy_sqlalchemy: bool = False max_conn_retries: int = 10 conn_retry_interval: int = 5 migration_table: str = "migrate_version" dead: Optional[bool] = False use_advisory_lock: Optional[bool] = False class MiGreat: """ Encapsulates MiGreat functionality. """ OPER_CREATE = "create" OPER_INIT = "init" OPER_UPGRADE = "upgrade" """ CLI operations """ SCRIPTS_DIR = os.path.abspath( os.path.join( os.curdir, "versions" ) ) """ Migration scripts directory """ CONFIG_FILE = os.path.abspath( os.path.join( os.curdir, "MiGreat.yaml" ) ) """ MiGreat configuration file """ TEMPLATES_DIR = os.path.abspath( os.path.join( os.path.dirname(__file__), "templates" ) ) """ Templates directory """ @staticmethod def cli(): """ Runs the migration batch. """ parser = argparse.ArgumentParser(description="MiGreat CLI") parser.add_argument( "oper", type=str, choices=( MiGreat.OPER_CREATE, MiGreat.OPER_INIT, MiGreat.OPER_UPGRADE, ), ) parser.add_argument( "--version", type=int, default=None, help="Version to downgrade to if downgrading", ) parser.add_argument( "--verbose", action="store_true", default=False, help="Enable verbose output", ) args = parser.parse_args() if args.verbose is True: logger.setLevel(logging.DEBUG) if args.oper == MiGreat.OPER_INIT: logger.info("Initializing MiGreat") try: os.mkdir(MiGreat.SCRIPTS_DIR) except FileExistsError: logger.error(f"The directory {MiGreat.SCRIPTS_DIR} already exists") sys.exit(1) with open(os.path.join(MiGreat.TEMPLATES_DIR, "MiGreat.yaml"), "rt") as config_file: config_template = config_file.read() with open(MiGreat.CONFIG_FILE, "wt") as config_file: config_file.write(config_template) logger.info("MiGreat initialized at ./") logger.info("Please adjust defaults in ./MiGreat.yaml") elif args.oper == MiGreat.OPER_CREATE: mg = MiGreat.from_yaml() mg.create() else: assert args.oper == MiGreat.OPER_UPGRADE mg = MiGreat.from_yaml() config = mg.config if config.use_advisory_lock: priv_engine = MiGreat.connect( config.hostname, config.port, config.database, config.priv_db_username, config.priv_db_password, config.conn_retry_interval, config.max_conn_retries, False, ) sha_start = hashlib.sha256(config.service_schema.encode('utf8')).digest()[:4] lock_id = int.from_bytes(sha_start, 'little') with priv_engine.connect() as lock_conn: logger.info("Waiting for advisory lock") # Block until lock is available. This allows init container to wait on all replicas # until the migration is complete. 
lock_conn.execute(text(f"SELECT pg_advisory_lock({lock_id})")) logger.info("Lock acquired") mg.upgrade() logger.info("Releasing lock") else: mg.upgrade() @staticmethod def from_yaml() -> "MiGreat": """ Initializes and returns a MiGreat instance from the yaml configuration file. """ if not os.path.exists(MiGreat.CONFIG_FILE): logger.error("Couldn't find MiGreat config file. Try initializing the space first.") sys.exit(1) if not os.path.exists(MiGreat.SCRIPTS_DIR): logger.error("Couldn't find MiGreat scripts directory. Try initializing the space first.") sys.exit(1) with open(MiGreat.CONFIG_FILE) as config_file: the_yaml = yaml.safe_load(config_file) annotations = Config.__annotations__ for key, value in the_yaml.items(): match = MiGreat.__VAR_SUBST_MATCHER.match(str(value)) if match is not None: var_name = match.groups()[0] var = os.environ.get(var_name, "") if key in annotations: # Convert to the proper type since all environment variables are strings the_yaml[key] = annotations[key](var) config = Config(**the_yaml) return MiGreat(config) @staticmethod def connect( hostname, port, database, username, password, retry_interval, max_retries, legacy_sqlalchemy, ): """ Returns a connection to the target database. """ logger.debug(f"Connecting to: postgresql://{username}:<password>@{hostname}:{port}/{database}") engine = create_engine( f"postgresql://{username}:{password}@{hostname}:{port}/{database}", future=not legacy_sqlalchemy, ) # Attempt to connect, and retry on failure for _ in range(max_retries+1): try: with engine.connect() as conn: conn.execute(text("SELECT 1")) break except OperationalError as e: logger.info(f"Connection failed, waiting {retry_interval}s before retrying") logger.debug(e) time.sleep(retry_interval) else: logger.error(f"Unable to establish connection after {max_retries+1} attempts") sys.exit(1) return engine def __init__(self, config: Config): """ Initializes an instance of MiGreat. """ logger.info(f"MiGreat {__VERSION__}") self.__config = config @property def config(self) -> Config: """ Returns the configuration object. """ return self.__config def create(self): """ Creates a new migration script from the template. """ highest_version, _ = self.__validate_migrator_scripts() next_version = highest_version + 1 migrator = f"{str(next_version).zfill(4)}_unnamed_migrator.py" with open(os.path.join(MiGreat.TEMPLATES_DIR, "migrator.tmpl"), "rt") as m_tmpl: template = m_tmpl.read() with open(os.path.join(MiGreat.SCRIPTS_DIR, migrator), "wt") as m_script: m_script.write(template) logger.info(f"Wrote new migrator {migrator}") def upgrade(self): """ Runs migrators in order, starting with the next version. Each migrator is independently transacted. Migrations are always executed by the service user. 
""" config = self.config if config.dead is True: # Ensure all elements are deleted and then exit self.__check_and_remove_migraton_controls() return self.__check_and_apply_migration_controls() highest_version, scripts = self.__validate_migrator_scripts() priv_engine = MiGreat.connect( config.hostname, config.port, config.database, config.priv_db_username, config.priv_db_password, config.conn_retry_interval, config.max_conn_retries, config.legacy_sqlalchemy, ) service_engine = MiGreat.connect( config.hostname, config.port, config.database, config.service_db_username, config.service_db_password, config.conn_retry_interval, config.max_conn_retries, config.legacy_sqlalchemy, ) with service_engine.connect() as conn: query = f"SELECT version FROM \"{config.service_schema}\".\"{config.migration_table}\"" if not config.legacy_sqlalchemy: query = text(query) result = conn.execute(query) row = result.fetchone() curr_ver = row[0] if curr_ver == highest_version: logger.info("Migrations are already up to date") sys.exit(0) if curr_ver > highest_version: logger.error("Migration version in database exceeds that of the migration scripts") sys.exit(1) next_version = curr_ver + 1 for script in scripts[curr_ver:]: spec = importlib.util.spec_from_file_location( script[:-3], os.path.join( MiGreat.SCRIPTS_DIR, script, ), ) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) if not hasattr(module, 'upgrade'): logger.error(f"Migrator {script} does not have an upgrade method") sys.exit(1) if not hasattr(module, 'downgrade'): logger.error(f"Migrator {script} does not have a downgrade method") sys.exit(1) if hasattr(module, 'CONFIG_OPTIONS'): CONFIG_OPTIONS = module.CONFIG_OPTIONS else: CONFIG_OPTIONS = {} transact = CONFIG_OPTIONS.get('transact', True) run_as_priv = CONFIG_OPTIONS.get('run_as_priv', False) engine = priv_engine if run_as_priv else service_engine session = Session(engine, future=not config.legacy_sqlalchemy) logger.info(f"Migrating {next_version - 1} to {next_version}") try: if transact: with session.begin(): module.upgrade(session) self.__update_version(session, next_version) else: module.upgrade(session) with session.begin(): self.__update_version(session, next_version) except: logger.error("Migration failed", exc_info=1) sys.exit(1) next_version += 1 def __update_version(self, conn, next_version: int): """ Updates the schema version. """ config = self.config # This is fully qualified in case the privileged user has been selected to perform # the operation. query = f""" UPDATE \"{config.service_schema}\".\"{config.migration_table}\" SET version = :next_version """ if not self.config.legacy_sqlalchemy: query = text(query) conn.execute( query, { "next_version": next_version } ) def __validate_migrator_scripts(self) -> int: """ Validates and returns information about the current migrator scripts. 
""" highest_version = 0 scripts_by_version = {} scripts = [] existing_scripts = glob(os.path.join(MiGreat.SCRIPTS_DIR, "*.py")) for full_path in sorted(existing_scripts): _, filename = os.path.split(full_path) match = MiGreat.__SCRIPT_MATCHER.match(filename) if match is not None: ver = int(match.groups()[0]) if ver in scripts_by_version: logger.error(f"Multiple migrators share version number {ver}") sys.exit(1) scripts_by_version[ver] = filename highest_version = max(highest_version, ver) # Make sure there are no holes in the scripts: if highest_version: for ver in range(1, highest_version + 1): if ver not in scripts_by_version: logger.error(f"Migrator {ver} is missing from the series") sys.exit(1) scripts.append(scripts_by_version[ver]) return highest_version, scripts def __concurrency_protection(self, engine, query, bindings={}): """ Certain first-time operations will create a concurrency violation within the database. These operations need to be handled in separate transactions, and have a degree of retry-ability before failing. """ failure_retries = 5 while failure_retries > 0: with engine.begin() as conn: try: if bindings: conn.execute(query, bindings) else: conn.execute(query) return except sqlalchemy.exc.InternalError as e: logger.info("Possible resource contention, retrying shortly.") logger.debug(e) failure_retries -= 1 if failure_retries == 0: logger.error("Failed to prepare database", exc_info=1) sys.exit(1) # Try to avoid collision by sleeping for a random time interval time.sleep(.5 + random.random()) def __check_and_remove_migraton_controls(self): """ Checks to determine if MiGreat's migration controls have been removed from the target database, and removes them if they haven't been already. """ config = self.config engine = MiGreat.connect( self.config.hostname, self.config.port, self.config.database, self.config.priv_db_username, self.config.priv_db_password, self.config.conn_retry_interval, self.config.max_conn_retries, False, ) if config.group is not None: with engine.begin() as conn: result = conn.execute(text(""" SELECT 1 FROM pg_catalog.pg_roles cr JOIN pg_catalog.pg_auth_members m ON (m.member = cr.oid) JOIN pg_roles r ON (m.roleid = r.oid) WHERE cr.rolname = :username AND r.rolname = :group """), { "username": config.service_db_username, "group": config.group, }) is_group_member = result.fetchone() is not None # Remove schema if is_group_member: self.__concurrency_protection( engine, text(f""" ALTER GROUP "{config.group}" DROP USER "{config.service_db_username}" """) ) with engine.begin() as conn: conn.execute(text(f""" DROP SCHEMA IF EXISTS "{config.service_schema}" CASCADE; DROP USER IF EXISTS "{config.service_db_username}"; """)) def __check_and_apply_migration_controls(self): """ Checks to determine if MiGreat's migration controls have been applied to the target database, and applies them if they have not already been applied. """ engine = MiGreat.connect( self.config.hostname, self.config.port, self.config.database, self.config.priv_db_username, self.config.priv_db_password, self.config.conn_retry_interval, self.config.max_conn_retries, False, ) config = self.config if config.group is not None: # This block mitigates a race condition that can manifest as a failed transaction, # when multiple different services attempt to create the non existant group for the # first time. 
try: with engine.begin() as conn: # Check if group exists result = conn.execute( text(""" SELECT 1 FROM pg_roles WHERE rolname = :group """), { "group": config.group, } ) row = result.fetchone() if row is None: conn.execute( text(f"CREATE GROUP \"{config.group}\"") ) except: logger.info("Continuing... group probably created in parallel") with engine.begin() as conn: # Check if the service user exists result = conn.execute( text("SELECT 1 FROM pg_roles WHERE rolname=:username"), { "username": config.service_db_username, } ) row = result.fetchone() if row is None: logger.info(f'Creating user "{config.service_db_username}"') conn.execute( text( f"CREATE USER \"{config.service_db_username}\" WITH ENCRYPTED PASSWORD :password" ), { "password": config.service_db_password, } ) # Check if the service schema exists result = conn.execute( text(""" SELECT schema_name FROM information_schema.schemata WHERE schema_name = :schema """), { "schema": config.service_schema } ) row = result.fetchone() if row is None: logger.info(f'Creating schema "{config.service_schema}"') conn.execute(text(f"CREATE SCHEMA \"{config.service_schema}\"")) conn.execute(text(f""" GRANT ALL PRIVILEGES ON SCHEMA \"{config.service_schema}\" TO \"{config.service_db_username}\" """)) conn.execute(text(f""" GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA \"{config.service_schema}\" TO \"{config.service_db_username}\" """)) conn.execute(text(f""" ALTER ROLE \"{config.service_db_username}\" SET search_path TO \"{config.service_schema}\", PUBLIC """)) # Check if migration tracking table exists result = conn.execute(text(""" SELECT FROM pg_catalog.pg_class cat JOIN pg_catalog.pg_namespace ns ON ns.oid = cat.relnamespace WHERE ns.nspname = :service_schema AND cat.relname = :migration_table AND cat.relkind = 'r' """), { "service_schema": config.service_schema, "migration_table": config.migration_table, }) row = result.fetchone() if row is None: conn.execute(text(f""" CREATE TABLE \"{config.service_schema}\".\"{config.migration_table}\" ( repository_id TEXT NOT NULL, repository_path TEXT NOT NULL, version INT NOT NULL ); """)) conn.execute(text(f""" INSERT INTO \"{config.service_schema}\".\"{config.migration_table}\" ( repository_id, repository_path, version ) VALUES ( :service_schema, :migrator_dir, :version ) """), { "service_schema": config.service_schema, "migrator_dir": MiGreat.SCRIPTS_DIR, "version": 0, }) conn.execute(text(f""" GRANT ALL PRIVILEGES ON TABLE \"{config.service_schema}\".\"{config.migration_table}\" TO \"{config.service_db_username}\" """)) if config.group is not None: result = conn.execute(text(""" SELECT 1 FROM pg_catalog.pg_roles cr JOIN pg_catalog.pg_auth_members m ON (m.member = cr.oid) JOIN pg_roles r ON (m.roleid = r.oid) WHERE cr.rolname = :username AND r.rolname = :group """), { "username": config.service_db_username, "group": config.group, }) is_group_member = result.fetchone() is not None if config.group is not None and not is_group_member: # This can easily happen at the same time in multiple services that are migrating # concurrently for the first time. We add some contention tollerance logic here. 
            self.__concurrency_protection(
                engine,
                text(f"""
                    ALTER GROUP \"{config.group}\" ADD USER \"{config.service_db_username}\";
                    GRANT USAGE ON SCHEMA \"{config.service_schema}\" TO GROUP \"{config.group}\";
                """)
            )

    __SCRIPT_MATCHER = re.compile(r"^(\d+)_.+\.py$")
    """ Regular expression to match active migrator scripts """

    __VAR_SUBST_MATCHER = re.compile(r"^\$\{(.+)\}$")
    """ Regular expression to perform environment variable injection """
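# ---------------------------------------------------------------------------
# Hedged illustration of the advisory-lock key derivation used in cli():
# the first four bytes of sha256(service_schema), read little-endian, give a
# deterministic 32-bit lock id, so replicas of one service contend on the
# same pg_advisory_lock while other services use different ids. The schema
# name below is made up.
# ---------------------------------------------------------------------------
def _example_lock_id(schema: str = "my_service") -> int:
    sha_start = hashlib.sha256(schema.encode("utf8")).digest()[:4]
    return int.from_bytes(sha_start, "little")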
# -*- coding: utf-8 -*- """ Created on Tue Nov 17 17:41:47 2020 @author: Koustav """ import os import glob import math import matplotlib.pyplot as plt import seaborn as sea import numpy as np import pandas as pan from scipy.optimize import curve_fit import matplotlib.ticker as mtick ''' This is a script that is specifically designed to ascertain, read and subsequently operate on data present in an arbitrary number of CSV files scattered across various folders.''' def ticks(y, pos): return r'$e^{:.0f}$'.format(np.log(y)) def ln_pow_law(x, a, tau): return a - tau*x def pow_law(x, a, expo): return a*(np.power(x, expo)) def starter_pack(): crt_exp_gamma_beta() #crt_exp_nuu() # Refer to inline descriptions of these functions to discern the layout and organisation of the CSV data. base_path = r"13th Nov\Output" files = glob.glob(base_path + "/**/*.csv", recursive=True) m=0; L=[] for file in files: print(file) if (os.path.getsize(file) > 1024): print(True) else: print(False) abba = (m, int(''.join(filter(lambda i: i.isdigit(), file)))) #Tuple that stores index number of files list alongside a concatanation of all the digits present inside "file". L.append(abba) m+=1 for x in range(0, len(files)): print(L[x], end="\t") print(files[x]) #sort_files = sorted(files, key=int(''.join(filter(lambda i: i.isdigit(), files)))) def crt_exp_gamma_beta(): ''' The CSV data is grouped into the following columns: The first column contains the occupation probability ( p ----> p_c = 0.728 for DP class) at which the simulations were run. The second column contains the grid size of the lattices, while the third stores the current simulation number. The fourth and fifth column usually store the cluster size and the corresponding n_s(p) value for a given simulation, with the exception of the last entry into each of these columns for each trial number, which stores -10 (indicative of a spanning cluster) and P[p] respectively. ''' unspliced_data = [] #This will store the raw unspliced data without any processing from all the CSV files. for i in range(0,9): base_path = r"13th Nov\Output" + "\\" + str(i) #print(base_path) files = glob.glob(base_path + "/**/*.csv", recursive=True) for file in files: if (os.path.getsize(file) > 512): #Filtering for file sizes that are greater than 512 Bytes in size. print(file) data_temp = np.genfromtxt('%s' %(file), delimiter=",", comments='#', skip_header=1) if(len(unspliced_data) == 0): #First chip off the old block. unspliced_data = data_temp else: if( unspliced_data[-1,1] == data_temp[-1,1]): ''' The current CSV stores experiments performed using the same relevant parameters (specifically grid size) as the last CSV file that was processed.''' data_temp[:,2] += unspliced_data[-1,2] print("Boris") # We update the trial numbers in the new CSV file to be contiguous with the trial numbers from # the previous CSV, if the experimental data is the same. unspliced_data = np.concatenate((unspliced_data, data_temp), axis=0) m_data = exxon_split(unspliced_data) m_data = np.array(m_data) #s= input("Enter any key to continue.") s=0 '''for x in m_data: if(s % 25 == 0): print("%d \t %d \t %f \t %f" %(int(x[1]), int(x[2]), x[3], x[4])) s+=1 print("Total:\t %d" %(s))''' ''' Now to thresh out the percolation strength data from m_data. Remember, P[p] data is characterised by a -10 entry in the 4th column.''' split_data = m_data[:,3] == -10 perc_data = m_data[split_data] #Final data form for percolation strength. 
post_transcript = m_data[~split_data] #Final data form for average cluster size calculations '''print("Collated Percolation Data:") for x in perc_data: print("%d \t %d \t %f \t %f" %(int(x[1]), int(x[2]), x[3], x[4])) s+=1 print("Total:\t %d" %(s)) s= input("Enter any key to continue.") s=0 for x in post_transcript: if(s % 25 == 0): print("%d \t %d \t %f \t %f" %(int(x[1]), int(x[2]), x[3], x[4])) s+=1 print("Total:\t %d" %(s))''' plt_beta(perc_data) #Makes Finite Sized Scaling Plots For The Beta Critical Exponent. plt_gamma(post_transcript) #Makes Finite Sized Scaling Plots For The Gamma Critical Exponent. def crt_exp_nuu(): ''' The CSV data is grouped into the following columns: The first column contains the occupation probability ( p ----> p_c = 0.728 for DP class) at which the simulations were run. The second column contains the grid size of the lattices, while the third stores the current simulation number. The fourth and fifth column usually store the p and the p^2 values for a given simulation, at which the given system has been found to percolate for the first time (Ahorny, Stauffer, Dietrich Pgs 70-75) | p_c ; L ; # ; p' ; (p')^2 | ''' unspliced_data = [] #This will store the raw unspliced data without any processing from all the CSV files. for i in range(0,19): base_path = r"20th Nov\Output" + "\\" + str(i) #print(base_path) files = glob.glob(base_path + "/**/*.csv", recursive=True) for file in files: if (os.path.getsize(file) > 512): #Filtering for file sizes that are greater than 512 Bytes in size. print(file) data_temp = np.genfromtxt('%s' %(file), delimiter=",", comments='#', skip_header=1) if(len(unspliced_data) == 0): #First chip off the old block. unspliced_data = data_temp else: if( unspliced_data[-1,1] == data_temp[-1,1]): ''' The current CSV stores experiments performed using the same relevant parameters (specifically grid size) as the last CSV file that was processed.''' data_temp[:,2] += unspliced_data[-1,2] print("Boris") # We update the trial numbers in the new CSV file to be contiguous with the trial numbers from # the previous CSV, if the experimental data is the same. unspliced_data = np.concatenate((unspliced_data, data_temp), axis=0) m_data = exxon_split(unspliced_data) m_data = np.array(m_data) plt_nuu(m_data) def plt_beta(perc_data): # Plots percolation data using Seaborn and stores it to the relevant directory. p = perc_data[0,0] #Occupation Probability value at which simulations were run. g1 = int(perc_data[0,1]) #Starting Grid Size g2 = int(perc_data[-1,1]) #Ending Grid Size. print("%f \t G1---- %d \t G2---- %d" %(p,g1,g2)) os.chdir(r"..\..\figures") # Changing to relevant directory. 
if(os.path.isdir("CrtExp")==False): os.mkdir("CrtExp") os.chdir("CrtExp") if(os.path.isdir("DP")==False): os.mkdir("DP") os.chdir("DP") if(os.path.isdir("Finite Scaling")==False): os.mkdir("Finite Scaling") os.chdir("Finite Scaling") if(os.path.isdir("Beta")==False): os.mkdir("Beta") os.chdir("Beta") hurtlocker= pan.DataFrame(perc_data, columns= ["p", "L", "Trial Number", "-10", r"P[p]"]) x1 =np.transpose(perc_data[:,1]) x2= np.transpose(perc_data[:,4]) g= sea.lineplot(data=hurtlocker, x="L" , y="P[p]", estimator='mean', ci='sd', marker="s", err_style="band") popt, pcov = curve_fit(pow_law, x1, x2, p0= np.asarray([0.3, -0.005])) perr = np.sqrt(np.diag(pcov)) print("SD of C:\t" +str(perr[1]) + " for p:\t" +str(p)) tukan= (popt[0], -popt[1], perr[1]) plt.plot(x1, pow_law(x1, *popt), 'm--', label=r'Th Fit: $ P[p] = %5.4f \times L^{-(%5.4f \mp %5.4f)} $ ' % tukan ) plt.xlim(math.exp(3),g2+20) plt.yscale('log', basey= math.e) plt.xscale('log', basex= math.e) g.xaxis.set_major_formatter(mtick.FuncFormatter(ticks)) g.yaxis.set_major_formatter(mtick.FuncFormatter(ticks)) plt.legend() g.set_title(r'$p = %5.4f, ( \xi \longrightarrow \infty ) $' %(p)) plt.savefig("Log Line Beta P(p) vs L (p--%8.7f) (Range-- %d-%d).png" %(p, g1, g2), dpi=400) plt.show() plt.close() os.chdir(r"..\..\..\..\..\analysis\Mass Action") #Returning to our home directory. def plt_gamma(post_transcript): # Plots average cluster size data using Seaborn and stores it to the relevant directory. p = post_transcript[0,0] #Occupation Probability value at which simulations were run. g1 = int(post_transcript[0,1]) #Starting Grid Size g2 = int(post_transcript[-1,1]) #Ending Grid Size. print("%f \t G1---- %d \t G2---- %d" %(p,g1,g2)) os.chdir(r"..\..\figures") # Changing to relevant directory. if(os.path.isdir("CrtExp")==False): os.mkdir("CrtExp") os.chdir("CrtExp") if(os.path.isdir("DP")==False): os.mkdir("DP") os.chdir("DP") if(os.path.isdir("Finite Scaling")==False): os.mkdir("Finite Scaling") os.chdir("Finite Scaling") if(os.path.isdir("Gamma")==False): os.mkdir("Gamma") os.chdir("Gamma") g=g1; trl_no=1;a=0; b=0; nu_data=[] for x in range(0, post_transcript[:,1].size): if(g != post_transcript[x,1]): b=x; denom = float(np.sum(post_transcript[a:b,4])); denom2= 2 - (1/post_transcript[x,0]); s_nsp= np.multiply(post_transcript[a:b,3], post_transcript[a:b,4]) denom= float(np.sum(s_nsp)) #Calculating S[p] for a given trial number as per defn. s2_nsp = np.multiply(post_transcript[a:b,3], s_nsp) numer = float(np.sum(s2_nsp)) nu_data.append([g, trl_no, (numer/denom), (numer/denom2)]) g = post_transcript[x,1] trl_no=1; a=x; elif(trl_no != post_transcript[x,2]): b=x; denom = float(np.sum(post_transcript[a:b,4])); denom2= 2 - (1/post_transcript[x,0]); s_nsp= np.multiply(post_transcript[a:b,3], post_transcript[a:b,4]) denom= float(np.sum(s_nsp)) #Calculating S[p] for a given trial number as per defn. s2_nsp = np.multiply(post_transcript[a:b,3], s_nsp) numer = float(np.sum(s2_nsp)) nu_data.append([g, trl_no, (numer/denom), (numer/denom2)]) trl_no= post_transcript[x,2]; a=x; elif(x == post_transcript[:,1].size -1): #last entry in series b=x+1 denom = float(np.sum(post_transcript[a:b,4])); denom2= 2 - (1/post_transcript[x,0]); s_nsp= np.multiply(post_transcript[a:b,3], post_transcript[a:b,4]) denom= float(np.sum(s_nsp)) #Calculating S[p] for a given trial number as per defn. 
s2_nsp = np.multiply(post_transcript[a:b,3], s_nsp) numer = float(np.sum(s2_nsp)) nu_data.append([g, trl_no, (numer/denom), (numer/denom2)]) break print(" L , # , <S[p]\t") new_data= np.array(nu_data) zerodark30= pan.DataFrame(new_data, columns= ["L", "$Trial Number$", r"$\langle S[p] \rangle$", r"$\langle S'[p] \rangle$"]) x1= np.transpose(new_data[:,0]) x2= np.transpose(new_data[:,2]) g= sea.lineplot(data=zerodark30, x="L" , y=r"$\langle S[p] \rangle$", estimator='mean', ci='sd', marker="s", err_style="band") popt, pcov = curve_fit(pow_law, x1, x2, p0= np.asarray([7.5, 0.15])) perr = np.sqrt(np.diag(pcov)) print("SD of p_avg - p_c:\t" +str(perr[1])) tukan= (popt[0], popt[1], perr[1]) plt.plot(x1, pow_law(x1, *popt), 'm--', label=r'Th Fit: $ \langle S[p] \rangle = %5.4f \times L^{(%5.4f \mp %5.4f)} $ ' % tukan ) plt.xlim(math.exp(3),g2+20) plt.yscale('log', basey= math.e) plt.xscale('log', basex= math.e) g.xaxis.set_major_formatter(mtick.FuncFormatter(ticks)) g.yaxis.set_major_formatter(mtick.FuncFormatter(ticks)) plt.legend() g.set_title(r'$p = %f \quad ( \xi \longrightarrow \infty ) $' %(post_transcript[0,0])) plt.savefig("Log Band Gamma S(p) vs L (p--%8.7f) (Range-- %d-%d).png" %(post_transcript[0,0], g1, g2), dpi=400) plt.show() plt.close() os.chdir(r"..\..\..\..\..\analysis\Mass Action") #Returning to our home directory. def plt_nuu(nudata): # Plots average cluster size data using Seaborn and stores it to the relevant directory. p_c = 0.728 p = nudata[0,0] #Occupation Probability value at which simulations were run. g1 = int(nudata[0,1]) #Starting Grid Size g2 = int(nudata[-1,1]) #Ending Grid Size. print("%f \t G1---- %d \t G2---- %d" %(p,g1,g2)) os.chdir(r"..\..\figures") # Changing to relevant directory. if(os.path.isdir("CrtExp")==False): os.mkdir("CrtExp") os.chdir("CrtExp") if(os.path.isdir("DP")==False): os.mkdir("DP") os.chdir("DP") if(os.path.isdir("Finite Scaling")==False): os.mkdir("Finite Scaling") os.chdir("Finite Scaling") if(os.path.isdir("Nuu")==False): os.mkdir("Nuu") os.chdir("Nuu") g=g1; a=0; b=0; gingerman=[] for x in range(0, nudata[:,1].size): if(g != nudata[x,1]): b=x; print("For size %f, we have b = %d and b - a = %d" %(g, b, (b-a))) mean_p= np.mean(nudata[a:b,3]) mean_p2= np.mean(nudata[a:b,4]) sd_p = np.std(nudata[a:b,3]) gingerman.append([g, mean_p, mean_p2, sd_p]) a=x; g = nudata[x,1] elif(x == nudata[:,1].size -1): #last entry in series b=x+1 print("For size %f, we have b = %d and b - a = %d" %(nudata[x,1], b, (b-a))) mean_p= np.mean(nudata[a:b,3]) mean_p2= np.mean(nudata[a:b,4]) sd_p = np.std(nudata[a:b,3]) gingerman.append([g, mean_p, mean_p2, sd_p]) break npframe = np.array(gingerman) print(npframe.shape) npframe[:,1] -= p_c npframe[:,1] = np.fabs(npframe[:,1]) hurtlocker= pan.DataFrame(npframe, columns= [ "L", r"$ | \langle p \rangle - p_c | $", r"$ \langle p^{2} \rangle $", r"$ \sigma_{p} $"]) x1 =np.transpose(npframe[:,0]) x2= np.transpose(npframe[:,3]) g= sea.scatterplot(data=hurtlocker, x="L" , y= r"$ \sigma_{p} $") popt, pcov = curve_fit(pow_law, x1, x2, p0= np.asarray([0.1, -0.75])) perr = np.sqrt(np.diag(pcov)) print("SD of Sigma_p:\t" +str(perr[1])) tukan= (popt[0], -popt[1], perr[1]) plt.plot(x1, pow_law(x1, *popt), 'm--', label=r'Th Fit: $ \sigma_{p} = %5.4f \times L^{-(%5.4f \mp %5.4f)} $ ' % tukan ) plt.xlim(g1- 10, g2 + 10) plt.legend() g.set_title(r'$ \sigma_{p} \quad vs \quad L$') plt.savefig("Nu Sigma_p vs L (G1--%d G2-- %d).png" %(g1, g2), dpi=400) plt.show() plt.close() x2= np.transpose(npframe[:,1]) g= 
sea.scatterplot(data=hurtlocker, x="L" , y= r"$ | \langle p \rangle - p_c | $") popt, pcov = curve_fit(pow_law, x1, x2, p0= np.asarray([0.05, -0.75])) perr = np.sqrt(np.diag(pcov)) print("SD of p_avg - p_c:\t" +str(perr[1])) tukan= (popt[0], -popt[1], perr[1]) plt.plot(x1, pow_law(x1, *popt), 'm--', label=r'Th Fit: $ | \langle p \rangle - p_c | = %5.4f \times L^{-(%5.4f \mp %5.4f)} $ ' % tukan ) plt.xlim(g1- 10, g2 + 10) plt.legend() g.set_title(r'$ | \langle p \rangle - p_c | \quad vs \quad L $') plt.savefig("Nu p_avg - p_c vs L (G1--%d G2-- %d).png" %(g1, g2), dpi=400) plt.show() plt.close() def exxon_split(unspliced_data): # Arranges unspliced_data in strict ascending order of grid sizes. '''But first, we need to have a comprehensive list of all grid sizes in ascending order)''' a=unspliced_data[0,1]; L=[a] #Initialsing variables so as to detect all grid sizes. for x in range(0,len(unspliced_data[:,1])): #Iterating over all possible grid values to create a list of grid sizes in ascending order. b = unspliced_data[x,1] if( b > a and (b not in L)): print("Bonobo:\t %3.0f" %(b)) # A new grid size has been detected. L.append(b) '''Now for each grid size, all the revelant data from unspliced_data must be spliced out and concatanated into a new array''' a=0; b=0 #Stores relevant splices for each grid size. m_splice =[] for l in L: #Iterating over all the grid sizes, in unspliced_data flag =0; a=0; b=0 for x in range(0,len(unspliced_data[:,1])): #Iterating over unspliced_data. if(l == unspliced_data[x,1] and flag==0): # We have a new hit for the given grid size "l". a=x flag=1 elif(unspliced_data[x,1] != l and flag==1): # The splice for the given grid size "l" just ended and we must extract the relevant slice. b=x; flag=0 print("Slice for grid of size %d is:\t [ %d , %d ]" %(int(l), int(a), int(b))) if (len(m_splice) == 0): #First one in the bag. m_splice = unspliced_data[a:b,:] else: m_splice = np.concatenate((m_splice, unspliced_data[a:b,:]), axis=0) if( x == len(unspliced_data[:,1])-1 and flag == 1): #Special case that only applies to very last row of unspliced_data. b= x+1; flag=0 print("Slice for grid of size %d is:\t [ %d , %d ]" %(int(l), int(a), int(b))) m_splice = np.concatenate((m_splice, unspliced_data[a:b,:]), axis=0) return m_splice; starter_pack()
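'''Hedged sketch (synthetic data, illustrative values): every finite-size
scaling fit above reduces to fitting pow_law(x, a, expo) = a * x**expo with
curve_fit, so a self-contained miniature of that fit looks like this.'''
def demo_pow_law_fit():
    rng = np.random.default_rng(1)
    L = np.array([32.0, 64.0, 128.0, 256.0])
    # fabricate P[p]-like data decaying as L**-0.104 with 1% noise
    y = 0.3 * np.power(L, -0.104) * (1.0 + 0.01 * rng.normal(size=L.size))
    popt, pcov = curve_fit(pow_law, L, y, p0=np.asarray([0.3, -0.1]))
    perr = np.sqrt(np.diag(pcov))
    return popt, perr  # (a, expo) and their standard deviations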
#!/usr/bin/python
# Map Chinese column headers to English key names.
from pypinyin import lazy_pinyin

_map = {
    "姓名": "name",
    "名字": "name",
    "昵称": "nickname",
    "用户名": "username",
    "联系人": "contact_name",
    "联系方式": "tel",
    "座机电话": "phone",
    "企业名称": "company_name",
    "企业地址": "company_address",
    "省份城市": "province_and_city",
    "省份": "province",
    "省": "province",
    "市": "city",
    "城市": "city",
    "行业关键词": "company_keyword",
    "身份证号码": "card_no",
    "身份证": "card_no",
    "手机号码": "tel",
    "手机号": "tel",
    "手机": "tel",
    "电话": "phone",
    "地址": "address",
    "电子邮箱": "email",
    "邮箱": "email",
    "出生日期": "birthday",
    "性别": "gender",
    "邮编": "post_no",
    "行业": "work",
    "月薪": "salary",
    "婚姻": "married",
    "婚姻状况": "married",
    "教育": "education_level",
    "BRAND": "brand",
    "车系": "car_series",
    "车型": "car_model",
    "颜色": "color",
    "发动机号": "engine_no",
    "车架号": "car_struct_no",
}


def get_keyname(h):
    # Mapped headers resolve via the table; anything else falls back to
    # underscore-joined pinyin.
    value = _map[h] if h in _map else "_".join(lazy_pinyin(h))
    return value
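# Usage sketch (the second header is an illustrative unmapped value):
if __name__ == "__main__":
    print(get_keyname("姓名"))  # -> "name"
    print(get_keyname("备注"))  # -> "bei_zhu" via the pinyin fallback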
"""Python neurology toolbox Subpackages ----------- analysis specialized neuroscience analysis functions (gridness score, place maps, e.t.c.) general general signal processing function (smoothing, correlation, e.t.c.) defaults default values for keyword analysis parameters """ from . import defaults from . import errors from . import analysis from . import general __author__ = """Simon Ball""" __email__ = 'simon.ball@ntnu.no' __version__ = '0.6.1'
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import login, authenticate
from django.contrib.auth.models import User
from django.http import JsonResponse

####################
# IMPORT OTHER LIBS
####################
import os
import numpy as np
import seaborn as sns
import cv2
from heatmappy import Heatmapper
from heatmappy.video import VideoHeatmapper
from PIL import Image
import moviepy.editor as mp
import urllib
import glob
import pandas as pd
from pathlib import Path
import shutil
import vimeo_dl as vimeo
import plotly.express as px
import plotly
import plotly.graph_objects as go

from .models import Video, VideoStat

EMOTIONS = [
    'angry', 'disgusted', 'fearful', 'happy', 'neutral', 'sad', 'surprised'
]

# # Create your views here.
# def index(request):
#     return render(request, 'index.html')

heatmap_points = []


def index(request):
    ''' Renders the login + main page '''
    global user
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(username=username, password=password)
        if user is not None:
            # if the user is authenticated
            data = Video.objects.all()
            response_data = {
                "video_data": data,
                "name": username,
                "is_staff": user.is_staff,
            }
            return render(request, 'main.html', response_data)
        return render(request, 'index.html')
    else:
        form = UserCreationForm()
        return render(request, 'index.html', {'form': form})


def video(request, video_id):
    ''' Renders the video page '''
    global video
    video = list(Video.objects.all())[video_id-1]
    VideoStat.objects.filter(video_link=video.video_link, user_id=user.username).delete()
    response_data = {
        "name": user.username,
        "video_name": video.video_name,
        "video_link": video.video_link,
        "is_staff": user.is_staff
    }
    return render(request, 'video.html', response_data)


def calibrate(request):
    return render(request, 'calibration.html')


def recievePoints(request):
    ''' Receives gaze points via an ajax request '''
    x, y = request.GET['x'], request.GET['y']
    time = request.GET['time']
    width, height = request.GET['width'], request.GET['height']
    username = request.GET['username']
    try:
        expressions = urllib.parse.unquote(request.GET['expressions']).split(';')
        expressions = list(map(float, expressions))
    except:
        expressions = []
    try:
        emotion = EMOTIONS[np.argmax(expressions)]
    except:
        emotion = 'None'
    try:
        x, y, time = int(float(x)), int(float(y)), int(float(time))
    except:
        x, y = 0, 0
    try:
        width, height = int(width), int(height)
    except:
        width, height = 0, 0
    VideoStat.objects.create(video_link=video.video_link, user_id=user.username,
                             timestamp=time, emotions=emotion, coordinates=f'{x}:{y}',
                             screen_width=width, screen_height=height)
    return JsonResponse({'ok': True})


def exportStats(request):
    ''' Receives an export request via ajax '''
    # get video data
    entries = VideoStat.objects.filter(video_link=video.video_link)
    DOWNLOAD_PATH = Path('viewer/static/downloads') / video.video_link
    try:
        os.mkdir(DOWNLOAD_PATH)
    except:
        pass
    video_data = vimeo.new(f'https://vimeo.com/{video.video_link}')
    video_data.streams[0].download(quiet=False)
    video_width, video_height = str(video_data.streams[0]).split('@')[-1].split('x')
    video_width, video_height = int(video_width), int(video_height)

    # get video db entries
    heatmap_points = []
    emotion_points = []
    for e in entries:
        x, y = list(map(int, e.coordinates.split(':')))
        time = int(e.timestamp)
        x *= video_width / int(e.screen_width)
        y *= video_height / int(e.screen_height)
        heatmap_points.append([x, y, time])
        emotion_points.append([e.user_id, time//5000, e.emotions])

    emotions = pd.DataFrame(emotion_points)
    emotions.columns = ['user_name', 'timestamp', 'emotion']
    emotion_counts = []
    for (ts, item) in emotions.groupby('timestamp'):
        COUNTER = {
            'timestamp': item['timestamp'].iloc[0] * 5,
            'angry': 0,
            'disgusted': 0,
            'fearful': 0,
            'happy': 0,
            'neutral': 0,
            'sad': 0,
            'surprised': 0,
            'None': 0
        }
        for index, count in item['emotion'].value_counts().items():
            COUNTER[index] = count
        emotion_counts.append(COUNTER.values())
    emotion_counts = pd.DataFrame(emotion_counts)
    emotion_counts.columns = COUNTER.keys()
    emotion_counts.to_csv(DOWNLOAD_PATH / 'out.csv', index=None)

    heatmapper = Heatmapper(point_strength=0.6, opacity=0.8)
    video_heatmapper = VideoHeatmapper(heatmapper)
    heatmap_video = video_heatmapper.heatmap_on_video_path(
        video_path=f'{video_data.title}.mp4',
        points=heatmap_points
    )
    heatmap_video.write_videofile(str(DOWNLOAD_PATH / 'out.mp4'), bitrate="500k", fps=24)

    mp4_files = glob.glob('*.mp4')
    for f in mp4_files:
        if f != 'out.mp4':
            os.remove(f)
    shutil.make_archive(str(DOWNLOAD_PATH), 'zip', str(DOWNLOAD_PATH))
    shutil.rmtree(str(DOWNLOAD_PATH))

    # time-based graph
    fig = px.line(emotion_counts, x="timestamp", y=emotion_counts.columns[1:])
    fig = plotly.graph_objs.Figure(fig.data, fig.layout)
    fig_json_1 = fig.to_json()

    # pie chart
    labels, counts = list(emotions['emotion'].value_counts().index), list(emotions['emotion'].value_counts().values)
    fig = go.Figure(data=[go.Pie(labels=labels, values=counts)])
    fig_json_2 = fig.to_json()
    return JsonResponse({'ok': True, 'plotly_graph_1': fig_json_1, 'plotly_graph_2': fig_json_2})
# HiQ version 1.0.
#
# Copyright (c) 2022, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
#
import os
import json
import syslog
import time

import requests
import urllib3

from hiq.utils import read_file
import queue

urllib3.disable_warnings(urllib3.exceptions.SecurityWarning)
# Suppress warnings about deprecated use of certificate common name.
# RFC-2818 deprecated the CN field in server certificates, but it's still
# the most common way to indicate the server hostname. The openssl tool
# makes it ridiculously difficult to use the SAN field (the recommended
# alternative), and support for SAN across various systems seems sketchy,
# at best. The Python maintainers have decided that it's worth a nasty
# runtime warning. We don't agree with that, so I've added this line.

import datetime


def _emit_log(sev, msg):
    """
    Emit a log line to stdout.

    sev should be a syslog error level; msg can be any text string.
    """
    # A single leading underscore is used deliberately: a double-underscore
    # name would be mangled when referenced from inside the class below and
    # raise NameError at runtime.
    print(datetime.datetime.now().isoformat(), sev, msg)


class HttpMetricsClient(object):
    """
    HttpMetricsClient is a generic class for transmitting metrics to any metrics server by HTTP

    This includes the config needed to create and submit metrics, along with
    the requests.Session we use to send the data.

    Examples:

        .. highlight:: python
        .. code-block:: python

            from hiq.http_metric_client import HttpMetricsClient as Client
            from time import monotonic

            client = Client(url=...)
            METRIC_NAME = "hiq.predict"
            metric = Client.metric_calc_delta(f"{METRIC_NAME}.success", start_time=monotonic())
            metrics_queue.put_nowait(metric)
            #...
            retry_count=1
            client.gauge_metric("operation_retry_count", retry_count)
            client.submit_metrics_queue()
    """

    @staticmethod
    def metric_calc_delta(metric, start_time):
        """Build a metric carrying the elapsed-time measurement"""
        end_time = time.monotonic()
        delta = round((end_time - start_time) * 1000)
        metric_data = {
            "name": metric,
            "datapoints": [
                {"timestamp": int(round(time.time() * 1000)), "value": delta}
            ],
        }
        return metric_data

    def __init__(
        self,
        url,
        ad_longform=None,
        metrics_queue=None,
        trusted_cert=None,
        project="hiq",
        timeout=5,
    ):
        """Constructor

        Parameters:
            url: The metrics data server URL
            ad_longform: the long-form name of the availability domain, in the style of `eu-frankfurt-ad-1`
            metrics_queue: a Python queue.Queue object used for queuing metrics
            trusted_cert: The filename of the root certificate for authenticating the server, if needed
        """
        self.conf = {}
        self.conf["hostname"] = read_file("/etc/hostname", by_line=False, strip=True)
        region = read_file("/etc/region", by_line=False, strip=True)
        self.conf["region"] = region if region else "none"
        self.conf["ad_longform"] = ad_longform
        self.conf["url"] = url
        self.metrics_queue = metrics_queue if metrics_queue else queue.Queue()
        if not trusted_cert or not os.path.exists(trusted_cert):
            trusted_cert = "/etc/pki/ca-trust/extracted/openssl/ca-bundle.trust.crt"
        self.trusted_cert = trusted_cert if os.path.exists(trusted_cert) else None
        self.timeout = timeout
        self.project = project

    def timing_metric_data(self, metric_data):
        """Send a list of metrics to the server

        :param metric_data: list of metrics
        """
        info = self.wrap_metric_data(metric_data=metric_data)
        try:
            resp = requests.put(
                f'{self.conf["url"]}aggregation',
                timeout=self.timeout,
                json=info,
                verify=self.trusted_cert,
            )
        except requests.exceptions.ConnectionError as err:
            _emit_log(syslog.LOG_ERR, f"network connection error: {err}")
        else:
            if resp.status_code < 200 or resp.status_code >= 300:
                _emit_log(
                    syslog.LOG_ERR,
                    f"server status error: {resp} {info} {resp.text}"
                )

    def wrap_metric_data(self, metric_data) -> dict:
        """
        Package up metrics in the data structure the metrics server wants

        :param metric_data: info['metrics'] => list of metrics
        :return: info (metric dict/json)
        """
        info = {
            "project": self.project,
            "hostname": self.conf["hostname"],
            "region": self.conf["region"],
            "availabilityDomain": self.conf["ad_longform"],
            "metrics": metric_data,
        }
        if fleet := os.getenv("T2_FLEET", ""):
            info["fleet"] = fleet
        return info

    def submit_metrics_queue(self):
        """Drain metrics_queue and submit the metrics to the server"""
        metric_data = list(self.metrics_queue.queue)
        if not metric_data:
            msg = "metric_data must not be empty!"
            raise ValueError(msg)
        self.timing_metric_data(metric_data=metric_data)
        # handy for peeking at the request to the server and metric_data
        # _emit_log(syslog.LOG_DEBUG,
        #           f'json: {json.dumps(self.wrap_metric_data(metric_data))}')

    def gauge_metric(self, metric, value):
        """Queue a gauge metric with the given name and value."""
        metric_data = {
            "name": metric,
            "datapoints": [
                {"timestamp": int(round(time.time() * 1000)), "value": value}
            ],
        }
        self.metrics_queue.put_nowait(metric_data)

    def timing_metric(self, metric: str, start_time: int):
        """
        Convenience method to calculate a time delta for a metric and put it
        in the metrics_queue

        :param metric: metric name
        :param start_time: start time in milliseconds
        """
        metric_data = self.metric_calc_delta(metric, start_time)
        self.metrics_queue.put_nowait(metric_data)

    def get(self, query, params=None):
        """
        Method for a mock metrics server API during testing; not applicable to prod
        """
        resp = requests.get(
            f'{self.conf["url"]}{query}',
            params=params,
            timeout=self.timeout,
            verify=self.trusted_cert,
        )
        try:
            return json.loads(resp.text), resp.headers
        except json.JSONDecodeError:
            msg = (
                "Failed to parse query response: "
                + f'{self.conf["url"]}{query} '
                + f"{repr(params)} "
                + f"(response code = {str(resp.status_code)})"
            )
            raise IOError(msg)


if __name__ == "__main__":
    o = HttpMetricsClient(url="", ad_longform="")
from testxsendfile import app as application
def studianumero1(numero1, kk, calcolo, discretizzaarchicerchi, traslax, traslay):
    from calcolatrice.misuras import size2, transponi, UnireMatriciRig, UnireMatriciCol, EstraiUnPezzo, ScalarXMatr
    #print("numero1 333", numero1)
    #print(size2(numero1,1))
    #print(size2(numero1,2))
    printa = 0
    enne = 6
    # calcolo 1: compute the geometry of the areas
    # calcolo 2: compute the minima
    # calcolo 3: translate everything
    # calcolo 4: the outlines
    # calcolo 5: drawing
    from math import floor, pi, cos, sin
    kk = floor(kk)
    import numpy as np
    coordinate = np.matrix("0 0")
    baricentrox = 0
    baricentroy = 0
    area = 0
    momenti_statici = np.matrix("0 0")
    matrice_inerzia = np.matrix("0 0; 0 0")
    centroix = 0
    centroiy = 0
    peso_specifico = 0
    # We are in Python, so indexing starts at i=0 rather than i=1.
    i = 0
    #print("ddasdas",numero1)
    peso_specifico = numero1[0,1]

    if numero1[i,0] == 1.:
        if printa:
            print(numero1)
        #% parse the CSV row
        #% circle case
        #% circle (X Center, Y Center, Radius)
        coordxc = numero1[i,kk+0]
        coordyc = numero1[i,kk+1]
        raggio = numero1[i,kk+2]
        if calcolo == 1:
            area = pi*(raggio**2)
            momenti_statici = np.matrix("0 0")
            baricentrox = coordxc
            baricentroy = coordyc
            centroix = coordxc
            centroiy = coordyc
            inerz = pi*(raggio**4)*0.25
            matrice_inerzia = np.matrix(np.zeros((2,2)))
            matrice_inerzia[0,0] = matrice_inerzia[1,1] = inerz
        elif calcolo == 2:
            centroix = numero1[i,kk+0]-raggio
            centroiy = numero1[i,kk+1]-raggio
        elif calcolo == 3:
            numero1[i,kk+0] = numero1[i,kk+0]+traslax
            numero1[i,kk+1] = numero1[i,kk+1]+traslay
        elif calcolo == 4 or calcolo == 5:
            if calcolo == 4:
                discretizzaarchicerchi = floor(discretizzaarchicerchi/enne)
            #ago=np.linspace(0,2*pi,discretizzaarchicerchi+1)
            #%angoloi=0; angolof=2*pi;
            ago = np.asmatrix(np.linspace(0, 2*pi, discretizzaarchicerchi+1))
            c = transponi(np.asmatrix(np.add(coordxc, ScalarXMatr(raggio, np.cos(ago)))))
            d = transponi(np.asmatrix(np.add(coordyc, ScalarXMatr(raggio, np.sin(ago)))))
            discretizzaarchicerchi = floor(discretizzaarchicerchi)
            coordinate = UnireMatriciRig(c, d)
            #print(coordinate)
            #print(numero1)

    #% arc of a circle case
    #% arc (X Center, Y Center, Radius, Start angle, End angle)
    elif numero1[i,0] == 2.:
        coordxc = numero1[i,kk+0]
        coordyc = numero1[i,kk+1]
        raggio = numero1[i,kk+2]
        angoloi = numero1[i,kk+3]
        angolof = numero1[i,kk+4]
        #print("ciccione",calcolo,coordxc,coordyc,raggio,angoloi,angolof)
        if calcolo == 1:
            from calcolatrice.inerziasettorecoronacircolare import InerSettCoronCir
            baricentrox, baricentroy, area, momenti_statici, matrice_inerzia = InerSettCoronCir(raggio, angolof, angoloi, coordxc, coordyc)
            centroix = baricentrox
            centroiy = baricentroy
            #print(baricentrox,baricentroy,area,momenti_statici,matrice_inerzia)
        elif calcolo == 2:
            centroix = numero1[i,kk+0]-raggio
            centroiy = numero1[i,kk+1]-raggio
        elif calcolo == 3:
            #print("aaa")
            numero1[i,kk+0] = numero1[i,kk+0]+traslax
            numero1[i,kk+1] = numero1[i,kk+1]+traslay
        elif calcolo == 4 or calcolo == 5:
            if calcolo == 4:
                discretizzaarchicerchi = floor(discretizzaarchicerchi/enne)
            discretoarco = floor(1+discretizzaarchicerchi*np.abs(angolof-angoloi)/(2*pi))
            ago = np.asmatrix(np.linspace(angoloi, angolof, discretoarco+1))
            c = transponi(np.asmatrix(np.add(coordxc, ScalarXMatr(raggio, np.cos(ago)))))
            d = transponi(np.asmatrix(np.add(coordyc, ScalarXMatr(raggio, np.sin(ago)))))
            discretoarco = floor(discretoarco)
            VetColonGen = range(1, discretoarco+1)
            c1 = np.matrix(coordxc)
            c2 = np.matrix(coordyc)
            d1 = UnireMatriciCol(c1, c)
            d2 = UnireMatriciCol(c2, d)
            coordinatex = UnireMatriciCol(d1, c1)
            coordinatey = UnireMatriciCol(d2, c2)
            coordinate = UnireMatriciRig(coordinatex, coordinatey)
            #print("cc",coordinate)

    #% thick segment case (x1, y1, x2, y2, thickness)
    elif numero1[i,0] == 2.5:
        #print("evviva")
        coordx1 = numero1[i,kk+0]
        coordy1 = numero1[i,kk+1]
        coordx2 = numero1[i,kk+2]
        coordy2 = numero1[i,kk+3]
        spessore = numero1[i,kk+4]
        if calcolo == 1:
            lunghezza = ((coordx1-coordx2)**2+(coordy1-coordy2)**2)**0.5
            area = spessore*lunghezza
            baricentrox = (coordx1+coordx2)/2
            baricentroy = (coordy1+coordy2)/2
            centroix = baricentrox
            centroiy = baricentroy
            momentostatx = area*baricentroy
            momentostaty = area*baricentrox
            momenti_statici = np.asmatrix(np.zeros((1,2)))
            momenti_statici[0,0] = momentostatx
            momenti_statici[0,1] = momentostaty
            from math import atan2
            angolo = atan2(coordy1-coordy2, coordx1-coordx2)
            inerziax = spessore*(lunghezza**3)/12*(sin(angolo))**2
            inerziay = spessore*(lunghezza**3)/12*(cos(angolo))**2
            # product of inertia of an inclined thin strip goes as sin*cos
            inerziaxy = spessore*(lunghezza**3)*sin(angolo)*cos(angolo)/12
            matrice_inerzia = np.asmatrix(np.zeros((2,2)))
            matrice_inerzia[0,0] = inerziay
            matrice_inerzia[0,1] = inerziaxy
            matrice_inerzia[1,0] = matrice_inerzia[0,1]
            matrice_inerzia[1,1] = inerziax
            #print(lunghezza,area,baricentrox,baricentroy,momenti_statici,matrice_inerzia)
        elif calcolo == 2:
            # minima over the two endpoints of the segment
            centroix = np.minimum(numero1[i,kk+0], numero1[i,kk+2])
            centroiy = np.minimum(numero1[i,kk+1], numero1[i,kk+3])
        elif calcolo == 3:
            numero1[i,kk+0] = numero1[i,kk+0]+traslax
            numero1[i,kk+1] = numero1[i,kk+1]+traslay
            numero1[i,kk+2] = numero1[i,kk+2]+traslax
            numero1[i,kk+3] = numero1[i,kk+3]+traslay
            #print(numero1)
        elif calcolo == 4:
            coordinate = np.asmatrix(np.zeros((2,2)))
            coordinate[0,0] = coordx1
            coordinate[0,1] = coordy1
            coordinate[1,0] = coordx2
            coordinate[1,1] = coordy2
            #print(coordinate)
        elif calcolo == 5:
            s = spessore
            if coordy1-coordy2 == 0:
                coordinate = np.asmatrix(np.zeros((5,2)))
                coordinate[0,0] = coordx1
                coordinate[1,0] = coordx1
                coordinate[2,0] = coordx2
                coordinate[3,0] = coordx2
                coordinate[4,0] = coordx1
                coordinate[0,1] = coordy1-s/2
                coordinate[1,1] = coordy1+s/2
                coordinate[2,1] = coordy2+s/2
                coordinate[3,1] = coordy2-s/2
                coordinate[4,1] = coordy1-s/2
            elif coordx1-coordx2 == 0:
                coordinate = np.asmatrix(np.zeros((5,2)))
                coordinate[0,0] = coordx1-s/2
                coordinate[1,0] = coordx1+s/2
                coordinate[2,0] = coordx2+s/2
                coordinate[3,0] = coordx2-s/2
                coordinate[4,0] = coordx1-s/2
                coordinate[0,1] = coordy1
                coordinate[1,1] = coordy1
                coordinate[2,1] = coordy2
                coordinate[3,1] = coordy2
                coordinate[4,1] = coordy1
            #print(coordinate)

    elif numero1[i,0] >= 3.:
        #print("bravissimo")
        #print("numero1 5",numero1)
        punti = floor(numero1[i,0])
        #print("numero1",numero1)
        #print(punti)
        #% we should traverse the polyline in the opposite direction
        # from misuras import EstraiUnPezzo
        #numero2=numero1(1,kk+1:kk+((punti)*2))
        #numero2=EstraiUnPezzo(numero1,0,0,kk+1-1,kk-1+(punti)*2)
        pizza2 = size2(numero1, 2)+2
        pizza = np.asmatrix(np.zeros((1, pizza2)))
        #print("pizza",pizza)
        #zeros((1,))
        #print(numero1)
        if printa:
            print(np.size(numero1), kk, punti)
        pizza = np.asmatrix(numero1[:, kk:kk+(punti)*2])
        #pizza=np.asmatrix(EstraiUnPezzo(numero1,0,"end",kk,kk+(punti)*2))
        pizzavecchia = np.asmatrix(EstraiUnPezzo(numero1, 0, "end", 0, kk))
        #print("pv",size2(pizzavecchia,1),size2(pizzavecchia,2))
        #print(pizza)
        # close the polyline by appending its first vertex at the end
        g = np.asmatrix("0 0")
        g[0,0] = pizza[0,0]
        g[0,1] = pizza[0,1]
        numero2 = UnireMatriciRig(pizza, g)
        #print("pizza dopo", numero2)
        VetColonGen = range(0, punti)
        #print(VetColonGen)
        #print("numero22",numero2)
        if calcolo == 1:
            area = 0
            inerziax = 0
            inerziay = 0
            inerziaxy = 0
            momentostatx = 0
            momentostaty = 0
            for k in VetColonGen:
                xi = numero2[i,2*k+0]
                yi = numero2[i,2*k+1]
                xip1 = numero2[i,2*k+2]
                yip1 = numero2[i,2*k+3]
                #print("pip", k, 2*k,2*k+1,2*k+2,2*k+3)
                #print("pip", k, xi,yi,xip1,yip1)
                #print(xi,yi,xip1,yip1)
                if xip1-xi == 0.:
                    # vertical edge: use a very large slope instead of dividing by zero
                    m = 99999999999
                else:
                    m = (yip1-yi)/(xip1-xi)
                q = yi-m*xi
                area = area+(xip1**2-xi**2)*(m/2)+q*(xip1-xi)
                momentostatx = momentostatx+(xip1**3-xi**3)*((m**2)/6)+(xip1**2-xi**2)*(q*m/2)+(xip1-xi)*((q**2)/2)
                momentostaty = momentostaty+(xip1**3-xi**3)*(m/3)+(xip1**2-xi**2)*(q/2)
                inerziax = inerziax+(xip1**4-xi**4)*((m**3)/12)+(xip1**3-xi**3)*((q*m**2)/3)+(xip1**2-xi**2)*((q**2)*m/2)+(xip1-xi)*((q**3)/3)
                inerziay = inerziay+(xip1**4-xi**4)*(m/4)+(xip1**3-xi**3)*(q/3)
                inerziaxy = inerziaxy+(xip1**4-xi**4)*((m**2)/8)+(xip1**3-xi**3)*(q*m/3)+(xip1**2-xi**2)*((q**2)/4)
            #print("area",area,"barix",baricentrox,"bariy",baricentroy)
            if area < 0:
                area = -area
                momentostatx = -(momentostatx)
                momentostaty = -(momentostaty)
                inerziax = -(inerziax)
                inerziay = -(inerziay)
                inerziaxy = -(inerziaxy)
            baricentrox = momentostaty/area
            baricentroy = momentostatx/area
            centroix = baricentrox
            centroiy = baricentroy
            momenti_statici = np.zeros((1,2))
            momenti_statici[0,0] = momentostatx
            momenti_statici[0,1] = momentostaty
            matrice_inerzia = np.zeros((2,2))
            matrice_inerzia[0,0] = inerziay-area*(baricentrox**2)
            matrice_inerzia[0,1] = inerziaxy-area*(baricentrox*baricentroy)
            matrice_inerzia[1,0] = matrice_inerzia[0,1]
            matrice_inerzia[1,1] = inerziax-area*(baricentroy**2)
        elif calcolo == 2:
            centroix = numero2[i,0]
            centroiy = numero2[i,1]
            #print(centroix,centroiy)
            for k in VetColonGen:
                #print(numero2[i,2*k],numero2[i,2*k+1],centroix,centroiy)
                centroix = np.minimum(numero2[i,2*k+0], centroix)
                centroiy = np.minimum(numero2[i,2*k+1], centroiy)
        elif calcolo == 3:
            #print(numero2)
            for k in VetColonGen:
                #print("bee",i,kk+2*k+1-1+1,kk+2*k+1-1+2)
                numero2[i,2*k+0] = numero2[i,2*k+0]+traslax
                numero2[i,2*k+1] = numero2[i,2*k+1]+traslay
            pizzafinale = np.matrix(np.zeros((1, size2(numero1,2)-kk-2*k-2)))
            numero1 = UnireMatriciRig(pizzavecchia, numero2[:,0:-2])
            numero1 = UnireMatriciRig(numero1, pizzafinale)
            #print(numero1)
        elif calcolo == 4 or calcolo == 5:
            #print(numero2)
            coordinate = np.asmatrix(np.zeros((punti+1, 2)))
            #print(coordinate)
            #print("ad",numero2)
            #print(coordinate,VetColonGen,numero2)
            for k in VetColonGen:
                coordinate[k,0] = numero2[i,2*k+0]
                coordinate[k,1] = numero2[i,2*k+1]
                coordinate[k+1,0] = numero2[i,0]
                coordinate[k+1,1] = numero2[i,1]
            #print(coordinate)
            #print(coordinate)

    """
    print("nbaricentri")
    print(coordinate)
    print(baricentrox)
    print(baricentroy)
    print(area)
    print(momenti_statici)
    print(matrice_inerzia)
    print(centroix)
    print(centroiy)
    print(numero1)
    print(peso_specifico)
    """
    coordinate = np.asmatrix(coordinate)
    return coordinate, baricentrox, baricentroy, area, momenti_statici, matrice_inerzia, centroix, centroiy, numero1, peso_specifico
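# A hedged call sketch (the row layout is an assumption read off the parsing
# above: column 0 holds the shape type, column 1 the specific weight, and the
# shape parameters start at column kk; here kk=2 and the row encodes a circle
# of radius 2 centred at the origin):
#
#   import numpy as np
#   riga = np.matrix([[1., 7850., 0., 0., 2.]])
#   coord, xg, yg, area, stat, inerzia, cx, cy, riga, peso = studianumero1(riga, 2, 1, 64, 0, 0)
#   print(area)   # pi * 2**2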
# O(n log n) time, O(n) space
def sortedSquaredArrayLambda(array):
    lst = list(map(lambda x: x ** 2, array))
    return sorted(lst)


def sortedSquaredArrayLoopSquaresPythonic(array):
    lst2 = [i ** 2 for i in array]
    return sorted(lst2)


# O(n) time and O(n) space: two pointers over the sorted input, filling the
# result from the back with the larger absolute value.
def sortedSquaredArray(array):
    sampleList = [0 for _ in array]
    l = 0
    end = len(array) - 1
    r = len(array) - 1
    while l <= r:
        if abs(array[l]) > abs(array[r]):
            sampleList[end] = abs(array[l]) ** 2
            l += 1
        else:
            sampleList[end] = abs(array[r]) ** 2
            r -= 1
        end -= 1
    return sampleList
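# Quick sanity check that the three variants agree (illustrative input):
if __name__ == "__main__":
    data = [-7, -3, 1, 4, 8, 12]
    assert sortedSquaredArray(data) == sortedSquaredArrayLambda(data) == sortedSquaredArrayLoopSquaresPythonic(data)
    print(sortedSquaredArray(data))  # [1, 9, 16, 49, 64, 144]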
import torch
from torch.utils.data import DataLoader, TensorDataset
import numpy as np


class IIDBatchSampler:
    def __init__(self, dataset, minibatch_size, iterations):
        self.length = len(dataset)
        self.minibatch_size = minibatch_size
        self.iterations = iterations

    def __iter__(self):
        for _ in range(self.iterations):
            indices = np.where(torch.rand(self.length) < (self.minibatch_size / self.length))[0]
            if indices.size > 0:
                yield indices

    def __len__(self):
        return self.iterations


class EquallySizedAndIndependentBatchSampler:
    def __init__(self, dataset, minibatch_size, iterations):
        self.length = len(dataset)
        self.minibatch_size = minibatch_size
        self.iterations = iterations

    def __iter__(self):
        for _ in range(self.iterations):
            yield np.random.choice(self.length, self.minibatch_size)

    def __len__(self):
        return self.iterations


def get_data_loaders(minibatch_size, microbatch_size, iterations, drop_last=True):
    def minibatch_loader(dataset):
        return DataLoader(
            dataset,
            batch_sampler=IIDBatchSampler(dataset, minibatch_size, iterations)
        )

    def microbatch_loader(minibatch):
        return DataLoader(
            minibatch,
            batch_size=microbatch_size,
            # Using less data than allowed will yield no worse a privacy guarantee,
            # and sometimes processing uneven batches can cause issues during
            # training, e.g. when using BatchNorm (although BatchNorm in particular
            # should be analyzed separately for privacy, since it maintains internal
            # information about forward passes over time without noise addition).
            # Use a separate IIDBatchSampler class if a more granular training
            # process is needed.
            drop_last=drop_last,
        )

    return minibatch_loader, microbatch_loader
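# A minimal usage sketch (shapes and sizes are illustrative assumptions): each
# Poisson-sampled minibatch is split into microbatches, e.g. for per-example
# gradient clipping in DP-SGD.
if __name__ == "__main__":
    features = torch.randn(1000, 10)
    labels = torch.randint(0, 2, (1000,))
    dataset = TensorDataset(features, labels)
    minibatch_loader, microbatch_loader = get_data_loaders(
        minibatch_size=64, microbatch_size=1, iterations=5)
    for x_mb, y_mb in minibatch_loader(dataset):
        for x, y in microbatch_loader(TensorDataset(x_mb, y_mb)):
            pass  # per-microbatch forward/backward would go here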
import numpy as np
from sklearn.cluster import DBSCAN

from grouping import remove_outliers, helper_object_points, max_min
from data import input_nn, flat_input


# Segment the data with scikit-learn's DBSCAN; takes the point array and returns the labels.
# This DBSCAN is sensitive to the starting point.
# TODO: come up with another idea.
def segmentation(data):
    clustering = DBSCAN(eps=1, min_samples=16).fit(data)
    labels = clustering.labels_
    return labels


# TODO: try using the top view to reduce the computation.
def prepare_data(data_frame):
    labels = segmentation(
        np.array(
            list(
                zip(
                    np.array(data_frame["X"]),
                    np.array(data_frame["Y"]),
                    np.array(data_frame["Z"]),
                )
            )
        )
    )
    data_frame["labels"] = labels
    return data_frame


# Extract the points of the clusters.
def list_of_objects(dataframe):
    num_of_objects = dataframe["labels"].value_counts().index.shape[0] - 1
    list_objects = []
    for i in range(num_of_objects):
        list_objects.append(dataframe[dataframe["labels"] == i])
    return list_objects


# Predict classes for a batch of prepared inputs.
def pred_scene(test_input, session, y_pred_cls, x):
    test_batch_size = 64
    num_obj = len(test_input)
    cls_pred = np.zeros(shape=num_obj, dtype=int)
    # Starting index
    i = 0
    while i < num_obj:
        j = min(i + test_batch_size, num_obj)
        # get the images
        images = test_input[i:j]
        # Calculate the predicted class using TensorFlow
        cls_pred[i:j] = session.run(y_pred_cls, feed_dict={x: images})
        i = j
    return cls_pred


# Take the points of each cluster and build the network input using the functions above.
def predict(data_frame, session, img_length, img_height, y_pred_cls, x):
    # prepare the data and segment it
    data_frame = remove_outliers([data_frame])[0]
    segmented_df = prepare_data(data_frame)
    object_df = list_of_objects(segmented_df)
    dummy_input = []
    # NOTE: these hard-coded grid dimensions override the img_length/img_height
    # arguments passed in.
    img_length = 10
    img_height = 7
    for j in object_df:
        x_max, x_min, y_max, y_min = max_min([j], img_length, img_height, 2)
        object_arr = input_nn(
            j,
            [x_min[0], x_max[0]],
            [y_min[0], y_max[0]],
            0.1,
            img_length,
            img_height,
            2,
        )
        object_arr = flat_input([object_arr]).tolist()[0]
        dummy_input.append(object_arr)
    return pred_scene(np.array(dummy_input), session, y_pred_cls, x)


# Convert the predicted labels into a dictionary of object counts for the scene.
def convert_pred_to_dict(data_frame, session, object_names, img_length, img_height, y_pred_cls, x):
    output_pred = predict(data_frame, session, img_length, img_height, y_pred_cls, x)
    a = {}
    for j in output_pred:
        # labels >= 22 are shifted by one in the object_names table
        if j >= 22:
            if object_names[j + 1] in a:
                a[object_names[j + 1]] = a[object_names[j + 1]] + 1
            else:
                a[object_names[j + 1]] = 1
        else:
            if object_names[j] in a:
                a[object_names[j]] = a[object_names[j]] + 1
            else:
                a[object_names[j]] = 1
    return a
import json

import click

from ml_project.data import read_data, split_train_val_data
from ml_project.entities.train_pipeline_params import read_training_pipeline_params
from ml_project.features import make_features
from ml_project.features.build_features import extract_target, build_transformer
from ml_project.models import (
    train_model,
    serialize_model,
    predict_model,
    evaluate_model,
)
from ml_project.models.model_fit_predict import create_inference_pipeline
from ml_project.utils import init_logger

logger = init_logger("logger")


def train_pipeline(config_path: str):
    training_pipeline_params = read_training_pipeline_params(config_path)
    model_path, metrics = run_train_pipeline(training_pipeline_params)
    return model_path, metrics


def run_train_pipeline(training_pipeline_params):
    logger.info(f"start train pipeline with params {training_pipeline_params}")
    data = read_data(training_pipeline_params.input_data_path)
    logger.info(f"data.shape is {data.shape}")

    train_df, val_df = split_train_val_data(
        data, training_pipeline_params.splitting_params
    )
    logger.info(f"train_df.shape is {train_df.shape}, val_df.shape is {val_df.shape}")

    val_target = extract_target(val_df, training_pipeline_params.feature_params)
    train_target = extract_target(train_df, training_pipeline_params.feature_params)
    train_df = train_df.drop(training_pipeline_params.feature_params.target_col, axis=1)
    val_df = val_df.drop(training_pipeline_params.feature_params.target_col, axis=1)
    logger.info(f"train_df.shape is {train_df.shape}")
    logger.info(f"val_df.shape is {val_df.shape}")

    transformer = build_transformer(
        training_pipeline_params.feature_params,
        training_pipeline_params.preprocess_params,
    )
    transformer.fit(train_df)
    train_features = make_features(transformer, train_df)
    logger.info(f"train_features.shape is {train_features.shape}")

    model = train_model(
        train_features, train_target, training_pipeline_params.train_params
    )

    inference_pipeline = create_inference_pipeline(model, transformer)
    predicts = predict_model(inference_pipeline, val_df)
    metrics = evaluate_model(predicts, val_target)

    with open(training_pipeline_params.metric_path, "w") as metric_file:
        json.dump(metrics, metric_file)
    logger.info(f"metrics is {metrics}")

    path_to_model = serialize_model(
        inference_pipeline, training_pipeline_params.output_model_path
    )
    logger.info("end train pipeline")
    return path_to_model, metrics


@click.command(name="train_pipeline")
@click.argument("config_path")
def train_pipeline_command(config_path: str):
    train_pipeline(config_path)


if __name__ == "__main__":
    train_pipeline_command()
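# Typical invocation from the command line (script and config filenames are
# illustrative; click supplies config_path as the single positional argument):
#
#   python train_pipeline.py configs/train_config.yaml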
from donk.dynamics.dynamics import DynamicsModel from donk.dynamics.linear_dynamics import LinearDynamics from donk.dynamics.utils import to_transitions __all__ = [ "DynamicsModel", "LinearDynamics", "to_transitions", ]
############################################################################## # # The MIT License (MIT) # # Copyright (c) 2015 Eric F Sorton # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # ############################################################################## import unittest import ultrak498 from collections import namedtuple class TEST_bcd_to_int(unittest.TestCase): def testValueOfZeroReturnsZero(self): value = ultrak498.bcd_to_int(chr(0x00)) self.assertEqual(value, 0) def testValueOfOneReturnsOne(self): value = ultrak498.bcd_to_int(chr(0x10)) self.assertEqual(value, 1) def testValueOfNineReturnsNine(self): value = ultrak498.bcd_to_int(chr(0x90)) self.assertEqual(value, 9) def testValueOfTenReturnsTen(self): value = ultrak498.bcd_to_int(chr(0x01)) self.assertEqual(value, 10) def testValueOfElevenReturnsEleven(self): value = ultrak498.bcd_to_int(chr(0x11)) self.assertEqual(value, 11) def testValueOfFifteenReturnsFifteen(self): value = ultrak498.bcd_to_int(chr(0x51)) self.assertEqual(value, 15) def testValueOfTwentyReturnsTwenty(self): value = ultrak498.bcd_to_int(chr(0x02)) self.assertEqual(value, 20) def testValueOf42Returns42(self): value = ultrak498.bcd_to_int(chr(0x24)) self.assertEqual(value, 42) def testValueOf50Returns50(self): value = ultrak498.bcd_to_int(chr(0x05)) self.assertEqual(value, 50) def testValueOf75Returns75(self): value = ultrak498.bcd_to_int(chr(0x57)) self.assertEqual(value, 75) def testValueOf98Returns98(self): value = ultrak498.bcd_to_int(chr(0x89)) self.assertEqual(value, 98) def testValueOf99Returns99(self): value = ultrak498.bcd_to_int(chr(0x99)) self.assertEqual(value, 99) def testMultiByteStringRaisesException(self): with self.assertRaises(ValueError): ultrak498.bcd_to_int("ab") def testOnesPlaceGreaterThanNineRaisesException(self): with self.assertRaises(ValueError): ultrak498.bcd_to_int(chr(0xA0)) def testTensPlaceGreaterThanNineRaisesException(self): with self.assertRaises(ValueError): ultrak498.bcd_to_int(chr(0x0A)) class TEST_bcd_string_to_integer_list(unittest.TestCase): def testEmptyStringReturnsEmptyList(self): value = ultrak498.bcd_string_to_integer_list("") self.assertEqual(value, []) def testOneByteStringReturnsOneItemList(self): value = ultrak498.bcd_string_to_integer_list(chr(0x32)) self.assertEqual(value, [23]) def testTwoByteStringReturnsTwoItemList(self): value = ultrak498.bcd_string_to_integer_list(chr(0x52) + chr(0x21)) self.assertEqual(value, [25,12]) def testFiveByteStringReturnsFiveItemList(self): value = 
ultrak498.bcd_string_to_integer_list(chr(0x11) + chr(0x21) + chr(0x31) + chr(0x41) + chr(0x51)) self.assertEqual(value, [11,12,13,14,15]) class TEST_integer_list_to_named_tuple(unittest.TestCase): def testInvalidInputArgument(self): with self.assertRaises(ValueError): value = ultrak498.integer_list_to_named_tuple("") def testInvalidLengthOfTuple(self): with self.assertRaises(ValueError): value = ultrak498.integer_list_to_named_tuple([0,0,0]) def testInvalidType(self): with self.assertRaises(ValueError): value = ultrak498.integer_list_to_named_tuple([99,0,0,0,0]) def testRaceHeaderTuple(self): value = ultrak498.integer_list_to_named_tuple([1,0,0,0,0]) self.assertEqual(value, (1,0,0,0,0)) self.assertEqual(value.type, 1) self.assertEqual(value.year, 0) self.assertEqual(value.month, 0) self.assertEqual(value.day, 0) self.assertEqual(value.id, 0) value = ultrak498.integer_list_to_named_tuple([2,1,2,3,4]) self.assertEqual(value, (2,1,2,3,4)) self.assertEqual(value.type, 2) self.assertEqual(value.year, 1) self.assertEqual(value.month, 2) self.assertEqual(value.day, 3) self.assertEqual(value.id, 4) def testLapTimeTuple(self): value = ultrak498.integer_list_to_named_tuple([10,0,0,0,0]) self.assertEqual(value, (10,0,0,0,0)) self.assertEqual(value.type, 10) self.assertEqual(value.minutes, 0) self.assertEqual(value.seconds, 0) self.assertEqual(value.hundreths, 0) self.assertEqual(value.lap, 0) value = ultrak498.integer_list_to_named_tuple([10,1,2,3,4]) self.assertEqual(value, (10,1,2,3,4)) self.assertEqual(value.type, 10) self.assertEqual(value.minutes, 1) self.assertEqual(value.seconds, 2) self.assertEqual(value.hundreths, 3) self.assertEqual(value.lap, 4) def testAbsTimeTuple(self): value = ultrak498.integer_list_to_named_tuple([20,0,0,0,0]) self.assertEqual(value, (20,0,0,0,0)) self.assertEqual(value.type, 20) self.assertEqual(value.minutes, 0) self.assertEqual(value.seconds, 0) self.assertEqual(value.hundreths, 0) self.assertEqual(value.lap, 0) value = ultrak498.integer_list_to_named_tuple([20,1,2,3,4]) self.assertEqual(value, (20,1,2,3,4)) self.assertEqual(value.type, 20) self.assertEqual(value.minutes, 1) self.assertEqual(value.seconds, 2) self.assertEqual(value.hundreths, 3) self.assertEqual(value.lap, 4) def testRaceEndTuple(self): value = ultrak498.integer_list_to_named_tuple([50,0,0,0,0]) self.assertEqual(value, (50,0,0,0,0)) self.assertEqual(value.type, 50) self.assertEqual(value.minutes, 0) self.assertEqual(value.seconds, 0) self.assertEqual(value.hundreths, 0) self.assertEqual(value.laps, 0) value = ultrak498.integer_list_to_named_tuple([50,1,2,3,4]) self.assertEqual(value, (50,1,2,3,4)) self.assertEqual(value.type, 50) self.assertEqual(value.minutes, 1) self.assertEqual(value.seconds, 2) self.assertEqual(value.hundreths, 3) self.assertEqual(value.laps, 4) class TEST_adjust_lap_hundreds(unittest.TestCase): def testNoAdjustmentLap(self): ultrak498.adjust_lap_hundreds.lap_hundreds = 0 LapTimeMock = namedtuple('LapTime', 'lap') for test_value in [1, 2, 11, 23, 45, 68, 80, 99]: in_record = LapTimeMock(test_value) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) def testNoAdjustmentAbs(self): ultrak498.adjust_lap_hundreds.abs_hundreds = 0 AbsTimeMock = namedtuple('AbsTime', 'lap') for test_value in [1, 2, 11, 23, 45, 68, 80, 99]: in_record = AbsTimeMock(test_value) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) def testAdjustmentLapForHundreds(self): ultrak498.adjust_lap_hundreds.lap_hundreds = 0 
LapTimeMock = namedtuple('LapTime', 'lap') for test_value, expected_value in [(99, 99), (0, 100), (1, 101), (2, 102), (99, 199), (0, 200), (21, 221), (0, 300), (0, 400), (0, 500), (55, 555)]: in_record = LapTimeMock(test_value) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(LapTimeMock(expected_value), out_record) def testAdjustmentAbsForHundreds(self): ultrak498.adjust_lap_hundreds.abs_hundreds = 0 AbsTimeMock = namedtuple('AbsTime', 'lap') for test_value, expected_value in [(99, 99), (0, 100), (1, 101), (2, 102), (99, 199), (0, 200), (21, 221), (0, 300), (0, 400), (0, 500), (55, 555)]: in_record = AbsTimeMock(test_value) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(AbsTimeMock(expected_value), out_record) def testAdjustmentForHundredsResetOnRaceStart(self): ultrak498.adjust_lap_hundreds.lap_hundreds = 0 ultrak498.adjust_lap_hundreds.abs_hundreds = 0 RaceHeaderMock = namedtuple('RaceHeader', 'ignore') LapTimeMock = namedtuple('LapTime', 'lap') AbsTimeMock = namedtuple('AbsTime', 'lap') in_record = LapTimeMock(1) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) in_record = AbsTimeMock(1) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) in_record = LapTimeMock(99) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) in_record = AbsTimeMock(99) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) in_record = LapTimeMock(0) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(LapTimeMock(100), out_record) in_record = AbsTimeMock(0) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(AbsTimeMock(100), out_record) in_record = LapTimeMock(99) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(LapTimeMock(199), out_record) in_record = AbsTimeMock(99) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(AbsTimeMock(199), out_record) in_record = LapTimeMock(0) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(LapTimeMock(200), out_record) in_record = AbsTimeMock(0) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(AbsTimeMock(200), out_record) in_record = RaceHeaderMock(0) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record , out_record) in_record = LapTimeMock(1) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) in_record = AbsTimeMock(1) out_record = ultrak498.adjust_lap_hundreds(in_record) self.assertEqual(in_record, out_record) ############################################################################## # vim: ts=4 sts=4 sw=4 tw=78 sta et ##############################################################################
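# A standard entry point so the suite in this file can also be run directly
# (equivalently: python -m unittest discover):
if __name__ == '__main__':
    unittest.main()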
def format_money(n): return "$%.2f" % n
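# Example (illustrative): format_money(1234.5) returns "$1234.50".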
#!/usr/bin/env python

import sys  # to read the command-line argument
import unittest
import unitstyle

# Collect all of our test scripts as a test suite.
my_tests = unittest.TestLoader().discover("tests/")

# Get the command-line output format, if provided.
try:
    output_format = sys.argv[1]
except IndexError:  # nothing provided
    output_format = ''

# Run our test suite using unitstyle.
unitstyle.TestRunner(format=output_format).run(my_tests)
import sys from PyQt5 import QtCore, QtGui, QtWidgets from PyQt5 import uic class MainWindow(QtWidgets.QMainWindow): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) uic.loadUi('mainwindow.ui', self) app = QtWidgets.QApplication(sys.argv) window = MainWindow() window.show() app.exec_()
from config import Config
import numpy as np

from messages.generic_api import GenericApi
from messages.discord_embeds import DiscordEmbeds


class Messenger:
    def __init__(self, api_type: str = None):
        if api_type is not None:
            if api_type == "generic_api":
                self._message_api = GenericApi()
            elif api_type == "discord":
                self._message_api = DiscordEmbeds()
        elif Config().general["message_api_type"] == "generic_api":
            self._message_api = GenericApi()
        elif Config().general["message_api_type"] == "discord":
            self._message_api = DiscordEmbeds()
        else:
            self._message_api = None

    def send_item(self, item: str, image: np.ndarray, location: str, ocr_text: str = None):
        self._message_api.send_item(item, image, location, ocr_text)

    def send_death(self, location: str, image_path: str = None):
        self._message_api.send_death(location, image_path)

    def send_chicken(self, location: str, image_path: str = None):
        self._message_api.send_chicken(location, image_path)

    def send_stash(self):
        self._message_api.send_stash()

    def send_gold(self):
        self._message_api.send_gold()

    def send_message(self, msg: str):
        self._message_api.send_message(msg)


if __name__ == "__main__":
    messenger = Messenger()
    item = "rune_test"
    image = None
    location = "Shenk"
    # messenger.send_item(item, image, location)
    # messenger.send_death(location, "./info_screenshots/info_debug_chicken_20211220_110621.png")
    # messenger.send_chicken(location, "./info_screenshots/info_debug_chicken_20211220_110621.png")
    messenger.send_stash()
    messenger.send_gold()
    messenger.send_message("This is a test message")
class Interpreter(object): def __init__(self, analyzed_tree): self.analyzed_tree = analyzed_tree def interpret(self, world_state): for element in self.analyzed_tree: element.interpret(world_state)
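# A minimal usage sketch; the node class and the shape of world_state are
# illustrative assumptions (any object exposing interpret(world_state) works).
class PrintNode:
    def __init__(self, text):
        self.text = text

    def interpret(self, world_state):
        world_state.setdefault("output", []).append(self.text)


if __name__ == "__main__":
    state = {}
    Interpreter([PrintNode("hello"), PrintNode("world")]).interpret(state)
    print(state["output"])  # ['hello', 'world']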
class Generator: def __init__(self, generator): self._generator = generator self._receivers = set() self.intensity = 0 self.request_count = 0 def add_receiver(self, receiver): self._receivers.add(receiver) def remove_receiver(self, receiver): try: self._receivers.remove(receiver) except KeyError: pass def next_time(self): new_time = self._generator.generate() self.intensity += new_time self.request_count += 1 return new_time def emit_request(self): for receiver in self._receivers: receiver.receive_request() def get_avg_intensity(self): return 1/(self.intensity/self.request_count)
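# An illustrative sketch: the wrapped distribution and the receiver below are
# assumptions (anything exposing .generate() / .receive_request() fits).
if __name__ == "__main__":
    import random

    class UniformTime:
        def generate(self):
            return random.uniform(0.5, 1.5)

    class CountingReceiver:
        def __init__(self):
            self.received = 0

        def receive_request(self):
            self.received += 1

    gen = Generator(UniformTime())
    sink = CountingReceiver()
    gen.add_receiver(sink)
    for _ in range(100):
        gen.next_time()
        gen.emit_request()
    print(sink.received)            # 100
    print(gen.get_avg_intensity())  # ~1.0 requests per unit time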
import logging from fastapi import FastAPI logger = logging.getLogger(__name__) def init_sentry(): """Initialize sentry on application startup""" from .config import settings if settings.SENTRY_DSN is not None and len(settings.SENTRY_DSN) > 0: logger.info("Initializing Sentry") import sentry_sdk from sentry_dramatiq import DramatiqIntegration from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration sentry_sdk.init( dsn=settings.SENTRY_DSN, environment=settings.SENTRY_ENVIRONMENT, release=settings.SENTRY_RELEASE, sample_rate=settings.SENTRY_SAMPLE_RATE, send_default_pii=settings.SENTRY_SEND_PII, traces_sample_rate=settings.SENTRY_TRACES_SAMPLE_RATE, integrations=[ # fixme write an integration for fastapi to add any other useful data to our events # (in particular, set transaction name based on route endpoint, add instrumentation, etc) SqlalchemyIntegration(), DramatiqIntegration(), ], ) def setup_sentry_middleware(app: FastAPI) -> FastAPI: """Add sentry middleware to FastAPI application""" from .config import settings if settings.SENTRY_DSN is not None and len(settings.SENTRY_DSN) > 0: logger.debug("Loading Sentry ASGI Middleware") from sentry_sdk.integrations.asgi import SentryAsgiMiddleware app.add_middleware(SentryAsgiMiddleware) return app
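# A minimal wiring sketch (the application factory below is an assumption;
# the real startup order may differ):
#
#   app = FastAPI()
#   init_sentry()
#   app = setup_sentry_middleware(app)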
import mimetypes


class MimeType(object):
    _initiated = False

    @staticmethod
    def guess_type(filename):
        """
        Guess the mime type
        :param filename: File extension (without the leading dot); a dummy
            basename is prepended below, so only the extension matters
        :return: tuple(type, encoding)
        """
        MimeType._initiate()
        return mimetypes.guess_type('dummy.%s' % filename)

    @staticmethod
    def _initiate():
        """
        Initiate
        :return: void
        """
        if MimeType._initiated:
            return

        mimetypes.init()
        mimetypes.add_type('application/vnd.google-apps.document', '.gdoc')
        mimetypes.add_type('application/vnd.google-apps.drawing', '.gdraw')
        mimetypes.add_type('application/vnd.google-apps.spreadsheet', '.gsheet')
        mimetypes.add_type('application/vnd.google-apps.form', '.gform')
        mimetypes.add_type('application/vnd.google-apps.site', '.gsite')
        mimetypes.add_type('application/vnd.google-apps.map', '.gmap')
        mimetypes.add_type('application/vnd.google-apps.presentation', '.gslides')
        mimetypes.add_type('application/vnd.openxmlformats-officedocument.wordprocessingml.document', '.docx')
        mimetypes.add_type('application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', '.xlsx')
        mimetypes.add_type('application/vnd.openxmlformats-officedocument.presentationml.presentation', '.pptx')
        mimetypes.add_type('application/vnd.google-earth.kml+xml', '.kml')

        MimeType._initiated = True
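# Usage sketch: the argument is a bare extension, since guess_type prefixes
# it with 'dummy.' internally.
if __name__ == "__main__":
    print(MimeType.guess_type('gdoc'))  # ('application/vnd.google-apps.document', None)
    print(MimeType.guess_type('xlsx'))  # the OOXML spreadsheet type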
'''
HOBNOB
BY ARYAN BHAJANKA
REFER TO THE 'README.MD' FILE FOR DETAILS
'''
# main.py, HobNob
from flask import Flask, render_template, request, redirect
from flask_login import login_required, current_user, login_user, logout_user
from models import UserModel, PostModel, ProfileModel, FollowModel, db, login
from random import sample

app = Flask(__name__)
app.secret_key = 'hobnob_aryanbhajanka'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

db.init_app(app)
login.init_app(app)
login.login_view = 'login'


@app.before_first_request
def create_all():
    db.create_all()


@app.route('/', methods=['POST', 'GET'])
@login_required
def blog():
    username = request.form.get('text')
    if request.method == 'POST':
        print("liked")
        '''post_id = request.form.get('id')
        account = PostModel.query.get(post_id)
        print(PostModel.account(id=post_id))
        like = PostModel(id=post_id)
        db.session.add(like)
        db.session.commit()'''
    post = PostModel.query.all()
    post_list = sample(post, 5)
    return render_template('index.html', posts=post_list)


@app.route('/login', methods=['POST', 'GET'])
def login():
    if current_user.is_authenticated:
        return redirect('/')
    if request.method == 'POST':
        email = request.form['email']
        user = UserModel.query.filter_by(email=email).first()
        if user is not None and user.check_password(request.form['password']):
            login_user(user)
            return redirect('/')
    return render_template('login.html')


@app.route('/register', methods=['POST', 'GET'])
def register():
    if current_user.is_authenticated:
        return redirect('/')
    if request.method == 'POST':
        email = request.form['email']
        username = request.form['username']
        password = request.form['password']
        if UserModel.query.filter_by(email=email).first():
            return ('Email already exists')
        user = UserModel(email=email, username=username)
        user.set_password(password)
        db.session.add(user)
        db.session.commit()
        bio = ProfileModel(bio_account=username, bio="Hello I am on HobNob!")
        db.session.add(bio)
        db.session.commit()
        return redirect('/login')
    return render_template('register.html')


@app.route('/logout')
def logout():
    logout_user()
    return redirect('/')


@app.route('/<username>', methods=['POST', 'GET'])
@login_required
def profile(username):
    try:
        if request.method == 'POST':
            follow = FollowModel(follow_account=username, follower_account=current_user.username)
            # Compare against follower usernames, not the model objects themselves.
            followers = [f.follower_account for f in FollowModel.query.filter_by(follow_account=username).all()]
            if current_user.username in followers:
                print("pass")
            else:
                db.session.add(follow)
                db.session.commit()
        bio = ProfileModel.query.filter_by(bio_account=username).all()
        print_post = PostModel.query.filter_by(account=username).all()
        length = len(bio) - 1
        bio_main = bio[length]
        return render_template('profile.html', user=username, posts=print_post, bio=bio_main)
    except (IndexError, TypeError):
        pass


@app.route('/newpost', methods=['POST', 'GET'])
@login_required
def post():
    if request.method == 'POST':
        text = request.form['text']
        post = PostModel(text=text, account=current_user.username)
        db.session.add(post)
        db.session.commit()
    return render_template('newpost.html')


@app.route('/myprofile', methods=['POST', 'GET'])
@login_required
def myprofile():
    if request.method == 'POST':
        bio = request.form['bio']
        name = request.form['name']
        post_bio = ProfileModel(bio=bio, bio_account=current_user.username, name=name)
        db.session.add(post_bio)
        db.session.commit()
    return render_template("myprofile.html")


@app.route('/viewmyprofile')
def viewmyprofile():
    user = current_user.username
    return redirect('/' + user)
@app.route('/feed') def feed(): return render_template('feed.html') @app.route('/<username>/followers') def followers(username): followers = FollowModel.query.filter_by(follow_account=username).all() return render_template("followers.html",followers=followers,username=username) @app.route('/delete_post',methods=['POST', 'GET']) @login_required def delete_post(): if request.method == 'POST': post = request.form['delete'] post_id = PostModel.query.filter_by(account=current_user.username).all() for i in post_id: if i.text == post: PostModel.query.filter_by(id=i.id).delete() db.session.commit() else: pass print_post = PostModel.query.filter_by(account=current_user.username).all() return render_template('delete_post.html',posts=print_post) if __name__ == "__main__": app.run (debug=True)
from deepnet import *
"""This file and its contents are licensed under the Apache License 2.0. Please see the included NOTICE for copyright information and LICENSE for a copy of the license. """ import logging from projects.api import ProjectNextTaskAPI logger = logging.getLogger(__name__) def next_task(project, queryset, **kwargs): """ Generate next task for labeling stream :param project: project :param queryset: task ids to sample from :param kwargs: arguments from api request """ kwargs['pk'] = project.pk api = ProjectNextTaskAPI(kwargs=kwargs) api.prepared_tasks = queryset response = api.get(request=kwargs['request']) result = response.data result['response_code'] = response.status_code return result actions = [ { 'entry_point': next_task, 'title': 'Generate next task', 'order': 0, 'hidden': True } ]
import os
import yaml

from click.testing import CliRunner
import medpy.io
import numpy as np

from luna.radiology.cli.window_volume import cli


def test_cli_window(tmp_path):
    runner = CliRunner()
    result = runner.invoke(cli, [
        'pyluna-radiology/tests/luna/testdata/data/2.000000-CTAC-24716/volumes/image.mhd',
        '-o', tmp_path,
        '--low_level', 0,
        '--high_level', 100])
    assert result.exit_code == 0
    assert os.path.exists(str(tmp_path) + '/metadata.yml')

    with open(str(tmp_path) + '/metadata.yml', 'r') as fp:
        metadata = yaml.safe_load(fp)

    assert os.path.exists(metadata['itk_volume'])

    image, _ = medpy.io.load(metadata['itk_volume'])
    assert np.max(image) == 100
    assert np.min(image) == 0
"""Add support for OAuth to let users to connect the bot various services.""" from plumeria.command import commands from plumeria.core.storage import pool, migrations from plumeria.core.webserver import app, render_template from plumeria.message import Message, Response from plumeria.perms import direct_only from plumeria.core.oauth.manager import * from plumeria.core.oauth.storage import * __requires__ = ['plumeria.core.storage'] def find_endpoint(name) -> Endpoint: try: return oauth_manager.get_endpoint(name) except KeyError: raise CommandError("No such service **{}** exists.".format(name)) @commands.create('connect', cost=4, category='Services') @direct_only async def connect(message: Message): """ Authenticate yourself to a service and provide the bot with permissions of your choosing. Example:: /connect spotify """ endpoint = find_endpoint(message.content.strip()) try: url = await endpoint.request_authorization(message.author) return Response("You will have to visit this link to connect your account: {}" \ "\n\nYou can later disable access from your account settings on the website.".format(url), private=True) except NotImplementedError as e: raise CommandError("Could not connect service: {}".format(str(e))) @app.route("/oauth2/callback/", methods=['GET']) async def handle(request): error = request.GET.get("error", "") code = request.GET.get("code", "") state = request.GET.get("state", "") if not len(error): try: await oauth_manager.process_request_authorization(state, code) return render_template("oauth/success.html") except UnknownFlowError: return render_template("oauth/error.html", error="The link you have visited has expired.") except FlowError: return render_template("oauth/error.html", error="We were unable to contact the service to check that you authorized access.") else: await oauth_manager.cancel_request_authorization(state, error) return render_template("oauth/error.html", error="Something went wrong while connecting to the service. The service says: {}".format( error[:200])) async def setup(): commands.add(connect) app.add(handle) store = DatabaseTokens(pool, migrations) await store.initialize() oauth_manager.redirect_uri = await app.get_base_url() + "/oauth2/callback/" oauth_manager.store = store
# Copyright (c) 2016-2021 Renata Hodovan, Akos Kiss. # # Licensed under the BSD 3-Clause License # <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>. # This file may not be copied, modified, or distributed except # according to those terms. import json import logging import os import pkgutil import signal import sys import time from multiprocessing import Lock, Process, Queue from urwid import connect_signal, ExitMainLoop, MainLoop, raw_display, util from ... import Controller from .tui_listener import TuiListener from .widgets import MainWindow logger = logging.getLogger(__name__) class Tui(object): signals = ['close'] def __init__(self, controller, style): # Shared objects to help event handling. self.events = Queue() self.lock = Lock() self.view = MainWindow(controller) self.screen = raw_display.Screen() self.screen.set_terminal_properties(256) self.loop = MainLoop(widget=self.view, palette=style, screen=self.screen, unhandled_input=Tui.exit_handler, pop_ups=True) self.pipe = self.loop.watch_pipe(self.update_ui) self.loop.set_alarm_in(0.1, self.update_timer, self.view.logo.timer) controller.listener += TuiListener(self.pipe, self.events, self.lock) connect_signal(self.view.issues_table, 'refresh', lambda source: self.loop.draw_screen()) connect_signal(self.view.stat_table, 'refresh', lambda source: self.loop.draw_screen()) def update_ui(self, _): while True: try: event = self.events.get_nowait() if hasattr(self, event['fn']): getattr(self, event['fn'])(**event['kwargs']) except Exception: break def update_timer(self, loop, timer): if timer.update(): loop.set_alarm_in(0.1, self.update_timer, timer) def on_fuzz_job_added(self, ident, cost, sut, fuzzer, batch): self.view.job_table.on_fuzz_job_added(ident, fuzzer, sut, cost, batch) def on_reduce_job_added(self, ident, cost, sut, issue_id, size): self.view.job_table.on_reduce_job_added(ident, sut, cost, issue_id, size) def on_update_job_added(self, ident, cost, sut): self.view.job_table.on_update_job_added(ident, sut) def on_validate_job_added(self, ident, cost, sut, issue_id): self.view.job_table.on_validate_job_added(ident, sut, issue_id) def on_job_removed(self, ident): self.view.job_table.on_job_removed(ident) def on_job_activated(self, ident): self.view.job_table.on_job_activated(ident) def on_job_progressed(self, ident, progress): self.view.job_table.on_job_progressed(ident, progress) def on_load_updated(self, load): self.view.logo.load.set_completion(load) def on_stats_updated(self): self.view.stat_table.update() def on_issue_added(self, ident, issue): # Do shiny animation if a new issue has received. 
self.view.logo.do_animate = True self.loop.set_alarm_at(time.time() + 5, callback=self.view.logo.stop_animation) self.loop.set_alarm_in(0.1, self.view.logo.animate, self.view.logo) self.view.issues_table.add_row(issue) def on_issue_invalidated(self, ident, issue): self.view.issues_table.invalidate_row(ident=issue['_id']) def on_issue_updated(self, ident, issue): self.view.issues_table.update_row(ident=issue['_id']) def on_issue_reduced(self, ident, issue): self.view.issues_table.update_row(ident=issue['_id']) def warning(self, ident, msg): self.view._emit('warning', msg) @staticmethod def exit_handler(key): if key in ('q', 'Q', 'f10'): raise ExitMainLoop() def load_style(style): return [ ('default', style['default_fg'], style['default_bg']), ('logo', style['logo_fg'], style['default_bg']), ('logo_secondary', style['logo_secondary_fg'], style['default_bg']), ('logo_fireworks_1', 'yellow', style['default_bg']), ('logo_fireworks_2', 'light red', style['default_bg']), ('logo_fireworks_3', 'dark blue', style['default_bg']), ('logo_fireworks_4', 'light green', style['default_bg']), ('label', style['label_fg'], style['default_bg']), ('time', style['time_fg'], style['default_bg']), ('load_progress', style['load_progress_fg'], style['load_progress_bg']), ('load_progress_complete', style['load_progress_complete_fg'], style['load_progress_complete_bg']), ('button', style['button_fg'], style['button_bg']), ('border', style['border_fg'], style['default_bg']), ('border_title', style['border_title_fg'], style['default_bg']), ('selected', style['default_fg'], style['selected_bg']), ('table_head', style['table_head_fg'], style['table_head_bg']), ('table_head_sorted', style['table_head_sorted_fg'], style['table_head_bg']), ('issue_reduced', style['issue_reduced_fg'], style['default_bg']), ('issue_reported', style['issue_reported_fg'], style['default_bg']), ('issue_reduced_selected', style['issue_reduced_fg'], style['selected_bg']), ('issue_reported_selected', style['issue_reported_fg'], style['selected_bg']), ('issue_invalid', style['issue_invalid_fg'], style['default_bg']), ('issue_invalid_selected', style['issue_invalid_fg'], style['selected_bg']), ('job_head', style['job_head_fg'], style['job_head_bg']), ('job_label', style['job_label_fg'], style['default_bg']), ('job_head_inactive', style['job_head_inactive_fg'], style['job_head_inactive_bg']), ('job_inactive', style['job_inactive_fg'], style['default_bg']), ('job_progress', style['job_progress_fg'], style['job_progress_bg']), ('job_progress_complete', style['job_progress_complete_fg'], style['job_progress_complete_bg']), ('job_progress_inactive', style['default_bg'], style['default_bg']), ('job_label_selected', style['job_label_fg'], style['selected_bg']), ('job_inactive_selected', style['job_inactive_fg'], style['selected_bg']), ('dialog', style['dialog_fg'], style['dialog_bg']), ('dialog_title', style['dialog_title_fg'], style['dialog_title_bg']), ('dialog_border', style['dialog_border_fg'], style['dialog_bg']), ('dialog_secondary', style['dialog_secondary_fg'], style['dialog_bg']), ('warning', style['warning_fg'], style['warning_bg']), ('warning_title', style['warning_title_fg'], style['warning_title_bg']), ('warning_border', style['warning_border_fg'], style['default_bg']), ] def execute(arguments): # Redirect or suppress errors to spare tui from superfluous messages. 
if arguments.log_file: sys.stdout = open(os.devnull, 'w') sys.stderr = open(arguments.log_file, 'w') else: sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') if arguments.style: raw_style = json.load(arguments.style) else: raw_style = json.loads(pkgutil.get_data(__package__, os.path.join('resources', 'default_style.json')).decode(encoding='utf-8')) style = load_style(raw_style) if arguments.force_encoding: util.set_encoding(arguments.force_encoding) controller = Controller(config=arguments.config) tui = Tui(controller, style=style) fuzz_process = Process(target=controller.run, args=(), kwargs={'max_cycles': arguments.max_cycles, 'validate': arguments.validate, 'reduce': arguments.reduce}) try: fuzz_process.start() tui.loop.run() except KeyboardInterrupt: # No need to handle CTRL+C as SIGINT is sent by the terminal to all # (sub)processes. pass except Exception as e: # Handle every kind of TUI exceptions except for KeyboardInterrupt. # SIGINT will trigger a KeyboardInterrupt exception in controller, # thus allowing it to perform proper cleanup. os.kill(fuzz_process.pid, signal.SIGINT) logger.error('Unhandled exception in TUI.', exc_info=e) else: # Handle normal exit after 'Q' or F10. SIGINT will trigger a # KeyboardInterrupt exception in controller, thus allowing it to # perform proper cleanup. os.kill(fuzz_process.pid, signal.SIGINT) finally: raise ExitMainLoop()
#[0] is ours ##whole_level[1] calais #[2] ritter #[3] stanford import datetime from threading import Thread import random import math from queue import Queue import pandas as pd import warnings import numpy as np import time import pickle import matplotlib.pyplot as plt import copy import matplotlib.ticker as ticker import matplotlib from matplotlib import rc import matplotlib.font_manager as fm warnings.filterwarnings("ignore") rc('font',**{'family':'dejavusans','serif':['Times']}) rc('text', usetex=False) csfont = {'fontname':'DejaVu Sans Condensed'} # sentence level # whole_level=[ # [ # 1082.7956538044, # 1050.6432401884, # 1053.7217456599, # 1012.5515266169, # 1008.6493123934, # 989.9630010678, # 952.6808602644, # 913.4404472567, # 877.5589883683, # 850.7370345206, # 838.5608943759 # ], # [83.5613470219, # 86.1817237598, # 91.4628130584, # 89.9526238481, # 86.8751099374, # 92.2544695707, # 95.2050934217, # 97.7485665828, # 99.7710068994, # 101.1100325883, # 101.5450283182 # ], # [ # 282.5355908803, # 286.9383684093, # 304.265311919, # 304.7142461953, # 307.9814362368, # 307.7235153447, # 303.0300608151, # 298.1720040698, # 293.5781982817, # 290.6551030755, # 290.0621321868 # ], # [0.7847898245, # 0.7890123589, # 0.7931519113, # 0.7704950433, # 0.7660793922, # 0.771851223, # 0.7642034553, # 0.7541908589, # 0.7508384564, # 0.7468438025, # 0.7450788343 # ] # ] whole_level=[[624.4496273381776, 593.30218438052, 594.1481509218495, 652.9262676551306, 605.6753390579645, 552.3884725429233, 597.3519978583432, 606.3851824297484, 543.6730550626653, 540.0956312505397, 529.8341210809203], [48.1899348453864, 48.66714314099523, 51.57192729540457, 58.00438734328955, 52.16690381930308, 51.4769799239461, 59.69570202760152, 64.89014424264954, 61.811010822796185, 64.19032516588791, 64.15994496041087], [162.9386337256632, 162.03517449871248, 171.56205915928956, 196.48968989695575, 184.9371813615319, 171.70633894746533, 190.00655916273732, 197.940747671422, 181.88014489644576, 184.52417727437214, 183.27209854023903], [0.45258928748558247, 0.4455582429242619, 0.4472240830561037, 0.49684034801617233, 0.46001656860802365, 0.4306844979493904, 0.47917249084547664, 0.5006677369437788, 0.46516603767974085, 0.47413837483176313, 0.470768660874824]] tweets_been_processed_list=[173400, 350484, 527834, 682913, 849446, 1028661, 1188145, 1338782, 1500195, 1657711, 1713105 ] tweets_been_processed_list=[100000, 200000, 300000, 400000, 500000, 600000, 700000, 800000, 900000, 1000000, 1035000] fontPath = "/usr/share/fonts/truetype/abyssinica/AbyssinicaSIL-R.ttf" font_axis = fm.FontProperties(fname=fontPath, size=19) fontPath = "/usr/share/fonts/truetype/abyssinica/AbyssinicaSIL-R.ttf" font_axis2 = fm.FontProperties(fname=fontPath, size=24) fontPath = "/usr/share/fonts/truetype/abyssinica/AbyssinicaSIL-R.ttf" font_legend = fm.FontProperties(fname=fontPath, size=18) f, (ax, ax2,ax3) = plt.subplots(3, 1, sharex=True) #fig, ax = plt.subplots() params = { 'text.usetex': False, 'legend.fontsize': 20, 'figure.figsize': [40, 400] } matplotlib.rcParams.update(params) print("BITTI BITTIBITTIBITTIBITTIBITTIBITTIBITTIBITTIBITTIBITTI") ax.plot( tweets_been_processed_list, whole_level[0],marker='s' ,markersize=8,linewidth=1, label="TwiCS") ax3.plot( tweets_been_processed_list, whole_level[0],marker='s' ,markersize=8,linewidth=1, label="TwiCS") ax2.plot( tweets_been_processed_list, whole_level[0],marker='s' ,markersize=8,linewidth=1, label="TwiCS") ax2.plot( tweets_been_processed_list, whole_level[1],marker='>' ,markersize=8,linewidth=1, 
label="OpenCalais") ax3.plot( tweets_been_processed_list, whole_level[1],marker='>' ,markersize=8,linewidth=1, label="OpenCalais") ax2.plot( tweets_been_processed_list, whole_level[2],marker='x' ,markersize=8,linewidth=1, label="TwitterNLP") ax3.plot( tweets_been_processed_list, whole_level[2],marker='x' ,markersize=8,linewidth=1, label="TwitterNLP") ax3.plot( tweets_been_processed_list, whole_level[3],marker='o' , markersize=8, linewidth=1,label="Stanford") ax.set_ylim(500,700) # outliers only ax2.set_ylim(40, 220) ax3.set_ylim(0,1) ax.spines['bottom'].set_visible(False) ax2.spines['top'].set_visible(False) ax2.spines['bottom'].set_visible(False) ax3.spines['top'].set_visible(False) ax.xaxis.tick_top() ax2.xaxis.tick_top() ax.tick_params(labeltop='off') # don't put tick labels at the top ax.tick_params(labelbottom='off',axis='both', which='major', labelsize=12) ax2.tick_params(labeltop='off',axis='both', which='major', labelsize=12) ax3.tick_params(labeltop='off',axis='both', which='major', labelsize=12) # don't put tick labels at the top # don't put tick labels at the top # ax2.xaxis.tick_bottom() ax3.xaxis.tick_bottom() d = 0.01 # how big to make the diagonal lines in axes coordinates kwargs = dict(transform=ax.transAxes, color='k', clip_on=False) ax.plot((-d, +d), (-d, +d), **kwargs) # top-left diagonal ax.plot((1 - d, 1 + d), (-d, +d), **kwargs) # top-right diagonal kwargs.update(transform=ax2.transAxes) # switch to the bottom axes ax2.plot((-d, +d), (1 - d, 1 + d), **kwargs) # bottom-left diagonal ax2.plot((1 - d, 1 + d), (1 - d, 1 + d), **kwargs) # bottom-right diagonal kwargs.update(transform=ax2.transAxes) # switch to the bottom axes ax2.plot((-d, +d), (-d, +d), **kwargs) # top-left diagonal ax2.plot((1 - d, 1 + d), (-d, +d), **kwargs) # top-right diagonal kwargs.update(transform=ax3.transAxes) # switch to the bottom axes ax2.plot((-d, +d), (1 - d, 1 + d), **kwargs) # bottom-left diagonal ax2.plot((1 - d, 1 + d), (1 - d, 1 + d), **kwargs) # bottom-right diagonal tick_spacing = 50 ax.yaxis.set_major_locator(ticker.MultipleLocator(tick_spacing)) tick_spacing_ax2 = 50 ax2.yaxis.set_major_locator(ticker.MultipleLocator(tick_spacing_ax2)) tick_spacing_x_axis = 400000 ax.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing_x_axis)) plt.tick_params(axis='both', which='major', labelsize=12) abc=f.text(0.03, 0.5, 'Tweet Processing Throughput',fontproperties=font_axis, ha='center', va='center', rotation='vertical') ax.text(0.1, 0.3,'TwiCS', ha='center', va='center', transform=ax.transAxes,FontProperties=font_legend) ax2.text(0.5, 0.64, 'TwitterNLP',ha='center', va='center', transform=ax2.transAxes,FontProperties=font_legend) ax2.text(0.15, -0.1, 'OpenCalais',ha='center', va='center', transform=ax2.transAxes,FontProperties=font_legend) ax3.text(0.8, 0.55, 'Stanford',ha='center', va='center', transform=ax3.transAxes,FontProperties=font_legend) plt.xlabel('Tweet in Input Stream',fontproperties=font_axis2) # plt.ylabel('Tweet Throughput',fontproperties=font_axis)#prop=20) ax2.grid(True) ax3.grid(True) ax.grid(True) # plt.ylim((0.1,1.0)) # plt.legend(loc="lower right",ncol=4,frameon=False,prop=font_legend) # plt.legend(loc="upper left", bbox_to_anchor=[0, 1], # ncol=2,frameon=False,prop=font) f.savefig("f1_score_us_vs_others7.pdf",dpi=1200,bbox_inches='tight',bbox_extra_artists=[abc]) plt.show()
# Pipenv modules from typer import BadParameter # Global configuration variables ENDPOINT = "https://app.api.surehub.io" PORT = None CORS = None EMAIL = None PASSWORD = None LOGLEVEL = None def validate_loglevel(value: str): loglevel_values = ['critical', 'error', 'warning', 'info', 'debug', 'trace'] if value not in loglevel_values: raise BadParameter(f"Only one of the following is allowed: {loglevel_values}") return value
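# --- Hedged usage sketch (added for illustration, not part of the original
# module): validate_loglevel above is shaped like a typer parameter callback,
# so it could plausibly be wired up as below; the `serve` command and option
# name are hypothetical.
import typer

cli = typer.Typer()

@cli.command()
def serve(loglevel: str = typer.Option("info", callback=validate_loglevel)):
    typer.echo(f"Starting with loglevel={loglevel}")

# Running `serve --loglevel debug` would pass validation, while an unknown
# level would raise the BadParameter error defined above.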
import dumper def qdump__thrust__device_vector(d, value): innerType = value.type[0] size = int(value["m_size"]) start = value["m_storage"]["m_begin"]["m_iterator"]["m_iterator"].pointer() d.putItemCount(size) d.putPlotData(start, size, innerType)
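# Note (assumption): this follows Qt Creator's Python debugging-helper
# convention, where a function named qdump__thrust__device_vector is picked up
# automatically for values of type thrust::device_vector<T>; putItemCount and
# putPlotData are part of that Dumper API. The file would be loaded via
# Qt Creator's "Extra Debugging Helpers" setting rather than run directly.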
from operator import itemgetter import py from rply.utils import IdentityDict class TestIdentityDict(object): def test_create(self): IdentityDict() def test_get_set_item(self): d = IdentityDict() x = [] d[x] = "test" assert d[x] == "test" def test_delitem(self): d = IdentityDict() x = [] d[x] = "hello" del d[x] with py.test.raises(KeyError): d[x] def test_len(self): d = IdentityDict() d[[]] = 3 d[3] = 5 assert len(d) == 2 def test_iter(self): d = IdentityDict() x = [] y = [] d[x] = 1 d[y] = 2 assert sorted(d.items(), key=itemgetter(1)) == [(x, 1), (y, 2)]
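# Hedged illustration of why these tests use lists as keys: IdentityDict keys
# compare by object identity rather than equality, so two equal-but-distinct
# lists occupy separate slots (a regular dict could not even hash them).
def demo_identity_semantics():
    d = IdentityDict()
    a, b = [1], [1]
    d[a], d[b] = "first", "second"
    assert a == b and d[a] == "first" and d[b] == "second"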
# coding=utf8
from interfacebdd.Connexion import Connexion
from interfacebdd.QualificationDAO import QualificationDAO
from flask import Blueprint, request, json
import pymysql

qualification_api = Blueprint('qualification_api', __name__)

@qualification_api.route('/qualification/all/')
def allQualifications():
    """ Fetches the list of all qualifications
    Returns:
        code 1, data : data contains the list of qualifications
        code -1, message : error while fetching the list
    """
    try:
        qualification = QualificationDAO()
        conn = Connexion().connect()
        s = []
        for i in qualification.getAll(conn, False, []):
            s.append(i.serial())
        resp = {"code": 1, "data": json.dumps(s, encoding="utf-8")}
    except pymysql.err.Error:
        Connexion().exception()
        resp = {"code": -1, "message": "An error occurred while fetching the list of qualifications."}
    finally:
        Connexion().disconnect(conn)
    return json.dumps(resp, encoding="utf-8")
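# Hedged usage sketch: mounting the blueprint above on a Flask application;
# the surrounding app layout is assumed.
from flask import Flask

app = Flask(__name__)
app.register_blueprint(qualification_api)
# GET /qualification/all/ now returns the serialized qualification list.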
"""Base class for AWS organizations resources.""" from typing import Type from botocore.client import BaseClient from altimeter.aws.resource.resource_spec import ScanGranularity, AWSResourceSpec class OrganizationsResourceSpec(AWSResourceSpec): """Base class for AWS organizations resources.""" service_name = "organizations" scan_granularity = ScanGranularity.ACCOUNT @classmethod def skip_resource_scan( cls: Type["OrganizationsResourceSpec"], client: BaseClient, account_id: str, region: str ) -> bool: """Return a bool indicating whether this resource class scan should be skipped, in this case skip if the current account is not an org master.""" resp = client.describe_organization() return resp["Organization"]["MasterAccountId"] != account_id
import os
import click
from flask_migrate import Migrate
from app import create_app, db
from app.models import User

# Create the Flask app instance
app = create_app('default')

# Create the database tables before the first request
@app.before_first_request
def create_db():
    db.create_all()

if __name__ == '__main__':
    app.run()
from __future__ import absolute_import from django import VERSION as DJANGO_VERSION def get_user_model(): if DJANGO_VERSION >= (1, 5): from django.contrib.auth import get_user_model as gum return gum() else: from django.contrib.auth.models import User return User
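# Hedged usage sketch: the shim above resolves the active user model on both
# pre-1.5 and newer Django, so calling code stays version-agnostic; actually
# running this requires configured Django settings.
def make_demo_user():
    User = get_user_model()
    return User(username="alice")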
from django.apps import AppConfig class MpesaWebhooksConfig(AppConfig): name = 'mpesa_proxy'
import unittest from .general_tests import GeneralTests from mldictionary import Portuguese class TestGeneralPortuguese(GeneralTests, unittest.TestCase): word = 'palavra' def setUp(self): return super().setUp(Portuguese) class TestPortuguese(unittest.TestCase): def setUp(self): self.portuguese_dictionary = Portuguese()
import os
import platform
import requests
import json

from flask import Flask
from flask import request
from flask import jsonify
from flask import render_template

from version import app_version

app = Flask(__name__)

# Initialization
debug = bool(os.getenv('DEBUG'))
print(debug)


# Main page
@app.route('/')
def main():
    # Check for any headers starting with "Kollegehill" and pass them downstream
    request_headers = request.headers
    if debug:
        print("Request headers: %s " % request_headers)
    pass_headers = {}
    for k, v in request_headers.items():
        if str(k).startswith("Kollegehill"):
            pass_headers[k] = v
            print("Adding headers: %s " % pass_headers)

    # Image placeholders
    cat_fact_image_url = 'https://placekitten.com/300/200'

    # Get the cat caption
    catfact_uri = 'https://catfact.ninja/fact'
    image_caption = get_catfact(catfact_uri)

    # Get basic system info
    uname = platform.uname()

    response = render_template('index.html',
                               cat_image=cat_fact_image_url,
                               caption=image_caption,
                               uname=uname)
    return response


def get_catfact(uri):
    response = requests.get(uri, headers={'accept': 'application/json'}).json()
    value = response['fact']
    return value


# System info page
@app.route('/system_info')
def get_system_info():
    platform_data = {}
    uname = platform.uname()
    platform_data['System'] = uname.system
    platform_data['Node'] = uname.node
    platform_data['Release'] = uname.release
    platform_data['Version'] = uname.version
    platform_data['Machine'] = uname.machine
    return platform_data


# Version page
@app.route('/version')
def get_version():
    return app_version


if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
from __future__ import annotations import asyncio import math import json import threading from sphero.sphero_bolt import SpheroBolt import numpy as np from cv2 import cv2 from typing import List # CAP = None CURRENT_COORDINATES = {} def get_json_data(file: str) -> List[dict[str, str]]: """Reads json file and returns a list of dictionaries. Parameters ---------- file : str location of the json file. Returns ------- list[dict[str, str]] list with one or more dictionaries. """ with open(file) as json_file: return json.load(json_file) async def viewMovement(): print("VIEW MOVEMENTS!") global CAP global CURRENT_COORDINATES if CAP is None or not CAP.isOpened(): print("[Error] Could not open the main webcam stream.") return while CAP.isOpened(): ret, main_frame = CAP.read() for bolt_address in list(CURRENT_COORDINATES): bolt = CURRENT_COORDINATES[bolt_address] # color is via BGR cv2.circle(main_frame, (int(bolt.get('coordinate')[0]), int(bolt.get('coordinate')[1])), 5, (int(bolt.get('color')[2]), int(bolt.get('color')[1]), int(bolt.get('color')[0])), 2) cv2.circle(main_frame, (320, 240), 10, (255, 255, 255), 3) cv2.imshow("Movement Viewer", main_frame) if cv2.waitKey(1) & 0xFF == ord("q"): CAP.release() cv2.destroyAllWindows() def findDirection(_point_a, _point_b): direction1 = _point_b[0] - _point_a[0] direction2 = _point_b[1] - _point_a[1] if direction1 == 0: if direction2 == 0: # same points? degree = 0 else: degree = 0 if _point_a[1] > _point_b[1] else 180 elif direction2 == 0: degree = 90 if _point_a[0] < _point_b[0] else 270 else: degree = math.atan(direction2 / direction1) / math.pi * 180 lowering = _point_a[1] < _point_b[1] if (lowering and degree < 0) or (not lowering and degree > 0): degree += 270 else: degree += 90 return degree def getSquareCoordinates(_center=(0, 0), _r=10, _n=10): if _n < 4: _n = 4 if _n == 4: return [[_center[0] + _r, _center[1] - _r], [_center[0] + _r, _center[1] + _r], [_center[0] - _r, _center[1] + _r], [_center[0] - _r, _center[1] - _r]] elif 4 < _n <= 6: return [[_center[0] + _r, _center[1] - _r], [_center[0] + _r, _center[1]], [_center[0] + _r, _center[1] + _r], [_center[0] - _r, _center[1] + _r], [_center[0] - _r, _center[1]], [_center[0] - _r, _center[1] - _r]] elif 6 < _n <= 8: return [[_center[0] + _r, _center[1] - _r], [_center[0] + _r, _center[1]], [_center[0] + _r, _center[1] + _r], [_center[0], _center[1] + _r], [_center[0] - _r, _center[1] + _r], [_center[0] - _r, _center[1]], [_center[0] - _r, _center[1] - _r], [_center[0], _center[1] - _r]] elif 8 < _n <= 10: return [[_center[0] + _r, _center[1] - _r], [_center[0] + _r, _center[1]], [_center[0] + _r, _center[1] + _r], [_center[0] + _r* 0.5, _center[1] + _r], [_center[0] - _r * 0.5, _center[1] + _r], [_center[0] - _r, _center[1] + _r], [_center[0] - _r, _center[1]], [_center[0] - _r, _center[1] - _r], [_center[0] - _r * 0.5, _center[1] - _r], [_center[0] + _r * 0.5, _center[1] - _r]] def getTriangleCoordinates(_center=(0, 0), _r=10, _n=10): if _n < 3: _n = 3 if _n == 3: return [[_center[0], _center[1] + _r], [_center[0] - _r/2, _center[1] - _r], [_center[0] + _r/2, _center[1] - _r]] elif 3 < _n <= 6: return [[_center[0], _center[1] + _r], [(_center[0] + (_center[0] - _r / 2)) / 2, (_center[1] + _r + _center[1] - _r) / 2], [_center[0] - _r / 2, _center[1] - _r], [((_center[0] - _r / 2) + (_center[0] + _r / 2)) / 2, (_center[1] - _r + _center[1] - _r) / 2], [_center[0] + _r / 2, _center[1] - _r], [(_center[0] + (_center[0] + _r / 2))/2, (_center[1] + _r + _center[1] - _r)/2]] elif 6 < _n <= 10: 
        return [[_center[0], _center[1] + _r*1.5],
                [_center[0], _center[1] + _r*0.75],
                [(_center[0] + (_center[0] - _r / 2)) / 2, (_center[1] + _r + _center[1] - _r) / 2],
                [_center[0], _center[1]],
                [_center[0] - _r, _center[1] - _r],
                [_center[0] - _r / 2, _center[1] - _r],
                [((_center[0] - _r / 2) + (_center[0] + _r / 2)) / 2, (_center[1] - _r + _center[1] - _r) / 2],
                [_center[0] + _r / 2, _center[1] - _r],
                [_center[0] + _r, _center[1] - _r],
                [(_center[0] + (_center[0] + _r / 2)) / 2, (_center[1] + _r + _center[1] - _r) / 2]]


def getCircleCoordinates(_center=(0, 0), _r=10, _n=10):
    if _n < 4:
        _n = 4
    return [
        [
            _center[0] + (math.cos(2 * math.pi / _n * x) * _r),  # x
            _center[1] + (math.sin(2 * math.pi / _n * x) * _r)   # y
        ]
        for x in range(0, _n)]


async def sendToCoordinates(bolts, coordinates, CAPTURE):
    global CURRENT_COORDINATES
    threads = []

    for bolt in bolts:
        await bolt.setMatrixLED(0, 0, 0)
        await bolt.setFrontLEDColor(0, 0, 0)
        await bolt.setBackLEDColor(0, 0, 0)

    for i in range(len(bolts)):
        if i >= len(coordinates):
            break
        thread = threading.Thread(target=asyncio.run,
                                  args=(sendToCoordinate(bolts[i], coordinates[i], CAPTURE),))
        thread.start()
        threads.append(thread)

    for thread in threads:
        thread.join()

    for bolt in bolts:
        await bolt.setMatrixLED(bolt.color[0], bolt.color[1], bolt.color[2])
        await bolt.setFrontLEDColor(255, 255, 255)
        await bolt.setBackLEDColor(255, 0, 0)


async def sendToCoordinate(bolt, coordinate, CAPTURE):
    global CURRENT_COORDINATES
    print(f"[!] Sending bolt {bolt.address} to X: {coordinate[0]}, Y: {coordinate[1]}")

    if CAPTURE is None or not CAPTURE.isOpened():
        print("[Error] Could not open webcam.")
        return

    CURRENT_COORDINATES[bolt.address] = {
        'color': bolt.color,
        'coordinate': coordinate
    }

    correct_coordinate = False
    while CAPTURE.isOpened() and not correct_coordinate:
        ret, main_frame = CAPTURE.read()
        cv2.circle(main_frame, (int(coordinate[0]), int(coordinate[1])), 5, (0, 0, 255), 2)

        hsv_frame = cv2.medianBlur(cv2.cvtColor(main_frame, cv2.COLOR_BGR2HSV), 9)
        lower = np.array(bolt.low_hsv, np.uint8)
        upper = np.array(bolt.high_hsv, np.uint8)
        mask = cv2.inRange(hsv_frame, lower, upper)

        contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

        if len(contours) > 0:
            # for pic, contour in enumerate(contours):
            contour = max(contours, key=cv2.contourArea)
            area = cv2.contourArea(contour)
            if area >= 25:
                x, y, w, h = cv2.boundingRect(contour)
                cv2.rectangle(main_frame, (x, y), (x + w, y + h), (0, 255, 0), 2)

                direction = findDirection([x, y], coordinate)

                # in right position
                if x < coordinate[0] < x + w and y < coordinate[1] < y + h:
                    # to be sure that the bolt gets the command
                    for i in range(10):
                        await bolt.roll(0, 0)
                    correct_coordinate = True
                    CURRENT_COORDINATES.pop(bolt.address, None)
                else:
                    await bolt.roll(35, int(direction))

        cv2.imshow(f"Detection for {bolt.name}, coordinates: {coordinate}", main_frame)
        if cv2.waitKey(1) & 0xFF == ord("q"):
            CAPTURE.release()
            cv2.destroyAllWindows()
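# Hedged demo of getCircleCoordinates above: eight evenly spaced points on a
# circle of radius 10 around the frame centre used elsewhere in this module.
demo_points = getCircleCoordinates((320, 240), _r=10, _n=8)
assert len(demo_points) == 8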
class BattleObject():
    def __init__(self, hp, mp, str, dex, int, agi, wis, luk):
        self.name = None
        self.job = None
        self.hp = hp
        self.mp = mp
        self.str = str
        self.dex = dex
        self.int = int
        self.agi = agi
        self.wis = wis
        self.luk = luk

    def __str__(self):
        # __str__ must return a str; fall back to the class name while
        # self.name is still unset.
        return self.name if self.name is not None else self.__class__.__name__
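# Hedged usage sketch for BattleObject; the stat values are arbitrary.
hero = BattleObject(hp=100, mp=30, str=12, dex=9, int=7, agi=11, wis=8, luk=5)
hero.name, hero.job = "Alice", "Knight"
print(hero)  # -> Alice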
from canaille.apputils import obj_to_b64 from canaille.flaskutils import permissions_needed from canaille.mails import profile_hash from canaille.mails import send_invitation_mail from flask import Blueprint from flask import current_app from flask import flash from flask import request from flask import url_for from flask_babel import gettext as _ from flask_themer import render_template from flask_wtf import FlaskForm from wtforms import StringField from wtforms.validators import DataRequired from wtforms.validators import Email bp = Blueprint("admin", __name__, url_prefix="/admin") class MailTestForm(FlaskForm): mail = StringField( _("Email"), validators=[ DataRequired(), Email(), ], render_kw={ "placeholder": _("jane@doe.com"), "spellcheck": "false", "autocorrect": "off", }, ) @bp.route("/mail", methods=["GET", "POST"]) @permissions_needed("manage_oidc") def mail_index(user): form = MailTestForm(request.form or None) if request.form and form.validate(): if send_invitation_mail(form.mail.data, ""): flash(_("The test invitation mail has been sent correctly"), "success") else: flash(_("The test invitation mail has been sent correctly"), "error") return render_template("mail/admin.html", form=form, menuitem="admin") @bp.route("/mail/password-init.html") @permissions_needed("manage_oidc") def password_init_html(user): base_url = url_for("account.index", _external=True) reset_url = url_for( "account.reset", uid=user.uid[0], hash=profile_hash(user.uid[0], user.mail[0], user.userPassword[0]), _external=True, ) return render_template( "mail/firstlogin.html", site_name=current_app.config.get("NAME", reset_url), site_url=base_url, reset_url=reset_url, logo=current_app.config.get("LOGO"), title=_("Password initialization on {website_name}").format( website_name=current_app.config.get("NAME", reset_url) ), ) @bp.route("/mail/password-init.txt") @permissions_needed("manage_oidc") def password_init_txt(user): base_url = url_for("account.index", _external=True) reset_url = url_for( "account.reset", uid=user.uid[0], hash=profile_hash(user.uid[0], user.mail[0], user.userPassword[0]), _external=True, ) return render_template( "mail/firstlogin.txt", site_name=current_app.config.get("NAME", reset_url), site_url=current_app.config.get("SERVER_NAME", base_url), reset_url=reset_url, ) @bp.route("/mail/reset.html") @permissions_needed("manage_oidc") def password_reset_html(user): base_url = url_for("account.index", _external=True) reset_url = url_for( "account.reset", uid=user.uid[0], hash=profile_hash(user.uid[0], user.mail[0], user.userPassword[0]), _external=True, ) return render_template( "mail/reset.html", site_name=current_app.config.get("NAME", reset_url), site_url=base_url, reset_url=reset_url, logo=current_app.config.get("LOGO"), title=_("Password reset on {website_name}").format( website_name=current_app.config.get("NAME", reset_url) ), ) @bp.route("/mail/reset.txt") @permissions_needed("manage_oidc") def password_reset_txt(user): base_url = url_for("account.index", _external=True) reset_url = url_for( "account.reset", uid=user.uid[0], hash=profile_hash(user.uid[0], user.mail[0], user.userPassword[0]), _external=True, ) return render_template( "mail/reset.txt", site_name=current_app.config.get("NAME", reset_url), site_url=current_app.config.get("SERVER_NAME", base_url), reset_url=reset_url, ) @bp.route("/mail/<uid>/<email>/invitation.html") @permissions_needed("manage_oidc") def invitation_html(user, uid, email): base_url = url_for("account.index", _external=True) registration_url = url_for( 
"account.registration", data=obj_to_b64([uid, email]), hash=profile_hash(uid, email), _external=True, ) return render_template( "mail/invitation.html", site_name=current_app.config.get("NAME", base_url), site_url=base_url, registration_url=registration_url, logo=current_app.config.get("LOGO"), title=_("Invitation on {website_name}").format( website_name=current_app.config.get("NAME", base_url) ), ) @bp.route("/mail/<uid>/<email>/invitation.txt") @permissions_needed("manage_oidc") def invitation_txt(user, uid, email): base_url = url_for("account.index", _external=True) registration_url = url_for( "account.registration", data=obj_to_b64([uid, email]), hash=profile_hash(uid, email), _external=True, ) return render_template( "mail/invitation.txt", site_name=current_app.config.get("NAME", base_url), site_url=base_url, registration_url=registration_url, )
#Copyright 2014 MathWorks, Inc. class RejectedExecutionError(Exception): """Exception raised from MATLAB engine""" def __init__(self, message): self.message = message def __repr__(self): return self.message
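# Hedged illustration of the exception class above.
try:
    raise RejectedExecutionError("engine is shutting down")
except RejectedExecutionError as e:
    print(repr(e))  # -> engine is shutting down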
import pygame


class Block:
    def __init__(self, size=(30, 10), position=(100, 100), hits_to_disappear=1, image=None):
        self.image = image
        self.image = self.image.subsurface((513, 612, 1681 - 513, 1334 - 612))
        self.image = pygame.transform.scale(self.image, (self.image.get_rect().size[0] // 10, self.image.get_rect().size[1] // 10))
        self.size = (self.image.get_rect().size[0], self.image.get_rect().size[1])
        self.position = position
        self.hits_to_disappear = hits_to_disappear

    def draw(self, screen):
        if self.hits_to_disappear > 0:
            screen.blit(self.image, self.position)

    def check_for_hit(self, ball):
        if self.hits_to_disappear > 0:
            # hit on the left edge: bounce horizontally
            if ball.center[0] > self.position[0] - ball.radius and ball.center[0] < self.position[0] \
                    and ball.center[1] > self.position[1] and ball.center[1] < self.position[1] + self.size[1]:
                self.hits_to_disappear = self.hits_to_disappear - 1
                ball.move_vector[0] = -ball.move_vector[0]
            # hit on the right edge: bounce horizontally
            if ball.center[0] > self.position[0] + self.size[0] and ball.center[0] < self.position[0] + self.size[0] + ball.radius \
                    and ball.center[1] > self.position[1] and ball.center[1] < self.position[1] + self.size[1]:
                self.hits_to_disappear = self.hits_to_disappear - 1
                ball.move_vector[0] = -ball.move_vector[0]
            # hit on the top edge: bounce vertically
            if ball.center[0] > self.position[0] and ball.center[0] < self.position[0] + self.size[0] \
                    and ball.center[1] < self.position[1] and ball.center[1] > self.position[1] - ball.radius:
                self.hits_to_disappear = self.hits_to_disappear - 1
                ball.move_vector[1] = -ball.move_vector[1]
            # hit on the bottom edge: bounce vertically
            if ball.center[0] > self.position[0] and ball.center[0] < self.position[0] + self.size[0] \
                    and ball.center[1] > self.position[1] + self.size[1] and ball.center[1] < self.position[1] + self.size[1] + ball.radius:
                self.hits_to_disappear = self.hits_to_disappear - 1
                ball.move_vector[1] = -ball.move_vector[1]
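# Hedged usage sketch: Block expects a pygame Surface large enough for the
# hard-coded subsurface rect (at least 1681x1334); a blank dummy surface
# satisfies that here.
pygame.init()
sprite_sheet = pygame.Surface((1700, 1400))
block = Block(position=(50, 40), image=sprite_sheet)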
import dash import dash_table import dash_core_components as dcc import dash_html_components as html import dash_bootstrap_components as dbc from dash.dependencies import Input, Output import plotly.graph_objs as go import pandas as pd from numpy import nan from province_names import prov_names from get_covid_data_from_url import get_covid_data from format_data import group_age, order_agegroups, inverse_order_dict external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css'] covid_case_url = r'https://docs.google.com/spreadsheets/d/1D6okqtBS3S2NRC7GFVHzaZ67DuTw7LX49-fqSLwJyeo/export?format=xlsx' df, deaths, update_date = get_covid_data(covid_case_url, method_='url') # Clean age group data for x in [df, deaths]: x['age'] = group_age(x['age']) x['age_order'] = order_agegroups(x['age']) app = dash.Dash() server = app.server app.title = 'COVID-19 Dashboard for Canada' # Define layout layout = { 'autosize': True, 'automargin': True, 'margin': {'l': 30, 'r': 30, 'b': 20, 't': 40}, 'hovermode': "closest", 'plot_bgcolor': "#F9F9F9", 'paper_bgcolor': "#F9F9F9", 'legend': {'font': {'size': 10}, 'orientation': "h"}, 'title': "Satellite Overview", 'main_bg': '#ececec', 'Male': '#bcd2ee', 'Female': '#6ca6cd', 'Not Reported': '#cae1ff' } colname_dict = {'provincial_case_id': 'Provincial ID', 'age': 'Age Range', 'sex': 'Sex', 'health_region': 'Region', 'province': 'Province', 'date_report': 'Report Date', 'report_week': 'Week Reported', 'travel_yn': 'Travel?', 'travel_history_country': 'Country of Travel', 'additional_info': 'Additional Info', 'age_order': 'Age (order)', 'death_id': 'ID', 'date_death_report': 'Report Date'} app.layout = html.Div( style={'backgroundColor': layout['main_bg'], 'textAlign': 'center', 'font-family': 'arial'}, children=[ html.H1(children='COVID-19 Confirmed Cases in Canada by Date Reported'), html.H3(children=f'(Last refresh: {update_date})'), html.P(children='Built by Fabienne Chan. 
Data is crowd-sourced and I do not take liability for faulty reporting.'), html.A("[Data source]", href="https://docs.google.com/spreadsheets/d/1D6okqtBS3S2NRC7GFVHzaZ67DuTw7LX49-fqSLwJyeo/"), html.A("[GitHub]", href="https://github.com/fabhlc/covid-dash"), # Dropdown html.Div(children='''Select geography:'''), html.Div([dcc.Dropdown(id='Province', options=[{'label': prov_names[i], 'value': i } for i in ['All Provinces'] + sorted(list(df.province.unique()))], value='All Provinces')], style={'width': '25%', 'display': 'inline-block'}), html.Div([dcc.Dropdown(id='Region', options=[{'label': 'All Regions', 'value': 'All Regions'}],#{'label': i, # 'value': i # } for i in listt(set(df['health_region'])) + ['All Regions']], value='All Regions')], style={'width': '25%', 'display': 'inline-block'}), # keycards html.Div( dbc.Row( [dbc.Col( dbc.Card( dbc.CardBody( [html.H4(children='Canada Total', className="card-title"), html.H1(id='canadatext_subtitle', className="card-subtitle")]), color="info", outline=True)), dbc.Col( dbc.Card( dbc.CardBody( [html.H4(children='Provincial Total', className="card-title"), html.H1(id='provtext_subtitle', className="card-subtitle")]), color="info", outline=True)), dbc.Col( dbc.Card( dbc.CardBody( [html.H4(children='Regional Total', className="card-title"), html.H1(id='reg_total', className="card-subtitle")]), color="info", outline=True)) ]) ), dcc.Graph(id='funnel-graph'), html.H4(children='Individual COVID cases'), dash_table.DataTable(id='filtered-datatable', page_size=15, page_current=0, style_header={'fontWeight': 'bold', 'backgroundColor': '#cdc9c9'}, style_data_conditional=[{'if': {'row_index': 'odd'}, 'backgroundColor': '#fffafa'}]), dcc.Graph(id='agegender-graph'), html.Div(children='* - Excluding records where neither sex nor age are reported.', style={'color': 'grey', 'fontsize': 9}), # Deaths html.Div( [html.H4(children='Fatal Cases of Covid'), dash_table.DataTable(id='death-df', page_size=15, page_current=0, style_header={'fontWeight': 'bold', 'backgroundColor': '#cdc9c9'}, style_data_conditional=[{'if': {'row_index': 'odd'}, 'backgroundColor': '#fffafa'}] ), dcc.Graph(id='death-graph'), html.Div(children='* - Excluding records where neither sex nor age are reported.', style={'color': 'grey', 'fontsize': 9}) ], ) ]) @app.callback( Output('funnel-graph', 'figure'), [Input('Province', 'value'), Input('Region', 'value')]) def update_graph(prov, region): title_addendum = '' if prov == "All Provinces": df_plot = df.copy() else: df_plot = df[df['province'] == prov] if region != 'All Regions': df_plot = df_plot[df_plot['health_region'] == region] title_addendum = f' ({region})' pv = pd.pivot_table(df_plot, index=['date_report'], columns=['province'], values=['provincial_case_id'], aggfunc='count', fill_value=nan) if prov == 'All Provinces': trace1 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Ontario')], name='Ontario') trace2 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'BC')], name='British Columbia') trace3 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Alberta')], name='Alberta') trace4 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Manitoba')], name='Manitoba') trace5 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'NL')], name='Newfoundland and Labrador') trace6 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'New Brunswick')], name='New Brunswick') trace7 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Quebec')], name='Quebec') trace8 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Yukon')], name='Yukon') trace9 = go.Bar(x=pv.index, 
y=pv[('provincial_case_id', 'Saskatchewan')], name='Saskatchewan') trace10 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'PEI')], name='Prince Edward Island') trace11 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'Nova Scotia')], name='Nova Scotia') trace12 = go.Bar(x=pv.index, y=pv[('provincial_case_id', 'NWT')], name='Northwest Territories') traces = [trace1, trace2, trace3, trace4, trace5, trace6, trace7, trace8, trace9, trace10, trace11, trace12] else: trace1 = go.Bar(x=pv.index, y=pv[('provincial_case_id', prov)], name=prov_names[prov]) traces = [trace1] return { 'data': traces, 'layout': go.Layout( title=f'Cases in {prov}{title_addendum}', barmode='stack') } # Always update region to "All Regions" when province is changed. @app.callback( Output("Region", "value"), [Input("Province", "value")]) def refresh_region(prov): return 'All Regions' # Update region list to change to region to province's regions @app.callback( Output("Region", "options"), [Input("Province", "value")]) def update_region(prov): region_list = [{'label': 'All Regions', 'value': 'All Regions'}] if prov != 'All Provinces': region_list = list(set(df[df['province'] == prov]['health_region'])) region_list = [{'label': i, 'value': i} for i in ['All Regions'] + sorted(region_list)] return region_list # Cases Table @app.callback( [Output('filtered-datatable', 'columns'), Output('filtered-datatable', 'data')], [Input('Province', 'value'), Input('Region', 'value')]) def update_graph(prov, region): if prov == "All Provinces": df_plot = df.copy() else: df_plot = df[df['province'] == prov] if region != 'All Regions': df_plot = df_plot[df_plot['health_region'] == region] # Format datetime df_plot.loc[:, 'date_report'] = df_plot['date_report'].dt.strftime('%d-%m-%Y') # Edit columns (format, drop, rename) df_plot.drop(['age_order', 'report_week'], axis=1, inplace=True) cols = [{"name": colname_dict[i], "id": i} for i in df_plot.columns] data_ = df_plot.to_dict('records') return cols, data_ # Update keycards @app.callback( [Output("canadatext_subtitle", "children"), Output("provtext_subtitle", "children"), Output("reg_total", "children")], [Input("Province", "value"), Input("Region", "value")]) def update_text(prov, region): canadatext = "{:,}".format(len(df)) provtext = '-' reg_total = '-' if prov != 'All Provinces': provtext = "{:,}".format(sum(df['province'] == prov)) if region != 'All Regions': reg_total = '{:,}'.format(len(df[(df['province'] == prov) & (df['health_region'] == region)])) return canadatext, provtext, reg_total # Update age/gender distribution - bar chart @app.callback( Output("agegender-graph", "figure"), [Input("Province", "value"), Input("Region", 'value')]) def update_agegender(prov, region): if prov == 'All Provinces': df_plot = df.copy() geo_name = 'Canada' else: df_plot = df[df['province'] == prov] geo_name = prov if region != 'All Regions': df_plot = df_plot[df_plot['health_region'] == region] geo_name = f"{prov} {(region)}" # Drop if row doesn't have values for either age or sex df_plot = df_plot[~((df_plot['age']=='Not Reported') & (df_plot['sex'] == 'Not Reported'))] # If there are not reported values: output_data = [] tick_vals =[] if len(df_plot) > 0: # Group df_plot = df_plot.groupby(['sex', 'age_order'])['provincial_case_id'].count().unstack(fill_value=0).stack() # If gender data exists for province, add to figure data for (sx, colour) in [('Female', layout['Female']), ('Male', layout['Male']), ('Not Reported', layout['Not Reported'])]: try: if sx in [i[0] for i in df_plot.index]: 
output_data.append({'x': df_plot[sx].index, 'y': df_plot[sx].values, 'type': 'bar', 'name': sx, 'color': colour}) tick_vals = df_plot[sx].index except: pass return { 'data': output_data, 'layout': go.Layout( title=f'Breakdown by Age and Gender in {geo_name}*', xaxis=dict(tickvals=tick_vals, ticktext=[inverse_order_dict(i) for i in tick_vals], title='Age Range') ) } @app.callback( [Output('death-df', 'columns'), Output('death-df', 'data'), Output('death-graph', 'figure')], [Input('Province', 'value'), Input('Region', 'value')]) def update_deathsdf(prov, region): if_region = '' if prov == 'All Provinces': death_plot = deaths.copy() death_count = len(deaths) else: death_plot = deaths[deaths['province'] == prov] death_count = sum(deaths['province'] == prov) if region != 'All Regions': death_plot = death_plot[death_plot['health_region'] == region] death_count = sum(death_plot['health_region'] == region) if_region = f' ({region})' cols = [{"name": colname_dict[i], "id": i} for i in death_plot.columns] data_ = death_plot.to_dict('records') if len(death_plot) > 0: # Graph death_plot = death_plot[~((death_plot['age'] == 'Not Reported') & (death_plot['sex'] == 'Not Reported'))] death_plot = death_plot.groupby(['sex', 'age_order'])['death_id'].count().unstack(fill_value=0).stack() # If gender data exists for province, add to figure data death_plot_data_data = [] for (sx, colour) in [('Female', layout['Female']), ('Male', layout['Male']), ('Not Reported', layout['Not Reported'])]: if sx in [i[0] for i in death_plot.index]: death_plot_data_data.append({'x': death_plot[sx].index, 'y': death_plot[sx].values, 'type': 'bar', 'name': sx, 'color': colour}) tick_vals = death_plot[sx].index else: tmp = deaths.groupby(['sex', 'age_order'])['death_id'].count().Male.index death_plot_data_data = [{'x': tmp, 'y': [0 for i in tmp], 'type': 'bar', 'name': 'null', 'color': 'primary'}] tick_vals = tmp death_plot_data = { 'data': death_plot_data_data, 'layout': go.Layout( title=f'Deaths by Age and Gender in {prov}{if_region}* (Total: {death_count} deaths)', xaxis={'tickvals': tick_vals, 'ticktext': [inverse_order_dict(i) for i in tick_vals], 'title': 'Age Range'} ) } return cols, data_, death_plot_data if __name__ == '__main__': app.run_server(debug=True, dev_tools_hot_reload_interval=40_000)
import bpy from gpu_extras.batch import batch_for_shader from bpy.types import Operator, GizmoGroup, Gizmo import bmesh import bgl import gpu from math import sin, cos, pi from gpu.types import ( GPUBatch, GPUVertBuf, GPUVertFormat, ) from mathutils import Matrix, Vector import mathutils # Shader from .utils.shader import vs_uni, fs_uni, vs_sm, fs_sm shader_uni = gpu.types.GPUShader(vs_uni, fs_uni) shader_sm = gpu.types.GPUShader(vs_sm, fs_sm) # Activate Tool from .utils.active_tool import active_tool # --- Retopology Tool tools_ret = { "PS_tool.poly_quilt", "PS_tool.poly_quilt_poly", "PS_tool.poly_quilt_extrude", "PS_tool.poly_quilt_edgeloop", "PS_tool.poly_quilt_loopcut", "PS_tool.poly_quilt_knife", "PS_tool.poly_quilt_delete", "PS_tool.poly_quilt_brush", "PS_tool.poly_quilt_seam", "builtin.poly_build", 'mesh_tool.poly_quilt', 'mesh_tool.poly_quilt_poly', 'mesh_tool.poly_quilt_extrude', 'mesh_tool.poly_quilt_edgeloop', 'mesh_tool.poly_quilt_loopcut', 'mesh_tool.poly_quilt_knife', 'mesh_tool.poly_quilt_delete', 'mesh_tool.poly_quilt_brush', 'mesh_tool.poly_quilt_seam', } def PS_draw_bgl(self, context): if context.mode == 'EDIT_MESH': # context.active_object != None and context.active_object.select_get() and #start_time = time.time() props = context.preferences.addons[__package__].preferences settings = context.scene.ps_set_ theme = context.preferences.themes['Default'] vertex_size = theme.view_3d.vertex_size # Color VA_Col = props.v_alone_color[0], props.v_alone_color[1], props.v_alone_color[2], props.v_alone_color[3] VE_Col = props.VE_color[0], props.VE_color[1], props.VE_color[2], props.VE_color[3] F_Col = props.F_color[0], props.F_color[1], props.F_color[2], props.opacity sel_Col = props.select_color[0], props.select_color[1], props.select_color[2], 1.0 bgl.glEnable(bgl.GL_BLEND) bgl.glLineWidth(props.edge_width) bgl.glPointSize(vertex_size + props.verts_size) bgl.glCullFace(bgl.GL_BACK) if props.xray_ret == False: bgl.glEnable(bgl.GL_DEPTH_TEST) bgl.glEnable(bgl.GL_CULL_FACE) if props.line_smooth: bgl.glEnable(bgl.GL_LINE_SMOOTH) #bgl.glDepthRange(0, 0.99999) #bgl.glDepthFunc(600) bgl.glDepthMask(False) is_perspective = context.region_data.is_perspective if is_perspective: z_bias = props.z_bias / 350 else: z_bias = 1.0 tool_retopo = active_tool().idname in tools_ret # Retopology Tools if tool_retopo: shader = shader_uni else: shader = shader_sm shader.bind() view_mat = context.region_data.perspective_matrix shader.uniform_float("view_mat", view_mat) shader.uniform_float("Z_Bias", z_bias) shader.uniform_float("Z_Offset", props.z_offset) if props.use_mod_ret: depsgraph = context.evaluated_depsgraph_get() uniques = context.objects_in_mode_unique_data #uniques = context.selected_objects #uniques = context.objects_in_mode for obj in uniques: if props.use_mod_ret: if len(obj.modifiers) > 0: depsgraph.update() ob_eval = obj.evaluated_get(depsgraph) me = ob_eval.to_mesh() bm = bmesh.new() bm.from_mesh(me, face_normals=True, use_shape_key=False) bm.verts.ensure_lookup_table() bm.edges.ensure_lookup_table() bm.faces.ensure_lookup_table() else: bm = bmesh.from_edit_mesh(obj.data) if len(bm.verts) <= props.maxP_retop: # Если выбран инструмент ретопологии if tool_retopo: # все вертексы vCo = [obj.matrix_world @ v.co for v in bm.verts] vNm = [v.normal for v in bm.verts] # --- FACES if settings.draw_faces: loop_triangles = bm.calc_loop_triangles() faces_indices = [[loop.vert.index for loop in looptris] for looptris in loop_triangles] FACES = batch_for_shader(shader, 'TRIS', {"pos": vCo, 'nrm': vNm}, 
indices=faces_indices) shader.uniform_float("color", F_Col) FACES.draw(shader) # --- EDGES if settings.draw_edges: #edges_indices = [ [e.verts[0].index, e.verts[1].index] for e in bm.edges] edges_ind = [e.index for e in bm.edges] edges_cord = [obj.matrix_world @ v.co for i in edges_ind for v in bm.edges[i].verts] eNm = [v.normal for i in edges_ind for v in bm.edges[i].verts] EDGES = batch_for_shader(shader, 'LINES', {"pos": edges_cord, 'nrm': eNm}) shader.uniform_float("color", VE_Col) EDGES.draw(shader) # --- VERTS # только одиночные вертексы if settings.draw_verts: vCo_one = [obj.matrix_world @ v.co for v in bm.verts if len(v.link_faces) < 1] #not v.is_manifold] (not v.is_manifold and v.is_wire) vCo_one_Nm = [v.normal for v in bm.verts if len(v.link_faces) < 1] VERTS = batch_for_shader(shader, 'POINTS', {"pos": vCo_one, 'nrm': vCo_one_Nm}) shader.uniform_float("color", VA_Col) VERTS.draw(shader) # Если выбраны обычные инструменты else: # --- FACES vCo = [obj.matrix_world @ v.co for v in bm.verts] vNm = [v.normal for v in bm.verts] v_len = len(vCo) if settings.draw_faces: loop_triangles = bm.calc_loop_triangles() faces_indices = [[loop.vert.index for loop in looptris] for looptris in loop_triangles] face_col = [F_Col for i in range(v_len)] FACES = batch_for_shader(shader, 'TRIS', {"pos": vCo, "col": face_col, 'nrm': vNm}, indices=faces_indices) FACES.draw(shader) # --- EDGES if settings.draw_edges: edges_ind = [e.index for e in bm.edges] edges_cord = [obj.matrix_world @ v.co for i in edges_ind for v in bm.edges[i].verts] eNm = [v.normal for i in edges_ind for v in bm.edges[i].verts] edge_col = [VE_Col for i in range(len(edges_cord))] for i, edge in enumerate(bm.edges): # Окрашивание выделенных элементов if edge.select: ind = i*2 ind2 = ind + 1 edge_col[ind] = sel_Col edge_col[ind2] = sel_Col #edges_indices = [ [e.verts[0].index, e.verts[1].index] for e in bm.edges] EDGES = batch_for_shader(shader, 'LINES', {"pos": edges_cord, "col": edge_col, 'nrm': eNm}) # , indices=edges_indices EDGES.draw(shader) # --- VERTS if settings.draw_verts: vert_col = [VE_Col for i in range(v_len)] for i, vert in enumerate(bm.verts): # Окрашивание выделенных элементов if len(vert.link_faces) < 1: vert_col[i] = VA_Col if vert.select: #face_col[i] = select_color_f vert_col[i] = sel_Col #edge_col[i] = sel_Col VERTS = batch_for_shader(shader, 'POINTS', {"pos": vCo, "col": vert_col, 'nrm': vNm}) if context.tool_settings.mesh_select_mode[0]: VERTS.draw(shader) if props.use_mod_ret: bm.free() """ if props.line_smooth: bgl.glDisable(bgl.GL_LINE_SMOOTH) """ """ bgl.glDisable(bgl.GL_DEPTH_TEST) bgl.glDisable(bgl.GL_CULL_FACE) bgl.glLineWidth(1) bgl.glPointSize(1) bgl.glDisable(bgl.GL_BLEND) """ #end_time = time.time() #print(end_time-start_time) REFRESH = False #----------------------------------------------- FROM GIZMO TODO class PS_GT_draw(Gizmo): bl_idname = 'PS_GT_draw' def draw(self, context): global REFRESH if REFRESH: PS_draw_bgl(self, context) #REFRESH = False def setup(self): self.use_draw_modal = False #self.hide_select = True #self.group = PS_GGT_draw_group.bl_idname """ def test_select(self, context, location): if context.area.type == 'VIEW_3D': context.area.tag_redraw() return -1 """ class PS_GGT_draw_group(GizmoGroup): bl_idname = 'PS_GGT_draw_mesh' bl_label = "PS Draw" bl_space_type = 'VIEW_3D' bl_region_type = 'WINDOW' bl_options = {'3D', 'SHOW_MODAL_ALL'} #'DEPTH_3D' , 'TOOL_INIT', 'SELECT', , 'SCALE' , 'SHOW_MODAL_ALL' 'PERSISTENT', @classmethod def poll(cls, context): settings = context.scene.ps_set_ 
return settings.PS_retopology def setup(self, context): mesh = self.gizmos.new(PS_GT_draw.bl_idname) self.mesh = mesh def refresh(self, context): global REFRESH REFRESH = True def draw_prepare(self, context): settings = context.scene.ps_set_ """ mesh = self.mesh if settings.PS_retopology: mesh.hide = False else: mesh.hide = True """ classes = [ PS_GT_draw, PS_GGT_draw_group, ] def register(): for cls in classes: bpy.utils.register_class(cls) def unregister(): for cls in classes: bpy.utils.unregister_class(cls)
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from nmtlab.models import EncoderDecoderModel import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence from nmtlab.modules import MultiHeadAttention class RNMTPlusModel(EncoderDecoderModel): """RNMT+ Model. Encoder: Deep bidirectional LSTM Decoder: Deep forward LSTM Attention: Multihead Attention Other tricks: dropout, residual connection, layer normalization TODO: per gate layer normlaization """ def __init__(self, num_encoders=1, num_decoders=2, layer_norm=False, **kwargs): """Create a RNMT+ Model. Args: num_encoders - Number of bidirectional encoders. num_decoders - Number of forward decoders. layer_norm - Using normal layer normalization. """ self.num_encoders = num_encoders self.num_decoders = num_decoders self.layer_norm = layer_norm super(RNMTPlusModel, self).__init__(**kwargs) def prepare(self): # Embedding layers self.src_embed_layer = nn.Embedding(self._src_vocab_size, self.embed_size) self.tgt_embed_layer = nn.Embedding(self._tgt_vocab_size, self.embed_size) # Encoder self.encoder_rnns = [] for l in range(self.num_encoders): if l == 0: encoder_lstm = nn.LSTM(self.embed_size, self.hidden_size, batch_first=True, bidirectional=True) else: encoder_lstm = nn.LSTM(self.hidden_size * 2, self.hidden_size, batch_first=True, bidirectional=True) setattr(self, "encoder_rnn{}".format(l + 1), encoder_lstm) self.encoder_rnns.append(encoder_lstm) self.project_nn = nn.Linear(self.hidden_size * 2, self.hidden_size) # Decoder self.decoder_rnns = [] for l in range(self.num_decoders): if l == 0: decoder_lstm = nn.LSTM(self.embed_size, self.hidden_size, batch_first=True) else: decoder_lstm = nn.LSTM(self.embed_size + self.hidden_size, self.hidden_size, batch_first=True) setattr(self, "decoder_rnn{}".format(l + 1), decoder_lstm) self.decoder_rnns.append(decoder_lstm) self.attention = MultiHeadAttention(self.hidden_size, num_head=4, additive=False) self.dropout = nn.Dropout(0.2) self.expander_nn = nn.Sequential( nn.Linear(self.hidden_size * 2, 600), nn.Linear(600, self._tgt_vocab_size)) self.residual_scaler = torch.sqrt(torch.from_numpy(np.array(0.5, dtype="float32"))) state_names = ["context", "final_hidden"] for i in range(self.num_decoders): state_names.append("hidden{}".format(i + 1)) state_names.append("cell{}".format(i + 1)) self.set_states(state_names, [self.hidden_size] * (self.num_decoders * 2 + 2)) self.set_stepwise_training(False) def encode(self, src_seq, src_mask=None): src_embed = self.src_embed_layer(src_seq) src_embed = self.dropout(src_embed) enc_states = src_embed for l, rnn in enumerate(self.encoder_rnns): prev_states = enc_states if src_mask is not None: prev_states = pack_padded_sequence(prev_states, lengths=src_mask.sum(1), batch_first=True) enc_states, (enc_last_hidden, _) = rnn(prev_states) if src_mask is not None: enc_states, _ = pad_packed_sequence(enc_states, batch_first=True) enc_states = self.dropout(enc_states) if l >= 2: enc_states = self.residual_scaler * (enc_states + prev_states) if self.layer_norm: enc_states = F.layer_norm(enc_states, (self.hidden_size * 2,)) enc_states = self.project_nn(enc_states) if self.layer_norm: enc_states = F.layer_norm(enc_states, (self.hidden_size,)) encoder_outputs = { "encoder_states": enc_states, "keys": enc_states, "src_mask": src_mask } return encoder_outputs def 
lookup_feedback(self, feedback): tgt_embed = self.tgt_embed_layer(feedback) tgt_embed = self.dropout(tgt_embed) return tgt_embed def decode_step(self, context, states, full_sequence=False): if full_sequence: feedback_embeds = states.feedback_embed[:, :-1] dec_states = None for l, rnn in enumerate(self.decoder_rnns): if l == 0: dec_states, _ = rnn(feedback_embeds) if self.layer_norm: dec_states = F.layer_norm(dec_states, (self.hidden_size,)) # Attention states.context, _ = self.attention(dec_states, context.keys, context.encoder_states, mask=context.src_mask) else: prev_states = dec_states dec_input = torch.cat([prev_states, states.context], 2) dec_states, _ = rnn(dec_input) dec_states = self.dropout(dec_states) if l >= 2: dec_states = self.residual_scaler * (dec_states + prev_states) if self.layer_norm: dec_states = F.layer_norm(dec_states, (self.hidden_size,)) else: feedback_embed = states.feedback_embed dec_states = None for l, rnn in enumerate(self.decoder_rnns): lstm_state = (getattr(states, "hidden{}".format(l + 1)), getattr(states, "cell{}".format(l + 1))) if l == 0: _, (states.hidden1, states.cell1) = rnn(feedback_embed.transpose(0, 1), lstm_state) dec_states = states.hidden1 if self.layer_norm: dec_states = F.layer_norm(dec_states, (self.hidden_size,)) # Attention states.context, _ = self.attention(dec_states.squeeze(0), context.keys, context.encoder_states, mask=context.src_mask) states.context = states.context.unsqueeze(0) else: prev_states = dec_states dec_input = torch.cat([prev_states, states.context], 2) _, (hidden, cell) = rnn(dec_input.transpose(1, 0), lstm_state) dec_states = self.dropout(hidden) if l >= 2: dec_states = self.residual_scaler * (dec_states + prev_states) if self.layer_norm: dec_states = F.layer_norm(dec_states, (self.hidden_size,)) states["hidden{}".format(l + 1)] = hidden states["cell{}".format(l + 1)] = cell states["final_hidden"] = dec_states def expand(self, states): last_dec_states = states.final_hidden softmax_input = torch.cat([last_dec_states, states.context], -1) logits = self.expander_nn(softmax_input) return logits def cuda(self, device=None): super(RNMTPlusModel, self).cuda(device) self.residual_scaler = self.residual_scaler.cuda()
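# Note on the sqrt(0.5) residual_scaler above: for independent activations x
# and y with equal variance, Var(sqrt(0.5) * (x + y)) = 0.5 * (Var(x) +
# Var(y)) = Var(x), so the scaled residual addition keeps activation variance
# roughly constant across the stacked LSTM layers.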
import curses def color(): curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK) curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_YELLOW, curses.COLOR_BLACK) curses.init_pair(4, curses.COLOR_BLUE, curses.COLOR_BLACK) curses.init_pair(5, curses.COLOR_MAGENTA, curses.COLOR_BLACK) curses.init_pair(6, curses.COLOR_CYAN, curses.COLOR_BLACK) curses.init_pair(7, curses.COLOR_RED, curses.COLOR_WHITE) curses.init_pair(8, curses.COLOR_GREEN, curses.COLOR_WHITE) curses.init_pair(9, curses.COLOR_YELLOW, curses.COLOR_WHITE) curses.init_pair(10, curses.COLOR_BLUE, curses.COLOR_WHITE) curses.init_pair(11, curses.COLOR_MAGENTA, curses.COLOR_WHITE) curses.init_pair(12, curses.COLOR_CYAN, curses.COLOR_WHITE)
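# Hedged usage sketch: the pairs above require curses color mode, which
# curses.wrapper enables before calling the main function.
def _demo(stdscr):
    color()
    stdscr.addstr(0, 0, "hello", curses.color_pair(2))  # green on black
    stdscr.getkey()

# curses.wrapper(_demo)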
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the LGPLv3 or higher. from PyQt5.QtCore import QObject, pyqtSlot, pyqtProperty, pyqtSignal from UM.Application import Application class OperationStackProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._operation_stack = Application.getInstance().getOperationStack() self._operation_stack.changed.connect(self._onUndoStackChanged) undoStackChanged = pyqtSignal() @pyqtProperty(bool, notify=undoStackChanged) def canUndo(self): return self._operation_stack.canUndo() @pyqtProperty(bool, notify=undoStackChanged) def canRedo(self): return self._operation_stack.canRedo() @pyqtSlot() def undo(self): self._operation_stack.undo() @pyqtSlot() def redo(self): self._operation_stack.redo() def _onUndoStackChanged(self): self.undoStackChanged.emit()
#!/usr/bin/env python
import rospy
from uav_geometric_controller.msg import states, trajectory
from geometry_msgs.msg import PoseStamped, TwistStamped, AccelStamped
from tf.transformations import quaternion_from_euler

desiredPoseTopic = 'desired_pose'
desiredTwistTopic = 'desired_twist'
desiredAccelTopic = 'desired_accel'
desiredTrajectory = 'Jetson/xc'
exploration_name = '/run_autonomous_exploration'


class exp_to_traj(object):
    def __init__(self):
        rospy.init_node('trajectory_echo', anonymous=True)
        rospy.Subscriber(desiredPoseTopic, PoseStamped, self.callback_pose)
        rospy.Subscriber(desiredTwistTopic, TwistStamped, self.callback_twist)
        rospy.Subscriber(desiredAccelTopic, AccelStamped, self.callback_accel)
        self.pub = rospy.Publisher(desiredTrajectory, trajectory, queue_size=1)
        self.cmd = trajectory()
        self.cmd.b1 = [1, 0, 0]
        self.OmegaZ = 0
        self.getsPose = False
        self.getsVel = False
        self.getsAcc = False

    def callback_pose(self, msg):
        pos = msg.pose.position
        self.cmd.xc = [pos.x, pos.y, 1.0]
        # TODO: add b1
        # euler_from_quaternion(msg.pose.orientation)
        quat = msg.pose.orientation
        self.cmd.b1 = [1.0-2*quat.y*quat.y-2*quat.z*quat.z, 2*quat.x*quat.y+2*quat.z*quat.w, 0]
        self.cmd.b1dot = [-(2*quat.x*quat.y+2*quat.z*quat.w)*self.OmegaZ,
                          (1.0-2*quat.y*quat.y-2*quat.z*quat.z)*self.OmegaZ, 0]
        self.getsPose = True

    def callback_twist(self, msg):
        pos = msg.twist.linear
        rot = msg.twist.angular
        self.OmegaZ = rot.z
        # self.cmd.xc_dot = [pos.x, pos.y, pos.z]
        self.cmd.xc_dot = [0, 0, 0]
        self.getsVel = True

    def callback_accel(self, msg):
        pos = msg.accel.linear
        # self.cmd.xc_2dot = [pos.x, pos.y, pos.z]
        self.cmd.xc_2dot = [0, 0, 0]
        self.getsAcc = True

    def publish(self):
        rate = rospy.Rate(100)
        if not rospy.has_param(exploration_name):
            rospy.set_param(exploration_name, False)
        while not rospy.is_shutdown():
            try:
                if rospy.get_param(exploration_name):
                    if self.getsPose and self.getsVel and self.getsAcc:
                        self.pub.publish(self.cmd)
                else:
                    self.getsPose = False
                    self.getsVel = False
                    self.getsAcc = False
                rate.sleep()
            except rospy.ROSInterruptException:
                pass


if __name__ == '__main__':
    testobj = exp_to_traj()
    testobj.publish()
# coding: utf-8

# flake8: noqa
"""
    Idfy.Validation

    In this API you can validate signatures from the following electronic IDs (e-ID):

    - Norwegian BankId (SDO)

    Last build date for this endpoint: 12.03.2018  # noqa: E501
"""

from __future__ import absolute_import

# import models into model package
from idfy_sdk.services.validation.models.certificate import Certificate
from idfy_sdk.services.validation.models.parse_sdo_request import ParseSDORequest
from idfy_sdk.services.validation.models.parse_sdo_response import ParseSDOResponse
from idfy_sdk.services.validation.models.sdo_signers import SDOSigners
from idfy_sdk.services.validation.models.seal import Seal
from idfy_sdk.services.validation.models.signers import Signers
from idfy_sdk.services.validation.models.validate_sdo_request import ValidateSDORequest
from idfy_sdk.services.validation.models.validate_sdo_response import ValidateSDOResponse
from idfy_sdk.services.validation.models.validated_signers import ValidatedSigners
from idfy_sdk.services.validation.models.validation_error import ValidationError
from idfy_sdk.services.validation.models.x509_certificate import X509Certificate
from django.apps import AppConfig
from django.core.exceptions import ImproperlyConfigured
from django.urls import URLPattern, reverse_lazy

from oscar.core.loading import feature_hidden


class OscarConfigMixin(object):
    """
    Base Oscar app configuration mixin, used to extend
    :py:class:`django.apps.AppConfig` to also provide URL configurations
    and permissions.
    """

    # Instance namespace for the URLs
    namespace = None
    login_url = None

    #: A name that allows the functionality within this app to be disabled
    hidable_feature_name = None

    #: Maps view names to lists of permissions. We expect tuples of
    #: lists as dictionary values. A list is a set of permissions that all
    #: need to be fulfilled (AND). Only one set of permissions has to be
    #: fulfilled (OR).
    #: If there's only one set of permissions, as a shortcut, you can also
    #: just define one list.
    permissions_map = {}

    #: Default permission for any view not in permissions_map
    default_permissions = None

    def __init__(self, app_name, app_module, namespace=None, **kwargs):
        """
        kwargs:
            namespace: optionally specify the URL instance namespace
        """
        app_config_attrs = [
            'name',
            'module',
            'apps',
            'label',
            'verbose_name',
            'path',
            'models_module',
            'models',
        ]
        # Ensure subclasses do not pass kwargs that clash with the
        # attributes used by :py:class:`django.apps.AppConfig`.
        clashing_kwargs = set(kwargs).intersection(app_config_attrs)
        if clashing_kwargs:
            raise ImproperlyConfigured(
                "Passed in kwargs can't be named the same as properties of "
                "AppConfig; clashing: %s." % ", ".join(clashing_kwargs))
        super().__init__(app_name, app_module)
        if namespace is not None:
            self.namespace = namespace
        # Set all remaining kwargs as object attributes
        for key, value in kwargs.items():
            setattr(self, key, value)

    def get_urls(self):
        """
        Return the URL patterns for this app.
        """
        return []

    def post_process_urls(self, urlpatterns):
        """
        Customise URL patterns.

        This method allows decorators to be wrapped around an app's URL
        patterns. By default, this only applies custom decorators, but you
        could override this method to do anything you want.

        Args:
            urlpatterns (list): A list of URL patterns
        """
        # Test whether the URLs in this application instance should be
        # available. If the feature is hidden, we don't include the URLs.
        if feature_hidden(self.hidable_feature_name):
            return []

        for pattern in urlpatterns:
            if hasattr(pattern, 'url_patterns'):
                self.post_process_urls(pattern.url_patterns)

            if isinstance(pattern, URLPattern):
                # Apply the custom view decorator (if any) set for this class
                decorator = self.get_url_decorator(pattern)
                if decorator:
                    pattern.callback = decorator(pattern.callback)

        return urlpatterns

    def get_permissions(self, url):
        """
        Return a list of permissions for a given URL name.

        Args:
            url (str): A URL name (e.g., ``basket.basket``)

        Returns:
            list: A list of permission strings.
        """
        # Strip the namespace if the URL name is namespaced
        if url is not None and ':' in url:
            view_name = url.split(':')[1]
        else:
            view_name = url
        return self.permissions_map.get(view_name, self.default_permissions)

    def get_url_decorator(self, pattern):
        """
        Return the appropriate decorator for the view function with the
        passed URL name. Mainly used for access-protecting views.

        It's possible to specify:

        - no permissions necessary: use None
        - a set of permissions: use a list
        - two sets of permissions (`or`): use a two-tuple of lists

        See the permissions_required decorator for details.
        """
        from oscar.views.decorators import permissions_required
        permissions = self.get_permissions(pattern.name)
        if permissions:
            return permissions_required(permissions, login_url=self.login_url)

    @property
    def urls(self):
        # We set the application and instance namespace here
        return self.get_urls(), self.label, self.namespace


class OscarConfig(OscarConfigMixin, AppConfig):
    """
    Base Oscar app configuration.

    This is subclassed by each app to provide a customisable container for
    its configuration, URL configurations, and permissions.
    """


class OscarDashboardConfig(OscarConfig):
    login_url = reverse_lazy('dashboard:login')
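# --------------------------------------------------------------------------
# Illustrative only: a minimal sketch of how a concrete app might subclass
# OscarConfig to wire up URLs and permissions. The app name, view classes,
# and permission codename below are hypothetical, not taken from Oscar.
from django.urls import path

from oscar.core.application import OscarConfig  # assumed module path


class ReviewsConfig(OscarConfig):
    label = 'reviews'
    name = 'myproject.reviews'  # hypothetical app
    namespace = 'reviews'

    # 'review-moderate' requires one permission (a single list means AND);
    # 'review-list' is absent from the map, so default_permissions applies.
    permissions_map = {
        'review-moderate': ['reviews.can_moderate'],  # hypothetical codename
    }

    def get_urls(self):
        from myproject.reviews import views  # hypothetical views module
        urls = [
            path('', views.ReviewListView.as_view(), name='review-list'),
            path('moderate/', views.ReviewModerateView.as_view(),
                 name='review-moderate'),
        ]
        return self.post_process_urls(urls)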
import FWCore.ParameterSet.Config as cms source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring( '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/00D8AF8D-36E2-DE11-BEC4-001D09F24303.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/021BEBFC-5AE2-DE11-B1FF-000423D991D4.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/0297DC23-58E2-DE11-AEC2-001D09F232B9.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/08314929-41E2-DE11-BDC1-000423D8FA38.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/084F0FCB-47E2-DE11-BFFD-0016177CA778.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/0A38D516-5AE2-DE11-BAB0-001D09F28755.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/0A85BE90-45E2-DE11-8EE1-001D09F2514F.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/0E6D4C2D-43E2-DE11-B115-0030487A1FEC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/10447316-3CE2-DE11-A868-000423D991F0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1284873F-3FE2-DE11-9E93-000423D99F1E.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/168D79DB-49E2-DE11-9E66-001D09F2906A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/18C5820E-47E2-DE11-9E37-001D09F251FE.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1A1AC218-47E2-DE11-B7BC-003048D3756A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1A80C207-42E2-DE11-9380-001D09F29538.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1C3CABBE-48E2-DE11-8F9D-001D09F252DA.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1C681C0A-42E2-DE11-99EE-000423D99BF2.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1E68E9DE-34E2-DE11-B26A-000423D99AAA.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1EBC4591-36E2-DE11-A654-000423D6BA18.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/1ECC7C87-44E2-DE11-819F-000423D33970.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/202B3CF2-54E2-DE11-B542-000423D996C8.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/2085F48F-36E2-DE11-B843-000423D9863C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/223BC3ED-3DE2-DE11-AEA6-001D09F24FEC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/24D24155-37E2-DE11-9B23-0030486780B8.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/26AB7E83-55E2-DE11-8049-001D09F2424A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/2836CEBD-39E2-DE11-8472-0016177CA7A0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/28DAE662-37E2-DE11-93BC-001617DBD224.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/2A1BA28D-45E2-DE11-8150-003048D2BE08.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/2A773003-3BE2-DE11-AFA7-003048D3750A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/2CAE01BA-5BE2-DE11-B4E9-000423D98634.root', 
'/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3079CB3D-3FE2-DE11-A578-0019DB29C5FC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/307D51EE-54E2-DE11-8A62-001617E30D4A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/30F7C9C1-39E2-DE11-99EF-003048D2C108.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3A3FB16C-32E2-DE11-B56F-001D09F24024.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3AE0AA3D-3FE2-DE11-A61B-000423D99896.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3AE66E46-3FE2-DE11-A5E6-000423D992DC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3C12F6E2-33E2-DE11-AF6E-001D09F24353.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3E8B9ACE-55E2-DE11-BAEE-000423D98C20.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/3E920389-44E2-DE11-8A87-0030486780B8.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/4033E024-56E2-DE11-8CC3-001D09F25438.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/407A960E-47E2-DE11-9892-001D09F24024.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/408754C4-3EE2-DE11-B66C-001617E30D40.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/421EC88D-45E2-DE11-AE74-001617E30D12.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/441A16A4-4CE2-DE11-B7F5-001D09F24DDF.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/46143304-5BE2-DE11-BF2B-000423D8F63C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/46F8CA83-55E2-DE11-BE4E-001D09F25393.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/4A0FFB6D-57E2-DE11-BB4B-001D09F24D8A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/4AF6DBE2-33E2-DE11-B03E-003048D2C1C4.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/4C5EFC0D-47E2-DE11-A9D5-001D09F24934.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/4CF20C84-45E2-DE11-9F9A-001D09F24600.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/540D4582-50E2-DE11-9B7C-003048D2C108.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/54401722-38E2-DE11-95E7-000423D98F98.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/565E4F16-3CE2-DE11-A76F-0030487D1BCC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/56A3A963-3BE2-DE11-9FCE-001617C3B710.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/583404FE-3AE2-DE11-8564-0030487C5CFA.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/58987AE4-33E2-DE11-A432-001D09F2905B.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/58E1FD09-42E2-DE11-8B91-000423D986C4.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/5A562BEC-3DE2-DE11-9435-000423D6CA6E.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/5ADD76EC-3DE2-DE11-BC7A-000423D98F98.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/5C0ACC32-5AE2-DE11-A26D-001D09F2305C.root', 
'/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/5C8E0BD9-36E2-DE11-B679-001D09F28F1B.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/64009BEB-3DE2-DE11-B988-001617C3B77C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/668B761D-3AE2-DE11-96F7-0019B9F7312C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/6A1A76C7-3EE2-DE11-982B-001D09F253C0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/6AAA2B78-4DE2-DE11-854B-001617C3B66C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/6E3E0F77-4BE2-DE11-96D7-0030487A3C9A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/6E49C25C-57E2-DE11-87C6-0030487A18A4.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/6E7F6974-53E2-DE11-845C-000423D6CA6E.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/707B293E-3FE2-DE11-A341-000423D9A2AE.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7097C264-3BE2-DE11-A399-000423D98868.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7216D6C2-39E2-DE11-AF93-0030487A18F2.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7479F876-53E2-DE11-805D-001D09F232B9.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/74C32C38-41E2-DE11-A502-001D09F29321.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/74F58270-3AE2-DE11-85F2-000423D60FF6.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/76FB226C-32E2-DE11-9119-001617C3B76A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/781156DE-3DE2-DE11-8A6B-000423D33970.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7A3E41D9-55E2-DE11-9CC5-001617E30D4A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7C489E8F-45E2-DE11-AC28-001D09F28D4A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/7EBBBC09-42E2-DE11-A583-000423D99394.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/847C273D-5AE2-DE11-A087-001D09F28D4A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/84EFB521-38E2-DE11-93A8-000423D9880C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/8AC701C9-3EE2-DE11-93F1-001D09F24EE3.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/8C13027A-53E2-DE11-A75A-001D09F28F25.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/8EDD9D8E-45E2-DE11-AA32-001D09F29114.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/900514C8-3EE2-DE11-A456-000423D98B08.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/90357B10-3CE2-DE11-9008-000423D98F98.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/98890F7F-55E2-DE11-9D36-001D09F29597.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/9AAD9691-36E2-DE11-B3CB-001D09F24024.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/A0EC2860-3BE2-DE11-9351-001D09F241F0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/A0F5E12C-43E2-DE11-BF0B-001D09F29538.root', 
'/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/A2F0E963-57E2-DE11-8AAD-001617C3B778.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/A4B77F7A-55E2-DE11-BA99-001D09F29538.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/ACD83B24-3AE2-DE11-A1D3-000423D98B6C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/B02B247C-4DE2-DE11-AF19-000423D99660.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/B2A7B072-42E2-DE11-B66D-003048D2C108.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/B2CBF2A2-4CE2-DE11-8370-001D09F25401.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/B88A1025-56E2-DE11-8E94-001D09F2462D.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/BA2226C4-3EE2-DE11-8575-001617C3B6FE.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/BCE2068B-43E2-DE11-894A-000423D944F0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/BE66658D-43E2-DE11-AD94-0030487C5CFA.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/C401F6E4-4BE2-DE11-B0E0-0019B9F72BFF.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/C4567978-3AE2-DE11-BB2B-000423D987E0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/C60EA517-47E2-DE11-B8DF-003048D2C108.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/C660A173-42E2-DE11-96AB-003048D2C020.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/CA2716E3-33E2-DE11-AD99-001D09F282F5.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/CAD39E82-50E2-DE11-90DF-003048D37580.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/CCD8CA91-36E2-DE11-9D7F-001617DBD224.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/CCE81478-55E2-DE11-9B38-000423D94E70.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/CEA81CE4-33E2-DE11-B990-001D09F23174.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/D0E9A1D8-49E2-DE11-A211-0030487C6062.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/D4BC907F-41E2-DE11-9739-0019DB29C5FC.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/D6CFC672-3AE2-DE11-A2B1-001D09F231B0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/DC498E79-50E2-DE11-9F20-0030486730C6.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E051FC63-37E2-DE11-ABDD-001D09F2512C.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E0A770D7-49E2-DE11-B96D-001D09F276CF.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E41ABF0A-42E2-DE11-AD1E-000423D99658.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E47CDFC2-39E2-DE11-88F6-001D09F231B0.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E64801C9-39E2-DE11-83CE-001617C3B79A.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E66B4474-4BE2-DE11-8E0E-001617C3B5E4.root', '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E6F0F717-47E2-DE11-AC1D-000423D99660.root', 
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E838B5EC-3DE2-DE11-8E2A-001617C3B6DE.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E8703F24-58E2-DE11-93B6-001D09F2514F.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/E8EEE2C7-47E2-DE11-8B53-000423D952C0.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/EA4139DF-34E2-DE11-A198-000423D6CA02.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/EA6B7FFA-3AE2-DE11-883B-001617C3B6C6.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/EC98D47F-59E2-DE11-88BD-000423D6006E.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/EE2A7591-36E2-DE11-844C-001617E30D4A.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/EEA5E5C6-47E2-DE11-A3AF-001D09F25438.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F2DF3D5D-57E2-DE11-8762-001D09F2512C.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F6B4F70D-5AE2-DE11-92B2-001D09F2983F.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F6EF8FA7-4CE2-DE11-B192-001D09F291D7.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F82DED93-36E2-DE11-9316-000423D9870C.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F8A6D469-32E2-DE11-93F8-003048D2C020.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/F8FEFD77-4DE2-DE11-A384-000423D951D4.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FAC74EFB-3AE2-DE11-A244-003048D37580.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FC0AF969-32E2-DE11-BCB5-000423D94908.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FCBE6855-37E2-DE11-97D1-0030487A3232.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FE03FBF4-54E2-DE11-93BA-001D09F2426D.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FE0807BE-48E2-DE11-9A06-001D09F28F25.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FE90F72C-43E2-DE11-8E89-001D09F2B30B.root',
    '/store/express/BeamCommissioning09/ExpressPhysics/FEVT/v2/000/123/596/FEE389F2-33E2-DE11-A62E-001617C3B76E.root'
    )
)
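# --------------------------------------------------------------------------
# Illustrative only: a minimal sketch of how a source fragment like the one
# above is consumed from a top-level CMSSW configuration. The process name
# "ANALYSIS" and the fragment module path are hypothetical.
import FWCore.ParameterSet.Config as cms

process = cms.Process("ANALYSIS")

# Re-use the PoolSource defined above, e.g. via
#   from MyPackage.MySubsystem.sourceFragment_cff import source
process.source = source

# Limit the number of events to process; -1 would mean "all events".
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(100))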