blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c7d7a4883c4ab514cadf97faae9ff73459bd33ab | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/139/usersdata/165/59838/submittedfiles/diagonaldominante.py | 075feccac89be566766f1e4839a07d0f21253793 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | # -*- coding: utf-8 -*-
import numpy as np
def diagonaldominante(a):
    """Return True if the square matrix a is strictly diagonally dominant.

    A matrix is strictly diagonally dominant when, for every row i,
    |a[i,i]| > sum of |a[i,j]| for all j != i.

    a - a 2D numpy array (square matrix)

    Fixes over the original submission: `soma` is now initialised (and
    reset) for every row, absolute values are used as the definition
    requires, and the comparison is no longer inverted.
    """
    for i in range(0, a.shape[0], 1):
        # Sum of absolute values of the whole row, diagonal included.
        soma = 0
        for j in range(0, a.shape[1], 1):
            soma = soma + abs(a[i, j])
        # Remove the diagonal entry to get the off-diagonal sum.
        soma = soma - abs(a[i, i])
        if abs(a[i, i]) <= soma:
            return False
    return True
# Read the matrix order n, fill an n x n matrix entry by entry from user
# input, then report whether it is diagonally dominant
# ('SIM' = yes, 'NAO' = no; prompts are in Portuguese).
n=int(input('digite a ordem da matriz:'))
a=np.zeros((n,n))
for i in range(0,a.shape[0],1):
    for j in range(0,a.shape[1],1):
        # Entries are read row by row as integers.
        a[i,j]=int(input('digite um numero:'))
if diagonaldominante(a)==True:
    print('SIM')
else:
    print('NAO')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
28444482d26ea8121cb836029ad3e93d17809a1f | 1b48b3980abbe11691310a7f35efef62bc0ae831 | /Qt/QState/rogue.py | 54a6897f40b4217183ae0b7b120dd1c64d4ca3cb | [] | no_license | FXTD-ODYSSEY/MayaScript | 7619b1ebbd664988a553167262c082cd01ab80d5 | 095d6587d6620469e0f1803d59a506682714da17 | refs/heads/master | 2022-11-05T08:37:16.417181 | 2022-10-31T11:50:26 | 2022-10-31T11:50:26 | 224,664,871 | 45 | 11 | null | null | null | null | UTF-8 | Python | false | false | 7,836 | py |
#############################################################################
##
## Copyright (C) 2010 velociraptor Genjix <aphidia@hotmail.com>
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing/
##
## This file is part of the Qt for Python examples of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of The Qt Company Ltd nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
##
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
## $QT_END_LICENSE$
##
#############################################################################
import os
import sys
# Walk up from this file's directory until a directory containing ".git"
# is found; that directory is taken as the repository root (None if the
# filesystem root is reached without finding one).
repo = (lambda f:lambda p=__file__:f(f,p))(lambda f,p: p if [d for d in os.listdir(p if os.path.isdir(p) else os.path.dirname(p)) if d == '.git'] else None if os.path.dirname(p) == p else f(f,os.path.dirname(p)))()
# Put the vendored Qt.py shim on sys.path (once) before importing Qt.
MODULE = os.path.join(repo,'_vendor','Qt')
sys.path.insert(0,MODULE) if MODULE not in sys.path else None
from Qt.QtGui import *
from Qt.QtCore import *
from Qt.QtWidgets import *
# Transition taken when one of the numpad movement keys (2/4/6/8) is pressed.
class MovementTransition(QEventTransition):
    """Key-press transition that moves the player on 2/4/6/8 (numpad)."""

    def __init__(self, window):
        """Watch *window* for key-press events."""
        super(MovementTransition, self).__init__(window, QEvent.KeyPress)
        self.window = window

    def eventTest(self, event):
        """Accept only state-machine-wrapped presses of the movement keys."""
        if event.type() != QEvent.StateMachineWrapped:
            return False
        inner = event.event()
        if inner.type() != QEvent.KeyPress:
            return False
        return inner.key() in (Qt.Key_2, Qt.Key_4, Qt.Key_6, Qt.Key_8)

    def onTransition(self, event):
        """Move the player in the direction matching the pressed key."""
        key = event.event().key()
        directions = {
            Qt.Key_4: self.window.Left,
            Qt.Key_8: self.window.Up,
            Qt.Key_6: self.window.Right,
            Qt.Key_2: self.window.Down,
        }
        if key in directions:
            self.window.movePlayer(directions[key])
class Custom(QState):
    """QState that prints the main window's status string on entry."""

    def __init__(self, parent, mw):
        """Keep a reference to the main window *mw* for later use."""
        super(Custom, self).__init__(parent)
        self.mw = mw

    def onEntry(self, e):
        """Dump the window's current status to stdout when entered."""
        print(self.mw.status)
class MainWindow(QMainWindow):
    """Main window of the rogue demo.

    Draws an ASCII map in a fixed-pitch font and drives the movement /
    quit-confirmation logic through a QStateMachine built in
    buildMachine().
    """

    def __init__(self):
        """Pick a fixed-pitch font, build the map and state machine, show."""
        super(MainWindow, self).__init__()
        # Player position in tile coordinates.
        self.pX = 5
        self.pY = 5
        # Map size in tiles.  NOTE(review): these attributes shadow
        # QWidget.width()/height(); the class only ever reads them as ints.
        self.width = 35
        self.height = 20
        self.statusStr = ''
        # Prefer 'Monospace'; otherwise fall back to the last fixed-pitch
        # family found in the font database.
        database = QFontDatabase()
        font = QFont()
        if 'Monospace' in database.families():
            font = QFont('Monospace', 12)
        else:
            for family in database.families():
                if database.isFixedPitch(family):
                    font = QFont(family, 12)
        self.setFont(font)
        self.setupMap()
        self.buildMachine()
        self.show()

    def setupMap(self):
        """Generate a random map: '#' walls on the border and roughly one
        in forty interior tiles, '.' floor elsewhere.  Stored column-major
        as self.map[x][y]."""
        self.map = []
        qsrand(QTime(0, 0, 0).secsTo(QTime.currentTime()))
        for x in range(self.width):
            column = []
            for y in range(self.height):
                if x == 0 or x == self.width - 1 or y == 0 or \
                   y == self.height - 1 or qrand() % 40 == 0:
                    column.append('#')
                else:
                    column.append('.')
            self.map.append(column)

    def buildMachine(self):
        """Wire up the state machine: an input state (movement), a quit
        confirmation state, and a final state that quits the app."""
        machine = QStateMachine(self)
        inputState = Custom(machine, self)
        # this line sets the status
        self.status = 'hello!'
        # however this line does not
        inputState.assignProperty(self, 'status', 'Move the rogue with 2, 4, 6, and 8')
        transition = MovementTransition(self)
        inputState.addTransition(transition)
        # Pressing Q asks for quit confirmation; Y quits, N returns.
        quitState = QState(machine)
        quitState.assignProperty(self, 'status', 'Really quit(y/n)?')
        yesTransition = QKeyEventTransition(self, QEvent.KeyPress, Qt.Key_Y)
        self.finalState = QFinalState(machine)
        yesTransition.setTargetState(self.finalState)
        quitState.addTransition(yesTransition)
        noTransition = QKeyEventTransition(self, QEvent.KeyPress, Qt.Key_N)
        noTransition.setTargetState(inputState)
        quitState.addTransition(noTransition)
        quitTransition = QKeyEventTransition(self, QEvent.KeyPress, Qt.Key_Q)
        quitTransition.setTargetState(quitState)
        inputState.addTransition(quitTransition)
        machine.setInitialState(inputState)
        machine.finished.connect(QApplication.quit)
        machine.start()

    def sizeHint(self):
        """Size the window to exactly fit the map plus one status line."""
        metrics = QFontMetrics(self.font())
        return QSize(metrics.width('X') * self.width, metrics.height() * (self.height + 1))

    def paintEvent(self, event):
        """Paint the status line, the map, and the player ('@') on top."""
        metrics = QFontMetrics(self.font())
        painter = QPainter(self)
        fontHeight = metrics.height()
        fontWidth = metrics.width('X')
        painter.fillRect(self.rect(), Qt.black)
        painter.setPen(Qt.white)
        yPos = fontHeight
        painter.drawText(QPoint(0, yPos), self.status)
        for y in range(self.height):
            yPos += fontHeight
            xPos = 0
            for x in range(self.width):
                # Skip the player's tile; the '@' is drawn separately below.
                if y == self.pY and x == self.pX:
                    xPos += fontWidth
                    continue
                painter.drawText(QPoint(xPos, yPos), self.map[x][y])
                xPos += fontWidth
        painter.drawText(QPoint(self.pX * fontWidth, (self.pY + 2) * fontHeight), '@')

    def movePlayer(self, direction):
        """Move the player one tile in *direction* unless the destination
        is a wall ('#'), then repaint."""
        if direction == self.Left:
            if self.map[self.pX - 1][self.pY] != '#':
                self.pX -= 1
        elif direction == self.Right:
            if self.map[self.pX + 1][self.pY] != '#':
                self.pX += 1
        elif direction == self.Up:
            if self.map[self.pX][self.pY - 1] != '#':
                self.pY -= 1
        elif direction == self.Down:
            if self.map[self.pX][self.pY + 1] != '#':
                self.pY += 1
        self.repaint()

    def getStatus(self):
        """Getter backing the 'status' Qt property."""
        return self.statusStr

    def setStatus(self, status):
        """Setter backing the 'status' Qt property; repaints immediately
        so assignProperty() updates become visible."""
        self.statusStr = status
        self.repaint()

    # Qt property used by QState.assignProperty() in buildMachine().
    status = Property(str, getStatus, setStatus)
    # Direction codes understood by movePlayer().
    Up = 0
    Down = 1
    Left = 2
    Right = 3
    # NOTE(review): apparently unused duplicates of the instance-level
    # self.width/self.height set in __init__ — confirm before removing.
    Width = 35
    Height = 20
# Script entry point: create the Qt application and main window, then run
# the event loop until the state machine quits the application.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    mainWin = MainWindow()
    sys.exit(app.exec_())
| [
"timmyliang@tencent.com"
] | timmyliang@tencent.com |
38ae20856df89827f8a00aa25a5e08cb9eb6ceea | 04975a41eb459f1528dcbdcb1143a3cb535aa620 | /Array_easy/leetcode_1480.py | e74dd07db830ae28ab16b1a7251a89f541404fe5 | [] | no_license | RickLee910/Leetcode_easy | 2a50d632379826979a985e1b9950d4cf6bbd8b18 | c2687daf334f96a908737067bb915b8b072d0d56 | refs/heads/master | 2023-01-29T11:09:26.701243 | 2020-12-02T04:36:14 | 2020-12-02T04:36:14 | 294,952,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | class Solution:
def runningSum(self, nums):
temp = []
for i in range(len(nums)):
temp.append(sum(nums[0:i + 1]))
return temp
| [
"13554543910@163.com"
] | 13554543910@163.com |
fe5a15533c33ffc5a7c712c95bd101c436d9abeb | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_211/ch11_2020_03_05_22_17_54_209757.py | 0a1e69d816050755f964c115c59f668d1f8cb2ac | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | import math
def distancia_euclidiana(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2).

    Fixes over the original submission: `mat.sqrt` referenced a module
    that was never imported (the file imports `math`), and the function
    returned the undefined name `dist_` instead of `dist`.
    """
    dx = x2 - x1
    dy = y2 - y1
    dist = math.sqrt(dx ** 2 + dy ** 2)
    return dist
| [
"you@example.com"
] | you@example.com |
013d8ebbd04d376cb034689339509448b298d55a | d7d26c42cd541417edcd7b1992027286ecef7f04 | /venv/Scripts/pip-script.py | ac7ceef7b30414cd771175275ec59f973fbcdf41 | [] | no_license | plutoese/pluto_archive | bfba8df48ee5639a2666b33432004519b93ecbf7 | e6ea64aaf867fd0433714293eb65a18a28d3136d | refs/heads/master | 2021-10-22T14:46:20.540770 | 2019-03-11T12:31:08 | 2019-03-11T12:31:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | #!D:\github\pluto\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
# Auto-generated setuptools console-script wrapper for pip 10.0.1.
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so argv[0] reads as plain "pip".
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Resolve and invoke pip's console entry point, exiting with its status.
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
    )
| [
"glen.zhang7@gmail.com"
] | glen.zhang7@gmail.com |
cfbbb16014049c04dd2b6265a388c8013940b803 | 6d8ebfaf95299fa7fa892db4565f3597a72f5219 | /rest_mongo/fileutils.py | 58273a1ebc278d89e96d80a745f932d772670277 | [] | no_license | videntity/georegistry | 30aecec862f10d364cb72ce391656dc8d2a0d794 | 44fcda20d669650d1efbfee4907986654fd6d931 | refs/heads/master | 2021-01-23T20:13:20.899937 | 2011-06-11T13:15:36 | 2011-06-11T13:15:36 | 1,880,133 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,346 | py | #!/usr/bin/env python
from django.conf import settings
import sys, os
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import mimetypes
from datetime import datetime, timedelta
"""
simpleS3.py
"""
#
# simpleS3.py
#
# By: Alan Viars
# Copyright Videntity Systems, Inc. 2009
# All rights Reseved.
# License: New BSD
# Last Updated: May 9, 2009
#
# This was tested using Python 2.5 and Ubuntu Linux, but
# it should run fine w/ other configurations.
# You will need to install boto to get this library running
# and of course you need an S3 account from Amazon.
# See http://aws.amazon.com
#
# NOTES ON INSTALLING BOTO:
# 1.7.a is latestversion of boto at the time of writing.
# Execute the following from a command line prompt
# > wget http://boto.googlecode.com/files/boto-1.7a.tar.gz
# > tar zxvf boto-1.7a.tar.gz
# > cd boto-1.7a
# Run this as root or w/ admin privileges
# > python setup.py install
# > if on Ubuntu or Debian deravitive, use sudo like so:
# > sudo python setup.py install
#Set these to match your Amazon S3 Account
# Placeholder credentials kept at module level; deployments should supply
# real keys (the Django settings module also carries AWS credentials).
AWS_ACCESS_KEY= '*****PUT_YOUR_KEY_HERE****'
AWS_SECRET_ACCESS_KEY='*****PUT_YOUR_SECRET_KEY_HERE****'
class SimpleS3:
    """
    A very simple class library to store
    and retrieve files in Amazon S3.
    Works with HTTPS/port 443 only (no HTTP/port 80).
    """

    def store_in_s3(self, bucket, filename, local_filepath, public=False):
        """Upload local_filepath to S3 as <bucket>/<filename>.

        Returns the https URL of the stored object, or "" on any failure
        (errors are deliberately swallowed to keep the original
        best-effort semantics).  Set public=True to make the object
        world-readable.
        """
        url = ""
        try:
            conn = S3Connection(settings.AWS_KEY,
                                settings.AWS_SECRET)
            b = conn.create_bucket(bucket)
            k = Key(b)
            k.key = filename
            # Guess the MIME type from the file extension; fall back to a
            # generic binary type when there is no recognisable extension.
            mime = mimetypes.guess_type(filename)[0]
            if mime is None:
                mime = "application/octet-stream"
            k.set_metadata("Content-Type", mime)
            k.set_contents_from_filename(local_filepath)
            if public:
                k.set_acl("public-read")
            url = "https://%s.s3.amazonaws.com/%s" % (bucket, k.key)
        except Exception:
            # Best effort: any failure simply yields the empty string.
            # (The original used "return url" inside a bare except and a
            # finally block, which also hid programming errors.)
            pass
        return url

    def get_from_s3(self, bucket, filename, local_filepath):
        """Download <bucket>/<filename> from S3 into local_filepath.

        Returns True on success, False on any failure.  Fixes over the
        original: the method was missing ``self`` (so it could never be
        called on an instance) and used the module-level placeholder
        credentials instead of the configured settings used by
        store_in_s3.
        """
        retval = False
        try:
            conn = S3Connection(settings.AWS_KEY,
                                settings.AWS_SECRET)
            b = conn.create_bucket(bucket)
            k = Key(b)
            k.key = filename
            k.get_contents_to_filename(local_filepath)
            retval = True
        except Exception:
            # Best effort: failures are reported via the False return.
            pass
        return retval
# Our MAIN application which takes 3 command line arguments
# Take in a mode, bucketname, filename, and public T/F.
# if mode=PUT, then store the file in S3
# If mode=GET, then read the file from S3,
# and write it to local disk
def handle_uploaded_file(file, user, uuid):
    """Save an uploaded file under MEDIA_ROOT/<username>/ and, depending on
    settings.BINARY_STORAGE, either reference it locally or push it to S3.

    file - the uploaded file object (Django-style: has .name and .chunks())
    user - object with a .username attribute
    uuid - suffix appended to the stored file name to avoid collisions

    Returns a dict with keys 'localfilename' and 'urli' (and 'errors' when
    something went wrong).  Python 2 code (note the print statement).
    """
    responsedict={'localfilename':None,
                  'urli': None
                  }
    #create folder name for the api username
    dirname = '%s/%s/' %(settings.MEDIA_ROOT, user.username)
    try:
        #create directory if it doesn't exist
        if not os.path.isdir(dirname):
            os.mkdir(dirname)
        #get a timestamp
        # NOTE(review): time_str is computed but never used afterwards.
        current_time = str(datetime.utcnow())
        time_str=current_time.replace(" ", '_')
        #create file name by using current datetime
        new_file_name='%s_%s' %(file.name, uuid)
        #create the entire directory string
        file_name='%s%s' %(dirname, new_file_name)
        #open to write
        destination = open(file_name, 'wb')
        #write out in chunks
        for chunk in file.chunks():
            destination.write(chunk)
        destination.close()
        # Keep the absolute path, then rebase file_name to the relative
        # "<username>/<file>" form used for URLs.
        full_path=file_name
        file_name="%s/%s" %(user.username, new_file_name)
    except:
        # Bare except: any failure (IO, attribute, etc.) is reported back
        # to the caller via the 'errors' key.
        responsedict['errors']="There was an error uploading your file."
        print sys.exc_info()
        return responsedict
    if settings.BINARY_STORAGE=='LOCAL':
        responsedict['localfilename']="file://%s" % (full_path)
        responsedict['urli']=file_name
    elif settings.BINARY_STORAGE=='AWSS3':
        s=SimpleS3()
        # NOTE(review): the third argument is the *local* file path, but
        # file_name was just rebased to the relative URL form; full_path
        # looks like the intended value — confirm before changing.
        responsedict['urli']=s.store_in_s3 (settings.AWS_BUCKET,
                                            new_file_name,
                                            file_name,
                                            settings.AWS_PUBLIC)
        if responsedict['urli']=="":
            responsedict['errors']="AWS S3 file %s upload failed" % (new_file_name)
    return responsedict
"aviars@videntity.com"
] | aviars@videntity.com |
30c0adc1bf302aa332f6238ab1dc498667a73b8f | 7c74ceb9f8addcc0816d012e0b84b174b96e0def | /src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_origin_scenarios.py | 686c7b83182a5be9b4008bd6a6a38bd57a46f58c | [
"MIT",
"LGPL-2.1-only",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.0-or-later",
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | microsoft/azure-cli | 4c826290e7a6f6bd27da3829b05e4f02ff6dc8d9 | 9ba64b33f6f78e2c3e42f8a147f59484300e8779 | refs/heads/dev | 2023-08-31T08:51:39.526556 | 2022-11-28T19:08:23 | 2022-11-28T19:08:23 | 370,900,439 | 7 | 7 | MIT | 2023-08-01T23:34:50 | 2021-05-26T03:59:41 | Python | UTF-8 | Python | false | false | 14,702 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import ResourceGroupPreparer, ScenarioTest, JMESPathCheck, record_only
from .scenario_mixin import CdnScenarioMixin
class CdnOriginScenarioTest(CdnScenarioMixin, ScenarioTest):
    """Live-recorded scenario test covering the full CRUD lifecycle of CDN
    origins and origin groups on a Standard_Microsoft profile."""

    @record_only()  # This test relies on a specific subscription with existing resources
    @ResourceGroupPreparer()
    def test_origin_crud(self, resource_group):
        """Exercise origin/origin-group create, list, show, update and
        delete, including private-link settings and the endpoint's
        default origin group."""
        pls_subscription_id = '27cafca8-b9a4-4264-b399-45d0c9cca1ab'
        # Workaround for overly heavy-handed subscription id replacement in playback mode.
        if self.is_playback_mode():
            pls_subscription_id = '00000000-0000-0000-0000-000000000000'
        private_link_id = f'/subscriptions/{pls_subscription_id}/resourceGroups/cdn-sdk-test/providers/Microsoft.Network/privateLinkServices/cdn-sdk-pls-test'
        private_link_location = 'EastUS'
        private_link_message = 'Please approve the request'

        # Create a profile and an endpoint with a single origin.
        profile_name = 'profile123'
        self.profile_create_cmd(resource_group, profile_name, sku='Standard_Microsoft')
        endpoint_name = self.create_random_name(prefix='endpoint', length=24)
        origin_host = 'www.example.com'
        self.endpoint_create_cmd(resource_group, endpoint_name, profile_name, origin_host)

        checks = [JMESPathCheck('length(origins)', 1)]
        endpoint = self.endpoint_show_cmd(resource_group, endpoint_name, profile_name, checks=checks)
        origin1_id = f'{endpoint.json_value["id"]}/origins/{endpoint.json_value["origins"][0]["name"]}'
        origin1_name = endpoint.json_value['origins'][0]['name']

        checks = [JMESPathCheck('name', origin1_name),
                  JMESPathCheck('hostName', origin_host)]
        self.origin_show_cmd(resource_group,
                             endpoint_name,
                             profile_name,
                             endpoint.json_value['origins'][0]['name'],
                             checks=checks)

        checks = [JMESPathCheck('length(@)', 1),
                  JMESPathCheck('@[0].name', origin1_name),
                  JMESPathCheck('@[0].hostName', origin_host)]
        self.origin_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)

        # Create an origin group.
        origin_group_name = 'test-origin-group'
        checks = [JMESPathCheck('name', origin_group_name),
                  JMESPathCheck('length(origins)', 1)]
        origin_group = self.origin_group_create_cmd(resource_group, origin_group_name, endpoint_name, profile_name,
                                                    origins=origin1_id, checks=checks)

        checks = [JMESPathCheck('length(@)', 1),
                  JMESPathCheck('@[0].name', origin_group_name)]
        self.origin_group_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)

        checks = [JMESPathCheck('name', origin_group_name)]
        self.origin_group_show_cmd(resource_group, origin_group_name, endpoint_name, profile_name, checks=checks)

        # Set the default origin group by name and ID.
        default_origin_group_id = origin_group.json_value['id']
        checks = [JMESPathCheck('defaultOriginGroup.id', default_origin_group_id, case_sensitive=False)]
        self.endpoint_update_cmd(resource_group,
                                 endpoint_name,
                                 profile_name,
                                 options=f"--default-origin-group={origin_group_name}",
                                 checks=checks)
        self.endpoint_update_cmd(resource_group,
                                 endpoint_name,
                                 profile_name,
                                 options=f"--default-origin-group={default_origin_group_id}",
                                 checks=checks)

        # Create second and third origins.
        origin2_name = self.create_random_name(prefix='origin', length=24)
        origin2_hostname = 'abc.contoso.com'
        http_port = 8080
        https_port = 8443
        origin_host_header = 'abc.contoso.com'
        disabled = False
        weight = 534
        priority = 3
        private_link_id = private_link_id
        private_link_location = private_link_location
        private_link_message = private_link_message
        checks = [JMESPathCheck('name', origin2_name),
                  JMESPathCheck('hostName', origin2_hostname),
                  JMESPathCheck('httpPort', http_port),
                  JMESPathCheck('httpsPort', https_port),
                  JMESPathCheck('originHostHeader', origin_host_header),
                  JMESPathCheck('enabled', not disabled),
                  JMESPathCheck('weight', weight),
                  JMESPathCheck('priority', priority)]
        self.origin_create_cmd(resource_group, origin2_name, endpoint_name, profile_name,
                               host_name=origin2_hostname,
                               http_port=http_port,
                               https_port=https_port,
                               origin_host_header=origin_host_header,
                               disabled=disabled,
                               weight=weight,
                               priority=priority,
                               private_link_id=private_link_id,
                               private_link_location=private_link_location,
                               private_link_message=private_link_message,
                               checks=checks)
        checks = [JMESPathCheck('length(@)', 2)]
        origins = self.origin_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)

        # Third origin is created disabled.
        disabled = True
        origin3_name = self.create_random_name(prefix='origin', length=24)
        checks = [JMESPathCheck('name', origin3_name),
                  JMESPathCheck('hostName', origin2_hostname),
                  JMESPathCheck('httpPort', http_port),
                  JMESPathCheck('httpsPort', https_port),
                  JMESPathCheck('originHostHeader', origin_host_header),
                  JMESPathCheck('enabled', not disabled),
                  JMESPathCheck('weight', weight),
                  JMESPathCheck('priority', priority)]
        self.origin_create_cmd(resource_group, origin3_name, endpoint_name, profile_name,
                               host_name=origin2_hostname,
                               http_port=http_port,
                               https_port=https_port,
                               origin_host_header=origin_host_header,
                               disabled=disabled,
                               weight=weight,
                               priority=priority,
                               private_link_id=private_link_id,
                               private_link_location=private_link_location,
                               private_link_message=private_link_message,
                               checks=checks)
        checks = [JMESPathCheck('length(@)', 3)]
        origins = self.origin_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)

        # Create a second origin group.
        origin2_id = origins.json_value[1]["id"]
        origin3_id = origins.json_value[2]["id"]
        origin_group_2_name = 'test-origin-group-2'
        probe_method = 'GET'
        probe_path = "/healthz"
        probe_protocol = "Https"
        probe_interval = 120
        # Uncomment these once support for response error detection is added in RP
        # response_error_detection_error_types = 'TcpErrorsOnly'
        # response_error_detection_failover_threshold = 5
        # response_error_detection_status_code_ranges = '300-310,400-599'
        checks = [JMESPathCheck('name', origin_group_2_name),
                  JMESPathCheck('origins[0].id', origin2_id, case_sensitive=False),
                  JMESPathCheck('origins[1].id', origin3_id, case_sensitive=False),
                  JMESPathCheck('healthProbeSettings.probeRequestType', probe_method),
                  JMESPathCheck('healthProbeSettings.probePath', probe_path),
                  JMESPathCheck('healthProbeSettings.probeProtocol', probe_protocol),
                  JMESPathCheck('healthProbeSettings.probeIntervalInSeconds', probe_interval),
                  JMESPathCheck('responseBasedOriginErrorDetectionSettings', None)]
        self.origin_group_create_cmd(resource_group, origin_group_2_name, endpoint_name, profile_name,
                                     origins=f'{origin2_name},{origin3_name}',
                                     probe_method=probe_method,
                                     probe_path=probe_path,
                                     probe_protocol=probe_protocol,
                                     probe_interval=probe_interval,
                                     # Uncomment these once support for response error detection is added in RP
                                     # response_error_detection_error_types=response_error_detection_error_types,
                                     # response_error_detection_failover_threshold=response_error_detection_failover_threshold,
                                     # response_error_detection_status_code_ranges=response_error_detection_status_code_ranges,
                                     checks=checks)

        checks = [JMESPathCheck('name', origin_group_name)]
        self.origin_group_show_cmd(resource_group, origin_group_name, endpoint_name, profile_name, checks=checks)
        checks = [JMESPathCheck('length(@)', 2)]
        self.origin_group_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)

        # Delete the second origin group.
        self.origin_group_delete_cmd(resource_group, origin_group_2_name, endpoint_name, profile_name)
        checks = [JMESPathCheck('length(@)', 1)]
        # NOTE(review): `checks` is built but not passed to this call —
        # likely missing checks=checks; confirm intent.
        self.origin_group_list_cmd(resource_group, endpoint_name, profile_name)

        # Update the first origin group.
        origins_list = f'{origin1_id},{origin2_name}'
        probe_method = 'GET'
        probe_path = "/healthz"
        probe_protocol = "Https"
        probe_interval = 60
        # Uncomment these once support for response error detection is added in RP
        # error_types = 'TcpAndHttpErrors'
        # failover_threshold = 15
        # status_code_ranges = '300-310,400-599'
        checks = [JMESPathCheck('name', origin_group_name),
                  JMESPathCheck('origins[0].id', origin1_id, case_sensitive=False),
                  JMESPathCheck('origins[1].id', origin2_id, case_sensitive=False),
                  JMESPathCheck('healthProbeSettings.probeRequestType', probe_method),
                  JMESPathCheck('healthProbeSettings.probePath', probe_path),
                  JMESPathCheck('healthProbeSettings.probeProtocol', probe_protocol),
                  JMESPathCheck('healthProbeSettings.probeIntervalInSeconds', probe_interval),
                  JMESPathCheck('responseBasedOriginErrorDetectionSettings', None)]
        self.origin_group_update_cmd(resource_group,
                                     origin_group_name,
                                     endpoint_name,
                                     profile_name,
                                     origins=origins_list,
                                     probe_method=probe_method,
                                     probe_path=probe_path,
                                     probe_protocol=probe_protocol,
                                     probe_interval=probe_interval,
                                     # Uncomment these once support for response error detection is added in RP
                                     # error_types=error_types,
                                     # failover_threshold=failover_threshold,
                                     # status_code_ranges=status_code_ranges,
                                     checks=checks)

        # Validate that unset fields aren't modified
        self.origin_group_update_cmd(resource_group,
                                     origin_group_name,
                                     endpoint_name,
                                     profile_name,
                                     origins=origins_list,
                                     checks=checks)

        # Update the first origin.
        origin_name = origins.json_value[0]["name"]
        checks = [JMESPathCheck('name', origin_name),
                  JMESPathCheck('httpPort', 8080),
                  JMESPathCheck('httpsPort', 8443),
                  JMESPathCheck('privateLinkResourceId', private_link_id),
                  JMESPathCheck('privateLinkLocation', private_link_location),
                  JMESPathCheck('privateLinkApprovalMessage', private_link_message)]
        self.origin_update_cmd(resource_group,
                               origin_name,
                               endpoint_name,
                               profile_name,
                               http_port='8080',
                               https_port='8443',
                               origin_host_header=origin_host_header,
                               disabled=True,
                               priority=priority,
                               weight=weight,
                               private_link_id=private_link_id,
                               private_link_location=private_link_location,
                               private_link_message=private_link_message,
                               checks=checks)
        checks = [JMESPathCheck('name', origin_name),
                  JMESPathCheck('httpPort', 8080),
                  JMESPathCheck('httpsPort', 8443),
                  JMESPathCheck('privateLinkResourceId', private_link_id),
                  JMESPathCheck('privateLinkLocation', private_link_location),
                  JMESPathCheck('privateLinkApprovalMessage', private_link_message)]
        self.origin_show_cmd(resource_group,
                             endpoint_name,
                             profile_name,
                             origin_name,
                             checks=checks)

        # Delete the second origin.
        self.origin_delete_cmd(resource_group, origin3_name, endpoint_name, profile_name)
        checks = [JMESPathCheck('length(@)', 2)]
        self.origin_list_cmd(resource_group, endpoint_name, profile_name, checks=checks)
| [
"noreply@github.com"
] | microsoft.noreply@github.com |
16a07c4a5130861ee929c4c6aa071b6b522282fa | ce29884aa23fbb74a779145046d3441c619b6a3c | /hot/101.py | 59780c0d19d68e9723dd52964b189a62795bf5f8 | [] | no_license | gebijiaxiaowang/leetcode | 6a4f1e3f5f25cc78a5880af52d62373f39a546e7 | 38eec6f07fdc16658372490cd8c68dcb3d88a77f | refs/heads/master | 2023-04-21T06:16:37.353787 | 2021-05-11T12:41:21 | 2021-05-11T12:41:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | #!/usr/bin/python3.7
# -*- coding: utf-8 -*-
# @Time : 2020/11/18 21:40
# @Author : dly
# @File : 101.py
# @Desc :
# Symmetric binary tree (LeetCode 101)
class Solution(object):
    def isSymmetric(self, root):
        """
        Return True if the tree is a mirror of itself around its centre.

        :type root: TreeNode
        :rtype: bool
        """
        if not root:
            return True
        # Iterative mirror check: compare pairs of nodes that must mirror
        # each other, starting with the root's two subtrees.
        pending = [(root.left, root.right)]
        while pending:
            left, right = pending.pop()
            if left is None and right is None:
                continue
            if left is None or right is None:
                return False
            if left.val != right.val:
                return False
            # Outer children mirror each other, as do inner children.
            pending.append((left.left, right.right))
            pending.append((left.right, right.left))
        return True
| [
"1083404373@qq.com"
] | 1083404373@qq.com |
f0706f295a0f5c0439315b98c658cffe45b19867 | 571d36f865b545c0a72134e586fbcddd6953a68b | /eng/tilemap.py | eccae387cca8ac4c2300f94fc619d7cc4e784680 | [] | no_license | andrew-turner/Ditto | a5a79faaf31cc44d08ac5f70fa2ac51e51d1b60f | 72841fc503c716ac3b524e42f2311cbd9d18a092 | refs/heads/master | 2020-12-24T14:19:01.164846 | 2015-05-20T08:42:26 | 2015-05-20T08:42:26 | 35,935,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,642 | py | import os
import xml.etree.ElementTree as ET
from . import settings
from . import tileset
from . import camera
from . import npc
from . import trainer
from . import script_engine
from . import events
from . import error
from . import data
from . import globs
from . import sound
from . import environment_objects
import eng.behaviours as behaviours
import eng.movement as movement
import eng.resource_ids as resource_ids
from eng.constants.weather import *
from eng.constants.directions import *
from eng.constants.behaviours import *
#border tile locations
# Keys into Tilemap.borderTiles for the four border tile slots read from
# the map file's <border> node (nw/ne/sw/se attributes).
BD_NW = 0
BD_NE = 1
BD_SW = 2
BD_SE = 3
class Layer():
    """A single tile layer of a map.

    A layer starts empty and is populated by an opening method
    (currently openTMXNode).
    """

    def __init__(self):
        """Create a blank layer with no tiles and no running animations."""
        # 2D grid of tile indexes, addressed as array[y][x]; -1 is blank.
        self.array = []
        # (x, y) -> animation currently playing on that tile.
        self.animations = {}

    def openTMXNode(self, layerNode):
        """Populate the layer from a <layer> node of a TMX file.

        layerNode - the TMX <layer> node
        Raises a resource error when the "level" property is missing or
        not an integer, and an unsupported-feature error for non-CSV data.
        """
        # The layer's level comes from its "level" property.
        self.level = None
        props = layerNode.find("properties")
        if props is None:
            raise error.DInvalidResourceError("Unknown TMX file", "No layer properties defined.")
        for prop in props.getchildren():
            if prop.attrib["name"] != "level":
                continue
            try:
                self.level = int(prop.attrib["value"])
            except ValueError:
                raise error.DInvalidResourceError("Unknown TMX file", "Level is not an integer.")
            break
        if self.level is None:
            raise error.DInvalidResourceError("Unknown TMX file", "Layer property \"level\" is not defined.")

        # Only CSV-encoded tile data is understood.
        data = layerNode.find("data")
        if data.attrib["encoding"] != "csv":
            raise error.DittoUnsupportedException("Unknown TMX file", "TMX layer encoding", data.attrib["encoding"])
        for line in data.text.split("\n"):
            if line == "":
                continue
            cells = [cell for cell in line.split(",") if cell != ""]
            # TMX tile indexes start at 1; ours start at 0.
            self.array.append([int(cell) - 1 for cell in cells])

    def offsetElements(self, i):
        """Subtract i from every non-blank tile index in the layer.

        Used to correct for multiple tilesets being offset by the TMX
        format.  Blank tiles (-1) are left untouched.

        i - the amount to subtract
        """
        for row in self.array:
            for x, tile in enumerate(row):
                if tile != -1:
                    row[x] = tile - i

    def tick(self):
        """Advance every active animation one frame, discarding any that
        have finished."""
        finished = []
        for position, anim in list(self.animations.items()):
            anim.tick()
            if not anim.active:
                finished.append(position)
        for position in finished:
            del self.animations[position]

    def __getitem__(self, position):
        """Return the tile index at the given (x, y) position, honouring
        any animation currently playing there.

        position - the (x, y) coordinate to read
        """
        anim = self.animations.get(position)
        if anim is not None:
            return anim.currentFrame
        x, y = position
        return self.array[y][x]
class Tilemap(script_engine.ScriptableObject):
"""
Class representing a map object.
"""
def __init__(self, mapId):
"""
Open the map data file, set border tiles and connections, and add NPCs and other events.
mapId - the map's id.
"""
#for the scripting engine
script_engine.ScriptableObject.__init__(self)
self.scriptCommands["playAnim"] = self.command_playAnim
#store variables we'll need later
self.id = mapId
self.fn = resource_ids.getMapFn(mapId)
#get a script engine (singleton)
self.scriptEngine = script_engine.ScriptEngine()
#parse the XML file
root = data.getTreeRoot(self.fn)
self.name = root.getOptionalAttr("name", data.D_STRING)
self.music = os.path.join(settings.path, "data", root.getAttr("music", data.D_STRING))
#check that ids match
if root.getAttr("id", data.D_STRING) != self.id:
raise error.DevError("Map id incorrect.",
"Map pointed by id \"{}\" considers it's id to be \"{}\".".format(self.id, root.getAttr("id", data.D_STRING)))
#open the actual map data file to create the map tile data
mapPath = os.path.join(settings.path, "data", root.getAttr("file", data.D_STRING))
self.openMap(mapPath)
#create the tileset
tsId = root.getAttr("tileset", data.D_STRING)
self.tileset = tileset.Tileset(tsId)
#set the border tiles
self.borderTiles = {}
borderNode = root.getChild("border")
#set each border node with the correct tile indexes, subtracting 1 because the tileset starts at 1 not 0
self.borderTiles[BD_NW] = borderNode.getAttr("nw", data.D_INT)-1
self.borderTiles[BD_NE] = borderNode.getAttr("ne", data.D_INT)-1
self.borderTiles[BD_SW] = borderNode.getAttr("sw", data.D_INT)-1
self.borderTiles[BD_SE] = borderNode.getAttr("se", data.D_INT)-1
#get weather data
weatherNode = root.getOptionalChild("weather")
if weatherNode is not None:
weatherName = weatherNode.getAttr("type", data.D_STRING)
try:
self.weather = WEATHERNAMES[weatherName]
except KeyError:
raise KeyError
else:
self.weather = W_NONE
#script default file
self.scriptDefault = root.getChild("scriptfile").getAttr("source", data.D_FILENAME)
#environment data
self.environment = root.getOptionalAttr("environment", data.D_STRING, "FIELD")
#create any connections from the map
#connected maps will not be loaded until the map becomes the main game map
#connections are stored as {direction: (filename, offset)}
self.connections = {}
self.connectedMaps = {}
for c in root.getChildren("connection"):
side = c.getAttr("side", data.D_STRING)
conId = c.getAttr("map", data.D_STRING)
offset = c.getAttr("offset", data.D_INT)
if side == "left":
self.connections[DIR_LEFT] = (conId, offset)
elif side == "right":
self.connections[DIR_RIGHT] = (conId, offset)
elif side == "up":
self.connections[DIR_UP] = (conId, offset)
elif side == "down":
self.connections[DIR_DOWN] = (conId, offset)
#create any NPCs, adding them to the sprite dictionary
self.sprites = {}
for n in root.getChildren("npc"):
spr = npc.NPC(n, self)
self.sprites[spr.id] = spr
#create any trainers
#for trainerNode in root.getChildren("trainer"):
# spr = trainer.Trainer(trainerNode, self)
# self.sprites[spr.id] = spr
#create a dictionary to hold positions reserved by moving sprites
self.reservedPositions = {}
#create objects
self.objects = []
for objectNode in root.getChildren("object"):
obj = environment_objects.createObject(objectNode, self)
self.objects.append(obj)
self.strengthActive = False
#create script and warp events, adding them to the events dictionary
#if a load script is defined, create it
self.events = {}
loadScript = None
for s in root.getChildren("script"):
trigger = s.getAttr("trigger", data.D_STRING)
if trigger == "load":
loadScript = script_engine.scriptFromNode(s, self.scriptDefault)
else:
position = tuple(s.getAttr("position", data.D_INT2LIST))
self.events[position] = events.ScriptEvent(s, self)
for w in root.getChildren("warp"):
position = tuple(w.getAttr("position", data.D_INT2LIST))
self.events[position] = events.Warp(w, self)
#if there is a load script, run it
if loadScript is not None:
self.scriptEngine.run(loadScript, self)
def openMap(self, fn):
ext = os.path.splitext(fn)[1]
if ext == ".tmx":
self.openTMX(fn)
else:
raise error.DittoUnsupportedException("map data extension", ext)
    def openTMX(self, fn):
        """
        Open a TMX file and use it to set map size and create tile layers and a collision layer.
        fn - the filename of the TMX file.
        Raises DevError if the file is missing, and DittoInvalidResourceException
        when the collision/behaviour tilesets or data layers are absent.
        """
        #parse the TMX XML markup
        try:
            tree = ET.parse(fn)
        except FileNotFoundError:
            raise error.DevError("TMX file not found:",
                                 fn,
                                 "While opening map \"{}\" with file:".format(self.id),
                                 self.fn)
        root = tree.getroot()
        self.size = int(root.attrib["width"]), int(root.attrib["height"])
        #find the offset at which the collision and behaviour layers tile data is stored
        #(each TMX tileset numbers its tiles globally starting at "firstgid")
        collisionTilesetOffset = None
        behaviourTilesetOffset = None
        for ts in root.findall("tileset"):
            if ts.attrib["name"] == "collision":
                collisionTilesetOffset = int(ts.attrib["firstgid"])-1
            elif ts.attrib["name"] == "behaviour":
                behaviourTilesetOffset = int(ts.attrib["firstgid"])-1
        if collisionTilesetOffset is None:
            raise error.DittoInvalidResourceException(fn, "Collision tileset")
        if behaviourTilesetOffset is None:
            raise error.DittoInvalidResourceException(fn, "Behaviour tileset")
        #create each layer, separating the collision and behaviour data
        self.layers = []
        self.collisionLayer = None
        self.behaviourLayer = None
        for layer in root.findall("layer"):
            l = Layer()
            l.openTMXNode(layer)
            if l.level == -1: #collision layer indicated by level == -1
                self.collisionLayer = l
            elif l.level == -2: #behaviour layer indicated by level == -2
                self.behaviourLayer = l
            else:
                self.layers.append(l)
        if self.collisionLayer is None:
            raise error.DittoInvalidResourceException(fn, "Collision data layer")
        if self.behaviourLayer is None:
            raise error.DittoInvalidResourceException(fn, "Behaviour data layer")
        #compensate for tilesets not starting at 1
        self.collisionLayer.offsetElements(collisionTilesetOffset)
        self.behaviourLayer.offsetElements(behaviourTilesetOffset)
def getLayersOnLevel(self, i):
"""
Return a list of layers on this map on a given level.
i - the level to look on.
"""
#return a filtered copy of the map's layers
return [a for a in self.layers if a.level == i]
def getBorderTile(self, position):
"""
Return the index of the border tile at a position.
position - the position of the tile on the map
"""
#determine the tiles position in the border, and return the relevant tile index
borderPosition = (position[0]%2, position[1]%2)
if borderPosition == (0,0):
return self.borderTiles[BD_NW]
elif borderPosition == (1,0):
return self.borderTiles[BD_NE]
elif borderPosition == (0,1):
return self.borderTiles[BD_SW]
elif borderPosition == (1,1):
return self.borderTiles[BD_SE]
    def walkonto(self, spr, destination, isPlayer=False):
        """
        Deal with a sprite walking onto a tile, by animating if required and reserving the position.
        spr - the sprite which is walking onto the tile.
        destination - the tile they're walking onto.
        isPlayer - when True, also fire walk-onto events, built-in tile
                   behaviours and trainer line-of-sight checks.
        """
        #if the destination is on the map, check the sprite's layer for walkonto animations
        #if there are any, play them
        #then reserve the position
        if (0 <= destination[0] < self.size[0]) and (0 <= destination[1] < self.size[1]):
            layers = self.getLayersOnLevel(spr.level)
            for l in layers:
                tile = l[destination]
                if tile in self.tileset.walkontoAnimations:
                    l.animations[destination] = self.tileset.walkontoAnimations[tile]
                    l.animations[destination].play(False)
            self.reservedPositions[destination] = spr
        #if it's a player, check for events and deal with any
        if isPlayer:
            #a walk-onto event takes precedence over any behaviour byte
            if destination in self.events:
                s = self.events[destination]
                if s.trigger == events.EV_WALKONTO:
                    s.activate()
                    return
            b = self.getBehaviourData(destination)
            if b in behaviours.BUILTINBEHAVIOURS:
                builtin = behaviours.BUILTINBEHAVIOURS[b]
                if builtin == B_SLIDE:
                    spr.slide()
                    return
                elif builtin in (B_LEDGEDOWN, B_LEDGELEFT, B_LEDGERIGHT):
                    spr.ledge()
                    return
                elif builtin == B_WATERFALL:
                    #direction through the waterfall depends on whether the
                    #sprite is climbing it or riding it down
                    if spr.climbingWaterfall:
                        spr.stepQueue.append(DIR_UP)
                        spr.switch = True
                    else:
                        spr.stepQueue.append(DIR_DOWN)
                        spr.switch = True
                elif builtin == B_FORCEDOWN:
                    spr.forceTile(DIR_DOWN)
                elif builtin == B_FORCEUP:
                    spr.forceTile(DIR_UP)
                elif builtin == B_FORCELEFT:
                    spr.forceTile(DIR_LEFT)
                elif builtin == B_FORCERIGHT:
                    spr.forceTile(DIR_RIGHT)
            #finally let any trainer watching this tile react
            for spriteId in self.sprites:
                spr1 = self.sprites[spriteId]
                if isinstance(spr1, trainer.Trainer):
                    if spr1.checkPosition(destination):
                        spr1.activate()
def loadConnections(self):
"""
Load all the connecting maps.
Called when the map becomes the main game map.
"""
#create each connecting map
for direction, (conId, offset) in list(self.connections.items()):
self.connectedMaps[direction] = (Tilemap(conId), offset)
def getCollisionData(self, position):
"""
Get the collision tile index at a given position.
position - the position to use.
"""
#if it's on the map, simply return the collision data
if (0 <= position[0] < self.size[0]) and (0 <= position[1] < self.size[1]):
return self.collisionLayer.array[position[1]][position[0]] #direct indexing prevents checking for animations
#otherwise see if it's on a connecting map
#if it is, get it
else:
for key in self.connectedMaps:
con = self.connectedMaps[key][0]
offset = self.connectedMaps[key][1]
if key == DIR_LEFT:
rel = position[0]+con.size[0], position[1]-offset
elif key == DIR_RIGHT:
rel = position[0]-self.size[0], position[1]-offset
elif key == DIR_UP:
rel = position[0]-offset, position[1]+con.size[1]
elif key == DIR_DOWN:
rel = position[0]-offset, position[1]-self.size[1]
if (0 <= rel[0] < con.size[0]) and (0 <= rel[1] < con.size[1]):
return con.getCollisionData(rel)
#else it must be a border tile, return 1 (block)
return 1
def getBehaviourData(self, position):
"""
Get the behaviour value at a given position.
position - the position to use.
"""
#if the position is on this map, return the relevant data
if (0 <= position[0] < self.size[0]) and (0 <= position[1] < self.size[1]):
return self.behaviourLayer.array[position[1]][position[0]] #direct indexing prevents checking for animations
#otherwise see if it's on a connecting map
#if it is, get it
else:
for key in self.connectedMaps:
con = self.connectedMaps[key][0]
offset = self.connectedMaps[key][1]
if key == DIR_LEFT:
rel = position[0]+con.size[0], position[1]-offset
elif key == DIR_RIGHT:
rel = position[0]-self.size[0], position[1]-offset
elif key == DIR_UP:
rel = position[0]-offset, position[1]+con.size[1]
elif key == DIR_DOWN:
rel = position[0]-offset, position[1]-self.size[1]
if (0 <= rel[0] < con.size[0]) and (0 <= rel[1] < con.size[1]):
return con.getBehaviourData(rel)
#else it must be a border tile, return -1 (no behaviour)
return -1
def emptyAt(self, position, allowPushing=True):
"""
Find out whether a given position is empty and available.
position - the position to use.
direction - the direction the sprite would be walking. (For pushing)
allowPushing - whether pushables can be pushed to make the position empty
"""
#check for any sprites at the position
for key in self.sprites:
s = self.sprites[key]
if s.position == position and s.visible: #not visible means it isn't taking up the tile
return False
#check whether the position is reserved
for pos in self.reservedPositions:
if pos == position:
return False
#check for objects blocking
for obj in self.objects:
if obj.position == position:
return False
#if nothing found, it must be empty
return True
    def getSpriteById(self, spriteId):
        """
        Get a given sprite by its id.
        spriteId - the id of the sprite to find.
        Raises KeyError if the map has no sprite with that id.
        """
        #find the required sprite
        return self.sprites[spriteId]
def getPushableAt(self, position):
for obj in self.objects:
if (obj.position == position) and isinstance(obj, environment_objects.PushableObject):
return obj
else:
return None
def getObject(self, name):
if name == "tileset":
return self.tileset
else:
return self.getSpriteById(name)
def command_playAnim(self, name, x, y):
l = self.layers[-1]
position = (x,y)
l.animations[position] = self.tileset.scriptAnimations[name]
l.animations[position].play(False)
    def investigate(self, target, level):
        """
        Called by a player to investigate a map for events.
        Checks, in order: visible sprites, objects, investigate-triggered
        events, and finally the tile's behaviour byte.
        target - the position to look at
        level - the level to look on
        """
        #check for sprites on the position and level
        #if one is found, call its onInvestigate method
        spriteIds = [a for a in self.sprites if self.sprites[a].level == level]
        sprites = [self.sprites[x] for x in spriteIds]
        for s in sprites:
            if s.position == target and s.visible:
                s.onInvestigate()
                return
        #next check for objects
        for obj in self.objects:
            if obj.position == target:
                obj.onInvestigate()
                return
        #if there was no sprite, check for any events triggered on investigate
        if target in self.events:
            if self.events[target].trigger == events.EV_INVESTIGATE:
                self.events[target].activate()
                return
        #finally, check for a behaviour byte and process that
        #only when the tile is on the player's level and not a bridge
        b = self.getBehaviourData(target)
        col = self.getCollisionData(target)
        action, targetLevel = movement.getActionLevel(col)
        if (targetLevel == level) and (action != movement.BRIDGE):
            self.processBehaviour(b, events.EV_INVESTIGATE)
def processBehaviour(self, b, trigger, caller=None):
if trigger == events.EV_INVESTIGATE:
try:
s = behaviours.BEHAVIOURSCRIPTS_INVESTIGATE[b]
self.scriptEngine.run(s, self)
except KeyError:
pass
elif trigger == events.EV_WALKONTO:
try:
s = behviours.BEHAVIOURSCRIPTS_WALKONTO[b]
self.scriptEngine.run(s, self)
except KeyError:
pass
elif trigger == events.EV_FINISHWALKONTO:
try:
builtin = behaviours.BUILTINBEHAVIOURS[b]
if builtin == B_SLIDE:
caller.walkForward()
except KeyError:
caller.sliding = False
caller.speed = 1
def tick(self):
"""Update the map one frame"""
#we don't need to do anything, just tick our components
self.tileset.tick()
for l in self.layers:
l.tick()
for key in self.sprites:
self.sprites[key].tick()
for obj in self.objects:
obj.tick()
for key in self.connectedMaps:
self.connectedMaps[key][0].tick()
def getVar(self, name):
if name == "strengthActive":
return self.strengthActive
else:
raise script_engine.DLookupError(name)
def setVar(self, name, val):
if name == "strengthActive":
self.strengthActive = bool(val)
else:
raise script_engine.DLookupError(name)
def __repr__(self):
return "<tilemap \"{}\">".format(self.id)
| [
"andrew.turner@merton.ox.ac.uk"
] | andrew.turner@merton.ox.ac.uk |
7ce34c0f27ee62952ab7710218314c661a9307d4 | 922d5487bc18dd4d044d41d6b81cd14eb952c591 | /build/maps/catkin_generated/pkg.develspace.context.pc.py | 4cbdbf4d1fbe95e207783ee3aac5fe8a53f99498 | [] | no_license | hanbincho/ros_ws | 21b747f115cee85a3b8a578028ac44e069721c31 | d92feebd845a69bb8535e8c48592caf2b94d6497 | refs/heads/master | 2020-04-02T15:45:16.395227 | 2018-11-07T04:09:07 | 2018-11-07T04:09:07 | 149,820,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE(review): appears to be generated from catkin's pkg.context.pc.in
# template (see header comment) - edit the template, not this file.
CATKIN_PACKAGE_PREFIX = ""
# semicolon-separated lists; an empty string means "no entries"
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "maps"
PROJECT_SPACE_DIR = "/home/hanbin/ros_ws/devel"
PROJECT_VERSION = "0.0.0"
| [
"hxc431@case.edu"
] | hxc431@case.edu |
889fd162b2172cea0fc44dfa040f0d7923be9d3b | 66927122b147061fefd486712e4caa797eb1f4c4 | /python/sdssdb/database/__init__.py | b280fcd6ce6dbab0cde71e29575d28f47cedf8fb | [] | no_license | sdss/sdssdb_peewee | 23ed0fb70e819bf2b2b73e55dedd4a16c55a547b | f64d042606eb3688e85f1450513afe38ae76e4b6 | refs/heads/master | 2021-03-27T12:37:24.793109 | 2018-06-10T19:36:10 | 2018-06-10T19:36:10 | 94,731,246 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25 | py |
from .database import *
| [
"gallegoj@uw.edu"
] | gallegoj@uw.edu |
1f778357bc0ffe65c05214c2bc7f0432cea408f5 | aaa204ad7f134b526593c785eaa739bff9fc4d2a | /tests/providers/amazon/aws/hooks/test_step_function.py | 54f9556b7016ac20701ad68f26a857dad7136944 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | cfei18/incubator-airflow | 913b40efa3d9f1fdfc5e299ce2693492c9a92dd4 | ffb2078eb5546420864229cdc6ee361f89cab7bd | refs/heads/master | 2022-09-28T14:44:04.250367 | 2022-09-19T16:50:23 | 2022-09-19T16:50:23 | 88,665,367 | 0 | 1 | Apache-2.0 | 2021-02-05T16:29:42 | 2017-04-18T20:00:03 | Python | UTF-8 | Python | false | false | 2,550 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import unittest
from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
try:
from moto import mock_stepfunctions
except ImportError:
mock_stepfunctions = None
@unittest.skipIf(mock_stepfunctions is None, 'moto package not present')
class TestStepFunctionHook(unittest.TestCase):
    """Tests for StepFunctionHook against a moto-mocked Step Functions backend."""

    @staticmethod
    def _create_state_machine(hook):
        """Create a dummy state machine and return its ARN."""
        response = hook.get_conn().create_state_machine(
            name='pseudo-state-machine', definition='{}', roleArn='arn:aws:iam::000000000000:role/Role'
        )
        return response.get('stateMachineArn')

    @mock_stepfunctions
    def test_get_conn_returns_a_boto3_connection(self):
        hook = StepFunctionHook(aws_conn_id='aws_default')
        assert hook.get_conn().meta.service_model.service_name == 'stepfunctions'

    @mock_stepfunctions
    def test_start_execution(self):
        hook = StepFunctionHook(aws_conn_id='aws_default', region_name='us-east-1')
        state_machine_arn = self._create_state_machine(hook)
        execution_arn = hook.start_execution(
            state_machine_arn=state_machine_arn, name=None, state_machine_input={}
        )
        assert execution_arn is not None

    @mock_stepfunctions
    def test_describe_execution(self):
        hook = StepFunctionHook(aws_conn_id='aws_default', region_name='us-east-1')
        state_machine_arn = self._create_state_machine(hook)
        execution_arn = hook.start_execution(
            state_machine_arn=state_machine_arn, name=None, state_machine_input={}
        )
        assert 'input' in hook.describe_execution(execution_arn)
| [
"noreply@github.com"
] | cfei18.noreply@github.com |
67258a6994da1d273531135b1b83de48c41edf94 | 1adc05008f0caa9a81cc4fc3a737fcbcebb68995 | /hardhat/recipes/mingw64/mingw64_libcsv.py | fdb7c482927ad1b4dc16ce316999985addbca2f6 | [
"MIT",
"BSD-3-Clause"
] | permissive | stangelandcl/hardhat | 4aa995518697d19b179c64751108963fa656cfca | 1ad0c5dec16728c0243023acb9594f435ef18f9c | refs/heads/master | 2021-01-11T17:19:41.988477 | 2019-03-22T22:18:44 | 2019-03-22T22:18:52 | 79,742,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | from .base import Mingw64BaseRecipe
class Mingw64LibCsvRecipe(Mingw64BaseRecipe):
    """Build recipe for libcsv under the mingw64 toolchain."""

    def __init__(self, *args, **kwargs):
        super(Mingw64LibCsvRecipe, self).__init__(*args, **kwargs)
        self.name = 'mingw64-libcsv'
        self.version = '3.0.3'
        #sha256 of the source tarball
        self.sha256 = ('d9c0431cb803ceb9896ce74f683e6e5a'
                       '0954e96ae1d9e4028d6e0f967bebd7e4')
        #$version placeholders are expanded by the base recipe
        self.url = ('http://downloads.sourceforge.net/project/libcsv/libcsv/'
                    'libcsv-$version/libcsv-$version.tar.gz')
"clayton.stangeland@gmail.com"
] | clayton.stangeland@gmail.com |
09a753257b8c194d653adc57fa8fc15b1ef05a9a | a5fdc429f54a0deccfe8efd4b9f17dd44e4427b5 | /0x08-python-more_classes/4-rectangle.py | f4dd8584a36172af297d6445e87fa837e70d95f0 | [] | no_license | Jilroge7/holbertonschool-higher_level_programming | 19b7fcb4c69793a2714ad241e0cc4fc975d94694 | 743a352e42d447cd8e1b62d2533408c25003b078 | refs/heads/master | 2022-12-20T20:41:33.375351 | 2020-09-25T02:02:28 | 2020-09-25T02:02:28 | 259,471,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,192 | py | #!/usr/bin/python3
"""Module for evolved rectangle"""
class Rectangle:
    """Rectangle defined by a validated integer width and height."""

    def __init__(self, width=0, height=0):
        """
        Initialize the rectangle.
        width - horizontal size (int >= 0, default 0).
        height - vertical size (int >= 0, default 0).
        Raises TypeError/ValueError for invalid values.
        """
        # go through the property setters so the values are validated
        # (previously __init__ wrote the private attributes directly,
        # bypassing validation entirely)
        self.width = width
        self.height = height

    @property
    def width(self):
        """Method to get value of width"""
        return self.__width

    @width.setter
    def width(self, value):
        """Method to set the value of width, validating type and sign"""
        if not isinstance(value, int):
            raise TypeError("width must be an integer")
        if value < 0:
            raise ValueError("width must be >= 0")
        self.__width = value

    @property
    def height(self):
        """Method to get the value of height"""
        return self.__height

    @height.setter
    def height(self, value):
        """Method to set the value of height, validating type and sign"""
        if not isinstance(value, int):
            raise TypeError("height must be an integer")
        if value < 0:
            raise ValueError("height must be >= 0")
        self.__height = value

    def area(self):
        """Return the area of the rectangle"""
        return self.__width * self.__height

    def perimeter(self):
        """Return the perimeter, or 0 if either dimension is 0"""
        if self.__width == 0 or self.__height == 0:
            return 0
        return 2 * (self.__width + self.__height)

    def __str__(self):
        """Return the rectangle drawn with '#', or "" if it has no area"""
        # previously a width of 0 produced stray newlines; an empty
        # rectangle now prints as an empty string, as intended
        if self.__width == 0 or self.__height == 0:
            return ""
        row = "#" * self.__width
        return "\n".join(row for _ in range(self.__height))

    def __repr__(self):
        """Return a string representation that can recreate the rectangle"""
        return "Rectangle({}, {})".format(self.__width, self.__height)
| [
"1672@holbertonschool.com"
] | 1672@holbertonschool.com |
01c294996900995b8b6e38364a71528e9204bfe2 | 2b3e9b32a38f4992c529de56b4baa51e1a674c4e | /ccui/testexecution/templatetags/execution.py | 4e6b442e37794b16a9f4d7be4b72a9985bd786e8 | [] | no_license | camd/caseconductor-ui | 2c4f63fd6c20ee421012d8770b3b873c1b4f4232 | deb6b22ed417740bf947e86938710bd5fa2ee2e7 | refs/heads/master | 2021-01-18T05:36:22.647236 | 2011-10-10T14:48:29 | 2011-10-10T14:48:29 | 2,447,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py | from django import template
from django.template.loader import render_to_string
from classytags.core import Tag, Options
from classytags.arguments import Argument
from ..models import TestCaseAssignmentList
register = template.Library()
class RunCase(Tag):
    """
    Template tag rendering the run-test-case panel for one included test case.
    Usage: {% run_case includedtestcase user environments %}
    """
    name = "run_case"
    options = Options(
        Argument("includedtestcase"),
        Argument("user"),
        Argument("environments")
        )
    def render_tag(self, context, includedtestcase, user, environments):
        """
        Render runtests/_run_case.html for the given case.
        Looks up the current user's assignment for the case (creating one
        if none exists), then picks the result matching the environments.
        """
        assignments = TestCaseAssignmentList.get(auth=user.auth).filter(
            testCaseVersion=includedtestcase.testCaseVersion.id,
            testRun=includedtestcase.testRun.id,
            tester=user.id)
        if len(assignments):
            assignment = assignments[0]
        else:
            #no assignment yet: create one for this user as a side effect
            assignment = includedtestcase.assign(user, auth=user.auth)
        # @@@ need a better way to filter results by environment group
        result = None
        for res in assignment.results:
            if res.environments.match(environments):
                result = res
                break
        if result is None:
            # @@@ no environment match - should never happen.
            return u""
        return render_to_string(
            "runtests/_run_case.html",
            {"case": assignment.testCase,
             "caseversion": assignment.testCaseVersion,
             "result": result,
             "open": False,
             })
register.tag(RunCase)
| [
"carl@oddbird.net"
] | carl@oddbird.net |
bde043a1c15730f638e92132c6348583ce9fcb26 | 69fff86731eb12d0879c7232c975728c0c444dc6 | /modpin/src/SBI/beans/File.py | beb03e55a3dd74ba6cc72ec3f8e0a6e78fd088f5 | [] | no_license | structuralbioinformatics/MODPIN | 158565405b0e7bddaec554add53c4a33beddfd39 | e06e0ac107de3a025d9d4961518393109dfd55db | refs/heads/master | 2021-07-07T16:46:40.602447 | 2020-07-17T12:32:11 | 2020-07-17T12:32:11 | 132,624,679 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,690 | py | """File
author: jbonet
date: 02/2013
@oliva's lab
"""
"""
Import Standard Libraries
"""
import os
import math
import gzip
import warnings
"""
Dependences in SBI library
"""
from SBI.error import FileError
from SBI import SBIglobals
class File(object):
"""The File class manages certain aspects of the IO process
    File allows seamless control over some aspects of working with files.
Namely, it will allow to:
- work with both regular and gzipped files.
- quickly obtain paths, file name, extension...
- check the viability reading/writing a given file
- avoid/ignore overwrite an existing file
- split a file into several
Initializing parameters
+ file_name : [M] Name of the file
+ action : Intention.
Available options are (r, rb) to read and (w, wb) to write
@Default: 'r'
+ overwrite : On write mode, ignores if a file with the same name existed before
@Default: False
@Raises FileError
Attributes:
> full : Returns a full path to the file.
> dir : Returns the path UP TO the file.
> lastdir : Returns ONLY the last dir containing the file.
> name : Returns ONLY the file name.
> prefix : Returns everything EXCEPT the last extension of the file. (exclude gzip)
> first_prefix: Returns the absolute firts part of the file
> extension : Returns the LAST extension of the file. (exclude gzip)
> descriptor: Opens (first call) and returns the file-descriptor
* action : Defines what to do with the file.
Available options are listed in the available_action global variable.
When changing the action, the file's possibilities are rechecked
@Raises FileError
Booleans:
> is_gziped : Evaluates whether the file is/has to be compressed.
Evaluation based SOLELY on the '.gz' termination.
Methods:
- close() : Closes the filehandle
- clean (bool): deletes the file if size 0.
@Default: False
@Raises FileError
> split() : Splits a given file in multiple files
- start_separator (string): Line startswith. Is a line to mark the line that starts the new file
@Default: None, only other parameters are considered
- end_separator (string): Line startswith. Is a line to mark the line that ends the new file
@Default: None, only other parameters are considered
- size (int): Max allowed file size. IN BYTES
@Default: 1073741824 (1GB)
- sections (int): Number of sections to split file into. Overwrites 'size'
@Default: None
- overwrite (bool): Overwrite previous files of the same name
@Returns an array with the File object of each created file.
@Raises FileError
@Raises AttributeError if both start_separator and end_separator are defined
Requirements:
* os, sys, math, gzip, warnings
< SBI.error.FileError
< SBI.SBIglobals
Usage:
from SBI.beans.File import File
newReadFile = File(file_name = 'test.txt', action = 'r')
for line in newReadFile.descriptor:
//DO STUFF
newReadFile.close()
newWriteFile = File(file_name = 'test.txt', action = 'w', overwrite = True)
newWriteFile.write(//STUFF)
newWriteFile.close()
"""
write_action = set(['w', 'a', 'ar', 'wb'])
read_action = set(['r', 'rb'])
available_action = write_action.union(read_action)
def __init__(self, file_name=None, action='r', overwrite=None):
if file_name is None: raise FileError(0) # Raise 'No file specified' Error
self._file = file_name
SBIglobals.alert('debug', self, 'Preparing File: {0}'.format(self.full))
self._action = None
self._check_action(action.lower()) #action must be valid
self._fd = None
# Local overwrite takes precedence over Global overwrite
self._overwrite = SBIglobals.decide_overwrite(overwrite)
self._check_file()
"""ATTRIBUTES"""
@property
def full(self): return os.path.abspath(self._file)
@property
def dir(self): return os.path.split(self.full)[0]
@property
def lastdir(self): return os.path.basename(self.dir)
@property
def name(self): return os.path.split(self.full)[-1]
@property
def prefix(self):
if self.is_gziped: return os.path.splitext(os.path.splitext(self.name)[0])[0]
else: return os.path.splitext(self.name)[0]
@property
def first_prefix(self):return self.name.split('.')[0]
@property
def extension(self):
if self.is_gziped: return os.path.splitext(os.path.splitext(self.name)[-1])[-1]
else: return os.path.splitext(self.name)[-1]
@property
def action(self): return self._action
@action.setter
def action(self, value):
self._check_action(value.lower())
self._check_file()
@property
def descriptor(self):
if self._fd == None: # If it was not open, it needs to be opened
if not self.is_gziped: self._fd = open(self.full, self.action)
else: self._fd = gzip.open(self.full, self.action)
return self._fd
"""BOOLEANS"""
@property
def is_gziped(self): return os.path.splitext(self.name)[-1] == '.gz'
def close(self, clean = False):
if self._fd is not None: # only close something previously open
self._fd.close()
self._fd = None
if clean and os.path.getsize(self.full) == 0: # Delete empty files
warnings.warn("The output file %s is empty and it's going to be deleted" %self.full)
os.unlink(self.full)
# else: raise FileError(2, self.full) # Why would you close something already closed?
def write(self, line):
if not self._action in self.write_action:
raise FileError(6)
if self._fd is None:
if not self.is_gziped: self._fd = open(self.full, self.action)
else: self._fd = gzip.open(self.full, self.action)
self._fd.write(line)
def split(self, start_separator = None, end_separator = None, size = 1073741824, sections = None, overwrite = None):
"""
> split() : Splits a given file in multiple files
- start_separator (string): Line startswith. Is a line to mark the line that starts the new file
@Default: None, only other parameters are considered
- end_separator (string): Line startswith. Is a line to mark the line that ends the new file
@Default: None, only other parameters are considered
- size (int): Max allowed file size. IN BYTES
@Default: 1073741824 (1GB)
- sections (int): Number of sections to split file into. Overwrites 'size'
@Default: None
- overwrite (bool): Overwrite previous files of the same name
@Returns an array with the File object of each created file.
@Raises FileError
@Raises AttributeError if both start_separator and end_separator are defined
"""
# A defined separator will avoid splitting data
# For example, uniprot files separate between proteins with '//', then by declaring the separator
# we can avoid a protein from being split between two files.
counter = 1
newfiles = []
if (size is None and sections is None) or not isinstance(size, int):
raise FileError(5, self.__name__, 'split')
if (start_separator is not None and end_separator is not None):
raise AttributeError('Both start and end separators can not be defined simultaniously\n')
# section TAKES PRECEDENCE over size...
if sections is not None: size = math.ceil(os.path.getsize(self.full) / float(sections))
else: sections = math.ceil(os.path.getsize(self.full) / size)
# preparing partition file names
outputfile = os.path.join(self.dir, self.prefix + '.{0:003d}' + self.extension)
if self.is_gziped: outputfile += '.gz'
# Local overwrite takes precedence over Global overwrite
overwrite = SBIglobals.decide_overwrite(overwrite)
SBIglobals.alert('verbose', self, 'Dividing {0.full} into {1} files (aprox.)'.format(self, sections))
newfiles.append(File(file_name = outputfile.format(counter), action = 'w', overwrite = overwrite))
for line in self.descriptor:
if (start_separator is None or line.startswith(start_separator)) and os.path.getsize(newfiles[-1].full) >= size:
newfiles[-1].close()
counter += 1
newfiles.append(File(file_name = outputfile.format(counter), action = 'w', overwrite = overwrite))
newfiles[-1].write(line)
if (end_separator is None or line.startswith(end_separator)) and os.path.getsize(newfiles[-1].full) >= size:
newfiles[-1].close()
counter += 1
newfiles.append(File(file_name = outputfile.format(counter), action = 'w', overwrite = overwrite))
newfiles[-1].close()
self.close()
return newfiles
"""PRIVATE METHODS"""
def _check_action(self, action):
if not action in self.available_action: # Raise 'Wrong action' Error
raise File(1, action, self.available_action)
if self.is_gziped and not action.endswith('b'): action += 'b' # Better if working with compressed files
self._action = action
SBIglobals.alert('debug', self, '\tAction {0} is OK...'.format(self._action))
def _check_file(self):
if self._action.startswith('r'):
if not os.path.isfile(self.full): raise FileError(3, self.full, 'noexists')
if not os.access(self.full, os.R_OK): raise FileError(4, self.full, 'read')
if self._action.startswith('w') or self._action.startswith('a'):
if os.path.isfile(self.full):
if not self._overwrite: raise FileError(3, self.full, 'exists')
if not os.path.isdir(self.dir): raise FileError(4, self.dir, 'nodir')
if not os.access(self.dir, os.W_OK): raise FileError(4, self.dir, 'write')
SBIglobals.alert('debug', self, '\tFile is OK...')
"""OVERWRITE INHERITED FUNCTIONS"""
def __repr__(self):
return "[{0.__class__}]: {0.full}".format(self)
| [
"patriciamirelabota@gmail.com"
] | patriciamirelabota@gmail.com |
0fe941019348268ae95cac69f0823b8ce404d416 | 987ead1eb0877b9bdea16f3ee50bf19d5fe204bd | /DL/face_feature/faceFeature.py | a22d34eba15d893f8ddbe019281b82568ce7d04a | [] | no_license | ZHX1996/project | da62151e32254848a02292a2f9bdb1db17850d67 | 5a57be55cf173dde7e5a135a9cf1cfbc9a63a158 | refs/heads/master | 2021-07-15T11:36:02.412231 | 2020-05-15T08:51:34 | 2020-05-15T08:51:34 | 94,512,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | from itertools import islice
import numpy as np
import io
def train_data(path='training.csv'):
    """Load the facial-keypoint training CSV.

    Each data row holds keypoint coordinates (floats, comma separated)
    followed by a final column of space-separated pixel values.  Only the
    first two data rows (lines 1-2 after the header) are read, matching the
    original behaviour.

    :param path: CSV file to read (default ``'training.csv'`` for backward
        compatibility).
    :return: tuple ``(label, dtrain)`` of numpy arrays — keypoint
        coordinates and pixel values respectively.
    """
    dtrain, label = [], []
    # 'with' guarantees the handle is closed (the original leaked it).
    with io.open(path, 'r', encoding='utf-8') as train_file:
        for s in islice(train_file, 1, 3):  # skip header; take rows 1-2
            p = s.split(',')
            # Every column but the last is a float keypoint coordinate.
            label.append([float(v) for v in p[:-1]])
            # The last column is the image as space-separated ints
            # (int() tolerates the trailing newline).
            dtrain.append([int(v) for v in p[-1].split(' ')])
    return np.array(label), np.array(dtrain)
def test_data():
imageid, image, buff = [], [], []
test_file = io.open('test.csv', 'r', encoding='utf-8')
for s in islice(test_file, 1, 3):
p = s.split('\n')
temp = p[0].split(',')
imageid.append(int(temp[0]))
e = temp[1].split(' ')
for i in range(len(e)):
buff.append(int(e[i]))
image.append(buff)
buff = []
print(imageid)
print(image)
if __name__ == '__main__':
    # Script entry point: parse and print the test split.
    # (Loading the training split is left disabled below.)
    # train_data()
    test_data()
"1365370292@qq.com"
] | 1365370292@qq.com |
f66d19147add74c58d7348ca20d812a10a1f7bf4 | d19bfba1c92a59a9d5d888e87db32a2cd1e7bd00 | /example.py | e405d017f7947ad196b0a5d5b3ecc3639cb05de9 | [] | no_license | codesharedot/Plot-no-111 | 25a3c6a2b7895efc9f327a1b0695749361e2d858 | e046244c102253f5e538edc892f3ac106b8eec61 | refs/heads/master | 2021-03-15T03:39:44.802296 | 2020-03-12T11:51:42 | 2020-03-12T11:51:42 | 246,821,312 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | import numpy as np
import matplotlib.pyplot as plt

# Draw a bar chart of five groups with symmetric error bars and save it
# as chart.png (no interactive window is opened).
group_names = ['G1', 'G2', 'G3', 'G4', 'G5']
group_means = [8, 1, 6, 9, 10]
group_errors = [2, 3, 4, 1, 2]
bar_width = 0.35  # width of each bar

figure, axes = plt.subplots()
axes.bar(group_names, group_means, bar_width, yerr=group_errors, label='Data')
axes.set_ylabel('Scores')
axes.set_title('Visual')
axes.legend()
# plt.show()  # uncomment for interactive display
plt.savefig('chart.png')
"codeto@sent.com"
] | codeto@sent.com |
618f3a962bee7c18f831cf4dd618b01831a5b133 | 112bcac00e68ffeceeffec335a87411f141ad17f | /codes/ch4/gethostbyaddr-paranoid.py | 55dde1e691537b3e573ceba19a6590a3b79bc697 | [] | no_license | wwq0327/PyNP | a5a214fde76ef0701a8205a3509e762f47f8fc2c | 684d13533d7296116aa7a099347365ca69a72004 | refs/heads/master | 2021-01-15T17:45:23.434786 | 2011-09-23T16:41:18 | 2011-09-23T16:41:18 | 2,421,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,057 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
gethostbyaddr-paranoid.py
~~~~~~~~~~~~~~~~~~~~
:date: 2011-09-22
:from: Python Network Programming
"""
import sys, socket
def getipaddrs(hostname):
    '''Return the list of IP addresses that *hostname* resolves to.'''
    addresses = []
    for info in socket.getaddrinfo(hostname, None, 0, socket.SOCK_STREAM):
        # getaddrinfo entries are (family, type, proto, canonname, sockaddr);
        # sockaddr[0] is the textual IP address.
        addresses.append(info[4][0])
    return addresses
def gethostname(ipaddr):
    '''Reverse-resolve *ipaddr* and return its primary host name.'''
    name, _aliases, _addresses = socket.gethostbyaddr(ipaddr)
    return name
# Paranoid reverse-then-forward DNS validation (Python 2 print syntax):
# 1. reverse-resolve the IP given on the command line to a host name,
# 2. forward-resolve that name and require the original IP to appear.
try:
    hostname = gethostname(sys.argv[1])
    ipaddrs = getipaddrs(hostname)
except socket.herror, e:
    # No PTR record for this address; treat as benign and exit cleanly.
    print "No host names available for %s; this may be normal." % sys.argv[1]
    sys.exit(0)
except socket.gaierror, e:
    # Reverse lookup worked but the name does not forward-resolve: suspicious.
    print "Got hostname %s, but it could not be forward-resolved: %s" % \
        (hostname, str(e))
    sys.exit(1)
# The forward lookup must include the IP we started from, otherwise the
# PTR record is inconsistent (possible spoofing or stale DNS).
if not sys.argv[1] in ipaddrs:
    print "Got hostname %s, but on forward lookup," % hostname
    print "original IP %s did not appear in IP address list." % sys.argv[1]
    sys.exit(1)
print "validated hostname:", hostname
"wwq0327@gmail.com"
] | wwq0327@gmail.com |
c0fb6fdd9e20b83a6bea88c1db214ee925b3d934 | 27a2864de9876b53db02bcfd1df2342af193933a | /users/views.py | d49c14148e211a0e9975e507da46a9d553808334 | [] | no_license | VinneyJ/learning_tracker | 40d04c19800cfa62ed4789753441208bf5d92cf6 | 63b0b00e9d153d193c7f12f6af57f1b756d95c5d | refs/heads/master | 2022-08-28T07:50:47.700443 | 2020-05-29T07:22:08 | 2020-05-29T07:22:08 | 266,212,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | from django.shortcuts import render, redirect
from django.contrib.auth import login
from django.contrib.auth.forms import UserCreationForm
# Create your views here.
def register(request):
    """Register a new user.

    A GET renders a blank sign-up form; a valid POST creates the account,
    logs the new user in, and redirects to the app's index page.  An
    invalid POST re-renders the form with its validation errors.
    """
    if request.method == 'POST':
        form = UserCreationForm(data=request.POST)
        if form.is_valid():
            new_user = form.save()
            login(request, new_user)
            return redirect('learning_logs:index')
    else:
        form = UserCreationForm()
    return render(request, 'registration/register.html', {'form': form})
"vincentjayden49@gmail.com"
] | vincentjayden49@gmail.com |
87dc8f8a1fa3afbc82df866419a33d3ed8c8f399 | d190750d6cb34e9d86ae96724cf4b56a2f57a74a | /tests/r/test_biopsy.py | dd6a45a631739b758f371642bb938df3da77b5b4 | [
"Apache-2.0"
] | permissive | ROAD2018/observations | a119f61a48213d791de0620804adb8d21c2ad9fb | 2c8b1ac31025938cb17762e540f2f592e302d5de | refs/heads/master | 2021-09-24T04:28:02.725245 | 2018-09-16T23:06:30 | 2018-09-16T23:06:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 512 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.biopsy import biopsy
def test_biopsy():
  """Test module biopsy.py by downloading biopsy.csv and checking that the
  extracted data has 699 rows and 11 columns.
  """
  test_path = tempfile.mkdtemp()
  try:
    x_train, metadata = biopsy(test_path)
    assert x_train.shape == (699, 11)
  finally:
    # Always remove the temp directory.  The original cleaned up only on
    # failure (leaking the directory on success) and then called ``raise()``
    # — which raises an empty tuple (a TypeError) and masked the real error.
    shutil.rmtree(test_path)
| [
"dustinviettran@gmail.com"
] | dustinviettran@gmail.com |
33daf595f2ce39ec83394a7b12ab536c060a963d | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /140_gui/pyqt_pyside/_exercises/_templates/PyQt5 Quick Start PyQt5 Database Operation/7 5. Paging Query 3. Paging Query Implementation.py | 1666eaaa01177d4de2dc0c3b30a26ae8e9ec6872 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 4,714 | py | # ______ ___
# ____ ?.?S.. ______ ?SD.., ?SQ.., ?STM.., QSqlQueryModel
# ____ ?.?C.. ______ *
# ____ ?.?W.. ______ *
# ______ re
#
# c_ DataGrid(?W..):
# ___ - parent_None):
# s__(DataGrid, self). - (parent)
# # Declare Database Connections
# db _ N..
# # Layout Manager
# layout _ ?VBL..
# # Query Model
# queryModel _ QSqlQueryModel()
# # Table View
# tableView _ ?TV..
# tableView.sM..(queryModel)
# #
# totalPageLabel _ ?L..
# currentPageLabel _ ?L..
# switchPageLineEdit _ ?LE..
# prevButton _ ?PB..("Prev")
# nextButton _ ?PB..("Next")
# switchPageButton _ ?PB..("Switch")
# # Current Page
# currentPage _ 1
# # PageCount
# totalPage _ N..
# # Total Records
# totalRecordCount _ N..
# # Number of records per page
# pageRecordCount _ 4
#
# initUI()
# initializedModel()
# setUpConnect()
# updateStatus()
#
# ___ initUI
# tableView.hH.. .setStretchLastSection( st.
# tableView.hH.. .sSRM..(?HV...Stretch)
# layout.aW..(tableView)
#
# hLayout _ ?HBL..
# hLayout.aW..(prevButton)
# hLayout.aW..(nextButton)
# hLayout.aW..(QLabel("Jump To"))
# switchPageLineEdit.setFixedWidth(40)
# hLayout.aW..(switchPageLineEdit)
# hLayout.aW..(QLabel("page"))
# hLayout.aW..(switchPageButton)
# hLayout.aW..(QLabel("Current page:"))
# hLayout.aW..(currentPageLabel)
# hLayout.aW..(QLabel("Total pages:"))
# hLayout.aW..(totalPageLabel)
# hLayout.addStretch(1)
#
# layout.aL..(hLayout)
# sL..(layout)
#
# sWT..("DataGrid")
# r..(600, 300)
#
# ___ setUpConnect
# prevButton.c__.c..(onPrevPage)
# nextButton.c__.c..(onNextPage)
# switchPageButton.c__.c..(onSwitchPage)
#
# ___ initializedModel
# db _ ?SD...aD..("QSQLITE")
# db.sDN..("/home/user/test.db")
# __ no. db.o..
# r_ F..
# queryModel.setHeaderData(0, __.H.., "ID")
# queryModel.setHeaderData(1, __.H.., "Name")
# queryModel.setHeaderData(2, __.H.., "Sex")
# queryModel.setHeaderData(3, __.H.., "Age")
# # Get all the records of the table
# sql _ "SELECT * FROM student"
# queryModel.setQuery(sql, db)
# totalRecordCount _ queryModel.rowCount()
# __ totalRecordCount % pageRecordCount __ 0:
# totalPage _ totalRecordCount / pageRecordCount
# ____
# totalPage _ int(totalRecordCount / pageRecordCount) + 1
# # Show Page 1
# sql _ "SELECT * FROM student limit %d,%d" % (0, pageRecordCount)
# queryModel.setQuery(sql, db)
#
# ___ onPrevPage
# currentPage -_ 1
# limitIndex _ (currentPage - 1) * pageRecordCount
# queryRecord(limitIndex)
# updateStatus()
#
# ___ onNextPage
# currentPage +_ 1
# limitIndex _ (currentPage - 1) * pageRecordCount
# queryRecord(limitIndex)
# updateStatus()
#
# ___ onSwitchPage
# szText _ switchPageLineEdit.t__()
# pattern _ re.compile('^[0-9]+$')
# match _ pattern.match(szText)
# __ no. match:
# ?MB...information "Tips", "please enter a number.")
# r_
# __ szText __ "":
# ?MB...information "Tips", "Please enter a jump page.")
# r_
# pageIndex _ int(szText)
# __ pageIndex > totalPage or pageIndex < 1:
# ?MB...information "Tips", "No page specified, re-enter.")
# r_
#
# limitIndex _ (pageIndex - 1) * pageRecordCount
# queryRecord(limitIndex)
# currentPage _ pageIndex
# updateStatus()
#
# # Query records based on paging
# ___ queryRecord limitIndex):
# sql _ "SELECT * FROM student limit %d,%d" % (limitIndex, pageRecordCount)
# queryModel.setQuery(sql)
#
# # Update Spatial Status
# ___ updateStatus
# currentPageLabel.sT..(st.(currentPage))
# totalPageLabel.sT..(st.(totalPage))
# __ currentPage <_ 1:
# prevButton.sE.. F..
# ____
# prevButton.sE..( st.
#
# __ currentPage >_ totalPage:
# nextButton.sE.. F..
# ____
# nextButton.sE..( st.
#
# # Close database connection when interface is closed
# ___ closeEvent event):
# db.c..
#
# __ __name__ __ "__main__":
# app _ ?A..(___.a..
# window _ DataGrid()
# window.s..
# ___.e.. ?.e.. | [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
90141dabab155f6e69636187b33b83799277dfc1 | b966dde596adfdf3bd8522abd45e84fa599ec474 | /catapult_build/build_steps.py | 6cd057f68add796ae97394e557188208a0ed9669 | [
"BSD-3-Clause"
] | permissive | PratikshaDPai/catapult | 4bad688ccb724ae8ce88e1b7cca1c9536afd11ec | 8495de1eb482068897a88bbd3a59199d1a4525f5 | refs/heads/master | 2020-03-22T23:10:17.694564 | 2018-07-12T20:42:55 | 2018-07-12T21:15:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,562 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
import sys
# This is the list of tests to run. It is a dictionary with the following
# fields:
#
# name (required): The name of the step, to show on the buildbot status page.
# path (required): The path to the executable which runs the tests.
# additional_args (optional): An array of optional arguments.
# uses_app_engine_sdk (optional): True if app engine SDK must be in PYTHONPATH.
# uses_sandbox_env (optional): True if CHROME_DEVEL_SANDBOX must be in
# environment.
# disabled (optional): List of platforms the test is disabled on. May contain
# 'win', 'mac', 'linux', or 'android'.
# outputs_presentation_json (optional): If True, pass in --presentation-json
# argument to the test executable to allow it to update the buildbot status
# page. More details here:
# github.com/luci/recipes-py/blob/master/recipe_modules/generator_script/api.py
_CATAPULT_TESTS = [
{
'name': 'BattOr Smoke Tests',
'path': 'common/battor/battor/battor_wrapper_devicetest.py',
'disabled': ['android'],
},
{
'name': 'BattOr Unit Tests',
'path': 'common/battor/bin/run_py_tests',
'disabled': ['android'],
},
{
'name': 'Build Python Tests',
'path': 'catapult_build/bin/run_py_tests',
'disabled': ['android'],
},
{
'name': 'Common Tests',
'path': 'common/bin/run_tests',
},
{
'name': 'Dashboard Dev Server Tests Canary',
'path': 'dashboard/bin/run_dev_server_tests',
'additional_args': [
'--no-install-hooks',
'--no-use-local-chrome',
'--channel=canary'
],
'outputs_presentation_json': True,
'disabled': ['android'],
},
{
'name': 'Dashboard Dev Server Tests Stable',
'path': 'dashboard/bin/run_dev_server_tests',
'additional_args': [
'--no-install-hooks',
'--no-use-local-chrome',
'--channel=stable',
],
'outputs_presentation_json': True,
'disabled': ['android'],
},
{
'name': 'Dashboard Python Tests',
'path': 'dashboard/bin/run_py_tests',
'additional_args': ['--no-install-hooks'],
'uses_app_engine_sdk': True,
'disabled': ['android'],
},
{
'name': 'Dependency Manager Tests',
'path': 'dependency_manager/bin/run_tests',
},
{
'name': 'Devil Device Tests',
'path': 'devil/bin/run_py_devicetests',
'disabled': ['win', 'mac', 'linux']
},
{
'name': 'Devil Python Tests',
'path': 'devil/bin/run_py_tests',
'disabled': ['mac', 'win'],
},
{
'name': 'eslint Tests',
'path': 'common/eslint/bin/run_tests',
'disabled': ['android'],
},
{
'name': 'Native Heap Symbolizer Tests',
'path': 'tracing/bin/run_symbolizer_tests',
'disabled': ['android'],
},
{
'name': 'Py-vulcanize Tests',
'path': 'common/py_vulcanize/bin/run_py_tests',
'additional_args': ['--no-install-hooks'],
'disabled': ['android'],
},
{
'name': 'Systrace Tests',
'path': 'systrace/bin/run_tests',
},
{
'name': 'Snap-it Tests',
'path': 'telemetry/bin/run_snap_it_unittest',
'additional_args': [
'--browser=reference',
],
'uses_sandbox_env': True,
'disabled': ['android'],
},
{
'name': 'Telemetry Tests with Stable Browser (Desktop)',
'path': 'catapult_build/fetch_telemetry_deps_and_run_tests',
'additional_args': [
'--browser=reference',
'--start-xvfb'
],
'uses_sandbox_env': True,
'disabled': ['android'],
},
{
'name': 'Telemetry Tests with Stable Browser (Android)',
'path': 'catapult_build/fetch_telemetry_deps_and_run_tests',
'additional_args': [
'--browser=reference',
'--device=android',
'--jobs=1'
],
'uses_sandbox_env': True,
'disabled': ['win', 'mac', 'linux']
},
{
'name': 'Telemetry Integration Tests with Stable Browser',
'path': 'telemetry/bin/run_browser_tests',
'additional_args': [
'SimpleBrowserTest',
'--browser=reference',
],
'uses_sandbox_env': True,
'disabled': ['android', 'linux'], # TODO(nedn): enable this on linux
},
{
'name': 'Tracing Dev Server Tests Canary',
'path': 'tracing/bin/run_dev_server_tests',
'additional_args': [
'--no-install-hooks',
'--no-use-local-chrome',
'--channel=canary'
],
'outputs_presentation_json': True,
'disabled': ['android'],
},
{
'name': 'Tracing Dev Server Tests Stable',
'path': 'tracing/bin/run_dev_server_tests',
'additional_args': [
'--no-install-hooks',
'--no-use-local-chrome',
'--channel=stable',
],
'outputs_presentation_json': True,
'disabled': ['android'],
},
{
'name': 'Tracing D8 Tests',
'path': 'tracing/bin/run_vinn_tests',
'disabled': ['android'],
},
{
'name': 'Tracing Python Tests',
'path': 'tracing/bin/run_py_tests',
'additional_args': ['--no-install-hooks'],
'disabled': ['android'],
},
{
'name': 'Typ unittest',
'path': 'third_party/typ/run',
'additional_args': ['tests'],
'disabled': [
'android',
'win'], # TODO(crbug.com/851498): enable typ unittests on Win
},
{
'name': 'Vinn Tests',
'path': 'third_party/vinn/bin/run_tests',
'disabled': ['android'],
},
{
'name': 'NetLog Viewer Dev Server Tests',
'path': 'netlog_viewer/bin/run_dev_server_tests',
'additional_args': [
'--no-install-hooks',
'--no-use-local-chrome',
],
'disabled': ['android', 'win', 'mac', 'linux'],
},
]
_STALE_FILE_TYPES = ['.pyc', '.pseudo_lock']
def main(args=None):
  """Send list of test to run to recipes generator_script.

  Builds one generator_script step dict per enabled test (plus mandatory
  setup steps) and writes the list as JSON to --output-json.
  See documentation at:
  github.com/luci/recipes-py/blob/master/recipe_modules/generator_script/api.py

  Args:
    args: optional argv list; None lets argparse read sys.argv.
  """
  parser = argparse.ArgumentParser(description='Run catapult tests.')
  parser.add_argument('--api-path-checkout', help='Path to catapult checkout')
  parser.add_argument('--app-engine-sdk-pythonpath',
                      help='PYTHONPATH to include app engine SDK path')
  parser.add_argument('--platform',
                      help='Platform name (linux, mac, or win)')
  parser.add_argument('--output-json', help='Output for buildbot status page')
  args = parser.parse_args(args)
  steps = [{
      # Always remove stale files first. Not listed as a test above
      # because it is a step and not a test, and must be first.
      'name': 'Remove Stale files',
      'cmd': ['python',
              os.path.join(args.api_path_checkout,
                           'catapult_build', 'remove_stale_files.py'),
              args.api_path_checkout, ','.join(_STALE_FILE_TYPES)]
  }]
  if args.platform == 'android':
    # On Android, we need to prepare the devices a bit before using them in
    # tests. These steps are not listed as tests above because they aren't
    # tests and because they must precede all tests.
    steps.extend([
        {
            'name': 'Android: Recover Devices',
            'cmd': ['python',
                    os.path.join(args.api_path_checkout, 'devil', 'devil',
                                 'android', 'tools', 'device_recovery.py')],
        },
        {
            'name': 'Android: Provision Devices',
            'cmd': ['python',
                    os.path.join(args.api_path_checkout, 'devil', 'devil',
                                 'android', 'tools', 'provision_devices.py')],
        },
        {
            'name': 'Android: Device Status',
            'cmd': ['python',
                    os.path.join(args.api_path_checkout, 'devil', 'devil',
                                 'android', 'tools', 'device_status.py')],
        },
    ])
  # Translate each _CATAPULT_TESTS entry into a step dict, skipping tests
  # disabled on the requested platform.
  for test in _CATAPULT_TESTS:
    if args.platform in test.get('disabled', []):
      continue
    step = {
        'name': test['name'],
        'env': {}
    }
    # vpython doesn't integrate well with app engine SDK yet
    if test.get('uses_app_engine_sdk'):
      executable = 'python'
    else:
      executable = 'vpython.bat' if sys.platform == 'win32' else 'vpython'
    step['cmd'] = [
        executable, os.path.join(args.api_path_checkout, test['path'])]
    # Systrace needs to know which device type to target.
    if step['name'] == 'Systrace Tests':
      step['cmd'] += ['--device=' + args.platform]
    if test.get('additional_args'):
      step['cmd'] += test['additional_args']
    if test.get('uses_app_engine_sdk'):
      step['env']['PYTHONPATH'] = args.app_engine_sdk_pythonpath
    if test.get('uses_sandbox_env'):
      step['env']['CHROME_DEVEL_SANDBOX'] = '/opt/chromium/chrome_sandbox'
    if test.get('outputs_presentation_json'):
      step['outputs_presentation_json'] = True
    steps.append(step)
  # Emit the step list for the buildbot recipe to consume.
  with open(args.output_json, 'w') as outfile:
    json.dump(steps, outfile)
if __name__ == '__main__':
  main(sys.argv[1:])
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
ddc1653e2d5e0653376da3e54a9f3d2962cead2d | a5698f82064aade6af0f1da21f504a9ef8c9ac6e | /huaweicloud-sdk-kms/huaweicloudsdkkms/v1/model/create_datakey_without_plaintext_request.py | 2f468730dd17b485d1b5a2c0f176dd90c739fe61 | [
"Apache-2.0"
] | permissive | qizhidong/huaweicloud-sdk-python-v3 | 82a2046fbb7d62810984399abb2ca72b3b47fac6 | 6cdcf1da8b098427e58fc3335a387c14df7776d0 | refs/heads/master | 2023-04-06T02:58:15.175373 | 2021-03-30T10:47:29 | 2021-03-30T10:47:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,539 | py | # coding: utf-8
import pprint
import re
import six
class CreateDatakeyWithoutPlaintextRequest:
    """Request model for the create-datakey-without-plaintext API.

    Attributes:
        openapi_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """

    # Attributes whose values must be masked in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'version_id': 'str',
        'body': 'CreateDatakeyRequestBody'
    }

    attribute_map = {
        'version_id': 'version_id',
        'body': 'body'
    }

    def __init__(self, version_id=None, body=None):
        """CreateDatakeyWithoutPlaintextRequest - a model defined in huaweicloud sdk"""
        self._version_id = None
        self._body = None
        self.discriminator = None
        self.version_id = version_id
        if body is not None:
            self.body = body

    @property
    def version_id(self):
        """Gets the version_id of this CreateDatakeyWithoutPlaintextRequest.

        :return: The version_id of this CreateDatakeyWithoutPlaintextRequest.
        :rtype: str
        """
        return self._version_id

    @version_id.setter
    def version_id(self, version_id):
        """Sets the version_id of this CreateDatakeyWithoutPlaintextRequest.

        :param version_id: The version_id of this CreateDatakeyWithoutPlaintextRequest.
        :type: str
        """
        self._version_id = version_id

    @property
    def body(self):
        """Gets the body of this CreateDatakeyWithoutPlaintextRequest.

        :return: The body of this CreateDatakeyWithoutPlaintextRequest.
        :rtype: CreateDatakeyRequestBody
        """
        return self._body

    @body.setter
    def body(self, body):
        """Sets the body of this CreateDatakeyWithoutPlaintextRequest.

        :param body: The body of this CreateDatakeyWithoutPlaintextRequest.
        :type: CreateDatakeyRequestBody
        """
        self._body = body

    def to_dict(self):
        """Return the model's properties as a dict, masking sensitive fields."""
        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recurse into nested model objects inside lists.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recurse into nested model objects inside dict values.
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            elif attr in self.sensitive_list:
                result[attr] = "****"
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is the same model type with equal attributes."""
        if not isinstance(other, CreateDatakeyWithoutPlaintextRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
8acc3647db6c1987fba61153233611a325584767 | fb7efe44f4d9f30d623f880d0eb620f3a81f0fbd | /ui/base/ime/DEPS | db197e21a60fea15ba02e213495b3979bdc5986e | [
"BSD-3-Clause"
] | permissive | wzyy2/chromium-browser | 2644b0daf58f8b3caee8a6c09a2b448b2dfe059c | eb905f00a0f7e141e8d6c89be8fb26192a88c4b7 | refs/heads/master | 2022-11-23T20:25:08.120045 | 2018-01-16T06:41:26 | 2018-01-16T06:41:26 | 117,618,467 | 3 | 2 | BSD-3-Clause | 2022-11-20T22:03:57 | 2018-01-16T02:09:10 | null | UTF-8 | Python | false | false | 42 | include_rules = [
"+chromeos/system",
]
| [
"jacob-chen@iotwrt.com"
] | jacob-chen@iotwrt.com | |
7e1e14422e5c20ca970f9ac3f337d8f70a1365ef | 99985209fb8fa250886db43ee8c4bd3de9ec4ae6 | /Iris_flower_predict/iris.py | 6b6d64458aedccec48cfa916357bf540cc5498f8 | [] | no_license | Arpankarar/mini-data_science-projects | c7f986eacfb0901187981cbe29b978b38a3dddac | 1721226f505a6e41d3588ecc9cf57c1171c7f776 | refs/heads/master | 2023-06-24T02:01:28.406361 | 2021-07-31T08:58:48 | 2021-07-31T08:58:48 | 390,634,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,277 | py | import streamlit as st
import pandas as pd
from sklearn import datasets
from sklearn.ensemble import RandomForestClassifier
st.write("""
# Simple Iris Flower Prediction App
This app predicts the **Iris flower** type!
""")
st.sidebar.header('User Input Parameters')
def user_input_features():
sepal_length = st.sidebar.slider('Sepal length', 4.3, 7.9, 5.4)
sepal_width = st.sidebar.slider('Sepal width', 2.0, 4.4, 3.4)
petal_length = st.sidebar.slider('Petal length', 1.0, 6.9, 1.3)
petal_width = st.sidebar.slider('Petal width', 0.1, 2.5, 0.2)
data = {'sepal_length': sepal_length,
'sepal_width': sepal_width,
'petal_length': petal_length,
'petal_width': petal_width}
features = pd.DataFrame(data, index=[0])
return features
df = user_input_features()
st.subheader('User Input parameters')
st.write(df)
iris = datasets.load_iris()
X = iris.data
Y = iris.target
clf = RandomForestClassifier()
clf.fit(X, Y)
prediction = clf.predict(df)
prediction_proba = clf.predict_proba(df)
st.subheader('Class labels and their corresponding index number')
st.write(iris.target_names)
st.subheader('Prediction')
st.write(iris.target_names[prediction])
st.subheader('Prediction Probability')
st.write(prediction_proba) | [
"you@example.com"
] | you@example.com |
01e44159ebc667da0d152037de82f1570e56a198 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02796/s829176641.py | db5435558a236408a2920925ea8b5565137ac7e4 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | _, *_XL = open(0).read().split()
XL = list(zip(*[map(int, iter(_XL))]*2))
ans = 0
t = -10**10 # 数直線のマイナス方向からスタートする
arms = [(X-L, X+L) for X, L in XL]
arms_sorted = sorted(arms, key=lambda k:k[1])
ans = 0
for s, e in arms_sorted:
if t <= s:
t = e
ans += 1
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
f605fca32647d4acd9406b4bfc685b273229d17d | 640729166526060d5761458745a74a02b6fa4213 | /notebooks/solutions/day4-01-02.py | a893dd5268ede5252fcd6ca9cc82e21c705788dc | [] | no_license | AI-BlackBelt/yellow | 9dbb8b973a39d29ee071fb46fa164127836e0dbb | 560dba2268fa8dd4e4fc327bfe009b79784022ab | refs/heads/master | 2020-04-21T12:25:49.082139 | 2020-03-23T15:47:56 | 2020-03-23T15:47:56 | 169,561,717 | 8 | 3 | null | 2019-05-27T15:53:20 | 2019-02-07T11:32:49 | Jupyter Notebook | UTF-8 | Python | false | false | 150 | py | knn = KNeighborsClassifier(n_neighbors=best_n_neighbors)
knn.fit(X_train, y_train)
print("test-set score: {:.3f}".format(knn.score(X_test, y_test)))
| [
"g.louppe@gmail.com"
] | g.louppe@gmail.com |
36a49e6a2e38193458fb28a4d0df0bb692bf122d | dd5b7241ae3deed66254466d6e089cbb15ff0623 | /build/driver/depth_camera/iai_kinect2-master/kinect2_bridge/catkin_generated/pkg.installspace.context.pc.py | da100c56a86553c3e2284bf0a413954d9dea2c01 | [
"BSD-2-Clause"
] | permissive | lty1994/ros_project | 189dde5842a5bcb9392a70383a37822ccafb7de2 | d55ce07c592d545f9a43330fa6bf96af6651575f | refs/heads/master | 2020-04-14T16:14:22.878838 | 2019-01-04T05:31:46 | 2019-01-04T05:31:46 | 163,946,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 483 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/autolabor/catkin_ws/install/include".split(';') if "/home/autolabor/catkin_ws/install/include" != "" else []
PROJECT_CATKIN_DEPENDS = "kinect2_registration".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "kinect2_bridge"
PROJECT_SPACE_DIR = "/home/autolabor/catkin_ws/install"
PROJECT_VERSION = "0.0.1"
| [
"lty2008@vip.qq.com"
] | lty2008@vip.qq.com |
92ef1e32731390c9733ad4e327465dc20028c848 | 6be845bf70a8efaf390da28c811c52b35bf9e475 | /windows/Resources/Python/Core/Lib/encodings/utf_32.py | ba16204eb2a42eb85e80a813dddfdc8cb0d0d369 | [] | no_license | kyeremalprime/ms | 228194910bf2ed314d0492bc423cc687144bb459 | 47eea098ec735b2173ff0d4e5c493cb8f04e705d | refs/heads/master | 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,658 | py | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: utf_32.py
"""
Python 'utf-32' Codec
"""
import codecs
import sys
encode = codecs.utf_32_encode
def decode(input, errors='strict'):
return codecs.utf_32_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.encoder = None
return
def encode(self, input, final=False):
if self.encoder is None:
result = codecs.utf_32_encode(input, self.errors)[0]
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.encoder = None
return
def getstate(self):
if self.encoder is None:
return 2
else:
return 0
def setstate(self, state):
if state:
self.encoder = None
elif sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors='strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.decoder = None
return
def _buffer_decode(self, input, errors, final):
if self.decoder is None:
output, consumed, byteorder = codecs.utf_32_ex_decode(input, errors, 0, final)
if byteorder == -1:
self.decoder = codecs.utf_32_le_decode
elif byteorder == 1:
self.decoder = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError('UTF-32 stream does not start with BOM')
return (output, consumed)
else:
return self.decoder(input, self.errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.decoder = None
return
def getstate(self):
state = codecs.BufferedIncrementalDecoder.getstate(self)[0]
if self.decoder is None:
return (state, 2)
else:
addstate = int((sys.byteorder == 'big') != (self.decoder is codecs.utf_32_be_decode))
return (
state, addstate)
def setstate(self, state):
codecs.BufferedIncrementalDecoder.setstate(self, state)
state = state[1]
if state == 0:
self.decoder = codecs.utf_32_be_decode if sys.byteorder == 'big' else codecs.utf_32_le_decode
elif state == 1:
self.decoder = codecs.utf_32_le_decode if sys.byteorder == 'big' else codecs.utf_32_be_decode
else:
self.decoder = None
return
class StreamWriter(codecs.StreamWriter):
def __init__(self, stream, errors='strict'):
self.encoder = None
codecs.StreamWriter.__init__(self, stream, errors)
return
def reset(self):
codecs.StreamWriter.reset(self)
self.encoder = None
return
def encode(self, input, errors='strict'):
if self.encoder is None:
result = codecs.utf_32_encode(input, errors)
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, errors)
return
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors='strict'):
object, consumed, byteorder = codecs.utf_32_ex_decode(input, errors, 0, False)
if byteorder == -1:
self.decode = codecs.utf_32_le_decode
elif byteorder == 1:
self.decode = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError, 'UTF-32 stream does not start with BOM'
return (object, consumed)
def getregentry():
return codecs.CodecInfo(name='utf-32', encode=encode, decode=decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter) | [
"kyeremalprime@gmail.com"
] | kyeremalprime@gmail.com |
bacac8bbe869e43f529e22b4dfca17df8767215b | 0107ea802fe10efda2c9cb8ccd43e21f5e3217f2 | /hrnet/detr.py | 24036c94561b0033108e6b9510ad5afd35ef6c2b | [
"Apache-2.0"
] | permissive | openseg-group/detr | e5c25f238c346a3fded97eb2242a85c2d9a5412b | d947cf39ab716aedf7502103fc51b85b9d82822b | refs/heads/master | 2023-01-31T16:43:10.818178 | 2020-12-20T08:26:32 | 2020-12-20T08:26:32 | 267,306,642 | 0 | 0 | Apache-2.0 | 2020-10-14T07:14:08 | 2020-05-27T11:56:10 | null | UTF-8 | Python | false | false | 21,058 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
DETR model and criterion classes.
"""
import torch
import torch.nn.functional as F
from torch import nn
from util import box_ops
from util.misc import (NestedTensor, nested_tensor_from_tensor_list,
accuracy, get_world_size, interpolate,
is_dist_avail_and_initialized)
from models.matcher import build_matcher
from models.segmentation import (DETRsegm, PostProcessPanoptic, PostProcessSegm,
dice_loss, sigmoid_focal_loss)
from .backbone import build_backbone
from .transformer import build_transformer
from .sparse_transformer import build_sparse_transformer
from .linear_transformer import build_linear_transformer
from .cross_transformer import build_cross_transformer
import os
import pdb
class DETR(nn.Module):
""" This is the DETR module that performs object detection """
def __init__(self, backbone, transformer, num_classes, num_queries, aux_loss=False):
""" Initializes the model.
Parameters:
backbone: torch module of the backbone to be used. See backbone.py
transformer: torch module of the transformer architecture. See transformer.py
num_classes: number of object classes
num_queries: number of object queries, ie detection slot. This is the maximal number of objects
DETR can detect in a single image. For COCO, we recommend 100 queries.
aux_loss: True if auxiliary decoding losses (loss at each decoder layer) are to be used.
"""
super().__init__()
self.num_queries = num_queries
self.transformer = transformer
hidden_dim = transformer.d_model
self.class_embed = nn.Linear(hidden_dim, num_classes + 1)
self.bbox_embed = MLP(hidden_dim, hidden_dim, 4, 3)
self.query_embed = nn.Embedding(num_queries, hidden_dim)
self.backbone = backbone
self.aux_loss = aux_loss
if int(os.environ.get("encoder_high_resolution", 0)):
self.input_proj = nn.Conv2d((backbone.num_channels//8) * 15, hidden_dim, kernel_size=1)
else:
self.input_proj = nn.Conv2d(backbone.num_channels, hidden_dim, kernel_size=1)
def forward(self, samples: NestedTensor):
""" The forward expects a NestedTensor, which consists of:
- samples.tensor: batched images, of shape [batch_size x 3 x H x W]
- samples.mask: a binary mask of shape [batch_size x H x W], containing 1 on padded pixels
It returns a dict with the following elements:
- "pred_logits": the classification logits (including no-object) for all queries.
Shape= [batch_size x num_queries x (num_classes + 1)]
- "pred_boxes": The normalized boxes coordinates for all queries, represented as
(center_x, center_y, height, width). These values are normalized in [0, 1],
relative to the size of each individual image (disregarding possible padding).
See PostProcess for information on how to retrieve the unnormalized bounding box.
- "aux_outputs": Optional, only returned when auxilary losses are activated. It is a list of
dictionnaries containing the two above keys for each decoder layer.
"""
if isinstance(samples, (list, torch.Tensor)):
samples = nested_tensor_from_tensor_list(samples)
features, pos = self.backbone(samples)
src, mask = features[-1].decompose()
src_4x, mask_4x = features[0].decompose()
src_8x, mask_8x = features[1].decompose()
src_16x, mask_16x = features[2].decompose()
src_list = [src_4x, src_8x, src_16x]
mask_list = [mask_4x, mask_8x, mask_16x]
pos_embed_list = [pos[0], pos[1], pos[2]]
if int(os.environ.get("encoder_high_resolution", 0)):
if int(os.environ.get("encoder_resolution", 8)) == 4:
_, _, h_4x, w_4x = src_list[0].size()
feat1 = src_list[0]
feat2 = F.interpolate(src_list[1], size=(h_4x, w_4x), mode="bilinear", align_corners=True)
feat3 = F.interpolate(src_list[2], size=(h_4x, w_4x), mode="bilinear", align_corners=True)
feat4 = F.interpolate(src, size=(h_4x, w_4x), mode="bilinear", align_corners=True)
feats = torch.cat([feat1, feat2, feat3, feat4], 1)
hs = self.transformer(self.input_proj(feats), mask_4x, self.query_embed.weight, pos[0],
src_list, mask_list, pos_embed_list)[0]
elif int(os.environ.get("encoder_resolution", 8)) == 8:
_, _, h_8x, w_8x = src_list[1].size()
feat1 = F.interpolate(src_list[0], size=(h_8x, w_8x), mode="bilinear", align_corners=True)
feat2 = src_list[1]
feat3 = F.interpolate(src_list[2], size=(h_8x, w_8x), mode="bilinear", align_corners=True)
feat4 = F.interpolate(src, size=(h_8x, w_8x), mode="bilinear", align_corners=True)
feats = torch.cat([feat1, feat2, feat3, feat4], 1)
hs = self.transformer(self.input_proj(feats), mask_8x, self.query_embed.weight, pos[1],
src_list, mask_list, pos_embed_list)[0]
elif int(os.environ.get("encoder_resolution", 8)) == 16:
_, _, h_16x, w_16x = src_list[2].size()
feat1 = F.interpolate(src_list[0], size=(h_16x, w_16x), mode="bilinear", align_corners=True)
feat2 = F.interpolate(src_list[1], size=(h_16x, w_16x), mode="bilinear", align_corners=True)
feat3 = src_list[2]
feat4 = F.interpolate(src, size=(h_16x, w_16x), mode="bilinear", align_corners=True)
feats = torch.cat([feat1, feat2, feat3, feat4], 1)
hs = self.transformer(self.input_proj(feats), mask_16x, self.query_embed.weight, pos[2],
src_list, mask_list, pos_embed_list)[0]
else:
_, _, h_32x, w_32x = src.size()
feat1 = F.interpolate(src_list[0], size=(h_32x, w_32x), mode="bilinear", align_corners=True)
feat2 = F.interpolate(src_list[1], size=(h_32x, w_32x), mode="bilinear", align_corners=True)
feat3 = F.interpolate(src_list[2], size=(h_32x, w_32x), mode="bilinear", align_corners=True)
feat4 = src
feats = torch.cat([feat1, feat2, feat3, feat4], 1)
hs = self.transformer(self.input_proj(feats), mask, self.query_embed.weight, pos[-1],
src_list, mask_list, pos_embed_list)[0]
else:
hs = self.transformer(self.input_proj(src), mask, self.query_embed.weight, pos[-1],
src_list, mask_list, pos_embed_list)[0]
outputs_class = self.class_embed(hs)
outputs_coord = self.bbox_embed(hs).sigmoid()
out = {'pred_logits': outputs_class[-1], 'pred_boxes': outputs_coord[-1]}
if self.aux_loss:
out['aux_outputs'] = self._set_aux_loss(outputs_class, outputs_coord)
return out
@torch.jit.unused
def _set_aux_loss(self, outputs_class, outputs_coord):
# this is a workaround to make torchscript happy, as torchscript
# doesn't support dictionary with non-homogeneous values, such
# as a dict having both a Tensor and a list.
return [{'pred_logits': a, 'pred_boxes': b}
for a, b in zip(outputs_class[:-1], outputs_coord[:-1])]
class SetCriterion(nn.Module):
""" This class computes the loss for DETR.
The process happens in two steps:
1) we compute hungarian assignment between ground truth boxes and the outputs of the model
2) we supervise each pair of matched ground-truth / prediction (supervise class and box)
"""
def __init__(self, num_classes, matcher, weight_dict, eos_coef, losses):
""" Create the criterion.
Parameters:
num_classes: number of object categories, omitting the special no-object category
matcher: module able to compute a matching between targets and proposals
weight_dict: dict containing as key the names of the losses and as values their relative weight.
eos_coef: relative classification weight applied to the no-object category
losses: list of all the losses to be applied. See get_loss for list of available losses.
"""
super().__init__()
self.num_classes = num_classes
self.matcher = matcher
self.weight_dict = weight_dict
self.eos_coef = eos_coef
self.losses = losses
empty_weight = torch.ones(self.num_classes + 1)
empty_weight[-1] = self.eos_coef
self.register_buffer('empty_weight', empty_weight)
def loss_labels(self, outputs, targets, indices, num_boxes, log=True):
"""Classification loss (NLL)
targets dicts must contain the key "labels" containing a tensor of dim [nb_target_boxes]
"""
assert 'pred_logits' in outputs
src_logits = outputs['pred_logits']
idx = self._get_src_permutation_idx(indices)
target_classes_o = torch.cat([t["labels"][J] for t, (_, J) in zip(targets, indices)])
target_classes = torch.full(src_logits.shape[:2], self.num_classes,
dtype=torch.int64, device=src_logits.device)
target_classes[idx] = target_classes_o
loss_ce = F.cross_entropy(src_logits.transpose(1, 2), target_classes, self.empty_weight)
losses = {'loss_ce': loss_ce}
if log:
# TODO this should probably be a separate loss, not hacked in this one here
losses['class_error'] = 100 - accuracy(src_logits[idx], target_classes_o)[0]
return losses
@torch.no_grad()
def loss_cardinality(self, outputs, targets, indices, num_boxes):
""" Compute the cardinality error, ie the absolute error in the number of predicted non-empty boxes
This is not really a loss, it is intended for logging purposes only. It doesn't propagate gradients
"""
pred_logits = outputs['pred_logits']
device = pred_logits.device
tgt_lengths = torch.as_tensor([len(v["labels"]) for v in targets], device=device)
# Count the number of predictions that are NOT "no-object" (which is the last class)
card_pred = (pred_logits.argmax(-1) != pred_logits.shape[-1] - 1).sum(1)
card_err = F.l1_loss(card_pred.float(), tgt_lengths.float())
losses = {'cardinality_error': card_err}
return losses
def loss_boxes(self, outputs, targets, indices, num_boxes):
"""Compute the losses related to the bounding boxes, the L1 regression loss and the GIoU loss
targets dicts must contain the key "boxes" containing a tensor of dim [nb_target_boxes, 4]
The target boxes are expected in format (center_x, center_y, w, h), normalized by the image size.
"""
assert 'pred_boxes' in outputs
idx = self._get_src_permutation_idx(indices)
src_boxes = outputs['pred_boxes'][idx]
target_boxes = torch.cat([t['boxes'][i] for t, (_, i) in zip(targets, indices)], dim=0)
loss_bbox = F.l1_loss(src_boxes, target_boxes, reduction='none')
losses = {}
losses['loss_bbox'] = loss_bbox.sum() / num_boxes
loss_giou = 1 - torch.diag(box_ops.generalized_box_iou(
box_ops.box_cxcywh_to_xyxy(src_boxes),
box_ops.box_cxcywh_to_xyxy(target_boxes)))
losses['loss_giou'] = loss_giou.sum() / num_boxes
return losses
def loss_masks(self, outputs, targets, indices, num_boxes):
"""Compute the losses related to the masks: the focal loss and the dice loss.
targets dicts must contain the key "masks" containing a tensor of dim [nb_target_boxes, h, w]
"""
assert "pred_masks" in outputs
src_idx = self._get_src_permutation_idx(indices)
tgt_idx = self._get_tgt_permutation_idx(indices)
src_masks = outputs["pred_masks"]
src_masks = src_masks[src_idx]
masks = [t["masks"] for t in targets]
# TODO use valid to mask invalid areas due to padding in loss
target_masks, valid = nested_tensor_from_tensor_list(masks).decompose()
target_masks = target_masks.to(src_masks)
target_masks = target_masks[tgt_idx]
# upsample predictions to the target size
src_masks = interpolate(src_masks[:, None], size=target_masks.shape[-2:],
mode="bilinear", align_corners=False)
src_masks = src_masks[:, 0].flatten(1)
target_masks = target_masks.flatten(1)
target_masks = target_masks.view(src_masks.shape)
losses = {
"loss_mask": sigmoid_focal_loss(src_masks, target_masks, num_boxes),
"loss_dice": dice_loss(src_masks, target_masks, num_boxes),
}
return losses
def _get_src_permutation_idx(self, indices):
# permute predictions following indices
batch_idx = torch.cat([torch.full_like(src, i) for i, (src, _) in enumerate(indices)])
src_idx = torch.cat([src for (src, _) in indices])
return batch_idx, src_idx
def _get_tgt_permutation_idx(self, indices):
# permute targets following indices
batch_idx = torch.cat([torch.full_like(tgt, i) for i, (_, tgt) in enumerate(indices)])
tgt_idx = torch.cat([tgt for (_, tgt) in indices])
return batch_idx, tgt_idx
def get_loss(self, loss, outputs, targets, indices, num_boxes, **kwargs):
loss_map = {
'labels': self.loss_labels,
'cardinality': self.loss_cardinality,
'boxes': self.loss_boxes,
'masks': self.loss_masks
}
assert loss in loss_map, f'do you really want to compute {loss} loss?'
return loss_map[loss](outputs, targets, indices, num_boxes, **kwargs)
def forward(self, outputs, targets):
""" This performs the loss computation.
Parameters:
outputs: dict of tensors, see the output specification of the model for the format
targets: list of dicts, such that len(targets) == batch_size.
The expected keys in each dict depends on the losses applied, see each loss' doc
"""
outputs_without_aux = {k: v for k, v in outputs.items() if k != 'aux_outputs'}
# Retrieve the matching between the outputs of the last layer and the targets
indices = self.matcher(outputs_without_aux, targets)
# Compute the average number of target boxes accross all nodes, for normalization purposes
num_boxes = sum(len(t["labels"]) for t in targets)
num_boxes = torch.as_tensor([num_boxes], dtype=torch.float, device=next(iter(outputs.values())).device)
if is_dist_avail_and_initialized():
torch.distributed.all_reduce(num_boxes)
num_boxes = torch.clamp(num_boxes / get_world_size(), min=1).item()
# Compute all the requested losses
losses = {}
for loss in self.losses:
losses.update(self.get_loss(loss, outputs, targets, indices, num_boxes))
# In case of auxiliary losses, we repeat this process with the output of each intermediate layer.
if 'aux_outputs' in outputs:
for i, aux_outputs in enumerate(outputs['aux_outputs']):
indices = self.matcher(aux_outputs, targets)
for loss in self.losses:
if loss == 'masks':
# Intermediate masks losses are too costly to compute, we ignore them.
continue
kwargs = {}
if loss == 'labels':
# Logging is enabled only for the last layer
kwargs = {'log': False}
l_dict = self.get_loss(loss, aux_outputs, targets, indices, num_boxes, **kwargs)
l_dict = {k + f'_{i}': v for k, v in l_dict.items()}
losses.update(l_dict)
return losses
class PostProcess(nn.Module):
""" This module converts the model's output into the format expected by the coco api"""
@torch.no_grad()
def forward(self, outputs, target_sizes):
""" Perform the computation
Parameters:
outputs: raw outputs of the model
target_sizes: tensor of dimension [batch_size x 2] containing the size of each images of the batch
For evaluation, this must be the original image size (before any data augmentation)
For visualization, this should be the image size after data augment, but before padding
"""
out_logits, out_bbox = outputs['pred_logits'], outputs['pred_boxes']
assert len(out_logits) == len(target_sizes)
assert target_sizes.shape[1] == 2
prob = F.softmax(out_logits, -1)
scores, labels = prob[..., :-1].max(-1)
# convert to [x0, y0, x1, y1] format
boxes = box_ops.box_cxcywh_to_xyxy(out_bbox)
# and from relative [0, 1] to absolute [0, height] coordinates
img_h, img_w = target_sizes.unbind(1)
scale_fct = torch.stack([img_w, img_h, img_w, img_h], dim=1)
boxes = boxes * scale_fct[:, None, :]
results = [{'scores': s, 'labels': l, 'boxes': b} for s, l, b in zip(scores, labels, boxes)]
return results
class MLP(nn.Module):
""" Very simple multi-layer perceptron (also called FFN)"""
def __init__(self, input_dim, hidden_dim, output_dim, num_layers):
super().__init__()
self.num_layers = num_layers
h = [hidden_dim] * (num_layers - 1)
self.layers = nn.ModuleList(nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]))
def forward(self, x):
for i, layer in enumerate(self.layers):
x = F.relu(layer(x)) if i < self.num_layers - 1 else layer(x)
return x
def build(args):
# the `num_classes` naming here is somewhat misleading.
# it indeed corresponds to `max_obj_id + 1`, where max_obj_id
# is the maximum id for a class in your dataset. For example,
# COCO has a max_obj_id of 90, so we pass `num_classes` to be 91.
# As another example, for a dataset that has a single class with id 1,
# you should pass `num_classes` to be 2 (max_obj_id + 1).
# For more details on this, check the following discussion
# https://github.com/facebookresearch/detr/issues/108#issuecomment-650269223
num_classes = 20 if args.dataset_file != 'coco' else 91
if args.dataset_file == "coco_panoptic":
# for panoptic, we just add a num_classes that is large enough to hold
# max_obj_id + 1, but the exact value doesn't really matter
num_classes = 250
device = torch.device(args.device)
backbone = build_backbone(args)
if int(os.environ.get("cross_transformer", 0)):
transformer = build_cross_transformer(args)
elif int(os.environ.get("sparse_transformer", 0)):
transformer = build_sparse_transformer(args)
elif int(os.environ.get("linear_transformer", 0)):
transformer = build_linear_transformer(args)
else:
transformer = build_transformer(args)
model = DETR(
backbone,
transformer,
num_classes=num_classes,
num_queries=args.num_queries,
aux_loss=args.aux_loss,
)
if args.masks:
model = DETRsegm(model, freeze_detr=(args.frozen_weights is not None))
matcher = build_matcher(args)
weight_dict = {'loss_ce': 1, 'loss_bbox': args.bbox_loss_coef}
weight_dict['loss_giou'] = args.giou_loss_coef
if args.masks:
weight_dict["loss_mask"] = args.mask_loss_coef
weight_dict["loss_dice"] = args.dice_loss_coef
# TODO this is a hack
if args.aux_loss:
aux_weight_dict = {}
for i in range(args.dec_layers - 1):
aux_weight_dict.update({k + f'_{i}': v for k, v in weight_dict.items()})
weight_dict.update(aux_weight_dict)
losses = ['labels', 'boxes', 'cardinality']
if args.masks:
losses += ["masks"]
criterion = SetCriterion(num_classes, matcher=matcher, weight_dict=weight_dict,
eos_coef=args.eos_coef, losses=losses)
criterion.to(device)
postprocessors = {'bbox': PostProcess()}
if args.masks:
postprocessors['segm'] = PostProcessSegm()
if args.dataset_file == "coco_panoptic":
is_thing_map = {i: i <= 90 for i in range(201)}
postprocessors["panoptic"] = PostProcessPanoptic(is_thing_map, threshold=0.85)
return model, criterion, postprocessors
| [
"yhyuan@pku.edu.cn"
] | yhyuan@pku.edu.cn |
8a8938f84a7c687b66e0d1b18d01c112f7864e2f | 32fb5fbaf49cf767f276fc6cee0c31d2f63de159 | /node_modules/secp256k1/build/config.gypi | 3a1a2f612a9e9bb9a943190cc0e0235962abe708 | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | laddukavala/ladduBabu | 9d7b771516e78a3091da44ff89b01a2aaeb50eb6 | a47238f8db435fc2561bd4b994caaa2e5e36c555 | refs/heads/master | 2022-12-14T20:11:30.634919 | 2019-06-01T06:29:45 | 2019-06-01T06:29:45 | 189,697,323 | 0 | 0 | null | 2022-12-09T04:46:01 | 2019-06-01T06:11:05 | JavaScript | UTF-8 | Python | false | false | 4,933 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"coverage": "false",
"debug_devtools": "node",
"debug_http2": "false",
"debug_nghttp2": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_gyp_path": "tools/icu/icu-system.gyp",
"icu_small": "false",
"llvm_version": 0,
"node_byteorder": "little",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "false",
"node_module_version": 59,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local/Cellar/node/9.3.0_1",
"node_release_urlbase": "",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "59.dylib",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"xcode_version": "9.0",
"nodedir": "/Users/amareshjana/.node-gyp/9.3.0",
"standalone_static_library": 1,
"dry_run": "",
"legacy_bundling": "",
"save_dev": "",
"viewer": "man",
"commit_hooks": "true",
"browser": "",
"only": "",
"also": "",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"shell": "/bin/bash",
"maxsockets": "50",
"init_author_url": "",
"shrinkwrap": "true",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"git_tag_version": "true",
"cert": "",
"local_address": "",
"long": "",
"registry": "https://registry.npmjs.org/",
"fetch_retries": "2",
"message": "%s",
"key": "",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"logs_max": "10",
"always_auth": "",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"heading": "npm",
"searchlimit": "20",
"fetch_retry_mintimeout": "10000",
"offline": "",
"read_only": "",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"userconfig": "/Users/amareshjana/.npmrc",
"init_module": "/Users/amareshjana/.npm-init.js",
"cidr": "",
"node_version": "9.3.0",
"user": "",
"save": "true",
"editor": "vi",
"auth_type": "legacy",
"ignore_prepublish": "",
"tag": "latest",
"script_shell": "",
"progress": "true",
"global": "",
"searchstaleness": "900",
"optional": "true",
"ham_it_up": "",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"depth": "Infinity",
"sso_poll_frequency": "500",
"rebuild_bundle": "true",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"tag_version_prefix": "v",
"strict_ssl": "true",
"sso_type": "oauth",
"scripts_prepend_node_path": "warn-only",
"save_prefix": "^",
"ca": "",
"group": "20",
"fetch_retry_factor": "10",
"dev": "",
"save_exact": "",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"otp": "",
"cache": "/Users/amareshjana/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"package_lock_only": "",
"save_optional": "",
"user_agent": "npm/5.6.0 node/v9.3.0 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"umask": "0022",
"init_version": "1.0.0",
"node_options": "",
"scope": "",
"git": "git",
"init_author_name": "",
"unsafe_perm": "true",
"tmp": "/var/folders/78/030c1h4n5j9c7pz7r80xw0x80000gn/T",
"onload_script": "",
"prefix": "/usr/local",
"link": ""
}
}
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
6b9226b90a7de587d71b30cad44ecff48e5b36f2 | 69096ca0d67b3d6809a2fe05af51341df62ebc60 | /tibiapy/errors.py | 7d46deea3b3e3d421b16d9b41493ecba7f3cf1fd | [
"Apache-2.0"
] | permissive | Galarzaa90/tibia.py | 6f648aff8b6fbac7be4886435711f7ff08420402 | f8c145dd597c558398bac50e035711e34863b571 | refs/heads/main | 2023-08-17T15:50:31.354488 | 2023-08-17T14:00:07 | 2023-08-17T14:00:07 | 143,892,750 | 30 | 12 | Apache-2.0 | 2023-08-24T17:24:19 | 2018-08-07T15:25:23 | Python | UTF-8 | Python | false | false | 2,521 | py | """Exceptions thrown by tibia.py."""
from enum import Enum
from typing import Type, Any
class TibiapyException(Exception):
"""Base exception for the tibiapy module.
All exceptions thrown by the module are inherited from this.
"""
pass
class InvalidContent(TibiapyException):
"""Exception thrown when the provided content is unrelated for the calling function.
This usually means that the content provided belongs to a different website or section of the website.
This serves as a way to differentiate those cases from a parsing that returned no results (e.g. Character not found)
In some cases this can mean that Tibia.com's format has changed and the library needs updating.
Attributes
----------
original: :class:`Exception`
The original exception that caused this exception.
"""
def __init__(self, message, original=None):
super().__init__(message)
self.original = original
class NetworkError(TibiapyException):
"""Exception thrown when there was a network error trying to fetch a resource from the web.
Attributes
----------
original: :class:`Exception`
The original exception that caused this exception.
fetching_time: :class:`float`
The time between the request and the response.
"""
def __init__(self, message, original=None, fetching_time=0):
super().__init__(message)
self.original = original
self.fetching_time = fetching_time
class Forbidden(NetworkError):
"""A subclass of :class:`NetworkError` thrown when Tibia.com returns a 403 status code.
Tibia.com returns a 403 status code when it detects that too many requests are being done.
This has its own subclass to let the user decide to treat this differently than other network errors.
"""
class SiteMaintenanceError(NetworkError):
"""A subclass of :class:`NetworkError` thrown when Tibia.com is down for maintenance.
When Tibia.com is under maintenance, all sections of the website redirect to maintenance.tibia.com.
"""
class EnumValueError(ValueError):
def __init__(self, enum: Type[Enum], value: Any) -> None:
self.enum = enum
super().__init__(f"{value!r} is not a valid value for {enum.__name__}. Expected names ({self.names}) or values ({self.values})")
@property
def names(self):
return ", ".join(e.name for e in self.enum)
@property
def values(self):
return ", ".join(str(e.value) for e in self.enum)
| [
"allan.galarza@gmail.com"
] | allan.galarza@gmail.com |
6311dfd903716c2ca23ce2202aa867bc5a567a78 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_116/1755.py | 8c9f6f86daffb3dad052205a0bf1c9fee9d771c6 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,502 | py | import sys
inFile = open(sys.argv[1], 'r')
outFile = open(sys.argv[1][:-2]+"out", "w")
number_of_input = int(inFile.readline().rstrip("\n"))
#print number_of_input
count = 0
while(count != number_of_input):
matrix = []
for line in range(0,4):
attrib = inFile.readline().rstrip("\n")
attrib = list(attrib)
matrix.append(attrib)
inFile.readline().rstrip("\n")
count = count + 1
rTest = 0;
cTest = 0;
LRTest = 0;
RLTest = 0;
draw = 0;
dot = 0;
who = ""
diagTest = 0;
for i in range(0,1):
LR = ""
RL = ""
for j in range(0,4):
LR = LR + matrix[j][j]
RL = RL + matrix[j][3-j]
xLRCount = LR.count("X")
oLRCount = LR.count("O")
xRLCount = RL.count("X")
oRLCount = RL.count("O")
if(xLRCount >= 3):
if(xLRCount == 4):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
else:
if(("O" not in LR) and ("." not in LR) and ("T" in LR)):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
if(oLRCount >= 3):
if(oLRCount == 4):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
else:
if(("X" not in LR) and ("." not in LR) and ("T" in LR)):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
if(xRLCount >= 3):
if(xRLCount == 4):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
else:
if(("O" not in RL) and ("." not in RL) and ("T" in RL)):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
if(oRLCount >= 3):
if(oRLCount == 4):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
else:
if(("X" not in RL) and ("." not in RL) and ("T" in RL)):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
diagTest = 1
break;
if(diagTest == 0):
##print "Continue"
for r in range(0,4):
LR = ""
TB = ""
for c in range(0,4):
LR = LR + matrix[r][c]
TB = TB + matrix[c][r]
xLRCount = LR.count("X")
oLRCount = LR.count("O")
xTBCount = TB.count("X")
oTBCount = TB.count("O")
if(xLRCount >= 3):
if(xLRCount == 4):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
rTest = 1
break;
else:
if(("O" not in LR) and ("." not in LR) and ("T" in LR)):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
rTest = 1
break;
if(oLRCount >= 3):
if(oLRCount == 4):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
rTest = 1
break;
else:
if(("X" not in LR) and ("." not in LR) and ("T" in LR)):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
rTest = 1
break;
if(xTBCount >= 3):
if(xTBCount == 4):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
cTest = 1
break;
else:
if(("O" not in TB) and ("." not in TB) and ("T" in TB)):
won = "X"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
cTest = 1
break;
if(oTBCount >= 3):
if(oTBCount == 4):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
cTest = 1
break;
else:
if(("X" not in TB) and ("." not in TB) and ("T" in TB)):
won = "O"
#print "Case #%d: %c won" %(count, won)
outFile.write("Case #%d: %c won\n" %(count, won))
cTest = 1
break;
if(cTest == 0 and rTest == 0):
for r in range(0,4):
for c in range(0,4):
if(matrix[r][c] == "."):
dot = dot + 1
if(dot > 0):
#print "Case #%d: Game has not completed" %(count)
outFile.write("Case #%d: Game has not completed\n" %(count))
else:
#print "Case #%d: Draw" %(count)
outFile.write("Case #%d: Draw\n" %(count))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
3405e2abb735e5a7131c3bcfa7208aef3d54ecfc | 5a56592fb347f650cd3c7ada273bf58902c8c925 | /05_proteins_to_graph.py | c61886adf64c7a28c39d8467c3f76d6333e2bdc4 | [] | no_license | chelseaju/ProteinGraph | 05a1541719442966a76a9f8e11bc2552f41ada75 | 75a96a4eab4a7b59b18be4db209c855f2912ab1a | refs/heads/master | 2016-09-08T10:26:50.338110 | 2016-01-27T00:58:36 | 2016-01-27T00:58:36 | 30,887,654 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,874 | py | """
Retrieve proteins from a given family
Convert these proteins into graph format
"""
import argparse, re, os, time, random
from pdb import *
from graph import *
from echo import *
POSITIVE = []
def build_pfam_reference(pfam_ref, pfam_id):
fh = open(pfam_ref, 'r')
for line in fh:
data = line.split("\t")
pdb_id = data[0]
chain = data[1]
pfam = data[4]
if(not pdb_id == "PDB_ID"):
pdb_name = pdb_id + "_" + chain
pfam_match = re.match(r'(.*)\.(\d+)', pfam)
if(pfam_match):
pfam = pfam_match.group(1)
if(pfam == pfam_id):
POSITIVE.append(pdb_name)
fh.close()
echo("Building Pfam Reference")
# positive graphs
def retrieve_graph(pfam_id, edge_info, dir):
positive_count = 0
positive_candidates = set(POSITIVE)
# select_candidates = random.sample(positive_candidates, 11)
for pdb_chain in sorted(positive_candidates):
(pdb, chain) = pdb_chain.split('_')
# distance file name
pdb_dst_file = dir + pdb_chain + ".dist"
# graph file name
pdb_graph_file = dir + pdb_chain + ".txt"
# parse pdb data
pdb_info = parse_pdb_by_id(pdb, chain)
if(pdb_info):
# comput distance
pw_dist = pairwise_distance(pdb_info, pdb_dst_file)
# convert structure to graph
title = pfam_id+ " " + pdb_chain
pdb_to_graph(pw_dist, pdb_graph_file, edge_info, positive_count, title)
positive_count = positive_count + 1
if(positive_count % 100 == 0):
time.sleep(3)
return positive_count
def main(parser):
options = parser.parse_args()
edge_ref = options.eref
fam_ref = options.fref
fam_id = options.fam
ftype = options.ftype
dir = options.dir
count = options.count
if(dir[-1] != "/"):
dir += "/"
# create directory for selected pfam
fam_dir = dir + fam_id + "/"
os.system("mkdir -p %s " %(fam_dir))
# build references
edge_info = build_edge_guideline(edge_ref)
if(ftype == "pfam"):
build_pfam_reference(fam_ref, fam_id)
elif(ftype == "scop"):
build_scop_reference(fam_ref, fam_id)
num_protein = retrieve_graph(fam_id, edge_info, fam_dir)
if(count and int(count) != num_protein):
echo("Warning! %s contains %s proteins, but only has %d proteins" %(fam_id, count, num_protein))
if(num_protein < 10):
os.system("rm -rf %s " %(fam_dir))
echo("%s does not have more than 10 proteins" %(fam_id))
else:
echo("Retrieving %d proteins for %s" %(num_protein, fam_id))
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='05_proteins_to_graph.py')
parser.add_argument("-e", "--edge_reference", dest = "eref", type=str, help="edge reference file", required = True)
parser.add_argument("-r", "--family_reference", dest = "fref", type=str, help="pfam or scop association file", required = True)
parser.add_argument("-f", "--family_id", dest = "fam", type=str, help="family id", required = True)
parser.add_argument("-t", "--family_type", dest = "ftype", type=str, help="pfam or scop", required = True)
parser.add_argument("-d", "--directory", dest = "dir", type=str, help="directory for output", required = True)
parser.add_argument("-c", "--count", dest = "count", type=str, help="number of protein", required = False)
main(parser)
| [
"chelseaju@ucla.edu"
] | chelseaju@ucla.edu |
3746a660384a515254a4dccf0078e0b6055607fc | 48e294e8cec03960710b10fe6774b8e7145f4d50 | /documentation/environment_canada/ec_density.py | 09567cbf9b16c4a11ab57d39bc738bcf1d7d6a66 | [
"Unlicense"
] | permissive | NOAA-ORR-ERD/OilLibrary | 891744dc9877c40fe55a0f38712d9cc4d3fd12fc | a09a51721e43b040d19549531c0370efc956ebd0 | refs/heads/master | 2021-12-23T16:00:49.425884 | 2021-12-01T22:33:11 | 2021-12-01T22:33:11 | 57,332,172 | 11 | 15 | NOASSERTION | 2021-09-16T08:51:49 | 2016-04-28T20:37:11 | Python | UTF-8 | Python | false | false | 4,510 | py |
from oil_library.models import Density
from ec_xl_parse import (get_oil_properties_by_name,
get_oil_properties_by_category)
from ec_oil_props import get_oil_weathering
def get_oil_densities(oil_columns, field_indexes):
    """Collect all density measurements for an oil record.

    The source spreadsheet stores density under two separate categories
    (a 15C column and a combined 0/5C column), so each temperature is
    extracted by its own helper and the results are concatenated in
    0C, 5C, 15C order.
    """
    weathering = get_oil_weathering(oil_columns, field_indexes)
    collected = []
    for extractor in (get_oil_densities_at_0c,
                      get_oil_densities_at_5c,
                      get_oil_densities_at_15c):
        collected.extend(extractor(oil_columns, field_indexes, weathering))
    return collected
def get_oil_densities_at_15c(oil_columns, field_indexes, weathering):
    """Build Density records from the 'density at 15C' spreadsheet category.

    Converts g/ml to kg/m^3 and drops rows with a missing or zero value.
    """
    props = get_oil_properties_by_category(oil_columns, field_indexes,
                                           'density_at_15_c_g_ml_astm_d5002')
    names = props.keys()
    records = []
    for idx, row in enumerate(zip(*props.values())):
        record = dict(zip(names, (cell[0].value for cell in row)))
        # Attach the bookkeeping fields the Density model expects.
        record['idx'] = idx
        record['weathering'] = weathering[idx]
        record['ref_temp_k'] = 273.15 + 15.0
        # The raw value is g/ml; kg/m^3 is a factor of 1000 larger.
        grams_per_ml = record.pop('density_15_c_g_ml')
        record['kg_m_3'] = None if grams_per_ml is None else grams_per_ml * 1000.0
        records.append(record)
    return [Density(**r) for r in records
            if r['kg_m_3'] not in (None, 0.0)]
def get_oil_densities_at_0c(oil_columns, field_indexes, weathering):
    """Build Density records at 0C from the combined 0/5C category.

    The spreadsheet stores 0C and 5C densities in one category; this
    helper keeps only the 0C value, converts g/ml to kg/m^3 and drops
    rows whose value is missing or zero.
    """
    densities = []
    props = get_oil_properties_by_category(oil_columns, field_indexes,
                                           'density_at_0_5_c_g_ml_astm_d5002')
    prop_names = props.keys()
    # Each zipped row holds one measurement across all property columns.
    for idx, vals in enumerate(zip(*props.values())):
        density_obj = dict(zip(prop_names, [v[0].value for v in vals]))
        # add some properties to the oil that we expect
        density_obj['idx'] = idx
        density_obj['weathering'] = weathering[idx]
        density_obj['ref_temp_k'] = 273.15
        # Raw value is g/ml; scale to kg/m^3 when present.
        density_obj['kg_m_3'] = density_obj['density_0_c_g_ml']
        if density_obj['kg_m_3'] is not None:
            density_obj['kg_m_3'] *= 1000.0
        # prune some properties that we don't want in our object
        del density_obj['density_0_c_g_ml']
        del density_obj['density_5_c_g_ml']
        densities.append(density_obj)
    return [Density(**d) for d in densities
            if d['kg_m_3'] not in (None, 0.0)]
def get_oil_densities_at_5c(oil_columns, field_indexes, weathering):
    """Build Density records at 5C from the combined 0/5C category.

    Mirror of get_oil_densities_at_0c, but keeps the 5C value and uses
    a 278.15 K reference temperature.
    """
    densities = []
    props = get_oil_properties_by_category(oil_columns, field_indexes,
                                           'density_at_0_5_c_g_ml_astm_d5002')
    prop_names = props.keys()
    # Each zipped row holds one measurement across all property columns.
    for idx, vals in enumerate(zip(*props.values())):
        density_obj = dict(zip(prop_names, [v[0].value for v in vals]))
        # add some properties to the oil that we expect
        density_obj['idx'] = idx
        density_obj['weathering'] = weathering[idx]
        density_obj['ref_temp_k'] = 273.15 + 5.0
        # Raw value is g/ml; scale to kg/m^3 when present.
        density_obj['kg_m_3'] = density_obj['density_5_c_g_ml']
        if density_obj['kg_m_3'] is not None:
            density_obj['kg_m_3'] *= 1000.0
        # prune some properties that we don't want in our object
        del density_obj['density_0_c_g_ml']
        del density_obj['density_5_c_g_ml']
        densities.append(density_obj)
    return [Density(**d) for d in densities
            if d['kg_m_3'] not in (None, 0.0)]
def get_oil_api(oil_columns, field_indexes):
    """Return the non-null API gravity values recorded for the oil."""
    cells = get_oil_properties_by_name(oil_columns, field_indexes,
                                       'api_gravity', 'calculated_api_gravity')
    gravities = []
    for cell in cells:
        value = cell[0].value
        if value is not None:
            gravities.append(value)
    return gravities
| [
"james.makela@noaa.gov"
] | james.makela@noaa.gov |
c1d12bd6d916dad983128b2cdd959b18503023ab | 237c1abede4dfb24649087a62bbe7af0cb5f4b80 | /pwncat/commands/connect.py | e36cb6510d7fc4d68087a3d465648c3dce385a1f | [] | no_license | jimmyfish/pwncat | 9f17a2a7807ac0c25a86a9ebcdaee8892f07b47c | 59ab922de9f2674bcdcd60564749c106ccd3a2a6 | refs/heads/master | 2023-02-24T22:49:47.945083 | 2021-02-02T06:10:32 | 2021-02-02T06:10:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,579 | py | #!/usr/bin/env python3
from colorama import Fore
import ipaddress
import os.path
import socket
import re
import paramiko
from prompt_toolkit import prompt
from rich.progress import Progress, BarColumn
import pwncat
from pwncat.util import console
from pwncat.commands.base import (
CommandDefinition,
Complete,
Parameter,
StoreForAction,
StoreConstOnce,
)
# from pwncat.persist import PersistenceError
from pwncat.modules.persist import PersistError
class Command(CommandDefinition):
    """
    Connect to a remote victim. This command is only valid prior to an established
    connection. This command attempts to act similar to common tools such as netcat
    and ssh simultaneously. Connection strings come in two forms. Firstly, pwncat
    can act like netcat. Using `connect [host] [port]` will connect to a bind shell,
    while `connect -l [port]` will listen for a reverse shell on the specified port.

    The second form is more explicit. A connection string can be used of the form
    `[protocol://][user[:password]@][host][:port]`. If a user is specified, the
    default protocol is `ssh`. If no user is specified, the default protocol is
    `connect` (connect to bind shell). If no host is specified or `host` is "0.0.0.0"
    then the `bind` protocol is used (listen for reverse shell). The currently available
    protocols are:

    - ssh
    - connect
    - bind

    The `--identity/-i` argument is ignored unless the `ssh` protocol is used.
    """

    PROG = "connect"
    ARGS = {
        "--config,-c": Parameter(
            Complete.LOCAL_FILE,
            help="Path to a configuration script to execute prior to connecting",
        ),
        "--identity,-i": Parameter(
            Complete.LOCAL_FILE,
            help="The private key for authentication for SSH connections",
        ),
        "--listen,-l": Parameter(
            Complete.NONE,
            action="store_true",
            help="Enable the `bind` protocol (supports netcat-like syntax)",
        ),
        "--port,-p": Parameter(
            Complete.NONE,
            help="Alternative port number argument supporting netcat-like syntax",
        ),
        "--list": Parameter(
            Complete.NONE,
            action="store_true",
            help="List all known hosts and their installed persistence",
        ),
        "connection_string": Parameter(
            Complete.NONE,
            metavar="[protocol://][user[:password]@][host][:port]",
            help="Connection string describing the victim to connect to",
            nargs="?",
        ),
        "pos_port": Parameter(
            Complete.NONE,
            nargs="?",
            metavar="port",
            help="Alternative port number argument supporting netcat-like syntax",
        ),
    }
    LOCAL = True

    # Splits "[protocol://][user[:password]@][host][:port]" into named groups.
    # Note: the "password" group keeps its leading ":" and the "port" group
    # keeps its leading ":" -- the port is stripped below.
    CONNECTION_PATTERN = re.compile(
        r"""^(?P<protocol>[-a-zA-Z0-9_]*://)?((?P<user>[^:@]*)?(?P<password>:(\\@|[^@])*)?@)?(?P<host>[^:]*)?(?P<port>:[0-9]*)?$"""
    )

    def run(self, args):
        """Parse the connection string/flags, infer a protocol and connect."""
        protocol = None
        user = None
        password = None
        host = None
        port = None
        try_reconnect = False

        # Default configuration file lookup (cwd first, then ./data).
        if not args.config and os.path.exists("./pwncatrc"):
            args.config = "./pwncatrc"
        elif not args.config and os.path.exists("./data/pwncatrc"):
            args.config = "./data/pwncatrc"

        if args.config:
            try:
                # Load the configuration
                with open(args.config, "r") as filp:
                    pwncat.victim.command_parser.eval(filp.read(), args.config)
            except OSError as exc:
                console.log(f"[red]error[/red]: {exc}")
                return

        if args.list:
            # Grab a list of installed persistence methods for all hosts
            # persist.gather will retrieve entries for all hosts if no
            # host is currently connected.
            modules = list(pwncat.modules.run("persist.gather"))

            # Create a mapping of host hash to host object and array of
            # persistence methods
            hosts = {
                host.hash: (host, [])
                for host in pwncat.victim.session.query(pwncat.db.Host).all()
            }

            for module in modules:
                hosts[module.persist.host.hash][1].append(module)

            for host_hash, (host, modules) in hosts.items():
                console.print(
                    f"[magenta]{host.ip}[/magenta] - "
                    f"[red]{host.distro}[/red] - "
                    f"[yellow]{host_hash}[/yellow]"
                )
                for module in modules:
                    console.print(f" - {str(module)}")

            return

        if args.connection_string:
            m = self.CONNECTION_PATTERN.match(args.connection_string)
            protocol = m.group("protocol")
            user = m.group("user")
            # NOTE(review): this group retains the leading ":" from the
            # connection string; verify downstream consumers expect that.
            password = m.group("password")
            host = m.group("host")
            port = m.group("port")

        if protocol is not None and args.listen:
            console.log(
                f"[red]error[/red]: --listen is not compatible with an explicit connection string"
            )
            return

        # A port may come from the connection string, --port, or the trailing
        # positional argument -- but only one of them.
        if (
            sum([port is not None, args.port is not None, args.pos_port is not None])
            > 1
        ):
            console.log(f"[red]error[/red]: multiple ports specified")
            return

        if args.port is not None:
            port = args.port
        if args.pos_port is not None:
            port = args.pos_port

        if port is not None:
            try:
                port = int(port.lstrip(":"))
            # FIX: was a bare "except:"; only the int() conversion can fail
            # here, and a bare except also swallowed KeyboardInterrupt.
            except ValueError:
                console.log(f"[red]error[/red]: {port}: invalid port number")
                return

        # Attempt to assume a protocol based on context
        if protocol is None:
            if args.listen:
                protocol = "bind://"
            elif args.port is not None:
                protocol = "connect://"
            elif user is not None:
                protocol = "ssh://"
                try_reconnect = True
            elif host == "" or host == "0.0.0.0":
                protocol = "bind://"
            elif args.connection_string is None:
                self.parser.print_help()
                return
            else:
                protocol = "connect://"
                try_reconnect = True

        if protocol != "ssh://" and args.identity is not None:
            console.log(f"[red]error[/red]: --identity is only valid for ssh protocols")
            return

        if pwncat.victim.client is not None:
            console.log("connection [red]already active[/red]")
            return

        if protocol == "reconnect://" or try_reconnect:
            # An implicit reconnect attempt only warns on failure and falls
            # through to a fresh connection; an explicit one is fatal.
            level = "[yellow]warning[/yellow]" if try_reconnect else "[red]error[/red]"
            try:
                addr = ipaddress.ip_address(socket.gethostbyname(host))
                row = (
                    pwncat.victim.session.query(pwncat.db.Host)
                    .filter_by(ip=str(addr))
                    .first()
                )
                if row is None:
                    console.log(f"{level}: {str(addr)}: not found in database")
                    host_hash = None
                else:
                    host_hash = row.hash
            except ValueError:
                # Not an IP/hostname; treat the "host" as a raw host hash.
                host_hash = host

            # Reconnect to the given host
            if host_hash is not None:
                try:
                    pwncat.victim.reconnect(host_hash, password, user)
                    return
                except Exception as exc:
                    console.log(f"{level}: {host}: {exc}")

            if protocol == "reconnect://" and not try_reconnect:
                # This means reconnection failed, and we had an explicit
                # reconnect protocol
                return

        if protocol == "bind://":
            # Listen for a reverse shell.
            if not host or host == "":
                host = "0.0.0.0"

            if port is None:
                console.log(f"[red]error[/red]: no port specified")
                return

            with Progress(
                f"bound to [blue]{host}[/blue]:[cyan]{port}[/cyan]",
                BarColumn(bar_width=None),
                transient=True,
            ) as progress:
                task_id = progress.add_task("listening", total=1, start=False)
                # Create the socket server
                server = socket.create_server((host, port), reuse_port=True)

                try:
                    # Wait for a connection
                    (client, address) = server.accept()
                except KeyboardInterrupt:
                    progress.update(task_id, visible=False)
                    progress.log("[red]aborting[/red] listener")
                    return

                progress.update(task_id, visible=False)
                progress.log(
                    f"[green]received[/green] connection from [blue]{address[0]}[/blue]:[cyan]{address[1]}[/cyan]"
                )

            pwncat.victim.connect(client)
        elif protocol == "connect://":
            # Connect out to a bind shell.
            if not host:
                console.log("[red]error[/red]: no host address provided")
                return

            if port is None:
                console.log(f"[red]error[/red]: no port specified")
                return

            with Progress(
                f"connecting to [blue]{host}[/blue]:[cyan]{port}[/cyan]",
                BarColumn(bar_width=None),
                transient=True,
            ) as progress:
                task_id = progress.add_task("connecting", total=1, start=False)
                # Connect to the remote host
                client = socket.create_connection((host, port))

                progress.update(task_id, visible=False)
                progress.log(
                    f"connection to "
                    f"[blue]{host}[/blue]:[cyan]{port}[/cyan] [green]established[/green]"
                )

            pwncat.victim.connect(client)
        elif protocol == "ssh://":
            # Full SSH session via paramiko.
            if port is None:
                port = 22

            if not user or user is None:
                self.parser.error("you must specify a user")

            if not (password or args.identity):
                password = prompt("Password: ", is_password=True)

            try:
                # Connect to the remote host's ssh server
                sock = socket.create_connection((host, port))
            except Exception as exc:
                console.log(f"[red]error[/red]: {str(exc)}")
                return

            # Create a paramiko SSH transport layer around the socket
            t = paramiko.Transport(sock)
            try:
                t.start_client()
            except paramiko.SSHException:
                sock.close()
                console.log("[red]error[/red]: ssh negotiation failed")
                return

            if args.identity:
                try:
                    # Load the private key for the user
                    key = paramiko.RSAKey.from_private_key_file(args.identity)
                # FIX: was a bare "except:" which also caught
                # KeyboardInterrupt/SystemExit; any ordinary failure
                # (encrypted key, unreadable file) falls through to a
                # passphrase prompt and a second load attempt.
                except Exception:
                    password = prompt("RSA Private Key Passphrase: ", is_password=True)
                    key = paramiko.RSAKey.from_private_key_file(args.identity, password)

                # Attempt authentication
                try:
                    t.auth_publickey(user, key)
                except paramiko.ssh_exception.AuthenticationException as exc:
                    console.log(f"[red]error[/red]: authentication failed: {exc}")
            else:
                try:
                    t.auth_password(user, password)
                except paramiko.ssh_exception.AuthenticationException as exc:
                    console.log(f"[red]error[/red]: authentication failed: {exc}")

            if not t.is_authenticated():
                t.close()
                sock.close()
                return

            # Open an interactive session
            chan = t.open_session()
            chan.get_pty()
            chan.invoke_shell()

            # Initialize the session!
            pwncat.victim.connect(chan)

            if user in pwncat.victim.users and password is not None:
                console.log(f"storing user password")
                pwncat.victim.users[user].password = password
            else:
                console.log("user not found in database; not storing password")
        else:
            # FIX: this branch referenced args.action, which is not a
            # declared argument and raised AttributeError for unknown
            # protocols (e.g. "foo://host"); report the protocol instead.
            console.log(f"[red]error[/red]: {protocol}: invalid protocol")
| [
"caleb.stewart94@gmail.com"
] | caleb.stewart94@gmail.com |
3dd1b171406bc7721b63a12c64a6e50545f3769d | 7c11455f583c73b4c7c57e61a78229231a3798f8 | /reinforcement_learning/models/base_model.py | 1cbd8f29f1bb922c7c1ce17ae3bcc78a6b4b9c25 | [] | no_license | twobackfromtheend/quRL | d75215f24fbe3c4bccfce5f627c20655e0329951 | 8b40017793ca591e8d8cba469bdd71c32b0a1d5a | refs/heads/master | 2020-04-03T17:45:54.975865 | 2019-01-19T12:24:42 | 2019-01-19T12:25:08 | 155,458,154 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | import numpy as np
class BaseModel:
def __init__(self, inputs: int, outputs: int):
self.inputs = inputs
self.outputs = outputs
def build_model(self):
raise NotImplementedError
def save_model(self, filename: str):
raise NotImplementedError
def create_copy(self):
return self.__class__(**self.__dict__)
def set_learning_rate(self, learning_rate: float):
raise NotImplementedError
def predict(self, x: np.ndarray) -> np.ndarray:
raise NotImplementedError
def train_on_batch(self, x: np.ndarray, y: np.ndarray):
raise NotImplementedError
| [
"harry1996@gmail.com"
] | harry1996@gmail.com |
81f9769d137023d2fb464ee389955229c53a657e | b53e3d57d31a47a98d87141e44a5f8940ee15bca | /src/programy/parser/template/nodes/select.py | e820619919546b11e755c04bc00a6a7969a28fcc | [
"MIT"
] | permissive | Chrissimple/program-y | 52177fcc17e75fb97ab3993a4652bcbe7906bd58 | 80d80f0783120c2341e6fc57e7716bbbf28a8b3f | refs/heads/master | 2020-03-29T13:20:08.162177 | 2018-09-26T19:09:20 | 2018-09-26T19:09:20 | 149,952,995 | 1 | 0 | null | 2018-09-23T06:11:04 | 2018-09-23T06:11:04 | null | UTF-8 | Python | false | false | 9,024 | py | """
Copyright (c) 2016-17 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
import json
from programy.parser.template.nodes.base import TemplateNode
from programy.parser.exceptions import ParserException
from programy.utils.text.text import TextUtils
class QueryBase(object):
    """A subject/predicate/object triple shared by the RDF select queries."""

    def __init__(self, subj, pred, obj):
        self._subj = subj
        self._pred = pred
        self._obj = obj

    @property
    def subj(self):
        return self._subj

    @property
    def pred(self):
        return self._pred

    @property
    def obj(self):
        return self._obj

    def to_xml(self, bot, clientid):
        """Serialise the raw (unresolved) triple as subj/pred/obj elements."""
        xml = "<subj>%s</subj>" % self._subj
        xml += "<pred>%s</pred>" % self._pred
        xml += "<obj>%s</obj>" % self._obj
        return xml

    def execute(self, bot, clientid):
        # Base class performs no matching; subclasses query the RDF store.
        return []

    def get_rdf(self, bot, clientid):
        """Resolve the triple against the bot.

        Names beginning with "?" are variables and kept verbatim; literal
        subjects and predicates are upper-cased to match the RDF store.
        """
        subj = self.subj.resolve(bot, clientid)
        if subj.startswith("?") is False:
            subj = subj.upper()

        pred = self.pred.resolve(bot, clientid)
        if pred.startswith("?") is False:
            pred = pred.upper()

        obj = self.obj.resolve(bot, clientid)

        return subj, pred, obj


class Query(QueryBase):
    """Positive RDF query: matches triples present in the store."""

    def __init__(self, subj, pred, obj):
        QueryBase.__init__(self, subj, pred, obj)

    def to_xml(self, bot, clientid):
        xml = "<q>"
        xml += super(Query, self).to_xml(bot, clientid)
        # FIX: the closing tag was previously computed but discarded
        # (the original read ``xml + "</q>"``), producing unbalanced XML.
        xml += "</q>"
        return xml

    def execute(self, bot, clientid, vars=None):
        """Run the query, returning tuples (no vars) or variable bindings."""
        subj, pred, obj = self.get_rdf(bot, clientid)
        if vars is None:
            tuples = bot.brain.rdf.matched_as_tuples(subj, pred, obj)
            results = []
            for atuple in tuples:
                results.append([["subj", atuple[0]], ["pred", atuple[1]], ["obj", atuple[2]]])
            return results
        else:
            tuples = bot.brain.rdf.match_to_vars(subj, pred, obj)
            return tuples
class NotQuery(QueryBase):
    """Negated RDF query: matches triples absent from the store."""

    def __init__(self, subj, pred, obj):
        QueryBase.__init__(self, subj, pred, obj)

    def get_xml_type(self):
        # Tag name used when serialising; not referenced within this class.
        return "notq"

    def to_xml(self, bot, clientid):
        xml = "<notq>"
        xml += super(NotQuery, self).to_xml(bot, clientid)
        xml += "</notq>"
        return xml

    def execute(self, bot, clientid, vars=None):
        """Run the negated query.

        With no vars a list of [subj, pred, obj] pairs is returned;
        otherwise the store resolves the "?" variables to bindings.
        """
        subj, pred, obj = self.get_rdf(bot, clientid)
        if vars is None:
            tuples = bot.brain.rdf.not_matched_as_tuples(subj, pred, obj)
            results = []
            for atuple in tuples:
                results.append([["subj", atuple[0]], ["pred", atuple[1]], ["obj", atuple[2]]])
            return results
        else:
            tuples = bot.brain.rdf.not_match_to_vars(subj, pred, obj)
            return tuples
class TemplateSelectNode(TemplateNode):
    """AIML <select> template node: runs RDF queries and encodes the result."""

    def __init__(self, queries=None, vars=None):
        TemplateNode.__init__(self)
        # Copy the caller's lists so later mutation cannot alias them.
        if queries is None:
            self._queries = []
        else:
            self._queries = queries[:]
        if vars is None:
            self._vars = []
        else:
            self._vars = vars[:]

    @property
    def queries(self):
        return self._queries

    @property
    def vars(self):
        return self._vars

    def encode_results(self, bot, results):
        # At some point put a config item here that allows us to switch between
        # XML, JSON, Yaml, and Pickle
        return json.dumps(results)

    def resolve_to_string(self, bot, clientid):
        """Execute all queries, unify on vars if any, and encode the result."""
        resolved = ""
        if self._queries:
            results = []
            for query in self._queries:
                query_results = query.execute(bot, clientid, self.vars)
                results.append(query_results)

            if self._vars:
                results = bot.brain.rdf.unify(self.vars, results)

            resolved = self.encode_results(bot, results)

        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logging.debug("[%s] resolved to [%s]", self.to_string(), resolved)
        return resolved

    def resolve(self, bot, clientid):
        # Resolution failures are logged and collapse to an empty string so
        # a bad query cannot abort the whole template.
        try:
            return self.resolve_to_string(bot, clientid)
        except Exception as excep:
            logging.exception(excep)
            return ""

    def to_string(self):
        return "SELECT"

    def to_xml(self, bot, clientid):
        xml = "<select>"
        if self._vars:
            xml += "<vars>"
            xml += " ".join(self._vars)
            xml += "</vars>"
        if self._queries:
            for query in self._queries:
                xml += query.to_xml(bot, clientid)
        xml += "</select>"
        return xml

    #######################################################################################################
    # SELECT_EXPRESSION ::== <person>TEMPLATE_EXPRESSION</person>

    def parse_vars(self, variables):
        """Split a whitespace-separated variable list into self.vars."""
        var_splits = variables.split(" ")
        for var_name in var_splits:
            self.vars.append(var_name)

    def _register_query_variable(self, child, tag_name):
        """Track a "?" variable used inside a query element.

        Variables referenced in a query but not declared in <vars> are
        appended to self.vars (with a debug note) so unification still works.
        """
        if child.text is not None and child.text.startswith("?"):
            if child.text not in self.vars:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    logging.debug("Variable [%s] defined in query element [%s], but not in vars!"%(child.text, tag_name))
                self.vars.append(child.text)

    def parse_query(self, graph, query_name, query):
        """Parse a <q> or <notq> element into a Query/NotQuery object."""
        # FIX: initialise all three so a missing child tag is reported as a
        # ParserException below rather than raising UnboundLocalError when
        # the None checks run.
        subj = None
        pred = None
        obj = None
        for child in query:
            tag_name = TextUtils.tag_from_text(child.tag)

            if tag_name == 'subj':
                self._register_query_variable(child, tag_name)
                subj = self.parse_children_as_word_node(graph, child)
            elif tag_name == 'pred':
                self._register_query_variable(child, tag_name)
                pred = self.parse_children_as_word_node(graph, child)
            elif tag_name == 'obj':
                self._register_query_variable(child, tag_name)
                obj = self.parse_children_as_word_node(graph, child)
            else:
                if logging.getLogger().isEnabledFor(logging.WARNING):
                    logging.warning("Unknown tag name [%s] in select query", tag_name)

        if subj is None:
            raise ParserException("<subj> element missing from select query")

        if pred is None:
            raise ParserException("<pred> element missing from select query")

        if obj is None:
            raise ParserException("<obj> element missing from select query")

        if query_name == "q":
            self._queries.append(Query(subj, pred, obj))
        else:
            self._queries.append(NotQuery(subj, pred, obj))

    def parse_expression(self, graph, expression):
        """Parse a <select> element: optional <vars> plus <q>/<notq> children."""
        variables = expression.findall('./vars')
        if variables:
            if len(variables) > 1:
                if logging.getLogger().isEnabledFor(logging.WARNING):
                    logging.warning("Multiple <vars> found in select tag, using first")
            self.parse_vars(variables[0].text)

        queries = expression.findall('./*')
        for query in queries:
            tag_name = TextUtils.tag_from_text(query.tag)
            if tag_name in ('q', 'notq'):
                self.parse_query(graph, tag_name, query)

        if self.children:
            raise ParserException("<select> node should not contains child text, use <select><vars></vars><q></q></select> only")
| [
"keith@keithsterling.com"
] | keith@keithsterling.com |
62bb90eaf5656edc3f0d26b8615d22a301750586 | 528976ba81dfab381e2273d9784c7e21d5c90c34 | /snake/main.py | d3523a15e019c4c8e174d2170158039e1a98978f | [
"MIT"
] | permissive | uaiuaief/Machine-Learning-for-Snake-Game | d65478d08b8d2bf4c06e0668dfdb41b1b6d0514c | 1a36ef4468eea9f80f44fb1b8d115790262320c5 | refs/heads/master | 2020-09-14T20:24:55.545625 | 2019-11-28T21:15:46 | 2019-11-28T21:15:46 | 223,244,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,568 | py | import random
from tkinter import mainloop, CENTER, NW
class SnakeAPP:
    """Application entry point holding the shared game/AI configuration.

    ``config`` is read by the rest of the package; it is built once, at
    class-definition time (so e.g. MUTATION_RATE is randomised once per
    program run, not per access).
    """

    # Central configuration for the board, UI screens and the genetic AI.
    config = {
        # Core game/board settings.
        "TICK": 100,
        "SNAKE_BODY_COLOR": 'green',
        "SNAKE_HEAD_COLOR": 'lime green',
        "BACKGROUND_COLOR": 'black',
        "FOOD_COLOR": 'red',
        "GRID_WIDTH_IN_SQMS": 20,
        "GRID_HEIGHT_IN_SQMS": 20,
        "GRID_SQM_SIZE": 30,
        "SCREEN_TITLE": 'Snake Game',

        # Starting Screen:
        "BUTTON_TEXT_FONT": 'verdana',
        # NOTE(review): anchor given as the literal 'nw' here, while the
        # score entries below use the imported NW/CENTER constants; tkinter
        # accepts both forms.
        "BUTTON_TEXT_ANCHOR": 'nw',
        "PLAY_BUTTON_TEXT_COORDINATES": (77, 215),
        # "PLAY_BUTTON_TEXT_COORDINATES": (100, 200),
        "PLAY_BUTTON_TEXT": "Normal Mode",
        "PLAY_BUTTON_TEXT_COLOR": "gray60",
        "PLAY_BUTTON_TEXT_SIZE": 50,
        "PLAY_BUTTON_ACTIVE_FILL": 'white',

        "AI_BUTTON_TEXT_COLOR": "gray60",
        "AI_BUTTON_TEXT_SIZE": 50,
        "AI_BUTTON_TEXT_COORDINATES": (158, 315),
        "AI_BUTTON_TEXT": 'AI Mode',
        "AI_BUTTON_ACTIVE_FILL": 'white',

        "SEE_FITTEST_BUTTON_TEXT_COLOR": "SkyBlue4",
        "SEE_FITTEST_BUTTON_TEXT_SIZE": 12,
        "SEE_FITTEST_BUTTON_TEXT_COORDINATES": (472, 10),
        "SEE_FITTEST_BUTTON_TEXT": 'Watch Best AI',
        "SEE_FITTEST_BUTTON_ACTIVE_FILL": 'SkyBlue2',

        "PLOT_DISPLAY_BUTTON_TEXT_COLOR": "SkyBlue4",
        "PLOT_DISPLAY_BUTTON_TEXT_SIZE": 12,
        "PLOT_DISPLAY_BUTTON_TEXT_COORDINATES": (388, 10),
        "PLOT_DISPLAY_BUTTON_TEXT": 'Charts |',
        "PLOT_DISPLAY_BUTTON_ACTIVE_FILL": 'SkyBlue2',

        # AI Screen:
        "MENU_BUTTON_TEXT_FONT": 'verdana',
        "MENU_BUTTON_TEXT_COORDINATES": (530, 10),
        "MENU_BUTTON_TEXT": "Menu",
        "MENU_BUTTON_TEXT_COLOR": "SkyBlue4",
        "MENU_BUTTON_TEXT_SIZE": 18,
        "MENU_BUTTON_ACTIVE_FILL": 'SkyBlue2',

        "TICK_BUTTON_TEXT_FONT": 'arial',
        "TICK_BUTTON_TEXT_COORDINATES": (150, 10),
        "TICK_BUTTON_TEXT": "Tick",
        "TICK_BUTTON_TEXT_COLOR": "gray80",
        "TICK_BUTTON_TEXT_SIZE": 8,

        "GRAPHICS_BUTTON_TEXT_FONT": 'arial',
        "GRAPHICS_BUTTON_TEXT_COORDINATES": (200, 10),
        "GRAPHICS_BUTTON_TEXT": "Graphics",
        "GRAPHICS_BUTTON_TEXT_COLOR": "gray80",
        "GRAPHICS_BUTTON_TEXT_SIZE": 8,

        # Score:
        # HIGH SCORE
        "HIGH_SCORE_TEXT_FONT": 'arial',
        "HIGH_SCORE_TEXT_ANCHOR": NW,
        "HIGH_SCORE_LABEL_TEXT_COLOR": "gray80",
        "HIGH_SCORE_LABEL_TEXT_SIZE": 10,
        "HIGH_SCORE_LABEL_TEXT_COORDINATES": (10, 10),
        # None entries are placeholders filled in at runtime by the view.
        "HIGH_SCORE_LABEL_TEXT": None,
        "HIGH_SCORE_LABEL_ACTIVE_FILL": None,

        # CURRENT SCORE
        "CURRENT_SCORE_TEXT_FONT": 'arial',
        "CURRENT_SCORE_TEXT_ANCHOR": CENTER,
        "CURRENT_SCORE_LABEL_TEXT_COLOR": "gray3",
        "CURRENT_SCORE_LABEL_TEXT_SIZE": 300,
        "CURRENT_SCORE_LABEL_TEXT_COORDINATES": None,
        "CURRENT_SCORE_LABEL_TEXT": None,
        "CURRENT_SCORE_LABEL_ACTIVE_FILL": None,

        # AI Configurations:
        "WATCHING_TICK": 0.03,
        "AI_TICK": 0.0,
        # Randomised once per run: a percentage drawn from [0.20, 5.00].
        "MUTATION_RATE": random.randint(20, 500)/100,
        "MUTATION_THRESHOLD": 48,
        "LIFE_SPAN": 45,
        "POPULATION_SIZE": 120,
        "APPLE_AMOUNT_TO_INCREASE": 45,

        # PLOT Configurations:
        'PLOT_X_AXIS_AMOUNT': 70,
        "PLOT_SIZE": (6.04, 5),
    }

    @staticmethod
    def run():
        """Start the app: build the View, show the menu and enter mainloop.

        The import is local to avoid a circular import between this module
        and snake.view at load time.
        """
        from snake.view import View
        view = View()
        view.goto_starting_screen()
        mainloop()
| [
"="
] | = |
380ab18c6dccc5590c8334095f89d932b866f7ae | b6721322ada8cc2820df67c5d28c568edb28cde9 | /DectoRoman_2019_2020.py | ab3070bf36643dc4704e30c6110caf9d4845908c | [] | no_license | Ticonderoga/CoursInfoL2 | 0d68914b2cc94e8df42b296524a16e4b88d6b44d | 8d65b2f269ca1bd0e923082f9506194600969f0c | refs/heads/master | 2021-04-11T03:38:09.547703 | 2020-05-15T14:32:10 | 2020-05-15T14:32:10 | 248,989,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 24 17:04:01 2020
@author: phil
"""
def DectoRoman(deci):
    """Convert a decimal integer to its Roman-numeral representation.

    Parameters
    ----------
    deci : int
        Number to convert (conventional Roman range is 1-3999).
        Values <= 0 yield an empty string, matching the original behaviour.

    Returns
    -------
    str
        The Roman numerals corresponding to ``deci``.
    """
    if deci <= 0:
        return ''
    # Value/symbol pairs in descending order; the greedy subtraction below
    # is the standard Roman-numeral construction.
    symbols = ((1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'),
               (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'),
               (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I'))
    parts = []
    for value, numeral in symbols:
        # How many times this symbol fits; divmod replaces the original
        # flag-driven while-loop that rescanned the whole table per symbol.
        count, deci = divmod(deci, value)
        if count:
            parts.append(numeral * count)
        if deci == 0:
            break
    return ''.join(parts)
if __name__ == "__main__":
    # Quick demonstration: 1924 -> MCMXXIV.
    print(DectoRoman(1924))
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
65cad5d0de4d5aa9756f02fbc08038d47c0d8101 | 381dc64b93f0bd8cb25553f2415a4add64eb1d39 | /arrange_good_one.py | 8d0b0d53c4648f03dacb07dabf4d00c856a60821 | [] | no_license | EmuxEvans/py_learn | ce1abd29e5ba8533b4208101ad52aebd057c2038 | 69476ec5b482eb4b4ddce2aff4ed6e934e08b7f1 | refs/heads/master | 2021-01-21T02:55:59.716050 | 2015-03-19T13:17:29 | 2015-03-19T13:17:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | import copy
def arrange(n):
    """Return every permutation of the indices 0..n-1 as a list of lists.

    Built incrementally: each index ``i`` is inserted into every possible
    position of every existing partial permutation, which reproduces the
    original generation order exactly.

    Parameters
    ----------
    n : int
        Number of indices to permute.

    Returns
    -------
    list[list[int]]
        All n! permutations; ``[]`` for ``n <= 0`` (the original returned
        None here, which crashed callers iterating the result).
    """
    if n <= 0:
        return []
    result = [[]]
    for i in range(n):
        # perm[:j] + [i] + perm[j:] is the pure-function form of the
        # original copy-then-insert(j, i); the lists only hold ints, so the
        # original copy.deepcopy of the whole result was unnecessary work.
        result = [perm[:j] + [i] + perm[j:]
                  for perm in result
                  for j in range(i + 1)]
    return result
def arrangestr(s):
    """Print every permutation of ``s`` (one per line), then the count.

    Uses arrange() to generate index orderings; iteration order (and thus
    output order) is unchanged from the original implementation, which
    contained a redundant pre-loop ``temp = []`` and an unnecessary copy
    of ``s``.
    """
    perms = arrange(len(s))
    for order in perms:
        print("".join(s[k] for k in order))
    print(len(perms))
# Demo: print all 7! = 5040 permutations of the string, then the count.
arrangestr('abcdefg')
| [
"metathinkerk@gmail.com"
] | metathinkerk@gmail.com |
b1fb8191d74cba456913da37765a7243bad750a6 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_examples/_algorithms_challenges/pybites/beginner/038_using_elementtree_to_parse_xml/save4_nopass.py | 610acd10a85062c21c2e9a451793e4d85cb06c73 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,493 | py | import xml.etree.ElementTree as ET
# from OMDB
xmlstring = '''<?xml version="1.0" encoding="UTF-8"?>
<root response="True">
<movie title="The Prestige" year="2006" rated="PG-13" released="20 Oct 2006" runtime="130 min" genre="Drama, Mystery, Sci-Fi" director="Christopher Nolan" />
<movie title="The Dark Knight" year="2008" rated="PG-13" released="18 Jul 2008" runtime="152 min" genre="Action, Crime, Drama" director="Christopher Nolan" />
<movie title="The Dark Knight Rises" year="2012" rated="PG-13" released="20 Jul 2012" runtime="164 min" genre="Action, Thriller" director="Christopher Nolan" />
<movie title="Dunkirk" year="2017" rated="PG-13" released="21 Jul 2017" runtime="106 min" genre="Action, Drama, History" director="Christopher Nolan" />
<movie title="Interstellar" year="2014" rated="PG-13" released="07 Nov 2014" runtime="169 min" genre="Adventure, Drama, Sci-Fi" director="Christopher Nolan"/>
</root>''' # noqa E501
def get_tree():
"""You probably want to use ET.fromstring"""
return ET.fromstring(xmlstring)
def get_movies():
"""Call get_tree and retrieve all movie titles, return a list or generator"""
movie_list = []
for movie_title in get_tree():
movie_list.append(movie_title.attrib["title"])
return movie_list
def get_movie_longest_runtime():
"""Call get_tree again and return the movie title for the movie with the longest
runtime in minutes, for latter consider adding a _get_runtime helper"""
pass | [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
0b0b87f8cb05a693503c52e614e6e36a9734221a | d42a65c149b004228fd72586a0f79b17b0545e92 | /python_learn/lect06/venv/Scripts/easy_install-script.py | 110794ca9fc2b2a0627e9841ca82b632eacadab0 | [] | no_license | chenbiningbo/programming_learning | 34a642823c6bc8d54c4b912fa51a4ad6e9f23c15 | 2cfb2ca509d7d1ccc4779be4d7a35625010cb98a | refs/heads/master | 2020-12-27T12:51:05.640281 | 2020-02-18T14:02:40 | 2020-02-18T14:02:40 | 237,903,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | #!D:\python_learn\lect06\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Auto-generated setuptools console wrapper: strip any "-script.py(w)"
    # or ".exe" suffix so argv[0] matches the console-script name, then
    # exit with the return value of the 'easy_install' entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
    )
| [
"bi.chen@chnenergy.com.cn"
] | bi.chen@chnenergy.com.cn |
e11f5b8a6bce94fb2fb1eed0c75f6808a52e33b9 | 1aa6e732645f4603c05a1c9262f6fbb1af76b056 | /patchinfo/Google_Apps/gapps-task650.py | 0d9fc26ced139d01b3e77087393f74a2da5240a8 | [] | no_license | nauddin257/DualBootPatcher | f2831bdc72d8f94787a1d3ad94d0d85103316dd5 | 024af7ecb38ba6b4e3f1ae16ab81e32cd213864f | refs/heads/master | 2020-12-11T07:26:46.916515 | 2013-11-14T03:54:41 | 2013-11-14T03:54:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | from fileinfo import FileInfo
import re
# Patch metadata for Task650's Google Apps package, exposed via
# get_file_info() below.
file_info = FileInfo()

# Matches names like "gapps-jb(4.3)-20130805.zip".
filename_regex = r"^gapps-jb\([0-9\.]+\)-[0-9\.]+\.zip$"
file_info.patch = 'Google_Apps/gapps-task650.dualboot.patch'
# Gapps packages ship no kernel, so there is no boot image to patch.
file_info.has_boot_image = False
def matches(filename):
    """Return True if ``filename`` looks like a Task650 JB gapps zip."""
    # re.search returns a match object or None; comparing against None
    # replaces the original redundant if/else that returned True/False.
    return re.search(filename_regex, filename) is not None
def print_message():
    """Announce that Task650's Google Apps package was recognised."""
    message = "Detected Task650's Google Apps zip"
    print(message)
def get_file_info():
return file_info
| [
"chenxiaolong@cxl.epac.to"
] | chenxiaolong@cxl.epac.to |
5e6661b7a1ff8065f2e5e82f2a5ce4f5f69c88b9 | 8b25a7984bd18fc356232083da0bb2f829a1dbd4 | /ineco_point_reward/__init__.py | 446998d61d4fb32feff1de8c38b9b0e55af31f0f | [] | no_license | anndream/new_mixprint_addons | f94067a1248cf3d30ce4e937d5fb3c96bc9cb482 | 1b4b04388e723dc7137dd8d2a29fdef3f59f4861 | refs/heads/master | 2020-04-09T19:17:36.882746 | 2015-09-10T04:41:13 | 2015-09-10T04:41:13 | 42,242,457 | 0 | 2 | null | 2015-09-10T12:13:56 | 2015-09-10T12:13:56 | null | UTF-8 | Python | false | false | 1,106 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import point
import sale
import res_partner
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"thitithup@gmail.com"
] | thitithup@gmail.com |
e9025879b824e2a02721409d1fe7e3c7fa642b83 | 147519505f3c47e5f10d9679e07d3719931b9fd0 | /ecommerce/product/views.py | d03412aa093af7ddfd44127d9b19f824f955fc21 | [] | no_license | grbalmeida/hello-django | 85ed28d8d47a9a2e072f3eecd13d22fb2e977a31 | 9ef261ba5faeac3de8d36eeb7efa8974e5d1e661 | refs/heads/master | 2020-08-12T10:10:48.554349 | 2019-12-20T01:18:33 | 2019-12-20T01:18:33 | 214,748,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,585 | py | from django.shortcuts import render, redirect, reverse, get_object_or_404
from django.views.generic import ListView
from django.views.generic.detail import DetailView
from django.views import View
from django.http import HttpResponse
from django.contrib import messages
from . import models
from user_profile.models import UserProfile, Address
class ProductList(ListView):
model = models.Product
template_name = 'product/list.html'
context_object_name = 'products'
paginate_by = 10
class ProductDetails(DetailView):
model = models.Product
template_name = 'product/details.html'
context_object_name = 'product'
slug_url_kwarg = 'slug'
class AddToCart(View):
def get(self, *args, **kwargs):
http_referer = self.request.META.get(
'HTTP_REFERER',
reverse('product:list')
)
variation_id = self.request.GET.get('vid')
if not variation_id:
messages.error(
self.request,
'Product not found'
)
return redirect(http_referer)
variation = get_object_or_404(models.Variation, id=variation_id)
product = variation.product
product_id = product.id
product_name = product.name
variation_name = variation.name or ''
variation_stock = variation.stock
variation_price = variation.price
variation_promotional_price = variation.promotional_price
amount = 1
slug = product.slug
image = product.image
image = image.name if image else ''
if variation.stock < 1:
messages.error(
self.request,
'Insufficient stock'
)
return redirect(http_referer)
if not self.request.session.get('cart'):
self.request.session['cart'] = {}
self.request.session.save()
cart = self.request.session.get('cart')
if variation_id in cart:
current_amount = cart[variation_id]['amount']
current_amount += 1
if variation_stock < current_amount:
messages.warning(
self.request,
f'Insufficient stock for {current_amount}x in {product_name} product. '
f'We add {variation_stock}x to your cart.'
)
current_amount = variation_stock
cart[variation_id]['amount'] = current_amount
cart[variation_id]['quantitative_price'] = variation_price * current_amount
cart[variation_id]['promotional_quantitative_price'] = \
variation_promotional_price * current_amount
else:
cart[variation_id] = {
'product_id': product_id,
'product_name': product_name,
'variation_id': variation_id,
'variation_name': variation_name,
'variation_price': variation_price,
'variation_promotional_price': variation_promotional_price,
'quantitative_price': variation_price,
'promotional_quantitative_price': variation_promotional_price,
'amount': amount,
'slug': slug,
'image': image
}
self.request.session.save()
messages.success(
self.request,
f'{product_name} {variation_name} product added to your cart'
)
return redirect(http_referer)
class RemoveFromCart(View):
def get(self, *args, **kwargs):
http_referer = self.request.META.get(
'HTTP_REFERER',
reverse('product:list')
)
variation_id = self.request.GET.get('vid')
if not variation_id:
return redirect(http_referer)
cart = self.request.session.get('cart')
if not cart:
return redirect(http_referer)
if variation_id not in cart:
return redirect(http_referer)
messages.success(
self.request,
f'Product {cart[variation_id]["product_name"]} '
f'{cart[variation_id]["variation_name"]} removed from your cart'
)
del cart[variation_id]
self.request.session.save()
return redirect(http_referer)
class Cart(View):
def get(self, *args, **kwargs):
context = {
'cart': self.request.session.get('cart')
}
return render(self.request, 'product/cart.html', context)
class PurchaseSummary(View):
def get(self, *args, **kwargs):
if not self.request.user.is_authenticated:
return redirect('user_profile:create')
user_profile_exists = UserProfile.objects.filter(user=self.request.user).exists()
user_profile = UserProfile.objects.get(user=self.request.user)
address = Address.objects.get(user_profile=user_profile)
if not user_profile_exists:
messages.error(
self.request,
'User without profile'
)
return redirect('user_profile:create')
if not self.request.session.get('cart'):
messages.error(
self.request,
'Empty cart'
)
return redirect('product:list')
context = {
'user': self.request.user,
'cart': self.request.session['cart'],
'address': address,
'user_profile': user_profile
}
return render(self.request, 'product/purchase-summary.html', context)
| [
"g.r.almeida@live.com"
] | g.r.almeida@live.com |
11115846bcf63ce3f18a74a99918a5763ab07da3 | 8da91c26d423bacbeee1163ac7e969904c7e4338 | /pyvisdk/enums/net_ip_config_info_ip_address_status.py | 72780ca843b9d782573c01057d893237fb90437c | [] | no_license | pexip/os-python-infi-pyvisdk | 5d8f3a3858cdd61fb76485574e74ae525cdc7e25 | 1aadea0afbc306d09f6ecb9af0e683dbbf961d20 | refs/heads/master | 2023-08-28T02:40:28.789786 | 2020-07-16T04:00:53 | 2020-07-16T04:00:53 | 10,032,240 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 323 | py |
########################################
# Automatically generated, do not edit.
########################################
from pyvisdk.thirdparty import Enum
NetIpConfigInfoIpAddressStatus = Enum(
'deprecated',
'duplicate',
'inaccessible',
'invalid',
'preferred',
'tentative',
'unknown',
)
| [
"jmb@pexip.com"
] | jmb@pexip.com |
7ff5bc8c0d237ac504daf3e4602b7d93d76731a9 | d841f4b4b1a7d1837c84fbe431df201ecbccb0ca | /hydrus/client/metadata/ClientMetadataMigrationImporters.py | ad4ce3a9411bdf300d3f9a0079c087b8f9f8881f | [
"WTFPL"
] | permissive | floogulinc/hydrus | ed109f2fcc9af0b33543b18a65038b74f2deab3d | dfde69b5f28d11778a2bec3b5f4188c98363d202 | refs/heads/master | 2023-08-16T23:09:43.998955 | 2023-07-22T18:06:10 | 2023-07-22T18:06:10 | 177,238,251 | 1 | 0 | NOASSERTION | 2022-02-15T11:47:59 | 2019-03-23T03:13:59 | Python | UTF-8 | Python | false | false | 26,865 | py | import os
import typing
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusText
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientParsing
from hydrus.client import ClientStrings
from hydrus.client import ClientTime
from hydrus.client.media import ClientMediaResult
from hydrus.client.metadata import ClientMetadataMigrationCore
from hydrus.client.metadata import ClientTags
# TODO: All importers should probably have a string processor
class SingleFileMetadataImporter( ClientMetadataMigrationCore.ImporterExporterNode ):
def __init__( self, string_processor: ClientStrings.StringProcessor ):
self._string_processor = string_processor
def GetStringProcessor( self ) -> ClientStrings.StringProcessor:
return self._string_processor
def Import( self, *args, **kwargs ):
raise NotImplementedError()
def ToString( self ) -> str:
raise NotImplementedError()
class SingleFileMetadataImporterMedia( SingleFileMetadataImporter ):
def Import( self, media_result: ClientMediaResult.MediaResult ):
raise NotImplementedError()
def ToString( self ) -> str:
raise NotImplementedError()
class SingleFileMetadataImporterSidecar( SingleFileMetadataImporter, ClientMetadataMigrationCore.SidecarNode ):
def __init__( self, string_processor: ClientStrings.StringProcessor, remove_actual_filename_ext: bool, suffix: str, filename_string_converter: ClientStrings.StringConverter ):
ClientMetadataMigrationCore.SidecarNode.__init__( self, remove_actual_filename_ext, suffix, filename_string_converter )
SingleFileMetadataImporter.__init__( self, string_processor )
def GetExpectedSidecarPath( self, path: str ):
raise NotImplementedError()
def Import( self, actual_file_path: str ):
raise NotImplementedError()
def ToString( self ) -> str:
raise NotImplementedError()
class SingleFileMetadataImporterMediaNotes( SingleFileMetadataImporterMedia, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_NOTES
SERIALISABLE_NAME = 'Metadata Single File Importer Media Notes'
SERIALISABLE_VERSION = 1
def __init__( self, string_processor: typing.Optional[ ClientStrings.StringProcessor ] = None ):
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterMedia.__init__( self, string_processor )
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
return serialisable_string_processor
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
serialisable_string_processor = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
def GetExampleStrings( self ):
examples = [
'Artist Commentary: This work is one of my favourites.',
'Translation: "What a nice day!"'
]
return examples
def Import( self, media_result: ClientMediaResult.MediaResult ):
names_to_notes = media_result.GetNotesManager().GetNamesToNotes()
rows = [ '{}{}{}'.format( name.replace( ClientMetadataMigrationCore.NOTE_CONNECTOR_STRING, ClientMetadataMigrationCore.NOTE_NAME_ESCAPE_STRING ), ClientMetadataMigrationCore.NOTE_CONNECTOR_STRING, text ) for ( name, text ) in names_to_notes.items() ]
if self._string_processor.MakesChanges():
rows = self._string_processor.ProcessStrings( rows )
return rows
def ToString( self ) -> str:
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return 'notes from media{}'.format( full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_NOTES ] = SingleFileMetadataImporterMediaNotes
class SingleFileMetadataImporterMediaTags( SingleFileMetadataImporterMedia, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_TAGS
SERIALISABLE_NAME = 'Metadata Single File Importer Media Tags'
SERIALISABLE_VERSION = 2
def __init__( self, string_processor = None, service_key = None ):
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterMedia.__init__( self, string_processor )
if service_key is None:
service_key = CC.COMBINED_TAG_SERVICE_KEY
self._service_key = service_key
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
serialisable_service_key = self._service_key.hex()
return ( serialisable_string_processor, serialisable_service_key )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_string_processor, serialisable_service_key ) = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
self._service_key = bytes.fromhex( serialisable_service_key )
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
serialisable_service_key = old_serialisable_info
string_processor = ClientStrings.StringProcessor()
serialisable_string_processor = string_processor.GetSerialisableTuple()
new_serialisable_info = ( serialisable_string_processor, serialisable_service_key )
return ( 2, new_serialisable_info )
def GetExampleStrings( self ):
examples = [
'blue eyes',
'blonde hair',
'skirt',
'character:jane smith',
'series:jane smith adventures',
'creator:some guy'
]
return examples
def GetServiceKey( self ) -> bytes:
return self._service_key
def Import( self, media_result: ClientMediaResult.MediaResult ):
tags = media_result.GetTagsManager().GetCurrent( self._service_key, ClientTags.TAG_DISPLAY_STORAGE )
# turning ::) into :)
tags = { HydrusText.re_leading_double_colon.sub( ':', tag ) for tag in tags }
if self._string_processor.MakesChanges():
tags = self._string_processor.ProcessStrings( tags )
return tags
def SetServiceKey( self, service_key: bytes ):
self._service_key = service_key
def ToString( self ) -> str:
try:
name = HG.client_controller.services_manager.GetName( self._service_key )
except:
name = 'unknown service'
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return '"{}" tags from media{}'.format( name, full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_TAGS ] = SingleFileMetadataImporterMediaTags
class SingleFileMetadataImporterMediaTimestamps( SingleFileMetadataImporterMedia, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_TIMESTAMPS
SERIALISABLE_NAME = 'Metadata Single File Importer Media Timestamps'
SERIALISABLE_VERSION = 1
def __init__( self, string_processor = None, timestamp_data_stub = None ):
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterMedia.__init__( self, string_processor )
if timestamp_data_stub is None:
timestamp_data_stub = ClientTime.TimestampData.STATICSimpleStub( HC.TIMESTAMP_TYPE_ARCHIVED )
self._timestamp_data_stub = timestamp_data_stub
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
serialisable_timestamp_data_stub = self._timestamp_data_stub.GetSerialisableTuple()
return ( serialisable_string_processor, serialisable_timestamp_data_stub )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_string_processor, serialisable_timestamp_data_stub ) = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
self._timestamp_data_stub = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_timestamp_data_stub )
def GetExampleStrings( self ):
examples = [
'1681682717'
]
return examples
def GetTimestampDataStub( self ) -> ClientTime.TimestampData:
return self._timestamp_data_stub
def Import( self, media_result: ClientMediaResult.MediaResult ):
rows = []
timestamp = media_result.GetTimestampsManager().GetTimestampFromStub( self._timestamp_data_stub )
if timestamp is not None:
rows.append( str( timestamp ) )
if self._string_processor.MakesChanges():
rows = self._string_processor.ProcessStrings( rows )
return rows
def SetTimestampDataStub( self, timestamp_data_stub: ClientTime.TimestampData ):
self._timestamp_data_stub = timestamp_data_stub
def ToString( self ) -> str:
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return '{} from media{}'.format( self._timestamp_data_stub, full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_TIMESTAMPS ] = SingleFileMetadataImporterMediaTimestamps
class SingleFileMetadataImporterMediaURLs( SingleFileMetadataImporterMedia, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_URLS
SERIALISABLE_NAME = 'Metadata Single File Importer Media URLs'
SERIALISABLE_VERSION = 2
def __init__( self, string_processor = None ):
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterMedia.__init__( self, string_processor )
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
return serialisable_string_processor
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
serialisable_string_processor = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
gumpf = old_serialisable_info
string_processor = ClientStrings.StringProcessor()
serialisable_string_processor = string_processor.GetSerialisableTuple()
new_serialisable_info = serialisable_string_processor
return ( 2, new_serialisable_info )
def GetExampleStrings( self ):
examples = [
'https://example.com/gallery/index.php?post=123456&page=show',
'https://cdn3.expl.com/files/file_id?id=123456&token=0123456789abcdef'
]
return examples
def Import( self, media_result: ClientMediaResult.MediaResult ):
urls = media_result.GetLocationsManager().GetURLs()
if self._string_processor.MakesChanges():
urls = self._string_processor.ProcessStrings( urls )
return urls
def ToString( self ) -> str:
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return 'urls from media{}'.format( full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_MEDIA_URLS ] = SingleFileMetadataImporterMediaURLs
class SingleFileMetadataImporterJSON( SingleFileMetadataImporterSidecar, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_JSON
SERIALISABLE_NAME = 'Metadata Single File Importer JSON'
SERIALISABLE_VERSION = 3
def __init__( self, string_processor = None, remove_actual_filename_ext = None, suffix = None, filename_string_converter = None, json_parsing_formula = None ):
if remove_actual_filename_ext is None:
remove_actual_filename_ext = False
if suffix is None:
suffix = ''
if filename_string_converter is None:
filename_string_converter = ClientStrings.StringConverter( example_string = 'my_image.jpg.json' )
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterSidecar.__init__( self, string_processor, remove_actual_filename_ext, suffix, filename_string_converter )
if json_parsing_formula is None:
parse_rules = [ ( ClientParsing.JSON_PARSE_RULE_TYPE_ALL_ITEMS, None ) ]
json_parsing_formula = ClientParsing.ParseFormulaJSON( parse_rules = parse_rules, content_to_fetch = ClientParsing.JSON_CONTENT_STRING )
self._json_parsing_formula = json_parsing_formula
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
serialisable_filename_string_converter = self._filename_string_converter.GetSerialisableTuple()
serialisable_json_parsing_formula = self._json_parsing_formula.GetSerialisableTuple()
return ( serialisable_string_processor, self._remove_actual_filename_ext, self._suffix, serialisable_filename_string_converter, serialisable_json_parsing_formula )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_string_processor, self._remove_actual_filename_ext, self._suffix, serialisable_filename_string_converter, serialisable_json_parsing_formula ) = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
self._filename_string_converter = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_filename_string_converter )
self._json_parsing_formula = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_json_parsing_formula )
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
( suffix, serialisable_json_parsing_formula ) = old_serialisable_info
string_processor = ClientStrings.StringProcessor()
serialisable_string_processor = string_processor.GetSerialisableTuple()
new_serialisable_info = ( serialisable_string_processor, suffix, serialisable_json_parsing_formula )
return ( 2, new_serialisable_info )
if version == 2:
( serialisable_string_processor, suffix, serialisable_json_parsing_formula ) = old_serialisable_info
remove_actual_filename_ext = False
filename_string_converter = ClientStrings.StringConverter( example_string = 'my_image.jpg.json' )
serialisable_filename_string_converter = filename_string_converter.GetSerialisableTuple()
new_serialisable_info = ( serialisable_string_processor, remove_actual_filename_ext, suffix, serialisable_filename_string_converter, serialisable_json_parsing_formula )
return ( 3, new_serialisable_info )
def GetExpectedSidecarPath( self, actual_file_path: str ):
return ClientMetadataMigrationCore.GetSidecarPath( actual_file_path, self._remove_actual_filename_ext, self._suffix, self._filename_string_converter, 'json' )
def GetJSONParsingFormula( self ) -> ClientParsing.ParseFormulaJSON:
return self._json_parsing_formula
def Import( self, actual_file_path: str ) -> typing.Collection[ str ]:
path = self.GetExpectedSidecarPath( actual_file_path )
if not os.path.exists( path ):
return []
try:
with open( path, 'r', encoding = 'utf-8' ) as f:
read_raw_json = f.read()
except Exception as e:
raise Exception( 'Could not import from {}: {}'.format( path, str( e ) ) )
parsing_context = {}
collapse_newlines = False
rows = self._json_parsing_formula.Parse( parsing_context, read_raw_json, collapse_newlines )
if self._string_processor.MakesChanges():
rows = self._string_processor.ProcessStrings( rows )
return rows
def SetJSONParsingFormula( self, json_parsing_formula: ClientParsing.ParseFormulaJSON ):
self._json_parsing_formula = json_parsing_formula
def ToString( self ) -> str:
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return 'from JSON sidecar{}'.format( full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_JSON ] = SingleFileMetadataImporterJSON
class SingleFileMetadataImporterTXT( SingleFileMetadataImporterSidecar, HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_TXT
SERIALISABLE_NAME = 'Metadata Single File Importer TXT'
SERIALISABLE_VERSION = 4
def __init__( self, string_processor = None, remove_actual_filename_ext = None, suffix = None, filename_string_converter = None, separator = None ):
if remove_actual_filename_ext is None:
remove_actual_filename_ext = False
if suffix is None:
suffix = ''
if filename_string_converter is None:
filename_string_converter = ClientStrings.StringConverter( example_string = 'my_image.jpg.txt' )
if string_processor is None:
string_processor = ClientStrings.StringProcessor()
if separator is None:
separator = '\n'
self._separator = separator
HydrusSerialisable.SerialisableBase.__init__( self )
SingleFileMetadataImporterSidecar.__init__( self, string_processor, remove_actual_filename_ext, suffix, filename_string_converter )
def _GetSerialisableInfo( self ):
serialisable_string_processor = self._string_processor.GetSerialisableTuple()
serialisable_filename_string_converter = self._filename_string_converter.GetSerialisableTuple()
return ( serialisable_string_processor, self._remove_actual_filename_ext, self._suffix, serialisable_filename_string_converter, self._separator )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_string_processor, self._remove_actual_filename_ext, self._suffix, serialisable_filename_string_converter, self._separator ) = serialisable_info
self._string_processor = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_string_processor )
self._filename_string_converter = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_filename_string_converter )
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
if version == 1:
suffix = old_serialisable_info
string_processor = ClientStrings.StringProcessor()
serialisable_string_processor = string_processor.GetSerialisableTuple()
new_serialisable_info = ( serialisable_string_processor, suffix )
return ( 2, new_serialisable_info )
if version == 2:
( serialisable_string_processor, suffix ) = old_serialisable_info
remove_actual_filename_ext = False
filename_string_converter = ClientStrings.StringConverter( example_string = 'my_image.jpg.txt' )
serialisable_filename_string_converter = filename_string_converter.GetSerialisableTuple()
new_serialisable_info = ( serialisable_string_processor, remove_actual_filename_ext, suffix, serialisable_filename_string_converter )
return ( 3, new_serialisable_info )
if version == 3:
( serialisable_string_processor, remove_actual_filename_ext, suffix, serialisable_filename_string_converter ) = old_serialisable_info
separator = '\n'
new_serialisable_info = ( serialisable_string_processor, remove_actual_filename_ext, suffix, serialisable_filename_string_converter, separator )
return ( 4, new_serialisable_info )
def GetExpectedSidecarPath( self, actual_file_path: str ):
return ClientMetadataMigrationCore.GetSidecarPath( actual_file_path, self._remove_actual_filename_ext, self._suffix, self._filename_string_converter, 'txt' )
def GetSeparator( self ) -> str:
return self._separator
def Import( self, actual_file_path: str ) -> typing.Collection[ str ]:
path = self.GetExpectedSidecarPath( actual_file_path )
if not os.path.exists( path ):
return []
try:
with open( path, 'r', encoding = 'utf-8' ) as f:
raw_text = f.read()
except Exception as e:
raise Exception( 'Could not import from {}: {}'.format( path, str( e ) ) )
rows = HydrusText.DeserialiseNewlinedTexts( raw_text )
if self._separator != '\n':
# don't want any newlines, so this 'undo' is correct
rejoined_text = ''.join( rows )
rows = rejoined_text.split( self._separator )
if self._string_processor.MakesChanges():
rows = self._string_processor.ProcessStrings( rows )
return rows
def SetSeparator( self, separator: str ):
self._separator = separator
def ToString( self ) -> str:
if self._string_processor.MakesChanges():
full_munge_text = ', applying {}'.format( self._string_processor.ToString() )
else:
full_munge_text = ''
return 'from .txt sidecar{}'.format( full_munge_text )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_METADATA_SINGLE_FILE_IMPORTER_TXT ] = SingleFileMetadataImporterTXT
| [
"hydrus.admin@gmail.com"
] | hydrus.admin@gmail.com |
69b8440a1cf5de7d0991dc2472928b20cde2bdb3 | 43cdd7cb26fe44b1ed7de6a46f8b5e680c9b1372 | /tests/components/mqtt/test_subscription.py | 5ee784b8ab76967c31edb266f4a6e916a62b2fc7 | [
"Apache-2.0"
] | permissive | OpenPeerPower/Open-Peer-Power | 02ec5c133564b47c6f72f669e844a666643cacd6 | 940a04a88e8f78e2d010dc912ad6905ae363503c | refs/heads/master | 2022-08-16T09:38:49.994009 | 2021-05-29T03:54:13 | 2021-05-29T03:54:13 | 183,174,237 | 1 | 0 | Apache-2.0 | 2022-07-15T18:43:02 | 2019-04-24T07:35:47 | Python | UTF-8 | Python | false | false | 5,209 | py | """The tests for the MQTT subscription component."""
from unittest import mock
from openpeerpower.components.mqtt.subscription import (
async_subscribe_topics,
async_unsubscribe_topics,
)
from openpeerpower.core import callback
from tests.common import async_fire_mqtt_message, async_mock_mqtt_component
async def test_subscribe_topics(opp, mqtt_mock, caplog):
"""Test subscription to topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
opp,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(opp, "test-topic1", "test-payload1")
assert len(calls1) == 1
assert calls1[0][0].topic == "test-topic1"
assert calls1[0][0].payload == "test-payload1"
assert len(calls2) == 0
async_fire_mqtt_message(opp, "test-topic2", "test-payload2")
assert len(calls1) == 1
assert len(calls2) == 1
assert calls2[0][0].topic == "test-topic2"
assert calls2[0][0].payload == "test-payload2"
await async_unsubscribe_topics(opp, sub_state)
async_fire_mqtt_message(opp, "test-topic1", "test-payload")
async_fire_mqtt_message(opp, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async def test_modify_topics(opp, mqtt_mock, caplog):
"""Test modification of topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
opp,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(opp, "test-topic1", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 0
async_fire_mqtt_message(opp, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
sub_state = await async_subscribe_topics(
opp,
sub_state,
{"test_topic1": {"topic": "test-topic1_1", "msg_callback": record_calls1}},
)
async_fire_mqtt_message(opp, "test-topic1", "test-payload")
async_fire_mqtt_message(opp, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async_fire_mqtt_message(opp, "test-topic1_1", "test-payload")
assert len(calls1) == 2
assert calls1[1][0].topic == "test-topic1_1"
assert calls1[1][0].payload == "test-payload"
assert len(calls2) == 1
await async_unsubscribe_topics(opp, sub_state)
async_fire_mqtt_message(opp, "test-topic1_1", "test-payload")
async_fire_mqtt_message(opp, "test-topic2", "test-payload")
assert len(calls1) == 2
assert len(calls2) == 1
async def test_qos_encoding_default(opp, mqtt_mock, caplog):
"""Test default qos and encoding."""
mock_mqtt = await async_mock_mqtt_component(opp)
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
opp,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
mock_mqtt.async_subscribe.assert_called_once_with(
"test-topic1", mock.ANY, 0, "utf-8"
)
async def test_qos_encoding_custom(opp, mqtt_mock, caplog):
    """Test custom qos and encoding."""
    mock_mqtt = await async_mock_mqtt_component(opp)

    @callback
    def noop_callback(*args):
        """Do nothing."""

    sub_state = None
    # Explicit qos/encoding must be forwarded verbatim to the broker layer.
    topic_config = {
        "topic": "test-topic1",
        "msg_callback": noop_callback,
        "qos": 1,
        "encoding": "utf-16",
    }
    sub_state = await async_subscribe_topics(
        opp,
        sub_state,
        {"test_topic1": topic_config},
    )

    mock_mqtt.async_subscribe.assert_called_once_with(
        "test-topic1", mock.ANY, 1, "utf-16"
    )
async def test_no_change(opp, mqtt_mock, caplog):
    """Test subscription to topics without change."""
    mock_mqtt = await async_mock_mqtt_component(opp)

    @callback
    def noop_callback(*args):
        """Do nothing."""

    sub_state = None
    sub_state = await async_subscribe_topics(
        opp,
        sub_state,
        {"test_topic1": {"topic": "test-topic1", "msg_callback": noop_callback}},
    )
    subscribes_before = mock_mqtt.async_subscribe.call_count

    # Re-subscribing with an identical topic map must not hit the broker again.
    sub_state = await async_subscribe_topics(
        opp,
        sub_state,
        {"test_topic1": {"topic": "test-topic1", "msg_callback": noop_callback}},
    )
    assert subscribes_before == mock_mqtt.async_subscribe.call_count
| [
"pcaston@arach.net.au"
] | pcaston@arach.net.au |
251e27541c26ade09017922536afaa65c6e7a613 | a816de2c05290e9a1dcfe1e3e50b96e36792898a | /TESSLCclass.py | 30552e94e69fbe6cad0cd43162dfb7f19ae0be04 | [] | no_license | r-cloutier/mdwarfparams | 2998defb6ed62b6ec8fe1d6a868a9541ea350c15 | e62f10473f91405e2f9cf4998e6ba85a5fd73243 | refs/heads/master | 2021-06-17T14:27:25.835864 | 2019-08-26T18:04:08 | 2019-08-26T18:04:08 | 145,714,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | from imports import *
from truncate_cmap import *
def loadpickle(fname):
    """Load and return the object pickled in file *fname*.

    fname -- path to a pickle file written by TESSLC._pickleobject.
    """
    # Context manager closes the handle even if unpickling raises;
    # the old open/close pair leaked the handle on error.
    with open(fname, 'rb') as fObj:
        return pickle.load(fObj)
class TESSLC:
    """Light-curve container pickled under PipelineResults/TIC_<id>/."""

    def __init__(self, TICid, index):
        """Create the output folders for this TIC and pickle the object.

        TICid -- TESS Input Catalog identifier (int).
        index -- running index used to name the pickle file (zero-padded).
        """
        # exist_ok=True replaces the old try/mkdir/except-OSError pattern,
        # which also silently swallowed unrelated OSErrors (e.g. EACCES).
        os.makedirs('PipelineResults', exist_ok=True)
        self.TICid = TICid
        self.folder_full = 'PipelineResults/TIC_%i'%self.TICid
        self.fname_full = '%s/TESSLC_%.5d'%(self.folder_full, index)
        os.makedirs(self.folder_full, exist_ok=True)
        self._pickleobject()

    def _pickleobject(self):
        """Serialize this object to self.fname_full."""
        # Context manager guarantees the file is flushed and closed.
        with open(self.fname_full, 'wb') as fObj:
            pickle.dump(self, fObj)
| [
"cloutier@astro.utoronto.ca"
] | cloutier@astro.utoronto.ca |
class Solution:
    def calcEquation(self, equations: List[List[str]], values: List[float], queries: List[List[str]]) -> List[float]:
        """Evaluate division queries by BFS over the variable-ratio graph."""
        adjacency = collections.defaultdict(set)
        ratio = dict()   # ratio[(a, b)] == value of a / b, cached across queries
        seen = set()

        # Build an undirected graph, storing each ratio in both directions.
        for (num, den), val in zip(equations, values):
            adjacency[num].add(den)
            adjacency[den].add(num)
            ratio[(num, den)] = val
            ratio[(den, num)] = 1.0 / val

        def bfs(src, dst):
            # Already derived in a previous traversal?
            if (src, dst) in ratio:
                return ratio[(src, dst)]
            if src not in adjacency or dst not in adjacency:
                return -1.0
            if src == dst:
                return 1.0
            seen.add(src)
            frontier = collections.deque(
                (nbr, ratio[(src, nbr)]) for nbr in adjacency[src]
            )
            while frontier:
                node, acc = frontier.popleft()
                if node == dst:
                    return acc
                for nxt in adjacency[node]:
                    if nxt not in seen:
                        seen.add(nxt)
                        # Memoize the accumulated ratio for later queries.
                        ratio[(src, nxt)] = acc * ratio[(node, nxt)]
                        frontier.append((nxt, ratio[(src, nxt)]))
            return -1.0

        answers = list()
        for src, dst in queries:
            seen.clear()
            answers.append(bfs(src, dst))
        return answers
| [
"wdmjjxg@163.com"
] | wdmjjxg@163.com |
def sequence(n, k):
    """Return the k-th (1-indexed) permutation of the digits 1..n as a string.

    The original draft built the digit list and computed (n-1)! but never
    produced a result; this completes it using the factorial number system:
    the leading digit changes every (n-1)! permutations, the next every
    (n-2)!, and so on.

    n -- number of digits (permutes 1..n).
    k -- 1-indexed rank of the desired permutation in lexicographic order.
    """
    import math

    remaining = list(range(1, n + 1))
    k -= 1  # switch to a 0-indexed rank for divmod arithmetic
    digits = []
    for slot in range(n, 0, -1):
        # Size of the block of permutations sharing the current leading digit.
        block = math.factorial(slot - 1)
        pick, k = divmod(k, block)
        digits.append(str(remaining.pop(pick)))
    return "".join(digits)

sequence(3, 3)
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
06052e5198f1bd847230dc07432e93da469d8b0e | 3cca537e780ba900087b187d1494713f0c81a24d | /lenstools/tests/test_limber.py | 00cf52cad33fc6b031b5476bb4e990699ccf8f7a | [
"MIT"
] | permissive | apetri/LensTools | 4119c1b5c0570fb6e4078fa67fb3acd5b443c0a5 | 9151988bfe6fbd6809353a33cfb556d44b6806ed | refs/heads/master | 2023-07-19T02:03:26.708366 | 2021-01-21T14:26:47 | 2021-01-21T14:26:47 | 27,881,137 | 32 | 33 | null | 2023-07-12T10:53:00 | 2014-12-11T16:43:33 | Python | UTF-8 | Python | false | false | 680 | py | import os
from ..simulations.limber import LimberIntegrator
from ..utils.defaults import load_power_default
from .. import dataExtern
import numpy as np
import matplotlib.pyplot as plt
from astropy.cosmology import WMAP9
def test_convergence_power():
    """Smoke test: compute and plot the lensing convergence power spectrum."""
    # Multipoles, log-spaced from l=1 to l=1e5.
    l = np.logspace(0.0, 5.0, 100)

    integrator = LimberIntegrator(cosmoModel=WMAP9)
    integrator.load3DPowerSpectrum(load_power_default, os.path.join(dataExtern(), "camb_output"), "fiducial_matterpower_")

    Cl = integrator.convergencePowerSpectrum(l)

    # Plot the dimensionless power l(l+1)C_l/2pi on log-log axes.
    plt.plot(l, l * (l + 1) * Cl / (2.0 * np.pi))
    plt.xscale("log")
    plt.yscale("log")
    plt.xlabel("l")
    plt.ylabel("l(l+1)C_l/2*pi")

    # Saving the figure is best-effort (e.g. read-only CI filesystem), but
    # the bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    try:
        plt.savefig("limber_power.png")
    except Exception:
        pass
"apetri@phys.columbia.edu"
] | apetri@phys.columbia.edu |
2c475eea931a29a01165cdd68c61090b2c880580 | a86fda09a185ebf367e31cf26589161303f9497a | /metrics/_version.py | d8cec597f3c241c088dae32efea12b629a4a3b11 | [
"BSD-3-Clause"
] | permissive | kristianeschenburg/metrics | 67ec2cd5b697241eee35da46daf71b2d735cdb64 | 53900f8130cb7dd762ae3e816225fb4f178a5b29 | refs/heads/master | 2020-03-25T10:27:29.623361 | 2019-04-02T21:20:08 | 2019-04-02T21:20:08 | 116,600,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,454 | py |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These placeholder strings are substituted by `git archive` (via the
    # export-subst attribute); each must appear on its own line so that
    # setup.py/versioneer.py can grep for the variable names.
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {
        "refnames": git_refnames,
        "full": git_full,
        "date": git_date,
    }
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Instances carry the attributes VCS, style, tag_prefix,
    parentdir_prefix, versionfile_source and verbose, all assigned by
    get_config(); the class itself defines no behavior.
    """
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    cfg = VersioneerConfig()
    # These values were baked in when 'setup.py versioneer' generated
    # this _version.py for the project.
    settings = {
        "VCS": "git",
        "style": "pep440-post",
        "tag_prefix": "v",
        "parentdir_prefix": "None",
        "versionfile_source": "metrics/_version.py",
        "verbose": False,
    }
    for attr, value in settings.items():
        setattr(cfg, attr, value)
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Used as control flow by get_versions(): each extraction strategy raises
    NotThisMethod so the caller can fall through to the next strategy.
    """
# Registry of handler functions, keyed first by VCS name, then by method.
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each name in *commands* until one can be spawned; returns
    (stdout, returncode), or (None, None) if nothing could be run, or
    (None, returncode) on a non-zero exit.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        try:
            display = str([candidate] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [candidate] + args, cwd=cwd, env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # This candidate is not installed; try the next one.
                continue
            if verbose:
                print("unable to run %s" % display)
                print(err)
            return None, None
    else:
        # Loop exhausted: no candidate could even be spawned.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % display)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    for _ in range(3):
        leaf = os.path.basename(root)
        if leaf.startswith(parentdir_prefix):
            # Whatever follows the prefix is taken as the version string.
            return {"version": leaf[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* for the git_refnames/git_full/git_date
    assignments and returns any found under the keys "refnames", "full"
    and "date". The code embedded in _version.py can just fetch the value
    of these keywords; when used from setup.py we don't want to import
    _version.py, so it is done with a regexp instead.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if reading fails
        # part-way; the original open/close pair leaked it on error.
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        # A missing/unreadable file simply yields an empty (partial) dict.
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    *keywords* is the refnames/full/date dict produced by get_keywords()
    or git_get_keywords(). Returns the standard version dict (version,
    full-revisionid, dirty, error, date); raises NotThisMethod when the
    keywords were never expanded by 'git archive'.
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    # refnames looks like "(HEAD -> master, tag: v1.0, origin/master)".
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys: long, short, error, dirty,
    closest-tag, distance, date — consumed by the render_* functions.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                              % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                              % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    # PEP 440 allows only one '+' introducing the local version segment;
    # further segments are joined with '.'.
    return "." if "+" in pieces.get("closest-tag", "") else "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            # Only one '+' is allowed; use '.' if the tag already has one.
            sep = "." if "+" in tag else "+"
            version += "%s%d.g%s" % (sep, pieces["distance"], pieces["short"])
    else:
        # exception #1: no tag at all
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        version += ".dirty"
    return version
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        return tag
    return "%s.post.dev%d" % (tag, pieces["distance"])
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    dev = ".dev0" if pieces["dirty"] else ""
    if not tag:
        # exception #1: always append the short hex as the local segment
        return "0.post%d%s+g%s" % (pieces["distance"], dev, pieces["short"])
    if not (pieces["distance"] or pieces["dirty"]):
        return tag
    sep = "." if "+" in tag else "+"
    return "%s.post%d%s%sg%s" % (tag, pieces["distance"], dev, sep,
                                 pieces["short"])
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Eexceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    dev = ".dev0" if pieces["dirty"] else ""
    tag = pieces["closest-tag"]
    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            return tag
        return "%s.post%d%s" % (tag, pieces["distance"], dev)
    # exception #1
    return "0.post%d%s" % (pieces["distance"], dev)
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        out = tag
        if pieces["distance"]:
            out += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        out = pieces["short"]
    return out + ("-dirty" if pieces["dirty"] else "")
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        out = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1
        out = pieces["short"]
    return out + ("-dirty" if pieces["dirty"] else "")
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Upstream extraction failed; report the error verbatim.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    # Dispatch table replaces the original if/elif chain.
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
def get_versions():
    """Get version information or return default if unable to do so.

    Strategies, in order: expanded git-archive keywords, then 'git
    describe' on the working tree, then the parent-directory name; each
    raises NotThisMethod to fall through to the next.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # Frozen interpreters may not define __file__ at all.
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
| [
"keschenb@uw.edu"
] | keschenb@uw.edu |
e0f4dcaaccc52eb5169348eead02c5eaf9fdb071 | 925a067ff1473cf45ad8aa9cf99db4311a7799ed | /Sandbox/Old/shawn_model.py | 0bd190b07ece28f4d7b9a2b74c48e1239514d191 | [] | no_license | ORNL-Fusion/Collector-Probes | fd7250738e797befa06fad487e9d2498b61436a5 | 16e15a0d3dcaa8a88da25aaf3ea126e9eb2a5f96 | refs/heads/master | 2022-09-03T01:02:39.520659 | 2022-08-28T13:28:53 | 2022-08-28T13:28:53 | 95,914,293 | 1 | 3 | null | 2019-11-14T15:00:39 | 2017-06-30T18:34:29 | Python | UTF-8 | Python | false | false | 13,635 | py | # This program determines the total flux to a collector probe (maxFlux). The
# basis is net_flux = maxFlux - loss_flux, where loss_flux is due to sputtering.
from __future__ import print_function
from scipy import interpolate
from scipy.optimize import curve_fit
import numpy as np
import openpyxl as xl
import collLengths as coll
import scipy.integrate as integrate
import math
import atomic.atomic as atomic
import matplotlib.pyplot as plt
# Define constants.
# Probe widths in m
aSize = 3.0 / 100.0
bSize = 1.0 / 100.0
cSize = 0.5 / 100.0
# Mass of tungsten, deuterium, iron and carbon in eV s^2 m^-2
# (atomic mass in amu * 931.49 MeV/amu, divided by c^2).
massW = 183.84 * 931.49 * 10**6.0 / ((3*10**8.0)**2.0)
massD = 2.01 * 931.49 * 10**6 / ((3*10**8)**2.0)
massFe = 55.85 * 931.49 * 10**6 / ((3*10**8)**2.0)
massC = 12.01 * 931.49 * 10**6 / ((3*10**8)**2.0)
# Z of elements.
chargeW = 74.0
chargeD = 1.0
chargeFe = 26.0
# Bug fix: carbon's atomic number is 6 (12 is its mass number). The old
# value of 12.0 doubled the carbon contribution in sputt_flux_carbon.
chargeC = 6.0
# Perpendicular diffusion in m^2 s^-1
dPerp = 1.0
# Approx U_0 as the heat of sublimation of tungsten in eV
u0 = 8.68
# Alpha used in yield calculation.
alpha = 3.44
# Sound speed for Ti=Te=25 eV for reference
ref_cs = ((25 + 25) / massD)**(0.5)
# Time of shot in s
timeOfShot = 5.0 * 25
# Langmuir-probe profiles averaged over all plunges (presumably temperatures
# in eV and densities in m^-3, as used by the formulas below — confirm
# against collLengths.avgAllPlunges).
valuesDict = coll.avgAllPlunges()
rminrseps = valuesDict["RminRSeps"]
temps = valuesDict["Temperatures"]
densities = valuesDict["Densities"]
# Open up the Excel file with all the 2 probe data.
# NOTE(review): get_sheet_by_name is deprecated in newer openpyxl; wb["A2"]
# is the modern equivalent.
wb = xl.load_workbook("A2.xlsx")
A2_sheet = wb.get_sheet_by_name("A2")
B2_sheet = wb.get_sheet_by_name("B2")
C2_sheet = wb.get_sheet_by_name("C2")
def meanZofTungsten(evalTemp, LPtemps=temps, density=1e19):
    """Mean tungsten charge state at evalTemp.

    Computes the collisional-radiative equilibrium charge-state
    distribution over the Langmuir-probe temperature profile, then
    interpolates its mean charge at the requested temperature.
    """
    element_data = atomic.element("tungsten")
    equilibrium = atomic.CollRadEquilibrium(element_data)
    distribution = equilibrium.ionisation_stage_distribution(LPtemps, density)
    # Interpolate mean charge vs. temperature and evaluate at evalTemp.
    mean_charge_interp = interpolate.interp1d(LPtemps, distribution.mean_charge())
    return mean_charge_interp(evalTemp)
def meanZofCarbon(evalTemp, LPtemps=temps, density=1e19):
    """Mean carbon charge state at evalTemp.

    Same procedure as meanZofTungsten, but for carbon: build the
    collisional-radiative equilibrium distribution over the LP
    temperature profile and interpolate its mean charge.
    """
    element_data = atomic.element("carbon")
    equilibrium = atomic.CollRadEquilibrium(element_data)
    distribution = equilibrium.ionisation_stage_distribution(LPtemps, density)
    # Interpolate mean charge vs. temperature and evaluate at evalTemp.
    mean_charge_interp = interpolate.interp1d(LPtemps, distribution.mean_charge())
    return mean_charge_interp(evalTemp)
# Function for the yield of sputtering for D+ on W from Was textbook.
def Y(energy):
    """Sputtering yield for D+ on W at impact energy *energy* (eV).

    Bug fix: the exponent was written as (1/3), which is integer division
    (== 0) under Python 2 — and this file imports print_function for Py2
    compatibility. Use 1.0/3.0, matching the sputt_flux* integrands.
    """
    return 0.528 * alpha * chargeD * (massD / (u0*(massD + massW))) * 0.059 * energy ** (1.0/3.0)
# The flux at the probe as a function of energy assuming Maxwellian distribution
# and a constant Deuterium density in the flux tube (so n(E) -> n).
def fluxD(energy, Ti, ne):
    """Maxwellian D+ flux at the probe for a given particle energy.

    Assumes a constant deuterium density in the flux tube, so n(E) -> ne.
    energy/Ti in eV, ne in m^-3.
    """
    return (ref_cs * ne * 2 * (energy/3.1415)**0.5
            * (1/float(Ti))**(1.5) * math.exp(-energy/Ti))
# The flux of W off the probe due to sputtering. sputt_flux = yield * flux of dueterium.
def sputt_flux(ne=10**18, Ti=25.0, Te=25.0):
    """W flux sputtered off the probe by the Maxwellian D+ flux.

    Integrates yield(E + 3*Ti) times the D+ flux from the sputtering
    threshold to infinity. Prints and returns the total.
    """
    # Sputtering energy threshold of tungsten oxide in eV. Note pure W is 160 eV.
    eThresh = 65
    soundSpeed = ((float(Te) + float(Ti)) / massD)**0.5

    def integrand(E):
        # Yield at impact energy (E + 3Ti) times the D+ flux at energy E.
        return 0.528 * alpha * chargeD * (massD / (u0*(massD + massW))) * 0.059 * (E+3*Ti) ** (1.0/3.0) * soundSpeed * ne * 2 * (E/3.1415)**0.5 * (1/float(Ti))**(1.5) * math.exp(-E/Ti)

    total, _err = integrate.quad(integrand, eThresh, np.inf)
    print("Sputtered Flux (D): " + str(total))
    return total
# The flux of W off the probe due to sputtering. sputt_flux = yield * flux of dueterium.
def sputt_flux_iron(ne=10**18, Ti=25.0, Te=25.0):
    """W flux sputtered off the probe by an assumed Fe impurity flux.

    The Fe flux is taken as a fixed 0.5% fraction of the D+ flux.
    Prints and returns the total.
    """
    # Sputtering energy threshold of tungsten oxide in eV. Note pure W is 160 eV.
    eThresh = 65
    soundSpeed = ((float(Te) + float(Ti)) / massD)**0.5
    frac_of_D_flux = 0.005

    def integrand(E):
        # Fe yield at (E + 3Ti) times the scaled Maxwellian flux at E.
        return 0.528 * alpha * chargeFe * (massFe / (u0*(massFe + massW))) * 0.059 * (E+3*Ti) ** (1.0/3.0) * soundSpeed * frac_of_D_flux * ne * 2 * (E/3.1415)**0.5 * (1/float(Ti))**(1.5) * math.exp(-E/Ti)

    total, _err = integrate.quad(integrand, eThresh, np.inf)
    print("Sputtered Flux (Fe): " + str(total))
    return total
# The flux of W off the probe due to sputtering. sputt_flux = yield * flux of dueterium.
def sputt_flux_carbon(ne=10**18, Ti=25.0, Te=25.0):
    """W flux sputtered off the probe by an assumed C impurity flux.

    The C flux is taken as a fixed 1% fraction of the D+ flux.
    Prints and returns the total.
    """
    # Sputtering energy threshold of tungsten oxide in eV. Note pure W is 160 eV.
    eThresh = 65
    soundSpeed = ((float(Te) + float(Ti)) / massD)**0.5
    frac_of_D_flux = 0.01

    def integrand(E):
        # C yield at (E + 3Ti) times the scaled Maxwellian flux at E.
        return 0.528 * alpha * chargeC * (massC / (u0*(massC + massW))) * 0.059 * (E+3*Ti) ** (1.0/3.0) * soundSpeed * frac_of_D_flux * ne * 2 * (E/3.1415)**0.5 * (1/float(Ti))**(1.5) * math.exp(-E/Ti)

    total, _err = integrate.quad(integrand, eThresh, np.inf)
    print("Sputtered Flux (C): " + str(total))
    return total
# The loss_flux is that which is sputtered and does NOT return to the probe. It
# is assumed if the sputtered W ionizes in the flux tube it will return to the
# probe.
def loss_flux(ne=10**18, Ti=25.0, Te=25.0, probe="A"):
    """Sputtered-W flux that does NOT return to the probe.

    Sputtered atoms that ionize within one probe-width of the surface are
    assumed to return; the remaining (1 - frac) of the sputtered flux is
    counted as lost.

    ne    -- electron density (m^-3)
    Ti/Te -- ion/electron temperature (eV)
    probe -- "A", "B" or "C"; selects the probe width used for the
             ionization-fraction estimate.
    """
    # Use corresponding size for desired probe.
    # NOTE(review): an invalid probe letter leaves `size` unbound and
    # raises NameError further down.
    if probe=="A":
        size = aSize
    elif probe=="B":
        size = bSize
    elif probe=="C":
        size = cSize
    else:
        print("Incorrect probe entry. Should be either A, B, or C.")
    # Get the ionization rate coefficient for a specific temperature.
    ad = atomic.element('tungsten')
    temperatureRange = np.logspace(0,4,100)
    S = ad.coeffs['ionisation']
    f = interpolate.interp1d(temperatureRange, S(0, temperatureRange, ne))
    coeff = f(Te)
    # Initial speed entering the flux tube approx. v0 (half the sound speed).
    soundSpeed = ((float(Te) + float(Ti)) / massD)**0.5
    v0 = 0.5 * soundSpeed
    # Calculate lamda_ionization.
    lambda_iz = v0 * (ne * coeff)**(-1)
    # Fraction ionized in the flux tube (i.e. it will return to the probe)
    frac = 1 - math.exp(-size / lambda_iz)
    print("Fraction Ionized: " + str(frac))
    # Thus the fraction lost is 1-frac of the sputtered flux.
    # Alternative impurity mixes, kept for reference:
    # Due to D, Fe and C.
    #fracFluxLost = (1 - frac) * (sputt_flux(ne=ne, Ti=Ti, Te=Te) + sputt_flux_iron(ne=ne, Ti=Ti, Te=Te) + sputt_flux_carbon(ne=ne, Ti=Ti, Te=Te))
    # Due to D and C.
    #fracFluxLost = (1 - frac) * (sputt_flux(ne=ne, Ti=Ti, Te=Te) + sputt_flux_carbon(ne=ne, Ti=Ti, Te=Te))
    # Due to just D
    fracFluxLost = (1 - frac) * (sputt_flux(ne=ne, Ti=Ti, Te=Te))
    # Due to just C.
    #fracFluxLost = (1 - frac) * (sputt_flux_carbon(ne=ne, Ti=Ti, Te=Te))
    # Due to just Fe.
    #fracFluxLost = (1 - frac) * (sputt_flux_iron(ne=ne, Ti=Ti, Te=Te))
    print("Flux Lost: " + str(fracFluxLost))
    return fracFluxLost
# net_flux is defined as maxFlux - loss_flux. It can also be approximated
# as net_flux = areal density of W / timeOfShot. This may be too rough an
# approximation though.
def net_flux(probe="AD"):
    """Net W flux to one collector-probe face, from measured areal density.

    Reads R-Rsep and W areal-density columns for the requested face from
    the pre-loaded workbook sheets, then approximates
    net_flux = areal density / timeOfShot (valid for short shots).

    probe -- two letters: probe size ("A"/"B"/"C") + face ("D"/"U").
    Returns dict with keys "rminrsep", "net_flux", "areal".
    """
    # Choose correct Excel file sheet
    # NOTE(review): an invalid first letter leaves `sheet` unbound and
    # raises NameError below.
    if probe[0]=="A":
        sheet = A2_sheet
    elif probe[0]=="B":
        sheet = B2_sheet
    elif probe[0]=="C":
        sheet = C2_sheet
    # Extract the cells then extract the values from them.
    if probe=="AD":
        rminrsep_cells = sheet["A2":"A20"]
        areal_cells = sheet["C2":"C20"]
    elif probe=="AU":
        rminrsep_cells = sheet["H2":"H20"]
        areal_cells = sheet["I2":"I20"]
    elif probe=="BD":
        rminrsep_cells = sheet["F2":"F22"]
        areal_cells = sheet["C2":"C22"]
    elif probe=="BU":
        rminrsep_cells = sheet["H2":"H22"]
        areal_cells = sheet["I2":"I22"]
    elif probe=="CD":
        rminrsep_cells = sheet["F2":"F22"]
        areal_cells = sheet["C2":"C22"]
    elif probe=="CU":
        rminrsep_cells = sheet["H2":"H22"]
        areal_cells = sheet["I2":"I22"]
    else:
        print("Incorrect probe entry. Must be AD, AU, BD, BU, CD or CU.")
    # Flatten openpyxl's 2-D cell tuples into 1-D arrays of cells.
    rminrsep_cells = np.transpose(rminrsep_cells)
    areal_cells = np.transpose(areal_cells)
    rminrsep_cells = np.reshape(rminrsep_cells, rminrsep_cells.size)
    areal_cells = np.reshape(areal_cells, areal_cells.size)
    rminrsep = [cell.value for cell in rminrsep_cells]
    areal = [cell.value for cell in areal_cells]
    # Convert arealD units from 10^15 cm^-2 to just m^-2.
    areal = [value*10**19 for value in areal]
    # A first order approximation assumes short shots such that net flux = areal density / time of shot
    tmp_net = []
    for index in range(0, len(areal)):
        tmp = areal[index] / timeOfShot
        #print ("Net Flux: " + str(tmp) + "\n")
        tmp_net.append(tmp)
    net_dict = {"rminrsep":rminrsep, "net_flux":tmp_net, "areal":areal}
    return net_dict
# max_flux is the total flux of W to the probe. It can be determined from
# max_flux = loss_flux + net_flux.
def max_flux(probe="AD"):
    """Total W flux to the probe: max_flux = net_flux + loss_flux.

    Interpolates LP temperature/density at each probe R-Rsep position,
    computes the sputtering loss flux there, and sums it with the
    measured net flux. Positions outside the LP range get zeros.

    Returns dict with "rminrsep", "max_flux", "net_flux", "loss_flux",
    "densityW" and "areal".
    """
    # Get the net flux dictionary, give the same rminrsep to the max flux dict.
    net_dict = net_flux(probe=probe)
    max_dict = {}
    max_dict["rminrsep"] = net_dict["rminrsep"]
    # Interpolations from the LP's for rminrsep vs. temp and density
    f_LP_temps = interpolate.interp1d(rminrseps, temps)
    f_LP_dens = interpolate.interp1d(rminrseps, densities)
    # Fill in the max_dict with max_flux = net flux + loss flux
    max_dict["max_flux"] = []
    max_dict["densityW"] = []
    max_dict["loss_flux"] = []
    max_dict["net_flux"] = []
    for index in range(0, len(max_dict["rminrsep"])):
        tmp_rmrs = max_dict["rminrsep"][index]
        # If the rminrsep is out of range of the interpolation, just put a zero and continue.
        if tmp_rmrs > max(rminrseps):
            max_dict["max_flux"].append(0)
            max_dict["densityW"].append(0)
            max_dict["net_flux"].append(0)
            max_dict["loss_flux"].append(0)
            continue
        if tmp_rmrs < min(rminrseps):
            max_dict["max_flux"].append(0)
            max_dict["densityW"].append(0)
            max_dict["net_flux"].append(0)
            max_dict["loss_flux"].append(0)
            continue
        # Temporary values of the temp/density at the specified rminrsep.
        tmp_temp = f_LP_temps(tmp_rmrs)
        tmp_dens = f_LP_dens(tmp_rmrs)
        tmp_net = net_dict["net_flux"][index]
        # Put the net flux in the dict as well.
        max_dict["net_flux"].append(tmp_net)
        # Get the loss flux at the specified parameters (assumes Ti == Te).
        tmp_loss = loss_flux(ne=tmp_dens, Ti=tmp_temp, Te=tmp_temp, probe=probe[0])
        # Put the loss_flux in the dict as well.
        max_dict["loss_flux"].append(tmp_loss)
        # Add net flux and loss flux and put into max flux dict.
        max_dict["max_flux"].append(tmp_net + tmp_loss)
        # We can estimate the tungsten density in the corresponding flux tube under
        # our assumptions as densityW = max_flux / soundSpeed.
        soundSpeed = ((float(tmp_temp) + float(tmp_temp)) / massD)**0.5
        tmp_wdens = (tmp_net + tmp_loss) / soundSpeed
        max_dict["densityW"].append(tmp_wdens)
        print("Percent W in Flux Tube: " + str(tmp_wdens / tmp_dens * 100) + "%")
        if tmp_net == 0:
            continue
        else:
            print("Percent Flux Lost: " + str(tmp_loss / tmp_net * 100.0) + "%")
            print("Net Flux: " + str(tmp_net))
            print("\n")
    # Put the W areal density in max_dict just because.
    max_dict["areal"] = net_dict["areal"]
    return max_dict
def fit_exp_to_total(max_dict, min_rmin=None, max_rmin=None):
    """Fit a decaying exponential a*exp(-b*(r - min_rmin)) + c to the
    total ("max") flux profile.

    max_dict: dict with "rminrsep" and "max_flux" sequences.
    min_rmin/max_rmin: optional R-Rsep window for the returned curve;
        each defaults to the corresponding extreme of the data.
    Returns {"rminrsep": sample points, "exp_fit": fitted flux values}.
    """
    r_values = max_dict["rminrsep"]
    flux_values = max_dict["max_flux"]
    # Report when the caller supplied a full window, then fill in any
    # missing bound from the data range.
    if min_rmin is not None and max_rmin is not None:
        print("R-Rsep range entered.")
    if min_rmin is None:
        min_rmin = min(r_values)
    if max_rmin is None:
        max_rmin = max(r_values)
    print("R-Rsep range: (" + str(min_rmin) + ", " + str(max_rmin) + ")")

    def exp_fit(r, a, b, c):
        # Decay is referenced to the left edge of the fit window.
        return a * np.exp(-b * (r - min_rmin)) + c

    initial_guess = (10**17, 1, 1)
    popt, _pcov = curve_fit(exp_fit, r_values, flux_values, initial_guess,
                            maxfev=5000)
    a_fit, b_fit, c_fit = popt
    print("Exp. Parameters:")
    print(" a: " + str(a_fit))
    print(" b: " + str(b_fit))
    print(" c: " + str(c_fit))
    print(" lambda: " + str(1.0 / b_fit) + "\n")
    x_fit = np.arange(min_rmin, max_rmin, 0.1)
    return {"rminrsep": x_fit, "exp_fit": exp_fit(x_fit, a_fit, b_fit, c_fit)}
def plotFluxes(probe):
    """Plot total, net and loss flux vs. R-Rsep for the given probe.

    Builds the profiles via the module-level max_flux(); blocks on
    plt.show() until the figure window is closed.
    """
    myDict = max_flux(probe)
    x = myDict["rminrsep"]
    # NOTE(review): `max` shadows the builtin here; harmless locally but
    # worth renaming if this function grows.
    max = myDict["max_flux"]
    net = myDict["net_flux"]
    loss = myDict["loss_flux"]
    # The exponential fit is still computed (and prints its parameters)
    # even though its plot line is commented out below.
    exp_dict = fit_exp_to_total(myDict)
    x_exp = exp_dict["rminrsep"]
    y_exp = exp_dict["exp_fit"]
    plt.plot(x, max, label="Total Flux")
    plt.plot(x, net, label="Net Flux")
    plt.plot(x, loss, label="Loss Flux")
    #plt.plot(x_exp, y_exp, label="Exp. Fit")
    plt.legend()
    plt.xlabel(r"${\rm R - R_{sep}\ (cm)}$")
    plt.ylabel(r"${\rm Flux\ (cm^{-2} s^{-1})}$")
    # NOTE(review): "Comparision" is a typo in the displayed title; left
    # unchanged here since it is runtime output.
    plt.title("Comparision of Fluxes for " + probe + "2")
    plt.show()
| [
"shawnzamperini@gmail.com"
] | shawnzamperini@gmail.com |
bcc5aaa88ec03798f57cbf2092d41e69f6d8be0c | e4806fe953cbb76a6baf1f27ae40562561014f36 | /labravel/MIS_SYS/MAIN.py | ab1d0b37968eeddf9ae34f447d78387237cb0f3c | [] | no_license | LaBravel/Tedu-code | 701daa5f49ab42129db0a4684c8e7b3cbcbe1d65 | 9c93e9d88e940e627c7a3d1e8c2519035b462086 | refs/heads/master | 2020-04-27T16:33:55.535261 | 2019-03-08T07:27:29 | 2019-03-08T07:27:29 | 174,486,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | from MENU import *
from STUDENT import *
def main():
    """Interactive menu loop for the student-records system.

    Repeatedly shows the menu and dispatches the user's choice to the
    matching handler until 'q'/'Q' is entered.
    """
    records = []
    # Menu choice -> handler; every handler operates on `records`.
    actions = {
        '1': lambda: input_student(records),
        '2': lambda: output_student(records),
        '3': lambda: delete_student(records),
        '4': lambda: edit_student(records),
        '5': lambda: sorted_scores_student(records, True),
        '6': lambda: sorted_scores_student(records, False),
        '7': lambda: sorted_ages_student(records, True),
        '8': lambda: sorted_ages_student(records, False),
        '9': lambda: load_info_student(records),
        '10': lambda: save_info_student(records),
    }
    while True:
        show_menu()
        choice = input('要进行什么操作?')
        if choice in ('q', 'Q'):
            break
        handler = actions.get(choice)
        if handler is None:
            print('没有这项操作!')
        else:
            handler()
main()
| [
"463662798@qq.com"
] | 463662798@qq.com |
2967eddce5d6006864ec854b7c7f9d7c3e829f9e | b144c5142226de4e6254e0044a1ca0fcd4c8bbc6 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/flowprofile.py | ea7288f4d656a7167aabf2db92581bc9dd90f409 | [
"MIT"
] | permissive | iwanb/ixnetwork_restpy | fa8b885ea7a4179048ef2636c37ef7d3f6692e31 | c2cb68fee9f2cc2f86660760e9e07bd06c0013c2 | refs/heads/master | 2021-01-02T17:27:37.096268 | 2020-02-11T09:28:15 | 2020-02-11T09:28:15 | 239,721,780 | 0 | 0 | NOASSERTION | 2020-02-11T09:20:22 | 2020-02-11T09:20:21 | null | UTF-8 | Python | false | false | 4,008 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class FlowProfile(Base):
    """Flow Range Profile
    The FlowProfile class encapsulates a required flowProfile resource which will be retrieved from the server every time the property is accessed.
    """
    # Auto-generated REST wrapper: attribute access is proxied through
    # Base._get_attribute / _set_attribute against the IxNetwork server.
    __slots__ = ()
    _SDM_NAME = 'flowProfile'

    def __init__(self, parent):
        super(FlowProfile, self).__init__(parent)

    @property
    def MatchAction(self):
        """An instance of the MatchAction class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.matchaction.MatchAction)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import between topology modules.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.matchaction import MatchAction
        return MatchAction(self)

    @property
    def Count(self):
        """Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.

        Returns:
            number
        """
        return self._get_attribute('count')

    @property
    def DescriptiveName(self):
        """Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offers more context

        Returns:
            str
        """
        return self._get_attribute('descriptiveName')

    @property
    def Name(self):
        """Name of NGPF element, guaranteed to be unique in Scenario

        Returns:
            str
        """
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        self._set_attribute('name', value)

    def update(self, Name=None):
        """Updates a child instance of flowProfile on the server.

        Args:
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE: locals() hands {'self', 'Name'} to Base._update, which
        # presumably filters to settable attributes -- do not rename the
        # parameter or add locals before this call.
        self._update(locals())

    def AddFromTemplate(self, *args, **kwargs):
        """Executes the addFromTemplate operation on the server.

        Creates a Match Action prototype supported by the template.

        addFromTemplate(Arg2:href)
        Args:
            args[0] is Arg2 (str(None|/api/v1/sessions/1/ixnetwork/?deepchild=*)):

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Positional args become Arg2, Arg3, ...; keyword args pass through.
        payload = { "Arg1": self.href }
        for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
        for item in kwargs.items(): payload[item[0]] = item[1]
        return self._execute('addFromTemplate', payload=payload, response_object=None)
| [
"srvc_cm_packages@keysight.com"
] | srvc_cm_packages@keysight.com |
2d04d274781ae60c5385e020cc01d8fc1c3b99bc | 9f7c9201b86128d2459e463d3bb1c60b7e434a78 | /examples/tree_benchmark.py | ea39ec0d7c7b9965ca1cf840687150bca077ec9e | [] | no_license | jackd/deep-cloud | b4a171a290c22a113b8a6dd3a49c875afae84b93 | 9adb25bfcdfd1f2faf3820378cc27a952aa90f9d | refs/heads/master | 2020-07-05T19:32:38.824051 | 2019-10-25T04:04:43 | 2019-10-25T04:04:43 | 202,748,871 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,511 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from time import time
from tqdm import tqdm
import tensorflow_datasets as tfds
from deep_cloud.problems.partnet import PartnetProblem
from deep_cloud.ops.np_utils.tree_utils import pykd
problem = PartnetProblem()
warm_up = 5
benchmark = 10
total = warm_up + benchmark
tree_impl = pykd.KDTree
all_coords = []
for coords, _ in tqdm(tfds.as_numpy(
problem.get_base_dataset('validation').take(total)),
total=total,
desc='getting base data...'):
tree = tree_impl(coords)
dists, indices = tree.query(tree.data, 2, return_distance=True)
del indices
scale = np.mean(dists[:, 1])
coords *= 2 / scale
all_coords.append(coords)
def run_fn(f, data, name):
    """Time `f` over the benchmark samples after a warm-up pass.

    Reads the module-level warm_up/total/benchmark counters; prints the
    total and per-run wall time in milliseconds.
    """
    # Warm-up iterations are deliberately excluded from the timing window.
    for idx in tqdm(range(warm_up), desc='warming up {}'.format(name)):
        f(data[idx])
    start = time()
    for idx in tqdm(range(warm_up, total), desc='benchmarking {}'.format(name)):
        f(data[idx])
    elapsed = time() - start
    print('{} runs took {} ms, {} ms / run'.format(
        benchmark, elapsed * 1000, elapsed * 1000 / benchmark))
trees = [tree_impl(c) for c in all_coords]
def query_tree(tree):
    # Radius query of every point against its own tree (radius 4, with
    # approx_neighbors bounding the approximate search). The result is
    # discarded -- this benchmark only measures query cost.
    tree.query_ball_point(tree.data, 4, approx_neighbors=16)
run_fn(tree_impl, all_coords, 'just tree')
run_fn(query_tree, trees, 'just query')
run_fn(lambda c: query_tree(tree_impl(c)), all_coords, 'compute both')
| [
"thedomjack@gmail.com"
] | thedomjack@gmail.com |
b273b59111c729a742b4aba94c9189dbef82690c | b16bc512603cbe3bdc5a56586cfc9147fe5fb3f6 | /venv/bin/rst2latex.py | 944a30a66b8d95f943a166fadb0372224d4a4a08 | [] | no_license | hoang-ho/TechTogether | caa565b14165c7b0889bd4232098e16a0137ba67 | fa4ca8375ab00d1791d2fce02384503eff5df7e0 | refs/heads/master | 2020-05-01T08:24:22.561868 | 2019-05-13T06:55:46 | 2019-05-13T06:55:46 | 177,377,979 | 2 | 2 | null | 2019-05-13T06:55:47 | 2019-03-24T06:15:31 | Python | UTF-8 | Python | false | false | 829 | py | #!/Users/hoangho/TechTogether/TTB_Backup/venv/bin/python
# $Id: rst2latex.py 5905 2009-04-16 12:04:49Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing LaTeX.
"""
# Best-effort: switch to the user's default locale so docutils produces
# locale-aware output. Failures (e.g. an unsupported locale) are ignored
# rather than aborting this command-line front end. `except Exception`
# instead of a bare `except:` so SystemExit/KeyboardInterrupt still propagate.
try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass
from docutils.core import publish_cmdline
description = ('Generates LaTeX documents from standalone reStructuredText '
'sources. '
'Reads from <source> (default is stdin) and writes to '
'<destination> (default is stdout). See '
'<http://docutils.sourceforge.net/docs/user/latex.html> for '
'the full reference.')
publish_cmdline(writer_name='latex', description=description)
| [
"hoangho@Hoangs-MacBook-Pro.local"
] | hoangho@Hoangs-MacBook-Pro.local |
1defbd42fd03ec6153cd47a3c7858fd4ba026f91 | 360ff148d658caf1736ae159954c928d2ce545f7 | /alembic/env.py | 019c1b1fff7cee38417508e2ad8a39313905a451 | [
"MIT"
] | permissive | beanjo55/KerbalStuff | 3305401122186a692a345a6a0a5fad63f8eb864c | 18e8c517b6f79c2839236a9507464ab0987f103e | refs/heads/master | 2021-01-18T11:11:04.293762 | 2016-02-17T12:10:21 | 2016-02-17T12:10:21 | 51,928,777 | 1 | 0 | null | 2016-02-17T14:34:06 | 2016-02-17T14:34:06 | null | UTF-8 | Python | false | false | 2,076 | py | from __future__ import with_statement
import os, sys
sys.path.append(os.getcwd())
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
from KerbalStuff import app
from KerbalStuff.objects import Base
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just the database URL -- no
    Engine (and therefore no DBAPI) is needed. Calls to
    context.execute() emit the SQL to the script output instead of
    executing it.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file section and associates a live
    connection with the Alembic context; the connection is closed once
    the migrations finish (or fail).
    """
    db_engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    conn = db_engine.connect()
    context.configure(
        connection=conn,
        target_metadata=target_metadata,
    )
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration raised.
        conn.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| [
"sir@cmpwn.com"
] | sir@cmpwn.com |
a9c826a7328ff7e2ee198e2896b50d8824d8f631 | 367d2670c75d385d122bca60b9f550ca5b3888c1 | /gem5/src/cpu/testers/garnet_synthetic_traffic/GarnetSyntheticTraffic.py | 8ad00b64256a9917a7ada32c5171b7828a640d29 | [
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.0-or-later",
"MIT"
] | permissive | Anish-Saxena/aqua_rowhammer_mitigation | 4f060037d50fb17707338a6edcaa0ac33c39d559 | 3fef5b6aa80c006a4bd6ed4bedd726016142a81c | refs/heads/main | 2023-04-13T05:35:20.872581 | 2023-01-05T21:10:39 | 2023-01-05T21:10:39 | 519,395,072 | 4 | 3 | Unlicense | 2023-01-05T21:10:40 | 2022-07-30T02:03:02 | C++ | UTF-8 | Python | false | false | 3,118 | py | # Copyright (c) 2016 Georgia Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.objects.ClockedObject import ClockedObject
from m5.params import *
from m5.proxy import *
class GarnetSyntheticTraffic(ClockedObject):
    """gem5 SimObject: synthetic traffic injector for Garnet network tests.

    All fields below are gem5 Param declarations (default value +
    description); the behavior lives in the C++ class named by cxx_header.
    """
    type = 'GarnetSyntheticTraffic'
    cxx_header = \
        "cpu/testers/garnet_synthetic_traffic/GarnetSyntheticTraffic.hh"
    block_offset = Param.Int(6, "block offset in bits")
    num_dest = Param.Int(1, "Number of Destinations")
    memory_size = Param.Int(65536, "memory size")
    sim_cycles = Param.Int(1000, "Number of simulation cycles")
    num_packets_max = Param.Int(-1, "Max number of packets to send. \
                        Default is to keep sending till simulation ends")
    single_sender = Param.Int(-1, "Send only from this node. \
                        By default every node sends")
    single_dest = Param.Int(-1, "Send only to this dest. \
                        Default depends on traffic_type")
    traffic_type = Param.String("uniform_random", "Traffic type")
    inj_rate = Param.Float(0.1, "Packet injection rate")
    inj_vnet = Param.Int(-1, "Vnet to inject in. \
                        0 and 1 are 1-flit, 2 is 5-flit. \
                            Default is to inject in all three vnets")
    precision = Param.Int(3, "Number of digits of precision \
                        after decimal point")
    response_limit = Param.Cycles(5000000, "Cycles before exiting \
                            due to lack of progress")
    # Memory-system test port and owning system (resolved via Parent proxy).
    test = RequestPort("Port to the memory system to test")
    system = Param.System(Parent.any, "System we belong to")
| [
"asaxena317@krishna-srv4.ece.gatech.edu"
] | asaxena317@krishna-srv4.ece.gatech.edu |
3800a32637fe1d6fdc6c62820da488f167181ae2 | 8f48d12b88048e424ebb0d72ca6dfab5cf12ae0f | /0001_0599/349.py | fd262223f7f0e12f87b1a1d0b8c093a9eedd4ba2 | [] | no_license | renjieliu/leetcode | e1caf13c18a8107ed9252588b339fb76bcb1b246 | 4668b64fcb9320b6c316d8608fc61911ce43b6c7 | refs/heads/master | 2023-03-18T18:16:06.187741 | 2023-03-14T20:31:59 | 2023-03-14T20:31:59 | 128,823,819 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | def intersection(nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
map = {}
output = []
for i in nums1:
map[i] = "a"
for j in nums2:
if map.get(j, "None")!="None":
map[j] = "b"
for k, v in map.items():
if v == "b":
output.append(k)
return output
print(intersection([1, 2, 2, 1], [2, 2,2,2,2,1]))
| [
"anlrj@qq.com"
] | anlrj@qq.com |
728e2b1d2f05db0e391e7186e865451f25c2215e | ca0ffc5606da190274569e3d6ced2543af187fa9 | /get_workday.py | 06f8c01b9e6d486f740ea889b72de0ec804429bc | [] | no_license | littlelienpeanut/TBrain_ETF_prediction | f3a3db045e5274bfca56e28e373e98fa31c1ce67 | c9f2d19e2f97a67cd923928c4b87ffc53c274704 | refs/heads/master | 2020-03-18T01:42:25.374235 | 2018-06-27T16:17:06 | 2018-06-27T16:17:06 | 134,155,671 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | import pandas as pd
import datetime
import csv
def main():
    """For each ETF code, extract its distinct trading dates from the raw
    TEJ price dump and write them to ../stock_workday/<code>_workday.csv.

    Dates keep their first-appearance order from the source file
    (assumed chronological -- TODO confirm against the dump).
    """
    df = pd.read_csv('../TBrain_Round2_DataSet_20180518/tetfp.csv', encoding = 'utf8')
    fname = ['50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '6201', '6203', '6204', '6208', '690', '692', '701', '713']
    codes = df['代碼']   # security-code column
    dates = df['日期']   # trade-date column
    n_rows = len(df)
    for fe in fname:
        code = int(fe)  # hoisted: loop-invariant conversion
        date_title = []
        seen = set()  # O(1) membership instead of scanning date_title each row
        for row in range(n_rows):
            print(str(row) + '/' + str(n_rows))
            if int(codes[row]) == code:
                d = dates[row]
                if d not in seen:
                    seen.add(d)
                    date_title.append(d)
        with open('../stock_workday/' + fe + '_workday.csv', 'w', newline='') as fout:
            wr = csv.writer(fout)
            wr.writerow(['date'])
            for date in date_title:
                wr.writerow([date])
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | littlelienpeanut.noreply@github.com |
7417a352702bc75fb38d3aaa9bbcfbdaa341d73b | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /HcLCh8566zewZvZ2j_14.py | 9f55f28459886892e818bcb9a02bddb0c83f2341 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py |
def word(s):
    """Map an English digit name ('zero'..'nine') to its integer value.

    Raises ValueError for any other string (list/tuple .index semantics).
    """
    names = ('zero', 'one', 'two', 'three', 'four',
             'five', 'six', 'seven', 'eight', 'nine')
    return names.index(s)
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
d211ed69c45c9616969fa79f815d55d5f3412cca | 9ea80adb9252c0e67e5e20c3ff9d6a08cf4a172d | /mxonline/apps/courses/migrations/0007_auto_20190517_1615.py | 574173276b261a61cc8699af045aca93332ac58e | [] | no_license | hfxjd9527/djangoweb | 11830fbbaab0d4986b7494c61ac23d7f19266b67 | 1d83c423755b357eb178cc4f384829082623d2e0 | refs/heads/master | 2022-12-10T02:20:43.569239 | 2019-06-04T14:31:18 | 2019-06-04T14:39:11 | 185,593,356 | 0 | 0 | null | 2022-12-08T00:46:58 | 2019-05-08T11:34:07 | HTML | UTF-8 | Python | false | false | 678 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-05-17 16:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11 makemigrations (2019-05-17).
    # Adds Video.learn_times and changes Course.learn_times' default/label.

    dependencies = [
        ('courses', '0006_video_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='video',
            name='learn_times',
            field=models.IntegerField(default=0, verbose_name='学习时长(分钟数)'),
        ),
        migrations.AlterField(
            model_name='course',
            name='learn_times',
            field=models.IntegerField(default=0, verbose_name='学习时长(分钟数)'),
        ),
    ]
| [
"1725824530@qq.com"
] | 1725824530@qq.com |
350ed4b6348456ac8bf7a9bdab5919e1a7dec755 | 6e5ab77fee1fb4a0310213dd8c6dd8601828b1b9 | /Algorithm/문제/수업/D-13t/AD/[TST] 책꽂이.py | 4c6bb04f51b13f2fef4ab0a48e707a342aa6520b | [] | no_license | hongyong3/TIL | 36d031c0da9e3e6db3eebb977bd3e12df00a849f | 7f1492128e957a78fc95b255f4f7f2978161e471 | refs/heads/master | 2023-08-19T09:16:03.231757 | 2023-08-18T09:38:47 | 2023-08-18T09:38:47 | 162,100,258 | 1 | 0 | null | 2023-02-11T00:52:32 | 2018-12-17T08:42:42 | Jupyter Notebook | UTF-8 | Python | false | false | 711 | py | import sys
sys.stdin = open("[TST] 책꽂이_input.txt", "r")
# NOTE(review): this region contained unresolved git merge-conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which are a syntax error. Resolved by
# keeping the HEAD side -- the complete branch-and-bound solution; the other
# side was an unfinished input-parsing stub.
def bookshelf(idx, sums):
    """Branch and bound over subsets of book heights.

    Tries excluding/including each book in turn, tracking the smallest
    achievable (sums - B) that is still >= 0 in the global `minn`.
    Reads globals N, B, data; updates global minn.
    """
    global minn
    if sums - B >= minn:
        # Prune: this branch cannot beat the best overshoot found so far.
        return
    if idx == N:
        if 0 <= sums - B <= minn - 1:
            minn = sums - B
        return
    bookshelf(idx + 1, sums)
    bookshelf(idx + 1, sums + data[idx])


T = int(input())
for test_case in range(T):
    N, B = map(int, input().split())
    data = [int(input()) for _ in range(N)]
    minn = float('inf')
    bookshelf(0, 0)
    print(minn)
| [
"chy66822495@gmail.com"
] | chy66822495@gmail.com |
f393255f7aea9000b5a00c844b41886415fd2e91 | 20e3ee6642d20578e48756963798acfe307ac6b5 | /Practice/hackerrank/Certifications/Python (Basics)/01AverageFunction.py | e85fad181b9c07a0a2736dbbd750cea5f4a290e4 | [] | no_license | sirinenisaikiran/Python | 538f64276767435de3233b720f547aac0bf4d511 | bdfef0d1c04c7f3b9fc91a164b5fd1789828176c | refs/heads/master | 2023-01-31T00:53:01.650916 | 2021-06-06T10:39:20 | 2021-06-06T10:39:20 | 237,744,104 | 0 | 0 | null | 2023-01-26T03:38:47 | 2020-02-02T08:58:49 | Python | UTF-8 | Python | false | false | 179 | py | def avg(List):
sum = 0
for i in List:
sum += int(i)
return (sum/len(List))
nums = input()
List = nums.split(' ')
Res = avg(List)
print("{:0.2f}".format(Res)) | [
"saikiran.sirneni@gmail.com"
] | saikiran.sirneni@gmail.com |
593a40ccc8e700576ce71bf879140b04f39095fd | 19101bf9478c585f73540f1962494a0315ccd0a6 | /ax/service/managed_loop.py | 0c3e1e47caa6bd03f80680aca0e296ac263bf518 | [
"MIT"
] | permissive | liusulin/Ax | 4ca1dcaa34f129d25faa2f52a8094b5f6e399eba | 850b6975b7c7f9960ad5461e71d0304b2670232a | refs/heads/main | 2023-07-14T01:02:38.044397 | 2021-08-18T15:34:06 | 2021-08-18T15:35:11 | 397,664,102 | 1 | 0 | MIT | 2021-08-18T16:16:10 | 2021-08-18T16:16:09 | null | UTF-8 | Python | false | false | 11,471 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations
import inspect
import logging
from typing import Iterable, List, Optional, Tuple
from ax.core.arm import Arm
from ax.core.base_trial import BaseTrial
from ax.core.batch_trial import BatchTrial
from ax.core.experiment import Experiment
from ax.core.simple_experiment import TEvaluationFunction
from ax.core.trial import Trial
from ax.core.types import (
TEvaluationOutcome,
TModelPredictArm,
TParameterization,
)
from ax.exceptions.constants import CHOLESKY_ERROR_ANNOTATION
from ax.exceptions.core import UserInputError, SearchSpaceExhausted
from ax.modelbridge.base import ModelBridge
from ax.modelbridge.dispatch_utils import choose_generation_strategy
from ax.modelbridge.generation_strategy import GenerationStrategy
from ax.modelbridge.modelbridge_utils import get_pending_observation_features
from ax.service.utils.best_point import (
get_best_from_model_predictions,
get_best_raw_objective_point,
)
from ax.service.utils.instantiation import (
make_experiment,
TParameterRepresentation,
data_and_evaluations_from_raw_data,
)
from ax.utils.common.executils import retry_on_exception
from ax.utils.common.logger import get_logger
from ax.utils.common.typeutils import not_none
logger: logging.Logger = get_logger(__name__)
class OptimizationLoop:
    """Managed optimization loop, in which Ax oversees deployment of trials and
    gathering data."""

    def __init__(
        self,
        experiment: Experiment,
        evaluation_function: TEvaluationFunction,
        total_trials: int = 20,
        arms_per_trial: int = 1,
        random_seed: Optional[int] = None,
        wait_time: int = 0,
        run_async: bool = False,  # TODO[Lena],
        generation_strategy: Optional[GenerationStrategy] = None,
    ) -> None:
        # Async execution is not implemented; guard early.
        assert not run_async, "OptimizationLoop does not yet support async."
        self.wait_time = wait_time
        self.total_trials = total_trials
        self.arms_per_trial = arms_per_trial
        self.random_seed = random_seed
        self.evaluation_function = evaluation_function
        # The loop owns trial creation; a pre-populated experiment would
        # desynchronize current_trial from the experiment's trial indices.
        assert len(experiment.trials) == 0, (
            "Optimization Loop should not be initialized with an experiment "
            "that has trials already."
        )
        self.experiment = experiment
        if generation_strategy is None:
            # Pick a default strategy from the search space; batch trials
            # are requested when more than one arm per trial is configured.
            self.generation_strategy = choose_generation_strategy(
                search_space=experiment.search_space,
                use_batch_trials=self.arms_per_trial > 1,
                random_seed=self.random_seed,
            )
        else:
            self.generation_strategy = generation_strategy
        self.current_trial = 0

    @staticmethod
    def with_evaluation_function(
        parameters: List[TParameterRepresentation],
        evaluation_function: TEvaluationFunction,
        experiment_name: Optional[str] = None,
        objective_name: Optional[str] = None,
        minimize: bool = False,
        parameter_constraints: Optional[List[str]] = None,
        outcome_constraints: Optional[List[str]] = None,
        total_trials: int = 20,
        arms_per_trial: int = 1,
        wait_time: int = 0,
        random_seed: Optional[int] = None,
        generation_strategy: Optional[GenerationStrategy] = None,
    ) -> "OptimizationLoop":
        """Constructs a synchronous `OptimizationLoop` using an evaluation
        function."""
        experiment = make_experiment(
            name=experiment_name,
            parameters=parameters,
            objective_name=objective_name,
            minimize=minimize,
            parameter_constraints=parameter_constraints,
            outcome_constraints=outcome_constraints,
        )
        return OptimizationLoop(
            experiment=experiment,
            total_trials=total_trials,
            arms_per_trial=arms_per_trial,
            random_seed=random_seed,
            wait_time=wait_time,
            generation_strategy=generation_strategy,
            evaluation_function=evaluation_function,
        )

    @classmethod
    def with_runners_and_metrics(
        cls,
        parameters: List[TParameterRepresentation],
        path_to_runner: str,
        paths_to_metrics: List[str],
        experiment_name: Optional[str] = None,
        objective_name: Optional[str] = None,
        minimize: bool = False,
        parameter_constraints: Optional[List[str]] = None,
        outcome_constraints: Optional[List[str]] = None,
        total_trials: int = 20,
        arms_per_trial: int = 1,
        wait_time: int = 0,
        random_seed: Optional[int] = None,
    ) -> "OptimizationLoop":
        """Constructs an asynchronous `OptimizationLoop` using Ax runners and
        metrics."""
        # NOTE: Could use `Scheduler` to implement this if needed.
        raise NotImplementedError  # pragma: no cover

    def _call_evaluation_function(
        self, parameterization: TParameterization, weight: Optional[float] = None
    ) -> TEvaluationOutcome:
        # Dispatch on the user function's arity: 1 arg = parameterization
        # only; 2 args = parameterization and arm weight.
        signature = inspect.signature(self.evaluation_function)
        num_evaluation_function_params = len(signature.parameters.items())
        if num_evaluation_function_params == 1:
            # pyre-fixme[20]: Anonymous call expects argument `$1`.
            evaluation = self.evaluation_function(parameterization)
        elif num_evaluation_function_params == 2:
            evaluation = self.evaluation_function(parameterization, weight)
        else:
            raise UserInputError(
                "Evaluation function must take either one parameter "
                "(parameterization) or two parameters (parameterization and weight)."
            )
        return evaluation

    def _get_new_trial(self) -> BaseTrial:
        # Single-arm configs get a regular Trial; multi-arm configs get a
        # BatchTrial with n arms from the generation strategy.
        if self.arms_per_trial == 1:
            return self.experiment.new_trial(
                generator_run=self.generation_strategy.gen(
                    experiment=self.experiment,
                    pending_observations=get_pending_observation_features(
                        experiment=self.experiment
                    ),
                )
            )
        elif self.arms_per_trial > 1:
            return self.experiment.new_batch_trial(
                generator_run=self.generation_strategy.gen(
                    experiment=self.experiment, n=self.arms_per_trial
                )
            )
        else:
            raise UserInputError(
                f"Invalid number of arms per trial: {self.arms_per_trial}"
            )

    def _get_weights_by_arm(
        self, trial: BaseTrial
    ) -> Iterable[Tuple[Arm, Optional[float]]]:
        # Trial: single arm, no weight. BatchTrial: normalized weights.
        if isinstance(trial, Trial):
            if trial.arm is not None:
                return [(not_none(trial.arm), None)]
            return []
        elif isinstance(trial, BatchTrial):
            return trial.normalized_arm_weights().items()
        else:
            raise UserInputError(f"Invalid trial type: {type(trial)}")

    # Retried on RuntimeError with a Cholesky-specific annotation wrapped
    # around the error message (numerical issues during model fitting).
    @retry_on_exception(
        logger=logger,
        exception_types=(RuntimeError,),
        suppress_all_errors=False,
        wrap_error_message_in=CHOLESKY_ERROR_ANNOTATION,
    )
    def run_trial(self) -> None:
        """Run a single step of the optimization plan."""
        if self.current_trial >= self.total_trials:
            raise ValueError("Optimization is complete, cannot run another trial.")
        logger.info(f"Running optimization trial {self.current_trial + 1}...")
        trial = self._get_new_trial()
        trial.mark_running(no_runner_required=True)
        # Evaluate every arm of the trial and convert the raw results into
        # an Ax Data object (evaluations part of the tuple is discarded).
        _, data = data_and_evaluations_from_raw_data(
            raw_data={
                arm.name: self._call_evaluation_function(arm.parameters, weight)
                for arm, weight in self._get_weights_by_arm(trial)
            },
            trial_index=self.current_trial,
            sample_sizes={},
            metric_names=not_none(
                self.experiment.optimization_config
            ).objective.metric_names,
        )
        self.experiment.attach_data(data=data)
        trial.mark_completed()
        self.current_trial += 1

    def full_run(self) -> OptimizationLoop:
        """Runs full optimization loop as defined in the provided optimization
        plan."""
        num_steps = self.total_trials
        logger.info(f"Started full optimization with {num_steps} steps.")
        for _ in range(num_steps):
            try:
                self.run_trial()
            except SearchSpaceExhausted as err:
                # Expected stop condition: nothing left to generate.
                logger.info(
                    f"Stopped optimization as the search space is exhaused. Message "
                    f"from generation strategy: {err}."
                )
                return self
            except Exception:
                # Deliberate best-effort: log and return the partial loop
                # rather than propagate mid-optimization failures.
                logger.exception("Encountered exception during optimization: ")
                return self
        return self

    def get_best_point(self) -> Tuple[TParameterization, Optional[TModelPredictArm]]:
        """Obtains the best point encountered in the course
        of this optimization."""
        # Find latest trial which has a generator_run attached and get its predictions
        model_predictions = get_best_from_model_predictions(experiment=self.experiment)
        if model_predictions is not None:
            return model_predictions

        # Could not find through model, default to using raw objective.
        parameterization, values = get_best_raw_objective_point(
            experiment=self.experiment
        )
        # For values, grab just the means to conform to TModelPredictArm format.
        return (
            parameterization,
            (
                {k: v[0] for k, v in values.items()},  # v[0] is mean
                {k: {k: v[1] * v[1]} for k, v in values.items()},  # v[1] is sem
            ),
        )

    def get_current_model(self) -> Optional[ModelBridge]:
        """Obtain the most recently used model in optimization."""
        return self.generation_strategy.model
def optimize(
    parameters: List[TParameterRepresentation],
    evaluation_function: TEvaluationFunction,
    experiment_name: Optional[str] = None,
    objective_name: Optional[str] = None,
    minimize: bool = False,
    parameter_constraints: Optional[List[str]] = None,
    outcome_constraints: Optional[List[str]] = None,
    total_trials: int = 20,
    arms_per_trial: int = 1,
    random_seed: Optional[int] = None,
    generation_strategy: Optional[GenerationStrategy] = None,
) -> Tuple[
    TParameterization, Optional[TModelPredictArm], Experiment, Optional[ModelBridge]
]:
    """Construct and run a full optimization loop.

    Convenience wrapper: builds an `OptimizationLoop` via
    `with_evaluation_function`, runs it to completion, and returns a
    4-tuple of (best parameterization, model predictions for it if
    available, the underlying Experiment, the most recently fit model).
    """
    loop = OptimizationLoop.with_evaluation_function(
        parameters=parameters,
        objective_name=objective_name,
        evaluation_function=evaluation_function,
        experiment_name=experiment_name,
        minimize=minimize,
        parameter_constraints=parameter_constraints,
        outcome_constraints=outcome_constraints,
        total_trials=total_trials,
        arms_per_trial=arms_per_trial,
        random_seed=random_seed,
        generation_strategy=generation_strategy,
    )
    loop.full_run()
    parameterization, values = loop.get_best_point()
    return parameterization, values, loop.experiment, loop.get_current_model()
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
4f0f8ba69b0a8734ccdd99d4bc191cf199cc9e7e | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/stdlib-big-2968.py | e973507ef17d14f6230c4f996b768103a5d902a1 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,000 | py | # ChocoPy library functions
def int_to_str(x: int) -> str:
    """Render an integer as its decimal string (with leading '-' if negative)."""
    glyphs:str = "0123456789"
    body:str = ""
    neg:bool = False
    # Remember and strip the sign first.
    if x < 0:
        neg = True
        x = -x
    # Peel digits from least to most significant, prepending each one.
    body = glyphs[x % 10]
    while x >= 10:
        x = x // 10
        body = glyphs[x % 10] + body
    if neg:
        return "-" + body
    return body
def int_to_str2(x: int, x2: int) -> str:
    """Decimal rendering of x; x2 exists only for signature parity and is unused."""
    glyphs:str = "0123456789"
    body:str = ""
    neg:bool = False
    if x < 0:
        neg = True
        x = -x
    # Build the digit string right-to-left.
    body = glyphs[x % 10]
    while x >= 10:
        x = x // 10
        body = glyphs[x % 10] + body
    if neg:
        return "-" + body
    return body
def int_to_str3(x: int, x2: int, x3: int) -> str:
    """Decimal rendering of x; x2/x3 exist only for signature parity (unused)."""
    glyphs:str = "0123456789"
    body:str = ""
    neg:bool = False
    if x < 0:
        neg = True
        x = -x
    body = glyphs[x % 10]
    while x >= 10:
        x = x // 10
        body = glyphs[x % 10] + body
    if neg:
        return "-" + body
    return body
def int_to_str4(x: int, x2: int, x3: int, x4: int) -> str:
    """Decimal rendering of x; x2..x4 exist only for signature parity (unused)."""
    glyphs:str = "0123456789"
    body:str = ""
    neg:bool = False
    if x < 0:
        neg = True
        x = -x
    body = glyphs[x % 10]
    while x >= 10:
        x = x // 10
        body = glyphs[x % 10] + body
    if neg:
        return "-" + body
    return body
def int_to_str5(x: int, x2: int, x3: int, x4: int, x5: int) -> str:
    """Decimal rendering of x; x2..x5 exist only for signature parity (unused)."""
    glyphs:str = "0123456789"
    body:str = ""
    neg:bool = False
    if x < 0:
        neg = True
        x = -x
    body = glyphs[x % 10]
    while x >= 10:
        x = x // 10
        body = glyphs[x % 10] + body
    if neg:
        return "-" + body
    return body
def str_to_int(x: str) -> int:
    """Parse a (possibly negative) decimal string; any malformed input yields 0."""
    glyphs:str = "0123456789"
    total:int = 0
    seen:int = 0
    probe:int = 0
    ch:str = ""
    hit:bool = False
    neg:bool = False
    for ch in x:
        if ch == "-":
            # A minus sign is only legal in the very first position.
            if seen > 0:
                return 0
            neg = True
        else:
            # Look the character up in the digit table.
            hit = False
            probe = 0
            while probe < 10:
                if glyphs[probe] == ch:
                    total = total * 10 + probe
                    hit = True
                probe = probe + 1
            if not hit:
                return 0
        seen = seen + 1
    if neg:
        return 0 - total
    return total
def str_to_int2(x: str, x2: str) -> int:
    """Parse decimal string x (0 on malformed input); x2 is unused padding."""
    glyphs:str = "0123456789"
    total:int = 0
    seen:int = 0
    probe:int = 0
    ch:str = ""
    hit:bool = False
    neg:bool = False
    for ch in x:
        if ch == "-":
            if seen > 0:
                return 0
            neg = True
        else:
            hit = False
            probe = 0
            while probe < 10:
                if glyphs[probe] == ch:
                    total = total * 10 + probe
                    hit = True
                probe = probe + 1
            if not hit:
                return 0
        seen = seen + 1
    if neg:
        return 0 - total
    return total
def str_to_int3(x: str, x2: str, x3: str) -> int:
    """Parse decimal string x (0 on malformed input); x2/x3 are unused padding."""
    glyphs:str = "0123456789"
    total:int = 0
    seen:int = 0
    probe:int = 0
    ch:str = ""
    hit:bool = False
    neg:bool = False
    for ch in x:
        if ch == "-":
            if seen > 0:
                return 0
            neg = True
        else:
            hit = False
            probe = 0
            while probe < 10:
                if glyphs[probe] == ch:
                    total = total * 10 + probe
                    hit = True
                probe = probe + 1
            if not hit:
                return 0
        seen = seen + 1
    if neg:
        return 0 - total
    return total
def str_to_int4(x: str, x2: str, x3: str, x4: str) -> int:
    """Parse decimal string x (0 on malformed input); x2..x4 are unused padding."""
    glyphs:str = "0123456789"
    total:int = 0
    seen:int = 0
    probe:int = 0
    ch:str = ""
    hit:bool = False
    neg:bool = False
    for ch in x:
        if ch == "-":
            if seen > 0:
                return 0
            neg = True
        else:
            hit = False
            probe = 0
            while probe < 10:
                if glyphs[probe] == ch:
                    total = total * 10 + probe
                    hit = True
                probe = probe + 1
            if not hit:
                return 0
        seen = seen + 1
    if neg:
        return 0 - total
    return total
def str_to_int5(x: str, x2: str, x3: str, x4: str, x5: str) -> int:
    """Parse a (possibly negative) decimal string ``x``; return 0 on any
    malformed input.

    Fix: the original body contained a literal ``$Statement`` where the '4'
    branch should have read ``digit = 4`` (apparently a template hole left in
    this synthetic benchmark file), which made the whole file a syntax error.
    The duplicated dead ``elif char == "3"`` branch and the unused padding
    locals (result2..result5, digit2.., char2.., sign2.., first_char2..) are
    dropped. ``x2``..``x5`` are kept for signature parity with the other
    ``str_to_intN`` variants but are ignored, matching the originals.
    """
    result:int = 0
    digit:int = 0
    char:str = ""
    sign:int = 1
    first_char:bool = True
    # Parse digits
    for char in x:
        if char == "-":
            # A minus sign is only legal in the very first position.
            if not first_char:
                return 0 # Error
            sign = -1
        elif char == "0":
            digit = 0
        elif char == "1":
            digit = 1
        elif char == "2":
            digit = 2
        elif char == "3":
            digit = 3
        elif char == "4":
            digit = 4
        elif char == "5":
            digit = 5
        elif char == "6":
            digit = 6
        elif char == "7":
            digit = 7
        elif char == "8":
            digit = 8
        elif char == "9":
            digit = 9
        else:
            return 0 # On error
        first_char = False
        # For a leading '-', digit is still 0 and result is 0, so this is a no-op.
        result = result * 10 + digit
    # Compute result
    return result * sign
# Input parameters
# NOTE: the *2..*5 copies below are synthetic padding for this benchmark
# corpus and are never read; only c, n, s, i drive the loop.
c:int = 42
c2:int = 42
c3:int = 42
c4:int = 42
c5:int = 42
n:int = 10
n2:int = 10
n3:int = 10
n4:int = 10
n5:int = 10

# Run [-nc, nc] with step size c
s:str = ""
s2:str = ""
s3:str = ""
s4:str = ""
s5:str = ""
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0

i = -n * c

# Crunch: round-trip each value through int_to_str/str_to_int and print it,
# stepping i by c each iteration.
while i <= n * c:
    s = int_to_str(i)
    print(s)
    i = str_to_int(s) + c
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
efe190f572281fda60f87458816984aefd501e95 | 29783ede1402f93bee06cbc899c41a48911e1285 | /portrait/import/NameAlignImport2Neo.py | 0632ec363abc75e5d619cc814b7cfe7660d9a381 | [
"BSD-2-Clause"
] | permissive | Allen517/alignment | 389f3dd4ff4b2bd6785ecee009f46e589f3b07f5 | a0bfe29b229182634d85b8b383767e7eda8fc2af | refs/heads/master | 2021-01-15T17:59:26.106980 | 2019-01-28T08:18:04 | 2019-01-28T08:18:04 | 99,768,785 | 6 | 2 | null | null | null | null | UTF-8 | Python | false | false | 6,002 | py | # -*- coding:utf8 -*-
import sys
sys.path.append("../../")
from portrait.DB.GraphdbClient import GraphdbClient
from portrait.DB.MongodbClient import MongodbClient
from portrait.utils.GetConfig import GetConfig
from portrait.utils.utilFunction import unicode2utf8
import json
from portrait.utils.LogHandler import LogHandler
from py2neo import Node
import uuid
class NameAlignImport2Neo(object):
    """Imports douban<->weibo user-alignment data from MongoDB into Neo4j.

    User profiles are read from two MongoDB collections and materialized as
    ``Douban``/``Weibo`` nodes in Neo4j, linked by ``ALIGN`` relationships
    and attached to a synthetic ``User`` node via ``HAS`` relationships.

    Note: this module is Python-2 era code (e.g. ``str.decode`` on bytes,
    ``uuid1().get_hex()``).
    """

    def __init__(self):
        # Wire up the graph DB client plus the two Mongo collections, all
        # configured via GetConfig().
        self.config = GetConfig()
        self.graphdb = GraphdbClient()
        self.graphdb.setDatabase(self.config.graphdb_host, self.config.graphdb_port, \
            self.config.graphdb_user, self.config.graphdb_password)
        self.doubandb = MongodbClient()
        self.doubandb.setDatabase(self.config.doubandb_host, self.config.doubandb_port, \
            self.config.doubandb_name, self.config.doubandb_tab)
        self.weibodb = MongodbClient()
        self.weibodb.setDatabase(self.config.weibodb_host, self.config.weibodb_port, \
            self.config.weibodb_name, self.config.weibodb_tab)
        self.logger = LogHandler('name_align_import2neo')

    def graphdb_transaction(func):
        # Class-body-scope decorator: opens a Neo4j transaction around the
        # wrapped storage method and commits afterwards.
        # NOTE(review): the wrapped method writes through self.graphdb's own
        # API, so whether `graphdb_tx` actually scopes those writes depends on
        # GraphdbClient internals -- confirm.
        def wrapper(self, douban_uid_set, weibo_uid_set):
            graphdb_tx = self.graphdb.graph.begin()
            func(self, douban_uid_set, weibo_uid_set)
            graphdb_tx.commit()
        return wrapper

    @graphdb_transaction
    def __relation_data_storage(self, douban_uid_set, weibo_uid_set):
        """Persist one batch of alignments.

        douban_uid_set[k] is a douban uid aligned with the tuple of weibo
        uids in weibo_uid_set[k]. Mismatched batch lengths are logged and the
        whole batch is skipped.
        """
        if len(douban_uid_set) != len(weibo_uid_set):
            self.logger.warning(u'The length of douban_uid_set and weib_uid_set is not equal. '
                                u'The processed batch is skipped.')
            self.logger.warning(douban_uid_set)
            return
        for k in range(len(douban_uid_set)):
            douban_uid = douban_uid_set[k]
            weibo_uids = weibo_uid_set[k]
            douban_info = self.doubandb.get({'uid': douban_uid})  # get user info from doubandb
            weibo_infos = self.weibodb.search('uid', weibo_uids, '$in')  # get user infos from weibodb
            if '_id' in douban_info:  # remove automatically generated key '_id' with type of ObjectId
                douban_info.pop('_id')
            # Set and store the graph node for the douban profile.
            user_graph_node = None
            if 'id' in douban_info:
                douban_grpah_node = self.graphdb.insert_or_update_node('Douban', douban_info['id'], douban_info)
                # Reuse an existing User node (reachable via HAS) or create one.
                if not user_graph_node:
                    user_graph_node = self.graphdb.find_node_by_rel('Douban', {'id': "='{}'".format(
                        douban_grpah_node['id'])}, 'HAS')
                    if user_graph_node:
                        user_graph_node = user_graph_node[0]
                if not user_graph_node:
                    user_graph_node = self.graphdb.insert_or_update_node('User', uuid.uuid1().get_hex())
                self.graphdb.insert_or_update_relation('HAS', user_graph_node, douban_grpah_node)
            # Set and store graph nodes for the aligned weibo profiles.
            if weibo_infos:
                for weibo_info in weibo_infos:
                    if 'uid' in weibo_info:
                        weibo_graph_node = self.graphdb.insert_or_update_node('Weibo', \
                            weibo_info['uid'], weibo_info)
                        # NOTE(review): douban_grpah_node is only bound when the
                        # douban record had an 'id'; otherwise the ALIGN insert
                        # below raises NameError. Confirm whether records
                        # without 'id' can reach this point.
                        self.graphdb.insert_or_update_relation('ALIGN', douban_grpah_node, \
                            weibo_graph_node, {'ID': 1.})
                        # Reuse an existing User node or create one.
                        # NOTE(review): the lookup keys on
                        # douban_grpah_node['uid'] rather than the weibo node's
                        # uid -- looks suspicious, confirm intent.
                        if not user_graph_node:
                            user_graph_node = self.graphdb.find_node_by_rel('Weibo', {'uid': "='{}'".format(
                                douban_grpah_node['uid'])}, 'HAS')
                        if not user_graph_node:
                            user_graph_node = self.graphdb.insert_or_update_node('User', uuid.uuid1().get_hex())
                        self.graphdb.insert_or_update_relation('HAS', user_graph_node, weibo_graph_node)

    def storeDoubanName(self, file_name, batch_proc_num):
        """Append (uid, name, desc, loc_name) of douban users to a local file,
        paging through the collection batch_proc_num records at a time.
        Records already present in the graph are skipped.
        """
        # Fix: mode was 'aw', which is not a valid open() mode; append is 'a'.
        with open(file_name, 'a') as wrtF:
            skip_num = 0
            while (True):
                query_res = self.doubandb.getAll(batch_proc_num, skip_num)
                query_num = 0
                vals = list()
                for douban_res in query_res:
                    query_num += 1
                    if 'id' in douban_res:
                        # Fix: the original called "douban_{}".format() with no
                        # argument (IndexError at runtime); supply the record id.
                        # NOTE(review): the "douban_" prefix differs from the raw
                        # id used by insert_or_update_node above -- confirm the
                        # id scheme used in the graph.
                        graph_res = self.graphdb.find_node_by_id("Douban", "douban_{}".format(douban_res['id']))
                        if graph_res:
                            continue
                    if 'uid' in douban_res and 'name' in douban_res \
                            and 'desc' in douban_res and 'loc_name' in douban_res:
                        vals.append({'uid': douban_res['uid'], 'name': douban_res['name'], \
                            'desc': douban_res['desc'], 'loc_name': douban_res['loc_name']})
                if not query_num:
                    break
                for v in vals:
                    wrtF.write(json.dumps(v, ensure_ascii=False).decode('utf8') + '\t')
                # Fix: '%' binds tighter than '+', so the original computed
                # (str % skip_num) + query_num -> TypeError. Format the sum.
                self.logger.info('已存储%d条豆瓣数据至本地' % (skip_num + query_num))
                skip_num += batch_proc_num

    def relation_data_finder(self, batch_proc_num):
        """Scan weibo records, then walk all alignment relationships and store
        them in batches of batch_proc_num via __relation_data_storage."""
        skip_num = 0
        while (True):
            # 1. get weibo data from mongo
            # NOTE(review): this pass only reads nick_name into a local that is
            # never used -- it appears to be a warm-up/scan; confirm.
            weibo_query_res = self.weibodb.getAll(batch_proc_num, skip_num)
            query_num = 0
            for weibo_res in weibo_query_res:
                query_num += 1
                weibo_res_name = weibo_res['nick_name']
            if not query_num:  # no results
                break
            skip_num += batch_proc_num
        # 1. get relationships
        # NOTE(review): self.__get_rel() is not defined anywhere in this class
        # -- this raises AttributeError at runtime. TODO: implement or import
        # the relationship source.
        rels = self.__get_rel()
        # 2.1 initialization
        proc_num = 0
        douban_uid_set = tuple()
        weibo_uid_set = list()
        # 2.2 start to process relationships
        rels = self.__get_rel()
        for rel in rels:
            proc_num += 1
            # 2.3 if processing the max of batch size, find user infos from mongodb
            if proc_num % batch_proc_num == 0:
                self.__relation_data_storage(douban_uid_set, weibo_uid_set)
                self.logger.info(u'Already processing %d alignment records' % proc_num)
                douban_uid_set = tuple()
                weibo_uid_set = list()
            # 2.2 fetch douban_uid and weibo_uids from current relationship info
            douban_uid = ""
            weibo_uids = tuple()
            if "doubanId" in rel:
                douban_uid = rel["doubanId"]
            if "weiboIds" in rel:
                for weibo_id_info in rel["weiboIds"]:
                    if "uid" in weibo_id_info:
                        weibo_uids += unicode2utf8(weibo_id_info['uid']),
            douban_uid_set += unicode2utf8(douban_uid),
            weibo_uid_set.append(weibo_uids)
        # Flush the final partial batch.
        self.__relation_data_storage(douban_uid_set, weibo_uid_set)
        self.logger.info(u'Done! Already processing %d alignment records' % proc_num)
if __name__=='__main__':
data2neo = NameAlignImport2Neo()
data2neo.storeDoubanName('douban_tmp', 10) | [
"wangyongqing.casia@gmail.com"
] | wangyongqing.casia@gmail.com |
d609ad3178c98a7b86540b44afa002b47acc664a | 409270c19919496c4083c2c620c52207b1d29ca3 | /Day 41/Solution 1.py | b4e32e45632235670e91c02991f446f68fe42798 | [] | no_license | rayandasoriya/100DaysOfCode | ec275b90c33efd6e354c712b10cf9b5ae3ef3382 | 3158b69f6bf1a13604e36662817ab80c582df557 | refs/heads/master | 2020-03-31T09:20:45.329067 | 2019-01-31T16:16:04 | 2019-01-31T16:16:04 | 152,092,263 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | class Solution(object):
def subsets(self, nums):
nums.sort()
result = [[]]
for num in nums:
result += [i + [num] for i in result]
return result
| [
"dasoriyarayan@gmail.com"
] | dasoriyarayan@gmail.com |
ca8241aedb354d8af933ca4f71388f2d3f4e7420 | 27cb9cc771ffa02c4f7e12dcd4688e311c63aace | /fairseq/modules/sinusoidal_positional_embedding.py | 81324965f7c7d3088d215e3e147ece7b845e74d0 | [
"MIT"
] | permissive | periclesmiranda/TSPNet | 78aee61a4e4497ae82b1bb6731a6edd6230720cd | 8f71315486c78b540382ef6420eab5441333bcda | refs/heads/main | 2023-07-19T16:06:48.169045 | 2021-09-10T15:08:36 | 2021-09-10T15:08:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,985 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from typing import Any, Optional
import torch
import torch.onnx.operators
from fairseq import utils
from torch import Tensor, nn
class SinusoidalPositionalEmbedding(nn.Module):
    """This module produces sinusoidal positional embeddings of any length.

    Padding symbols are ignored. The embedding table is grown lazily in
    ``forward`` whenever a longer sequence is seen.
    """

    def __init__(self, embedding_dim, padding_idx, init_size=1024):
        super().__init__()
        self.embedding_dim = embedding_dim
        self.padding_idx = padding_idx
        # Pre-compute an initial table; extended on demand in forward().
        self.weights = SinusoidalPositionalEmbedding.get_embedding(
            init_size, embedding_dim, padding_idx
        )
        self.onnx_trace = False
        # Buffer used only to carry the module's device/dtype for .to() below.
        self.register_buffer("_float_tensor", torch.FloatTensor(1))
        self.max_positions = int(1e5)

    def prepare_for_onnx_export_(self):
        # Switch forward() to the ONNX-traceable code paths.
        self.onnx_trace = True

    @staticmethod
    def get_embedding(
        num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None
    ):
        """Build sinusoidal embeddings.

        This matches the implementation in tensor2tensor, but differs slightly
        from the description in Section 3.5 of "Attention Is All You Need".
        """
        half_dim = embedding_dim // 2
        emb = math.log(10000) / (half_dim - 1)
        emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
        # Outer product of positions and inverse frequencies.
        emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(
            1
        ) * emb.unsqueeze(0)
        # Interleave sin/cos halves along the feature dimension.
        emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(
            num_embeddings, -1
        )
        if embedding_dim % 2 == 1:
            # zero pad
            emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
        if padding_idx is not None:
            # The padding position gets an all-zero embedding.
            emb[padding_idx, :] = 0
        return emb

    def forward(
        self,
        input,
        incremental_state: Optional[Any] = None,
        timestep: Optional[Tensor] = None,
        positions: Optional[Any] = None,
    ):
        """Input is expected to be of size [bsz x seqlen]."""
        bspair = torch.onnx.operators.shape_as_tensor(input)
        bsz, seq_len = bspair[0], bspair[1]
        max_pos = self.padding_idx + 1 + seq_len
        if self.weights is None or max_pos > self.weights.size(0):
            # recompute/expand embeddings if needed
            self.weights = SinusoidalPositionalEmbedding.get_embedding(
                max_pos, self.embedding_dim, self.padding_idx
            )
        # Move the table onto the module's current device/dtype.
        self.weights = self.weights.to(self._float_tensor)

        if incremental_state is not None:
            # positions is the same for every token when decoding a single step
            pos = timestep.view(-1)[0] + 1 if timestep is not None else seq_len
            if self.onnx_trace:
                return (
                    self.weights.index_select(index=self.padding_idx + pos, dim=0)
                    .unsqueeze(1)
                    .repeat(bsz, 1, 1)
                )
            return self.weights[self.padding_idx + pos, :].expand(bsz, 1, -1)

        # Full-sequence path: derive per-token positions (padding ignored).
        positions = utils.make_positions(
            input, self.padding_idx, onnx_trace=self.onnx_trace
        )
        if self.onnx_trace:
            # ONNX-safe gather + reshape.
            flat_embeddings = self.weights.detach().index_select(0, positions.view(-1))
            embedding_shape = torch.cat(
                (bsz.view(1), seq_len.view(1), torch.tensor([-1], dtype=torch.long))
            )
            embeddings = torch.onnx.operators.reshape_from_tensor_shape(
                flat_embeddings, embedding_shape
            )
            return embeddings
        return (
            self.weights.index_select(0, positions.view(-1))
            .view(bsz, seq_len, -1)
            .detach()
        )
| [
"chenchen.xu@anu.edu.au"
] | chenchen.xu@anu.edu.au |
48405bbbb1645e364d550e79258965c7757dafd6 | e7290064b5df4731167bab10606f451b446a21f7 | /rllib/execution/buffers/mixin_replay_buffer.py | bf23abdf6c108b7dd0b79e05dc5cd1dbf09d6844 | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | sven1977/ray | dce9f6fa114741837341f14aef0a8c64c442aba6 | b73a496af19bce627a611e7af2cb02a3c5d99684 | refs/heads/master | 2023-09-02T00:57:47.167794 | 2023-08-17T09:33:04 | 2023-08-17T09:33:04 | 229,269,728 | 2 | 5 | Apache-2.0 | 2023-07-29T07:08:41 | 2019-12-20T13:27:01 | Python | UTF-8 | Python | false | false | 7,564 | py | import collections
import platform
import random
from typing import Optional
from ray.util.timer import _Timer
from ray.rllib.execution.replay_ops import SimpleReplayBuffer
from ray.rllib.policy.sample_batch import DEFAULT_POLICY_ID, concat_samples
from ray.rllib.utils.deprecation import Deprecated
from ray.rllib.utils.replay_buffers.multi_agent_replay_buffer import ReplayMode
from ray.rllib.utils.replay_buffers.replay_buffer import _ALL_POLICIES
from ray.rllib.utils.typing import PolicyID, SampleBatchType
class MixInMultiAgentReplayBuffer:
    """This buffer adds replayed samples to a stream of new experiences.

    - Any newly added batch (`add()`) is immediately returned upon
      the next `replay` call (close to on-policy) as well as being moved
      into the buffer.
    - Additionally, a certain number of old samples is mixed into the
      returned sample according to a given "replay ratio".
    - If >1 calls to `add()` are made without any `replay()` calls
      in between, all newly added batches are returned (plus some older
      samples according to the "replay ratio").

    Examples:
        >>> from ray.rllib.execution.replay_buffer import MixInMultiAgentReplayBuffer
        >>> # replay ratio 0.66 (2/3 replayed, 1/3 new samples):
        >>> buffer = MixInMultiAgentReplayBuffer(capacity=100,  # doctest: +SKIP
        ...     replay_ratio=0.66)  # doctest: +SKIP
        >>> A, B, C, D = ...  # doctest: +SKIP
        >>> buffer.add(A)  # doctest: +SKIP
        >>> buffer.add(B)  # doctest: +SKIP
        >>> buffer.replay()  # doctest: +SKIP
        [A, B, B]
        >>> buffer.add(C)  # doctest: +SKIP
        >>> buffer.replay()  # doctest: +SKIP
        [C, A, B]
        >>> # or: [C, A, A] or [C, B, B], but always C as it
        >>> # is the newest sample
        >>> buffer.add(D)  # doctest: +SKIP
        >>> buffer.replay()  # doctest: +SKIP
        [D, A, C]
        >>> # replay proportion 0.0 -> replay disabled:
        >>> from ray.rllib.execution import MixInReplay
        >>> buffer = MixInReplay(capacity=100, replay_ratio=0.0)  # doctest: +SKIP
        >>> buffer.add(A)  # doctest: +SKIP
        >>> buffer.replay()  # doctest: +SKIP
        [A]
        >>> buffer.add(B)  # doctest: +SKIP
        >>> buffer.replay()  # doctest: +SKIP
        [B]
    """

    def __init__(
        self,
        capacity: int,
        replay_ratio: float,
        replay_mode: ReplayMode = ReplayMode.INDEPENDENT,
    ):
        """Initializes MixInReplay instance.

        Args:
            capacity: Number of batches to store in total.
            replay_ratio: Ratio of replayed samples in the returned
                batches. E.g. a ratio of 0.0 means only return new samples
                (no replay), a ratio of 0.5 means always return newest sample
                plus one old one (1:1), a ratio of 0.66 means always return
                the newest sample plus 2 old (replayed) ones (1:2), etc...
            replay_mode: Either "independent" (one buffer per policy) or
                "lockstep" (one shared buffer keyed by _ALL_POLICIES).
        """
        self.capacity = capacity
        self.replay_ratio = replay_ratio
        # replay_proportion = old/new; undefined (None) for ratio == 1.0.
        self.replay_proportion = None
        if self.replay_ratio != 1.0:
            self.replay_proportion = self.replay_ratio / (1.0 - self.replay_ratio)

        if replay_mode in ["lockstep", ReplayMode.LOCKSTEP]:
            self.replay_mode = ReplayMode.LOCKSTEP
        elif replay_mode in ["independent", ReplayMode.INDEPENDENT]:
            self.replay_mode = ReplayMode.INDEPENDENT
        else:
            raise ValueError("Unsupported replay mode: {}".format(replay_mode))

        def new_buffer():
            return SimpleReplayBuffer(num_slots=capacity)

        # One underlying buffer per policy id (or one shared in lockstep mode).
        self.replay_buffers = collections.defaultdict(new_buffer)

        # Metrics.
        self.add_batch_timer = _Timer()
        self.replay_timer = _Timer()
        self.update_priorities_timer = _Timer()

        # Added timesteps over lifetime.
        self.num_added = 0

        # Last added batch(es).
        self.last_added_batches = collections.defaultdict(list)

    def add(self, batch: SampleBatchType) -> None:
        """Adds a batch to the appropriate policy's replay buffer.

        Turns the batch into a MultiAgentBatch of the DEFAULT_POLICY_ID if
        it is not a MultiAgentBatch. Subsequently adds the individual policy
        batches to the storage.

        Args:
            batch: The batch to be added.
        """
        # Make a copy so the replay buffer doesn't pin plasma memory.
        batch = batch.copy()
        batch = batch.as_multi_agent()

        with self.add_batch_timer:
            if self.replay_mode == ReplayMode.LOCKSTEP:
                # Lockstep mode: Store under _ALL_POLICIES key (we will always
                # only sample from all policies at the same time).
                # This means storing a MultiAgentBatch to the underlying buffer
                self.replay_buffers[_ALL_POLICIES].add_batch(batch)
                self.last_added_batches[_ALL_POLICIES].append(batch)
            else:
                # Store independent SampleBatches
                for policy_id, sample_batch in batch.policy_batches.items():
                    self.replay_buffers[policy_id].add_batch(sample_batch)
                    self.last_added_batches[policy_id].append(sample_batch)
        self.num_added += batch.count

    def replay(
        self, policy_id: PolicyID = DEFAULT_POLICY_ID
    ) -> Optional[SampleBatchType]:
        """Returns all newly added batches mixed with replayed older ones.

        The number of replayed batches appended is stochastic, governed by
        ``replay_proportion``. Returns None if there is nothing to return.
        """
        if self.replay_mode == ReplayMode.LOCKSTEP and policy_id != _ALL_POLICIES:
            raise ValueError(
                "Trying to sample from single policy's buffer in lockstep "
                "mode. In lockstep mode, all policies' experiences are "
                "sampled from a single replay buffer which is accessed "
                "with the policy id `{}`".format(_ALL_POLICIES)
            )

        buffer = self.replay_buffers[policy_id]
        # Return None, if:
        # - Buffer empty or
        # - `replay_ratio` < 1.0 (new samples required in returned batch)
        #   and no new samples to mix with replayed ones.
        if len(buffer) == 0 or (
            len(self.last_added_batches[policy_id]) == 0 and self.replay_ratio < 1.0
        ):
            return None

        # Mix buffer's last added batches with older replayed batches.
        with self.replay_timer:
            output_batches = self.last_added_batches[policy_id]
            self.last_added_batches[policy_id] = []

            # No replay desired -> Return here.
            if self.replay_ratio == 0.0:
                return concat_samples(output_batches)
            # Only replay desired -> Return a (replayed) sample from the
            # buffer.
            elif self.replay_ratio == 1.0:
                return buffer.replay()

            # Replay ratio = old / [old + new]
            # Replay proportion: old / new
            # Append a geometric-like random number of replayed batches.
            num_new = len(output_batches)
            replay_proportion = self.replay_proportion
            while random.random() < num_new * replay_proportion:
                replay_proportion -= 1
                output_batches.append(buffer.replay())
            return concat_samples(output_batches)

    def get_host(self) -> str:
        """Returns the computer's network name.

        Returns:
            The computer's networks name or an empty string, if the network
            name could not be determined.
        """
        return platform.node()

    @Deprecated(new="MixInMultiAgentReplayBuffer.add()", error=False)
    def add_batch(self, *args, **kwargs):
        # Backward-compat alias; delegates to add().
        return self.add(*args, **kwargs)
| [
"noreply@github.com"
] | sven1977.noreply@github.com |
281437822c00a3b68c1c21b7d258fc68af90bd8c | 99f222d31e66da026cd284c390ef487d6e8a0270 | /core/experiments/plot_results_split_and_fit.py | 3dad50caee5489c2a8d9a8fe227e425812bff7bd | [] | no_license | dallascard/textile | 0e831b56978654f820de47f5145b7aabab48154e | 814ae148a0d7ca2ab47dd07c51ca42835717b9f2 | refs/heads/master | 2021-01-23T02:39:41.827570 | 2018-03-28T04:55:45 | 2018-03-28T04:55:45 | 86,009,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,197 | py | import os
import re
from optparse import OptionParser
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from ..util import file_handling as fh
def main():
    """Aggregate per-run MAE columns from CSV result files, optionally filter
    runs by train-similarity/balance, and print/write summary statistics."""
    usage = "%prog csv_results_files"
    parser = OptionParser(usage=usage)
    parser.add_option('--prefix', dest='prefix', default=None,
                      help='Output prefix (optional): default=%default')
    parser.add_option('--similar', action="store_true", dest="similar", default=False,
                      help='Only use the most similar examples: default=%default')
    parser.add_option('--different', action="store_true", dest="different", default=False,
                      help='Only use the most different examples: default=%default')
    parser.add_option('--balanced', action="store_true", dest="balanced", default=False,
                      help='Only use the most balanced examples: default=%default')
    parser.add_option('--unbalanced', action="store_true", dest="unbalanced", default=False,
                      help='Only use the most unbalanced examples: default=%default')

    (options, args) = parser.parse_args()
    files = args
    n_files = len(files)

    use_most_similar = options.similar
    use_least_similar = options.different
    use_balanced = options.balanced
    use_unbalanced = options.unbalanced
    output = options.prefix

    # Estimator rows of interest in each results CSV.
    rows = ['train', 'CC', 'PCC', 'ACC_internal', 'MS_internal', 'PCC_platt2']
    values = {}
    for row in rows:
        values[row] = {}

    df = None
    mae_values = None
    train_estimates = []
    train_maes = []
    for f_i, f in enumerate(files):
        print(f)
        #comp = re.sub('_2011', '_cshift_2011', f)
        #if not os.path.exists(comp):
        #    print("Can't find %s" % comp)
        # NOTE(review): n_files is incremented before its only use below, so
        # the allocated width ends up equal to len(files) -- works, but only
        # because allocation happens on the first iteration; confirm intent.
        n_files += 1
        df_f = fh.read_csv_to_df(f)
        n_rows, n_cols = df_f.shape
        if mae_values is None:
            df = df_f
            mae_values = np.zeros([n_rows, n_files-1])
        mae_values[:, f_i] = df_f['MAE'].values
        train_estimates.append(df_f.loc['train', 'estimate'])
        train_maes.append(df_f.loc['train', 'MAE'])
        n_train = int(df_f.loc['train', 'N'])
        # Group MAEs by training-set size, one bucket per estimator row.
        if n_train not in values['CC']:
            for row in rows:
                values[row][n_train] = []
        for row in rows:
            values[row][n_train].append(df_f.loc[row, 'MAE'])

    print("%d files" % len(files))
    df = pd.DataFrame(mae_values, index=df.index)

    # NOTE(review): train_maes is a plain list; list-vs-float comparison
    # raises TypeError on Python 3 -- presumably np.array(train_maes) was
    # intended, as done for train_estimates below. TODO confirm.
    most_similar = train_maes < np.mean(train_maes)
    least_similar = train_maes > np.mean(train_maes)

    # Distance of the train estimate from 0.5 = "unbalancedness".
    train_unalancedness = np.abs(np.array(train_estimates) - 0.5)
    most_balanced = train_unalancedness < np.mean(train_unalancedness)
    least_balanced = train_unalancedness > np.mean(train_unalancedness)

    # Start with all runs selected; AND in each requested filter.
    selector = np.array(np.ones(len(most_similar)), dtype=bool)
    if use_most_similar:
        selector *= most_similar
    if use_least_similar:
        selector *= least_similar
    if use_balanced:
        selector *= most_balanced
    if use_unbalanced:
        selector *= least_balanced

    df = pd.DataFrame(df.values[:, selector], index=df.index)
    print(df.mean(axis=1))
    print(df.std(axis=1))

    if output is not None:
        df.to_csv(output + '.csv')
        df.mean(axis=1).to_csv(output + '_mean.csv')

    """
    cmap = plt.get_cmap('jet')
    colors = cmap(np.linspace(0, 1.0, len(rows)))
    fig, ax = plt.subplots()
    for r_i, row in enumerate(rows):
        means = []
        groups = list(values[row].keys())
        groups.sort()
        for group in groups:
            points = values[row][group]
            n_points = len(points)
            ax.scatter(np.ones(n_points)*group + r_i*8, points, color=colors[r_i], s=5, alpha=0.5)
            means.append(np.mean(points))
        if row == 'train':
            ax.plot(groups, means, linestyle='dashed', color=colors[r_i], label=row, alpha=0.5)
        else:
            ax.plot(groups, means, color=colors[r_i], label=row, alpha=0.5)
    ax.legend()

    if output is not None:
        plt.savefig(output + '.pdf', bbox_inches='tight')
    """
# Script entry point.
if __name__ == '__main__':
    main()
| [
"dcard@andrew.cmu.edu"
] | dcard@andrew.cmu.edu |
79c2f4eaae72715bc279a2eeed0138317f74f449 | 69bbe2729b178de19938d2be17fff29f99d67f6d | /question-type-find-hum-classifier-builder.py | 4711b71070d861ac64c061c854991d4bd813319d | [] | no_license | imclab/QuestionTypeClassifier | 0b6b51e2e9a85fdb0f61e0f814bca63147fe8bd7 | 20c25dda8ba0b38c3f74aa2914484380f4dd9394 | refs/heads/master | 2021-01-22T05:10:02.025554 | 2014-01-11T19:50:18 | 2014-01-11T19:50:18 | 17,212,625 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,311 | py | #!/usr/bin/env python
"""
done in 2539.012s
Best score: 0.936
Best parameters set:
clf__alpha: 0.0001
clf__n_iter: 80
clf__penalty: 'elasticnet'
tfidf__norm: 'l2'
tfidf__use_idf: False
vect__max_df: 0.75
vect__max_features: None
vect__ngram_range: (1, 2)
vect__stop_words: None
"""
__author__ = 'gavin hackeling'
__email__ = 'gavinhackeling@gmail.com'
import os
from time import time
import pickle
from pprint import pprint
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.cross_validation import train_test_split
from sklearn.linear_model import SGDClassifier
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
def grid_search():
    """Exhaustively grid-search the vectorizer/tfidf/SGD pipeline over the
    fine-grained HUM corpus and print the best score and parameters.

    Note: Python-2 code (print statements); paths are machine-specific.
    """
    os.chdir('/home/gavin/PycharmProjects/question-type-classifier/corpora/')
    stop_words = [l.strip() for l in open('stop-words.txt', 'rb')]
    categories = ['desc', 'gr', 'ind', 'title']
    train = load_files('fine/HUM', categories=categories, shuffle=True, random_state=42)
    X, y = train.data, train.target

    pipeline = Pipeline([
        ('vect', CountVectorizer()),
        ('tfidf', TfidfTransformer()),
        ('clf', SGDClassifier()),
    ])
    # Parameter grid; winning settings are recorded in the module docstring.
    parameters = {
        'vect__stop_words': ('english', stop_words, None),
        'vect__max_df': (0.5, 0.75, 1.0),
        'vect__max_features': (None, 5000, 10000, 50000),
        'vect__ngram_range': ((1, 1), (1, 2)),  # unigrams or bigrams
        'tfidf__use_idf': (True, False),
        'tfidf__norm': ('l1', 'l2'),
        'clf__alpha': (0.1, 0.01, 0.001, 0.0001, 0.00001, 0.000001),
        'clf__penalty': ('l2', 'elasticnet'),
        'clf__n_iter': (10, 50, 80),
    }
    # n_jobs=-1: use all cores.
    grid_search = GridSearchCV(pipeline, parameters, n_jobs=-1, verbose=1)
    t0 = time()
    print 'Performing grid search...'
    print 'pipeline:', [name for name, _ in pipeline.steps]
    print 'parameters:'
    pprint(parameters)
    grid_search.fit(X, y)
    print 'done in %0.3fs' % (time() - t0)
    print 'Best score: %0.3f' % grid_search.best_score_
    print 'Best parameters set:'
    best_parameters = grid_search.best_estimator_.get_params()
    for param_name in sorted(parameters.keys()):
        print '\t%s: %r' % (param_name, best_parameters[param_name])
def build_model():
    """Train the pipeline with the best grid-search settings, report its
    held-out score, then refit on all data and pickle it to disk."""
    os.chdir('/home/gavin/PycharmProjects/question-type-classifier/corpora/')
    categories = ['desc', 'gr', 'ind', 'title']
    train = load_files('fine/HUM', categories=categories, shuffle=True, random_state=42)
    X, y = train.data, train.target
    # Hyperparameters fixed to the grid-search winners (see grid_search()).
    pipeline = Pipeline([
        ('vect', CountVectorizer(max_df=0.75, ngram_range=(1, 2), stop_words=None)),
        ('tfidf', TfidfTransformer(norm='l2', use_idf=False)),
        ('clf', SGDClassifier(n_iter=80, penalty='elasticnet', alpha=0.0001)),
    ])
    # Hold out 25% for a sanity-check score before the final full-data fit.
    X_train, X_test, y_train, y_test = train_test_split(train.data, train.target, test_size=0.25, random_state=42)
    pipeline.fit(X_train, y_train)
    print 'classifier score:', pipeline.score(X_test, y_test)
    # Refit on the full corpus and serialize the trained pipeline.
    pipeline.fit(X, y)
    filehandler = open('fine-hum-classifier.p', 'wb')
    pickle.dump(pipeline, filehandler)
    filehandler.close()
if __name__ == '__main__':
    # Run the hyperparameter search by default; switch to build_model() to
    # train and pickle the final classifier with the best found settings.
    grid_search()
    #build_model()
| [
"gavinhackeling@gmail.com"
] | gavinhackeling@gmail.com |
beaf48337d505ec8a471b710b101c24259801287 | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/test/test_io_k8s_api_core_v1_endpoint_port.py | ee2bad7e450dd854eee83b22b6081f6c99cc0309 | [] | no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import argo
from models.io_k8s_api_core_v1_endpoint_port import IoK8sApiCoreV1EndpointPort # noqa: E501
from argo.rest import ApiException
class TestIoK8sApiCoreV1EndpointPort(unittest.TestCase):
"""IoK8sApiCoreV1EndpointPort unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIoK8sApiCoreV1EndpointPort(self):
"""Test IoK8sApiCoreV1EndpointPort"""
# FIXME: construct object with mandatory attributes with example values
# model = argo.models.io_k8s_api_core_v1_endpoint_port.IoK8sApiCoreV1EndpointPort() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"nfillot@weborama.com"
] | nfillot@weborama.com |
5495e799345a5df9d390f7c1e4773d9ae425c11b | ea713f1ea60829898e457ef39693f1ea8d14047a | /workbase/ws_sc/ws_sc/spiders/bqg_spider.py | 67e574f4977ac9f206bb47bdee7532c091ed5757 | [] | no_license | freeflyfish/bqhr | 2ea7220569780c033536587591a40fb6fb82d394 | d6cc82697b843a83826ed278aede4117822a818d | refs/heads/master | 2020-04-30T15:03:55.205319 | 2018-03-12T06:58:59 | 2018-03-12T06:58:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 992 | py | # -*- coding:utf-8 -*-
import time
import scrapy
from scrapy.http import Request
from selenium import webdriver
class DLDLSpider(scrapy.Spider):
name = 'bqg'
allowed_domains = ["biquge.com"]
start_urls = ["http://www.biquge.info/10_10218/5001527.html"]
# username = '18688983498'
# password = 'pm988311'
# cark = '6228480128558663877'
path = 'E:\\xiaoshuo\dldl\\'
def parse(self, response):
title = response.xpath('//h1/text()').extract_first()
content_list = response.xpath('//div[@id="content"]/text()').extract()
page_next = response.xpath('//a/@href').extract()[37]
con = ''
if content_list:
for x in content_list:
con += x.replace('\r', '').replace('\n', '').replace('\xa0', '') + '\n'
with open(self.path+title+'.txt', 'w') as f:
f.write(con)
f.close()
if page_next:
yield Request(page_next, callback=self.parse, dont_filter=True) | [
"380784649@qq.com"
] | 380784649@qq.com |
d64db6766ff616af587c803676cd543d66ea5af3 | 9c50d2310d026583fc32720b2cf59f8a8679a3f1 | /base/checkconfig.py | 40d97e4741acaf64c4a85628067987f3206ca275 | [] | no_license | songhongbao/Ran | 6d69bfc93433bdcdfe01f9b542dd626bb188a00d | 942b767e2492283d0c3ade259261de17d2dee7ff | refs/heads/master | 2020-06-15T06:29:35.927142 | 2015-11-19T10:57:19 | 2015-11-19T10:57:19 | 33,342,851 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,604 | py | # -*- coding: utf-8 -*-
import re
import os
import sys
class Conf():
_config = dict()
_error_msg = ''
_task_propertys = ['progress', 'thread']
_task_list = []
#deal ran config
#ran config only support strict pattern: key=value
def _deal(self, line, num):
line = line.strip()
if len(line) == 0 or line[0] == '#':
return True
if len(line.split('=')) != 2:
return False
key, value = line.split('=')
key = key.strip()
value = value.strip()
self._config[key] = (value, num)
return True
#deal task config
#task config only support strict pattern: taskname.property=numbers
def _task_deal(self, config, key, value):
if len(key.split('.')) != 2:
return False
task_name, task_property = key.split('.')
#property need be in _task_propertys
if not task_property in self._task_propertys:
return False
#property need be numbers
if not re.match(r'^[1-9]\d*$', value):
return False
value = int(value)
#task need be in task folder
if not task_name in self._task_list:
return False
#all is ok, register to the config
if not config.get(task_name):
config[task_name] = dict()
config[task_name]['progress'] = 1
config[task_name]['thread'] = 1
config[task_name][task_property] = value
return config
#deal local config
#local config can support normal pattern: key1.key2.key3...keyn=valuel
def _local_deal(self, config, key, value):
if len(key) == 1:
config[key[0]] = value
else:
if not config.get(key[0]) or not isinstance(config[key[0]], dict):
config[key[0]] = dict()
config[key[0]] = self._local_deal(config[key[0]], key[1:], value)
return config
#deal error config
#error config value include error const, and error info
def _error_deal(self, config, key, value):
if len(value.split(':')) != 2:
return False
error_key, error_value = value.split(':')
error_key = error_key.strip()
error_value = error_value.strip()
config[error_key] = dict()
config[error_key]['num'] = key
config[error_key]['msg'] = error_value
return config
#init the task file name list
def _init_task_folder(self):
self._task_list = []
for task_name in os.listdir('task'):
if task_name[-3 : ] == '.py':
self._task_list.append(task_name[0 : -3])
#config check false, set the errors
def _set_error(self, value, line=0, name='ran'):
if line:
self._error_msg = name + '.config line ' + str(line) + ' error: ' + str(value)
else:
self._error_msg = 'check ' + name + '.config error:\n' + str(value)
#if config check false, you can get errors by the function
#the error info can be write in the ran log
def get_error(self):
return self._error_msg
#ran config check is complex
def check_ran(self, lines):
self._config = dict()
num = 1
for line in lines:
if not self._deal(line, num):
self._set_error(line, num)
return False
num += 1
config = dict()
#set progress dir
config['dir'] = sys.path[0]
#task refresh
value, num = self._config.get('config_refresh', ('no', 0))
if value != 'yes' and value != 'no':
self._set_error(value, num)
return False
config['config_refresh'] = False if value == 'no' else True
#task refresh time
value, num = self._config.get('config_refresh_time', ('60', 0))
if not re.match(r'^[1-9]\d*$', value):
self._set_error(value, num)
return False
config['config_refresh_time'] = int(value)
#socket_folder
value, num = self._config.get('socket_folder', ('tmp', 0))
if value.find('/') == 0:
config['socket_folder'] = value
else:
config['socket_folder'] = config['dir'] + '/' + value
if not os.path.exists(config['socket_folder']):
self._set_error(value + ' folder not exist', num)
return False
#socket_port
value, num = self._config.get('socket_port', ('7664', 0))
if not re.match(r'^[1-9]\d*$', value):
self._set_error(value, num)
return False
config['socket_port'] = int(value)
#log_file_folder
value, num = self._config.get('log_file_folder', ('log', 0))
#if not os.path.exists(file_folder):
config['log_file_folder'] = value
#log_udp_host
value, num = self._config.get('log_udp_host', ('127.0.0.1', 0))
config['log_udp_host'] = value
#log udp port
value, num = self._config.get('log_udp_port', ('5202', 0))
if not re.match(r'^[1-9]\d*$', value):
self._set_error(value, num)
return False
config['log_udp_port'] = int(value)
return config
def check_task(self, lines):
self._config = dict()
num = 1
for line in lines:
if not self._deal(line, num):
self._set_error(line, num, 'task')
return False
num += 1
self._init_task_folder()
config = dict()
for key, value in self._config.iteritems():
if not self._task_deal(config, key, value[0]):
self._set_error(key + '=' + value[0], value[1], 'task')
return False
return config
def check_local(self, lines):
self._config = dict()
num = 1
for line in lines:
if not self._deal(line, num):
self._set_error(line, num, 'local')
return False
num += 1
config = dict()
for key, value in self._config.iteritems():
self._local_deal(config, key.split('.'), value[0])
return config
def check_error(self, lines):
self._config = dict()
num = 1
for line in lines:
if not self._deal(line, num):
self._set_error(line, num, 'local')
return False
num += 1
config = dict()
for key, value in self._config.iteritems():
self._error_deal(config, key, value[0])
return config | [
"root@localhost.localdomain"
] | root@localhost.localdomain |
84f87302873f18b014bc11be8a870f868e346cd5 | bd36269a7d7780e526c6e700f396baf7fffcb224 | /ctech403/module_1/fillin-function.py | 6090f315bbbac4e8f1f02a84c217ba6ec8982fea | [] | no_license | ecornelldev/ctech400s | 7bb26d901bb9daae5c2d0f4f2eb8dabd9fdfe10e | 8e394620dc2f2597161cc3ac94b1b97424f13d1a | refs/heads/master | 2021-06-23T21:23:16.198084 | 2020-12-22T19:06:42 | 2020-12-22T19:06:42 | 161,530,338 | 0 | 0 | null | 2020-12-30T04:10:33 | 2018-12-12T18:43:17 | HTML | UTF-8 | Python | false | false | 1,113 | py | import random
# Story template
story = 'Yesterday, I ___VERB___ to the store to buy a ___NOUN___. But on my way, I ran into a ___ADJ___ ___NOUN___. I was very ___ADJ___. Then I remembered that I had a ___NOUN___ in my pocket. I ___VERB___ behind a ___ADJ___ ___NOUN___.'
# Placeholders
NOUN_PLACEHOLDER = '___NOUN___'
ADJECTIVE_PLACEHOLDER = '___ADJ___'
VERB_PLACEHOLDER = '___VERB___'
# Word lists
NOUNS = ['cat', 'dog', 'zeppelin', 'boomerang', 'trombone']
ADJECTIVES = ['red', 'hunormous', 'intricate', 'merciless']
VERBS = ['vomited', 'catapulted', 'squeaked']
# Returns a string with each instance of placeholder in text replaced by a
# random choice from word_list
def fill_in(text, placeholder, word_list):
while placeholder in text:
new_word = random.choice(word_list)
text = text.replace(placeholder, new_word, 1)
return text
# Perform substitutions for each list of words
story = fill_in(story, NOUN_PLACEHOLDER, NOUNS)
story = fill_in(story, ADJECTIVE_PLACEHOLDER, ADJECTIVES)
story = fill_in(story, VERB_PLACEHOLDER, VERBS)
# Output story with substitutions
print(story)
| [
"james.grimmelmann@cornell.edu"
] | james.grimmelmann@cornell.edu |
e99fd962f33fc4068020593b7c6d5bdbfadd5a15 | a8547f73463eef517b98d1085430732f442c856e | /GDAL-1.11.5-py3.6-macosx-10.13-x86_64.egg/osgeo/gdal_array.py | fd2361c4d9fdf5b20d0a12b089ff8fa01a95d8fa | [] | no_license | EnjoyLifeFund/macHighSierra-py36-pkgs | 63aece1b692225ee2fbb865200279d7ef88a1eca | 5668b5785296b314ea1321057420bcd077dba9ea | refs/heads/master | 2021-01-23T19:13:04.707152 | 2017-12-25T17:41:30 | 2017-12-25T17:41:30 | 102,808,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,908 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.40
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# This file is compatible with both classic and new-style classes.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_gdal_array', [dirname(__file__)])
except ImportError:
import _gdal_array
return _gdal_array
if fp is not None:
try:
_mod = imp.load_module('_gdal_array', fp, pathname, description)
finally:
fp.close()
return _mod
_gdal_array = swig_import_helper()
del swig_import_helper
else:
import _gdal_array
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
class VirtualMem(_object):
"""Proxy of C++ CPLVirtualMemShadow class"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VirtualMem, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VirtualMem, name)
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal_array.delete_VirtualMem
__del__ = lambda self : None;
def GetAddr(self):
"""GetAddr(self)"""
return _gdal_array.VirtualMem_GetAddr(self)
def Pin(self, start_offset = 0, nsize = 0, bWriteOp = 0):
"""
Pin(self, size_t start_offset = 0, size_t nsize = 0, int bWriteOp = 0)
Pin(self, size_t start_offset = 0, size_t nsize = 0)
Pin(self, size_t start_offset = 0)
Pin(self)
"""
return _gdal_array.VirtualMem_Pin(self, start_offset, nsize, bWriteOp)
VirtualMem_swigregister = _gdal_array.VirtualMem_swigregister
VirtualMem_swigregister(VirtualMem)
def GetArrayFilename(*args):
"""GetArrayFilename(PyArrayObject psArray) -> retStringAndCPLFree"""
return _gdal_array.GetArrayFilename(*args)
def BandRasterIONumPy(*args, **kwargs):
"""
BandRasterIONumPy(Band band, int bWrite, int xoff, int yoff, int xsize,
int ysize, PyArrayObject psArray, int buf_type) -> CPLErr
"""
return _gdal_array.BandRasterIONumPy(*args, **kwargs)
def VirtualMemGetArray(*args):
"""VirtualMemGetArray(VirtualMem virtualmem)"""
return _gdal_array.VirtualMemGetArray(*args)
def RATValuesIONumPyWrite(*args, **kwargs):
"""
RATValuesIONumPyWrite(RasterAttributeTable poRAT, int nField, int nStart,
PyArrayObject psArray) -> CPLErr
"""
return _gdal_array.RATValuesIONumPyWrite(*args, **kwargs)
def RATValuesIONumPyRead(*args, **kwargs):
"""
RATValuesIONumPyRead(RasterAttributeTable poRAT, int nField, int nStart,
int nLength) -> PyObject
"""
return _gdal_array.RATValuesIONumPyRead(*args, **kwargs)
import numpy
import _gdal_array
from . import gdalconst
from . import gdal
gdal.AllRegister()
codes = { gdalconst.GDT_Byte : numpy.uint8,
gdalconst.GDT_UInt16 : numpy.uint16,
gdalconst.GDT_Int16 : numpy.int16,
gdalconst.GDT_UInt32 : numpy.uint32,
gdalconst.GDT_Int32 : numpy.int32,
gdalconst.GDT_Float32 : numpy.float32,
gdalconst.GDT_Float64 : numpy.float64,
gdalconst.GDT_CInt16 : numpy.complex64,
gdalconst.GDT_CInt32 : numpy.complex64,
gdalconst.GDT_CFloat32 : numpy.complex64,
gdalconst.GDT_CFloat64 : numpy.complex128
}
def OpenArray( array, prototype_ds = None ):
ds = gdal.Open( GetArrayFilename(array) )
if ds is not None and prototype_ds is not None:
if type(prototype_ds).__name__ == 'str':
prototype_ds = gdal.Open( prototype_ds )
if prototype_ds is not None:
CopyDatasetInfo( prototype_ds, ds )
return ds
def flip_code(code):
if isinstance(code, (numpy.dtype,type)):
# since several things map to complex64 we must carefully select
# the opposite that is an exact match (ticket 1518)
if code == numpy.int8:
return gdalconst.GDT_Byte
if code == numpy.complex64:
return gdalconst.GDT_CFloat32
for key, value in list(codes.items()):
if value == code:
return key
return None
else:
try:
return codes[code]
except KeyError:
return None
def NumericTypeCodeToGDALTypeCode(numeric_type):
if not isinstance(numeric_type, (numpy.dtype,type)):
raise TypeError("Input must be a type")
return flip_code(numeric_type)
def GDALTypeCodeToNumericTypeCode(gdal_code):
return flip_code(gdal_code)
def LoadFile( filename, xoff=0, yoff=0, xsize=None, ysize=None ):
ds = gdal.Open( filename )
if ds is None:
raise ValueError("Can't open "+filename+"\n\n"+gdal.GetLastErrorMsg())
return DatasetReadAsArray( ds, xoff, yoff, xsize, ysize )
def SaveArray( src_array, filename, format = "GTiff", prototype = None ):
driver = gdal.GetDriverByName( format )
if driver is None:
raise ValueError("Can't find driver "+format)
return driver.CreateCopy( filename, OpenArray(src_array,prototype) )
def DatasetReadAsArray( ds, xoff=0, yoff=0, xsize=None, ysize=None, buf_obj=None ):
if xsize is None:
xsize = ds.RasterXSize
if ysize is None:
ysize = ds.RasterYSize
if ds.RasterCount == 1:
return BandReadAsArray( ds.GetRasterBand(1), xoff, yoff, xsize, ysize, buf_obj = buf_obj)
datatype = ds.GetRasterBand(1).DataType
for band_index in range(2,ds.RasterCount+1):
if datatype != ds.GetRasterBand(band_index).DataType:
datatype = gdalconst.GDT_Float32
typecode = GDALTypeCodeToNumericTypeCode( datatype )
if typecode == None:
datatype = gdalconst.GDT_Float32
typecode = numpy.float32
if buf_obj is not None:
for band_index in range(1,ds.RasterCount+1):
BandReadAsArray( ds.GetRasterBand(band_index),
xoff, yoff, xsize, ysize, buf_obj = buf_obj[band_index-1])
return buf_obj
array_list = []
for band_index in range(1,ds.RasterCount+1):
band_array = BandReadAsArray( ds.GetRasterBand(band_index),
xoff, yoff, xsize, ysize)
array_list.append( numpy.reshape( band_array, [1,ysize,xsize] ) )
return numpy.concatenate( array_list )
def BandReadAsArray( band, xoff = 0, yoff = 0, win_xsize = None, win_ysize = None,
buf_xsize=None, buf_ysize=None, buf_obj=None ):
"""Pure python implementation of reading a chunk of a GDAL file
into a numpy array. Used by the gdal.Band.ReadAsArray method."""
if win_xsize is None:
win_xsize = band.XSize
if win_ysize is None:
win_ysize = band.YSize
if buf_obj is None:
if buf_xsize is None:
buf_xsize = win_xsize
if buf_ysize is None:
buf_ysize = win_ysize
else:
if len(buf_obj.shape) == 2:
shape_buf_xsize = buf_obj.shape[1]
shape_buf_ysize = buf_obj.shape[0]
else:
shape_buf_xsize = buf_obj.shape[2]
shape_buf_ysize = buf_obj.shape[1]
if buf_xsize is not None and buf_xsize != shape_buf_xsize:
raise ValueError('Specified buf_xsize not consistant with array shape')
if buf_ysize is not None and buf_ysize != shape_buf_ysize:
raise ValueError('Specified buf_ysize not consistant with array shape')
buf_xsize = shape_buf_xsize
buf_ysize = shape_buf_ysize
if buf_obj is None:
datatype = band.DataType
typecode = GDALTypeCodeToNumericTypeCode( datatype )
if typecode == None:
datatype = gdalconst.GDT_Float32
typecode = numpy.float32
else:
datatype = NumericTypeCodeToGDALTypeCode( typecode )
if datatype == gdalconst.GDT_Byte and band.GetMetadataItem('PIXELTYPE', 'IMAGE_STRUCTURE') == 'SIGNEDBYTE':
typecode = numpy.int8
ar = numpy.empty([buf_ysize,buf_xsize], dtype = typecode)
if BandRasterIONumPy( band, 0, xoff, yoff, win_xsize, win_ysize,
ar, datatype ) != 0:
return None
return ar
else:
datatype = NumericTypeCodeToGDALTypeCode( buf_obj.dtype.type )
if not datatype:
raise ValueError("array does not have corresponding GDAL data type")
if BandRasterIONumPy( band, 0, xoff, yoff, win_xsize, win_ysize,
buf_obj, datatype ) != 0:
return None
return buf_obj
def BandWriteArray( band, array, xoff=0, yoff=0 ):
"""Pure python implementation of writing a chunk of a GDAL file
from a numpy array. Used by the gdal.Band.WriteArray method."""
if array is None or len(array.shape) != 2:
raise ValueError("expected array of dim 2")
xsize = array.shape[1]
ysize = array.shape[0]
if xsize + xoff > band.XSize or ysize + yoff > band.YSize:
raise ValueError("array larger than output file, or offset off edge")
datatype = NumericTypeCodeToGDALTypeCode( array.dtype.type )
# if we receive some odd type, like int64, try casting to a very
# generic type we do support (#2285)
if not datatype:
gdal.Debug( 'gdal_array', 'force array to float64' )
array = array.astype( numpy.float64 )
datatype = NumericTypeCodeToGDALTypeCode( array.dtype.type )
if not datatype:
raise ValueError("array does not have corresponding GDAL data type")
return BandRasterIONumPy( band, 1, xoff, yoff, xsize, ysize,
array, datatype )
def RATWriteArray(rat, array, field, start=0):
"""
Pure Python implementation of writing a chunk of the RAT
from a numpy array. Type of array is coerced to one of the types
(int, double, string) supported. Called from RasterAttributeTable.WriteArray
"""
if array is None:
raise ValueError("Expected array of dim 1")
# if not the array type convert it to handle lists etc
if not isinstance(array, numpy.ndarray):
array = numpy.array(array)
if array.ndim != 1:
raise ValueError("Expected array of dim 1")
if (start + array.size) > rat.GetRowCount():
raise ValueError("Array too big to fit into RAT from start position")
if numpy.issubdtype(array.dtype, numpy.integer):
# is some type of integer - coerce to standard int
# TODO: must check this is fine on all platforms
# confusingly numpy.int 64 bit even if native type 32 bit
array = array.astype(numpy.int32)
elif numpy.issubdtype(array.dtype, numpy.floating):
# is some type of floating point - coerce to double
array = array.astype(numpy.double)
elif numpy.issubdtype(array.dtype, numpy.character):
# cast away any kind of Unicode etc
array = array.astype(numpy.character)
else:
raise ValueError("Array not of a supported type (integer, double or string)")
return RATValuesIONumPyWrite(rat, field, start, array)
def RATReadArray(rat, field, start=0, length=None):
"""
Pure Python implementation of reading a chunk of the RAT
into a numpy array. Called from RasterAttributeTable.ReadAsArray
"""
if length is None:
length = rat.GetRowCount() - start
return RATValuesIONumPyRead(rat, field, start, length)
def CopyDatasetInfo( src, dst, xoff=0, yoff=0 ):
"""
Copy georeferencing information and metadata from one dataset to another.
src: input dataset
dst: output dataset - It can be a ROI -
xoff, yoff: dst's offset with respect to src in pixel/line.
Notes: Destination dataset must have update access. Certain formats
do not support creation of geotransforms and/or gcps.
"""
dst.SetMetadata( src.GetMetadata() )
#Check for geo transform
gt = src.GetGeoTransform()
if gt != (0,1,0,0,0,1):
dst.SetProjection( src.GetProjectionRef() )
if (xoff == 0) and (yoff == 0):
dst.SetGeoTransform( gt )
else:
ngt = [gt[0],gt[1],gt[2],gt[3],gt[4],gt[5]]
ngt[0] = gt[0] + xoff*gt[1] + yoff*gt[2];
ngt[3] = gt[3] + xoff*gt[4] + yoff*gt[5];
dst.SetGeoTransform( ( ngt[0], ngt[1], ngt[2], ngt[3], ngt[4], ngt[5] ) )
#Check for GCPs
elif src.GetGCPCount() > 0:
if (xoff == 0) and (yoff == 0):
dst.SetGCPs( src.GetGCPs(), src.GetGCPProjection() )
else:
gcps = src.GetGCPs()
#Shift gcps
new_gcps = []
for gcp in gcps:
ngcp = gdal.GCP()
ngcp.GCPX = gcp.GCPX
ngcp.GCPY = gcp.GCPY
ngcp.GCPZ = gcp.GCPZ
ngcp.GCPPixel = gcp.GCPPixel - xoff
ngcp.GCPLine = gcp.GCPLine - yoff
ngcp.Info = gcp.Info
ngcp.Id = gcp.Id
new_gcps.append(ngcp)
try:
dst.SetGCPs( new_gcps , src.GetGCPProjection() )
except:
print ("Failed to set GCPs")
return
return
| [
"raliclo@gmail.com"
] | raliclo@gmail.com |
ce9e6a7de16b5cdfa571a0dee0fd2036c6646815 | b3a90c38c61579a3de26ca398cc354a2fedbd367 | /python_crash_course/chapter_11/test_name_function.py | 56ef5ee3f7bc39c57f6974724ef7e1a833e4d6a3 | [] | no_license | JennifferLockwood/python_learning | a815e05e124aab02de694e48ee405958bbed8aac | fa054bb84778f278247128266095e061e65126b0 | refs/heads/master | 2016-08-12T20:29:36.221498 | 2016-03-13T18:10:04 | 2016-03-13T18:10:04 | 49,471,663 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | import unittest
from name_function import get_formatted_name
class NamesTestCase(unittest.TestCase):
"""Tests for 'name_function.py'."""
def test_first_last_name(self):
"""Do names like 'Janis Joplin' work?"""
formatted_name = get_formatted_name('janis', 'joplin')
self.assertEqual(formatted_name, 'Janis Joplin')
def test_first_last_middle_name(self):
"""Do names like 'Wolfang Amadeus Mozart' work?"""
formatted_name = get_formatted_name('wolfang', 'mozart', 'amadeus')
self.assertEqual(formatted_name, 'Wolfang Amadeus Mozart')
unittest.main()
| [
"jennifferlockwood@gmail.com"
] | jennifferlockwood@gmail.com |
5d2a5440ef001e2e281ac33bb7504dd9e176c1c6 | e463e169c6dcd5222dbba9c4c699f70b5eda3591 | /Chapter_8/private-critter.py | c28cb62c9baf64a7da396de3807110884329e945 | [] | no_license | mrwillbarnz/Python_FAB_Reimplementation | 1e00e3ecdcb6c9e3d671aae9dddf8aa475c01e8e | 5f02c6d3f392612fe17d97e648302ea2e3edf01c | refs/heads/master | 2022-04-12T05:47:11.577985 | 2020-03-01T17:37:40 | 2020-03-01T17:37:40 | 239,611,675 | 0 | 0 | null | 2020-02-16T19:28:19 | 2020-02-10T20:57:21 | Python | UTF-8 | Python | false | false | 705 | py | # Private Critter
# Demonstrates private variables and methods
class Critter(object):
"""A virtual pet."""
def __init__(self, name, mood):
print("A new critter has been born!")
self.name = name # public attribute
self.__mood = mood # private attribute
def talk(self):
print("\nI'm", self.name)
print("Right now I feel", self.__mood, "\n")
def __private_method(self):
print("This is a private method.")
def public_method(self):
print("This is a public method.")
self.__private_method()
# main
crit = Critter(name = "Poochie", mood = "happy")
crit.talk()
crit.public_method()
input("\n\nPress the enter key to exit.")
| [
"willbarnard687@pm.me"
] | willbarnard687@pm.me |
725f3663fa177e21fa5168b1c0c8db6c8e9596a5 | b023dc288ead04ce930fc16034bf47752c0a86a4 | /projecteuler2.py | b6910e07d34234fb29796fe88b6d8b1d7d369e16 | [] | no_license | leezichanga/Project-euler-toyproblems | b98c747b9d2c61cde76e5ad223e66e559ca63a33 | 438a5b48cb42e357def68360598b8d1850128734 | refs/heads/master | 2020-03-11T19:45:33.145514 | 2018-04-19T13:03:48 | 2018-04-19T13:03:48 | 130,217,051 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 761 | py | #Solution 1
numbers = [1, 2]
total = 0
for i in range(4000000):
if i == numbers[-1] + numbers[-2]:
numbers.append(i)
for n in numbers:
if n % 2 == 0:
total += n
print(total)
#Solution 2
#!/bin/python3
import sys
t = int(input().strip())
for a0 in range(t):
fib_start =[1,2] #create the first 2 fibonnaci numbers
total = 0 #create default total value
n = int(input().strip())
while True: #while loop to append the next numbers in the sequence
fib_next = fib_start[-1] +fib_start[-2]
if fib_next < n:
fib_start.append(fib_next)
else:
break
for number in fib_start: #for loop to add the even digits
if number % 2 == 0:
total += number
print (total)
| [
"elizabbethichanga@yahoo.com"
] | elizabbethichanga@yahoo.com |
554c61067cbadc2773055a2c4062c1801556b3e4 | d652c5cd50abc59163288f67aabf511edf2ffc16 | /{{cookiecutter.package_name}}/{{cookiecutter.app_name}}/serializers/blog.py | 7a2777cb74b488f9a6b15e871cad3fd62c639f55 | [
"MIT"
] | permissive | sveetch/cookiecutter-sveetch-djangoapp | 2f883958a665a84423f9dcc0bbd794a67d91fb0e | 6770a00e5ed67702f61543c0495bc55dcebdc76a | refs/heads/master | 2023-04-03T18:05:59.380348 | 2023-03-17T16:26:15 | 2023-03-17T16:26:15 | 297,186,173 | 3 | 1 | null | 2020-10-12T00:52:41 | 2020-09-21T00:04:59 | null | UTF-8 | Python | false | false | 1,525 | py | from rest_framework import serializers
from ..models import Blog
class BlogIdField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return Blog.objects.all()
class BlogSerializer(serializers.HyperlinkedModelSerializer):
"""
Complete representation for detail and writing usage.
"""
id = serializers.ReadOnlyField()
view_url = serializers.SerializerMethodField()
article_count = serializers.SerializerMethodField()
class Meta:
model = Blog
fields = '__all__'
extra_kwargs = {
"url": {
"view_name": "{{ cookiecutter.app_name }}:api-blog-detail"
},
}
def get_view_url(self, obj):
"""
Return the HTML detail view URL.
If request has been given to serializer this will be an absolute URL, else a
relative URL.
"""
url = obj.get_absolute_url()
request = self.context.get("request")
if request:
return request.build_absolute_uri(url)
return url
def get_article_count(self, obj):
"""
Return count of related articles.
"""
return obj.article_set.count()
class BlogResumeSerializer(BlogSerializer):
"""
Simpler Blog representation for nested list. It won't be suitable for writing
usage.
"""
class Meta:
model = BlogSerializer.Meta.model
fields = ["id", "url", "view_url", "title"]
extra_kwargs = BlogSerializer.Meta.extra_kwargs
| [
"sveetch@gmail.com"
] | sveetch@gmail.com |
339dc21c3dce2abec0a2f010d9e28dee1d336019 | bd4aeadb9fe2066db18a2a47d3ea3f99a6832af1 | /my_env/bin/easy_install | 49217d1997bcaf846a447f413bc196b0ae89aa76 | [] | no_license | Sezimm/ProektCourses | 940b9ac221a122136941f0af6e75698449c50706 | 91b83903643991c2de1add9378b22cdd4bf50540 | refs/heads/main | 2023-03-20T11:20:00.285358 | 2021-03-09T03:50:46 | 2021-03-09T03:50:46 | 345,875,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | #!/home/sezim/courses/my_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"you@example.com"
] | you@example.com | |
9beb46105e59a68e3a054d35e6ff5164999cabc5 | 0f24c1e2df268a7c98314d5b3c6f8b5738f88ba9 | /test/test_addresses_api.py | 2b9cf39f54f0bd4d84f46319473ed53db7dbb64e | [
"MIT"
] | permissive | arberx/graphsense-python | b07be2854d4f6e763aacdad4045ae72c338bd4e2 | c0dafc97a04bc3dbf0caf08a981bb591bd1e430a | refs/heads/master | 2023-08-11T14:15:42.576434 | 2021-06-17T08:01:04 | 2021-06-17T08:01:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,079 | py | """
GraphSense API
GraphSense API # noqa: E501
The version of the OpenAPI document: 0.4.5
Generated by: https://openapi-generator.tech
"""
import unittest
import graphsense
from graphsense.api.addresses_api import AddressesApi # noqa: E501
class TestAddressesApi(unittest.TestCase):
"""AddressesApi unit test stubs"""
def setUp(self):
self.api = AddressesApi() # noqa: E501
def tearDown(self):
pass
def test_get_address_entity(self):
"""Test case for get_address_entity
Get an address with tags # noqa: E501
"""
pass
def test_get_address_with_tags(self):
"""Test case for get_address_with_tags
Get an address with tags # noqa: E501
"""
pass
def test_list_address_links(self):
"""Test case for list_address_links
Get transactions between two addresses # noqa: E501
"""
pass
def test_list_address_links_csv(self):
"""Test case for list_address_links_csv
Get transactions between two addresses as CSV # noqa: E501
"""
pass
def test_list_address_links_csv_eth(self):
"""Test case for list_address_links_csv_eth
Get transactions between two addresses as CSV # noqa: E501
"""
pass
def test_list_address_links_eth(self):
"""Test case for list_address_links_eth
Get transactions between two addresses # noqa: E501
"""
pass
def test_list_address_neighbors(self):
"""Test case for list_address_neighbors
Get an addresses' neighbors in the address graph # noqa: E501
"""
pass
def test_list_address_neighbors_csv(self):
"""Test case for list_address_neighbors_csv
Get an addresses' neighbors in the address graph as CSV # noqa: E501
"""
pass
def test_list_address_txs(self):
"""Test case for list_address_txs
Get all transactions an address has been involved in # noqa: E501
"""
pass
def test_list_address_txs_csv(self):
"""Test case for list_address_txs_csv
Get all transactions an address has been involved in as CSV # noqa: E501
"""
pass
def test_list_address_txs_csv_eth(self):
"""Test case for list_address_txs_csv_eth
Get all transactions an address has been involved in as CSV # noqa: E501
"""
pass
def test_list_address_txs_eth(self):
"""Test case for list_address_txs_eth
Get all transactions an address has been involved in # noqa: E501
"""
pass
def test_list_tags_by_address(self):
"""Test case for list_tags_by_address
Get attribution tags for a given address # noqa: E501
"""
pass
def test_list_tags_by_address_csv(self):
"""Test case for list_tags_by_address_csv
Get attribution tags for a given address # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| [
"git@myrho.net"
] | git@myrho.net |
c3a959e4e06b55273a496c095e694aa6c1c774ca | 72f6f274a9e4937f99e61eebe14f9b2f301a83f5 | /utils/tokenizer.py | bf4279be1e7e24a5503d16496dfcd27c2bff72f0 | [] | no_license | studio-ousia/textent | e466f8ef4f6910a0f4270014fa29c18aa5f329e0 | 2a73ef2f6a0d29d4d1c1085a75fa0b7592bdd376 | refs/heads/master | 2021-03-22T04:45:57.582737 | 2018-06-03T07:18:28 | 2018-06-03T07:18:28 | 93,811,887 | 20 | 4 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | # -*- coding: utf-8 -*-
import re
class Token(object):
    """A single token: a piece of text together with its (start, end) span."""

    __slots__ = ('_text', '_span')

    def __init__(self, text, span):
        self._text = text
        self._span = span

    @property
    def span(self):
        """(start, end) character offsets of this token in the source text."""
        return self._span

    @property
    def text(self):
        """The token's surface string."""
        return self._text

    def __repr__(self):
        # Encodes to UTF-8 for a Python 2 byte-str repr; under Python 3 this
        # renders a bytes literal inside the angle brackets.
        return '<Token %s>' % self._text.encode('utf-8')

    def __reduce__(self):
        # Pickle as (class, (text, span)) so __slots__ instances round-trip.
        return (self.__class__, (self._text, self._span))
class RegexpTokenizer(object):
    """Tokenizer that emits a Token for every match of a regular expression.

    Parameters
    ----------
    rule : text pattern
        Regex defining what counts as a token.  The default matches runs of
        word characters (``\\d`` is redundant inside ``\\w`` but is kept for
        backward compatibility with the original default value).
    """

    __slots__ = ('_rule',)

    # u'[\\w\\d]+' is byte-for-byte the same pattern as the original
    # Python-2-only literal ur'[\w\d]+', but is also valid syntax on
    # Python 3.3+, so the module can be imported on either version.
    def __init__(self, rule=u'[\\w\\d]+'):
        self._rule = re.compile(rule, re.UNICODE)

    def tokenize(self, text):
        """Return a list of Token objects, one per match of the rule in text."""
        spans = [m.span() for m in self._rule.finditer(text)]
        return [Token(text[start:end], (start, end)) for start, end in spans]
| [
"ikuya@ikuya.net"
] | ikuya@ikuya.net |
e2921ce2081516066c75515a68b6b631bcdfd549 | c913c952cf4019d67f02bf1971917116da375c81 | /Data/OMIMresults/omimResults2420to2440.py | e5925a3a67033ff5edeba8a4eae05356773c7bea | [] | no_license | jiangchb/OMIMscraping | 57afa5b2f8b7ca975e7459814e0410a872f71990 | 27d4ac8faea526b1c70937317caec064bed00a0a | refs/heads/master | 2022-03-14T21:35:56.102665 | 2019-11-22T15:48:48 | 2019-11-22T15:48:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95,100 | py | omim = {'omim': {
'version': '1.0',
'searchResponse': {
'search': '*',
'expandedSearch': '*:*',
'parsedSearch': '+*:* ()',
'searchSuggestion': None,
'searchSpelling': None,
'filter': '',
'expandedFilter': None,
'fields': '',
'searchReport': None,
'totalResults': 7368,
'startIndex': 2420,
'endIndex': 2439,
'sort': '',
'operator': '',
'searchTime': 2.0,
'clinicalSynopsisList': [
{'clinicalSynopsis': {
'mimNumber': 611705,
'prefix': '#',
'preferredTitle': 'SALIH MYOPATHY; SALMY',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'headAndNeckFace': 'Facial muscle weakness {SNOMEDCT:95666008} {ICD10CM:R29.810} {ICD9CM:438.83,781.94} {UMLS C0427055,C4553723 HP:0030319,HP:0007209} {HPO HP:0030319 C4022514}',
'headAndNeckEyes': 'Ptosis {SNOMEDCT:11934000,29696001} {ICD10CM:H02.4,H02.40,H02.409} {ICD9CM:374.3,374.30} {UMLS C0005745,C0033377 HP:0000508} {HPO HP:0000508 C0005745} {EOM ID:1bd157b764ec7aea IMG:Ptosis-small.jpg}',
'cardiovascularHeart': '''Dilated cardiomyopathy {SNOMEDCT:399020009,195021004} {ICD10CM:I42.0} {UMLS C0007193,C2984282 HP:0001644} {HPO HP:0001644 C0007193};\nArrhythmia {SNOMEDCT:698247007} {ICD10CM:I49.9} {ICD9CM:427,427.9} {UMLS C0003811 HP:0011675} {HPO HP:0011675 C0003811,C0264886,C0522055,C0855329,C1832603,C1842820};\nCardiac septal defects {SNOMEDCT:396351009,59494005,253273004} {ICD10CM:Q21,Q21.9} {UMLS C0018816 HP:0001671}''',
'skeletal': 'Joint contractures {SNOMEDCT:7890003} {ICD10CM:M24.5} {ICD9CM:718.40,718.4} {UMLS C0009918 HP:0001371} {HPO HP:0001371 C0009917,C0009918,C0333068,C1850530}',
'skeletalSpine': 'Scoliosis {SNOMEDCT:298382003,20944008,111266001} {ICD10CM:Q67.5,M41,M41.9} {UMLS C0559260,C0036439,C4552773,C0700208 HP:0002650} {HPO HP:0002650 C0037932,C0700208}',
'muscleSoftTissue': '''Delayed motor development {UMLS C1854301 HP:0001270} {HPO HP:0001270 C1854301,C4020874};\nMuscle weakness, generalized, proximal and distal {UMLS C1842163};\nCalf hypertrophy {UMLS C1843057 HP:0008981} {HPO HP:0008981 C1843057};\nMuscle biopsy shows centralized nuclei {UMLS C1968626};\nType 1 fiber predominance {UMLS C2673678};\nMinicore-like lesions with mitochondrial depletion and sarcomeric disorganization {UMLS C2673679};\nDisruption of the M-line {UMLS C2673680};\nDystrophic changes occur later {UMLS C2673681}''',
'laboratoryAbnormalities': 'Serum creatine kinase may be increased {UMLS C1969489}',
'miscellaneous': '''Muscle involvement shows onset at birth or in infancy {UMLS C2673683};\nCardiac involvement occurs between 5 and 12 years {UMLS C2673684};\nSudden death due to cardiomyopathy {UMLS C2673685}''',
'molecularBasis': 'Caused by mutation in the titin gene (TTN, {188840.0012})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': True,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611706,
'prefix': '%',
'preferredTitle': 'MIGRAINE WITH OR WITHOUT AURA, SUSCEPTIBILITY TO, 12; MGR12',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'headAndNeckEars': 'Phonophobia {SNOMEDCT:313387002} {UMLS C0751466 HP:0002183} {HPO HP:0002183 C0751466}',
'headAndNeckEyes': 'Photophobia {SNOMEDCT:246622003,409668002} {ICD10CM:H53.14} {UMLS C4554342,C0085636 HP:0000613} {HPO HP:0000613 C0085636,C4020887}',
'abdomenGastrointestinal': '''Nausea {SNOMEDCT:422587007} {ICD10CM:R11.0} {UMLS C4085222,C4085661,C0027497,C4084796,C4552889,C4552888,C2984057,C4085862,C4553767,C1963179,C3829611,C4255480 HP:0002018} {HPO HP:0002018 C0027497};\nVomiting {SNOMEDCT:249497008,422400008,300359004} {ICD10CM:R11.1,R11.10} {UMLS C3898969,C4084768,C4084769,C1963281,C4084766,C4084767,C0042963 HP:0002013} {HPO HP:0002013 C0042963}''',
'neurologicCentralNervousSystem': '''Migraine with or without aura, may be pulsating and/or unilateral {UMLS C4314963};\nHeadache {SNOMEDCT:25064002} {ICD10CM:R51} {ICD9CM:784.0} {UMLS C4553197,C0018681 HP:0002315} {HPO HP:0002315 C0018681}''',
'miscellaneous': 'Predominantly female-dominated inheritance pattern in Finnish families {UMLS C4314962}',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': True,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611773,
'prefix': '#',
'preferredTitle': 'ANGIOPATHY, HEREDITARY, WITH NEPHROPATHY, ANEURYSMS, AND MUSCLE CRAMPS; HANAC',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'headAndNeckEyes': '''Retinal arteriolar tortuosity {UMLS C1843517 HP:0001136} {HPO HP:0001136 C1843517};\nRetinal hemorrhage {SNOMEDCT:28998008} {ICD10CM:H35.6,H35.60} {ICD9CM:362.81} {UMLS C0035317 HP:0000573} {HPO HP:0000573 C0035317}''',
'cardiovascularHeart': 'Arrhythmias, supraventricular {SNOMEDCT:72654001} {UMLS C0428974 HP:0005115} {HPO HP:0005115 C0428974}',
'cardiovascularVascular': '''Aneurysms of right internal carotid artery, intracranial segment {UMLS C3278169};\nAneurysm of right middle cerebral artery, horizontal segment {UMLS C3278170};\nRaynaud phenomenon {SNOMEDCT:266261006} {ICD10CM:I73.0} {ICD9CM:443.0} {UMLS C0034735 HP:0030880} {HPO HP:0030880 C0034735}''',
'genitourinaryKidneys': '''Hematuria, microscopic {SNOMEDCT:197940006} {ICD9CM:599.72} {UMLS C0239937 HP:0002907};\nHematuria, gross (in some patients) {UMLS C3278161};\nRenal cysts {SNOMEDCT:722223000} {UMLS C3887499 HP:0000107} {HPO HP:0000107 C0022679,C3887499};\nRenal failure, mild {UMLS C3278162} {HPO HP:0000083 C0035078,C1565489,C1839604};\nBasement membrane alterations in Bowman capsule, tubules, and interstitial capillaries, with irregular thickening, splitting into multiple layers, and electron-lucent areas {UMLS C3278163};\nGlomerular basement membrane normal {UMLS C3278164}''',
'skinNailsHairSkinElectronMicroscopy': '''Basement membrane duplications at dermoepidermal junction {UMLS C3278165};\nDermal arteriole dissociation in vascular smooth muscle cells {UMLS C3278166};\nBasement membrane abnormally spread in vascular smooth muscle cells {UMLS C3278167}''',
'skinNailsHairNails': 'Capillary tortuosity in nail beds {UMLS C3278168}',
'muscleSoftTissue': 'Muscle cramps {SNOMEDCT:55300003} {UMLS C0026821 HP:0003394} {HPO HP:0003394 C0026821}',
'neurologicCentralNervousSystem': '''Leukoencephalopathy, periventricular {SNOMEDCT:230769007} {ICD10CM:P91.2} {ICD9CM:779.7} {UMLS C0023529 HP:0006970};\nMicrovascular spaces, dilated {UMLS C3278159};\nCerebrovascular accident (in some patients) {UMLS C3278160} {HPO HP:0001297 C0038454}''',
'laboratoryAbnormalities': '''Creatine kinase, serum, elevated {UMLS C0241005 HP:0003236};\nGlomerular filtration rate, decreased {UMLS C0853068 HP:0012213}''',
'molecularBasis': 'Caused by mutation in the collagen IV, alpha-1 polypeptide gene (COL4A1, {120130.0007})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': True,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': True,
'skinNailsHairNailsExists': True,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611717,
'prefix': '%',
'preferredTitle': 'SPONDYLOEPIPHYSEAL DYSPLASIA-BRACHYDACTYLY AND DISTINCTIVE SPEECH',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'growthHeight': '''Short stature, disproportionate short limb {UMLS C1849937 HP:0008873} {HPO HP:0008873 C1849937};\nNormal birth length {UMLS C1853173}''',
'growthOther': 'Growth retardation, progressive {UMLS C2673662}',
'headAndNeckFace': '''Coarse facies {UMLS C1845847 HP:0000280} {HPO HP:0000280 C1845847,C4072825};\nRound face {UMLS C0239479 HP:0000311} {HPO HP:0000311 C0239479,C1856468} {EOM ID:a98d48239172dc71 IMG:Face,Round-small.jpg};\nMidface hypoplasia {UMLS C1853242 HP:0011800} {HPO HP:0011800 C1853242,C2673410,C4280320,C4280321} {EOM ID:5b7ad34ab35682b5 IMG:Midface_Retrusion-small.jpg};\nLong philtrum {UMLS C1865014 HP:0000343} {HPO HP:0000343 C1865014} {EOM ID:e1d74175c310388d IMG:Philtrum,Long-small.jpg}''',
'headAndNeckEars': 'Small pinnae {SNOMEDCT:35045004,306914009} {ICD10CM:Q17.2} {ICD9CM:744.23} {UMLS C0152423,C0584784 HP:0008551} {HPO HP:0008551 C0152423}',
'headAndNeckEyes': '''Blepharophimosis {ICD10CM:H02.52} {ICD9CM:374.46} {UMLS C0005744 HP:0000581} {HPO HP:0000581 C0005744} {EOM ID:88ebc039bbbaef0f IMG:Blepharophimosis-small.jpg};\nUpslanting palpebral fissures {SNOMEDCT:246799009} {UMLS C0423109 HP:0000582} {HPO HP:0000582 C0423109};\nCurly eyebrows {UMLS C2673669};\nCurly eyelashes {UMLS C2673670 HP:0007665} {HPO HP:0007665 C2673670}''',
'headAndNeckNose': '''Broad nasal bridge {SNOMEDCT:249321001} {UMLS C1849367 HP:0000431} {HPO HP:0000431 C1839764,C1849367} {EOM ID:e29866db35162165 IMG:Nasal_Bridge,Wide-small.jpg};\nDepressed nasal bridge {UMLS C1836542 HP:0005280} {HPO HP:0005280 C1836542,C3550546,C4280495} {EOM ID:000fb29123c16757 IMG:Nasal_Bridge,Depressed-small.jpg};\nUpturned nose {SNOMEDCT:708670007} {UMLS C1840077 HP:0000463} {HPO HP:0000463 C1840077}''',
'headAndNeckMouth': '''Large mouth {SNOMEDCT:40159009} {ICD10CM:Q18.4} {ICD9CM:744.83} {UMLS C0024433 HP:0000154} {HPO HP:0000154 C0024433};\nThick lower lip {UMLS C1839739 HP:0000179} {HPO HP:0000179 C1839739,C2053437}''',
'headAndNeckNeck': '''Wide neck {UMLS C1853638 HP:0000475} {HPO HP:0000475 C1853638} {EOM ID:1f45b748bb5aa8fe IMG:Neck,Broad-small.jpg};\nShort neck {SNOMEDCT:95427009} {UMLS C0521525 HP:0000470} {HPO HP:0000470 C0521525} {EOM ID:c75e63fd749ec7a8 IMG:Neck,Short-small.jpg}''',
'respiratoryLung': 'Restrictive lung disease {SNOMEDCT:36485005} {UMLS C0085581 HP:0002091} {HPO HP:0002091 C0085581,C3277226}',
'chestExternalFeatures': 'Small thorax {SNOMEDCT:298709006} {UMLS C0575483,C1837482 HP:0005257} {HPO HP:0005257 C1837482}',
'chestRibsSternumClaviclesAndScapulae': '''Thickened clavicles {UMLS C1865240};\nPectus excavatum {SNOMEDCT:391987005,391982004} {ICD10CM:Q67.6} {ICD9CM:754.81} {UMLS C2051831,C0016842 HP:0000767} {HPO HP:0000767 C2051831}''',
'skeletal': '''Spondyloepiphyseal dysplasia {SNOMEDCT:278713008} {ICD10CM:Q77.7} {UMLS C0038015,C2745959 HP:0002655} {HPO HP:0002655 C0038015};\nMild joint contractures {UMLS C1836379};\nSmall epiphyses {UMLS C1846803 HP:0010585} {HPO HP:0010585 C1846803};\nGeneralized epiphyseal ossification delay {UMLS C2673650}''',
'skeletalSpine': '''Lack of lumbar lordosis {UMLS C2673651};\nMild platyspondyly {UMLS C1848999};\nAnterior scalloping vertebral bodies {UMLS C2673652 HP:0004580} {HPO HP:0004580 C2673652};\nCuboid-shaped vertebral bodies {UMLS C2673653 HP:0004634} {HPO HP:0004634 C2673653}''',
'skeletalPelvis': 'Hypoplastic iliac wings {UMLS C1865027 HP:0002866} {HPO HP:0002866 C1865027}',
'skeletalLimbs': '''Rhizo-meso-acromelic limb shortening {UMLS C2673654 HP:0005069} {HPO HP:0005069 C2673654};\nShort long bones {UMLS C1854912 HP:0003026};\nLimited pronation {UMLS C2673655};\nLimited supination {UMLS C2673656};\nCubitus valgus {SNOMEDCT:54583007} {ICD10CM:M21.02} {ICD9CM:736.01} {UMLS C0158465 HP:0002967} {HPO HP:0002967 C0158465}''',
'skeletalHands': '''Short hands {UMLS C4552108 HP:0004279} {HPO HP:0004279 C1843108};\nBrachydactyly {SNOMEDCT:43476002} {UMLS C0221357 HP:0001156} {HPO HP:0001156 C0221357};\nShort, tapered phalanges {UMLS C2673657};\nSingle interphalangeal crease of fifth finger {UMLS C1850336 HP:0006216} {HPO HP:0006216 C1850336};\nShort, tapered metacarpals {UMLS C2673658};\nBrachymetacarpals {UMLS C2673659};\nSmall carpals {UMLS C1863749 HP:0001498} {HPO HP:0001498 C1863749,C4280594}''',
'skeletalFeet': '''Short feet {SNOMEDCT:299463000} {UMLS C1848673,C0576226 HP:0001773} {HPO HP:0001773 C1848673} {EOM ID:860b87ff24a50048 IMG:Foot,Short-small.jpg};\nBrachydactyly {SNOMEDCT:43476002} {UMLS C0221357 HP:0001156} {HPO HP:0001156 C0221357};\nBrachymetatarsals {UMLS C2673660}''',
'skinNailsHairHair': '''Abundant thick, curly scalp hair {UMLS C2673663};\nAbundant and curly eyelashes {UMLS C2673664};\nAbundant and curly eyebrows {UMLS C2673665};\nIncreased hair on arms and legs {UMLS C2673666};\nLow-set nuchal hair {UMLS C2673667}''',
'voice': 'High-pitched, coarse voice {UMLS C2673668}',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': True,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': True,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': True,
'chestExists': True,
'chestExternalFeaturesExists': True,
'chestRibsSternumClaviclesAndScapulaeExists': True,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': True,
'skeletalPelvisExists': True,
'skeletalLimbsExists': True,
'skeletalHandsExists': True,
'skeletalFeetExists': True,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': True,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': True,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611719,
'prefix': '#',
'preferredTitle': 'COMBINED OXIDATIVE PHOSPHORYLATION DEFICIENCY 5; COXPD5',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Very poor growth {UMLS C3552463 HP:0001510} {HPO HP:0001510 C0151686,C0456070,C0878787,C1837385,C3552463}',
'headAndNeckHead': 'Microcephaly {SNOMEDCT:1829003} {ICD10CM:Q02} {ICD9CM:742.1} {UMLS C4551563,C0025958 HP:0000252} {HPO HP:0000252 C0424688} {EOM ID:8ae2118220c1308f IMG:Microcephaly-small.jpg}',
'headAndNeckFace': 'Retrognathia {SNOMEDCT:109515000} {UMLS C0035353,C3494422 HP:0000278} {HPO HP:0000278 C3494422} {EOM ID:588f04d3f1b40b25 IMG:Retrognathia-small.jpg}',
'headAndNeckEars': '''Low-set ears {SNOMEDCT:95515009} {ICD10CM:Q17.4} {UMLS C0239234 HP:0000369} {HPO HP:0000369 C0239234};\nPosteriorly rotated ears {SNOMEDCT:253251006} {UMLS C0431478 HP:0000358} {HPO HP:0000358 C0431478}''',
'headAndNeckNeck': 'Redundant neck skin {UMLS C1840319 HP:0005989} {HPO HP:0005989 C1840319}',
'cardiovascularHeart': 'Hypertrophic cardiomyopathy {SNOMEDCT:195020003,233873004,45227007} {ICD10CM:I42.1,I42.2} {ICD9CM:425.1,425.11} {UMLS C0340425,C4551472,C0007194 HP:0001639} {HPO HP:0001639 C0007194}',
'abdomen': 'Ascites {SNOMEDCT:389026000} {ICD10CM:R18,R18.8} {ICD9CM:789.5} {UMLS C0003962,C4553641 HP:0001541} {HPO HP:0001541 C0003962}',
'genitourinaryKidneys': 'Tubulopathy (1 family) {UMLS C3552462}',
'muscleSoftTissue': '''Edema {SNOMEDCT:267038008,20741006,79654002} {ICD10CM:R60.9} {ICD9CM:782.3} {UMLS C1717255,C0013604 HP:0000969} {HPO HP:0000969 C0013604};\nHypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120}''',
'neurologicCentralNervousSystem': '''Psychomotor retardation, profound, in those who survive {UMLS C3552461};\nCorpus callosum hypoplasia {SNOMEDCT:204043002} {UMLS C0344482 HP:0002079} {HPO HP:0002079 C0344482};\nLeukoencephalopathy {SNOMEDCT:22811006,16058431000119104} {UMLS C4553203,C0270612 HP:0002352} {HPO HP:0002352 C0270612};\nSeizures {SNOMEDCT:91175000} {UMLS C0036572 HP:0001250} {HPO HP:0001250 C0014544,C0036572};\nDelayed myelination {SNOMEDCT:135810007} {UMLS C1277241 HP:0012448} {HPO HP:0012448 C1277241};\nTruncal hypotonia {UMLS C1853743 HP:0008936} {HPO HP:0008936 C1853743};\nSpastic quadriplegia {SNOMEDCT:192965001} {UMLS C0426970 HP:0002510} {HPO HP:0002510 C0426970}''',
'metabolicFeatures': 'Metabolic acidosis, severe {UMLS C1836805} {HPO HP:0001942 C0220981}',
'laboratoryAbnormalities': '''Increased serum lactate {UMLS C1836440 HP:0002151} {HPO HP:0002151 C1836440};\nIncreased serum ammonia {UMLS C0740942};\nSkeletal muscle shows decreased activities of mitochondrial respiratory complexes I, III, IV, and V {UMLS C2673644}''',
'miscellaneous': '''Antenatal onset {UMLS C2673646 HP:0030674} {HPO HP:0030674 C2673646};\nDeath often in first months of life {UMLS C3552464};\nTwo families have been reported (as of June 2011) {UMLS C3279148}''',
'molecularBasis': 'Caused by mutation in the mitochondrial ribosomal protein S22 gene (MRPS22, {605810.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': True,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611721,
'prefix': '#',
'preferredTitle': 'COMBINED SAPOSIN DEFICIENCY',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'headAndNeckEyes': 'Optic atrophy (reported in 1 patient) {UMLS C2749286} {HPO HP:0000648 C0029124}',
'respiratory': 'Neonatal respiratory failure {SNOMEDCT:95619009} {ICD10CM:P28.5} {ICD9CM:770.84} {UMLS C0521648}',
'abdomenLiver': 'Hepatomegaly {SNOMEDCT:80515008} {ICD10CM:R16.0} {ICD9CM:789.1} {UMLS C0019209 HP:0002240} {HPO HP:0002240 C0019209}',
'abdomenSpleen': 'Splenomegaly {SNOMEDCT:16294009} {ICD10CM:R16.1} {ICD9CM:789.2} {UMLS C0038002 HP:0001744} {HPO HP:0001744 C0038002}',
'abdomenGastrointestinal': 'Poor feeding {SNOMEDCT:78164000,299698007} {ICD10CM:R63.3} {UMLS C0576456,C0232466 HP:0011968} {HPO HP:0011968 C0232466}',
'neurologicCentralNervousSystem': '''Myoclonus {SNOMEDCT:17450006} {ICD10CM:G25.3} {ICD9CM:333.2} {UMLS C0027066 HP:0001336} {HPO HP:0001336 C0027066,C1854302};\nHyperkinetic movements {SNOMEDCT:44548000} {UMLS C3887506 HP:0002487};\nClonic seizures {SNOMEDCT:6208003} {UMLS C0234535};\nFasciculations {SNOMEDCT:82470000} {ICD10CM:R25.3} {UMLS C0015644 HP:0002380} {HPO HP:0002380 C0015644};\nExtensor plantar responses {SNOMEDCT:246586009,366575004} {UMLS C0034935 HP:0003487} {HPO HP:0003487 C0034935};\nExaggerated Moro reflex {UMLS C2673636};\nHypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120};\nHypo- and demyelination of the brain {UMLS C2673637};\nThin corpus callosum {SNOMEDCT:204043002} {UMLS C0344482 HP:0002079} {HPO HP:0002079 C0344482};\nPeriventricular white matter changes {UMLS C1853380};\nNeuronal loss {UMLS C1850496 HP:0002529} {HPO HP:0002529 C1850496};\nFibrillary astrocytes {SNOMEDCT:70357005} {UMLS C0228090}''',
'laboratoryAbnormalities': '''Multiple tissue biopsies show lysosomal storage disease {UMLS C2673638};\nDeficiency of saposins A, B, C, and D {UMLS C2673639};\nDecreased activity of glycosylceramidase, galactosylceramidase, ceramidase, and other lysosomal enzymes {UMLS C2673640};\nIncreased urinary glycosphingolipids, particularly globotriaosylceramide {UMLS C3277848}''',
'miscellaneous': '''Onset at birth {UMLS C1836142 HP:0003577} {HPO HP:0003577 C1836142,C2752013};\nDeath in infancy {UMLS C1858430 HP:0001522} {HPO HP:0001522 C1844947,C1858430}''',
'molecularBasis': 'Caused by mutation in the prosaposin gene (PSAP, {176801.0005})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': True,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': True,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611788,
'prefix': '#',
'preferredTitle': 'AORTIC ANEURYSM, FAMILIAL THORACIC 6; AAT6',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'cardiovascularVascular': '''Thoracic aortic aneurysm {SNOMEDCT:74883004,433068007} {ICD10CM:I71.2} {ICD9CM:441.2} {UMLS C0162872,C3251816 HP:0012727} {HPO HP:0012727 C0162872};\nAscending aortic aneurysm {SNOMEDCT:425963007} {UMLS C0856747 HP:0004970} {HPO HP:0004970 C0345049};\nAortic dissection (in some patients) {UMLS C4315460} {HPO HP:0002647 C0340643}''',
'miscellaneous': 'Range of onset 3-79 years of age {UMLS C4314960}',
'molecularBasis': 'Caused by mutation in the alpha-2 actin gene (ACTA2, {102620.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': True,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611722,
'prefix': '#',
'preferredTitle': 'KRABBE DISEASE, ATYPICAL, DUE TO SAPOSIN A DEFICIENCY',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'respiratory': '''Central apnea {UMLS C3887548,C0520680 HP:0010536,HP:0002871} {HPO HP:0002871 C0520680};\nRespiratory failure {SNOMEDCT:409622000} {ICD10CM:J96.9} {UMLS C4552651,C1145670 HP:0002878} {HPO HP:0002878 C1145670}''',
'neurologicCentralNervousSystem': '''Neurologic regression around age 3 months {UMLS C2673267};\nLoss of spontaneous movements {UMLS C2673268};\nHyporeflexia {SNOMEDCT:22994000,405946002} {UMLS C0151888,C0700078 HP:0001315,HP:0001265} {HPO HP:0001265 C0700078};\nIncreased muscle tone {SNOMEDCT:41581000,56731001} {UMLS C0026826 HP:0001276} {HPO HP:0001276 C0026826};\nGeneralized brain atrophy {UMLS C0241816 HP:0002283} {HPO HP:0002283 C0241816};\nDiffuse white matter dysmyelination {UMLS C2673269};\nIncreased CSF protein {UMLS C1806780 HP:0002922} {HPO HP:0002922 C1806780}''',
'laboratoryAbnormalities': '''Decreased activity of galactocerebrosidase {UMLS C2673270};\nSaposin A deficiency {UMLS C2673271,C2673266}''',
'miscellaneous': '''Onset in infancy {UMLS C1848924 HP:0003593} {HPO HP:0003593 C1848924};\nEarly death {UMLS C1836407}''',
'molecularBasis': 'Caused by mutation in the prosaposin gene (PSAP, {176801.0009}).',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611726,
'prefix': '#',
'preferredTitle': 'EPILEPSY, PROGRESSIVE MYOCLONIC, 3, WITH OR WITHOUT INTRACELLULAR INCLUSIONS; EPM3',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'headAndNeckHead': 'Microcephaly (in 1 family) {UMLS C3552471} {HPO HP:0000252 C0424688} {EOM ID:8ae2118220c1308f IMG:Microcephaly-small.jpg}',
'headAndNeckEyes': '''Visual loss (in 1 family) {UMLS C3552472} {HPO HP:0000572 C3665386};\nOptic atrophy, mild (in 1 patient) {UMLS C3552473} {HPO HP:0000648 C0029124}''',
'neurologicCentralNervousSystem': '''Myoclonic seizures {SNOMEDCT:37356005} {UMLS C4317123,C0014550 HP:0002123} {HPO HP:0002123 C0014550,C0751778,C4021759};\nSecondary generalization {UMLS C0815106};\nInitial normal development {UMLS C2673258};\nNeurologic regression following seizure onset {UMLS C2673259};\nMental retardation {SNOMEDCT:110359009,228156007} {ICD9CM:317-319.99} {UMLS C0025362,C3714756 HP:0001249} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876};\nDysarthria {SNOMEDCT:8011004} {ICD9CM:438.13,784.51} {UMLS C0013362,C4553903 HP:0001260} {HPO HP:0001260 C0013362};\nLimited expressive language {UMLS C2673260};\nTruncal ataxia {SNOMEDCT:250067008} {UMLS C0427190 HP:0002078} {HPO HP:0002078 C0427190};\nLoss of motor function {UMLS C1864672};\nEEG shows slowed dysrhythmia and multifocal discharges {UMLS C2673261};\nCerebral atrophy (in 1 family) {UMLS C2749337} {HPO HP:0002059 C0154671,C0235946,C4020860};\nCerebellar atrophy (in 1 family) {UMLS C2749336} {HPO HP:0001272 C0262404,C0740279,C4020873};\nThinning of the corpus callosum (in 1 family) {UMLS C3277651} {HPO HP:0002079 C0344482}''',
'laboratoryAbnormalities': '''Granular osmiophilic cytoplasmic deposits ultrastructurally in cells {UMLS C3552474};\n\'Fingerprint profiles\' ultrastructurally in cells {UMLS C1836851 HP:0003208} {HPO HP:0003208 C1836851};\n\'Rectilinear profiles\' ultrastructurally in cells {UMLS C3552475}''',
'miscellaneous': '''Onset before age 2 years {UMLS C1836346};\nTwo unrelated families have been reported (last curated July 2012) {UMLS C3552476};\nOnly 1 family had ultrastructural cellular findings of neuronal ceroid lipofuscinosis {UMLS C3552477};\nProgressive disorder {UMLS C1864985 HP:0003676} {HPO HP:0003676 C0205329,C1864985};\nSevere phenotype {UMLS C1836408}''',
'molecularBasis': 'Caused by mutation in the potassium channel tetramerisation domain containing 7 gene (KCTD7, {611725.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': True,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611762,
'prefix': '#',
'preferredTitle': 'FAMILIAL COLD AUTOINFLAMMATORY SYNDROME 2; FCAS2',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'headAndNeckEars': 'Sensorineural deafness (in some patients) {UMLS C3280851} {HPO HP:0000407 C0018784}',
'headAndNeckMouth': 'Aphthous ulcers, episodic {UMLS C4013542}',
'abdomen': 'Abdominal pain, episodic {UMLS C3808022 HP:0002574} {HPO HP:0002027 C0000737}',
'abdomenSpleen': 'Splenomegaly (in some patients) {UMLS C3280956} {HPO HP:0001744 C0038002}',
'skeletal': '''Arthralgias, episodic {UMLS C2674038} {HPO HP:0002829 C0003862};\nArthritis, episodic {UMLS C4693007} {HPO HP:0001369 C0003864}''',
'skinNailsHairSkin': '''Rash, episodic {UMLS C4013544} {HPO HP:0000988 C0015230};\nUrticaria, episodic {SNOMEDCT:402409001} {UMLS C1276118} {HPO HP:0001025 C0042109}''',
'muscleSoftTissue': 'Myalgias, episodic {UMLS C2674035} {HPO HP:0003326 C0231528}',
'neurologicCentralNervousSystem': 'Headache, episodic {UMLS C2675791} {HPO HP:0002315 C0018681}',
'metabolicFeatures': 'Fever, episodic {SNOMEDCT:77957000} {UMLS C0277799 HP:0001954} {HPO HP:0001945 C0015967}',
'immunology': '''Lymphadenopathy (in some patients) {UMLS C4014613} {HPO HP:0002716 C0497156};\nLymphocytosis, episodic (in some patients) {UMLS C4693006} {HPO HP:0100827 C0024282}''',
'laboratoryAbnormalities': '''Serum C-reactive protein may be increased {UMLS C4013543};\nErythrocyte sedimentation rate may be increased {UMLS C4693008};\nIncreased acute phase reactants {UMLS C4693009}''',
'miscellaneous': '''Variable age at onset, range infancy to adult {UMLS C1842007};\nPhenotypic variability {UMLS C1837514 HP:0003812} {HPO HP:0003812 C1837514,C1839039,C1850667,C1866210};\nEpisodes are triggered by cold exposure {UMLS C4013546};\nEpisodes can last hours, days, or weeks {UMLS C4693010};\nResponsive to steroid treatment {UMLS C4693011};\nIncomplete penetrance {UMLS C1836598 HP:0003829} {HPO HP:0003829 C1836598}''',
'molecularBasis': 'Caused by mutations in the NLR family, pyrin-domain containing 12 gene (NLRP12, {609648.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': True,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611862,
'prefix': '#',
'preferredTitle': 'WHITE BLOOD CELL COUNT QUANTITATIVE TRAIT LOCUS 1; WBCQ1',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'hematology': 'Neutropenia, benign {UMLS C3277457}',
'miscellaneous': 'Homozygosity for the common West African allele (rs2814778) eliminates expression of the Duffy blood group antigen and explains approximately 20% of population variation in white blood cell count (WBC) {UMLS C4694102}',
'molecularBasis': 'Caused by mutation in the atypical chemokine receptor 1 gene (ACKR1, {613665.0002})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611804,
'prefix': '#',
'preferredTitle': 'ELLIPTOCYTOSIS 1; EL1',
'inheritance': '''Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147};\nAutosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}''',
'abdomenSpleen': 'Splenomegaly {SNOMEDCT:16294009} {ICD10CM:R16.1} {ICD9CM:789.2} {UMLS C0038002 HP:0001744} {HPO HP:0001744 C0038002}',
'skinNailsHairSkin': '''Pallor {SNOMEDCT:267029006,398979000} {ICD10CM:R23.1} {ICD9CM:782.61} {UMLS C0241137,C0030232 HP:0000980} {HPO HP:0000980 C0030232};\nJaundice {SNOMEDCT:18165001} {ICD10CM:R17} {UMLS C0022346,C2203646,C2010848 HP:0000952} {HPO HP:0000952 C0022346}''',
'hematology': '''Elliptocytosis {SNOMEDCT:250242004,178935009,191169008} {ICD10CM:D58.1} {ICD9CM:282.1} {UMLS C0013902,C0427480 HP:0004445} {HPO HP:0004445 C0013902,C0427480};\nAnemia {SNOMEDCT:271737000} {ICD10CM:D64.9} {ICD9CM:285.9} {UMLS C0002871,C4554633,C1000483 HP:0001903} {HPO HP:0001903 C0002871,C0162119};\nAplastic crisis (in some patients) {UMLS C4693013}''',
'miscellaneous': '''Affected individuals may have heterozygous or homozygous mutations {UMLS C4693015};\nPatients with heterozygous mutations may be clinically asymptomatic {UMLS C4694101}''',
'molecularBasis': 'Caused by mutation in the erythrocyte membrane protein band 4.1 gene (EPB41, {130500.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611808,
'preferredTitle': 'TREMOR, HEREDITARY ESSENTIAL, AND IDIOPATHIC NORMAL PRESSURE HYDROCEPHALUS; ETINPH',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'headAndNeckHead': 'Increased head circumference {UMLS C4083076 HP:0040194} {HPO HP:0040194 C4083076}',
'neurologicCentralNervousSystem': '''Essential tremor {SNOMEDCT:609558009} {ICD10CM:G25.0} {UMLS C0270736 HP:0030186} {HPO HP:0030186 C0234376,C0270736};\nNormal pressure hydrocephalus {SNOMEDCT:30753002} {ICD10CM:G91.2} {UMLS C0020258 HP:0002343} {HPO HP:0002343 C0020258};\nEnlarged ventricles {UMLS C3278923 HP:0002119} {HPO HP:0002119 C3278923};\nImpaired gait {SNOMEDCT:22325002} {ICD9CM:781.2} {UMLS C0575081 HP:0001288} {HPO HP:0001288 C0575081}''',
'miscellaneous': '''Onset of essential tremor between 16 and 44 years {UMLS C2678495};\nOnset of normal pressure hydrocephalus after age 65 years {UMLS C2678496}''',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': True,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611878,
'prefix': '#',
'preferredTitle': 'CARDIOMYOPATHY, DILATED, 1Y; CMD1Y',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'cardiovascularHeart': '''Heart failure, progressive and sometimes fatal {UMLS C3809347};\nVentricular tachycardia, nonsustained (in some patients) {UMLS C4015673};\nDecreased left ventricular ejection fraction {UMLS C1096403};\nDecreased left ventricular fractional shortening {UMLS C4230002};\nEbstein anomaly (in some patients) {UMLS C3808336};\nMitral valve insufficiency (in some patients) {UMLS C3809294} {HPO HP:0001653 C0026266,C3551535};\nIrregular and fragmented thin filaments of sarcomere seen on electron microscopy {UMLS C4230001};\nScalloped appearance of sarcolemma seen on electron microscopy {UMLS C4230000};\nLeft ventricular noncompaction at apex and/or midventricular wall (in some patients) {UMLS C3809349}''',
'miscellaneous': 'Some patients require cardiac transplantation {UMLS C3809351}',
'molecularBasis': 'Caused by mutation in the gene encoding tropomyosin-1 (TPM1, {191010.0004})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611880,
'prefix': '#',
'preferredTitle': 'CARDIOMYOPATHY, DILATED, 2A; CMD2A',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'cardiovascularHeart': '''Cardiomyopathy, dilated {SNOMEDCT:399020009,195021004} {ICD10CM:I42.0} {UMLS C0007193 HP:0001644} {HPO HP:0001644 C0007193};\nCongestive heart failure {SNOMEDCT:42343007} {ICD10CM:I50.9} {ICD9CM:428.0} {UMLS C0018802 HP:0001635} {HPO HP:0001635 C0018801,C0018802}''',
'miscellaneous': 'Onset in second decade {UMLS C1847899}',
'molecularBasis': 'Caused by mutation in the cardiac troponin I gene (TNNI3, {191044.0009})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611812,
'prefix': '#',
'preferredTitle': '46,XX SEX REVERSAL WITH DYSGENESIS OF KIDNEYS, ADRENALS, AND LUNGS; SERKAL',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Intrauterine growth retardation {SNOMEDCT:22033007} {ICD9CM:764.90,764.9} {UMLS C0015934 HP:0001511} {HPO HP:0001511 C0015934,C0021296,C1386048}',
'headAndNeckEars': 'Low-set ears {SNOMEDCT:95515009} {ICD10CM:Q17.4} {UMLS C0239234 HP:0000369} {HPO HP:0000369 C0239234}',
'headAndNeckMouth': '''Cleft lip {SNOMEDCT:80281008} {ICD10CM:Q36.9,Q36} {ICD9CM:749.1,749.10} {UMLS C0008924,C4321245 HP:0000204,HP:0410030} {HPO HP:0410030};\nCleft palate {SNOMEDCT:87979003,63567004} {ICD10CM:Q35.5,Q35,Q35.9} {ICD9CM:749.0,749.00} {UMLS C2981150,C0008925,C2240378 HP:0000175} {HPO HP:0000175 C0008925,C2981150}''',
'cardiovascularHeart': 'Ventricular septal defect {SNOMEDCT:30288003,768552007,253549006} {ICD10CM:Q21.0} {ICD9CM:745.4} {UMLS C0018818 HP:0001629} {HPO HP:0001629 C0018818}',
'cardiovascularVascular': 'Pulmonary artery stenosis {SNOMEDCT:95441000,449125001} {ICD10CM:Q25.6} {UMLS C0265911,C0238397 HP:0004415} {HPO HP:0004415 C0238397}',
'respiratoryLung': '''Small lungs {SNOMEDCT:80825009} {UMLS C0265783 HP:0002089};\nHypoplastic lungs {SNOMEDCT:80825009} {UMLS C0265783 HP:0002089} {HPO HP:0002089 C0265783}''',
'chestDiaphragm': 'Diaphragmatic hernia {SNOMEDCT:39839004,17190001} {ICD10CM:K44,K44.9,Q79.0} {ICD9CM:553.3} {UMLS C0235833,C0494752,C0019284 HP:0000776} {HPO HP:0000776 C0235833}',
'genitourinaryExternalGenitaliaMale': '''Curved penis {UMLS C4478835};\nHypospadias {SNOMEDCT:416010008,204888000} {ICD10CM:Q54.1,Q54.9,Q54} {ICD9CM:752.61} {UMLS C1691215,C0848558 HP:0003244,HP:0000047} {HPO HP:0000047 C1691215}''',
'genitourinaryInternalGenitaliaMale': 'Ovotestis (in 1 fetus) {UMLS C4314958} {HPO HP:0012861 C0266361}',
'genitourinaryKidneys': 'Kidney agenesis/dysgenesis {UMLS C4313041}',
'genitourinaryUreters': 'Thin ureters {UMLS C4314957}',
'genitourinaryBladder': 'Hypoplastic bladder {UMLS C1855335 HP:0005343} {HPO HP:0005343 C1855335}',
'endocrineFeatures': 'Adrenal agenesis/dysgenesis {UMLS C4314959}',
'prenatalManifestationsAmnioticFluid': 'Oligohydramnios {SNOMEDCT:59566000} {ICD10CM:O41.0,O41.00} {ICD9CM:658.0} {UMLS C0079924 HP:0001562} {HPO HP:0001562 C0079924,C3550658}',
'laboratoryAbnormalities': '46,XX male (in 1 fetus) {UMLS C4539250}',
'miscellaneous': '''Based on 3 fetuses in 1 consanguineous family (last curated November 2016) {UMLS C4314954};\nAll 3 pregnancies terminated {UMLS C4314953}''',
'molecularBasis': 'Caused by mutation in the wingless-type MMTV integration site family, member 4 (WNT4, {603490.0002})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': True,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': True,
'chestExists': True,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': True,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': True,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': True,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': True,
'genitourinaryBladderExists': True,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': True,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': True,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': True,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611881,
'prefix': '#',
'preferredTitle': 'GLYCOGEN STORAGE DISEASE XII; GSD12',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthHeight': 'Short stature {SNOMEDCT:422065006,237837007,237836003} {ICD10CM:R62.52,E34.3} {ICD9CM:783.43} {UMLS C0013336,C0349588,C2237041,C2919142 HP:0004322,HP:0003510} {HPO HP:0004322 C0349588}',
'headAndNeckEyes': '''Ptosis {SNOMEDCT:11934000,29696001} {ICD10CM:H02.4,H02.40,H02.409} {ICD9CM:374.3,374.30} {UMLS C0005745,C0033377 HP:0000508} {HPO HP:0000508 C0005745} {EOM ID:1bd157b764ec7aea IMG:Ptosis-small.jpg};\nEpicanthus {SNOMEDCT:74824007} {UMLS C0229249,C0678230 HP:0000286} {HPO HP:0000286 C0678230} {EOM ID:8b4ac9cc8a79aa3e IMG:Epicanthus-small.jpg}''',
'headAndNeckNeck': '''Short neck {SNOMEDCT:95427009} {UMLS C0521525 HP:0000470} {HPO HP:0000470 C0521525} {EOM ID:c75e63fd749ec7a8 IMG:Neck,Short-small.jpg};\nLow posterior hairline {UMLS C1855728 HP:0002162} {HPO HP:0002162 C1855728} {EOM ID:efe02d35c10721b6 IMG:Hairline,Low_Posterior-small.jpg}''',
'abdomenLiver': 'Jaundice {SNOMEDCT:18165001} {ICD10CM:R17} {UMLS C0022346,C2203646,C2010848 HP:0000952} {HPO HP:0000952 C0022346}',
'abdomenBiliaryTract': '''Cholelithiasis {SNOMEDCT:266474003} {ICD10CM:K80} {ICD9CM:574} {UMLS C0008350 HP:0001081} {HPO HP:0001081 C0008350};\nCholecystitis {SNOMEDCT:76581006} {ICD10CM:K81,K81.9} {ICD9CM:575.10} {UMLS C0008325,C4553186,C1963083 HP:0001082} {HPO HP:0001082 C0008325}''',
'abdomenSpleen': 'Splenomegaly {SNOMEDCT:16294009} {ICD10CM:R16.1} {ICD9CM:789.2} {UMLS C0038002 HP:0001744} {HPO HP:0001744 C0038002}',
'skinNailsHairSkin': 'Jaundice {SNOMEDCT:18165001} {ICD10CM:R17} {UMLS C0022346,C2203646,C2010848 HP:0000952} {HPO HP:0000952 C0022346}',
'muscleSoftTissue': 'Myopathy (in some patients) {UMLS C3552487} {HPO HP:0003198 C0026848}',
'neurologicCentralNervousSystem': 'Mental retardation (in some patients) {UMLS C1968646} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876}',
'endocrineFeatures': 'Delayed puberty {SNOMEDCT:400003000,123526007} {ICD10CM:E30.0} {UMLS C0034012,C1883716 HP:0000823} {HPO HP:0000823 C0034012}',
'hematology': '''Congenital nonspherocytic hemolytic anemia {SNOMEDCT:301317008} {UMLS C0002882};\nNormocytic anemia {SNOMEDCT:300980002} {UMLS C0085577 HP:0001897} {HPO HP:0001897 C0085577};\nNormochromic anemia {UMLS C0235983 HP:0001895} {HPO HP:0001895 C0235983};\nNormal red cell osmotic fragility {UMLS C3552488}''',
'laboratoryAbnormalities': 'Aldolase A deficiency {SNOMEDCT:111578003} {UMLS C3539009,C0272066}',
'molecularBasis': 'Caused by mutation in the Aldolase A, fructose-bisphosphatase gene (ALDOA, {103850.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': True,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': True,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': True,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': True,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611816,
'prefix': '#',
'preferredTitle': 'TEMPLE-BARAITSER SYNDROME; TMBTS',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'headAndNeckFace': '''Myopathic facies {SNOMEDCT:26432009} {UMLS C0332615 HP:0002058} {HPO HP:0002058 C0332615};\nFlat forehead {UMLS C1857485 HP:0004425} {HPO HP:0004425 C1857485};\nLong philtrum {UMLS C1865014 HP:0000343} {HPO HP:0000343 C1865014} {EOM ID:e1d74175c310388d IMG:Philtrum,Long-small.jpg}''',
'headAndNeckEyes': '''Hypertelorism {SNOMEDCT:22006008} {ICD10CM:Q75.2} {ICD9CM:376.41} {UMLS C0020534 HP:0000316} {HPO HP:0000316 C0020534} {EOM ID:71d9f1be67c7f8b6 IMG:Eyes,Widely_Spaced-small.jpg};\nEpicanthal folds {SNOMEDCT:74824007} {UMLS C0229249,C0678230 HP:0000286} {HPO HP:0000286 C0678230};\nPoor visual contact {UMLS C3552514}''',
'headAndNeckNose': '''Broad nose {SNOMEDCT:249321001} {UMLS C0426421 HP:0000445} {HPO HP:0000445 C0426421};\nDepressed nasal bridge {UMLS C1836542 HP:0005280} {HPO HP:0005280 C1836542,C3550546,C4280495} {EOM ID:000fb29123c16757 IMG:Nasal_Bridge,Depressed-small.jpg};\nThick nasal alae {UMLS C1844809 HP:0009928} {HPO HP:0009928 C1844809}''',
'headAndNeckMouth': '''Wide mouth {SNOMEDCT:40159009} {ICD10CM:Q18.4} {ICD9CM:744.83} {UMLS C0024433 HP:0000154} {HPO HP:0000154 C0024433} {EOM ID:a6a2d57a281ead72 IMG:Mouth,Wide-small.jpg};\nDownturned corners of the mouth {UMLS C1866195 HP:0002714} {HPO HP:0002714 C1866195};\nThick vermilion border of the lips {UMLS C4013554}''',
'skeletalHands': '''Broad thumbs {SNOMEDCT:249773003} {UMLS C0426891 HP:0011304} {HPO HP:0011304 C0426891};\nProximal implantation of thumb {UMLS C3552483};\nAdducted thumbs {UMLS C3554617 HP:0001181} {HPO HP:0001181 C3554617};\nHypoplasia of terminal phalanges {UMLS C2678488};\nCentral translucency of distal phalanges of thumbs {UMLS C2678487};\nAbnormal secondary ossification center of distal phalanges of thumbs {UMLS C4013555};\nPseudoepiphysis of the thumb {UMLS C3552484 HP:0009693} {HPO HP:0009693 C3552484}''',
'skeletalFeet': '''Broad halluces {UMLS C1867131 HP:0010055} {HPO HP:0010055 C1867131} {EOM ID:7c222af1a91aae93 IMG:Hallux,Broad-small.jpg};\nLong great toes {UMLS C3552485};\nHypoplasia of terminal phalanges {UMLS C2678488};\nCentral translucency of distal phalanges of halluces {UMLS C4013556};\nAbnormal secondary ossification center of distal phalanges of thumbs {UMLS C4013555}''',
'skinNailsHairNails': '''Hypoplastic/aplastic thumb nails {UMLS C4013557};\nHypoplastic/aplastic nails of halluces {UMLS C4013558}''',
'neurologicCentralNervousSystem': '''Delayed psychomotor development {SNOMEDCT:224958001} {ICD10CM:F88} {UMLS C0557874 HP:0001263} {HPO HP:0001263 C0557874,C1864897,C4020875};\nHypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120};\nMental retardation, severe {SNOMEDCT:40700009} {ICD10CM:F72} {ICD9CM:318.1} {UMLS C0036857 HP:0010864} {HPO HP:0010864 C0036857};\nSeizures {SNOMEDCT:91175000} {UMLS C0036572 HP:0001250} {HPO HP:0001250 C0014544,C0036572}''',
'miscellaneous': '''Onset in infancy {UMLS C1848924 HP:0003593} {HPO HP:0003593 C1848924};\nMost mutations occur de novo {UMLS C3149178}''',
'molecularBasis': 'Caused by mutation in the voltage-gated potassium channel, subfamily H, member 1 gene (KCNH1, {603305.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': True,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': True,
'skeletalFeetExists': True,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': True,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611884,
'prefix': '#',
'preferredTitle': 'CILIARY DYSKINESIA, PRIMARY, 7; CILD7',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'respiratory': 'Recurrent respiratory infections due to impaired ciliary motility {UMLS C2749892}',
'respiratoryLung': 'Bronchiectasis {SNOMEDCT:12295008} {ICD10CM:J47,J47.9} {ICD9CM:494} {UMLS C0006267 HP:0002110} {HPO HP:0002110 C0006267}',
'abdomen': 'Situs inversus (in some patients) {UMLS C3807053} {HPO HP:0001696 C0037221,C0266642}',
'genitourinaryInternalGenitaliaMale': 'Male fertility remains intact {UMLS C2749889}',
'laboratoryAbnormalities': '''Cilia show nonflexible and hyperkinetic beating of axonemes {UMLS C2749890};\nCilia may also be static, with slow activity {UMLS C3552490};\nAxonemes show normal structure {UMLS C2749891}''',
'molecularBasis': 'Caused by mutation in the dynein axonemal heavy chain 11 gene (DNAH11, {603339.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': True,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': True,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 611818,
'prefix': '#',
'preferredTitle': 'LONG QT SYNDROME 9; LQT9',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'cardiovascularHeart': '''Prolongation of corrected QT interval {UMLS C4478837};\nNonexertional syncope (in some patients) {UMLS C4478838};\nSinus bradycardia (in some patients) {UMLS C4478839} {HPO HP:0001688 C0085610};\nCardiac arrest (in some patients) {UMLS C4015675} {HPO HP:0001695 C0018790}''',
'laboratoryAbnormalities': 'Marked increase in late sodium current on voltage-clamp studies {UMLS C4478836}',
'miscellaneous': 'Sudden unexplained infant death (SIDS) reported in some patients {UMLS C4479765}',
'molecularBasis': 'Caused by mutation in the caveolin 3 gene (CAV3, {601253.0016})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
}
} ]
}
} } | [
"jhostyk@gmail.com"
] | jhostyk@gmail.com |
c42d55490407bfcfd3a591030db63cd5be9b2b58 | ad4c2aa0398406ccb7e70562560e75fa283ffa1a | /find-and-replace-in-string/find-and-replace-in-string.py | d3ee3f2ce5afaf66d400552e4e375febc9762f26 | [
"Apache-2.0"
] | permissive | kmgowda/kmg-leetcode-python | 427d58f1750735618dfd51936d33240df5ba9ace | 4d32e110ac33563a8bde3fd3200d5804db354d95 | refs/heads/main | 2023-08-22T06:59:43.141131 | 2021-10-16T14:04:32 | 2021-10-16T14:04:32 | 417,841,590 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | // https://leetcode.com/problems/find-and-replace-in-string
class Solution(object):
def findReplaceString(self, S, indexes, sources, targets):
"""
:type S: str
:type indexes: List[int]
:type sources: List[str]
:type targets: List[str]
:rtype: str
"""
d ={}
for i, ind in enumerate(indexes):
d[ind]=[sources[i], targets[i]]
start = 0
out=""
for ind in sorted(d.keys()):
out+=S[start:ind]
src, dst = d[ind]
if src == S[ind:ind+len(src)]:
out+=dst
else:
out+=S[ind:ind+len(src)]
start = ind+len(src)
out+=S[start:]
return out | [
"keshava.gowda@gmail.com"
] | keshava.gowda@gmail.com |
44f2dfba86e5e004678f934e9bfd00a8545929f3 | 0b01cb61a4ae4ae236a354cbfa23064e9057e434 | /alipay/aop/api/domain/KoubeiMarketingCampaignBenefitQueryModel.py | d167d12a86faf113518b209e604ee0b9b496a368 | [
"Apache-2.0"
] | permissive | hipacloud/alipay-sdk-python-all | e4aec2869bf1ea6f7c6fb97ac7cc724be44ecd13 | bdbffbc6d5c7a0a3dd9db69c99443f98aecf907d | refs/heads/master | 2022-11-14T11:12:24.441822 | 2020-07-14T03:12:15 | 2020-07-14T03:12:15 | 277,970,730 | 0 | 0 | Apache-2.0 | 2020-07-08T02:33:15 | 2020-07-08T02:33:14 | null | UTF-8 | Python | false | false | 937 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMarketingCampaignBenefitQueryModel(object):
def __init__(self):
self._benefit_id = None
@property
def benefit_id(self):
return self._benefit_id
@benefit_id.setter
def benefit_id(self, value):
self._benefit_id = value
def to_alipay_dict(self):
params = dict()
if self.benefit_id:
if hasattr(self.benefit_id, 'to_alipay_dict'):
params['benefit_id'] = self.benefit_id.to_alipay_dict()
else:
params['benefit_id'] = self.benefit_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiMarketingCampaignBenefitQueryModel()
if 'benefit_id' in d:
o.benefit_id = d['benefit_id']
return o
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
2c50a5f7684da4ef0800d4cf07d2aac353687e63 | 240ab4c4348b194e8f284935d56fa197a120be10 | /LAC_tf1/eval.py | 71ef651cc0c34b6fb7df5a1718fb6fa44f0e030b | [] | no_license | rickstaa/tf2-eager-vs-graph-grad-problem | 7373ee36219c498f0ce9db25f5773400afd65f03 | be30a3505e6abd9de74a88bc39456fa7e985f16f | refs/heads/master | 2022-12-18T04:42:21.043344 | 2020-09-22T07:22:36 | 2020-09-22T07:22:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,721 | py | """Evaluate trained LAC agent."""
import os
import sys
import argparse
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from utils import get_env_from_name
import logger
from variant import (
ENV_NAME,
ENV_SEED,
ALG_PARAMS,
ENV_PARAMS,
EVAL_PARAMS,
)
from lac import LAC
# Cuda Settings
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
def get_distrubance_function(env_name):
"""Retrieve disturbance function for a given environment.
Args:
env_name (str): Environment you want to use.
Raises:
NameError: If disturbance does not exist for the given environment.
Returns:
object: Disturbance function.
"""
if "oscillator" in env_name:
disturbance_step = oscillator_disturber
elif "Ex3_EKF" in env_name:
disturbance_step = Ex3_EKF_disturber
elif "Ex4_EKF" in env_name:
disturbance_step = Ex4_EKF_disturber
else:
print("no disturber designed for " + env_name)
raise NameError
return disturbance_step
def oscillator_disturber(time, s, action, env, eval_params, disturber=None):
"""Disturbance function used for evaluating the oscillator.
"""
d = np.zeros_like(action)
s_, r, done, info = env.step(action + d)
done = False
return s_, r, done, info
def Ex3_EKF_disturber(time, s, action, env, eval_params, disturber=None):
"""Disturbance function used for evaluating the Ex3_EKF environment.
"""
d = np.zeros_like(action)
s_, r, done, info = env.step(action + d)
done = False
return s_, r, done, info
def Ex4_EKF_disturber(time, s, action, env, eval_params, disturber=None):
"""Disturbance function used for evaluating the Ex3_EKF environment.
"""
d = np.zeros_like(action)
s_, r, done, info = env.step(action + d)
done = False
return s_, r, done, info
def dynamic(policy_path, env_name, env_params, alg_params, eval_params):
"""Performs dynamic robustness evaluation.
Args:
policy_path (str): Log path.
env_name (str): The gym environment you want to use.
alg_params (dict): Dictionary containing the algorithm parameters.
"""
# Retrieve environment
env = get_env_from_name(env_name, ENV_SEED)
# Get trained policy
s_dim = env.observation_space.shape[0]
a_dim = env.action_space.shape[0]
policy = LAC(a_dim, s_dim)
# Configure logger
log_path = policy_path + "/eval/dynamic/" + eval_params["additional_description"]
eval_params.update({"magnitude": 0})
logger.configure(dir=log_path, format_strs=["csv"])
# Evaluate policy results
_, paths = evaluation(policy_path, env_name, env, env_params, eval_params, policy)
max_len = 0
print(len(paths))
for path in paths["s"]:
path_length = len(path)
if path_length > max_len:
max_len = path_length
average_path = np.average(np.array(paths["s"]), axis=0)
std_path = np.std(np.array(paths["s"]), axis=0)
for i in range(max_len):
logger.logkv("average_path", average_path[i])
logger.logkv("std_path", std_path[i])
logger.logkv("reference", paths["reference"][0][i])
logger.dumpkvs()
if eval_params["directly_show"]:
fig = plt.figure(figsize=(9, 6))
ax = fig.add_subplot(111)
if eval_params["plot_average"]:
t = range(max_len)
ax.plot(t, average_path, color="red")
ax.fill_between(
t,
average_path - std_path,
average_path + std_path,
color="red",
alpha=0.1,
)
plt.show()
else:
for path in paths["s"]:
path_length = len(path)
print(path_length)
t = range(path_length)
path = np.array(path)
# Ex3_EKF
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(t, path, color="red")
plt.show()
ax.plot(t, np.array(path), color="blue", label="0.1")
plt.show()
def evaluation(
policy_path, env_name, env, env_params, eval_params, policy, disturber=None
):
# Retrieve disturber and action space dimention
disturbance_step = get_distrubance_function(env_name)
a_dim = env.action_space.shape[0]
a_upperbound = env.action_space.high
a_lowerbound = env.action_space.low
# Training setting
total_cost = []
death_rates = []
trial_list = os.listdir(policy_path)
episode_length = []
cost_paths = []
value_paths = []
state_paths = []
ref_paths = []
# Evalute policy in several rollouts
print(trial_list)
# Check if agent is present
if len(trial_list) == 1:
print(
"The agent you specified for evaluation does not exist please check the "
"'eval_list' parameter."
)
sys.exit(0)
# Loop through agents
for trial in trial_list:
if trial == "eval":
continue
if trial not in eval_params["trials_for_eval"]:
continue
success_load = policy.restore(os.path.join(policy_path, trial) + "/policy")
if not success_load:
continue
die_count = 0
seed_average_cost = []
for i in range(
int(np.ceil(eval_params["num_of_paths"] / (len(trial_list) - 1)))
):
path = []
state_path = []
value_path = []
ref_path = []
cost = 0
s = env.reset()
global initial_pos
initial_pos = np.random.uniform(0.0, np.pi, size=[a_dim])
for j in range(env_params["max_ep_steps"]):
if env_params["eval_render"]:
env.render()
a = policy.choose_action(s, True)
action = a_lowerbound + (a + 1.0) * (a_upperbound - a_lowerbound) / 2
s_, r, done, info = disturbance_step(j, s, action, env, eval_params)
path.append(r)
cost += r
if "reference" in info.keys():
ref_path.append(info["reference"])
if "state_of_interest" in info.keys():
state_path.append(info["state_of_interest"])
if j == env_params["max_ep_steps"] - 1:
done = True
s = s_
if done:
seed_average_cost.append(cost)
episode_length.append(j)
if j < env_params["max_ep_steps"] - 1:
die_count += 1
break
cost_paths.append(path)
value_paths.append(value_path)
state_paths.append(state_path)
ref_paths.append(ref_path)
death_rates.append(die_count / (i + 1) * 100)
total_cost.append(np.mean(seed_average_cost))
total_cost_std = np.std(total_cost, axis=0)
total_cost_mean = np.average(total_cost)
death_rate = np.mean(death_rates)
death_rate_std = np.std(death_rates, axis=0)
average_length = np.average(episode_length)
diagnostic = {
"return": total_cost_mean,
"return_std": total_cost_std,
"death_rate": death_rate,
"death_rate_std": death_rate_std,
"average_length": average_length,
}
path_dict = {"c": cost_paths, "v": value_paths}
if "reference" in info.keys():
path_dict.update({"reference": ref_paths})
if "state_of_interest" in info.keys():
path_dict.update({"s": state_paths})
return diagnostic, path_dict
###############################################
# Main function ###############################
###############################################
if __name__ == "__main__":
# Parse Arguments
parser = argparse.ArgumentParser(
description="Evaluate the LAC agent in a given environment."
)
parser.add_argument(
"--model-name",
type=str,
default=EVAL_PARAMS["eval_list"],
help="The name of the model you want to evaluate.",
)
args = parser.parse_args()
# Evaluate robustness
eval_agents = (
[args.model_name] if not isinstance(args.model_name, list) else args.model_name
)
for name in eval_agents:
dirname = os.path.dirname(__file__)
LOG_PATH = os.path.abspath(
os.path.join(dirname, "../log/" + ENV_NAME + "/" + name)
)
print("evaluating " + name)
dynamic(LOG_PATH, ENV_NAME, ENV_PARAMS, ALG_PARAMS, EVAL_PARAMS)
tf.reset_default_graph()
| [
"rick.staa@outlook.com"
] | rick.staa@outlook.com |
b46c88b1f0e084bdb65d0edb9ad68778c80ce52c | a00fcfa8158316fceff0dc9b5d936bba96cca4c3 | /smodels-database/13TeV/CMS/CMS-SUS-16-033/validation/T2ttoff_2EqMassAx_EqMassBy.py | 4bd6a1a54e8d5b543aab334b97dc2c73b4d1fd0f | [] | no_license | andlessa/stopsEFT | 414fac7493c385c5b6d9fda2f17f9ef4658d7884 | 9fae9ef9c96432fecadd7abbb3578b154edc9282 | refs/heads/master | 2020-06-15T06:59:20.544277 | 2019-11-12T19:48:55 | 2019-11-12T19:48:55 | 195,229,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169,024 | py | validationData = [{'slhafile': 'T2ttoff_799_554_799_554.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_849_591_849_591.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_613_394_613_394.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_492_360_492_360.slha', 'axes': {'x': 492.372239304, 'y': 360.171174024}, 't': 0.13906956397703965, 'signal': 573.58568, 'UL': 371.98200669978934, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_554_419_554_419.slha', 'axes': {'x': 554.541749485, 'y': 419.251024935}, 't': 0.13906956397703965, 'signal': 290.736874, 'UL': 291.6510157311486, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_482_369_482_369.slha', 'axes': {'x': 482.913919761, 'y': 369.688774973}, 't': 0.13906956397703965, 'signal': 637.69704, 'UL': 426.525956896552, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_432_228_432_228.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_418_313_418_313.slha', 'axes': {'x': 418.042032568, 'y': 313.328238619}, 't': 0.13906956397703965, 'signal': 1401.00535, 'UL': 581.9795268714561, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_659_411_659_411.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_217_89_217_89.slha', 'axes': {'x': 217.338603259, 'y': 89.5530444531}, 't': 0.13906956397703965, 'signal': 38336.6973, 'UL': 9229.870966875593, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, 
{'slhafile': 'T2ttoff_505_360_505_360.slha', 'axes': {'x': 505.64166907, 'y': 360.016382935}, 't': 0.13906956397703965, 'signal': 494.350238, 'UL': 315.8367670119845, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_488_364_488_364.slha', 'axes': {'x': 488.318673785, 'y': 364.250145859}, 't': 0.13906956397703965, 'signal': 601.8064199999999, 'UL': 394.4203295386278, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_295_75_295_75.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_707_561_707_561.slha', 'axes': {'x': 707.892957846, 'y': 561.00792771}, 't': 0.13906956397703965, 'signal': 69.19396379999999, 'UL': 165.72814841403357, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_375_94_375_94.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_641_365_641_365.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_476_271_476_271.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_773_445_773_445.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_486_365_486_365.slha', 'axes': {'x': 486.967485279, 'y': 365.609803138}, 't': 0.13906956397703965, 'signal': 611.71796, 'UL': 401.4552549261084, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_192_53_192_53.slha', 'axes': {'x': 192.334196446, 'y': 53.8946612454}, 't': 0.13906956397703965, 'signal': 68431.7936, 'UL': 26206.001066422887, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_431_189_431_189.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_605_365_605_365.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_478_293_478_293.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_395_74_395_74.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_733_537_733_537.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_673_497_673_497.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_521_198_521_198.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_433_297_433_297.slha', 'axes': {'x': 
433.538056714, 'y': 297.735090868}, 't': 0.13906956397703965, 'signal': 1157.87102, 'UL': 487.2911715597711, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_221_85_221_85.slha', 'axes': {'x': 221.392168777, 'y': 85.4740726181}, 't': 0.13906956397703965, 'signal': 35007.7382, 'UL': 9233.888972073895, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_735_601_735_601.slha', 'axes': {'x': 735.645526005, 'y': 601.92920678}, 't': 0.13906956397703965, 'signal': 55.1195783, 'UL': 188.1315505450605, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_341_207_341_207.slha', 'axes': {'x': 341.677623622, 'y': 207.712746274}, 't': 0.13906956397703965, 'signal': 4057.3080600000003, 'UL': 1036.5515125944473, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_724_612_724_612.slha', 'axes': {'x': 724.836017956, 'y': 612.806465006}, 't': 0.13906956397703965, 'signal': 59.77347999999999, 'UL': 212.91389013022783, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_791_528_791_528.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_781_589_781_589.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_298_123_298_123.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_434_237_434_237.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_737_533_737_533.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_693_575_693_575.slha', 'axes': {'x': 693.833409554, 'y': 575.155596284}, 't': 0.13906956397703965, 'signal': 78.17204749999999, 'UL': 202.59363748525328, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_614_481_614_481.slha', 'axes': {'x': 614.008882654, 'y': 481.050190402}, 't': 0.13906956397703965, 'signal': 162.257294, 'UL': 251.50188778084825, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_769_500_769_500.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_498_221_498_221.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_752_568_752_568.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_428_192_428_192.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_649_421_649_421.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_368_228_368_228.slha', 'axes': {'x': 368.464218504, 'y': 228.379964513}, 't': 0.13906956397703965, 'signal': 2742.36737, 'UL': 775.5488780120256, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_456_213_456_213.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_561_309_561_309.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_349_121_349_121.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_921_605_921_605.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_438_182_438_182.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_686_582_686_582.slha', 'axes': {'x': 686.803635408, 'y': 582.229430571}, 't': 0.13906956397703965, 'signal': 82.9667974, 'UL': 237.65226393120741, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_423_96_423_96.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_804_566_804_566.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_607_487_607_487.slha', 'axes': {'x': 607.252940124, 'y': 487.848476793}, 't': 0.13906956397703965, 'signal': 173.648929, 'UL': 284.35407597193836, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_567_467_567_467.slha', 'axes': {'x': 567.385459744, 'y': 467.146323647}, 't': 0.13906956397703965, 'signal': 255.050461, 'UL': 305.3744065240899, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_629_526_629_526.slha', 'axes': {'x': 629.554969925, 'y': 526.226174558}, 't': 0.13906956397703965, 'signal': 139.946966, 'UL': 297.4010915386934, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_650_318_650_318.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_448_343_448_343.slha', 'axes': {'x': 448.451193405, 'y': 343.547992713}, 't': 0.13906956397703965, 'signal': 961.671599, 'UL': 
482.28599578440236, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_510_236_510_236.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_910_616_910_616.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_639_431_639_431.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_701_368_701_368.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_806_614_806_614.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_619_300_619_300.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_653_366_653_366.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_391_279_391_279.slha', 'axes': {'x': 391.686437248, 'y': 279.029512689}, 't': 0.13906956397703965, 'signal': 1980.72474, 'UL': 710.0835998264906, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_439_133_439_133.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_616_478_616_478.slha', 'axes': {'x': 616.711259666, 'y': 478.330875845}, 't': 0.13906956397703965, 'signal': 157.938323, 'UL': 231.84818370779763, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_500_364_500_364.slha', 'axes': {'x': 500.955152972, 'y': 364.732272459}, 't': 0.13906956397703965, 'signal': 518.924508, 'UL': 322.9554119199214, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_633_386_633_386.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_695_581_695_581.slha', 'axes': {'x': 695.778045625, 'y': 581.227053633}, 't': 0.13906956397703965, 'signal': 76.7977214, 'UL': 215.2082914031921, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_922_597_922_597.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_284_177_284_177.slha', 'axes': {'x': 284.644315904, 'y': 177.888396258}, 't': 0.13906956397703965, 'signal': 10248.307499999999, 'UL': 1930.9369579945796, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_232_39_232_39.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_915_611_915_611.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_420_310_420_310.slha', 'axes': {'x': 420.74440958, 'y': 310.608924062}, 't': 0.13906956397703965, 'signal': 1352.1475599999999, 'UL': 564.9345342722372, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_976_643_976_643.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_717_553_717_553.slha', 'axes': {'x': 717.632171069, 'y': 553.811800642}, 't': 0.13906956397703965, 'signal': 55.5033319, 'UL': 153.02568014083553, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_798_521_798_521.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_713_406_713_406.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_395_90_395_90.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_659_556_659_556.slha', 'axes': {'x': 659.964130763, 'y': 556.445928652}, 't': 0.13906956397703965, 'signal': 105.51727, 'UL': 224.69116378901757, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_377_197_377_197.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_531_302_531_302.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_393_277_393_277.slha', 'axes': {'x': 393.037625754, 'y': 277.669855411}, 't': 0.13906956397703965, 'signal': 1944.4507499999997, 'UL': 703.8661049044169, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_754_649_754_649.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_744_522_744_522.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_335_65_335_65.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_451_168_451_168.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_312_176_312_176.slha', 'axes': {'x': 312.61965129, 'y': 176.133334901}, 't': 0.13906956397703965, 'signal': 6409.11229, 'UL': 1456.951739757299, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_455_336_455_336.slha', 'axes': {'x': 455.207135936, 'y': 336.749706321}, 't': 0.13906956397703965, 'signal': 889.292655, 'UL': 465.1794135499501, 'condition': 0.0, 
'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_599_320_599_320.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_694_374_694_374.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_548_285_548_285.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_520_314_520_314.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_234_133_234_133.slha', 'axes': {'x': 234.235879036, 'y': 133.369371331}, 't': 0.13906956397703965, 'signal': 26644.102799999997, 'UL': 4130.22867660343, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_188_57_188_57.slha', 'axes': {'x': 188.280630927, 'y': 57.9736330803}, 't': 0.13906956397703965, 'signal': 75377.37479999999, 'UL': 26513.387905767537, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_668_425_668_425.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_814_505_814_505.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_452_339_452_339.slha', 'axes': {'x': 452.504758923, 'y': 339.469020878}, 't': 0.13906956397703965, 'signal': 914.960915, 'UL': 471.83105375491937, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_759_561_759_561.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_898_628_898_628.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_600_494_600_494.slha', 'axes': {'x': 600.496997593, 'y': 494.646763185}, 't': 0.13906956397703965, 'signal': 184.76283800000002, 'UL': 309.88057786463264, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_284_117_284_117.slha', 'axes': {'x': 284.073188411, 'y': 117.227141757}, 't': 0.13906956397703965, 'signal': 9608.977579999999, 'UL': 3580.7506427970657, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_339_148_339_148.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_741_596_741_596.slha', 'axes': {'x': 741.050280029, 'y': 596.490577666}, 't': 0.13906956397703965, 'signal': 52.620677300000004, 'UL': 152.2621301141019, 'condition': 0.0, 'dataset': None, 
'kfactor': 1.0}, {'slhafile': 'T2ttoff_689_491_689_491.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_250_117_250_117.slha', 'axes': {'x': 250.450141109, 'y': 117.053483991}, 't': 0.13906956397703965, 'signal': 19188.547599999998, 'UL': 4663.443603713945, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_417_314_417_314.slha', 'axes': {'x': 417.135250373, 'y': 314.240704204}, 't': 0.13906956397703965, 'signal': 1409.62589, 'UL': 587.6622764864629, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_361_235_361_235.slha', 'axes': {'x': 361.434444358, 'y': 235.4537988}, 't': 0.13906956397703965, 'signal': 3023.16645, 'UL': 825.1493645320198, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_788_531_788_531.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_459_261_459_261.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_562_257_562_257.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_167_18_167_18.slha', 'axes': {'x': 167.329789632, 'y': 18.2362780378}, 't': 0.13906956397703965, 'signal': 130443.52599999998, 'UL': 126587.48667737991, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_527_243_527_243.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_666_353_666_353.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_646_373_646_373.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_809_631_809_631.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_685_384_685_384.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_450_220_450_220.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_719_450_719_450.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_820_549_820_549.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_835_702_835_702.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_706_474_706_474.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_731_489_731_489.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_831_706_831_706.slha', 'error': 
'no results'}, {'slhafile': 'T2ttoff_849_519_849_519.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_584_449_584_449.slha', 'axes': {'x': 584.950910323, 'y': 449.470779029}, 't': 0.13906956397703965, 'signal': 215.50596099999999, 'UL': 241.02652456722407, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_286_33_286_33.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_328_42_328_42.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_794_560_794_560.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_426_234_426_234.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_235_92_235_92.slha', 'axes': {'x': 235.973284035, 'y': 92.0276565672}, 't': 0.13906956397703965, 'signal': 25695.9699, 'UL': 7335.5920695636005, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_474_296_474_296.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_532_213_532_213.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_376_233_376_233.slha', 'axes': {'x': 376.140349977, 'y': 233.853528533}, 't': 0.13906956397703965, 'signal': 2453.58506, 'UL': 728.0387763558805, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_645_509_645_509.slha', 'axes': {'x': 645.769231998, 'y': 509.910287218}, 't': 0.13906956397703965, 'signal': 120.223017, 'UL': 229.41407200943775, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_371_202_371_202.slha', 'axes': {'x': 371.680978888, 'y': 202.990940139}, 't': 0.13906956397703965, 'signal': 2419.98959, 'UL': 2527.964368941178, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_661_555_661_555.slha', 'axes': {'x': 661.315319269, 'y': 555.086271374}, 't': 0.13906956397703965, 'signal': 104.15068, 'UL': 216.26581880966938, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_578_456_578_456.slha', 'axes': {'x': 578.194967792, 'y': 456.269065421}, 't': 0.13906956397703965, 'signal': 231.001784, 'UL': 265.9509458128077, 
'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_569_351_569_351.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_407_262_407_262.slha', 'axes': {'x': 407.900699321, 'y': 262.71362535}, 't': 0.13906956397703965, 'signal': 1593.70688, 'UL': 580.9824872026498, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_816_536_816_536.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_686_484_686_484.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_821_620_821_620.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_521_225_521_225.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_359_111_359_111.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_415_245_415_245.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_711_509_711_509.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_302_68_302_68.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_582_452_582_452.slha', 'axes': {'x': 582.24853331, 'y': 452.190093586}, 't': 0.13906956397703965, 'signal': 221.446484, 'UL': 251.2109730742319, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_756_646_756_646.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_570_429_570_429.slha', 'axes': {'x': 570.71550728, 'y': 429.371509289}, 't': 0.13906956397703965, 'signal': 246.07214299999998, 'UL': 263.40595170010783, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_174_71_174_71.slha', 'axes': {'x': 174.768745867, 'y': 71.5702058635}, 't': 0.13906956397703965, 'signal': 106792.49799999999, 'UL': 31594.771489392413, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_709_461_709_461.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_797_573_797_573.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_333_86_333_86.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_384_102_384_102.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_566_434_566_434.slha', 'axes': {'x': 566.028991182, 'y': 
434.087398814}, 't': 0.13906956397703965, 'signal': 258.733092, 'UL': 290.0127634867235, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_371_149_371_149.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_432_298_432_298.slha', 'axes': {'x': 432.905106134, 'y': 298.372008557}, 't': 0.13906956397703965, 'signal': 1167.62061, 'UL': 488.6448690274675, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_728_492_728_492.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_832_587_832_587.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_681_439_681_439.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_338_32_338_32.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_119_5_119_5.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_428_145_428_145.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_190_55_190_55.slha', 'axes': {'x': 190.98300794, 'y': 55.2543185237}, 't': 0.13906956397703965, 'signal': 70816.21149999999, 'UL': 26287.467483566434, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_855_564_855_564.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_486_183_486_183.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_374_235_374_235.slha', 'axes': {'x': 374.789161471, 'y': 235.213185812}, 't': 0.13906956397703965, 'signal': 2502.4031, 'UL': 736.4302759941522, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_492_228_492_228.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_147_46_147_46.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_795_473_795_473.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_235_35_235_35.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_180_66_180_66.slha', 'axes': {'x': 180.173499891, 'y': 66.1315767502}, 't': 0.13906956397703965, 'signal': 92452.4497, 'UL': 29488.688168085806, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_442_127_442_127.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_261_9_261_9.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_573_427_573_427.slha', 'axes': {'x': 573.058765328, 'y': 427.013564527}, 't': 0.13906956397703965, 'signal': 241.468436, 'UL': 253.36621168322782, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_514_398_514_398.slha', 'axes': {'x': 514.674269105, 'y': 398.548871789}, 't': 0.13906956397703965, 'signal': 445.427412, 'UL': 364.37145841025676, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_680_536_680_536.slha', 'axes': {'x': 680.231958354, 'y': 536.051069477}, 't': 0.13906956397703965, 'signal': 88.2468038, 'UL': 173.797986322703, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_352_47_352_47.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_817_553_817_553.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_673_542_673_542.slha', 'axes': {'x': 673.476015823, 'y': 542.849355869}, 't': 0.13906956397703965, 'signal': 93.33422449999999, 'UL': 188.47583623693404, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_708_512_708_512.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_691_585_691_585.slha', 'axes': {'x': 691.724480106, 'y': 585.306025468}, 't': 0.13906956397703965, 'signal': 79.2662766, 'UL': 242.5151154978787, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_640_380_640_380.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_267_53_267_53.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_407_79_407_79.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_374_146_374_146.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_498_367_498_367.slha', 'axes': {'x': 498.611894924, 'y': 367.090217222}, 't': 0.13906956397703965, 'signal': 533.3146720000001, 'UL': 326.48462095999014, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_409_160_409_160.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_680_491_680_491.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_775_628_775_628.slha', 'axes': {'x': 775.310054104, 'y': 628.005109302}, 't': 0.13906956397703965, 'signal': 39.5901905, 'UL': 128.0705406411446, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_258_108_258_108.slha', 'axes': {'x': 258.557272145, 'y': 108.895540321}, 't': 0.13906956397703965, 'signal': 16513.167599999997, 'UL': 5024.31425869724, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_743_593_743_593.slha', 'axes': {'x': 743.752657041, 'y': 593.77126311}, 't': 0.13906956397703965, 'signal': 51.400795099999996, 'UL': 134.79696229320723, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_272_98_272_98.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_590_417_590_417.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_511_401_511_401.slha', 'axes': {'x': 511.971892093, 'y': 401.268186345}, 't': 0.13906956397703965, 'signal': 459.88095899999996, 'UL': 373.2954855713955, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_870_599_870_599.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_257_56_257_56.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_662_408_662_408.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_639_516_639_516.slha', 'axes': {'x': 639.013289468, 'y': 516.70857361}, 't': 0.13906956397703965, 'signal': 128.284207, 'UL': 275.37851093506674, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_610_410_610_410.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_606_314_606_314.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_559_413_559_413.slha', 'axes': {'x': 559.946503509, 'y': 413.812395822}, 't': 0.13906956397703965, 'signal': 275.75206099999997, 'UL': 270.99928106134973, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_482_238_482_238.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_270_50_270_50.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_676_540_676_540.slha', 'axes': {'x': 676.178392836, 'y': 540.130041312}, 't': 0.13906956397703965, 'signal': 91.1290976, 'UL': 180.3452940646401, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_644_325_644_325.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_447_344_447_344.slha', 'axes': {'x': 447.100004899, 'y': 344.907649991}, 't': 0.13906956397703965, 'signal': 978.237759, 'UL': 489.76248179742873, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_768_603_768_603.slha', 'axes': {'x': 768.149287442, 'y': 603.265619302}, 't': 0.13906956397703965, 'signal': 36.1320789, 'UL': 112.67931339175033, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_567_302_567_302.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_443_278_443_278.slha', 'axes': {'x': 443.05297339, 'y': 278.4806567}, 't': 0.13906956397703965, 'signal': 939.049505, 'UL': 482.9110474738677, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_268_45_268_45.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_546_374_546_374.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_678_537_678_537.slha', 'axes': {'x': 678.880769848, 'y': 537.410726756}, 't': 0.13906956397703965, 'signal': 88.8832249, 'UL': 171.983265582656, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_601_420_601_420.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_355_166_355_166.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_316_172_316_172.slha', 'axes': {'x': 316.673216808, 'y': 172.054363066}, 't': 0.13906956397703965, 'signal': 5996.87398, 'UL': 1403.410326204495, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_544_377_544_377.slha', 'axes': {'x': 544.087206135, 'y': 377.38829402}, 't': 0.13906956397703965, 'signal': 295.089096, 'UL': 301.0423718160519, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_305_156_305_156.slha', 'axes': {'x': 
305.733638342, 'y': 156.666893397}, 't': 0.13906956397703965, 'signal': 7140.157069999999, 'UL': 1725.6253910849455, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_367_204_367_204.slha', 'axes': {'x': 367.277298831, 'y': 204.29992871}, 't': 0.13906956397703965, 'signal': 2580.33445, 'UL': 854.0714835996635, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_640_515_640_515.slha', 'axes': {'x': 640.364477974, 'y': 515.348916331}, 't': 0.13906956397703965, 'signal': 126.587468, 'UL': 267.11298566817834, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_477_142_477_142.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_621_512_621_512.slha', 'axes': {'x': 621.729797198, 'y': 512.874304217}, 't': 0.13906956397703965, 'signal': 150.990886, 'UL': 269.0970442148264, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_883_586_883_586.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_662_553_662_553.slha', 'axes': {'x': 662.666507775, 'y': 553.726614096}, 't': 0.13906956397703965, 'signal': 102.829399, 'UL': 210.84999999999982, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_279_33_279_33.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_892_627_892_627.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_388_81_388_81.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_941_628_941_628.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_836_533_836_533.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_470_321_470_321.slha', 'axes': {'x': 470.070209502, 'y': 321.79347626}, 't': 0.13906956397703965, 'signal': 744.881184, 'UL': 364.9217132732976, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_904_622_904_622.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_631_503_631_503.slha', 'axes': {'x': 631.102829392, 'y': 503.442525168}, 't': 0.13906956397703965, 'signal': 137.837602, 'UL': 240.66431782788248, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_428_302_428_302.slha', 'axes': {'x': 428.851540616, 'y': 302.450980392}, 't': 0.13906956397703965, 'signal': 1229.90204, 'UL': 510.0903192514478, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_893_576_893_576.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_597_423_597_423.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_772_548_772_548.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_383_187_383_187.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_700_419_700_419.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_307_181_307_181.slha', 'axes': {'x': 307.214897266, 'y': 181.571964015}, 't': 0.13906956397703965, 'signal': 6965.21504, 'UL': 1542.3817469662383, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_161_32_161_32.slha', 'axes': {'x': 161.52641363, 'y': 32.1043092623}, 't': 0.13906956397703965, 'signal': 152620.926, 'UL': 86658.26492541359, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_486_378_486_378.slha', 'axes': {'x': 486.89560468, 'y': 378.879941033}, 't': 0.13906956397703965, 'signal': 612.181686, 'UL': 397.34665718490953, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_712_468_712_468.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_630_524_630_524.slha', 'axes': {'x': 630.906158431, 'y': 524.866517279}, 't': 0.13906956397703965, 'signal': 138.13096299999998, 'UL': 291.65403724016505, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_887_639_887_639.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_326_222_326_222.slha', 'axes': {'x': 326.814550055, 'y': 222.668976335}, 't': 0.13906956397703965, 'signal': 5107.000410000001, 'UL': 1105.5222181596587, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_401_68_401_68.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_560_360_560_360.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_557_364_557_364.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_742_578_742_578.slha', 'axes': {'x': 742.890729255, 'y': 578.538709972}, 't': 0.13906956397703965, 'signal': 44.7324597, 'UL': 118.33998991363549, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_824_495_824_495.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_398_272_398_272.slha', 'axes': {'x': 398.442379779, 'y': 272.231226298}, 't': 0.13906956397703965, 'signal': 1804.5194099999999, 'UL': 665.613664412244, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_418_202_418_202.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_801_518_801_518.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_890_579_890_579.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_736_600_736_600.slha', 'axes': {'x': 736.996714511, 'y': 600.569549501}, 't': 0.13906956397703965, 'signal': 54.5035247, 'UL': 178.98967589771397, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_464_327_464_327.slha', 'axes': {'x': 464.665455478, 'y': 327.232105373}, 't': 0.13906956397703965, 'signal': 790.7897459999999, 'UL': 397.6350215536003, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_678_391_678_391.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_240_126_240_126.slha', 'axes': {'x': 240.991821566, 'y': 126.571084939}, 't': 0.13906956397703965, 'signal': 23145.1079, 'UL': 4466.3935915318125, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_484_368_484_368.slha', 'axes': {'x': 484.265108267, 'y': 368.329117694}, 't': 0.13906956397703965, 'signal': 627.60903, 'UL': 419.0416465517241, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_762_558_762_558.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_154_39_154_39.slha', 'axes': {'x': 154.496639484, 'y': 39.1781435493}, 't': 0.13906956397703965, 'signal': 186549.46699999998, 'UL': 96394.71886549928, 
'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_460_331_460_331.slha', 'axes': {'x': 460.61188996, 'y': 331.311077208}, 't': 0.13906956397703965, 'signal': 831.0454659999999, 'UL': 430.7898413343265, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_272_155_272_155.slha', 'axes': {'x': 272.75217091, 'y': 155.431181755}, 't': 0.13906956397703965, 'signal': 12662.225199999999, 'UL': 2601.142179802954, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_510_259_510_259.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_354_242_354_242.slha', 'axes': {'x': 354.404670212, 'y': 242.527633087}, 't': 0.13906956397703965, 'signal': 3354.04551, 'UL': 882.7410361336947, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_225_81_225_81.slha', 'axes': {'x': 225.445734295, 'y': 81.3951007832}, 't': 0.13906956397703965, 'signal': 32018.3173, 'UL': 9446.83199114831, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_693_477_693_477.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_425_195_425_195.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_296_192_296_192.slha', 'axes': {'x': 296.405389217, 'y': 192.449222241}, 't': 0.13906956397703965, 'signal': 8322.13912, 'UL': 1622.6591811677613, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_703_565_703_565.slha', 'axes': {'x': 703.206441748, 'y': 565.723817235}, 't': 0.13906956397703965, 'signal': 71.8116733, 'UL': 178.35588498738426, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_559_273_559_273.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_699_471_699_471.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_355_114_355_114.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_173_72_173_72.slha', 'axes': {'x': 173.417557361, 'y': 72.9298631418}, 't': 0.13906956397703965, 'signal': 110799.845, 'UL': 32483.413521796087, 'condition': 0.0, 'dataset': None, 
'kfactor': 1.0}, {'slhafile': 'T2ttoff_431_299_431_299.slha', 'axes': {'x': 431.553917628, 'y': 299.731665836}, 't': 0.13906956397703965, 'signal': 1187.58132, 'UL': 495.072299691105, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_640_494_640_494.slha', 'axes': {'x': 640.475861587, 'y': 494.010746119}, 't': 0.13906956397703965, 'signal': 126.464897, 'UL': 184.93839212543102, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_170_22_170_22.slha', 'axes': {'x': 170.899445825, 'y': 22.672530213}, 't': 0.13906956397703965, 'signal': 118015.50899999999, 'UL': 89498.81200761622, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_461_158_461_158.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_749_571_749_571.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_842_695_842_695.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_664_342_664_342.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_309_10_309_10.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_558_362_558_362.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_550_370_550_370.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_372_98_372_98.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_629_441_629_441.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_297_191_297_191.slha', 'axes': {'x': 297.756577723, 'y': 191.089564963}, 't': 0.13906956397703965, 'signal': 8181.143949999999, 'UL': 1615.873675491457, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_551_319_551_319.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_434_297_434_297.slha', 'axes': {'x': 434.256294641, 'y': 297.012351279}, 't': 0.13906956397703965, 'signal': 1147.05686, 'UL': 485.9455106142728, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_699_577_699_577.slha', 'axes': {'x': 699.831611143, 'y': 577.148081798}, 't': 0.13906956397703965, 'signal': 73.9636449, 'UL': 191.11614144539237, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_298_72_298_72.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_789_480_789_480.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_311_177_311_177.slha', 'axes': {'x': 311.268462784, 'y': 177.49299218}, 't': 0.13906956397703965, 'signal': 6557.525079999999, 'UL': 1474.8107904245712, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_734_445_734_445.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_656_363_656_363.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_455_205_455_205.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_373_236_373_236.slha', 'axes': {'x': 373.437972965, 'y': 236.57284309}, 't': 0.13906956397703965, 'signal': 2550.3814500000003, 'UL': 744.7416840926628, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_593_275_593_275.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_112_12_112_12.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_526_294_526_294.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_453_338_453_338.slha', 'axes': {'x': 453.85594743, 'y': 338.1093636}, 't': 0.13906956397703965, 'signal': 904.5863079999999, 'UL': 469.299447711506, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_722_615_722_615.slha', 'axes': {'x': 722.133640944, 'y': 615.525779563}, 't': 0.13906956397703965, 'signal': 61.2298853, 'UL': 208.57482858239106, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_333_154_333_154.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_393_177_393_177.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_811_508_811_508.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_674_419_674_419.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_176_70_176_70.slha', 'axes': {'x': 176.119934373, 'y': 70.2105485852}, 't': 0.13906956397703965, 'signal': 103005.429, 'UL': 31114.31045605424, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_691_577_691_577.slha', 'axes': {'x': 691.490151505, 'y': 577.513541047}, 't': 0.13906956397703965, 'signal': 79.42882499999999, 'UL': 216.70092968663226, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_758_644_758_644.slha', 'axes': {'x': 758.907247764, 'y': 644.510722639}, 't': 0.13906956397703965, 'signal': 45.3610864, 'UL': 145.92204554351096, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_298_163_298_163.slha', 'axes': {'x': 298.703864196, 'y': 163.740727684}, 't': 0.13906956397703965, 'signal': 8052.6167399999995, 'UL': 1827.6573833088673, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_337_83_337_83.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_760_459_760_459.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_333_215_333_215.slha', 'axes': {'x': 333.570492585, 'y': 215.870689944}, 't': 0.13906956397703965, 'signal': 4576.29643, 'UL': 1072.9157290250146, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_124_0_124_0.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_116_8_116_8.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_268_159_268_159.slha', 'axes': {'x': 268.698605392, 'y': 159.51015359}, 't': 0.13906956397703965, 'signal': 13674.6186, 'UL': 2605.8513101767207, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_720_616_720_616.slha', 'axes': {'x': 720.782452438, 'y': 616.885436841}, 't': 0.13906956397703965, 'signal': 61.9563114, 'UL': 208.31802202189584, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_254_15_254_15.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_481_289_481_289.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_536_436_536_436.slha', 'axes': {'x': 536.976298906, 'y': 436.926569553}, 't': 0.13906956397703965, 'signal': 349.480149, 'UL': 401.1005047759221, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_255_111_255_111.slha', 
'axes': {'x': 255.854895133, 'y': 111.614854878}, 't': 0.13906956397703965, 'signal': 17397.1322, 'UL': 4843.06942120478, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_377_232_377_232.slha', 'axes': {'x': 377.491538484, 'y': 232.493871255}, 't': 0.13906956397703965, 'signal': 2404.6786199999997, 'UL': 717.2316800505564, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_241_29_241_29.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_468_303_468_303.slha', 'axes': {'x': 468.311531577, 'y': 303.20756603}, 't': 0.13906956397703965, 'signal': 690.54293, 'UL': 387.0076437532082, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_652_353_652_353.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_877_592_877_592.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_280_40_280_40.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_727_610_727_610.slha', 'axes': {'x': 727.538394968, 'y': 610.087150449}, 't': 0.13906956397703965, 'signal': 58.358218699999995, 'UL': 217.250993702026, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_323_76_323_76.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_182_63_182_63.slha', 'axes': {'x': 182.875876903, 'y': 63.4122621936}, 't': 0.13906956397703965, 'signal': 86559.1581, 'UL': 28417.903903882267, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_761_505_761_505.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_618_476_618_476.slha', 'axes': {'x': 618.062448172, 'y': 476.971218567}, 't': 0.13906956397703965, 'signal': 155.79253, 'UL': 229.77366730746124, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_571_262_571_262.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_721_499_721_499.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_406_264_406_264.slha', 'axes': {'x': 406.549510815, 'y': 264.073282628}, 't': 0.13906956397703965, 'signal': 1623.50506, 'UL': 593.6254019873534, 
'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_624_382_624_382.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_574_460_574_460.slha', 'axes': {'x': 574.141402274, 'y': 460.348037256}, 't': 0.13906956397703965, 'signal': 238.734251, 'UL': 280.82866666666655, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_705_563_705_563.slha', 'axes': {'x': 705.549699797, 'y': 563.365872473}, 't': 0.13906956397703965, 'signal': 70.2928057, 'UL': 172.1890593836357, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_677_538_677_538.slha', 'axes': {'x': 677.529581342, 'y': 538.770384034}, 't': 0.13906956397703965, 'signal': 89.96962719999999, 'UL': 176.34028174936947, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_601_368_601_368.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_347_73_347_73.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_545_274_545_274.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_334_35_334_35.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_157_27_157_27.slha', 'axes': {'x': 157.87147009, 'y': 27.7538789859}, 't': 0.13906956397703965, 'signal': 169776.73799999998, 'UL': 102351.35671891544, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_727_540_727_540.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_323_148_323_148.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_580_453_580_453.slha', 'axes': {'x': 580.897344804, 'y': 453.549750864}, 't': 0.13906956397703965, 'signal': 224.657388, 'UL': 255.9334518331067, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_728_608_728_608.slha', 'axes': {'x': 728.889583474, 'y': 608.727493171}, 't': 0.13906956397703965, 'signal': 57.6561498, 'UL': 217.3697358483204, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_282_180_282_180.slha', 'axes': {'x': 282.301057856, 'y': 180.24634102}, 't': 0.13906956397703965, 'signal': 
10691.193500000001, 'UL': 1924.621653869318, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_321_48_321_48.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_665_551_665_551.slha', 'axes': {'x': 665.368884787, 'y': 551.007299539}, 't': 0.13906956397703965, 'signal': 100.17250299999999, 'UL': 206.30586236933817, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_698_578_698_578.slha', 'axes': {'x': 698.480422637, 'y': 578.507739077}, 't': 0.13906956397703965, 'signal': 74.9054011, 'UL': 196.2931578457287, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_875_544_875_544.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_115_9_115_9.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_765_555_765_555.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_493_327_493_327.slha', 'axes': {'x': 493.570089763, 'y': 327.93447536}, 't': 0.13906956397703965, 'signal': 512.819247, 'UL': 325.83156522131895, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_832_608_832_608.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_404_116_404_116.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_609_485_609_485.slha', 'axes': {'x': 609.955317136, 'y': 485.129162237}, 't': 0.13906956397703965, 'signal': 168.921415, 'UL': 272.84228248358033, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_658_557_658_557.slha', 'axes': {'x': 658.612942257, 'y': 557.80558593}, 't': 0.13906956397703965, 'signal': 106.880421, 'UL': 233.76461222831603, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_360_60_360_60.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_448_172_448_172.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_363_36_363_36.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_399_174_399_174.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_655_414_655_414.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_485_366_485_366.slha', 'axes': {'x': 485.616296773, 'y': 366.969460416}, 't': 0.13906956397703965, 'signal': 621.4795379999999, 'UL': 410.7119790640394, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_703_416_703_416.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_523_246_523_246.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_288_139_288_139.slha', 'axes': {'x': 288.966432983, 'y': 139.115294415}, 't': 0.13906956397703965, 'signal': 9481.74344, 'UL': 2607.7648919468184, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_155_30_155_30.slha', 'axes': {'x': 155.169093078, 'y': 30.4731935426}, 't': 0.13906956397703965, 'signal': 183998.05699999997, 'UL': 108845.69988172899, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_295_105_295_105.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_238_32_238_32.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_477_375_477_375.slha', 'axes': {'x': 477.509165737, 'y': 375.127404086}, 't': 0.13906956397703965, 'signal': 680.0487579999999, 'UL': 443.92108470444856, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_305_182_305_182.slha', 'axes': {'x': 305.86370876, 'y': 182.931621293}, 't': 0.13906956397703965, 'signal': 7123.10858, 'UL': 1556.0528721614808, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_623_448_623_448.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_451_340_451_340.slha', 'axes': {'x': 451.153570417, 'y': 340.828678156}, 't': 0.13906956397703965, 'signal': 929.857588, 'UL': 474.18413207845265, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_880_589_880_589.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_813_556_813_556.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_213_14_213_14.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_372_237_372_237.slha', 'axes': {'x': 372.086784459, 'y': 237.932500368}, 't': 0.13906956397703965, 
'signal': 2601.6891299999997, 'UL': 752.3157423984532, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_549_219_549_219.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_647_359_647_359.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_508_211_508_211.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_496_173_496_173.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_325_45_325_45.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_165_19_165_19.slha', 'axes': {'x': 165.978601126, 'y': 19.5959353161}, 't': 0.13906956397703965, 'signal': 135483.656, 'UL': 88740.79667765129, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_206_100_206_100.slha', 'axes': {'x': 206.52909521, 'y': 100.43030268}, 't': 0.13906956397703965, 'signal': 48926.0908, 'UL': 8575.195221071164, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_508_325_508_325.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_766_503_766_503.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_203_103_203_103.slha', 'axes': {'x': 203.826718198, 'y': 103.149617236}, 't': 0.13906956397703965, 'signal': 52127.8451, 'UL': 8616.711857268605, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_934_635_934_635.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_373_113_373_113.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_507_263_507_263.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_861_609_861_609.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_303_17_303_17.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_245_122_245_122.slha', 'axes': {'x': 245.045387084, 'y': 122.492113104}, 't': 0.13906956397703965, 'signal': 21288.0155, 'UL': 4579.369691675448, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_865_554_865_554.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_615_304_615_304.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_509_403_509_403.slha', 'axes': 
{'x': 509.269515081, 'y': 403.987500902}, 't': 0.13906956397703965, 'signal': 474.160114, 'UL': 379.83532587062433, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_750_520_750_520.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_276_95_276_95.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_283_37_283_37.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_459_288_459_288.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_788_565_788_565.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_262_51_262_51.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_548_322_548_322.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_619_515_619_515.slha', 'axes': {'x': 619.386539149, 'y': 515.232248979}, 't': 0.13906956397703965, 'signal': 153.71105400000002, 'UL': 272.8022764227643, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_330_218_330_218.slha', 'axes': {'x': 330.868115573, 'y': 218.590004501}, 't': 0.13906956397703965, 'signal': 4778.86142, 'UL': 1083.7046659942425, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_628_527_628_527.slha', 'axes': {'x': 628.203781419, 'y': 527.585831836}, 't': 0.13906956397703965, 'signal': 141.864305, 'UL': 319.52009347591013, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_286_141_286_141.slha', 'axes': {'x': 286.264055971, 'y': 141.834608972}, 't': 0.13906956397703965, 'signal': 9952.79689, 'UL': 2488.580336092074, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_483_176_483_176.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_826_543_826_543.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_168_25_168_25.slha', 'axes': {'x': 168.556187776, 'y': 25.0304749754}, 't': 0.13906956397703965, 'signal': 126052.431, 'UL': 70582.71318311835, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_602_405_602_405.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_163_29_163_29.slha', 'axes': {'x': 163.869671679, 'y': 29.7463645}, 't': 0.13906956397703965, 'signal': 143803.438, 'UL': 79287.19172354885, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_615_479_615_479.slha', 'axes': {'x': 615.36007116, 'y': 479.690533124}, 't': 0.13906956397703965, 'signal': 160.08307599999998, 'UL': 241.31174480355634, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_397_122_397_122.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_356_63_356_63.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_413_106_413_106.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_356_240_356_240.slha', 'axes': {'x': 356.74792826, 'y': 240.169688325}, 't': 0.13906956397703965, 'signal': 3241.52569, 'UL': 864.2855981016462, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_471_299_471_299.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_365_104_365_104.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_879_640_879_640.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_701_575_701_575.slha', 'axes': {'x': 701.182799649, 'y': 575.78842452}, 't': 0.13906956397703965, 'signal': 73.0782524, 'UL': 183.7552140454158, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_282_145_282_145.slha', 'axes': {'x': 282.210490452, 'y': 145.913580807}, 't': 0.13906956397703965, 'signal': 10708.3858, 'UL': 2509.737228997291, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_641_513_641_513.slha', 'axes': {'x': 641.71566648, 'y': 513.989259053}, 't': 0.13906956397703965, 'signal': 124.99136299999999, 'UL': 257.87639646617015, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_558_415_558_415.slha', 'axes': {'x': 558.595315003, 'y': 415.1720531}, 't': 0.13906956397703965, 'signal': 279.63982999999996, 'UL': 275.81556633735886, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_827_491_827_491.slha', 
'error': 'no results'}, {'slhafile': 'T2ttoff_692_529_692_529.slha', 'axes': {'x': 692.373612883, 'y': 529.084891312}, 't': 0.13906956397703965, 'signal': 69.37781939999999, 'UL': 195.41880277758963, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_772_630_772_630.slha', 'axes': {'x': 772.966796056, 'y': 630.363054065}, 't': 0.13906956397703965, 'signal': 40.3749052, 'UL': 131.4989502676188, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_353_66_353_66.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_746_433_746_433.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_283_144_283_144.slha', 'axes': {'x': 283.561678958, 'y': 144.553923529}, 't': 0.13906956397703965, 'signal': 10444.8188, 'UL': 2496.194865132765, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_761_642_761_642.slha', 'axes': {'x': 761.250505812, 'y': 642.152777876}, 't': 0.13906956397703965, 'signal': 44.4375868, 'UL': 143.11044037787843, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_588_382_588_382.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_525_308_525_308.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_651_442_651_442.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_667_452_667_452.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_541_432_541_432.slha', 'axes': {'x': 541.029864424, 'y': 432.847597718}, 't': 0.13906956397703965, 'signal': 336.538387, 'UL': 369.56386489246677, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_619_451_619_451.slha', 'axes': {'x': 619.862880694, 'y': 451.56902201}, 't': 0.13906956397703965, 'signal': 136.687547, 'UL': 208.09681148624296, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_547_374_547_374.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_407_112_407_112.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_457_334_457_334.slha', 'axes': {'x': 457.909512948, 'y': 334.030391765}, 
't': 0.13906956397703965, 'signal': 859.258591, 'UL': 445.06633729883566, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_343_76_343_76.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_833_536_833_536.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_403_167_403_167.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_399_270_399_270.slha', 'axes': {'x': 399.793568285, 'y': 270.871569019}, 't': 0.13906956397703965, 'signal': 1770.53866, 'UL': 654.3536440894202, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_434_185_434_185.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_730_607_730_607.slha', 'axes': {'x': 730.24077198, 'y': 607.367835893}, 't': 0.13906956397703965, 'signal': 56.9586567, 'UL': 213.24818280334875, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_529_383_529_383.slha', 'axes': {'x': 529.537342672, 'y': 383.592641727}, 't': 0.13906956397703965, 'signal': 378.47074699999996, 'UL': 291.4304309219119, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_713_379_713_379.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_579_392_579_392.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_160_25_160_25.slha', 'axes': {'x': 160.573847102, 'y': 25.0345644293}, 't': 0.13906956397703965, 'signal': 156959.271, 'UL': 99192.64734390492, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_403_266_403_266.slha', 'axes': {'x': 403.847133803, 'y': 266.792597185}, 't': 0.13906956397703965, 'signal': 1685.05296, 'UL': 611.3511041691697, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_702_390_702_390.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_645_476_645_476.slha', 'axes': {'x': 645.121438881, 'y': 476.295931339}, 't': 0.13906956397703965, 'signal': 107.969119, 'UL': 249.72378358097836, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_301_120_301_120.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_443_216_443_216.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_554_445_554_445.slha', 'axes': {'x': 554.312700939, 'y': 445.877122625}, 't': 0.13906956397703965, 'signal': 291.452048, 'UL': 353.1022089138247, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_612_482_612_482.slha', 'axes': {'x': 612.657694148, 'y': 482.40984768}, 't': 0.13906956397703965, 'signal': 164.4757, 'UL': 259.22125871758396, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_320_151_320_151.slha', 'axes': {'x': 320.025124829, 'y': 151.510968737}, 't': 0.13906956397703965, 'signal': 5269.6016899999995, 'UL': 2859.0681585568736, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_674_446_674_446.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_463_328_463_328.slha', 'axes': {'x': 463.314266972, 'y': 328.591762652}, 't': 0.13906956397703965, 'signal': 804.154725, 'UL': 407.6201277450518, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_589_445_589_445.slha', 'axes': {'x': 589.004475841, 'y': 445.391807194}, 't': 0.13906956397703965, 'signal': 207.92412299999998, 'UL': 226.22465620806727, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_430_240_430_240.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_398_71_398_71.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_556_443_556_443.slha', 'axes': {'x': 556.655958988, 'y': 443.519177863}, 't': 0.13906956397703965, 'signal': 285.698713, 'UL': 337.29177754162845, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_344_142_344_142.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_388_281_388_281.slha', 'axes': {'x': 388.984060236, 'y': 281.748827246}, 't': 0.13906956397703965, 'signal': 2055.9438499999997, 'UL': 732.5596573487869, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_417_253_417_253.slha', 'axes': {'x': 417.794415204, 'y': 
253.75374737}, 't': 0.13906956397703965, 'signal': 1292.9746799999998, 'UL': 562.5425092533388, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_351_169_351_169.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_678_503_678_503.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_700_568_700_568.slha', 'axes': {'x': 700.8631837, 'y': 568.081761997}, 't': 0.13906956397703965, 'signal': 73.2816183, 'UL': 183.6019083860283, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_466_325_466_325.slha', 'axes': {'x': 466.016643984, 'y': 325.872448095}, 't': 0.13906956397703965, 'signal': 778.202249, 'UL': 388.09799069712983, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_793_627_793_627.slha', 'axes': {'x': 793.407845628, 'y': 627.992528632}, 't': 0.13906956397703965, 'signal': 29.6667465, 'UL': 119.35440196263839, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_551_448_551_448.slha', 'axes': {'x': 551.96944289, 'y': 448.235067388}, 't': 0.13906956397703965, 'signal': 299.113004, 'UL': 367.7315934054388, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_697_422_697_422.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_597_272_597_272.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_329_219_329_219.slha', 'axes': {'x': 329.516927067, 'y': 219.949661779}, 't': 0.13906956397703965, 'signal': 4886.80076, 'UL': 1089.9443957637266, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_231_96_231_96.slha', 'axes': {'x': 231.286767938, 'y': 96.7435460919}, 't': 0.13906956397703965, 'signal': 28360.765199999998, 'UL': 6871.56905083604, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_760_510_760_510.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_601_493_601_493.slha', 'axes': {'x': 601.848186099, 'y': 493.287105907}, 't': 0.13906956397703965, 'signal': 182.290036, 'UL': 306.73116857381467, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_708_385_708_385.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_468_323_468_323.slha', 'axes': {'x': 468.719020996, 'y': 323.153133538}, 't': 0.13906956397703965, 'signal': 753.3403589999999, 'UL': 369.5962303473492, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_326_144_326_144.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_421_240_421_240.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_479_190_479_190.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_687_432_687_432.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_735_434_735_434.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_621_348_621_348.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_478_373_478_373.slha', 'axes': {'x': 478.860354243, 'y': 373.767746807}, 't': 0.13906956397703965, 'signal': 669.311325, 'UL': 439.37533076037664, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_309_178_309_178.slha', 'axes': {'x': 309.917274278, 'y': 178.852649458}, 't': 0.13906956397703965, 'signal': 6663.79746, 'UL': 1493.1268247515814, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_236_130_236_130.slha', 'axes': {'x': 236.938256048, 'y': 130.650056774}, 't': 0.13906956397703965, 'signal': 25175.9937, 'UL': 4219.055687538839, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_327_93_327_93.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_770_632_770_632.slha', 'axes': {'x': 770.623538007, 'y': 632.720998827}, 't': 0.13906956397703965, 'signal': 41.1755293, 'UL': 134.9550208770623, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_937_632_937_632.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_586_448_586_448.slha', 'axes': {'x': 586.302098829, 'y': 448.111121751}, 't': 0.13906956397703965, 'signal': 212.53428799999998, 'UL': 236.1455714555886, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, 
{'slhafile': 'T2ttoff_927_642_927_642.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_697_579_697_579.slha', 'axes': {'x': 697.129234131, 'y': 579.867396355}, 't': 0.13906956397703965, 'signal': 75.8336034, 'UL': 205.11686035832568, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_368_101_368_101.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_291_22_291_22.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_685_408_685_408.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_689_481_689_481.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_251_115_251_115.slha', 'axes': {'x': 251.801329615, 'y': 115.693826713}, 't': 0.13906956397703965, 'signal': 18678.751800000002, 'UL': 4689.251899248426, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_500_352_500_352.slha', 'axes': {'x': 500.47937034, 'y': 352.013230354}, 't': 0.13906956397703965, 'signal': 521.568451, 'UL': 339.43498002572954, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_449_211_449_211.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_439_130_439_130.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_691_402_691_402.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_599_496_599_496.slha', 'axes': {'x': 599.145809087, 'y': 496.006420463}, 't': 0.13906956397703965, 'signal': 187.25465999999997, 'UL': 321.1299339132994, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_159_34_159_34.slha', 'axes': {'x': 159.183155582, 'y': 34.4622540246}, 't': 0.13906956397703965, 'signal': 163385.84499999997, 'UL': 90880.45444219702, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_690_586_690_586.slha', 'axes': {'x': 690.3732916, 'y': 586.665682747}, 't': 0.13906956397703965, 'signal': 80.2478607, 'UL': 251.64366621338087, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_652_503_652_503.slha', 'axes': {'x': 652.525174528, 'y': 503.112000826}, 't': 
0.13906956397703965, 'signal': 113.35785499999999, 'UL': 175.14080845013223, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_675_394_675_394.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_346_53_346_53.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_469_251_469_251.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_727_543_727_543.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_457_162_457_162.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_292_78_292_78.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_782_571_782_571.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_152_33_152_33.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_886_634_886_634.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_545_428_545_428.slha', 'axes': {'x': 545.083429942, 'y': 428.768625883}, 't': 0.13906956397703965, 'signal': 322.19929, 'UL': 339.16356496537196, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_496_356_496_356.slha', 'axes': {'x': 496.425804822, 'y': 356.092202189}, 't': 0.13906956397703965, 'signal': 547.29944, 'UL': 352.27461603388207, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_716_454_716_454.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_595_375_595_375.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_228_78_228_78.slha', 'axes': {'x': 228.148111308, 'y': 78.6757862266}, 't': 0.13906956397703965, 'signal': 30175.5804, 'UL': 10797.055024446481, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_352_118_352_118.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_767_452_767_452.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_384_136_384_136.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_632_438_632_438.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_275_152_275_152.slha', 'axes': {'x': 275.454547922, 'y': 152.711867198}, 't': 0.13906956397703965, 'signal': 12040.3037, 'UL': 2595.8666871921178, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_303_185_303_185.slha', 'axes': {'x': 303.161331747, 'y': 185.65093585}, 't': 0.13906956397703965, 'signal': 7465.9755399999995, 'UL': 1588.8241211101767, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_590_279_590_279.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_564_305_564_305.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_487_259_487_259.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_658_347_658_347.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_145_39_145_39.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_345_203_345_203.slha', 'axes': {'x': 345.73118914, 'y': 203.633774439}, 't': 0.13906956397703965, 'signal': 3811.71679, 'UL': 1010.9098575301017, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_408_212_408_212.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_521_391_521_391.slha', 'axes': {'x': 521.430211635, 'y': 391.750585397}, 't': 0.13906956397703965, 'signal': 414.616693, 'UL': 335.2727192208284, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_503_317_503_317.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_315_173_315_173.slha', 'axes': {'x': 315.322028302, 'y': 173.414020345}, 't': 0.13906956397703965, 'signal': 6130.07331, 'UL': 1412.457953262045, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_388_185_388_185.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_643_376_643_376.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_432_137_432_137.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_248_22_248_22.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_305_65_305_65.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_549_424_549_424.slha', 'axes': {'x': 549.136995461, 'y': 424.689654048}, 't': 0.13906956397703965, 'signal': 308.373235, 'UL': 313.49909919158983, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_233_94_233_94.slha', 'axes': {'x': 233.630025986, 'y': 94.3856013296}, 't': 0.13906956397703965, 'signal': 26985.988899999997, 'UL': 7112.55527300717, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_505_215_505_215.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_472_248_472_248.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_648_472_648_472.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_654_466_654_466.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_632_523_632_523.slha', 'axes': {'x': 632.257346937, 'y': 523.506860001}, 't': 0.13906956397703965, 'signal': 136.232724, 'UL': 287.13596199246166, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_628_466_628_466.slha', 'axes': {'x': 628.842222158, 'y': 466.066203849}, 't': 0.13906956397703965, 'signal': 126.50407899999999, 'UL': 208.0351050102127, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_493_254_493_254.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_282_88_282_88.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_325_224_325_224.slha', 'axes': {'x': 325.463361549, 'y': 224.028633614}, 't': 0.13906956397703965, 'signal': 5216.05202, 'UL': 1119.8238905816236, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_506_314_506_314.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_270_158_270_158.slha', 'axes': {'x': 270.049793898, 'y': 158.150496312}, 't': 0.13906956397703965, 'signal': 13322.9317, 'UL': 2611.936178861787, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_829_591_829_591.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_740_439_740_439.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_317_103_317_103.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_361_125_361_125.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_438_222_438_222.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_422_150_422_150.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_405_265_405_265.slha', 'axes': {'x': 405.198322309, 'y': 265.432939906}, 't': 0.13906956397703965, 'signal': 1652.49739, 'UL': 605.365251430292, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_378_231_378_231.slha', 'axes': {'x': 378.84272699, 'y': 231.134213977}, 't': 0.13906956397703965, 'signal': 2358.3951899999997, 'UL': 710.2803053242152, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_765_588_765_588.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_502_167_502_167.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_839_581_839_581.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_645_448_645_448.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_361_159_361_159.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_439_291_439_291.slha', 'axes': {'x': 439.661048665, 'y': 291.573722166}, 't': 0.13906956397703965, 'signal': 1076.24931, 'UL': 480.15731533101035, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_423_307_423_307.slha', 'axes': {'x': 423.446786592, 'y': 307.889609505}, 't': 0.13906956397703965, 'signal': 1305.22229, 'UL': 547.8908535897264, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_751_651_751_651.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_344_204_344_204.slha', 'axes': {'x': 344.380000634, 'y': 204.993431717}, 't': 0.13906956397703965, 'signal': 3890.64297, 'UL': 1019.1034060392112, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_382_88_382_88.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_640_454_640_454.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_181_64_181_64.slha', 'axes': {'x': 181.524688397, 'y': 64.7719194719}, 't': 0.13906956397703965, 'signal': 89741.7303, 'UL': 28973.069343820647, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_634_460_634_460.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_249_118_249_118.slha', 'axes': 
{'x': 249.098952603, 'y': 118.413141269}, 't': 0.13906956397703965, 'signal': 19594.0162, 'UL': 4646.572098294024, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_568_431_568_431.slha', 'axes': {'x': 568.372249231, 'y': 431.729454051}, 't': 0.13906956397703965, 'signal': 252.38413599999998, 'UL': 276.00269950738914, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_851_619_851_619.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_748_420_748_420.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_397_273_397_273.slha', 'axes': {'x': 397.091191273, 'y': 273.590883576}, 't': 0.13906956397703965, 'signal': 1838.46731, 'UL': 676.9847564713333, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_626_444_626_444.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_293_27_293_27.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_453_217_453_217.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_580_339_580_339.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_729_451_729_451.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_794_576_794_576.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_492_177_492_177.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_343_206_343_206.slha', 'axes': {'x': 343.028812128, 'y': 206.353088996}, 't': 0.13906956397703965, 'signal': 3974.4891199999997, 'UL': 1028.2402275735985, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_676_343_676_343.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_540_229_540_229.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_239_127_239_127.slha', 'axes': {'x': 239.64063306, 'y': 127.930742217}, 't': 0.13906956397703965, 'signal': 23791.6331, 'UL': 4387.327362719211, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_620_299_620_299.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_631_338_631_338.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_329_141_329_141.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_711_564_711_564.slha', 'axes': {'x': 711.992307697, 'y': 564.911166294}, 't': 0.13906956397703965, 'signal': 66.61156079999999, 'UL': 151.39260394809128, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_226_101_226_101.slha', 'axes': {'x': 226.60025184, 'y': 101.459435617}, 't': 0.13906956397703965, 'signal': 31240.4783, 'UL': 6462.091966275217, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_470_277_470_277.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_657_437_657_437.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_510_402_510_402.slha', 'axes': {'x': 510.620703587, 'y': 402.627843624}, 't': 0.13906956397703965, 'signal': 466.71047999999996, 'UL': 376.53522014391075, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_808_511_808_511.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_681_388_681_388.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_352_244_352_244.slha', 'axes': {'x': 352.061412163, 'y': 244.88557785}, 't': 0.13906956397703965, 'signal': 3476.48258, 'UL': 906.2329557362239, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_800_569_800_569.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_666_549_666_549.slha', 'axes': {'x': 666.720073293, 'y': 549.647642261}, 't': 0.13906956397703965, 'signal': 99.4617557, 'UL': 204.1006955736224, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_499_170_499_170.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_805_548_805_548.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_295_193_295_193.slha', 'axes': {'x': 295.054200711, 'y': 193.808879519}, 't': 0.13906956397703965, 'signal': 8525.072030000001, 'UL': 1627.472006285966, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_299_20_299_20.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_597_322_597_322.slha', 
'error': 'no results'}, {'slhafile': 'T2ttoff_866_573_866_573.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_591_442_591_442.slha', 'axes': {'x': 591.706852853, 'y': 442.672492638}, 't': 0.13906956397703965, 'signal': 202.13062100000002, 'UL': 216.30701335542716, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_356_131_356_131.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_707_568_707_568.slha', 'axes': {'x': 707.938742179, 'y': 568.990138128}, 't': 0.13906956397703965, 'signal': 69.1547547, 'UL': 162.07435606744951, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_276_151_276_151.slha', 'axes': {'x': 276.805736428, 'y': 151.35220992}, 't': 0.13906956397703965, 'signal': 11744.1467, 'UL': 2593.652524630542, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_405_215_405_215.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_947_622_947_622.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_178_67_178_67.slha', 'axes': {'x': 178.822311385, 'y': 67.4912340285}, 't': 0.13906956397703965, 'signal': 95789.62370000001, 'UL': 30043.96997883291, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_778_488_778_488.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_520_393_520_393.slha', 'axes': {'x': 520.079023129, 'y': 393.110242675}, 't': 0.13906956397703965, 'signal': 418.64415599999995, 'UL': 340.7589235560972, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_296_23_296_23.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_549_271_549_271.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_553_367_553_367.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_329_70_329_70.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_786_483_786_483.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_366_230_366_230.slha', 'axes': {'x': 366.120960455, 'y': 230.737909276}, 't': 0.13906956397703965, 'signal': 2837.01284, 'UL': 794.3573333333334, 
'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_449_342_449_342.slha', 'axes': {'x': 449.802381911, 'y': 342.188335435}, 't': 0.13906956397703965, 'signal': 945.158957, 'UL': 476.71866702778345, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_612_307_612_307.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_672_398_672_398.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_254_67_254_67.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_826_594_826_594.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_831_639_831_639.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_491_361_491_361.slha', 'axes': {'x': 491.021050798, 'y': 361.530831303}, 't': 0.13906956397703965, 'signal': 582.952405, 'UL': 380.46126746461914, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_366_53_366_53.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_689_579_689_579.slha', 'axes': {'x': 689.146893456, 'y': 579.871485809}, 't': 0.13906956397703965, 'signal': 81.1881667, 'UL': 224.58913602816702, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_217_110_217_110.slha', 'axes': {'x': 217.227219646, 'y': 110.891214666}, 't': 0.13906956397703965, 'signal': 38431.0777, 'UL': 6602.834224295531, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_313_174_313_174.slha', 'axes': {'x': 313.970839796, 'y': 174.773677623}, 't': 0.13906956397703965, 'signal': 6268.741819999999, 'UL': 1433.4305562897994, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_592_328_592_328.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_816_604_816_604.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_670_545_670_545.slha', 'axes': {'x': 670.773638811, 'y': 545.568670426}, 't': 0.13906956397703965, 'signal': 95.744665, 'UL': 194.41567613841187, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_452_268_452_268.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_189_56_189_56.slha', 'axes': {'x': 189.631819433, 'y': 56.613975802}, 't': 0.13906956397703965, 'signal': 72783.6623, 'UL': 26431.48332904494, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_488_231_488_231.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_844_626_844_626.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_716_402_716_402.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_809_611_809_611.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_440_230_440_230.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_817_501_817_501.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_196_49_196_49.slha', 'axes': {'x': 196.387761964, 'y': 49.8156894105}, 't': 0.13906956397703965, 'signal': 61929.0743, 'UL': 28851.511236512844, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_410_109_410_109.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_615_355_615_355.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_373_197_373_197.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_258_12_258_12.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_864_606_864_606.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_228_99_228_99.slha', 'axes': {'x': 228.943509889, 'y': 99.1014908542}, 't': 0.13906956397703965, 'signal': 29656.779199999997, 'UL': 6565.253296229063, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_751_469_751_469.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_677_442_677_442.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_835_584_835_584.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_318_52_318_52.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_516_304_516_304.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_803_617_803_617.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_555_264_555_264.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_912_607_912_607.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_853_516_853_516.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_625_294_625_294.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_278_149_278_149.slha', 'axes': {'x': 278.156924934, 'y': 149.992552642}, 't': 0.13906956397703965, 'signal': 11449.461899999998, 'UL': 2568.033631202693, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_149_43_149_43.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_740_530_740_530.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_460_210_460_210.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_513_399_513_399.slha', 'axes': {'x': 513.323080599, 'y': 399.908529067}, 't': 0.13906956397703965, 'signal': 452.440028, 'UL': 369.2959220468115, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_554_279_554_279.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_245_25_245_25.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_542_291_542_291.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_571_247_571_247.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_857_612_857_612.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_542_431_542_431.slha', 'axes': {'x': 542.38105293, 'y': 431.48794044}, 't': 0.13906956397703965, 'signal': 331.782056, 'UL': 360.84079027994727, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_693_583_693_583.slha', 'axes': {'x': 693.075668612, 'y': 583.94636819}, 't': 0.13906956397703965, 'signal': 78.2794866, 'UL': 233.31672931619812, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_347_22_347_22.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_424_306_424_306.slha', 'axes': {'x': 424.797975098, 'y': 306.529952227}, 't': 0.13906956397703965, 'signal': 1282.42852, 'UL': 538.2360751734345, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_722_546_722_546.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_475_245_475_245.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_470_200_470_200.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_584_285_584_285.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_647_508_647_508.slha', 'axes': {'x': 647.120420504, 'y': 508.55062994}, 't': 0.13906956397703965, 'signal': 118.701708, 'UL': 218.86498158337906, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_419_150_419_150.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_274_154_274_154.slha', 'axes': {'x': 274.103359416, 'y': 154.071524477}, 't': 0.13906956397703965, 'signal': 12342.034099999999, 'UL': 2598.5437807881754, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_723_614_723_614.slha', 'axes': {'x': 723.48482945, 'y': 614.166122284}, 't': 0.13906956397703965, 'signal': 60.4983545, 'UL': 210.66668765657786, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_284_143_284_143.slha', 'axes': {'x': 284.912867465, 'y': 143.19426625}, 't': 0.13906956397703965, 'signal': 10196.232999999998, 'UL': 2477.551527393971, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_430_301_430_301.slha', 'axes': {'x': 430.202729122, 'y': 301.091323114}, 't': 0.13906956397703965, 'signal': 1208.6553, 'UL': 502.319386202694, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_838_632_838_632.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_447_223_447_223.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_573_347_573_347.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_583_450_583_450.slha', 'axes': {'x': 583.599721817, 'y': 450.830436308}, 't': 0.13906956397703965, 'signal': 218.46462499999998, 'UL': 246.06665005529587, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_576_457_576_457.slha', 'axes': {'x': 576.843779286, 'y': 457.628722699}, 't': 0.13906956397703965, 'signal': 234.317764, 'UL': 270.95911382113826, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_551_421_551_421.slha', 'axes': {'x': 551.839372473, 'y': 421.970339491}, 't': 0.13906956397703965, 'signal': 299.57267199999995, 'UL': 303.83439876562124, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_669_546_669_546.slha', 'axes': {'x': 669.422450305, 'y': 546.928327704}, 't': 0.13906956397703965, 'signal': 96.9378875, 'UL': 197.98151445347818, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_739_528_739_528.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_828_525_828_525.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_927_599_927_599.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_312_7_312_7.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_833_519_833_519.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_604_416_604_416.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_778_541_778_541.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_815_626_815_626.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_543_226_543_226.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_630_376_630_376.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_359_250_359_250.slha', 'axes': {'x': 359.926087905, 'y': 250.169415873}, 't': 0.13906956397703965, 'signal': 3090.94251, 'UL': 844.1652028114863, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_441_178_441_178.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_485_235_485_235.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_304_184_304_184.slha', 'axes': {'x': 304.512520254, 'y': 184.291278571}, 't': 0.13906956397703965, 'signal': 7289.53425, 'UL': 1570.7756223717404, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_634_520_634_520.slha', 'axes': {'x': 634.959723949, 'y': 520.787545445}, 't': 0.13906956397703965, 'signal': 132.633063, 'UL': 278.4171126280682, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_538_435_538_435.slha', 'axes': {'x': 538.327487412, 
'y': 435.566912275}, 't': 0.13906956397703965, 'signal': 344.38413099999997, 'UL': 389.18398519163804, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_525_387_525_387.slha', 'axes': {'x': 525.483777153, 'y': 387.671613562}, 't': 0.13906956397703965, 'signal': 396.10660199999995, 'UL': 314.92820755185414, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_724_547_724_547.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_402_268_402_268.slha', 'axes': {'x': 402.495945297, 'y': 268.152254463}, 't': 0.13906956397703965, 'signal': 1716.05187, 'UL': 625.9687828907845, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_905_614_905_614.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_553_216_553_216.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_289_30_289_30.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_755_565_755_565.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_668_401_668_401.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_698_570_698_570.slha', 'axes': {'x': 698.519925651, 'y': 570.43970676}, 't': 0.13906956397703965, 'signal': 74.86335299999999, 'UL': 189.3345879253236, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_195_51_195_51.slha', 'axes': {'x': 195.036573458, 'y': 51.1753466888}, 't': 0.13906956397703965, 'signal': 64049.670000000006, 'UL': 27367.857791197428, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_365_244_365_244.slha', 'axes': {'x': 365.330841929, 'y': 244.73078676}, 't': 0.13906956397703965, 'signal': 2855.3204299999998, 'UL': 796.2064010840105, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_463_207_463_207.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_532_287_532_287.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_267_160_267_160.slha', 'axes': {'x': 267.347416886, 'y': 160.869810868}, 't': 0.13906956397703965, 'signal': 14024.630500000001, 'UL': 
2590.4643491999004, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_307_113_307_113.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_580_289_580_289.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_568_251_568_251.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_628_342_628_342.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_756_513_756_513.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_417_102_417_102.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_664_552_664_552.slha', 'axes': {'x': 664.017696281, 'y': 552.366956817}, 't': 0.13906956397703965, 'signal': 101.52093500000001, 'UL': 208.66208188153325, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_511_208_511_208.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_218_88_218_88.slha', 'axes': {'x': 218.689791765, 'y': 88.1933871747}, 't': 0.13906956397703965, 'signal': 37168.3666, 'UL': 9220.5129968377, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_848_622_848_622.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_476_193_476_193.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_893_634_893_634.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_585_385_585_385.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_276_43_276_43.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_406_164_406_164.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_238_89_238_89.slha', 'axes': {'x': 238.316542084, 'y': 89.6697118049}, 't': 0.13906956397703965, 'signal': 24454.732799999998, 'UL': 7758.748610446637, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_320_100_320_100.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_617_403_617_403.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_574_295_574_295.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_411_162_411_162.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_399_222_399_222.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_719_618_719_618.slha', 'axes': {'x': 719.431263932, 'y': 618.245094119}, 't': 0.13906956397703965, 'signal': 62.334241, 'UL': 207.42065282193056, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_626_293_626_293.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_628_505_628_505.slha', 'axes': {'x': 628.759571344, 'y': 505.80046993}, 't': 0.13906956397703965, 'signal': 141.076168, 'UL': 256.9261933152664, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_572_398_572_398.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_915_604_915_604.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_811_542_811_542.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_370_239_370_239.slha', 'axes': {'x': 370.735595953, 'y': 239.292157647}, 't': 0.13906956397703965, 'signal': 2654.8039, 'UL': 760.1157900685332, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_604_490_604_490.slha', 'axes': {'x': 604.550563112, 'y': 490.56779135}, 't': 0.13906956397703965, 'signal': 178.521481, 'UL': 293.4994199110752, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_593_327_593_327.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_770_449_770_449.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_246_121_246_121.slha', 'axes': {'x': 246.39657559, 'y': 121.132455826}, 't': 0.13906956397703965, 'signal': 20712.944799999997, 'UL': 4602.297847341675, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_405_168_405_168.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_635_371_635_371.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_776_493_776_493.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_524_389_524_389.slha', 'axes': {'x': 524.132588647, 'y': 389.03127084}, 't': 0.13906956397703965, 'signal': 402.109016, 'UL': 321.89850753325345, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_576_344_576_344.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_763_507_763_507.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_376_43_376_43.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_747_472_747_472.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_378_108_378_108.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_503_362_503_362.slha', 'axes': {'x': 503.298411021, 'y': 362.374327697}, 't': 0.13906956397703965, 'signal': 507.762016, 'UL': 319.6819041686415, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_773_493_773_493.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_345_176_345_176.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_784_586_784_586.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_586_334_586_334.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_838_602_838_602.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_843_526_843_526.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_622_297_622_297.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_296_16_296_16.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_603_491_603_491.slha', 'axes': {'x': 603.199374606, 'y': 491.927448628}, 't': 0.13906956397703965, 'signal': 179.965095, 'UL': 301.36754278105036, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_611_358_611_358.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_659_360_659_360.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_582_388_582_388.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_539_434_539_434.slha', 'axes': {'x': 539.678675918, 'y': 434.207254996}, 't': 0.13906956397703965, 'signal': 341.41869099999997, 'UL': 378.1816319235852, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_301_187_301_187.slha', 'axes': {'x': 301.810143241, 'y': 187.010593128}, 't': 0.13906956397703965, 'signal': 7643.09823, 'UL': 1599.3748636309015, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_665_404_665_404.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_527_219_527_219.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_839_529_839_529.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_215_90_215_90.slha', 'axes': {'x': 215.987414753, 'y': 90.9127017314}, 't': 0.13906956397703965, 'signal': 39534.2535, 'UL': 9272.707194297232, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_834_636_834_636.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_636_434_636_434.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_695_525_695_525.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_683_497_683_497.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_489_180_489_180.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_626_508_626_508.slha', 'axes': {'x': 626.416313295, 'y': 508.158414692}, 't': 0.13906956397703965, 'signal': 144.363063, 'UL': 261.53307509311543, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_426_144_426_144.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_566_354_566_354.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_263_57_263_57.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_254_112_254_112.slha', 'axes': {'x': 254.503706627, 'y': 112.974512156}, 't': 0.13906956397703965, 'signal': 17695.0231, 'UL': 4784.904671947999, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_579_454_579_454.slha', 'axes': {'x': 579.546156298, 'y': 454.909408142}, 't': 0.13906956397703965, 'signal': 227.84679999999997, 'UL': 261.1017826504865, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_421_309_421_309.slha', 'axes': {'x': 421.82176647, 'y': 309.524814679}, 't': 0.13906956397703965, 'signal': 1332.9123, 'UL': 557.9908125126956, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_622_472_622_472.slha', 'axes': {'x': 622.116013691, 'y': 472.892246732}, 't': 0.13906956397703965, 'signal': 150.439779, 'UL': 217.79243526046366, 'condition': 0.0, 
'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_287_140_287_140.slha', 'axes': {'x': 287.615244477, 'y': 140.474951694}, 't': 0.13906956397703965, 'signal': 9714.14622, 'UL': 2546.369009645086, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_662_431_662_431.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_571_299_571_299.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_546_427_546_427.slha', 'axes': {'x': 546.434618449, 'y': 427.408968605}, 't': 0.13906956397703965, 'signal': 317.428462, 'UL': 329.4656104336043, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_554_315_554_315.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_529_291_529_291.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_845_574_845_574.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_838_699_838_699.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_514_256_514_256.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_436_294_436_294.slha', 'axes': {'x': 436.958671653, 'y': 294.293036722}, 't': 0.13906956397703965, 'signal': 1114.1324299999999, 'UL': 481.390383485522, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_582_250_582_250.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_344_25_344_25.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_144_41_144_41.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_279_92_279_92.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_210_11_210_11.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_643_512_643_512.slha', 'axes': {'x': 643.066854986, 'y': 512.629601775}, 't': 0.13906956397703965, 'signal': 123.422604, 'UL': 247.94605542097068, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_702_519_702_519.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_491_279_491_279.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_771_599_771_599.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_481_265_481_265.slha', 
'error': 'no results'}, {'slhafile': 'T2ttoff_440_290_440_290.slha', 'axes': {'x': 440.56783086, 'y': 290.661256581}, 't': 0.13906956397703965, 'signal': 1063.37969, 'UL': 482.10978003613366, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_760_594_760_594.slha', 'axes': {'x': 760.253907873, 'y': 594.711901422}, 't': 0.13906956397703965, 'signal': 38.5105244, 'UL': 107.29802461552333, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_516_397_516_397.slha', 'axes': {'x': 516.025457611, 'y': 397.18921451}, 't': 0.13906956397703965, 'signal': 438.771631, 'UL': 358.7890163993479, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_607_413_607_413.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_390_96_390_96.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_466_193_466_193.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_542_277_542_277.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_670_501_670_501.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_552_368_552_368.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_484_381_484_381.slha', 'axes': {'x': 484.552346632, 'y': 381.237885796}, 't': 0.13906956397703965, 'signal': 625.461916, 'UL': 415.4737995013817, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_630_390_630_390.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_161_23_161_23.slha', 'axes': {'x': 161.925035608, 'y': 23.674907151}, 't': 0.13906956397703965, 'signal': 150922.503, 'UL': 97835.29762087681, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_833_704_833_704.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_318_82_318_82.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_302_10_302_10.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_332_217_332_217.slha', 'axes': {'x': 332.219304079, 'y': 217.230347222}, 't': 0.13906956397703965, 'signal': 4676.5923999999995, 'UL': 1079.1874784989327, 'condition': 0.0, 
'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_207_99_207_99.slha', 'axes': {'x': 207.880283716, 'y': 99.0706454012}, 't': 0.13906956397703965, 'signal': 47402.1286, 'UL': 8708.158121828237, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_830_539_830_539.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_271_156_271_156.slha', 'axes': {'x': 271.400982404, 'y': 156.790839033}, 't': 0.13906956397703965, 'signal': 12991.5511, 'UL': 2608.7466778805715, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_568_465_568_465.slha', 'axes': {'x': 568.73664825, 'y': 465.786666369}, 't': 0.13906956397703965, 'signal': 251.41438399999998, 'UL': 300.640919740478, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_444_175_444_175.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_219_8_219_8.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_792_477_792_477.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_346_124_346_124.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_340_80_340_80.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_538_207_538_207.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_557_416_557_416.slha', 'axes': {'x': 557.244126497, 'y': 416.531710378}, 't': 0.13906956397703965, 'signal': 283.89946, 'UL': 280.76650713099957, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_822_531_822_531.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_768_635_768_635.slha', 'axes': {'x': 768.280279958, 'y': 635.078943589}, 't': 0.13906956397703965, 'signal': 41.9771818, 'UL': 138.38699189744287, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_211_94_211_94.slha', 'axes': {'x': 211.933849235, 'y': 94.9916735663}, 't': 0.13906956397703965, 'signal': 43119.5252, 'UL': 9205.92180235389, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_795_470_795_470.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_756_511_756_511.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_367_119_367_119.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_370_200_370_200.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_872_568_872_568.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_494_276_494_276.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_377_142_377_142.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_296_166_296_166.slha', 'axes': {'x': 296.360606148, 'y': 166.098672446}, 't': 0.13906956397703965, 'signal': 8328.71744, 'UL': 1885.8918514004563, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_477_182_477_182.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_424_247_424_247.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_558_261_558_261.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_669_336_669_336.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_672_544_672_544.slha', 'axes': {'x': 672.124827317, 'y': 544.209013147}, 't': 0.13906956397703965, 'signal': 94.5457237, 'UL': 191.48513396611804, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_871_547_871_547.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_494_164_494_164.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_707_412_707_412.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_627_393_627_393.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_754_465_754_465.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_620_400_620_400.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_464_282_464_282.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_395_225_395_225.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_341_28_341_28.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_726_392_726_392.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_587_282_587_282.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_113_11_113_11.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_214_113_214_113.slha', 'axes': 
{'x': 214.883961597, 'y': 113.249159428}, 't': 0.13906956397703965, 'signal': 40546.419, 'UL': 6725.717243324987, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_359_237_359_237.slha', 'axes': {'x': 359.091186309, 'y': 237.811743563}, 't': 0.13906956397703965, 'signal': 3128.67243, 'UL': 842.0353776282589, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_500_321_500_321.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_342_128_342_128.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_710_409_710_409.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_753_517_753_517.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_634_335_634_335.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_658_462_658_462.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_497_354_497_354.slha', 'axes': {'x': 497.776993328, 'y': 354.732544911}, 't': 0.13906956397703965, 'signal': 538.563262, 'UL': 346.18547146943683, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_764_455_764_455.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_709_567_709_567.slha', 'axes': {'x': 709.289930685, 'y': 567.63048085}, 't': 0.13906956397703965, 'signal': 68.3038857, 'UL': 158.35558762420956, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_445_124_445_124.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_304_116_304_116.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_338_210_338_210.slha', 'axes': {'x': 338.975246609, 'y': 210.432060831}, 't': 0.13906956397703965, 'signal': 4234.05108, 'UL': 1052.7838161012933, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_705_515_705_515.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_649_505_649_505.slha', 'axes': {'x': 649.822797516, 'y': 505.831315383}, 't': 0.13906956397703965, 'signal': 115.67374699999999, 'UL': 196.7086695104611, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_257_110_257_110.slha', 'axes': {'x': 257.206083639, 'y': 110.255197599}, 't': 0.13906956397703965, 'signal': 16941.7466, 'UL': 4897.006408443729, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_333_138_333_138.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_848_571_848_571.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_149_35_149_35.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_351_18_351_18.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_710_566_710_566.slha', 'axes': {'x': 710.641119191, 'y': 566.270823572}, 't': 0.13906956397703965, 'signal': 67.4691548, 'UL': 155.19816217109195, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_416_314_416_314.slha', 'axes': {'x': 416.690844062, 'y': 314.687895897}, 't': 0.13906956397703965, 'signal': 1417.4282600000001, 'UL': 590.2441006174897, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_720_550_720_550.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_861_557_861_557.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_431_300_431_300.slha', 'axes': {'x': 431.194798665, 'y': 300.09303563}, 't': 0.13906956397703965, 'signal': 1192.8955999999998, 'UL': 497.1428958372734, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_383_191_383_191.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_464_155_464_155.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_619_475_619_475.slha', 'axes': {'x': 619.413636678, 'y': 475.611561289}, 't': 0.13906956397703965, 'signal': 153.664314, 'UL': 226.03771536705526, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_289_82_289_82.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_311_58_311_58.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_470_148_470_148.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_589_330_589_330.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_732_437_732_437.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_705_571_705_571.slha', 'axes': {'x': 705.236365167, 'y': 571.709452685}, 't': 0.13906956397703965, 'signal': 70.4812506, 'UL': 169.24486056710302, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_638_496_638_496.slha', 'axes': {'x': 638.132603538, 'y': 496.368690881}, 't': 0.13906956397703965, 'signal': 129.382707, 'UL': 194.31721976567195, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_285_28_285_28.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_597_497_597_497.slha', 'axes': {'x': 597.794620581, 'y': 497.366077742}, 't': 0.13906956397703965, 'signal': 189.94585999999998, 'UL': 332.42589096219734, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_576_256_576_256.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_448_120_448_120.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_944_625_944_625.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_514_320_514_320.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_741_479_741_479.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_773_497_773_497.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_531_339_531_339.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_334_214_334_214.slha', 'axes': {'x': 334.921681091, 'y': 214.511032666}, 't': 0.13906956397703965, 'signal': 4479.09416, 'UL': 1068.876495798849, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_495_357_495_357.slha', 'axes': {'x': 495.074616316, 'y': 357.451859468}, 't': 0.13906956397703965, 'signal': 555.517106, 'UL': 357.8093532079778, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_177_68_177_68.slha', 'axes': {'x': 177.471122879, 'y': 68.8508913068}, 't': 0.13906956397703965, 'signal': 99322.2535, 'UL': 30524.536534853516, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_723_396_723_396.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_541_329_541_329.slha', 
'error': 'no results'}, {'slhafile': 'T2ttoff_250_70_250_70.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_874_596_874_596.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_473_197_473_197.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_466_203_466_203.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_620_474_620_474.slha', 'axes': {'x': 620.764825185, 'y': 474.25190401}, 't': 0.13906956397703965, 'signal': 152.445111, 'UL': 221.1057287035924, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_285_85_285_85.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_220_86_220_86.slha', 'axes': {'x': 220.040980271, 'y': 86.8337298964}, 't': 0.13906956397703965, 'signal': 36084.8899, 'UL': 9174.837533674894, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_580_340_580_340.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_767_499_767_499.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_327_160_327_160.slha', 'axes': {'x': 327.877083649, 'y': 160.109040948}, 't': 0.13906956397703965, 'signal': 4666.58025, 'UL': 2414.688615403355, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_496_369_496_369.slha', 'axes': {'x': 496.268636875, 'y': 369.448161984}, 't': 0.13906956397703965, 'signal': 548.2175169999999, 'UL': 338.78967610837424, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_396_174_396_174.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_674_541_674_541.slha', 'axes': {'x': 674.827204329, 'y': 541.489698591}, 't': 0.13906956397703965, 'signal': 92.2706585, 'UL': 184.7063159317556, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_854_616_854_616.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_605_489_605_489.slha', 'axes': {'x': 605.901751618, 'y': 489.208134072}, 't': 0.13906956397703965, 'signal': 176.105721, 'UL': 287.4157961173501, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_751_427_751_427.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_757_422_757_422.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_412_157_412_157.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_682_533_682_533.slha', 'axes': {'x': 682.934335366, 'y': 533.331754921}, 't': 0.13906956397703965, 'signal': 86.04167779999999, 'UL': 178.8645902264328, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_416_154_416_154.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_472_188_472_188.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_294_168_294_168.slha', 'axes': {'x': 294.017348099, 'y': 168.456617208}, 't': 0.13906956397703965, 'signal': 8678.86026, 'UL': 1924.7914686166978, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_501_218_501_218.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_280_147_280_147.slha', 'axes': {'x': 280.859301946, 'y': 147.273238085}, 't': 0.13906956397703965, 'signal': 10903.408300000001, 'UL': 2524.15259334538, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_671_449_671_449.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_427_303_427_303.slha', 'axes': {'x': 427.50035211, 'y': 303.81063767}, 't': 0.13906956397703965, 'signal': 1250.9899, 'UL': 520.5218668780001, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_575_345_575_345.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_518_394_518_394.slha', 'axes': {'x': 518.727834623, 'y': 394.469899954}, 't': 0.13906956397703965, 'signal': 425.315536, 'UL': 346.6844944139751, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_315_55_315_55.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_752_417_752_417.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_746_575_746_575.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_769_551_769_551.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_533_236_533_236.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_852_567_852_567.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_515_230_515_230.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_499_353_499_353.slha', 'axes': {'x': 499.128181834, 'y': 353.372887633}, 't': 0.13906956397703965, 'signal': 530.166036, 'UL': 341.8119089985781, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_120_4_120_4.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_897_572_897_572.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_738_599_738_599.slha', 'axes': {'x': 738.347903017, 'y': 599.209892223}, 't': 0.13906956397703965, 'signal': 53.8689578, 'UL': 170.24509196302932, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_558_312_558_312.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_623_396_623_396.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_739_597_739_597.slha', 'axes': {'x': 739.699091523, 'y': 597.850234945}, 't': 0.13906956397703965, 'signal': 53.235372500000004, 'UL': 161.3181430582699, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_530_239_530_239.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_462_258_462_258.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_503_331_503_331.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_828_642_828_642.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_340_209_340_209.slha', 'axes': {'x': 340.326435116, 'y': 209.072403552}, 't': 0.13906956397703965, 'signal': 4145.13649, 'UL': 1045.634482938734, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_845_507_845_507.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_619_388_619_388.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_596_324_596_324.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_123_1_123_1.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_614_406_614_406.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_454_165_454_165.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_842_577_842_577.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_422_309_422_309.slha', 'axes': {'x': 422.095598086, 'y': 309.249266784}, 't': 0.13906956397703965, 'signal': 1328.33583, 'UL': 556.0970702146387, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_779_490_779_490.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_122_2_122_2.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_219_108_219_108.slha', 'axes': {'x': 219.570477694, 'y': 108.533269903}, 't': 0.13906956397703965, 'signal': 36457.426400000004, 'UL': 6475.753574655602, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_527_191_527_191.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_775_545_775_545.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_489_362_489_362.slha', 'axes': {'x': 489.669862292, 'y': 362.890488581}, 't': 0.13906956397703965, 'signal': 592.1888680000001, 'UL': 387.3782289220116, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_592_378_592_378.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_364_156_364_156.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_868_551_868_551.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_877_562_877_562.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_663_356_663_356.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_377_194_377_194.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_694_426_694_426.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_698_522_698_522.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_668_548_668_548.slha', 'axes': {'x': 668.071261799, 'y': 548.287984982}, 't': 0.13906956397703965, 'signal': 98.2267633, 'UL': 201.3975624811806, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_166_27_166_27.slha', 'axes': {'x': 166.212929728, 'y': 27.3884197377}, 't': 0.13906956397703965, 'signal': 134613.63700000002, 'UL': 73135.53893570488, 'condition': 0.0, 'dataset': None, 'kfactor': 
1.0}, {'slhafile': 'T2ttoff_513_307_513_307.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_725_495_725_495.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_361_248_361_248.slha', 'axes': {'x': 361.277276411, 'y': 248.809758595}, 't': 0.13906956397703965, 'signal': 3030.20782, 'UL': 828.7079723657332, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_644_511_644_511.slha', 'axes': {'x': 644.418043492, 'y': 511.269944496}, 't': 0.13906956397703965, 'signal': 121.79276899999999, 'UL': 238.72502445042812, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_433_139_433_139.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_273_47_273_47.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_156_29_156_29.slha', 'axes': {'x': 156.520281584, 'y': 29.1135362643}, 't': 0.13906956397703965, 'signal': 176635.933, 'UL': 105835.9043680971, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_143_42_143_42.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_381_139_381_139.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_493_371_493_371.slha', 'axes': {'x': 493.925378826, 'y': 371.806106746}, 't': 0.13906956397703965, 'signal': 563.06932, 'UL': 351.6199224137932, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_422_147_422_147.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_489_170_489_170.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_795_525_795_525.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_618_352_618_352.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_214_92_214_92.slha', 'axes': {'x': 214.636226247, 'y': 92.2723590097}, 't': 0.13906956397703965, 'signal': 40780.8202, 'UL': 9316.973259468, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_438_292_438_292.slha', 'axes': {'x': 438.309860159, 'y': 292.933379444}, 't': 0.13906956397703965, 'signal': 1094.8702400000002, 'UL': 479.1912812988105, 'condition': 0.0, 'dataset': 
None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_734_485_734_485.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_369_50_369_50.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_362_247_362_247.slha', 'axes': {'x': 362.628464917, 'y': 247.450101317}, 't': 0.13906956397703965, 'signal': 2971.90596, 'UL': 817.5912698546194, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_563_436_563_436.slha', 'axes': {'x': 563.685733134, 'y': 436.445343576}, 't': 0.13906956397703965, 'signal': 265.168234, 'UL': 303.42042813014984, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_642_428_642_428.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_633_522_633_522.slha', 'axes': {'x': 633.608535443, 'y': 522.147202723}, 't': 0.13906956397703965, 'signal': 134.356741, 'UL': 282.4112989796563, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_443_227_443_227.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_445_127_445_127.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_260_60_260_60.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_774_596_774_596.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_558_441_558_441.slha', 'axes': {'x': 558.999217036, 'y': 441.161233101}, 't': 0.13906956397703965, 'signal': 278.49816599999997, 'UL': 326.3052251270551, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_467_324_467_324.slha', 'axes': {'x': 467.36783249, 'y': 324.512790817}, 't': 0.13906956397703965, 'signal': 765.697998, 'UL': 377.9434738970291, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_821_498_821_498.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_386_184_386_184.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_289_173_289_173.slha', 'axes': {'x': 289.330832002, 'y': 173.172506733}, 't': 0.13906956397703965, 'signal': 9422.00921, 'UL': 1938.4821882607926, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_459_332_459_332.slha', 'axes': {'x': 459.260701454, 'y': 332.670734487}, 't': 0.13906956397703965, 'signal': 844.882468, 'UL': 437.4042245056631, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_812_607_812_607.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_896_624_896_624.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_650_370_650_370.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_694_582_694_582.slha', 'axes': {'x': 694.426857118, 'y': 582.586710912}, 't': 0.13906956397703965, 'signal': 77.7698857, 'UL': 226.0341322590109, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_950_618_950_618.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_358_162_358_162.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_683_487_683_487.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_577_292_577_292.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_420_99_420_99.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_373_46_373_46.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_725_444_725_444.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_498_248_498_248.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_357_252_357_252.slha', 'axes': {'x': 357.223710892, 'y': 252.88873043}, 't': 0.13906956397703965, 'signal': 3218.84535, 'UL': 888.2116713324525, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_799_470_799_470.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_528_384_528_384.slha', 'axes': {'x': 528.186154166, 'y': 384.952299005}, 't': 0.13906956397703965, 'signal': 384.33773599999995, 'UL': 299.34731785045255, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_421_250_421_250.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_569_402_569_402.slha', 'axes': {'x': 569.345764322, 'y': 402.11520335}, 't': 0.13906956397703965, 'signal': 226.35721999999998, 'UL': 240.25362777844526, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, 
{'slhafile': 'T2ttoff_782_538_782_538.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_688_381_688_381.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_387_132_387_132.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_520_249_520_249.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_819_601_819_601.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_552_267_552_267.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_426_304_426_304.slha', 'axes': {'x': 426.508282568, 'y': 304.808925154}, 't': 0.13906956397703965, 'signal': 1254.84696, 'UL': 526.5660757738133, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_651_469_651_469.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_385_84_385_84.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_247_119_247_119.slha', 'axes': {'x': 247.747764097, 'y': 119.772798547}, 't': 0.13906956397703965, 'signal': 20140.3562, 'UL': 4623.609459869178, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_757_462_757_462.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_300_188_300_188.slha', 'axes': {'x': 300.458954735, 'y': 188.370250406}, 't': 0.13906956397703965, 'signal': 7816.631530000001, 'UL': 1603.8476490665607, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_726_611_726_611.slha', 'axes': {'x': 726.187206462, 'y': 611.446807728}, 't': 0.13906956397703965, 'signal': 59.058445899999995, 'UL': 215.1636738532014, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_743_527_743_527.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_738_482_738_482.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_565_268_565_268.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_575_244_575_244.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_840_697_840_697.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_539_281_539_281.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_437_233_437_233.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_509_311_509_311.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_264_163_264_163.slha', 'axes': {'x': 264.645039873, 'y': 163.589125425}, 't': 0.13906956397703965, 'signal': 14670.234699999999, 'UL': 2566.0456786232103, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_672_346_672_346.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_213_7_213_7.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_635_498_635_498.slha', 'axes': {'x': 635.78934549, 'y': 498.726635643}, 't': 0.13906956397703965, 'signal': 132.267973, 'UL': 205.66144518691306, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_722_447_722_447.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_308_180_308_180.slha', 'axes': {'x': 308.566085772, 'y': 180.212306736}, 't': 0.13906956397703965, 'signal': 6812.0377100000005, 'UL': 1514.1701626016256, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_387_283_387_283.slha', 'axes': {'x': 387.63287173, 'y': 283.108484524}, 't': 0.13906956397703965, 'signal': 2095.2691200000004, 'UL': 748.9029550346268, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_483_187_483_187.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_826_614_826_614.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_652_418_652_418.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_435_295_435_295.slha', 'axes': {'x': 435.881314762, 'y': 295.377146105}, 't': 0.13906956397703965, 'signal': 1129.4152, 'UL': 483.24727514302924, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_274_39_274_39.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_637_518_637_518.slha', 'axes': {'x': 637.662100961, 'y': 518.068230888}, 't': 0.13906956397703965, 'signal': 129.93188999999998, 'UL': 276.45233267880934, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_456_335_456_335.slha', 'axes': {'x': 456.558324442, 'y': 335.390049043}, 't': 
0.13906956397703965, 'signal': 874.110863, 'UL': 455.28735686217993, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_712_457_712_457.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_782_487_782_487.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_186_59_186_59.slha', 'axes': {'x': 186.929442421, 'y': 59.3332903587}, 't': 0.13906956397703965, 'signal': 78076.75709999999, 'UL': 26661.75870461769, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_419_311_419_311.slha', 'axes': {'x': 419.478508422, 'y': 311.882759441}, 't': 0.13906956397703965, 'signal': 1375.1805, 'UL': 572.5647245311793, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_518_352_518_352.slha', 'axes': {'x': 518.828647949, 'y': 352.66138469}, 't': 0.13906956397703965, 'signal': 386.880607, 'UL': 291.33009918258426, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_518_201_518_201.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_497_273_497_273.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_637_383_637_383.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_495_225_495_225.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_378_91_378_91.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_362_108_362_108.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_738_430_738_430.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_522_297_522_297.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_887_582_887_582.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_496_324_496_324.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_747_523_747_523.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_645_424_645_424.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_867_602_867_602.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_163_22_163_22.slha', 'axes': {'x': 163.276224114, 'y': 22.3152498727}, 't': 0.13906956397703965, 'signal': 146213.27, 'UL': 99547.75712691399, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_401_269_401_269.slha', 'axes': {'x': 401.144756791, 'y': 269.511911741}, 't': 0.13906956397703965, 'signal': 1747.2714, 'UL': 640.6928991950017, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_550_423_550_423.slha', 'axes': {'x': 550.488183967, 'y': 423.32999677}, 't': 0.13906956397703965, 'signal': 303.986224, 'UL': 308.4877730447913, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_243_123_243_123.slha', 'axes': {'x': 243.694198578, 'y': 123.851770382}, 't': 0.13906956397703965, 'signal': 21880.6616, 'UL': 4495.09279095109, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_607_400_607_400.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_598_372_598_372.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_347_202_347_202.slha', 'axes': {'x': 347.082377646, 'y': 202.274117161}, 't': 0.13906956397703965, 'signal': 3731.08452, 'UL': 1001.2620486126743, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_703_467_703_467.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_279_148_279_148.slha', 'axes': {'x': 279.50811344, 'y': 148.632895364}, 't': 0.13906956397703965, 'signal': 11169.2974, 'UL': 2543.235806233063, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_480_372_480_372.slha', 'axes': {'x': 480.211542749, 'y': 372.408089529}, 't': 0.13906956397703965, 'signal': 658.53071, 'UL': 435.36425846931644, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_734_603_734_603.slha', 'axes': {'x': 734.294337499, 'y': 603.288864058}, 't': 0.13906956397703965, 'signal': 55.2188455, 'UL': 196.91865461439534, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_538_332_538_332.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_571_463_571_463.slha', 'axes': {'x': 571.439025262, 'y': 463.067351812}, 't': 0.13906956397703965, 'signal': 244.146942, 
'UL': 290.79376631022467, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_609_310_609_310.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_841_629_841_629.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_855_585_855_585.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_426_305_426_305.slha', 'axes': {'x': 426.149163604, 'y': 305.170294949}, 't': 0.13906956397703965, 'signal': 1260.78884, 'UL': 528.9837593321386, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_517_395_517_395.slha', 'axes': {'x': 517.376646117, 'y': 395.829557232}, 't': 0.13906956397703965, 'signal': 431.84448199999997, 'UL': 352.6459804094161, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_636_519_636_519.slha', 'axes': {'x': 636.310912455, 'y': 519.427888166}, 't': 0.13906956397703965, 'signal': 131.631056, 'UL': 277.34756504354243, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_777_593_777_593.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_641_328_641_328.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_337_211_337_211.slha', 'axes': {'x': 337.624058103, 'y': 211.791718109}, 't': 0.13906956397703965, 'signal': 4323.23578, 'UL': 1058.1197223914953, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_412_209_412_209.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_449_271_449_271.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_301_161_301_161.slha', 'axes': {'x': 301.047122245, 'y': 161.382782921}, 't': 0.13906956397703965, 'signal': 7738.50454, 'UL': 1769.2899913845627, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_153_31_153_31.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_537_296_537_296.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_507_405_507_405.slha', 'axes': {'x': 507.918326574, 'y': 405.34715818}, 't': 0.13906956397703965, 'signal': 481.695868, 'UL': 383.0799882637055, 'condition': 0.0, 
'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_703_573_703_573.slha', 'axes': {'x': 703.885176661, 'y': 573.069109963}, 't': 0.13906956397703965, 'signal': 71.3424455, 'UL': 172.6111728343145, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_450_122_450_122.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_526_386_526_386.slha', 'axes': {'x': 526.83496566, 'y': 386.311956284}, 't': 0.13906956397703965, 'signal': 390.274011, 'UL': 307.0038285598214, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_339_131_339_131.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_696_474_696_474.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_555_417_555_417.slha', 'axes': {'x': 555.892937991, 'y': 417.891367657}, 't': 0.13906956397703965, 'signal': 286.412353, 'UL': 285.5831703793983, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_330_90_330_90.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_415_205_415_205.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_791_579_791_579.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_535_335_535_335.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_395_274_395_274.slha', 'axes': {'x': 395.740002766, 'y': 274.950540854}, 't': 0.13906956397703965, 'signal': 1873.36779, 'UL': 685.6090249847653, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_366_243_366_243.slha', 'axes': {'x': 366.682030435, 'y': 243.371129482}, 't': 0.13906956397703965, 'signal': 2813.61935, 'UL': 785.5169430894309, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_543_430_543_430.slha', 'axes': {'x': 543.732241436, 'y': 430.128283161}, 't': 0.13906956397703965, 'signal': 326.986061, 'UL': 352.2230232488285, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_669_350_669_350.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_889_630_889_630.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_750_517_750_517.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_664_456_664_456.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_336_213_336_213.slha', 'axes': {'x': 336.272869597, 'y': 213.151375387}, 't': 0.13906956397703965, 'signal': 4385.56785, 'UL': 1062.400418444038, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_318_170_318_170.slha', 'axes': {'x': 318.024405314, 'y': 170.694705788}, 't': 0.13906956397703965, 'signal': 5864.19892, 'UL': 1398.1963438663954, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_369_240_369_240.slha', 'axes': {'x': 369.384407447, 'y': 240.651814925}, 't': 0.13906956397703965, 'signal': 2706.5579999999995, 'UL': 767.9836019578355, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_802_467_802_467.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_647_322_647_322.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_363_246_363_246.slha', 'axes': {'x': 363.979653423, 'y': 246.090444038}, 't': 0.13906956397703965, 'signal': 2913.8450900000003, 'UL': 805.9180406504064, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_185_60_185_60.slha', 'axes': {'x': 185.578253915, 'y': 60.692947637}, 't': 0.13906956397703965, 'signal': 80784.9771, 'UL': 27231.810365853624, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_151_34_151_34.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_771_583_771_583.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_804_515_804_515.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_400_119_400_119.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_536_233_536_233.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_594_426_594_426.slha', 'axes': {'x': 594.604322508, 'y': 426.84211268}, 't': 0.13906956397703965, 'signal': 175.24883699999998, 'UL': 208.162057963575, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_730_540_730_540.slha', 
'error': 'no results'}, {'slhafile': 'T2ttoff_590_444_590_444.slha', 'axes': {'x': 590.355664347, 'y': 444.032149916}, 't': 0.13906956397703965, 'signal': 205.11374399999997, 'UL': 221.08333507443206, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_409_261_409_261.slha', 'axes': {'x': 409.251887827, 'y': 261.353968071}, 't': 0.13906956397703965, 'signal': 1564.4003099999998, 'UL': 568.8458164123512, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_575_458_575_458.slha', 'axes': {'x': 575.49259078, 'y': 458.988379977}, 't': 0.13906956397703965, 'signal': 237.54154599999998, 'UL': 275.8153182776271, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_331_38_331_38.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_667_504_667_504.slha', 'axes': {'x': 667.115054696, 'y': 504.357981982}, 't': 0.13906956397703965, 'signal': 87.9945924, 'UL': 148.6210957587408, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_799_621_799_621.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_224_82_224_82.slha', 'axes': {'x': 224.094545789, 'y': 82.7547580615}, 't': 0.13906956397703965, 'signal': 32996.8497, 'UL': 9369.793648073313, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_489_376_489_376.slha', 'axes': {'x': 489.238862729, 'y': 376.521996271}, 't': 0.13906956397703965, 'signal': 595.212262, 'UL': 380.28110960591147, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_723_456_723_456.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_368_152_368_152.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_900_569_900_569.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_390_129_390_129.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_672_508_672_508.slha', 'axes': {'x': 672.646117397, 'y': 508.94810304}, 't': 0.13906956397703965, 'signal': 83.58742389999999, 'UL': 149.5897992010092, 'condition': 0.0, 'dataset': None, 'kfactor': 
1.0}, {'slhafile': 'T2ttoff_858_561_858_561.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_306_94_306_94.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_363_233_363_233.slha', 'axes': {'x': 363.777702406, 'y': 233.095854038}, 't': 0.13906956397703965, 'signal': 2922.1497, 'UL': 810.7423333333332, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_651_504_651_504.slha', 'axes': {'x': 651.173986022, 'y': 504.471658105}, 't': 0.13906956397703965, 'signal': 114.796309, 'UL': 186.16088970140146, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_689_588_689_588.slha', 'axes': {'x': 689.022103094, 'y': 588.025340025}, 't': 0.13906956397703965, 'signal': 81.2872536, 'UL': 260.26747403344854, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_575_395_575_395.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_696_396_696_396.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_242_125_242_125.slha', 'axes': {'x': 242.343010072, 'y': 125.211427661}, 't': 0.13906956397703965, 'signal': 22498.2976, 'UL': 4473.042979213217, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_399_170_399_170.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_522_349_522_349.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_713_563_713_563.slha', 'axes': {'x': 713.343496204, 'y': 563.551509015}, 't': 0.13906956397703965, 'signal': 65.8234315, 'UL': 150.910074221834, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_611_483_611_483.slha', 'axes': {'x': 611.306505642, 'y': 483.769504959}, 't': 0.13906956397703965, 'signal': 166.648085, 'UL': 266.28401178143406, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_763_639_763_639.slha', 'axes': {'x': 763.593763861, 'y': 639.794833114}, 't': 0.13906956397703965, 'signal': 43.720759199999996, 'UL': 142.36633228567226, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_899_620_899_620.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_718_502_718_502.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_720_399_720_399.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_312_88_312_88.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_553_420_553_420.slha', 'axes': {'x': 553.190560979, 'y': 420.610682213}, 't': 0.13906956397703965, 'signal': 295.20196999999996, 'UL': 298.1831379691696, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_696_572_696_572.slha', 'axes': {'x': 696.176667602, 'y': 572.797651522}, 't': 0.13906956397703965, 'signal': 76.5178955, 'UL': 194.62834272497884, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_691_378_691_378.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_424_307_424_307.slha', 'axes': {'x': 424.165024519, 'y': 307.166869917}, 't': 0.13906956397703965, 'signal': 1292.93695, 'UL': 542.8424358870449, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_209_97_209_97.slha', 'axes': {'x': 209.231472222, 'y': 97.7109881229}, 't': 0.13906956397703965, 'signal': 45920.3869, 'UL': 8880.345843586107, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_596_411_596_411.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_456_265_456_265.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_251_62_251_62.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_544_325_544_325.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_602_317_602_317.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_648_507_648_507.slha', 'axes': {'x': 648.47160901, 'y': 507.190972661}, 't': 0.13906956397703965, 'signal': 117.170366, 'UL': 207.70701020343824, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_487_283_487_283.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_336_134_336_134.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_452_117_452_117.slha', 'error': 'no results'}, {'slhafile': 
'T2ttoff_729_440_729_440.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_390_180_390_180.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_684_436_684_436.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_303_159_303_159.slha', 'axes': {'x': 303.390380294, 'y': 159.024838159}, 't': 0.13906956397703965, 'signal': 7431.97243, 'UL': 1739.7281481481482, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_603_316_603_316.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_777_577_777_577.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_931_638_931_638.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_919_600_919_600.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_368_242_368_242.slha', 'axes': {'x': 368.033218941, 'y': 242.011472203}, 't': 0.13906956397703965, 'signal': 2759.5455599999996, 'UL': 776.6863194225555, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_394_179_394_179.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_563_357_563_357.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_784_482_784_482.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_517_253_517_253.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_401_84_401_84.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_221_106_221_106.slha', 'axes': {'x': 221.913735743, 'y': 106.175325141}, 't': 0.13906956397703965, 'signal': 34608.1258, 'UL': 6431.560531959628, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_357_42_357_42.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_504_266_504_266.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_860_579_860_579.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_608_362_608_362.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_224_103_224_103.slha', 'axes': {'x': 224.256993792, 'y': 103.817380379}, 't': 0.13906956397703965, 'signal': 32868.2664, 'UL': 6420.754044715445, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_238_129_238_129.slha', 'axes': {'x': 238.289444554, 'y': 129.290399496}, 't': 0.13906956397703965, 'signal': 24468.4536, 'UL': 4298.199481272683, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_716_551_716_551.slha', 'axes': {'x': 716.450012635, 'y': 551.830002231}, 't': 0.13906956397703965, 'signal': 56.115462199999996, 'UL': 154.64209425147274, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_348_200_348_200.slha', 'axes': {'x': 348.433566152, 'y': 200.914459882}, 't': 0.13906956397703965, 'signal': 3653.1439899999996, 'UL': 1000.8117985871312, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_633_501_633_501.slha', 'axes': {'x': 633.446087441, 'y': 501.084580405}, 't': 0.13906956397703965, 'signal': 134.557015, 'UL': 222.02150217787448, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_570_464_570_464.slha', 'axes': {'x': 570.087836756, 'y': 464.427009091}, 't': 0.13906956397703965, 'signal': 247.688715, 'UL': 295.5402342905201, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_690_429_690_429.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_546_223_546_223.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_314_106_314_106.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_380_190_380_190.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_257_63_257_63.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_970_642_970_642.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_706_464_706_464.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_822_597_822_597.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_435_134_435_134.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_698_371_698_371.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_715_505_715_505.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_681_534_681_534.slha', 'axes': {'x': 681.58314686, 'y': 534.691412199}, 't': 0.13906956397703965, 'signal': 
87.16448159999999, 'UL': 177.01835330760372, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_394_126_394_126.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_324_96_324_96.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_732_604_732_604.slha', 'axes': {'x': 732.943148993, 'y': 604.648521336}, 't': 0.13906956397703965, 'signal': 55.5831991, 'UL': 205.75959930716317, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_679_414_679_414.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_311_110_311_110.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_164_20_164_20.slha', 'axes': {'x': 164.62741262, 'y': 20.9555925944}, 't': 0.13906956397703965, 'signal': 140733.229, 'UL': 96924.14587908203, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_608_486_608_486.slha', 'axes': {'x': 608.60412863, 'y': 486.488819515}, 't': 0.13906956397703965, 'signal': 171.292174, 'UL': 279.89528732630276, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_478_241_478_241.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_785_535_785_535.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_787_583_787_583.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_391_78_391_78.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_810_559_810_559.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_504_242_504_242.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_700_480_700_480.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_390_280_390_280.slha', 'axes': {'x': 390.335248742, 'y': 280.389169968}, 't': 0.13906956397703965, 'signal': 2017.3417100000001, 'UL': 716.8176329013606, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_370_226_370_226.slha', 'axes': {'x': 370.807476552, 'y': 226.022019751}, 't': 0.13906956397703965, 'signal': 2651.41114, 'UL': 757.4848161690026, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 
'T2ttoff_614_305_614_305.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_159_26_159_26.slha', 'axes': {'x': 159.222658596, 'y': 26.3942217076}, 't': 0.13906956397703965, 'signal': 163207.874, 'UL': 100655.63286242366, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_235_132_235_132.slha', 'axes': {'x': 235.587067542, 'y': 132.009714052}, 't': 0.13906956397703965, 'signal': 25909.8721, 'UL': 4155.928286074738, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_402_219_402_219.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_392_229_392_229.slha', 'axes': {'x': 392.535857018, 'y': 229.02683804}, 't': 0.13906956397703965, 'signal': 1811.0500599999998, 'UL': 751.4111471626002, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_294_126_294_126.slha', 'axes': {'x': 294.766566643, 'y': 126.784059408}, 't': 0.13906956397703965, 'signal': 8005.256810000001, 'UL': 3321.2315266433116, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_583_337_583_337.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_524_195_524_195.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_661_459_661_459.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_213_93_213_93.slha', 'axes': {'x': 213.285037741, 'y': 93.632016288}, 't': 0.13906956397703965, 'signal': 41799.9894, 'UL': 9288.490849586595, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_308_62_308_62.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_286_175_286_175.slha', 'axes': {'x': 286.987573953, 'y': 175.530451495}, 't': 0.13906956397703965, 'signal': 9823.463829999999, 'UL': 1942.666185012743, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_745_424_745_424.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_801_465_801_465.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_355_254_355_254.slha', 'axes': {'x': 355.872522386, 'y': 254.248387708}, 't': 0.13906956397703965, 
'signal': 3284.09256, 'UL': 909.3839605911331, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_493_358_493_358.slha', 'axes': {'x': 493.72342781, 'y': 358.811516746}, 't': 0.13906956397703965, 'signal': 564.385057, 'UL': 364.0807355821219, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_536_284_536_284.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_624_345_624_345.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_909_610_909_610.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_299_189_299_189.slha', 'axes': {'x': 299.107766229, 'y': 189.729907684}, 't': 0.13906956397703965, 'signal': 7999.40637, 'UL': 1610.308906041934, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_222_84_222_84.slha', 'axes': {'x': 222.743357283, 'y': 84.1144153398}, 't': 0.13906956397703965, 'signal': 33992.5606, 'UL': 9310.742209674314, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_883_556_883_556.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_481_371_481_371.slha', 'axes': {'x': 481.562731255, 'y': 371.048432251}, 't': 0.13906956397703965, 'signal': 648.033209, 'UL': 431.69102876415013, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_744_475_744_475.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_519_301_519_301.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_717_462_717_462.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_253_114_253_114.slha', 'axes': {'x': 253.152518121, 'y': 114.334169434}, 't': 0.13906956397703965, 'signal': 18177.9471, 'UL': 4730.740147155078, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_152_41_152_41.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_147_38_147_38.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_561_412_561_412.slha', 'axes': {'x': 561.297692015, 'y': 412.452738543}, 't': 0.13906956397703965, 'signal': 271.906262, 'UL': 265.22813843535283, 'condition': 
0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_301_99_301_99.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_465_255_465_255.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_373_223_373_223.slha', 'axes': {'x': 373.150734601, 'y': 223.664074989}, 't': 0.13906956397703965, 'signal': 2560.84389, 'UL': 748.7392680614802, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_484_286_484_286.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_358_251_358_251.slha', 'axes': {'x': 358.574899398, 'y': 251.529073152}, 't': 0.13906956397703965, 'signal': 3153.48853, 'UL': 864.6223147302652, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_349_247_349_247.slha', 'axes': {'x': 349.718154114, 'y': 247.243522612}, 't': 0.13906956397703965, 'signal': 3581.67934, 'UL': 930.6403381633811, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_587_446_587_446.slha', 'axes': {'x': 587.653287335, 'y': 446.751464473}, 't': 0.13906956397703965, 'signal': 209.631152, 'UL': 231.1055803712141, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_184_62_184_62.slha', 'axes': {'x': 184.227065409, 'y': 62.0526049153}, 't': 0.13906956397703965, 'signal': 83618.4805, 'UL': 27857.785037523437, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_427_243_427_243.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_461_329_461_329.slha', 'axes': {'x': 461.963078466, 'y': 329.95141993}, 't': 0.13906956397703965, 'signal': 817.336777, 'UL': 419.3141873744556, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_491_374_491_374.slha', 'axes': {'x': 491.582120778, 'y': 374.164051509}, 't': 0.13906956397703965, 'signal': 578.922269, 'UL': 365.53780555555545, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_777_442_777_442.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_637_332_637_332.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_823_546_823_546.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_846_523_846_523.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_525_345_525_345.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_306_13_306_13.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_624_510_624_510.slha', 'axes': {'x': 624.073055246, 'y': 510.516359455}, 't': 0.13906956397703965, 'signal': 147.73504400000002, 'UL': 265.2369312747807, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_706_570_706_570.slha', 'axes': {'x': 706.587553673, 'y': 570.349795407}, 't': 0.13906956397703965, 'signal': 69.64499559999999, 'UL': 165.527498193315, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_578_241_578_241.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_807_563_807_563.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_765_637_765_637.slha', 'axes': {'x': 765.93702191, 'y': 637.436888352}, 't': 0.13906956397703965, 'signal': 42.836120300000005, 'UL': 141.48281688745595, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_522_390_522_390.slha', 'axes': {'x': 522.781400141, 'y': 390.390928119}, 't': 0.13906956397703965, 'signal': 408.21731900000003, 'UL': 328.3229219987812, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_438_293_438_293.slha', 'axes': {'x': 438.224572811, 'y': 293.019201343}, 't': 0.13906956397703965, 'signal': 1096.21587, 'UL': 479.3601588069206, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_695_485_695_485.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_742_427_742_427.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_742_595_742_595.slha', 'axes': {'x': 742.401468535, 'y': 595.130920388}, 't': 0.13906956397703965, 'signal': 52.0016247, 'UL': 143.72391119796603, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_210_96_210_96.slha', 'axes': {'x': 210.582660729, 'y': 96.3513308446}, 't': 
0.13906956397703965, 'signal': 44513.9056, 'UL': 9015.936366320282, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_796_624_796_624.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_804_637_804_637.slha', 'axes': {'x': 804.057803112, 'y': 637.593800613}, 't': 0.13906956397703965, 'signal': 27.1329992, 'UL': 118.67285841530165, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_386_284_386_284.slha', 'axes': {'x': 386.281683224, 'y': 284.468141803}, 't': 0.13906956397703965, 'signal': 2135.23237, 'UL': 758.2527606619777, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_446_275_446_275.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_350_136_350_136.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_565_254_565_254.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_291_170_291_170.slha', 'axes': {'x': 291.67409005, 'y': 170.814561971}, 't': 0.13906956397703965, 'signal': 9045.43396, 'UL': 1924.833903843325, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_429_140_429_140.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_547_426_547_426.slha', 'axes': {'x': 547.785806955, 'y': 426.049311326}, 't': 0.13906956397703965, 'signal': 312.79601299999996, 'UL': 321.67795761818735, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_467_152_467_152.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_474_145_474_145.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_226_80_226_80.slha', 'axes': {'x': 226.796922801, 'y': 80.0354435049}, 't': 0.13906956397703965, 'signal': 31103.7067, 'UL': 10004.12656907362, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_193_52_193_52.slha', 'axes': {'x': 193.685384952, 'y': 52.5350039671}, 't': 0.13906956397703965, 'signal': 66208.9969, 'UL': 26521.811511327658, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_878_541_878_541.slha', 'error': 'no results'}, 
{'slhafile': 'T2ttoff_572_461_572_461.slha', 'axes': {'x': 572.790213768, 'y': 461.707694534}, 't': 0.13906956397703965, 'signal': 242.126945, 'UL': 285.6832616844888, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_883_637_883_637.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_460_199_460_199.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_289_111_289_111.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_205_101_205_101.slha', 'axes': {'x': 205.177906704, 'y': 101.789959958}, 't': 0.13906956397703965, 'signal': 50509.4038, 'UL': 8584.103231092158, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_570_350_570_350.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_925_594_925_594.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_902_617_902_617.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_600_269_600_269.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_530_382_530_382.slha', 'axes': {'x': 530.888531178, 'y': 382.232984449}, 't': 0.13906956397703965, 'signal': 372.926172, 'UL': 283.3660189779752, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_148_37_148_37.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_156_36_156_36.slha', 'axes': {'x': 156.839897533, 'y': 36.820198787}, 't': 0.13906956397703965, 'signal': 174962.374, 'UL': 94834.7767750579, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_500_269_500_269.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_416_156_416_156.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_350_70_350_70.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_702_574_702_574.slha', 'axes': {'x': 702.533988155, 'y': 574.428767242}, 't': 0.13906956397703965, 'signal': 72.2211588, 'UL': 175.61006265769547, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_514_205_514_205.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_328_221_328_221.slha', 'axes': {'x': 328.165738561, 'y': 
221.309319057}, 't': 0.13906956397703965, 'signal': 4996.135429999999, 'UL': 1094.9379394397865, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_790_476_790_476.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_363_56_363_56.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_528_342_528_342.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_197_48_197_48.slha', 'axes': {'x': 197.73895047, 'y': 48.4560321322}, 't': 0.13906956397703965, 'signal': 59916.618800000004, 'UL': 34632.479780770365, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_421_199_421_199.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_426_92_426_92.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_251_19_251_19.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_561_438_561_438.slha', 'axes': {'x': 561.342475085, 'y': 438.803288338}, 't': 0.13906956397703965, 'signal': 271.75678899999997, 'UL': 315.55737377840984, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_733_534_733_534.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_731_606_731_606.slha', 'axes': {'x': 731.591960486, 'y': 606.008178614}, 't': 0.13906956397703965, 'signal': 56.2834737, 'UL': 209.28347667378142, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_394_276_394_276.slha', 'axes': {'x': 394.38881426, 'y': 276.310198133}, 't': 0.13906956397703965, 'signal': 1908.45387, 'UL': 697.1753327916583, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_265_162_265_162.slha', 'axes': {'x': 265.996228379, 'y': 162.229468147}, 't': 0.13906956397703965, 'signal': 14303.1406, 'UL': 2579.726340120616, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'T2ttoff_843_597_843_597.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_340_59_340_59.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_348_172_348_172.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_118_7_118_7.slha', 'error': 'no 
results'}, {'slhafile': 'T2ttoff_676_494_676_494.slha', 'error': 'no results'}, {'slhafile': 'T2ttoff_839_513_839_513.slha', 'error': 'no results'}] | [
"lessa.a.p@gmail.com"
] | lessa.a.p@gmail.com |
1d360fbb5e6d6248351227c7dbb7361fc5d36f9b | c71d332dd845036c21c9fd8f4f571f9209bf2672 | /Binary Tree Zingzag Level Order Traversal.py | 5573d8950e53f17add11faff31d2f61793ba6f3e | [] | no_license | diksha12p/DSA_Practice_Problems | 2884fd9e77094d9662cb8747744dd2ef563e25e4 | d56e3d07620d51871199f61ae82cff2bd75b4744 | refs/heads/master | 2023-01-20T15:31:37.824918 | 2020-11-29T21:37:12 | 2020-11-29T21:37:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,425 | py | """
LC 103. Binary Tree Zigzag Level Order Traversal
Given a binary tree, return the zigzag level order traversal of its nodes' values. (ie, from left to right, then right to left for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
"""
from typing import List
class Node:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def zigzagLevelOrder(self, root: Node) -> List[List[int]]:
if not root:
return []
queue = [root]
flag, result = 1, []
while queue:
list_entry = list()
if flag == -1:
result.append([node.val for node in queue[::-1] if node is not None ])
else:
result.append([node.val for node in queue if node is not None])
flag *= -1
for node in queue:
if node.left: list_entry.append(node.left)
if node.right: list_entry.append(node.right)
queue = list_entry
return result
root = Node(3)
root.left = Node(9)
root.right = Node(20)
# root.left.left = Node(7)
# root.left.right = Node(6)
root.right.left = Node(15)
root.right.right = Node(7)
sol = Solution()
print(sol.zigzagLevelOrder(root)) | [
"noreply@github.com"
] | diksha12p.noreply@github.com |
551d529e69d4ddcf6ee18ff69888a85c3916d14c | 7e96ba20c25c6fb56af6ccd36b3b6d68df6a081c | /Kyle_Marienthal/DJANGO/TRAVEL_BUDDY_REDO/apps/travels_app/views.py | 39b2f550fcb8e27dacd385ab3fc16e1471ac4812 | [] | no_license | CodingDojoDallas/python_september_2017 | 9d8cd74131a809bc6b13b7f465594cf8b1e2fd75 | f9f2f7b39bf9c4fceda3df5dc7424164aa5d5df5 | refs/heads/master | 2021-01-23T08:52:22.899994 | 2017-10-30T17:00:55 | 2017-10-30T17:00:55 | 102,558,291 | 2 | 14 | null | 2018-01-13T05:28:34 | 2017-09-06T03:28:38 | Python | UTF-8 | Python | false | false | 1,573 | py | from django.shortcuts import render, redirect, reverse
from django.contrib import messages
from .models import Trip, User
# Create your views here.
# if request.method == "POST":
# context = {
# 'current_user' : current_user(request)
# }
def flash_errors(errors, request):
print '*****youre in the flash_errors method*****'
for error in errors:
messages.error(request, error)
def current_user(request):
print '*****youre in the current_user method*****'
if 'user_id' in request.session:
return User.objects.get(id=request.session['user_id'])
def dashboard(request):
print '***** youre in the travel dashboard method*****'
trips = Trip.objects.all()
other_users = {User.objects.all().exclude(id=current_user(request).id)}
context = {
'current_user' : current_user(request),
'trips' : trips,
'other_users' : other_users
}
return render(request, 'travels_app/dashboard.html', context)
def add_trip(request):
print '***** youre in the travel add_trip method*****'
context = {
'current_user' : current_user(request)
}
return render(request, 'travels_app/add_trip.html', context)
def create_trip(request):
print '***** youre in the travel create_trip method*****'
user = current_user(request)
trips = Trip.objects.create_trip(request.POST, user)
return redirect(reverse('dashboard'))
def destination(request, id):
context = {
'current_user' : current_user(request)
}
return render(request, 'travels_app/destination.html', context)
| [
"kylemarienthal@gmail.com"
] | kylemarienthal@gmail.com |
cd5f6d120bc92d0c0c9c0e040a3e4b7b80b53691 | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/test/test_io_k8s_api_core_v1_service_account_token_projection.py | 05a59298e73201e88e387277ff653382026d9a8f | [] | no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import argo
from models.io_k8s_api_core_v1_service_account_token_projection import IoK8sApiCoreV1ServiceAccountTokenProjection # noqa: E501
from argo.rest import ApiException
class TestIoK8sApiCoreV1ServiceAccountTokenProjection(unittest.TestCase):
"""IoK8sApiCoreV1ServiceAccountTokenProjection unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIoK8sApiCoreV1ServiceAccountTokenProjection(self):
"""Test IoK8sApiCoreV1ServiceAccountTokenProjection"""
# FIXME: construct object with mandatory attributes with example values
# model = argo.models.io_k8s_api_core_v1_service_account_token_projection.IoK8sApiCoreV1ServiceAccountTokenProjection() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"nfillot@weborama.com"
] | nfillot@weborama.com |
922489df49a0e7b815588906680161ed4b166cc8 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2/perel/bs.py | f2dc670dffa12519d8b54937cfa2bb0693987963 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,003 | py | import time
##import sys
##sys.setrecursionlimit(10002)
from collections import deque
def flip(s):
return tuple(reversed([not i for i in s]))
def solve(s):
s=tuple([i=='+' for i in s])
return str(solve11(s))
def solve11(s):
for i in range(len(s)-1,-1,-1):
if not s[i]:
break
else:
return 0
s=s[:i+1]
step=0
for i in range(len(s)):
if not s[i]:
break
else:
return 1
if i:
step+=1
s=flip(s[:i])+s[i:]
return solve11(flip(s))+step+1
def main():
fi=file('bl.in')
fo=file('b.out','w')
time0=time.time()
t=int(fi.readline())
for ti in range(t):
time1=time.time()
s=fi.readline()[:-1]
ans="Case #%d: %s"%(ti+1,solve(s))
print ans,"%.3f"%(time.time()-time1)
fo.write(ans+'\n')
print "%.3f"%(time.time()-time0)
fi.close()
fo.close()
if __name__ == '__main__':
main()
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
f67d673e20f5b4ea25b5e71532405d816a977b47 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2580/8318/319102.py | 3935e927bbc2f82b8da2de889d54f179ef80f578 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | m = int(input())
n = int(input())
ops = int(input())
list = [[]]
x = m
y = n
for i in range(ops):
a,b = input().split(",")
a1=int(a)
b1=int(b)
if x>a1 :
x=a1
if y>b1:
y=b1
print(x*y) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
bcbb60f98cfc63d55a8855fc311ca5dd1180d585 | a6d7f52f838e60e86fad5b85a28941e7da048262 | /backend/manage.py | 9b53997b7e8d3eb1b6f74f8b571488a6fc6b5a3d | [] | no_license | crowdbotics-apps/dateanerd-20416 | c5bdb51e601cca686bc3d594487b79e19b224379 | 55080989d6578f5c591066c1366a4e5f9e10c6c9 | refs/heads/master | 2022-12-16T05:55:45.725899 | 2020-09-18T21:03:24 | 2020-09-18T21:03:24 | 296,728,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dateanerd_20416.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
f8ef5490192913bf8811b092910983b29985832f | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/arc094/A/4025845.py | 9522ebc713f410e45b2cfec4a62d282a3b36443e | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import array
from bisect import *
from collections import *
import fractions
import heapq
from itertools import *
import math
import random
import re
import string
import sys
nums = list(map(int, input().split()))
nums.sort()
ans = 0
while nums[0] != nums[-1]:
if nums[0] < nums[1]:
nums[0] += 2
else:
nums[0] += 1
nums[1] += 1
ans += 1
nums.sort()
print(ans) | [
"kwnafi@yahoo.com"
] | kwnafi@yahoo.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.