import requests
payload = {'key1': 'value1', 'key2': ['value2', 'value3']}
headers = {'user-agent': '1'}
r = requests.get("https://www.baidu.com", params=payload, headers=headers)
# print(r.url)
# print(r.text)
# print(r.status_code)
# print(r.headers)  # r.headers['Content-Type'] or r.headers.get('content-type')
# print(r.encoding)
# print(r.apparent_encoding)
# print(r.content)
# print(r.json())
# Binary content, e.g. an image
# from PIL import Image
# from io import BytesIO
#
# i=Image.open(BytesIO(r.content))
# POST request with a JSON body
# url='https://api.github.com/some/endpoint'
# payload={'some':'data'}
# r=requests.post(url,json=payload)
# Upload a file
# files={'file':('report.xls',open('report.xls','rb'),'application/vnd.ms-excel',{'Expires':'0'})}
# r=requests.post(url,files=files)
# r.text
# Send cookies to the server
# cookies=dict(cookies_are='working')
# r=requests.get(url,cookies=cookies)
# Disable redirect handling
# r=requests.get(url,allow_redirects=False)
# Response headers returned by the server
# r.headers
# Headers of the request we sent to the server
# r.request.headers
# Custom authentication
# from requests.auth import AuthBase
#
# class PizzaAuth(AuthBase):
# def __init__(self,username):
# self.username=username
#
# def __call__(self,r):
#         r.headers['X-Pizza'] = self.username
# return r
#
# requests.get('http://pizzabin.org/admin',auth=PizzaAuth('rockyfire'))
def getHtmlText(url):
    try:
        headers = {'user-agent': '1'}
        r = requests.get(url, headers=headers, timeout=30, verify=True)  # timeout is in seconds
        r.raise_for_status()  # raises HTTPError unless r.status_code == requests.codes.ok
        r.encoding = r.apparent_encoding
        return r.text
    except requests.RequestException:
        return "Something Wrong!"
if __name__=="__main__":
# url = "https://www.baidu.com"
url = "https://www.github.com"
print (getHtmlText(url))
|
__author__ = 'QC1'
from main.page.base import *
from selenium.webdriver.common.by import By
from utils.function.general import *
import os, time, sys, json, requests
import urllib.parse
import urllib.request
class AdminPage(BasePage):
_tokopedia_backend_image_loc = (By.XPATH, "/html/body/div[1]/div/div/a/img")
#backend tab locators
_general_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[1]")
_user_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[2]")
_shop_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[3]")
_catalog_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[4]")
_product_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[5]")
_transaction_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[6]")
_order_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[7]")
_statistic_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[8]")
_monitor_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[9]")
_shipping_agency_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[10]")
_seo_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[11]")
_marketing_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[12]")
_system_tab_loc = (By.XPATH, "/html/body/div[1]/div/ul/li[13]")
_abuser_name_input_loc = (By.XPATH, "/html/body/div[2]/div/div[1]/div[1]/div[1]/input")
_total_abuse_input_loc = (By.XPATH, "/html/body/div[2]/div/div[1]/div[1]/div[2]/input")
_search_abuser_button_loc = (By.XPATH, "/html/body/div[2]/div/div[1]/div[2]/button")
_view_abuse_button_loc = (By.XPATH, "/html/body/div[2]/div/div[2]/div/div[4]/div[2]/table/tbody/tr/td[5]/a[1]")
#look up the report description and compare it with dict['description']
_reason_message_loc = (By.CSS_SELECTOR, "html.dialog-mode body.img-down.admin-page div#dialog.jqmWindow.jqmID1 div.jqm-inner div.content div#content-table div#admin-view-abuse_wrapper.dataTables_wrapper table#admin-view-abuse.display.data-table tbody tr.odd td.fs-12 div")
def domain(self, site, x=""):
self._open(site, x)
self.target_domain = x
    def check_admin_page(self):
        # Hover over each backend element in turn to verify it is present.
        checks = [
            ("tokopedia image", self._tokopedia_backend_image_loc),
            ("general tab", self._general_tab_loc),
            ("user tab", self._user_tab_loc),
            ("shop tab", self._shop_tab_loc),
            ("catalog tab", self._catalog_tab_loc),
            ("product tab", self._product_tab_loc),
            ("transaction tab", self._transaction_tab_loc),
            ("order tab", self._order_tab_loc),
            ("statistic tab", self._statistic_tab_loc),
            ("monitor tab", self._monitor_tab_loc),
            ("shipping tab", self._shipping_agency_tab_loc),
            ("seo tab", self._seo_tab_loc),
            ("marketing tab", self._marketing_tab_loc),
            ("system tab", self._system_tab_loc),
        ]
        for index, (name, locator) in enumerate(checks, start=1):
            print("Inspecting elements on backend page #%d: %s..." % (index, name))
            self.mouse_hover_to(*locator)
            print("Inspecting elements on backend page #%d succeeded: %s found!" % (index, name))
            time.sleep(1)
        print("All elements found! Backend element inspection completed!")
    def search_abuser_name_and_report(self, abuserName, desc):
        print("Searching for the report started")
        print("Sending the abuser name..")
        self.find_element(*self._abuser_name_input_loc).send_keys(abuserName)
        self.find_element(*self._search_abuser_button_loc).click()
        print("Finding the abuser name. . .")
        time.sleep(1)
        print("Name found! Checking reason. . .")
        self.find_element(*self._view_abuse_button_loc).click()
        time.sleep(2)
        # Fetch the reason text once, then print and return it.
        reasonMessage = self.find_element(*self._reason_message_loc).text
        print("The reason message in the backend is : ", reasonMessage)
        time.sleep(2)
        return reasonMessage
|
# -*- coding: utf-8 -*-
"""
ytelapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class Body73(object):
"""Implementation of the 'body_73' model.
TODO: type model description here.
Attributes:
        number_type (NumberType2Enum): The capability the number supports.
        area_code (string): Specifies the area code for the returned list of
            available numbers. Only available for North American numbers.
        quantity (string): A positive integer specifying how many numbers you
            want to buy at a time.
        leftover (string): If the desired quantity is unavailable, purchase
            what is available.
"""
# Create a mapping from Model property names to API property names
_names = {
"number_type":'NumberType',
"area_code":'AreaCode',
"quantity":'Quantity',
"leftover":'Leftover'
}
def __init__(self,
number_type=None,
area_code=None,
quantity=None,
leftover=None):
"""Constructor for the Body73 class"""
# Initialize members of the class
self.number_type = number_type
self.area_code = area_code
self.quantity = quantity
self.leftover = leftover
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
number_type = dictionary.get('NumberType')
area_code = dictionary.get('AreaCode')
quantity = dictionary.get('Quantity')
leftover = dictionary.get('Leftover')
# Return an object of this model
return cls(number_type,
area_code,
quantity,
leftover)
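# A minimal usage sketch (hypothetical payload values) of the dictionary
# round-trip this model supports:
#
# body = Body73.from_dictionary({'NumberType': 'SMS', 'AreaCode': '415',
#                                'Quantity': '5', 'Leftover': 'true'})
# assert body.quantity == '5'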
|
import subprocess

# Read a whitespace-separated command from stdin and run it.
# Note: this executes arbitrary user input; only use it in trusted contexts.
cmd = input().split()
subprocess.run(cmd)
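# str.split() breaks on quoted arguments; shlex handles them. A small sketch:
#
# import shlex
# cmd = shlex.split('echo "hello world"')  # ['echo', 'hello world']
# subprocess.run(cmd)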
|
# Reading and writing files
with open("D:/hello.txt", "r") as f:
    a = f.readlines()
print(a)
with open("D:/hello2.txt", "w") as b:
    b.writelines(a)
print("Done!")
|
import time,re
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import pandas as pd
def spider(artist):
    driver = webdriver.Chrome()
    driver.implicitly_wait(5)
    driver.get("http://tool.liumingye.cn/music/?page=searchPage")
    input_tag = driver.find_element_by_id('input')
    input_tag.send_keys(artist)
    input_tag.send_keys(Keys.ENTER)
    download_icons = driver.find_elements_by_class_name('init')
    for item in download_icons:
        # Note: find_element_by_class_name fails when the class attribute
        # contains spaces; a CSS attribute selector handles compound classes.
        downloader_icon = item.find_element_by_css_selector("[class='aplayer-list-download iconfont icon-xiazai']")
        downloader_icon.click()
        links = driver.find_elements_by_css_selector("[class='btn btn-outline-secondary download']")
        # After extracting the download links, close the dialog and go back
        # one level so the loop can move on to the next item.
        for link in links:
            print(link.get_attribute('outerHTML'))
        time.sleep(2)
    driver.quit()
#spider('周杰伦')
# token_m=re.compile('resourceType=')
# musical_urls=[['http://218.205.239.34/MIGUM2.0/v1.0/content/sub/listenSong.do?toneFlag=LQ&netType=00&copyrightId=0&contentId=600907000009041441&resourceType=2&channel=0'], ['http://218.205.239.34/MIGUM2.0/v1.0/content/sub/listenSong.do?toneFlag=PQ&netType=00&copyrightId=0&contentId=600907000009041441&resourceType=2&channel=0'], ['http://218.205.239.34/MIGUM2.0/v1.0/content/sub/listenSong.do?toneFlag=HQ&netType=00&copyrightId=0&contentId=600907000009041441&resourceType=2&channel=0'], ['http://218.205.239.34/MIGUM2.0/v1.0/content/sub/listenSong.do?toneFlag=SQ&netType=00&copyrightId=0&contentId=600907000009041441&resourceType=E&channel=0'], [], [], []]
# musical_urls=list(filter(None,musical_urls))
# musical_urls=[musical_url[0] for musical_url in musical_urls]
#
# for url in musical_urls:
# type_pos=token_m.search(url).span()[1]
# type=url[type_pos:type_pos+1]
#
#
#
# print(type)
download_df = pd.DataFrame(columns=['Artist', 'Music_name', 'Quality', 'Url'])
a=['a']*4
download_df['Artist']=a
print(download_df)
|
from bs4 import BeautifulSoup
from urllib.request import urlopen

def retrieveRecipe(url):
    recipePage = urlopen(url)
    soup = BeautifulSoup(recipePage.read(), "html.parser")
recipeInfo = {}
# Recipe Components
recipeInfo["title"] = soup.find(id="itemTitle").string
recipeInfo["rating"] = soup.find(itemprop="ratingValue")["content"]
recipeInfo["author"] = soup.find("span", {"id": "lblSubmitter"}).text
recipeInfo["servings"] = soup.find(id="lblYield").string
recipeInfo["time"] = soup.find_all("span", {"class":"time"})
if recipeInfo["time"]:
recipeInfo["time"] = recipeInfo["time"][0].text
    ingredientsListing = soup.find_all(itemprop="ingredients")
ingredients = []
for ingredient in ingredientsListing:
if ingredient.find_next(id="lblIngName"):
nextEl = ingredient.find_next(id="lblIngName")
if nextEl["class"][0] == "ingred-heading" or nextEl.string.replace(u'\xa0', u' ') == " ":
continue
else:
amount = ""
name = ""
if ingredient.find_next(id="lblIngAmount"):
amount = ingredient.find_next(id="lblIngAmount").string
if ingredient.find_next(id="lblIngName"):
name = ingredient.find_next(id="lblIngName").string
ingredients.append({"name": name, "amount": amount})
recipeInfo["ingredients"] = ingredients
directionsListing = soup.find_all("span", {"class":"plaincharacterwrap break"})
directions = []
for direction in directionsListing:
directions.append(direction.string)
recipeInfo["directions"] = directions
return recipeInfo
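# A minimal usage sketch (hypothetical URL; the element ids above appear to
# target an older allrecipes.com layout):
#
# info = retrieveRecipe("https://www.allrecipes.com/recipe/00000/")
# print(info["title"], info["rating"])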
|
def calculate_sum(a, N):
    # Sum of the multiples of a up to N: a * (1 + 2 + ... + m) with m = N // a.
    m = N // a
    total = m * (m + 1) // 2
    ans = a * total
    print("Sum of multiples of", a, "up to", N, "=", ans)

calculate_sum(7, 49)
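# Worked check: the multiples of 7 up to 49 are 7, 14, 21, 28, 35, 42, 49;
# m = 49 // 7 = 7, so ans = 7 * (7 * 8 // 2) = 7 * 28 = 196.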
|
import numpy as np
import readData
import matplotlib.pyplot as plt
days = readData.days
flow = np.array(readData.flow)
flowList = np.array(readData.flowList)
time = np.array(readData.time)
postMile = np.array(readData.postMile)
fPM = 67.99
timeSlot = 24*12
points = 136
flowAtPoint = np.empty((0, days))
'''
indexTmp = np.where(postMile == fPM)
print (indexTmp)
for i in range(0, np.size(indexTmp)):
if(time[indexTmp[0][i]] == fTime):
indexT = indexTmp[0][i]
print (indexT)
tmpArray = np.array([])
for i in range (0, days):
flowAtPoint = np.append(flowAtPoint, flow[i][indexT])
print (flowAtPoint)
'''
def Analysis(fTime):
    # Locate the measurement index at post mile fPM for the given time slot.
    indexTmp = np.where(postMile == fPM)
    for i in range(0, np.size(indexTmp)):
        if time[indexTmp[0][i]] == fTime:
            indexT = indexTmp[0][i]
    # Collect the flow at that index across all days.
    tmpArray = np.array([])
    for i in range(0, days):
        tmpArray = np.append(tmpArray, flow[i][indexT])
    print(np.size(tmpArray))
    return tmpArray
for i, val in enumerate (np.unique(time)):
print(val)
tmpArray = Analysis(val)
flowAtPoint = np.append(flowAtPoint, [tmpArray], axis=0)
print (flowAtPoint)
print(np.shape(flowAtPoint))
pCoeffT = np.empty([0, np.size(np.unique(time))])
print(pCoeffT)
print(np.shape(pCoeffT))
for i in range(0, np.size(np.unique(time))):
pCoeffT = np.append(pCoeffT, np.corrcoef(flowAtPoint[160], flowAtPoint[i])[0][1])
print (pCoeffT)
plt.figure(1)
plt.plot(pCoeffT)
plt.xlabel("Time slot")
plt.ylabel("Correlation")
plt.show()
|
N = [-10, -5, 1, 2, 3, 6, 5]
def print_positive(array: list):
for num in array:
if num > 0:
print(num)
print_positive(N)
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from note import views
app_name = 'note'
urlpatterns = [
url(r'^api/notes/$', views.AllNote.as_view()),
url(r'^api/ready_notes/$', views.ListReadyNotes.as_view()),
url(r'^api/no_ready_notes/$', views.ListNotReadyList.as_view()),
url(r'^api/notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
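# format_suffix_patterns additionally lets clients request a response format
# via a URL suffix, e.g. /api/notes.json, instead of only the Accept header.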
|
# This file makes the python files in this folder accessible from other folders
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 13:54:10 2020
https://gist.github.com/CMCDragonkai/dd420c0800cba33142505eff5a7d2589
"""
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import cv2
def surface_plot(matrix, **kwargs):
# acquire the cartesian coordinate matrices from the matrix
# x is cols, y is rows
(x, y) = np.meshgrid(np.arange(matrix.shape[1]), np.arange(matrix.shape[0]))
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(x, y, matrix, **kwargs)
return (fig, ax, surf)
if __name__ == "__main__":
    img = cv2.imread('pueba3d.png', 0)  # 0 = load as grayscale
(fig2, ax2, surf) = surface_plot(img, cmap=plt.cm.coolwarm)
fig2.colorbar(surf)
ax2.set_xlabel('X (cols)')
ax2.set_ylabel('Y (rows)')
ax2.set_zlabel('Z (values)')
plt.show()
|
from office365.runtime.client_value import ClientValue
from office365.sharepoint.principal.principal_source import PrincipalSource
from office365.sharepoint.principal.principal_type import PrincipalType
class ClientPeoplePickerQueryParameters(ClientValue):
def __init__(self, queryString, allowEmailAddresses=True, allowMultipleEntities=True, allowOnlyEmailAddresses=False,
allUrlZones=False, enabledClaimProviders=None, forceClaims=False, maximumEntitySuggestions=1,
principalSource=PrincipalSource.All, principalType=PrincipalType.All, urlZone=0,
urlZoneSpecified=False, sharePointGroupID=0):
"""
Specifies the properties of a principal query
:type int urlZone: Specifies a location in the topology of the farm for the principal query.
:param int sharePointGroupID: specifies a group containing allowed principals to be used in the principal query.
:param str queryString: Specifies the value to be used in the principal query.
:param int principalType: Specifies the type to be used in the principal query.
:param int principalSource: Specifies the source to be used in the principal query.
:param int maximumEntitySuggestions: Specifies the maximum number of principals to be returned by the
principal query.
:param bool forceClaims: Specifies whether the principal query SHOULD be handled by claims providers.
:param bool enabledClaimProviders: Specifies the claims providers to be used in the principal query.
:param bool allUrlZones: Specifies whether the principal query will search all locations in the topology
of the farm.
:param bool allowOnlyEmailAddresses: Specifies whether to allow the picker to resolve only email addresses as
valid entities. This property is only used when AllowEmailAddresses (section 3.2.5.217.1.1.1) is set to True.
Otherwise it is ignored.
:param bool allowMultipleEntities: Specifies whether the principal query allows multiple values.
:param bool allowEmailAddresses: Specifies whether the principal query can return a resolved principal
matching an unverified e-mail address when unable to resolve to a known principal.
"""
super().__init__()
self.QueryString = queryString
self.AllowEmailAddresses = allowEmailAddresses
self.AllowMultipleEntities = allowMultipleEntities
self.AllowOnlyEmailAddresses = allowOnlyEmailAddresses
self.AllUrlZones = allUrlZones
self.EnabledClaimProviders = enabledClaimProviders
self.ForceClaims = forceClaims
self.MaximumEntitySuggestions = maximumEntitySuggestions
self.PrincipalSource = principalSource
self.PrincipalType = principalType
self.UrlZone = urlZone
self.UrlZoneSpecified = urlZoneSpecified
self.SharePointGroupID = sharePointGroupID
@property
def entity_type_name(self):
return "SP.UI.ApplicationPages.ClientPeoplePickerQueryParameters"
|
#!/usr/bin/env python
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
tgltf.py : Shakedown analytic geometry
==========================================================
Loads test events from Opticks
Create the events by running tgltf-transitional
Huh, top of cyl-z should not be there::
In [8]: lpos[lpos[:,2] > 1500 ][:100]
Out[8]:
A()sliced
A([[ -367.125 , 236.7812, 1535. , 1. ],
[ 337. , -1032. , 1535. , 1. ],
[ 568.8125, -1328.9688, 1535. , 1. ],
[ 1212.875 , -858.375 , 1535. , 1. ],
[ 137.0625, -371.6875, 1535. , 1. ],
[ 849.6875, 997.6562, 1545.9814, 1. ],
[ -936.5625, 868.7812, 1547.71 , 1. ],
[ 196.3125, 411.9688, 1535. , 1. ],
[ -55.625 , -304.75 , 1535. , 1. ],
[ -144.5 , -538.3125, 1535. , 1. ],
[ 1299.0625, -612.9375, 1535. , 1. ],
[ -407.5 , 13.3438, 1535. , 1. ],
[ 865.375 , 370.4062, 1535. , 1. ],
[ 416.75 , 478.5938, 1535. , 1. ],
[ 431.75 , 800.6875, 1535. , 1. ],
[ -8.5625, 1549.9375, 1526.9644, 1. ],
[ 948.25 , -512.3438, 1535. , 1. ],
[ 229. , -32.5625, 1535. , 1. ],
[-1007.125 , -461.25 , 1535. , 1. ],
[ -74.6875, -607.125 , 1535. , 1. ],
[ 503.625 , -807.9062, 1535. , 1. ],
[ 160.125 , -1057.0625, 1535. , 1. ],
[ -798.3125, 67.3125, 1535. , 1. ],
[-1278.25 , 865.4062, 1535. , 1. ],
[ -509.625 , 477.1562, 1535. , 1. ],
[ -141.875 , 1289.5 , 1535. , 1. ],
"""
import os, sys, logging, argparse, numpy as np
import numpy.linalg as la
log = logging.getLogger(__name__)
from opticks.ana.base import opticks_main
from opticks.ana.nbase import vnorm
from opticks.ana.evt import Evt
from opticks.analytic.sc import gdml2gltf_main
if __name__ == '__main__':
np.set_printoptions(precision=4, linewidth=200)
os.environ['OPTICKS_QUERY']="range:3159:3160"
args = opticks_main(doc=__doc__, tag="1", src="torch", det="gltf" )
sc = gdml2gltf_main(args)
tx = sc.get_transform(3159)
    print(tx)
    itx = la.inv(tx)
    print(itx)
log.info("tag %s src %s det %s " % (args.utag,args.src,args.det))
seqs=[]
try:
a = Evt(tag="%s" % args.utag, src=args.src, det=args.det, seqs=seqs, args=args)
except IOError as err:
log.fatal(err)
#sys.exit(args.mrc) this causes a sysrap-t test fail from lack of a tmp file
sys.exit(0)
log.info( " a : %s " % a.brief)
    print(a.seqhis_ana.table)
    a.sel = "TO SA"
    ox = a.ox
    print(ox.shape)  # masked array with those photons
pos = ox[:,0,:4]
pos[:,3] = 1.
lpos = np.dot( pos, itx )
|
#!/usr/local/bin/python3
import asyncio
import aiohttp
import logging
logger = logging.getLogger('discord')
async def download_page(url):
    headers = {
        'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36"
    }
    try:
        # ClientSession picks up the running loop itself; passing loop= is deprecated.
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers) as r:
                if r.status == 200:
                    return await r.text()
    except Exception as e:
        logger.error(e)
    return None
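# A minimal usage sketch (hypothetical URL):
#
# if __name__ == '__main__':
#     html = asyncio.run(download_page('https://example.com'))
#     print(html is not None)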
|
# -*- coding: utf-8 -*-
import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.DataType as DataType
print("Importing Flow Core Technology Master Data...")
tr = service.transaction()
# ==============================================================================
#
# FILE FORMATS
#
# ==============================================================================
# CSV
file_type_CSV = tr.getOrCreateNewFileFormatType('CSV')
file_type_CSV.setDescription('files with values separated by comma or semicolon')
# FCS
file_type_FCS = tr.getOrCreateNewFileFormatType('FCS')
file_type_FCS.setDescription('Flow Cytometry Standard file.')
# UNKNOWN
file_type_UNKNOWN = tr.getOrCreateNewFileFormatType('UNKNOWN')
file_type_UNKNOWN.setDescription('Unknown file format')
# ==============================================================================
#
# VOCABULARIES
#
# ==============================================================================
# BD LSR FORTESSA
# ------------------------------------------------------------------------------
# LSR_FORTESSA_PLATE_GEOMETRY
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY = tr.getOrCreateNewVocabulary('LSR_FORTESSA_PLATE_GEOMETRY')
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.setDescription('Plate geometries for the BD LSR Fortessa Flow Cytometer.')
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.setUrlTemplate(None)
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.setManagedInternally(False)
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.setInternalNamespace(False)
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.setChosenFromList(True)
# LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12 = tr.createNewVocabularyTerm('96_WELLS_8X12')
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12.setDescription(None)
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12.setLabel(None)
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12.setOrdinal(1)
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.addTerm(vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_96_WELLS_8X12)
# LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24 = tr.createNewVocabularyTerm('384_WELLS_16X24')
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24.setDescription(None)
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24.setLabel(None)
vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24.setOrdinal(2)
vocabulary_LSR_FORTESSA_PLATE_GEOMETRY.addTerm(vocabulary_term_LSR_FORTESSA_PLATE_GEOMETRY_384_WELLS_16X24)
# BC CYTOFLEX S
# ------------------------------------------------------------------------------
# CYTOFLEX_S_PLATE_GEOMETRY
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY = tr.getOrCreateNewVocabulary('CYTOFLEX_S_PLATE_GEOMETRY')
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.setDescription('Plate geometries for the BC CytoFLEX S Flow Cytometer.')
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.setUrlTemplate(None)
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.setManagedInternally(False)
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.setInternalNamespace(False)
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.setChosenFromList(True)
# CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12
vocabulary_term_CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12 = tr.createNewVocabularyTerm('96_WELLS_8X12')
vocabulary_term_CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12.setDescription(None)
vocabulary_term_CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12.setLabel(None)
vocabulary_term_CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12.setOrdinal(1)
vocabulary_CYTOFLEX_S_PLATE_GEOMETRY.addTerm(vocabulary_term_CYTOFLEX_S_PLATE_GEOMETRY_96_WELLS_8X12)
# ==============================================================================
#
# EXPERIMENT/COLLECTION TYPES
#
# ==============================================================================
# COLLECTION
exp_type_COLLECTION = tr.getOrCreateNewExperimentType('COLLECTION')
exp_type_COLLECTION.setDescription('Used as a folder for things.')
# ==============================================================================
#
# SAMPLE TYPES
#
# ==============================================================================
# COMMON
# ------------------------------------------------------------------------------
# ORGANIZATION_UNIT
samp_type_ORGANIZATION_UNIT = tr.getOrCreateNewSampleType('ORGANIZATION_UNIT')
samp_type_ORGANIZATION_UNIT.setDescription('Used to create different organisations for samples since they can\'t belong to more than one experiment.')
samp_type_ORGANIZATION_UNIT.setListable(True)
samp_type_ORGANIZATION_UNIT.setShowContainer(False)
samp_type_ORGANIZATION_UNIT.setShowParents(True)
samp_type_ORGANIZATION_UNIT.setSubcodeUnique(False)
samp_type_ORGANIZATION_UNIT.setAutoGeneratedCode(True)
samp_type_ORGANIZATION_UNIT.setShowParentMetadata(False)
samp_type_ORGANIZATION_UNIT.setGeneratedCodePrefix('OU.')
# BD FACS ARIA
# ------------------------------------------------------------------------------
# FACS_ARIA_EXPERIMENT
samp_type_FACS_ARIA_EXPERIMENT = tr.getOrCreateNewSampleType('FACS_ARIA_EXPERIMENT')
samp_type_FACS_ARIA_EXPERIMENT.setDescription('Experiments from the BD FACS Aria III Cell Sorter.')
samp_type_FACS_ARIA_EXPERIMENT.setListable(True)
samp_type_FACS_ARIA_EXPERIMENT.setShowContainer(False)
samp_type_FACS_ARIA_EXPERIMENT.setShowParents(True)
samp_type_FACS_ARIA_EXPERIMENT.setSubcodeUnique(False)
samp_type_FACS_ARIA_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_FACS_ARIA_EXPERIMENT.setShowParentMetadata(False)
samp_type_FACS_ARIA_EXPERIMENT.setGeneratedCodePrefix('FACS_ARIA_EXPERIMENT.')
# FACS_ARIA_SPECIMEN
samp_type_FACS_ARIA_SPECIMEN = tr.getOrCreateNewSampleType('FACS_ARIA_SPECIMEN')
samp_type_FACS_ARIA_SPECIMEN.setDescription('A specimen for the BD FACS Aria III Cell Sorter.')
samp_type_FACS_ARIA_SPECIMEN.setListable(True)
samp_type_FACS_ARIA_SPECIMEN.setShowContainer(False)
samp_type_FACS_ARIA_SPECIMEN.setShowParents(True)
samp_type_FACS_ARIA_SPECIMEN.setSubcodeUnique(False)
samp_type_FACS_ARIA_SPECIMEN.setAutoGeneratedCode(True)
samp_type_FACS_ARIA_SPECIMEN.setShowParentMetadata(False)
samp_type_FACS_ARIA_SPECIMEN.setGeneratedCodePrefix('FACS_ARIA_SPECIMEN.')
# FACS_ARIA_TUBE
samp_type_FACS_ARIA_TUBE = tr.getOrCreateNewSampleType('FACS_ARIA_TUBE')
samp_type_FACS_ARIA_TUBE.setDescription('A tube for the BD FACS Aria III Cell Sorter.')
samp_type_FACS_ARIA_TUBE.setListable(True)
samp_type_FACS_ARIA_TUBE.setShowContainer(False)
samp_type_FACS_ARIA_TUBE.setShowParents(True)
samp_type_FACS_ARIA_TUBE.setSubcodeUnique(False)
samp_type_FACS_ARIA_TUBE.setAutoGeneratedCode(True)
samp_type_FACS_ARIA_TUBE.setShowParentMetadata(False)
samp_type_FACS_ARIA_TUBE.setGeneratedCodePrefix('FACS_ARIA_TUBE.')
# FACS_ARIA_TUBESET
samp_type_FACS_ARIA_TUBESET = tr.getOrCreateNewSampleType('FACS_ARIA_TUBESET')
samp_type_FACS_ARIA_TUBESET.setDescription('A virtual container for tubes for the BD FACS Aria III Cell Sorter.')
samp_type_FACS_ARIA_TUBESET.setListable(False)
samp_type_FACS_ARIA_TUBESET.setShowContainer(False)
samp_type_FACS_ARIA_TUBESET.setShowParents(True)
samp_type_FACS_ARIA_TUBESET.setSubcodeUnique(False)
samp_type_FACS_ARIA_TUBESET.setAutoGeneratedCode(True)
samp_type_FACS_ARIA_TUBESET.setShowParentMetadata(False)
samp_type_FACS_ARIA_TUBESET.setGeneratedCodePrefix('FACS_ARIA_TUBESET.')
# BD INFLUX
# ------------------------------------------------------------------------------
# INFLUX_EXPERIMENT
samp_type_INFLUX_EXPERIMENT = tr.getOrCreateNewSampleType('INFLUX_EXPERIMENT')
samp_type_INFLUX_EXPERIMENT.setDescription('Experiments from the BD Influx Cell Sorter.')
samp_type_INFLUX_EXPERIMENT.setListable(True)
samp_type_INFLUX_EXPERIMENT.setShowContainer(False)
samp_type_INFLUX_EXPERIMENT.setShowParents(True)
samp_type_INFLUX_EXPERIMENT.setSubcodeUnique(False)
samp_type_INFLUX_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_INFLUX_EXPERIMENT.setShowParentMetadata(False)
samp_type_INFLUX_EXPERIMENT.setGeneratedCodePrefix('INFLUX_EXPERIMENT.')
# INFLUX_SPECIMEN
samp_type_INFLUX_SPECIMEN = tr.getOrCreateNewSampleType('INFLUX_SPECIMEN')
samp_type_INFLUX_SPECIMEN.setDescription('A specimen for the BD Influx Cell Sorter.')
samp_type_INFLUX_SPECIMEN.setListable(True)
samp_type_INFLUX_SPECIMEN.setShowContainer(False)
samp_type_INFLUX_SPECIMEN.setShowParents(True)
samp_type_INFLUX_SPECIMEN.setSubcodeUnique(False)
samp_type_INFLUX_SPECIMEN.setAutoGeneratedCode(True)
samp_type_INFLUX_SPECIMEN.setShowParentMetadata(False)
samp_type_INFLUX_SPECIMEN.setGeneratedCodePrefix('INFLUX_SPECIMEN.')
# INFLUX_TUBE
samp_type_INFLUX_TUBE = tr.getOrCreateNewSampleType('INFLUX_TUBE')
samp_type_INFLUX_TUBE.setDescription('A tube for the BD Influx Cell Sorter.')
samp_type_INFLUX_TUBE.setListable(True)
samp_type_INFLUX_TUBE.setShowContainer(False)
samp_type_INFLUX_TUBE.setShowParents(True)
samp_type_INFLUX_TUBE.setSubcodeUnique(False)
samp_type_INFLUX_TUBE.setAutoGeneratedCode(True)
samp_type_INFLUX_TUBE.setShowParentMetadata(False)
samp_type_INFLUX_TUBE.setGeneratedCodePrefix('INFLUX_TUBE.')
# INFLUX_TUBESET
samp_type_INFLUX_TUBESET = tr.getOrCreateNewSampleType('INFLUX_TUBESET')
samp_type_INFLUX_TUBESET.setDescription('A virtual container for tubes for the BD Influx Cell Sorter.')
samp_type_INFLUX_TUBESET.setListable(True)
samp_type_INFLUX_TUBESET.setShowContainer(False)
samp_type_INFLUX_TUBESET.setShowParents(True)
samp_type_INFLUX_TUBESET.setSubcodeUnique(False)
samp_type_INFLUX_TUBESET.setAutoGeneratedCode(True)
samp_type_INFLUX_TUBESET.setShowParentMetadata(False)
samp_type_INFLUX_TUBESET.setGeneratedCodePrefix('INFLUX_TUBESET.')
# BD LSR FORTESSA
# ------------------------------------------------------------------------------
# LSR_FORTESSA_EXPERIMENT
samp_type_LSR_FORTESSA_EXPERIMENT = tr.getOrCreateNewSampleType('LSR_FORTESSA_EXPERIMENT')
samp_type_LSR_FORTESSA_EXPERIMENT.setDescription('Experiments from the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_EXPERIMENT.setListable(True)
samp_type_LSR_FORTESSA_EXPERIMENT.setShowContainer(False)
samp_type_LSR_FORTESSA_EXPERIMENT.setShowParents(True)
samp_type_LSR_FORTESSA_EXPERIMENT.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_LSR_FORTESSA_EXPERIMENT.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_EXPERIMENT.setGeneratedCodePrefix('LSR_FORTESSA_EXPERIMENT.')
# LSR_FORTESSA_PLATE
samp_type_LSR_FORTESSA_PLATE = tr.getOrCreateNewSampleType('LSR_FORTESSA_PLATE')
samp_type_LSR_FORTESSA_PLATE.setDescription('A plate for the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_PLATE.setListable(True)
samp_type_LSR_FORTESSA_PLATE.setShowContainer(False)
samp_type_LSR_FORTESSA_PLATE.setShowParents(True)
samp_type_LSR_FORTESSA_PLATE.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_PLATE.setAutoGeneratedCode(True)
samp_type_LSR_FORTESSA_PLATE.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_PLATE.setGeneratedCodePrefix('LSR_FORTESSA_PLATE.')
# LSR_FORTESSA_SPECIMEN
samp_type_LSR_FORTESSA_SPECIMEN = tr.getOrCreateNewSampleType('LSR_FORTESSA_SPECIMEN')
samp_type_LSR_FORTESSA_SPECIMEN.setDescription('A specimen for the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_SPECIMEN.setListable(True)
samp_type_LSR_FORTESSA_SPECIMEN.setShowContainer(False)
samp_type_LSR_FORTESSA_SPECIMEN.setShowParents(True)
samp_type_LSR_FORTESSA_SPECIMEN.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_SPECIMEN.setAutoGeneratedCode(True)
samp_type_LSR_FORTESSA_SPECIMEN.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_SPECIMEN.setGeneratedCodePrefix('LSR_FORTESSA_SPECIMEN.')
# LSR_FORTESSA_TUBE
samp_type_LSR_FORTESSA_TUBE = tr.getOrCreateNewSampleType('LSR_FORTESSA_TUBE')
samp_type_LSR_FORTESSA_TUBE.setDescription('A tube for the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_TUBE.setListable(True)
samp_type_LSR_FORTESSA_TUBE.setShowContainer(False)
samp_type_LSR_FORTESSA_TUBE.setShowParents(True)
samp_type_LSR_FORTESSA_TUBE.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_TUBE.setAutoGeneratedCode(True)
samp_type_LSR_FORTESSA_TUBE.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_TUBE.setGeneratedCodePrefix('LSR_FORTESSA_TUBE.')
# LSR_FORTESSA_TUBESET
samp_type_LSR_FORTESSA_TUBESET = tr.getOrCreateNewSampleType('LSR_FORTESSA_TUBESET')
samp_type_LSR_FORTESSA_TUBESET.setDescription('A virtual container for tubes for the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_TUBESET.setListable(False)
samp_type_LSR_FORTESSA_TUBESET.setShowContainer(False)
samp_type_LSR_FORTESSA_TUBESET.setShowParents(True)
samp_type_LSR_FORTESSA_TUBESET.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_TUBESET.setAutoGeneratedCode(True)
samp_type_LSR_FORTESSA_TUBESET.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_TUBESET.setGeneratedCodePrefix('LSR_FORTESSA_TUBESET.')
# LSR_FORTESSA_WELL
samp_type_LSR_FORTESSA_WELL = tr.getOrCreateNewSampleType('LSR_FORTESSA_WELL')
samp_type_LSR_FORTESSA_WELL.setDescription('A well for the BD LSR Fortessa Flow Cytometer.')
samp_type_LSR_FORTESSA_WELL.setListable(True)
samp_type_LSR_FORTESSA_WELL.setShowContainer(True)
samp_type_LSR_FORTESSA_WELL.setShowParents(True)
samp_type_LSR_FORTESSA_WELL.setSubcodeUnique(False)
samp_type_LSR_FORTESSA_WELL.setAutoGeneratedCode(True)
samp_type_LSR_FORTESSA_WELL.setShowParentMetadata(False)
samp_type_LSR_FORTESSA_WELL.setGeneratedCodePrefix('LSR_FORTESSA_WELL.')
# BC CYTOFLEX S
# ------------------------------------------------------------------------------
# CYTOFLEX_S_EXPERIMENT
samp_type_CYTOFLEX_S_EXPERIMENT = tr.getOrCreateNewSampleType('CYTOFLEX_S_EXPERIMENT')
samp_type_CYTOFLEX_S_EXPERIMENT.setDescription('Experiments from the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_EXPERIMENT.setListable(True)
samp_type_CYTOFLEX_S_EXPERIMENT.setShowContainer(False)
samp_type_CYTOFLEX_S_EXPERIMENT.setShowParents(True)
samp_type_CYTOFLEX_S_EXPERIMENT.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_CYTOFLEX_S_EXPERIMENT.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_EXPERIMENT.setGeneratedCodePrefix('CYTOFLEX_S_EXPERIMENT.')
# CYTOFLEX_S_PLATE
samp_type_CYTOFLEX_S_PLATE = tr.getOrCreateNewSampleType('CYTOFLEX_S_PLATE')
samp_type_CYTOFLEX_S_PLATE.setDescription('A plate for the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_PLATE.setListable(True)
samp_type_CYTOFLEX_S_PLATE.setShowContainer(False)
samp_type_CYTOFLEX_S_PLATE.setShowParents(True)
samp_type_CYTOFLEX_S_PLATE.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_PLATE.setAutoGeneratedCode(True)
samp_type_CYTOFLEX_S_PLATE.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_PLATE.setGeneratedCodePrefix('CYTOFLEX_S_PLATE.')
# CYTOFLEX_S_SPECIMEN
samp_type_CYTOFLEX_S_SPECIMEN = tr.getOrCreateNewSampleType('CYTOFLEX_S_SPECIMEN')
samp_type_CYTOFLEX_S_SPECIMEN.setDescription('A specimen for the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_SPECIMEN.setListable(True)
samp_type_CYTOFLEX_S_SPECIMEN.setShowContainer(False)
samp_type_CYTOFLEX_S_SPECIMEN.setShowParents(True)
samp_type_CYTOFLEX_S_SPECIMEN.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_SPECIMEN.setAutoGeneratedCode(True)
samp_type_CYTOFLEX_S_SPECIMEN.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_SPECIMEN.setGeneratedCodePrefix('CYTOFLEX_S_SPECIMEN.')
# CYTOFLEX_S_TUBE
samp_type_CYTOFLEX_S_TUBE = tr.getOrCreateNewSampleType('CYTOFLEX_S_TUBE')
samp_type_CYTOFLEX_S_TUBE.setDescription('A tube for the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_TUBE.setListable(True)
samp_type_CYTOFLEX_S_TUBE.setShowContainer(False)
samp_type_CYTOFLEX_S_TUBE.setShowParents(True)
samp_type_CYTOFLEX_S_TUBE.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_TUBE.setAutoGeneratedCode(True)
samp_type_CYTOFLEX_S_TUBE.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_TUBE.setGeneratedCodePrefix('CYTOFLEX_S_TUBE.')
# CYTOFLEX_S_TUBESET
samp_type_CYTOFLEX_S_TUBESET = tr.getOrCreateNewSampleType('CYTOFLEX_S_TUBESET')
samp_type_CYTOFLEX_S_TUBESET.setDescription('A virtual container for tubes for the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_TUBESET.setListable(False)
samp_type_CYTOFLEX_S_TUBESET.setShowContainer(False)
samp_type_CYTOFLEX_S_TUBESET.setShowParents(True)
samp_type_CYTOFLEX_S_TUBESET.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_TUBESET.setAutoGeneratedCode(True)
samp_type_CYTOFLEX_S_TUBESET.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_TUBESET.setGeneratedCodePrefix('CYTOFLEX_S_TUBESET.')
# CYTOFLEX_S_WELL
samp_type_CYTOFLEX_S_WELL = tr.getOrCreateNewSampleType('CYTOFLEX_S_WELL')
samp_type_CYTOFLEX_S_WELL.setDescription('A well for the BC CytoFLEX S Flow Cytometer.')
samp_type_CYTOFLEX_S_WELL.setListable(True)
samp_type_CYTOFLEX_S_WELL.setShowContainer(True)
samp_type_CYTOFLEX_S_WELL.setShowParents(True)
samp_type_CYTOFLEX_S_WELL.setSubcodeUnique(False)
samp_type_CYTOFLEX_S_WELL.setAutoGeneratedCode(True)
samp_type_CYTOFLEX_S_WELL.setShowParentMetadata(False)
samp_type_CYTOFLEX_S_WELL.setGeneratedCodePrefix('CYTOFLEX_S_WELL.')
# BC MOFLO XDP
# ------------------------------------------------------------------------------
# MOFLO_XDP_EXPERIMENT
samp_type_MOFLO_XDP_EXPERIMENT = tr.getOrCreateNewSampleType('MOFLO_XDP_EXPERIMENT')
samp_type_MOFLO_XDP_EXPERIMENT.setDescription('Experiments from the BC MoFlo XDP Cell Sorter.')
samp_type_MOFLO_XDP_EXPERIMENT.setListable(True)
samp_type_MOFLO_XDP_EXPERIMENT.setShowContainer(False)
samp_type_MOFLO_XDP_EXPERIMENT.setShowParents(True)
samp_type_MOFLO_XDP_EXPERIMENT.setSubcodeUnique(False)
samp_type_MOFLO_XDP_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_MOFLO_XDP_EXPERIMENT.setShowParentMetadata(False)
samp_type_MOFLO_XDP_EXPERIMENT.setGeneratedCodePrefix('MOFLO_XDP_EXPERIMENT.')
# MOFLO_XDP_SPECIMEN
samp_type_MOFLO_XDP_SPECIMEN = tr.getOrCreateNewSampleType('MOFLO_XDP_SPECIMEN')
samp_type_MOFLO_XDP_SPECIMEN.setDescription('A specimen for the BC MoFlo XDP Cell Sorter.')
samp_type_MOFLO_XDP_SPECIMEN.setListable(True)
samp_type_MOFLO_XDP_SPECIMEN.setShowContainer(False)
samp_type_MOFLO_XDP_SPECIMEN.setShowParents(True)
samp_type_MOFLO_XDP_SPECIMEN.setSubcodeUnique(False)
samp_type_MOFLO_XDP_SPECIMEN.setAutoGeneratedCode(True)
samp_type_MOFLO_XDP_SPECIMEN.setShowParentMetadata(False)
samp_type_MOFLO_XDP_SPECIMEN.setGeneratedCodePrefix('MOFLO_XDP_SPECIMEN.')
# MOFLO_XDP_TUBE
samp_type_MOFLO_XDP_TUBE = tr.getOrCreateNewSampleType('MOFLO_XDP_TUBE')
samp_type_MOFLO_XDP_TUBE.setDescription('A tube for the BC MoFlo XDP Cell Sorter.')
samp_type_MOFLO_XDP_TUBE.setListable(True)
samp_type_MOFLO_XDP_TUBE.setShowContainer(False)
samp_type_MOFLO_XDP_TUBE.setShowParents(True)
samp_type_MOFLO_XDP_TUBE.setSubcodeUnique(False)
samp_type_MOFLO_XDP_TUBE.setAutoGeneratedCode(True)
samp_type_MOFLO_XDP_TUBE.setShowParentMetadata(False)
samp_type_MOFLO_XDP_TUBE.setGeneratedCodePrefix('MOFLO_XDP_TUBE.')
# MOFLO_XDP_TUBESET
samp_type_MOFLO_XDP_TUBESET = tr.getOrCreateNewSampleType('MOFLO_XDP_TUBESET')
samp_type_MOFLO_XDP_TUBESET.setDescription('A virtual container for tubes for the BC MoFlo XDP Cell Sorter.')
samp_type_MOFLO_XDP_TUBESET.setListable(True)
samp_type_MOFLO_XDP_TUBESET.setShowContainer(False)
samp_type_MOFLO_XDP_TUBESET.setShowParents(True)
samp_type_MOFLO_XDP_TUBESET.setSubcodeUnique(False)
samp_type_MOFLO_XDP_TUBESET.setAutoGeneratedCode(True)
samp_type_MOFLO_XDP_TUBESET.setShowParentMetadata(False)
samp_type_MOFLO_XDP_TUBESET.setGeneratedCodePrefix('MOFLO_XDP_TUBESET.')
# BIORAD S3E
# ------------------------------------------------------------------------------
# S3E_EXPERIMENT
samp_type_S3E_EXPERIMENT = tr.getOrCreateNewSampleType('S3E_EXPERIMENT')
samp_type_S3E_EXPERIMENT.setDescription('Experiments from the BIORAD S3e Cell Sorter.')
samp_type_S3E_EXPERIMENT.setListable(True)
samp_type_S3E_EXPERIMENT.setShowContainer(False)
samp_type_S3E_EXPERIMENT.setShowParents(True)
samp_type_S3E_EXPERIMENT.setSubcodeUnique(False)
samp_type_S3E_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_S3E_EXPERIMENT.setShowParentMetadata(False)
samp_type_S3E_EXPERIMENT.setGeneratedCodePrefix('S3E_EXPERIMENT.')
# S3E_SPECIMEN
samp_type_S3E_SPECIMEN = tr.getOrCreateNewSampleType('S3E_SPECIMEN')
samp_type_S3E_SPECIMEN.setDescription('A specimen for the BIORAD S3e Cell Sorter.')
samp_type_S3E_SPECIMEN.setListable(True)
samp_type_S3E_SPECIMEN.setShowContainer(False)
samp_type_S3E_SPECIMEN.setShowParents(True)
samp_type_S3E_SPECIMEN.setSubcodeUnique(False)
samp_type_S3E_SPECIMEN.setAutoGeneratedCode(True)
samp_type_S3E_SPECIMEN.setShowParentMetadata(False)
samp_type_S3E_SPECIMEN.setGeneratedCodePrefix('S3E_SPECIMEN.')
# S3E_TUBE
samp_type_S3E_TUBE = tr.getOrCreateNewSampleType('S3E_TUBE')
samp_type_S3E_TUBE.setDescription('A tube for the BIORAD S3e Cell Sorter.')
samp_type_S3E_TUBE.setListable(True)
samp_type_S3E_TUBE.setShowContainer(False)
samp_type_S3E_TUBE.setShowParents(True)
samp_type_S3E_TUBE.setSubcodeUnique(False)
samp_type_S3E_TUBE.setAutoGeneratedCode(True)
samp_type_S3E_TUBE.setShowParentMetadata(False)
samp_type_S3E_TUBE.setGeneratedCodePrefix('S3E_TUBE.')
# S3E_TUBESET
samp_type_S3E_TUBESET = tr.getOrCreateNewSampleType('S3E_TUBESET')
samp_type_S3E_TUBESET.setDescription('A virtual container for tubes for the BIORAD S3e Cell Sorter.')
samp_type_S3E_TUBESET.setListable(True)
samp_type_S3E_TUBESET.setShowContainer(False)
samp_type_S3E_TUBESET.setShowParents(True)
samp_type_S3E_TUBESET.setSubcodeUnique(False)
samp_type_S3E_TUBESET.setAutoGeneratedCode(True)
samp_type_S3E_TUBESET.setShowParentMetadata(False)
samp_type_S3E_TUBESET.setGeneratedCodePrefix('S3E_TUBESET.')
# SONY SH800S
# ------------------------------------------------------------------------------
# SONY_SH800S_EXPERIMENT
samp_type_SONY_SH800S_EXPERIMENT = tr.getOrCreateNewSampleType('SONY_SH800S_EXPERIMENT')
samp_type_SONY_SH800S_EXPERIMENT.setDescription('Experiments from the SONY SH800S Cell Sorter.')
samp_type_SONY_SH800S_EXPERIMENT.setListable(True)
samp_type_SONY_SH800S_EXPERIMENT.setShowContainer(False)
samp_type_SONY_SH800S_EXPERIMENT.setShowParents(True)
samp_type_SONY_SH800S_EXPERIMENT.setSubcodeUnique(False)
samp_type_SONY_SH800S_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_SONY_SH800S_EXPERIMENT.setShowParentMetadata(False)
samp_type_SONY_SH800S_EXPERIMENT.setGeneratedCodePrefix('SONY_SH800S_EXPERIMENT.')
# SONY_SH800S_SPECIMEN
samp_type_SONY_SH800S_SPECIMEN = tr.getOrCreateNewSampleType('SONY_SH800S_SPECIMEN')
samp_type_SONY_SH800S_SPECIMEN.setDescription('A specimen for the SONY SH800S Cell Sorter.')
samp_type_SONY_SH800S_SPECIMEN.setListable(True)
samp_type_SONY_SH800S_SPECIMEN.setShowContainer(False)
samp_type_SONY_SH800S_SPECIMEN.setShowParents(True)
samp_type_SONY_SH800S_SPECIMEN.setSubcodeUnique(False)
samp_type_SONY_SH800S_SPECIMEN.setAutoGeneratedCode(True)
samp_type_SONY_SH800S_SPECIMEN.setShowParentMetadata(False)
samp_type_SONY_SH800S_SPECIMEN.setGeneratedCodePrefix('SONY_SH800S_SPECIMEN.')
# SONY_SH800S_TUBE
samp_type_SONY_SH800S_TUBE = tr.getOrCreateNewSampleType('SONY_SH800S_TUBE')
samp_type_SONY_SH800S_TUBE.setDescription('A tube for the SONY SH800S Cell Sorter.')
samp_type_SONY_SH800S_TUBE.setListable(True)
samp_type_SONY_SH800S_TUBE.setShowContainer(False)
samp_type_SONY_SH800S_TUBE.setShowParents(True)
samp_type_SONY_SH800S_TUBE.setSubcodeUnique(False)
samp_type_SONY_SH800S_TUBE.setAutoGeneratedCode(True)
samp_type_SONY_SH800S_TUBE.setShowParentMetadata(False)
samp_type_SONY_SH800S_TUBE.setGeneratedCodePrefix('SONY_SH800S_TUBE.')
# SONY_SH800S_TUBESET
samp_type_SONY_SH800S_TUBESET = tr.getOrCreateNewSampleType('SONY_SH800S_TUBESET')
samp_type_SONY_SH800S_TUBESET.setDescription('A virtual container for tubes for the SONY SH800S Cell Sorter.')
samp_type_SONY_SH800S_TUBESET.setListable(False)
samp_type_SONY_SH800S_TUBESET.setShowContainer(False)
samp_type_SONY_SH800S_TUBESET.setShowParents(True)
samp_type_SONY_SH800S_TUBESET.setSubcodeUnique(False)
samp_type_SONY_SH800S_TUBESET.setAutoGeneratedCode(True)
samp_type_SONY_SH800S_TUBESET.setShowParentMetadata(False)
samp_type_SONY_SH800S_TUBESET.setGeneratedCodePrefix('SONY_SH800S_TUBESET.')
# SONY MA900
# ------------------------------------------------------------------------------
# SONY_MA900_EXPERIMENT
samp_type_SONY_MA900_EXPERIMENT = tr.getOrCreateNewSampleType('SONY_MA900_EXPERIMENT')
samp_type_SONY_MA900_EXPERIMENT.setDescription('Experiments from the SONY MA900 Cell Sorter.')
samp_type_SONY_MA900_EXPERIMENT.setListable(True)
samp_type_SONY_MA900_EXPERIMENT.setShowContainer(False)
samp_type_SONY_MA900_EXPERIMENT.setShowParents(True)
samp_type_SONY_MA900_EXPERIMENT.setSubcodeUnique(False)
samp_type_SONY_MA900_EXPERIMENT.setAutoGeneratedCode(False)
samp_type_SONY_MA900_EXPERIMENT.setShowParentMetadata(False)
samp_type_SONY_MA900_EXPERIMENT.setGeneratedCodePrefix('SONY_MA900_EXPERIMENT.')
# SONY_MA900_SPECIMEN
samp_type_SONY_MA900_SPECIMEN = tr.getOrCreateNewSampleType('SONY_MA900_SPECIMEN')
samp_type_SONY_MA900_SPECIMEN.setDescription('A specimen for the SONY MA900 Cell Sorter.')
samp_type_SONY_MA900_SPECIMEN.setListable(True)
samp_type_SONY_MA900_SPECIMEN.setShowContainer(False)
samp_type_SONY_MA900_SPECIMEN.setShowParents(True)
samp_type_SONY_MA900_SPECIMEN.setSubcodeUnique(False)
samp_type_SONY_MA900_SPECIMEN.setAutoGeneratedCode(True)
samp_type_SONY_MA900_SPECIMEN.setShowParentMetadata(False)
samp_type_SONY_MA900_SPECIMEN.setGeneratedCodePrefix('SONY_MA900_SPECIMEN.')
# SONY_MA900_TUBE
samp_type_SONY_MA900_TUBE = tr.getOrCreateNewSampleType('SONY_MA900_TUBE')
samp_type_SONY_MA900_TUBE.setDescription('A tube for the SONY MA900 Cell Sorter.')
samp_type_SONY_MA900_TUBE.setListable(True)
samp_type_SONY_MA900_TUBE.setShowContainer(False)
samp_type_SONY_MA900_TUBE.setShowParents(True)
samp_type_SONY_MA900_TUBE.setSubcodeUnique(False)
samp_type_SONY_MA900_TUBE.setAutoGeneratedCode(True)
samp_type_SONY_MA900_TUBE.setShowParentMetadata(False)
samp_type_SONY_MA900_TUBE.setGeneratedCodePrefix('SONY_MA900_TUBE.')
# SONY_MA900_TUBESET
samp_type_SONY_MA900_TUBESET = tr.getOrCreateNewSampleType('SONY_MA900_TUBESET')
samp_type_SONY_MA900_TUBESET.setDescription('A virtual container for tubes for the SONY MA900 Cell Sorter.')
samp_type_SONY_MA900_TUBESET.setListable(False)
samp_type_SONY_MA900_TUBESET.setShowContainer(False)
samp_type_SONY_MA900_TUBESET.setShowParents(True)
samp_type_SONY_MA900_TUBESET.setSubcodeUnique(False)
samp_type_SONY_MA900_TUBESET.setAutoGeneratedCode(True)
samp_type_SONY_MA900_TUBESET.setShowParentMetadata(False)
samp_type_SONY_MA900_TUBESET.setGeneratedCodePrefix('SONY_MA900_TUBESET.')
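# ------------------------------------------------------------------------------
# The sample-type registrations above all repeat the same eight setter calls;
# a helper along these lines (a sketch, not part of the generated script)
# could factor the pattern, assuming the same transaction API:
#
# def register_sample_type(tr, code, description, listable=True,
#                          show_container=False, auto_generated_code=True):
#     samp_type = tr.getOrCreateNewSampleType(code)
#     samp_type.setDescription(description)
#     samp_type.setListable(listable)
#     samp_type.setShowContainer(show_container)
#     samp_type.setShowParents(True)
#     samp_type.setSubcodeUnique(False)
#     samp_type.setAutoGeneratedCode(auto_generated_code)
#     samp_type.setShowParentMetadata(False)
#     samp_type.setGeneratedCodePrefix(code + '.')
#     return samp_type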
# ==============================================================================
#
# DATASET TYPES
#
# ==============================================================================
# COMMON
# ------------------------------------------------------------------------------
# ATTACHMENT
data_set_type_ATTACHMENT = tr.getOrCreateNewDataSetType('ATTACHMENT')
data_set_type_ATTACHMENT.setDescription('Used to attach files to entities.')
data_set_type_ATTACHMENT.setMainDataSetPattern(None)
data_set_type_ATTACHMENT.setMainDataSetPath(None)
data_set_type_ATTACHMENT.setDeletionDisallowed(False)
# BD FACS ARIA
# ------------------------------------------------------------------------------
# FACS_ARIA_FCSFILE
data_set_type_FACS_ARIA_FCSFILE = tr.getOrCreateNewDataSetType('FACS_ARIA_FCSFILE')
data_set_type_FACS_ARIA_FCSFILE.setDescription('An FCS file from the BD FACS Aria III Cell Sorter.')
data_set_type_FACS_ARIA_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_FACS_ARIA_FCSFILE.setMainDataSetPath(None)
data_set_type_FACS_ARIA_FCSFILE.setDeletionDisallowed(False)
# FACS_ARIA_ACCESSORY_FILE
data_set_type_FACS_ARIA_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('FACS_ARIA_ACCESSORY_FILE')
data_set_type_FACS_ARIA_ACCESSORY_FILE.setDescription('An accessory dataset file associated with a FACS Aria experiment.')
data_set_type_FACS_ARIA_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_FACS_ARIA_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_FACS_ARIA_ACCESSORY_FILE.setDeletionDisallowed(False)
# BD INFLUX
# ------------------------------------------------------------------------------
# INFLUX_FCSFILE
data_set_type_INFLUX_FCSFILE = tr.getOrCreateNewDataSetType('INFLUX_FCSFILE')
data_set_type_INFLUX_FCSFILE.setDescription('An FCS file from the BD Influx Cell Sorter.')
data_set_type_INFLUX_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_INFLUX_FCSFILE.setMainDataSetPath(None)
data_set_type_INFLUX_FCSFILE.setDeletionDisallowed(False)
# INFLUX_ACCESSORY_FILE
data_set_type_INFLUX_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('INFLUX_ACCESSORY_FILE')
data_set_type_INFLUX_ACCESSORY_FILE.setDescription('An accessory dataset file associated with an Influx experiment.')
data_set_type_INFLUX_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_INFLUX_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_INFLUX_ACCESSORY_FILE.setDeletionDisallowed(False)
# BD LSR FORTESSA
# ------------------------------------------------------------------------------
# LSR_FORTESSA_FCSFILE
data_set_type_LSR_FORTESSA_FCSFILE = tr.getOrCreateNewDataSetType('LSR_FORTESSA_FCSFILE')
data_set_type_LSR_FORTESSA_FCSFILE.setDescription('An FCS file from the BD LSR Fortessa Flow Cytometer.')
data_set_type_LSR_FORTESSA_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_LSR_FORTESSA_FCSFILE.setMainDataSetPath(None)
data_set_type_LSR_FORTESSA_FCSFILE.setDeletionDisallowed(False)
# LSR_FORTESSA_ACCESSORY_FILE
data_set_type_LSR_FORTESSA_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('LSR_FORTESSA_ACCESSORY_FILE')
data_set_type_LSR_FORTESSA_ACCESSORY_FILE.setDescription('An accessory dataset file associated with an LSR Fortessa experiment.')
data_set_type_LSR_FORTESSA_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_LSR_FORTESSA_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_LSR_FORTESSA_ACCESSORY_FILE.setDeletionDisallowed(False)
# BC CYTOFLEX S
# ------------------------------------------------------------------------------
# CYTOFLEX_S_FCSFILE
data_set_type_CYTOFLEX_S_FCSFILE = tr.getOrCreateNewDataSetType('CYTOFLEX_S_FCSFILE')
data_set_type_CYTOFLEX_S_FCSFILE.setDescription('An FCS file from the BC CytoFLEX S Flow Cytometer.')
data_set_type_CYTOFLEX_S_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_CYTOFLEX_S_FCSFILE.setMainDataSetPath(None)
data_set_type_CYTOFLEX_S_FCSFILE.setDeletionDisallowed(False)
# CYTOFLEX_S_ACCESSORY_FILE
data_set_type_CYTOFLEX_S_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('CYTOFLEX_S_ACCESSORY_FILE')
data_set_type_CYTOFLEX_S_ACCESSORY_FILE.setDescription('An accessory dataset file associated with a CytoFLEX S experiment.')
data_set_type_CYTOFLEX_S_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_CYTOFLEX_S_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_CYTOFLEX_S_ACCESSORY_FILE.setDeletionDisallowed(False)
# BC MOFLO XDP
# ------------------------------------------------------------------------------
# MOFLO_XDP_FCSFILE
data_set_type_MOFLO_XDP_FCSFILE = tr.getOrCreateNewDataSetType('MOFLO_XDP_FCSFILE')
data_set_type_MOFLO_XDP_FCSFILE.setDescription('An FCS file from the BC MoFlo XDP Cell Sorter.')
data_set_type_MOFLO_XDP_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_MOFLO_XDP_FCSFILE.setMainDataSetPath(None)
data_set_type_MOFLO_XDP_FCSFILE.setDeletionDisallowed(False)
# MOFLO_XDP_ACCESSORY_FILE
data_set_type_MOFLO_XDP_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('MOFLO_XDP_ACCESSORY_FILE')
data_set_type_MOFLO_XDP_ACCESSORY_FILE.setDescription('An accessory dataset file associated with a MOFLO XDP experiment.')
data_set_type_MOFLO_XDP_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_MOFLO_XDP_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_MOFLO_XDP_ACCESSORY_FILE.setDeletionDisallowed(False)
# BIORAD S3E
# ------------------------------------------------------------------------------
# S3E_FCSFILE
data_set_type_S3E_FCSFILE = tr.getOrCreateNewDataSetType('S3E_FCSFILE')
data_set_type_S3E_FCSFILE.setDescription('An FCS file from the BIORAD S3e Cell Sorter.')
data_set_type_S3E_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_S3E_FCSFILE.setMainDataSetPath(None)
data_set_type_S3E_FCSFILE.setDeletionDisallowed(False)
# S3E_ACCESSORY_FILE
data_set_type_S3E_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('S3E_ACCESSORY_FILE')
data_set_type_S3E_ACCESSORY_FILE.setDescription('An accessory dataset file associated with an S3E experiment.')
data_set_type_S3E_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_S3E_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_S3E_ACCESSORY_FILE.setDeletionDisallowed(False)
# SONY SH800S
# ------------------------------------------------------------------------------
# SONY_SH800S_FCSFILE
data_set_type_SONY_SH800S_FCSFILE = tr.getOrCreateNewDataSetType('SONY_SH800S_FCSFILE')
data_set_type_SONY_SH800S_FCSFILE.setDescription('An FCS file from the SONY SH800S Cell Sorter.')
data_set_type_SONY_SH800S_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_SONY_SH800S_FCSFILE.setMainDataSetPath(None)
data_set_type_SONY_SH800S_FCSFILE.setDeletionDisallowed(False)
# SONY_SH800S_ACCESSORY_FILE
data_set_type_SONY_SH800S_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('SONY_SH800S_ACCESSORY_FILE')
data_set_type_SONY_SH800S_ACCESSORY_FILE.setDescription('An accessory dataset file associated with a SONY SH800S experiment.')
data_set_type_SONY_SH800S_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_SONY_SH800S_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_SONY_SH800S_ACCESSORY_FILE.setDeletionDisallowed(False)
# SONY MA900
# ------------------------------------------------------------------------------
# SONY_MA900_FCSFILE
data_set_type_SONY_MA900_FCSFILE = tr.getOrCreateNewDataSetType('SONY_MA900_FCSFILE')
data_set_type_SONY_MA900_FCSFILE.setDescription('An FCS file from the SONY MA900 Cell Sorter.')
data_set_type_SONY_MA900_FCSFILE.setMainDataSetPattern('.*\.fcs')
data_set_type_SONY_MA900_FCSFILE.setMainDataSetPath(None)
data_set_type_SONY_MA900_FCSFILE.setDeletionDisallowed(False)
# SONY_MA900_ACCESSORY_FILE
data_set_type_SONY_MA900_ACCESSORY_FILE = tr.getOrCreateNewDataSetType('SONY_MA900_ACCESSORY_FILE')
data_set_type_SONY_MA900_ACCESSORY_FILE.setDescription('An accessory dataset file associated with a SONY MA900 experiment.')
data_set_type_SONY_MA900_ACCESSORY_FILE.setMainDataSetPattern(None)
data_set_type_SONY_MA900_ACCESSORY_FILE.setMainDataSetPath(None)
data_set_type_SONY_MA900_ACCESSORY_FILE.setDeletionDisallowed(False)
# ==============================================================================
#
# PROPERTY TYPES
#
# ==============================================================================
# COMMON
# ------------------------------------------------------------------------------
# ANNOTATIONS_STATE
prop_type_ANNOTATIONS_STATE = tr.getOrCreateNewPropertyType('ANNOTATIONS_STATE', DataType.XML)
prop_type_ANNOTATIONS_STATE.setLabel('Annotations State')
prop_type_ANNOTATIONS_STATE.setManagedInternally(False)
prop_type_ANNOTATIONS_STATE.setInternalNamespace(True)
# DEFAULT_OBJECT_TYPE
prop_type_DEFAULT_OBJECT_TYPE = tr.getOrCreateNewPropertyType('DEFAULT_OBJECT_TYPE', DataType.VARCHAR)
prop_type_DEFAULT_OBJECT_TYPE.setLabel('Default Object Type')
prop_type_DEFAULT_OBJECT_TYPE.setManagedInternally(False)
prop_type_DEFAULT_OBJECT_TYPE.setInternalNamespace(True)
# DESCRIPTION
prop_type_DESCRIPTION = tr.getOrCreateNewPropertyType('DESCRIPTION', DataType.VARCHAR)
prop_type_DESCRIPTION.setLabel('Description')
prop_type_DESCRIPTION.setManagedInternally(False)
prop_type_DESCRIPTION.setInternalNamespace(False)
# NAME
prop_type_NAME = tr.getOrCreateNewPropertyType('NAME', DataType.VARCHAR)
prop_type_NAME.setLabel('Name')
prop_type_NAME.setManagedInternally(False)
prop_type_NAME.setInternalNamespace(True)
# NOTES
prop_type_NOTES = tr.getOrCreateNewPropertyType('NOTES', DataType.VARCHAR)
prop_type_NOTES.setLabel('Notes')
prop_type_NOTES.setManagedInternally(False)
prop_type_NOTES.setInternalNamespace(False)
# RESOLUTION
prop_type_RESOLUTION = tr.getOrCreateNewPropertyType('RESOLUTION', DataType.VARCHAR)
prop_type_RESOLUTION.setLabel('Resolution')
prop_type_RESOLUTION.setManagedInternally(False)
prop_type_RESOLUTION.setInternalNamespace(True)
# XMLCOMMENTS
prop_type_XMLCOMMENTS = tr.getOrCreateNewPropertyType('XMLCOMMENTS', DataType.XML)
prop_type_XMLCOMMENTS.setLabel('XML Comments')
prop_type_XMLCOMMENTS.setManagedInternally(False)
prop_type_XMLCOMMENTS.setInternalNamespace(True)
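# NOTE (illustrative only): each property type in this file repeats the same
# four calls; a small factory like the sketch below could generate them. The
# helper is hypothetical and unused here -- 'tr' and DataType are the objects
# already in scope in this script.
def _create_property_type(tr, code, data_type, label, internal_namespace=False):
    """Create a property type with the flag defaults used throughout this file."""
    p = tr.getOrCreateNewPropertyType(code, data_type)
    p.setLabel(label)
    p.setManagedInternally(False)              # no type here is managed internally
    p.setInternalNamespace(internal_namespace) # True only for the common openBIS types
    return p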
# BD FACS ARIA
# ------------------------------------------------------------------------------
# FACS_ARIA_EXPERIMENT_ACQ_HARDWARE
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE
prop_type_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_DATE
prop_type_FACS_ARIA_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_FACS_ARIA_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_FACS_ARIA_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_DATE.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_DESCRIPTION
prop_type_FACS_ARIA_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_FACS_ARIA_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_NAME
prop_type_FACS_ARIA_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_FACS_ARIA_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_NAME.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_OWNER
prop_type_FACS_ARIA_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_FACS_ARIA_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_FACS_ARIA_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_OWNER.setInternalNamespace(False)
# FACS_ARIA_EXPERIMENT_VERSION
prop_type_FACS_ARIA_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('FACS_ARIA_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_FACS_ARIA_EXPERIMENT_VERSION.setLabel('Version')
prop_type_FACS_ARIA_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_FACS_ARIA_EXPERIMENT_VERSION.setInternalNamespace(False)
# FACS_ARIA_FCSFILE_ACQ_DATE
prop_type_FACS_ARIA_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('FACS_ARIA_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_FACS_ARIA_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_FACS_ARIA_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_FACS_ARIA_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# FACS_ARIA_FCSFILE_PARAMETERS
prop_type_FACS_ARIA_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('FACS_ARIA_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_FACS_ARIA_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_FACS_ARIA_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_FACS_ARIA_FCSFILE_PARAMETERS.setInternalNamespace(False)
# FACS_ARIA_TUBE_ISINDEXSORT
prop_type_FACS_ARIA_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('FACS_ARIA_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_FACS_ARIA_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_FACS_ARIA_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_FACS_ARIA_TUBE_ISINDEXSORT.setInternalNamespace(False)
# BD INFLUX
# ------------------------------------------------------------------------------
# INFLUX_EXPERIMENT_ACQ_HARDWARE
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# INFLUX_EXPERIMENT_ACQ_SOFTWARE
prop_type_INFLUX_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# INFLUX_EXPERIMENT_DATE
prop_type_INFLUX_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_INFLUX_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_INFLUX_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_DATE.setInternalNamespace(False)
# INFLUX_EXPERIMENT_DESCRIPTION
prop_type_INFLUX_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_INFLUX_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_INFLUX_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# INFLUX_EXPERIMENT_NAME
prop_type_INFLUX_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_INFLUX_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_INFLUX_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_NAME.setInternalNamespace(False)
# INFLUX_EXPERIMENT_OWNER
prop_type_INFLUX_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_INFLUX_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_INFLUX_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_OWNER.setInternalNamespace(False)
# INFLUX_EXPERIMENT_VERSION
prop_type_INFLUX_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('INFLUX_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_INFLUX_EXPERIMENT_VERSION.setLabel('Version')
prop_type_INFLUX_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_INFLUX_EXPERIMENT_VERSION.setInternalNamespace(False)
# INFLUX_FCSFILE_ACQ_DATE
prop_type_INFLUX_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('INFLUX_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_INFLUX_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_INFLUX_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_INFLUX_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# INFLUX_FCSFILE_PARAMETERS
prop_type_INFLUX_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('INFLUX_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_INFLUX_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_INFLUX_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_INFLUX_FCSFILE_PARAMETERS.setInternalNamespace(False)
# INFLUX_TUBE_ISINDEXSORT
prop_type_INFLUX_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('INFLUX_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_INFLUX_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_INFLUX_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_INFLUX_TUBE_ISINDEXSORT.setInternalNamespace(False)
# BD LSR FORTESSA
# ------------------------------------------------------------------------------
# LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_DATE
prop_type_LSR_FORTESSA_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_LSR_FORTESSA_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_LSR_FORTESSA_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_DATE.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_DESCRIPTION
prop_type_LSR_FORTESSA_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_NAME
prop_type_LSR_FORTESSA_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_LSR_FORTESSA_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_NAME.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_OWNER
prop_type_LSR_FORTESSA_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_LSR_FORTESSA_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_LSR_FORTESSA_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_OWNER.setInternalNamespace(False)
# LSR_FORTESSA_EXPERIMENT_VERSION
prop_type_LSR_FORTESSA_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('LSR_FORTESSA_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_LSR_FORTESSA_EXPERIMENT_VERSION.setLabel('Version')
prop_type_LSR_FORTESSA_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_LSR_FORTESSA_EXPERIMENT_VERSION.setInternalNamespace(False)
# LSR_FORTESSA_FCSFILE_ACQ_DATE
prop_type_LSR_FORTESSA_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('LSR_FORTESSA_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_LSR_FORTESSA_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_LSR_FORTESSA_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_LSR_FORTESSA_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# LSR_FORTESSA_FCSFILE_PARAMETERS
prop_type_LSR_FORTESSA_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('LSR_FORTESSA_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_LSR_FORTESSA_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_LSR_FORTESSA_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_LSR_FORTESSA_FCSFILE_PARAMETERS.setInternalNamespace(False)
# LSR_FORTESSA_PLATE_GEOMETRY
prop_type_LSR_FORTESSA_PLATE_GEOMETRY = tr.getOrCreateNewPropertyType('LSR_FORTESSA_PLATE_GEOMETRY', DataType.CONTROLLEDVOCABULARY)
prop_type_LSR_FORTESSA_PLATE_GEOMETRY.setLabel('Plate Geometry')
prop_type_LSR_FORTESSA_PLATE_GEOMETRY.setManagedInternally(False)
prop_type_LSR_FORTESSA_PLATE_GEOMETRY.setInternalNamespace(False)
prop_type_LSR_FORTESSA_PLATE_GEOMETRY.setVocabulary(vocabulary_LSR_FORTESSA_PLATE_GEOMETRY)
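# Plate-geometry properties are CONTROLLEDVOCABULARY-typed, so in addition to
# the usual flags they must be bound via setVocabulary() to a vocabulary
# defined earlier in this file; the CYTOFLEX S block below does the same.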
# BC CYTOFLEX S
# ------------------------------------------------------------------------------
# CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_DATE
prop_type_CYTOFLEX_S_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_CYTOFLEX_S_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_CYTOFLEX_S_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_DATE.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_DESCRIPTION
prop_type_CYTOFLEX_S_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_NAME
prop_type_CYTOFLEX_S_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_CYTOFLEX_S_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_NAME.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_OWNER
prop_type_CYTOFLEX_S_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_CYTOFLEX_S_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_CYTOFLEX_S_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_OWNER.setInternalNamespace(False)
# CYTOFLEX_S_EXPERIMENT_VERSION
prop_type_CYTOFLEX_S_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('CYTOFLEX_S_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_CYTOFLEX_S_EXPERIMENT_VERSION.setLabel('Version')
prop_type_CYTOFLEX_S_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_CYTOFLEX_S_EXPERIMENT_VERSION.setInternalNamespace(False)
# CYTOFLEX_S_FCSFILE_ACQ_DATE
prop_type_CYTOFLEX_S_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('CYTOFLEX_S_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_CYTOFLEX_S_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_CYTOFLEX_S_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_CYTOFLEX_S_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# CYTOFLEX_S_FCSFILE_PARAMETERS
prop_type_CYTOFLEX_S_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('CYTOFLEX_S_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_CYTOFLEX_S_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_CYTOFLEX_S_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_CYTOFLEX_S_FCSFILE_PARAMETERS.setInternalNamespace(False)
# CYTOFLEX_S_PLATE_GEOMETRY
prop_type_CYTOFLEX_S_PLATE_GEOMETRY = tr.getOrCreateNewPropertyType('CYTOFLEX_S_PLATE_GEOMETRY', DataType.CONTROLLEDVOCABULARY)
prop_type_CYTOFLEX_S_PLATE_GEOMETRY.setLabel('Plate Geometry')
prop_type_CYTOFLEX_S_PLATE_GEOMETRY.setManagedInternally(False)
prop_type_CYTOFLEX_S_PLATE_GEOMETRY.setInternalNamespace(False)
prop_type_CYTOFLEX_S_PLATE_GEOMETRY.setVocabulary(vocabulary_CYTOFLEX_S_PLATE_GEOMETRY)
# BC MOFLO XDP
# ------------------------------------------------------------------------------
# MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_DATE
prop_type_MOFLO_XDP_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_MOFLO_XDP_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_MOFLO_XDP_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_DATE.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_DESCRIPTION
prop_type_MOFLO_XDP_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_NAME
prop_type_MOFLO_XDP_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_MOFLO_XDP_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_NAME.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_OWNER
prop_type_MOFLO_XDP_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_MOFLO_XDP_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_MOFLO_XDP_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_OWNER.setInternalNamespace(False)
# MOFLO_XDP_EXPERIMENT_VERSION
prop_type_MOFLO_XDP_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('MOFLO_XDP_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_MOFLO_XDP_EXPERIMENT_VERSION.setLabel('Version')
prop_type_MOFLO_XDP_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_MOFLO_XDP_EXPERIMENT_VERSION.setInternalNamespace(False)
# MOFLO_XDP_FCSFILE_ACQ_DATE
prop_type_MOFLO_XDP_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('MOFLO_XDP_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_MOFLO_XDP_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_MOFLO_XDP_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_MOFLO_XDP_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# MOFLO_XDP_FCSFILE_PARAMETERS
prop_type_MOFLO_XDP_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('MOFLO_XDP_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_MOFLO_XDP_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_MOFLO_XDP_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_MOFLO_XDP_FCSFILE_PARAMETERS.setInternalNamespace(False)
# MOFLO_XDP_TUBE_ISINDEXSORT
prop_type_MOFLO_XDP_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('MOFLO_XDP_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_MOFLO_XDP_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_MOFLO_XDP_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_MOFLO_XDP_TUBE_ISINDEXSORT.setInternalNamespace(False)
# BIORAD S3E
# ------------------------------------------------------------------------------
# S3E_EXPERIMENT_ACQ_HARDWARE
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# S3E_EXPERIMENT_ACQ_SOFTWARE
prop_type_S3E_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_S3E_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_S3E_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# S3E_EXPERIMENT_DATE
prop_type_S3E_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_S3E_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_S3E_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_DATE.setInternalNamespace(False)
# S3E_EXPERIMENT_DESCRIPTION
prop_type_S3E_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_S3E_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_S3E_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# S3E_EXPERIMENT_NAME
prop_type_S3E_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_S3E_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_S3E_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_NAME.setInternalNamespace(False)
# S3E_EXPERIMENT_OWNER
prop_type_S3E_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_S3E_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_S3E_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_OWNER.setInternalNamespace(False)
# S3E_EXPERIMENT_VERSION
prop_type_S3E_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('S3E_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_S3E_EXPERIMENT_VERSION.setLabel('Version')
prop_type_S3E_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_S3E_EXPERIMENT_VERSION.setInternalNamespace(False)
# S3E_FCSFILE_ACQ_DATE
prop_type_S3E_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('S3E_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_S3E_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_S3E_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_S3E_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# S3E_FCSFILE_PARAMETERS
prop_type_S3E_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('S3E_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_S3E_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_S3E_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_S3E_FCSFILE_PARAMETERS.setInternalNamespace(False)
# S3E_TUBE_ISINDEXSORT
prop_type_S3E_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('S3E_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_S3E_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_S3E_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_S3E_TUBE_ISINDEXSORT.setInternalNamespace(False)
# SONY SH800S
# ------------------------------------------------------------------------------
# SONY_SH800S_EXPERIMENT_ACQ_HARDWARE
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE
prop_type_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_DATE
prop_type_SONY_SH800S_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_SONY_SH800S_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_SONY_SH800S_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_DATE.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_DESCRIPTION
prop_type_SONY_SH800S_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_SONY_SH800S_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_NAME
prop_type_SONY_SH800S_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_SONY_SH800S_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_NAME.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_OWNER
prop_type_SONY_SH800S_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_SONY_SH800S_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_SONY_SH800S_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_OWNER.setInternalNamespace(False)
# SONY_SH800S_EXPERIMENT_VERSION
prop_type_SONY_SH800S_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('SONY_SH800S_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_SONY_SH800S_EXPERIMENT_VERSION.setLabel('Version')
prop_type_SONY_SH800S_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_SONY_SH800S_EXPERIMENT_VERSION.setInternalNamespace(False)
# SONY_SH800S_FCSFILE_ACQ_DATE
prop_type_SONY_SH800S_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('SONY_SH800S_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_SONY_SH800S_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_SONY_SH800S_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_SONY_SH800S_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# SONY_SH800S_FCSFILE_PARAMETERS
prop_type_SONY_SH800S_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('SONY_SH800S_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_SONY_SH800S_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_SONY_SH800S_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_SONY_SH800S_FCSFILE_PARAMETERS.setInternalNamespace(False)
# SONY_SH800S_TUBE_ISINDEXSORT
prop_type_SONY_SH800S_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('SONY_SH800S_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_SONY_SH800S_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_SONY_SH800S_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_SONY_SH800S_TUBE_ISINDEXSORT.setInternalNamespace(False)
# SONY MA900
# ------------------------------------------------------------------------------
# SONY_MA900_EXPERIMENT_ACQ_HARDWARE
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_ACQ_HARDWARE', DataType.VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setLabel('Acquisition hardware')
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME', DataType.VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setLabel('Acquisition station name')
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_ACQ_SOFTWARE
prop_type_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_ACQ_SOFTWARE', DataType.VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setLabel('Acquisition software')
prop_type_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_DATE
prop_type_SONY_MA900_EXPERIMENT_DATE = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_DATE', DataType.TIMESTAMP)
prop_type_SONY_MA900_EXPERIMENT_DATE.setLabel('Experiment date')
prop_type_SONY_MA900_EXPERIMENT_DATE.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_DATE.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_DESCRIPTION
prop_type_SONY_MA900_EXPERIMENT_DESCRIPTION = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_DESCRIPTION', DataType.MULTILINE_VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_DESCRIPTION.setLabel('Description')
prop_type_SONY_MA900_EXPERIMENT_DESCRIPTION.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_DESCRIPTION.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_NAME
prop_type_SONY_MA900_EXPERIMENT_NAME = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_NAME', DataType.VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_NAME.setLabel('Experiment name')
prop_type_SONY_MA900_EXPERIMENT_NAME.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_NAME.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_OWNER
prop_type_SONY_MA900_EXPERIMENT_OWNER = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_OWNER', DataType.VARCHAR)
prop_type_SONY_MA900_EXPERIMENT_OWNER.setLabel('Owner')
prop_type_SONY_MA900_EXPERIMENT_OWNER.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_OWNER.setInternalNamespace(False)
# SONY_MA900_EXPERIMENT_VERSION
prop_type_SONY_MA900_EXPERIMENT_VERSION = tr.getOrCreateNewPropertyType('SONY_MA900_EXPERIMENT_VERSION', DataType.INTEGER)
prop_type_SONY_MA900_EXPERIMENT_VERSION.setLabel('Version')
prop_type_SONY_MA900_EXPERIMENT_VERSION.setManagedInternally(False)
prop_type_SONY_MA900_EXPERIMENT_VERSION.setInternalNamespace(False)
# SONY_MA900_FCSFILE_ACQ_DATE
prop_type_SONY_MA900_FCSFILE_ACQ_DATE = tr.getOrCreateNewPropertyType('SONY_MA900_FCSFILE_ACQ_DATE', DataType.TIMESTAMP)
prop_type_SONY_MA900_FCSFILE_ACQ_DATE.setLabel('Acquisition date')
prop_type_SONY_MA900_FCSFILE_ACQ_DATE.setManagedInternally(False)
prop_type_SONY_MA900_FCSFILE_ACQ_DATE.setInternalNamespace(False)
# SONY_MA900_FCSFILE_PARAMETERS
prop_type_SONY_MA900_FCSFILE_PARAMETERS = tr.getOrCreateNewPropertyType('SONY_MA900_FCSFILE_PARAMETERS', DataType.MULTILINE_VARCHAR)
prop_type_SONY_MA900_FCSFILE_PARAMETERS.setLabel('FCS parameters')
prop_type_SONY_MA900_FCSFILE_PARAMETERS.setManagedInternally(False)
prop_type_SONY_MA900_FCSFILE_PARAMETERS.setInternalNamespace(False)
# SONY_MA900_TUBE_ISINDEXSORT
prop_type_SONY_MA900_TUBE_ISINDEXSORT = tr.getOrCreateNewPropertyType('SONY_MA900_TUBE_ISINDEXSORT', DataType.BOOLEAN)
prop_type_SONY_MA900_TUBE_ISINDEXSORT.setLabel('Index sort')
prop_type_SONY_MA900_TUBE_ISINDEXSORT.setManagedInternally(False)
prop_type_SONY_MA900_TUBE_ISINDEXSORT.setInternalNamespace(False)
# ==============================================================================
#
# PROPERTY ASSIGNMENTS
#
# ==============================================================================
# COMMON
# ------------------------------------------------------------------------------
# DATA_SET_ATTACHMENT_NAME
assignment_DATA_SET_ATTACHMENT_NAME = tr.assignPropertyType(data_set_type_ATTACHMENT, prop_type_NAME)
assignment_DATA_SET_ATTACHMENT_NAME.setMandatory(False)
assignment_DATA_SET_ATTACHMENT_NAME.setSection(None)
assignment_DATA_SET_ATTACHMENT_NAME.setPositionInForms(1)
assignment_DATA_SET_ATTACHMENT_NAME.setShownEdit(False)
# DATA_SET_ATTACHMENT_DESCRIPTION
assignment_DATA_SET_ATTACHMENT_DESCRIPTION = tr.assignPropertyType(data_set_type_ATTACHMENT, prop_type_DESCRIPTION)
assignment_DATA_SET_ATTACHMENT_DESCRIPTION.setMandatory(False)
assignment_DATA_SET_ATTACHMENT_DESCRIPTION.setSection(None)
assignment_DATA_SET_ATTACHMENT_DESCRIPTION.setPositionInForms(2)
assignment_DATA_SET_ATTACHMENT_DESCRIPTION.setShownEdit(False)
# DATA_SET_ATTACHMENT_NOTES
assignment_DATA_SET_ATTACHMENT_NOTES = tr.assignPropertyType(data_set_type_ATTACHMENT, prop_type_NOTES)
assignment_DATA_SET_ATTACHMENT_NOTES.setMandatory(False)
assignment_DATA_SET_ATTACHMENT_NOTES.setSection(None)
assignment_DATA_SET_ATTACHMENT_NOTES.setPositionInForms(3)
assignment_DATA_SET_ATTACHMENT_NOTES.setShownEdit(False)
# DATA_SET_ATTACHMENT_XMLCOMMENTS
assignment_DATA_SET_ATTACHMENT_XMLCOMMENTS = tr.assignPropertyType(data_set_type_ATTACHMENT, prop_type_XMLCOMMENTS)
assignment_DATA_SET_ATTACHMENT_XMLCOMMENTS.setMandatory(False)
assignment_DATA_SET_ATTACHMENT_XMLCOMMENTS.setSection(None)
assignment_DATA_SET_ATTACHMENT_XMLCOMMENTS.setPositionInForms(4)
assignment_DATA_SET_ATTACHMENT_XMLCOMMENTS.setShownEdit(False)
# EXPERIMENT_COLLECTION_NAME
assignment_EXPERIMENT_COLLECTION_NAME = tr.assignPropertyType(exp_type_COLLECTION, prop_type_NAME)
assignment_EXPERIMENT_COLLECTION_NAME.setMandatory(False)
assignment_EXPERIMENT_COLLECTION_NAME.setSection(None)
assignment_EXPERIMENT_COLLECTION_NAME.setPositionInForms(1)
assignment_EXPERIMENT_COLLECTION_NAME.setShownEdit(False)
# EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE
assignment_EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE = tr.assignPropertyType(exp_type_COLLECTION, prop_type_DEFAULT_OBJECT_TYPE)
assignment_EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE.setMandatory(False)
assignment_EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE.setSection(None)
assignment_EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE.setPositionInForms(2)
assignment_EXPERIMENT_COLLECTION_DEFAULT_OBJECT_TYPE.setShownEdit(False)
# SAMPLE_ORGANIZATION_UNIT_NAME
assignment_SAMPLE_ORGANIZATION_UNIT_NAME = tr.assignPropertyType(samp_type_ORGANIZATION_UNIT, prop_type_NAME)
assignment_SAMPLE_ORGANIZATION_UNIT_NAME.setMandatory(False)
assignment_SAMPLE_ORGANIZATION_UNIT_NAME.setSection(None)
assignment_SAMPLE_ORGANIZATION_UNIT_NAME.setPositionInForms(1)
assignment_SAMPLE_ORGANIZATION_UNIT_NAME.setShownEdit(False)
# SAMPLE_ORGANIZATION_UNIT_DESCRIPTION
assignment_SAMPLE_ORGANIZATION_UNIT_DESCRIPTION = tr.assignPropertyType(samp_type_ORGANIZATION_UNIT, prop_type_DESCRIPTION)
assignment_SAMPLE_ORGANIZATION_UNIT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_ORGANIZATION_UNIT_DESCRIPTION.setSection(None)
assignment_SAMPLE_ORGANIZATION_UNIT_DESCRIPTION.setPositionInForms(2)
assignment_SAMPLE_ORGANIZATION_UNIT_DESCRIPTION.setShownEdit(False)
# SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS
assignment_SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS = tr.assignPropertyType(samp_type_ORGANIZATION_UNIT, prop_type_XMLCOMMENTS)
assignment_SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS.setMandatory(False)
assignment_SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS.setSection(None)
assignment_SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS.setPositionInForms(3)
assignment_SAMPLE_ORGANIZATION_UNIT_XMLCOMMENTS.setShownEdit(False)
# SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE
assignment_SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_ORGANIZATION_UNIT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE.setSection(None)
assignment_SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE.setPositionInForms(4)
assignment_SAMPLE_ORGANIZATION_UNIT_ANNOTATIONS_STATE.setShownEdit(False)
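# NOTE (illustrative only): the property assignments in this file follow the
# same five-call pattern throughout. A sketch of a helper that captures it
# (hypothetical, not used by the assignments below):
def _assign_property(tr, entity_type, prop_type, position, section=None,
                     mandatory=False, shown_edit=False):
    """Assign a property type to an entity type with the defaults used here."""
    a = tr.assignPropertyType(entity_type, prop_type)
    a.setMandatory(mandatory)
    a.setSection(section)           # e.g. 'General Info' or None
    a.setPositionInForms(position)  # 1-based position in the entry form
    a.setShownEdit(shown_edit)
    return a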
# BD FACS ARIA
# ------------------------------------------------------------------------------
# SAMPLE_FACS_ARIA_EXPERIMENT_NAME
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_NAME)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME.setPositionInForms(3)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_DATE)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE.setPositionInForms(4)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DATE.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION.setPositionInForms(5)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_VERSION)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION.setPositionInForms(8)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_VERSION.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_OWNER)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER.setPositionInForms(9)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_OWNER.setShownEdit(False)
# SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_EXPERIMENT, prop_type_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(10)
assignment_SAMPLE_FACS_ARIA_EXPERIMENT_FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_FACS_ARIA_FCSFILE, prop_type_FACS_ARIA_FCSFILE_PARAMETERS)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_FACS_ARIA_FCSFILE, prop_type_FACS_ARIA_FCSFILE_ACQ_DATE)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_FACS_ARIA_FCSFILE_FACS_ARIA_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_FACS_ARIA_FCSFILE_NAME
assignment_DATA_SET_FACS_ARIA_FCSFILE_NAME = tr.assignPropertyType(data_set_type_FACS_ARIA_FCSFILE, prop_type_NAME)
assignment_DATA_SET_FACS_ARIA_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_FACS_ARIA_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_FACS_ARIA_FCSFILE_NAME.setPositionInForms(3)
assignment_DATA_SET_FACS_ARIA_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_FACS_ARIA_SPECIMEN_NAME
assignment_SAMPLE_FACS_ARIA_SPECIMEN_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_FACS_ARIA_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_FACS_ARIA_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_FACS_ARIA_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_FACS_ARIA_TUBE_NAME
assignment_SAMPLE_FACS_ARIA_TUBE_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_TUBE, prop_type_NAME)
assignment_SAMPLE_FACS_ARIA_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_FACS_ARIA_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_FACS_ARIA_TUBE_NAME.setShownEdit(True)
# SAMPLE_FACS_ARIA_TUBESET_NAME
assignment_SAMPLE_FACS_ARIA_TUBESET_NAME = tr.assignPropertyType(samp_type_FACS_ARIA_TUBESET, prop_type_NAME)
assignment_SAMPLE_FACS_ARIA_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_TUBESET_NAME.setShownEdit(False)
# SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT
assignment_SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_FACS_ARIA_TUBE, prop_type_FACS_ARIA_TUBE_ISINDEXSORT)
assignment_SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT.setPositionInForms(3)
assignment_SAMPLE_FACS_ARIA_TUBE_FACS_ARIA_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME
assignment_DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_FACS_ARIA_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_FACS_ARIA_ACCESSORY_FILE_NAME.setShownEdit(False)
# BD INFLUX
# ------------------------------------------------------------------------------
# SAMPLE_INFLUX_EXPERIMENT_NAME
assignment_SAMPLE_INFLUX_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_INFLUX_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_INFLUX_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_INFLUX_EXPERIMENT_NAME.setShownEdit(True)
# SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_INFLUX_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION.setPositionInForms(3)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_NAME)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME.setPositionInForms(4)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_DATE)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE.setPositionInForms(5)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_DATE.setShownEdit(True)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_OWNER)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_OWNER.setShownEdit(True)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(9)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_INFLUX_EXPERIMENT, prop_type_INFLUX_EXPERIMENT_VERSION)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION.setPositionInForms(10)
assignment_SAMPLE_INFLUX_EXPERIMENT_INFLUX_EXPERIMENT_VERSION.setShownEdit(False)
# DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_INFLUX_FCSFILE, prop_type_INFLUX_FCSFILE_PARAMETERS)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_INFLUX_FCSFILE, prop_type_INFLUX_FCSFILE_ACQ_DATE)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_INFLUX_FCSFILE_INFLUX_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_INFLUX_FCSFILE_NAME
assignment_DATA_SET_INFLUX_FCSFILE_NAME = tr.assignPropertyType(data_set_type_INFLUX_FCSFILE, prop_type_NAME)
assignment_DATA_SET_INFLUX_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_INFLUX_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_INFLUX_FCSFILE_NAME.setPositionInForms(3)
assignment_DATA_SET_INFLUX_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_INFLUX_SPECIMEN_NAME
assignment_SAMPLE_INFLUX_SPECIMEN_NAME = tr.assignPropertyType(samp_type_INFLUX_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_INFLUX_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_INFLUX_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_INFLUX_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_INFLUX_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_INFLUX_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_INFLUX_TUBE_NAME
assignment_SAMPLE_INFLUX_TUBE_NAME = tr.assignPropertyType(samp_type_INFLUX_TUBE, prop_type_NAME)
assignment_SAMPLE_INFLUX_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_INFLUX_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_INFLUX_TUBE_NAME.setShownEdit(True)
# SAMPLE_INFLUX_TUBESET_NAME
assignment_SAMPLE_INFLUX_TUBESET_NAME = tr.assignPropertyType(samp_type_INFLUX_TUBESET, prop_type_NAME)
assignment_SAMPLE_INFLUX_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_INFLUX_TUBESET_NAME.setShownEdit(False)
# SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT
assignment_SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_INFLUX_TUBE, prop_type_INFLUX_TUBE_ISINDEXSORT)
assignment_SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT.setPositionInForms(4)
assignment_SAMPLE_INFLUX_TUBE_INFLUX_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_INFLUX_ACCESSORY_FILE_NAME
assignment_DATA_SET_INFLUX_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_INFLUX_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_INFLUX_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_INFLUX_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_INFLUX_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_INFLUX_ACCESSORY_FILE_NAME.setShownEdit(False)
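# Every block in this script repeats the same five-call pattern per property.
# A minimal refactoring sketch follows; `assign_property` is illustrative and
# not part of the original script (which keeps the explicit calls), and `tr`
# is the master-data transaction already in scope above.
def assign_property(entity_type, prop_type, mandatory=False, section=None,
                    position=None, shown_edit=False):
    # Assign the property type to the sample/data set type, then configure
    # how it appears in forms.
    assignment = tr.assignPropertyType(entity_type, prop_type)
    assignment.setMandatory(mandatory)
    assignment.setSection(section)
    if position is not None:
        assignment.setPositionInForms(position)
    assignment.setShownEdit(shown_edit)
    return assignment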
# BD LSR FORTESSA
# ------------------------------------------------------------------------------
# SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_NAME)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME.setPositionInForms(3)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_DATE)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE.setPositionInForms(4)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DATE.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setPositionInForms(5)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_OWNER)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_OWNER.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_VERSION)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION.setPositionInForms(9)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_VERSION.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_EXPERIMENT, prop_type_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(10)
assignment_SAMPLE_LSR_FORTESSA_EXPERIMENT_LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_LSR_FORTESSA_FCSFILE, prop_type_LSR_FORTESSA_FCSFILE_PARAMETERS)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_LSR_FORTESSA_FCSFILE, prop_type_LSR_FORTESSA_FCSFILE_ACQ_DATE)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_LSR_FORTESSA_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_LSR_FORTESSA_FCSFILE_NAME
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_NAME = tr.assignPropertyType(data_set_type_LSR_FORTESSA_FCSFILE, prop_type_NAME)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_NAME.setPositionInForms(4)
assignment_DATA_SET_LSR_FORTESSA_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY
assignment_SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY = tr.assignPropertyType(samp_type_LSR_FORTESSA_PLATE, prop_type_LSR_FORTESSA_PLATE_GEOMETRY)
assignment_SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY.setSection(None)
assignment_SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_PLATE_LSR_FORTESSA_PLATE_GEOMETRY.setShownEdit(True)
# SAMPLE_LSR_FORTESSA_PLATE_NAME
assignment_SAMPLE_LSR_FORTESSA_PLATE_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_PLATE, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_PLATE_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_PLATE_NAME.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_PLATE_NAME.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_PLATE_NAME.setShownEdit(True)
# SAMPLE_LSR_FORTESSA_SPECIMEN_NAME
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_LSR_FORTESSA_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_LSR_FORTESSA_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_TUBE_NAME
assignment_SAMPLE_LSR_FORTESSA_TUBE_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_TUBE, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_TUBE_NAME.setShownEdit(True)
# SAMPLE_LSR_FORTESSA_TUBESET_NAME
assignment_SAMPLE_LSR_FORTESSA_TUBESET_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_TUBESET, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_TUBESET_NAME.setShownEdit(False)
# SAMPLE_LSR_FORTESSA_WELL_NAME
assignment_SAMPLE_LSR_FORTESSA_WELL_NAME = tr.assignPropertyType(samp_type_LSR_FORTESSA_WELL, prop_type_NAME)
assignment_SAMPLE_LSR_FORTESSA_WELL_NAME.setMandatory(False)
assignment_SAMPLE_LSR_FORTESSA_WELL_NAME.setSection('General Info')
assignment_SAMPLE_LSR_FORTESSA_WELL_NAME.setPositionInForms(1)
assignment_SAMPLE_LSR_FORTESSA_WELL_NAME.setShownEdit(True)
# DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME
assignment_DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_LSR_FORTESSA_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_LSR_FORTESSA_ACCESSORY_FILE_NAME.setShownEdit(False)
# BC CYTOFLEX S
# ------------------------------------------------------------------------------
# SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_NAME)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME.setPositionInForms(3)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_DATE)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE.setPositionInForms(4)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DATE.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setPositionInForms(5)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_OWNER)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_OWNER.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_VERSION)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION.setPositionInForms(9)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_VERSION.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_EXPERIMENT, prop_type_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(10)
assignment_SAMPLE_CYTOFLEX_S_EXPERIMENT_CYTOFLEX_S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_CYTOFLEX_S_FCSFILE, prop_type_CYTOFLEX_S_FCSFILE_PARAMETERS)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_CYTOFLEX_S_FCSFILE, prop_type_CYTOFLEX_S_FCSFILE_ACQ_DATE)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_CYTOFLEX_S_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_CYTOFLEX_S_FCSFILE_NAME
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_NAME = tr.assignPropertyType(data_set_type_CYTOFLEX_S_FCSFILE, prop_type_NAME)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_NAME.setPositionInForms(4)
assignment_DATA_SET_CYTOFLEX_S_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY
assignment_SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY = tr.assignPropertyType(samp_type_CYTOFLEX_S_PLATE, prop_type_CYTOFLEX_S_PLATE_GEOMETRY)
assignment_SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY.setSection(None)
assignment_SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_PLATE_CYTOFLEX_S_PLATE_GEOMETRY.setShownEdit(True)
# SAMPLE_CYTOFLEX_S_PLATE_NAME
assignment_SAMPLE_CYTOFLEX_S_PLATE_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_PLATE, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_PLATE_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_PLATE_NAME.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_PLATE_NAME.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_PLATE_NAME.setShownEdit(True)
# SAMPLE_CYTOFLEX_S_SPECIMEN_NAME
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_CYTOFLEX_S_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_CYTOFLEX_S_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_TUBE_NAME
assignment_SAMPLE_CYTOFLEX_S_TUBE_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_TUBE, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_TUBE_NAME.setShownEdit(True)
# SAMPLE_CYTOFLEX_S_TUBESET_NAME
assignment_SAMPLE_CYTOFLEX_S_TUBESET_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_TUBESET, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_TUBESET_NAME.setShownEdit(False)
# SAMPLE_CYTOFLEX_S_WELL_NAME
assignment_SAMPLE_CYTOFLEX_S_WELL_NAME = tr.assignPropertyType(samp_type_CYTOFLEX_S_WELL, prop_type_NAME)
assignment_SAMPLE_CYTOFLEX_S_WELL_NAME.setMandatory(False)
assignment_SAMPLE_CYTOFLEX_S_WELL_NAME.setSection('General Info')
assignment_SAMPLE_CYTOFLEX_S_WELL_NAME.setPositionInForms(1)
assignment_SAMPLE_CYTOFLEX_S_WELL_NAME.setShownEdit(True)
# DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME
assignment_DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_CYTOFLEX_S_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_CYTOFLEX_S_ACCESSORY_FILE_NAME.setShownEdit(False)
# BC MOFLO XDP
# ------------------------------------------------------------------------------
# SAMPLE_MOFLO_XDP_EXPERIMENT_NAME
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_NAME.setShownEdit(True)
# SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setPositionInForms(3)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_NAME)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME.setPositionInForms(4)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_DATE)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE.setPositionInForms(5)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_DATE.setShownEdit(True)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_OWNER)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_OWNER.setShownEdit(True)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(9)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_MOFLO_XDP_EXPERIMENT, prop_type_MOFLO_XDP_EXPERIMENT_VERSION)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION.setPositionInForms(10)
assignment_SAMPLE_MOFLO_XDP_EXPERIMENT_MOFLO_XDP_EXPERIMENT_VERSION.setShownEdit(False)
# DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_MOFLO_XDP_FCSFILE, prop_type_MOFLO_XDP_FCSFILE_PARAMETERS)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_MOFLO_XDP_FCSFILE, prop_type_MOFLO_XDP_FCSFILE_ACQ_DATE)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_MOFLO_XDP_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_MOFLO_XDP_FCSFILE_NAME
assignment_DATA_SET_MOFLO_XDP_FCSFILE_NAME = tr.assignPropertyType(data_set_type_MOFLO_XDP_FCSFILE, prop_type_NAME)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_NAME.setPositionInForms(4)
assignment_DATA_SET_MOFLO_XDP_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_MOFLO_XDP_SPECIMEN_NAME
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_MOFLO_XDP_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_MOFLO_XDP_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_MOFLO_XDP_TUBE_NAME
assignment_SAMPLE_MOFLO_XDP_TUBE_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_TUBE, prop_type_NAME)
assignment_SAMPLE_MOFLO_XDP_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_MOFLO_XDP_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_MOFLO_XDP_TUBE_NAME.setShownEdit(True)
# SAMPLE_MOFLO_XDP_TUBESET_NAME
assignment_SAMPLE_MOFLO_XDP_TUBESET_NAME = tr.assignPropertyType(samp_type_MOFLO_XDP_TUBESET, prop_type_NAME)
assignment_SAMPLE_MOFLO_XDP_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_TUBESET_NAME.setShownEdit(False)
# SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT
assignment_SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_MOFLO_XDP_TUBE, prop_type_MOFLO_XDP_TUBE_ISINDEXSORT)
assignment_SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT.setPositionInForms(4)
assignment_SAMPLE_MOFLO_XDP_TUBE_MOFLO_XDP_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME
assignment_DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_MOFLO_XDP_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_MOFLO_XDP_ACCESSORY_FILE_NAME.setShownEdit(False)
# BIORAD S3E
# ------------------------------------------------------------------------------
# SAMPLE_S3E_EXPERIMENT_NAME
assignment_SAMPLE_S3E_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_S3E_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_S3E_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_S3E_EXPERIMENT_NAME.setShownEdit(True)
# SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_S3E_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION.setPositionInForms(3)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_NAME)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME.setPositionInForms(4)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_DATE)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE.setPositionInForms(5)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_DATE.setShownEdit(True)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_OWNER)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_OWNER.setShownEdit(True)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(9)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_S3E_EXPERIMENT, prop_type_S3E_EXPERIMENT_VERSION)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION.setPositionInForms(10)
assignment_SAMPLE_S3E_EXPERIMENT_S3E_EXPERIMENT_VERSION.setShownEdit(False)
# DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_S3E_FCSFILE, prop_type_S3E_FCSFILE_PARAMETERS)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_S3E_FCSFILE, prop_type_S3E_FCSFILE_ACQ_DATE)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_S3E_FCSFILE_S3E_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_S3E_FCSFILE_NAME
assignment_DATA_SET_S3E_FCSFILE_NAME = tr.assignPropertyType(data_set_type_S3E_FCSFILE, prop_type_NAME)
assignment_DATA_SET_S3E_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_S3E_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_S3E_FCSFILE_NAME.setPositionInForms(4)
assignment_DATA_SET_S3E_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_S3E_SPECIMEN_NAME
assignment_SAMPLE_S3E_SPECIMEN_NAME = tr.assignPropertyType(samp_type_S3E_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_S3E_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_S3E_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_S3E_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_S3E_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_S3E_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_S3E_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_S3E_TUBE_NAME
assignment_SAMPLE_S3E_TUBE_NAME = tr.assignPropertyType(samp_type_S3E_TUBE, prop_type_NAME)
assignment_SAMPLE_S3E_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_S3E_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_S3E_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_S3E_TUBE_NAME.setShownEdit(True)
# SAMPLE_S3E_TUBESET_NAME
assignment_SAMPLE_S3E_TUBESET_NAME = tr.assignPropertyType(samp_type_S3E_TUBESET, prop_type_NAME)
assignment_SAMPLE_S3E_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_S3E_TUBESET_NAME.setShownEdit(False)
# SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT
assignment_SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_S3E_TUBE, prop_type_S3E_TUBE_ISINDEXSORT)
assignment_SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT.setPositionInForms(4)
assignment_SAMPLE_S3E_TUBE_S3E_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_S3E_ACCESSORY_FILE_NAME
assignment_DATA_SET_S3E_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_S3E_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_S3E_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_S3E_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_S3E_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_S3E_ACCESSORY_FILE_NAME.setShownEdit(False)
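# Under the helper sketched after the BD Influx section, the S3E tube name
# assignment above would collapse to a single call; kept commented out so the
# explicit assignments above remain the ones actually executed:
# assign_property(samp_type_S3E_TUBE, prop_type_NAME,
#                 section='General Info', position=1, shown_edit=True)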
# SONY SH800S
# ------------------------------------------------------------------------------
# SAMPLE_SONY_SH800S_EXPERIMENT_NAME
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_NAME.setShownEdit(True)
# SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION.setPositionInForms(3)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_NAME)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME.setPositionInForms(4)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_DATE)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE.setPositionInForms(5)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_DATE.setShownEdit(True)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_OWNER)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_OWNER.setShownEdit(True)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(9)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_SONY_SH800S_EXPERIMENT, prop_type_SONY_SH800S_EXPERIMENT_VERSION)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION.setPositionInForms(10)
assignment_SAMPLE_SONY_SH800S_EXPERIMENT_SONY_SH800S_EXPERIMENT_VERSION.setShownEdit(False)
# DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_SONY_SH800S_FCSFILE, prop_type_SONY_SH800S_FCSFILE_PARAMETERS)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_SONY_SH800S_FCSFILE, prop_type_SONY_SH800S_FCSFILE_ACQ_DATE)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_SONY_SH800S_FCSFILE_SONY_SH800S_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_SONY_SH800S_FCSFILE_NAME
assignment_DATA_SET_SONY_SH800S_FCSFILE_NAME = tr.assignPropertyType(data_set_type_SONY_SH800S_FCSFILE, prop_type_NAME)
assignment_DATA_SET_SONY_SH800S_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_SONY_SH800S_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_SONY_SH800S_FCSFILE_NAME.setPositionInForms(4)
assignment_DATA_SET_SONY_SH800S_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_SONY_SH800S_SPECIMEN_NAME
assignment_SAMPLE_SONY_SH800S_SPECIMEN_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_SONY_SH800S_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_SONY_SH800S_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_SONY_SH800S_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_SONY_SH800S_TUBE_NAME
assignment_SAMPLE_SONY_SH800S_TUBE_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_TUBE, prop_type_NAME)
assignment_SAMPLE_SONY_SH800S_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_SONY_SH800S_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_SH800S_TUBE_NAME.setShownEdit(True)
# SAMPLE_SONY_SH800S_TUBESET_NAME
assignment_SAMPLE_SONY_SH800S_TUBESET_NAME = tr.assignPropertyType(samp_type_SONY_SH800S_TUBESET, prop_type_NAME)
assignment_SAMPLE_SONY_SH800S_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_TUBESET_NAME.setShownEdit(False)
# SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT
assignment_SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_SONY_SH800S_TUBE, prop_type_SONY_SH800S_TUBE_ISINDEXSORT)
assignment_SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT.setPositionInForms(4)
assignment_SAMPLE_SONY_SH800S_TUBE_SONY_SH800S_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME
assignment_DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_SONY_SH800S_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_SONY_SH800S_ACCESSORY_FILE_NAME.setShownEdit(False)
# SONY MA900
# ------------------------------------------------------------------------------
# SAMPLE_SONY_MA900_EXPERIMENT_NAME
assignment_SAMPLE_SONY_MA900_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_NAME)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_NAME.setSection('General Info')
assignment_SAMPLE_SONY_MA900_EXPERIMENT_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_NAME.setShownEdit(True)
# SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE
assignment_SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_DESCRIPTION)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION.setPositionInForms(3)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DESCRIPTION.setShownEdit(True)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_NAME)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME.setPositionInForms(4)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_NAME.setShownEdit(False)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_DATE)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE.setPositionInForms(5)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_DATE.setShownEdit(True)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setPositionInForms(6)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE.setShownEdit(False)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setPositionInForms(7)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_SOFTWARE.setShownEdit(False)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_OWNER)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER.setPositionInForms(8)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_OWNER.setShownEdit(True)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setPositionInForms(9)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME.setShownEdit(True)
# SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION = tr.assignPropertyType(samp_type_SONY_MA900_EXPERIMENT, prop_type_SONY_MA900_EXPERIMENT_VERSION)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION.setMandatory(False)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION.setSection(None)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION.setPositionInForms(10)
assignment_SAMPLE_SONY_MA900_EXPERIMENT_SONY_MA900_EXPERIMENT_VERSION.setShownEdit(False)
# DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS = tr.assignPropertyType(data_set_type_SONY_MA900_FCSFILE, prop_type_SONY_MA900_FCSFILE_PARAMETERS)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS.setMandatory(False)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS.setSection(None)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS.setPositionInForms(2)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_PARAMETERS.setShownEdit(False)
# DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE = tr.assignPropertyType(data_set_type_SONY_MA900_FCSFILE, prop_type_SONY_MA900_FCSFILE_ACQ_DATE)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE.setMandatory(False)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE.setSection(None)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE.setPositionInForms(3)
assignment_DATA_SET_SONY_MA900_FCSFILE_SONY_MA900_FCSFILE_ACQ_DATE.setShownEdit(False)
# DATA_SET_SONY_MA900_FCSFILE_NAME
assignment_DATA_SET_SONY_MA900_FCSFILE_NAME = tr.assignPropertyType(data_set_type_SONY_MA900_FCSFILE, prop_type_NAME)
assignment_DATA_SET_SONY_MA900_FCSFILE_NAME.setMandatory(False)
assignment_DATA_SET_SONY_MA900_FCSFILE_NAME.setSection(None)
assignment_DATA_SET_SONY_MA900_FCSFILE_NAME.setPositionInForms(3)
assignment_DATA_SET_SONY_MA900_FCSFILE_NAME.setShownEdit(False)
# SAMPLE_SONY_MA900_SPECIMEN_NAME
assignment_SAMPLE_SONY_MA900_SPECIMEN_NAME = tr.assignPropertyType(samp_type_SONY_MA900_SPECIMEN, prop_type_NAME)
assignment_SAMPLE_SONY_MA900_SPECIMEN_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_SPECIMEN_NAME.setSection('General Info')
assignment_SAMPLE_SONY_MA900_SPECIMEN_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_MA900_SPECIMEN_NAME.setShownEdit(True)
# SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE
assignment_SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE = tr.assignPropertyType(samp_type_SONY_MA900_SPECIMEN, prop_type_ANNOTATIONS_STATE)
assignment_SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE.setMandatory(False)
assignment_SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE.setSection('General Info')
assignment_SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE.setPositionInForms(2)
assignment_SAMPLE_SONY_MA900_SPECIMEN_ANNOTATIONS_STATE.setShownEdit(False)
# SAMPLE_SONY_MA900_TUBE_NAME
assignment_SAMPLE_SONY_MA900_TUBE_NAME = tr.assignPropertyType(samp_type_SONY_MA900_TUBE, prop_type_NAME)
assignment_SAMPLE_SONY_MA900_TUBE_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_TUBE_NAME.setSection('General Info')
assignment_SAMPLE_SONY_MA900_TUBE_NAME.setPositionInForms(1)
assignment_SAMPLE_SONY_MA900_TUBE_NAME.setShownEdit(True)
# SAMPLE_SONY_MA900_TUBESET_NAME
assignment_SAMPLE_SONY_MA900_TUBESET_NAME = tr.assignPropertyType(samp_type_SONY_MA900_TUBESET, prop_type_NAME)
assignment_SAMPLE_SONY_MA900_TUBESET_NAME.setMandatory(False)
assignment_SAMPLE_SONY_MA900_TUBESET_NAME.setShownEdit(False)
# SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT
assignment_SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT = tr.assignPropertyType(samp_type_SONY_MA900_TUBE, prop_type_SONY_MA900_TUBE_ISINDEXSORT)
assignment_SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT.setMandatory(False)
assignment_SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT.setSection(None)
assignment_SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT.setPositionInForms(4)
assignment_SAMPLE_SONY_MA900_TUBE_SONY_MA900_TUBE_ISINDEXSORT.setShownEdit(False)
# DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME
assignment_DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME = tr.assignPropertyType(data_set_type_SONY_MA900_ACCESSORY_FILE, prop_type_NAME)
assignment_DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME.setMandatory(False)
assignment_DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME.setSection(None)
assignment_DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME.setPositionInForms(1)
assignment_DATA_SET_SONY_MA900_ACCESSORY_FILE_NAME.setShownEdit(False)
print("Import of Flow Cytometry Core Technology Master Data finished.")
|
import os
os.system("thorq --add --mode single --device gpu/7970 ./test")
|
reservation_schema = {
"id": int,
"customer_id": int,
"start_latitude": float,
"start_longitude": float,
"srid": int,
"net_price": int,
"location_id": int
}
location_schema = {
"id": int,
"wgs84_polygon": str,
"title": str
}
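# A minimal sketch of how these type maps might be used, validating a payload
# dict against a schema (only the schema dicts above are assumed):
def validate(payload, schema):
    """Return True if payload has exactly the schema's keys with matching value types."""
    return set(payload) == set(schema) and all(
        isinstance(payload[key], expected) for key, expected in schema.items()
    )
# e.g. validate({"id": 1, "wgs84_polygon": "POLYGON((...))", "title": "Depot"}, location_schema)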
|
import numpy as np
class Calculadora:
def __init__(self):
print ("Se creo una calculadora")
def sumar(self,x,y):
return x + y
def restar(self,x,y):
return x - y
def multiplicar(self,x,y):
return x*y
def dividir(self,x,y):
return x/y
class CalcAleatorea(Calculadora):
def __init__(self):
Calculadora.__init__(self)
def media(self,x):
return (np.sum(x))/len(x)
def mediaCuadratica(self,x):
z = np.power(x,2)
return (np.sum(z))/len(z)
def varianza(self,x):
media = self.media(x)
return self.mediaCuadratica(x-media)
def desviacionEstandar(self,x):
return np.sqrt(self.varianza(x))
    def correlacion(self, x, y):
        # note: this returns the mean of the product, E[x*y], not the Pearson correlation coefficient
        return self.media(x * y)
calc2 = CalcAleatorea()
t = np.linspace(0,20,2000000)
y = np.exp(-0.5*t)
media = calc2.media(y)
print("media de y es: " , media)
|
from django.shortcuts import render
from django.views.generic import View
import fredboardScales as gs
import random
# Create your views here.
postcount = 0
class ScalesPage(View):
def get(self, request, *args, **kwargs):
add = gs.create_svg('C', 'Ionian', 0)
svg = add.draw_single_box()
svg1 = ''.join(svg)
return render(request, "scales.html", {'svg': svg1})
# def post(self, request, *args, **kwargs):
# color = random.randrange(0, 3)
# x1 = request.POST.get('note', 'C')
# x2 = request.POST.get('shape', 'M7')
# x3 = request.POST.get('stringset', 'strS1')
# add = gs.create_svg(x1 + x2, 'drop2_inv1_' + x3, color)
# add = add.create()
# svg = ''.join(add)
# title = x1 + x2
# return render(request, 'scales.html', {'svg': svg, 'title': title})
class AjaxScales(View):
def post(self, request, *args, **kwargs):
color = random.randrange(0, 2)
print(request.POST)
x1 = request.POST.get('note', 'C')
x2 = request.POST.get('mode', 'Ionian')
x3 = request.POST.get('box', 1)
print(x1 + x2, x3)
add1 = gs.create_svg(x1, x2, int(x3))
add1 = add1.draw_single_box()
svg3 = ''.join(add1)
        print('POST request was received on AjaxScales')
return render(request, 'scales.html', {'svg': svg3})
|
from kivy.app import App
from kivy.uix.scatter import Scatter
from kivy.uix.label import Label
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import ListProperty
import random
"""
Kivy example by Alexander Taylor:
https://www.youtube.com/user/kivycrashcourse
"""
class ScatterTextWidget(BoxLayout):
    text_colour = ListProperty([0, 0, 1, 1])
    def change_label_colour(self, *args):
        # pick a random RGB colour with full opacity (Python 3: range, not xrange)
        colour = [random.random() for i in range(3)] + [1]
        self.text_colour = colour
class TutorialApp(App):
def build(self):
return ScatterTextWidget()
if __name__ == "__main__":
TutorialApp().run()
|
from classes.util.date_formater import DateFormater
from classes.lock_modifier.result import Result
from classes.lock_replacer import LockReplacer
from classes.yaml_parser import YamlParser
from classes.package_matcher.match import Match
from pathlib import Path
from typing import Any, IO, List, Optional
class LockModifier:
def __init__(self, config_path: str):
self.yaml_parser: YamlParser = YamlParser(config_path)
def update_package(self, match: Match, fields: List[str]) -> Result:
lock_path: Path = match.lock_path
package: str = match.package_name
sha: str = match.package_x.get_value(fields)
result: Result = Result(package)
stream: Optional[IO] = None
try:
stream, data = self.__get_stream_with_data(lock_path)
time: str = DateFormater.get_current_utc_datetime()
replacer: LockReplacer = LockReplacer(data)
if replacer.find_package(package):
result.directory = lock_path.resolve().parent
if not replacer.replace_required(sha):
                    result.set_ignored("Current sha, no action required")
else:
lock_content: str = replacer.replace(sha, time)
length: int = self.__save_content(stream, lock_content)
if length == len(lock_content):
result.set_success()
else:
                        result.set_warning("Saved {actual} characters instead of the expected {expected}".format(
                            actual=length, expected=len(lock_content)
                        ))
except Exception as exception:
result.set_failed(self.__get_exception_message(exception))
self.__close_stream(stream)
return result
def __save_content(self, stream: IO, content: str) -> int:
stream.seek(0)
length: int = stream.write(content)
stream.truncate()
return length
def __get_stream_with_data(self, path: Path) -> List:
stream: IO = path.open("r+", encoding="utf8")
data: Any = stream.read()
return [stream, data]
    def __close_stream(self, stream: Optional[IO]):
        # typing.IO cannot be used reliably with isinstance(); a plain None/closed check works
        if stream is not None and not stream.closed:
            stream.close()
def __get_exception_message(self, exception: Exception) -> Optional[str]:
return exception.message if hasattr(exception, "message") else None
|
# -*- coding: utf-8 -*-
# from __future__ import unicode_literals
from django.db import models
INVESTMENTHOUSE = (
('ALT', 'אלטשולר שחם'),
('EXL', 'אקסלנט'),
('PSA', 'פסגות'),
('LAP', 'לפידות'),
('YAL', 'ילין לפידות'),
('MEI', 'מיטב - דש'),
)
PLAN = (
('GEM', 'קרן גמל'),
('HIS', 'קרן השתלמות'),
)
PLANSTATE = (
('ACT', 'פעיל'),
('OFR', 'הצעה'),
('PAI', 'מסולק'),
)
INSURANCECOMPANY = (
('HAR', 'הראל'),
('MIG', 'מגדל'),
('HAF', 'הפניקס'),
('CLA', 'כלל'),
('MEN', 'מנורה'),
)
HEALTHLIFEPOLICY = (
('HEA', 'בריאות'),
('LIF', 'חיים'),
('NUR', 'סיעוד'),
)
# Provident and study funds
class ProvidentFund(models.Model):
nameId = models.CharField('תעודת זהות', max_length=10)
name = models.CharField('שם', max_length=30)
investmentHouse = models.CharField('בית השקעות', max_length=3, choices=INVESTMENTHOUSE, default='ALT')
plan = models.CharField('סוג תוכנית', max_length=3, choices=PLAN, default='GEM')
placeholderID = models.IntegerField('מספר עמית', null=True)
monthlyDeposit = models.DecimalField('הפקדות חודשיות', max_digits=6, decimal_places=2, default=1.00)
managementFeeFunded = models.DecimalField('דמי ניהול מצבירה', max_digits=4, decimal_places=2, default=1.00)
fullyFunded = models.DecimalField('סך צבירה', max_digits=12, decimal_places=2, default=1.00)
yearlCommissionFunded = models.DecimalField('עמלה מצבירה שנתית', max_digits=12, decimal_places=2, blank=True,
null=True, editable=False)
payee = models.DecimalField('נפרעים', max_digits=12, decimal_places=2, blank=True, null=True, editable=False)
planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
def calc_total1(self):
amount1 = (self.managementFeeFunded / 100 * self.fullyFunded)
return amount1
def calc_total2(self):
amount2 = (self.managementFeeFunded / 100 * self.fullyFunded / 12)
return amount2
    def save(self, *args, **kwargs):
        self.yearlCommissionFunded = self.calc_total1()
        self.payee = self.calc_total2()
        super(ProvidentFund, self).save(*args, **kwargs)
class Meta:
verbose_name = 'לקוח'
verbose_name_plural = 'גמול והשתלמות'
# Financial savings
class FinancialSavings(models.Model):
name = models.CharField('שם', max_length=30)
nameId = models.CharField('תעודת זהות', max_length=10)
investmentHouse = models.CharField('בית השקעות', max_length=3, choices=INSURANCECOMPANY, default='HAR')
policyType = models.CharField('סוג פוליסה', max_length=10, default='חיסכון פיננסי', editable=False)
policyID = models.IntegerField('מספר פוליסה', null=True)
monthlyDeposit = models.DecimalField('הפקדות חודשיות', max_digits=6, decimal_places=0, default=0)
managementFeeFunded = models.DecimalField('דמי ניהול מצבירה', max_digits=2, decimal_places=1, default=1.0)
yearlyPremium = models.IntegerField('פרמיה שנתית', null=True, editable=False)
fullyFunded = models.DecimalField('צבירות', max_digits=5, decimal_places=0, default=0)
yearlCommissionFunded = models.DecimalField('עמלה מצבירה שנתית', max_digits=12, decimal_places=2, blank=True,
null=True, editable=False)
planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
def calc_total1(self):
amount1 = (self.monthlyDeposit * 12)
return amount1
def calc_total2(self):
amount2 = (self.managementFeeFunded / 100 * self.monthlyDeposit)
return amount2
    def save(self, *args, **kwargs):
        self.yearlyPremium = self.calc_total1()
        self.yearlCommissionFunded = self.calc_total2()
        super(FinancialSavings, self).save(*args, **kwargs)
class Meta:
verbose_name = 'לקוח'
verbose_name_plural = 'חיסכון פיננסי'
# Executive insurance
class seniorEmployeesInsurance (models.Model):
name = models.CharField('שם', max_length=30)
nameId = models.CharField('תעודת זהות', max_length=10)
insuranceCompany = models.CharField('חברת ביטוח', max_length=3, choices=INSURANCECOMPANY, default='HAR')
policyType = models.CharField('סוג פוליסה', max_length=10, default='ביטוח מנהלים', editable=False)
policyID = models.IntegerField('מספר פוליסה', null=True)
monthlyPremium = models.DecimalField('פרמיה חודשית', decimal_places=0, max_digits=6, default=0)
managementFeePremium = models.DecimalField('דמי ניהול מפרמיה', max_digits = 6, decimal_places=0, default=0)
yearlyPremium = models.DecimalField('פרמיה שנתית', decimal_places=0, max_digits=6, default=0, null=True, editable=False)
payee = models.DecimalField('נפרעים', max_digits=5, decimal_places=0, default=0, editable=False)
yearlCommissionFunded = models.DecimalField('סך הכל עמלה בשנה', max_digits=12, decimal_places=2, blank=True,
null=True, editable=False)
planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
def calc_total1(self):
amount1 = (self.monthlyPremium * 12)
return amount1
def calc_total2(self):
amount2 = (self.managementFeePremium * self.monthlyPremium)
return amount2
def calc_total3(self):
amount3 = (self.managementFeePremium * self.monthlyPremium * 12)
return amount3
    def save(self, *args, **kwargs):
        self.yearlyPremium = self.calc_total1()
        self.payee = self.calc_total2()
        self.yearlCommissionFunded = self.calc_total3()
        super(seniorEmployeesInsurance, self).save(*args, **kwargs)
class Meta:
verbose_name = 'לקוח'
verbose_name_plural = 'ביטוח מנהלים'
# Pension fund
class pensionFund (models.Model):
name = models.CharField('שם', max_length=30)
nameId = models.CharField('תעודת זהות', max_length=10)
insuranceCompany = models.CharField('חברת ביטוח', max_length=3, choices=INSURANCECOMPANY, default='HAR')
policyType = models.CharField('סוג פוליסה', max_length=10, default='קרן פנסיה', editable=False)
policyID = models.IntegerField('מספר עמית', null=True)
monthlyPremium = models.DecimalField('פרמיה חודשית', decimal_places=0, max_digits=6, default=0)
managementFeePremium = models.DecimalField('דמי ניהול מפרמיה', max_digits = 6, decimal_places=0, default=6)
managementFeeFunded = models.DecimalField('דמי ניהול מצבירה', max_digits=2, decimal_places=1, default=0.5)
yearlyPremium = models.DecimalField('פרמיה שנתית', decimal_places=0, max_digits=6, default=0, null=True, editable=False)
payee = models.DecimalField('נפרעים', max_digits=5, decimal_places=0, default=0, editable=False)
extendCommission = models.DecimalField('אחוז עמלת היקף', max_digits=5, decimal_places=0, default=6)
yearlypExtendCommission = models.DecimalField('עמלת היקף', max_digits=5, decimal_places=0, default=0, editable=False)
yearlCommissionFunded = models.DecimalField('סך הכל עמלה בשנה', max_digits=12, decimal_places=0, blank=True,
null=True, editable=False)
planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
def calc_total1(self):
amount1 = (self.monthlyPremium * 12)
return amount1
def calc_total2(self):
amount2 = (self.managementFeePremium / 100 * self.monthlyPremium)
return amount2
def calc_total3(self):
amount3 = (self.extendCommission / 100 * self.monthlyPremium * 12)
return amount3
def calc_total4(self):
amount4 = (self.managementFeePremium / 100 * self.monthlyPremium * 12)
return amount4
    def save(self, *args, **kwargs):
        self.yearlyPremium = self.calc_total1()
        self.payee = self.calc_total2()
        self.yearlypExtendCommission = self.calc_total3()
        self.yearlCommissionFunded = self.calc_total4()
        super(pensionFund, self).save(*args, **kwargs)
class Meta:
verbose_name = 'לקוח'
verbose_name_plural = 'קרן פנסיה'
# Health and life insurance
class healthLife (models.Model):
name = models.CharField('שם', max_length=30)
nameId = models.IntegerField('תעודת זהות', null=True)
insuranceCompany = models.CharField('חברת ביטוח', max_length=3, choices=INSURANCECOMPANY, default='HAR')
policyType = models.CharField('סוג פוליסה', max_length=3, choices=HEALTHLIFEPOLICY, default='HEA')
policyID = models.IntegerField('מספר פוליסה', null=True)
monthlyPremium = models.DecimalField('פרמיה חודשית', decimal_places=2, max_digits=6, default=0)
yearlyPremium = models.DecimalField('פרמיה שנתית', decimal_places=0, max_digits=6, default=0, editable=False)
payeeCommission = models.DecimalField('אחוז עמלת נפרעים', max_digits = 6, decimal_places=0, default=24)
payee = models.DecimalField('נפרעים', max_digits=5, decimal_places=0, default=0, editable=False)
extendCommission = models.DecimalField('אחוז עמלת היקף', max_digits=5, decimal_places=0, default=50)
yearlypExtendCommission = models.DecimalField('עמלת היקף', max_digits= 6, decimal_places=2, editable=False)
yearlCommissionFunded = models.DecimalField('סך עמלה בשנה', max_digits=12, decimal_places=2, blank=True,
null=True, editable=False)
planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
def calc_total1(self):
amount1 = (self.monthlyPremium * 12)
return amount1
def calc_total2(self):
amount2 = (self.payeeCommission / 100 * self.monthlyPremium)
return amount2
def calc_total3(self):
amount3 = (self.extendCommission / 100 * self.monthlyPremium * 12)
return amount3
def calc_total4(self):
amount4 = (self.extendCommission / 100 * self.monthlyPremium * 12 + (self.payeeCommission / 100 * self.monthlyPremium))
return amount4
    def save(self, *args, **kwargs):
        self.yearlyPremium = self.calc_total1()
        self.payee = self.calc_total2()
        self.yearlypExtendCommission = self.calc_total3()
        self.yearlCommissionFunded = self.calc_total4()
        super(healthLife, self).save(*args, **kwargs)
class Meta:
verbose_name = 'לקוח'
verbose_name_plural = 'בריאות וחיים'
# Summary of commissions and targets
class summary (models.Model):
target = models.CharField('יעד', max_length=30)
# nameId = models.IntegerField('תעודת זהות', null=True)
# insuranceCompany = models.CharField('חברת ביטוח', max_length=3, choices=INSURANCECOMPANY, default='HAR')
# policyType = models.CharField('סוג פוליסה', max_length=3, choices=HEALTHLIFEPOLICY, default='HEA')
# policyID = models.IntegerField('מספר פוליסה', null=True)
# monthlyPremium = models.DecimalField('פרמיה חודשית', decimal_places=2, max_digits=6, default=0)
# yearlyPremium = models.DecimalField('פרמיה שנתית', decimal_places=0, max_digits=6, default=0, editable=False)
# payeeCommission = models.DecimalField('אחוז עמלת נפרעים', max_digits = 6, decimal_places=0, default=24)
# payee = models.DecimalField('נפרעים', max_digits=5, decimal_places=0, default=0, editable=False)
# extendCommission = models.DecimalField('אחוז עמלת היקף', max_digits=5, decimal_places=0, default=50)
# yearlypExtendCommission = models.DecimalField('עמלת היקף', max_digits= 6, decimal_places=2, editable=False)
# yearlCommissionFunded = models.DecimalField('סך עמלה בשנה', max_digits=12, decimal_places=2, blank=True,
# null=True, editable=False)
# planState = models.CharField('מצב תכנית', max_length=3, choices=PLANSTATE, default='ACT')
# # created_at = models.DateTimeField(auto_now_add=True)
# # updated_at = models.DateTimeField(auto_now=True)
#
# def calc_total1(self):
# amount1 = (self.monthlyPremium * 12)
# return amount1
#
# def calc_total2(self):
# amount2 = (self.payeeCommission / 100 * self.monthlyPremium)
# return amount2
#
# def calc_total3(self):
# amount3 = (self.extendCommission / 100 * self.monthlyPremium * 12)
# return amount3
#
# def calc_total4(self):
# amount4 = (self.extendCommission / 100 * self.monthlyPremium * 12 + (self.payeeCommission / 100 * self.monthlyPremium))
# return amount4
#
# def save(self):
# self.yearlyPremium = self.calc_total1()
# self.payee = self.calc_total2()
# self.yearlypExtendCommission = self.calc_total3()
# self.yearlCommissionFunded = self.calc_total4()
# super(healthLife, self).save()
class Meta:
verbose_name = 'טבלה'
verbose_name_plural = 'סיכום עמלות ויעדים'
|
from flask import Flask, render_template
import requests
import random
app = Flask(__name__)
# 1. Define the route the user will access
@app.route('/')
def hello_world():
    print('hello world')
    # a Flask view must return a response, not just print
    return 'hello world'
# original code, kept for later modification
@app.route('/service.html')
def service():
    # return the HTML
    # the template must live inside the templates folder
    # load it with render_template
    menu_db = [
        'BBQ 황금 올리브치킨', 'BHC 뿌링클', '네네치킨 오리엔탈파닭', '교촌치킨 레드콤보', '페리카나 양념치킨', '굽네치킨 고추바사삭', '호식이두마리치킨 매운간장치킨', 'BHC 맛초킹',
        '파파존스 수퍼파파스', '도미노 베스트콰트로', '피자스쿨 고구마피자', '피자에땅 달피자', ]
    ans = random.choice(menu_db)
    return render_template('service.html', random_menu=ans)
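# Assumed entry point (not in the original): run the Flask development server.
if __name__ == '__main__':
    app.run(debug=True)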
|
from pywebio.output import *
from pywebio.input import *
from pywebio.session import *
from functools import partial
class CRUDTable():
'''
Generalizable Create, Read, Update, Delete Table class.
:param gen_data_func: custom function that has procedure for generating the table data
:param edit_func: custom function that edits, requires parameter "i" (index)
:param del_func: custom function that deletes, requires parameter "i" (index)
'''
def __init__(self, gen_data_func, edit_func, del_func):
self.datatable = gen_data_func()
self.gen_data_func = gen_data_func
self.edit_func = edit_func
self.del_func = del_func
def put_crud_table(self):
# the CRUD table without the header
table = []
        for i, table_row in enumerate(self.datatable):
            # skip the header row (index 0)
            if i == 0:
                continue
            # build the full table row: render each data element, then append
            # the edit/delete buttons; i indexes directly into self.datatable
            # (header at index 0), so the click handlers can address this row
            table_row = [put_text(row_element) for row_element in table_row] + [
                put_buttons(["◀️"], onclick=partial(self.handle_edit_delete, custom_func=self.edit_func, i=i)),
                put_buttons(["✖️"], onclick=partial(self.handle_edit_delete, custom_func=self.del_func, i=i))
            ]
            table.append(table_row)
with use_scope("table_scope", clear=True):
put_table(table,
header= self.datatable[0] + ["Edit", "Delete"]
)
def handle_edit_delete(self, dummy, custom_func, i):
'''when edit/delete button is pressed, execute the custom edit/delete
function as well as update CRUD table'''
        # originally had this in the custom functions in step5_filemanager.py,
        # but it's probably best to keep it within the CRUDTable class so it can
        # requery all the filepaths and refresh the table
if custom_func == self.edit_func:
# if edit function, just do custom_func(i) without confirmation
custom_func(i)
# refresh table
self.datatable = self.gen_data_func()
self.put_crud_table()
# if it's the delete function, ask for confirmation
        if custom_func == self.del_func:
            # melt the data (row becomes key/value pairs); i already indexes the
            # row in self.datatable, so use it directly (i + 1 would show the
            # wrong row and crash on the last one)
            datatable_melt = list(zip(self.datatable[0], self.datatable[i]))
popup(
'⚠️ Are you sure you want to delete?',
[
put_table(datatable_melt, header=["row", "data"]),
put_buttons(['confirm', 'cancel'],
onclick = lambda x: self.handle_confirm(i) if x == 'confirm' else close_popup())
]
)
def handle_confirm(self, i):
''' if confirm button pressed in deletion confirmation, delete, and also close popup'''
self.del_func(i)
close_popup()
# refresh table
self.datatable = self.gen_data_func()
self.put_crud_table()
sample_table = [
['Month', 'YouTube views', 'MoM growth'],
['2020-11', '167', '-'],
['2020-12', '233', '4%'],
['2021-01', '337', '200%'],
['2021-02', '440', '218%'],
['2021-03', '785', '15%'],
['2021-04', '6124', '174%'],
['2021-05', '88588', '1125%'],
['2021-05', '6500', '100%']
]
def generate_datatable():
'''
custom generate function to use for the CRUD table
function for generating data.
index 0 should be the headers.
'''
# datatable = [['header1', 'header2']] + data
# here, data should be format [[row1col1,row1col2], [row2col1,row2col2]]
# (notice that sublist size = 2 = # of header labels
# I use [[filepath] for filepath... because pwl.find_blogfile()
# generates list of strings. doing list addition without [filepath]
# breaks strings and puts an alphabet in each table.
return sample_table
def edit_table(i):
'''
custom edit function to use for the CRUD table
load an old blog post, edit it
'''
sample_table[i][1] = input('input new view data for %s'% sample_table[i][0])
def delete_table(i):
'''
custom delete function to use for the CRUD table
delete specific file
'''
sample_table.pop(i)
def main():
'''CRUD table demo'''
# Header
# datatable = [header, row1, row2, row3] for the crud table
growth_table = CRUDTable(gen_data_func=generate_datatable, edit_func=edit_table, del_func=delete_table)
growth_table.put_crud_table()
hold()
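# Assumed entry point (not in the original): serve the demo with PyWebIO's
# built-in server on a hypothetical port.
if __name__ == '__main__':
    from pywebio import start_server
    start_server(main, port=8080)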
|
""" Script to check conversion from nPE to MeV of neutrons and protons, respectively, which were simulated
with tut_detsim.py of JUNO offline version J18v1r1-pre1.
Results of this script are used to convert neutron/proton/positron with specific energy in MeV to number of PE
in the JUNO detector.
With this conversion the cut on the energy of a possible prompt signal can be made in the PE-regime and efficiency
of this cut can be calculated.
More information: info_conversion_proton_neutron.odt (/home/astro/blum/juno/atmoNC/data_NC/conversion_nPE_MeV/)
"""
import datetime
import NC_background_functions
import numpy as np
from matplotlib import pyplot as plt
from decimal import Decimal
from matplotlib.colors import LogNorm
""" define gaussian function: """
def gaussian(x, a, b, c):
return a * np.exp(- (x-b)**2 / (2*c**2))
def get_info_from_file(start, stop, filename, num_entries, radius_cut):
"""
:param start: number of first file
:param stop: number of last file
:param filename: path and name of the file
:param num_entries: number of entries per file
    :param radius_cut: radius that defines the volume cut, in mm
:return:
"""
""" preallocate arrays: """
# number of PE of each event:
number_pe = np.array([])
# initial total momentum of each event in MeV:
momentum_init = np.array([])
# deposit energy in each event in MeV:
edep = np.array([])
# quenched deposit energy in each event in MeV:
qedep = np.array([])
# loop over files of proton = 10 MeV:
for num in range(start, stop+1, 1):
# path to file:
input_file = filename + "_{0:d}.root".format(num)
# get number of PE per event (array of int), hit-times of the last event in ns (array of float),
# initial momentum per event in MeV (array of float), deposit energy per event in MeV and quenched deposit
# energy per event in MeV:
num_pe, momentum, e, qe = NC_background_functions.conversion_npe_mev(input_file, num_entries, radius_cut)
# append arrays to array:
number_pe = np.append(number_pe, num_pe)
momentum_init = np.append(momentum_init, momentum)
edep = np.append(edep, e)
qedep = np.append(qedep, qe)
return number_pe, momentum_init, edep, qedep
def save_array_to_file(arr, out_path, file_name, number_events):
"""
    function to save an array (either number_pe or qedep) to a txt file, so that the
    ROOT files only have to be read once
:param arr: array that should be saved (array of float)
:param out_path: path, where the txt file should be saved (string)
:param file_name: file name of the txt file (string)
:param number_events: number of events in the array/root-file
:return:
"""
np.savetxt(out_path + file_name + ".txt", arr, fmt='%1.5f',
header="{0} of {1:d} events analyzed with function get_info_from_file() in script\n "
"check_conversion_npe_mev.py (number of photo-electron per event OR \n"
"quenched deposited energy/ visible energy per event in MeV).\n"
"({2})\n"
"(volume cut (R <= {3:d} mm) applied on initial position):"
.format(file_name, number_events, now, r_cut))
return
# get the date and time, when the script was run:
date = datetime.datetime.now()
now = date.strftime("%Y-%m-%d %H:%M")
# set the path of the input files:
input_path = "/local/scratch1/pipc51/astro/blum/conversion_nPE_MeV/"
input_proton = input_path + "proton_output/"
input_neutron = input_path + "neutron_output/"
# set path, where results should be saved:
output_path = "/home/astro/blum/juno/atmoNC/data_NC/conversion_nPE_MeV/"
# set the number of the first file and number of the last file that should be read:
start_number = 0
stop_number_p = 99
stop_number_n = 99
# number of entries in the input files:
Number_entries_input = 10
# total number of events:
number_events_p = (stop_number_p - start_number + 1) * Number_entries_input
number_events_n = (stop_number_n - start_number + 1) * Number_entries_input
# set the radius for the volume cut in mm:
r_cut = 16000
# set maximum visible energy for plots and fit in MeV:
max_evis = 120.0
# Set boolean variables:
PLOT_INITENERGY = False
READ_P_10MEV = True
READ_N_10MEV = True
READ_P_100MEV = True
READ_N_100MEV = True
READ_N_300MEV = True
READ_N_500MEV = True
READ_N_500MEV_2 = True
READ_N_500MEV_3 = True
READ_N_500MEV_4 = True
READ_N_500MEV_5 = True
READ_N_500MEV_6 = True
READ_N_500MEV_7 = True
READ_N_500MEV_8 = True
READ_P_1GEV = True
READ_N_1GEV = True
""" 10 MeV proton: """
if READ_P_10MEV:
print("\nstart reading 10 MeV proton files...")
# file name:
file_p_10MeV = input_proton + "user_proton_10_MeV"
# read info of all files of 10 MeV protons:
number_pe_p_10MeV, momentum_init_p_10MeV, edep_p_10MeV, qedep_p_10MeV = \
get_info_from_file(start_number, stop_number_p, file_p_10MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_p_10MeV, output_path, "number_pe_p_10MeV", number_events_p)
# save qedep to txt file:
save_array_to_file(qedep_p_10MeV, output_path, "qedep_p_10MeV", number_events_p)
else:
# load number of pe and qedep array from txt file:
number_pe_p_10MeV = np.loadtxt(output_path + "number_pe_p_10MeV.txt")
qedep_p_10MeV = np.loadtxt(output_path + "qedep_p_10MeV.txt")
""" 10 MeV neutron: """
if READ_N_10MEV:
print("\nstart reading 10 MeV neutron files...")
# file name:
file_n_10MeV = input_neutron + "user_neutron_10_MeV"
# read info of all files of 10 MeV neutrons:
number_pe_n_10MeV, momentum_init_n_10MeV, edep_n_10MeV, qedep_n_10MeV = \
get_info_from_file(start_number, stop_number_n, file_n_10MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_10MeV, output_path, "number_pe_n_10MeV", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_10MeV, output_path, "qedep_n_10MeV", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_10MeV = np.loadtxt(output_path + "number_pe_n_10MeV.txt")
qedep_n_10MeV = np.loadtxt(output_path + "qedep_n_10MeV.txt")
""" 100 MeV proton: """
if READ_P_100MEV:
print("\nstart reading 100 MeV proton files...")
# file name:
file_p_100MeV = input_proton + "user_proton_100_MeV"
# read info of all files of 100 MeV protons:
number_pe_p_100MeV, momentum_init_p_100MeV, edep_p_100MeV, qedep_p_100MeV = \
get_info_from_file(start_number, stop_number_p, file_p_100MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_p_100MeV, output_path, "number_pe_p_100MeV", number_events_p)
# save qedep to txt file:
save_array_to_file(qedep_p_100MeV, output_path, "qedep_p_100MeV", number_events_p)
else:
# load number of pe and qedep array from txt file:
number_pe_p_100MeV = np.loadtxt(output_path + "number_pe_p_100MeV.txt")
qedep_p_100MeV = np.loadtxt(output_path + "qedep_p_100MeV.txt")
""" 100 MeV neutron: """
if READ_N_100MEV:
print("\nstart reading 100 MeV neutron files...")
# file name:
file_n_100MeV = input_neutron + "user_neutron_100_MeV"
# read info of all files of 100 MeV neutrons:
number_pe_n_100MeV, momentum_init_n_100MeV, edep_n_100MeV, qedep_n_100MeV = \
get_info_from_file(start_number, stop_number_n, file_n_100MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_100MeV, output_path, "number_pe_n_100MeV", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_100MeV, output_path, "qedep_n_100MeV", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_100MeV = np.loadtxt(output_path + "number_pe_n_100MeV.txt")
qedep_n_100MeV = np.loadtxt(output_path + "qedep_n_100MeV.txt")
""" 300 MeV neutron: """
if READ_N_300MEV:
print("\nstart reading 300 MeV neutron files...")
# file name:
file_n_300MeV = input_neutron + "user_neutron_300_MeV"
# read info of all files of 300 MeV neutrons:
number_pe_n_300MeV, momentum_init_n_300MeV, edep_n_300MeV, qedep_n_300MeV = \
get_info_from_file(start_number, stop_number_n, file_n_300MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_300MeV, output_path, "number_pe_n_300MeV", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_300MeV, output_path, "qedep_n_300MeV", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_300MeV = np.loadtxt(output_path + "number_pe_n_300MeV.txt")
qedep_n_300MeV = np.loadtxt(output_path + "qedep_n_300MeV.txt")
""" 500 MeV neutron (user_neutron_500_MeV_0.root to user_neutron_500_MeV_99.root): """
if READ_N_500MEV:
print("\nstart reading 500 MeV neutron files...")
# file name:
file_n_500MeV = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV, momentum_init_n_500MeV, edep_n_500MeV, qedep_n_500MeV = \
get_info_from_file(start_number, stop_number_n, file_n_500MeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV, output_path, "number_pe_n_500MeV", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV, output_path, "qedep_n_500MeV", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV = np.loadtxt(output_path + "number_pe_n_500MeV.txt")
qedep_n_500MeV = np.loadtxt(output_path + "qedep_n_500MeV.txt")
""" 500 MeV neutron (user_neutron_500_MeV_100.root to user_neutron_500_MeV_199.root): """
if READ_N_500MEV_2:
print("\nstart reading 500 MeV neutron files 2...")
# file name:
file_n_500MeV_2 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_2, momentum_init_n_500MeV_2, edep_n_500MeV_2, qedep_n_500MeV_2 = \
get_info_from_file(start_number+100, stop_number_n+100, file_n_500MeV_2, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_2, output_path, "number_pe_n_500MeV_2", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_2, output_path, "qedep_n_500MeV_2", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_2 = np.loadtxt(output_path + "number_pe_n_500MeV_2.txt")
qedep_n_500MeV_2 = np.loadtxt(output_path + "qedep_n_500MeV_2.txt")
""" 500 MeV neutron (user_neutron_500_MeV_200.root to user_neutron_500_MeV_299.root): """
if READ_N_500MEV_3:
print("\nstart reading 500 MeV neutron files 3...")
# file name:
file_n_500MeV_3 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_3, momentum_init_n_500MeV_3, edep_n_500MeV_3, qedep_n_500MeV_3 = \
get_info_from_file(start_number+200, stop_number_n+200, file_n_500MeV_3, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_3, output_path, "number_pe_n_500MeV_3", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_3, output_path, "qedep_n_500MeV_3", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_3 = np.loadtxt(output_path + "number_pe_n_500MeV_3.txt")
qedep_n_500MeV_3 = np.loadtxt(output_path + "qedep_n_500MeV_3.txt")
""" 500 MeV neutron (user_neutron_500_MeV_300.root to user_neutron_500_MeV_399.root): """
if READ_N_500MEV_4:
print("\nstart reading 500 MeV neutron files 4...")
# file name:
file_n_500MeV_4 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_4, momentum_init_n_500MeV_4, edep_n_500MeV_4, qedep_n_500MeV_4 = \
get_info_from_file(start_number+300, stop_number_n+300, file_n_500MeV_4, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_4, output_path, "number_pe_n_500MeV_4", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_4, output_path, "qedep_n_500MeV_4", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_4 = np.loadtxt(output_path + "number_pe_n_500MeV_4.txt")
qedep_n_500MeV_4 = np.loadtxt(output_path + "qedep_n_500MeV_4.txt")
""" 500 MeV neutron (user_neutron_500_MeV_400.root to user_neutron_500_MeV_499.root): """
if READ_N_500MEV_5:
print("\nstart reading 500 MeV neutron files 5...")
# file name:
file_n_500MeV_5 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_5, momentum_init_n_500MeV_5, edep_n_500MeV_5, qedep_n_500MeV_5 = \
get_info_from_file(start_number+400, stop_number_n+400, file_n_500MeV_5, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_5, output_path, "number_pe_n_500MeV_5", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_5, output_path, "qedep_n_500MeV_5", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_5 = np.loadtxt(output_path + "number_pe_n_500MeV_5.txt")
qedep_n_500MeV_5 = np.loadtxt(output_path + "qedep_n_500MeV_5.txt")
""" 500 MeV neutron (user_neutron_500_MeV_500.root to user_neutron_500_MeV_599.root): """
if READ_N_500MEV_6:
print("\nstart reading 500 MeV neutron files 6...")
# file name:
file_n_500MeV_6 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_6, momentum_init_n_500MeV_6, edep_n_500MeV_6, qedep_n_500MeV_6 = \
get_info_from_file(start_number+500, stop_number_n+500, file_n_500MeV_6, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_6, output_path, "number_pe_n_500MeV_6", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_6, output_path, "qedep_n_500MeV_6", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_6 = np.loadtxt(output_path + "number_pe_n_500MeV_6.txt")
qedep_n_500MeV_6 = np.loadtxt(output_path + "qedep_n_500MeV_6.txt")
""" 500 MeV neutron (user_neutron_500_MeV_600.root to user_neutron_500_MeV_699.root): """
if READ_N_500MEV_7:
print("\nstart reading 500 MeV neutron files 7...")
# file name:
file_n_500MeV_7 = input_neutron + "user_neutron_500_MeV"
# read info of all files of 500 MeV neutrons:
number_pe_n_500MeV_7, momentum_init_n_500MeV_7, edep_n_500MeV_7, qedep_n_500MeV_7 = \
get_info_from_file(start_number+600, stop_number_n+600, file_n_500MeV_7, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_500MeV_7, output_path, "number_pe_n_500MeV_7", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_500MeV_7, output_path, "qedep_n_500MeV_7", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_500MeV_7 = np.loadtxt(output_path + "number_pe_n_500MeV_7.txt")
qedep_n_500MeV_7 = np.loadtxt(output_path + "qedep_n_500MeV_7.txt")
# """ 500 MeV neutron (user_neutron_500_MeV_700.root to user_neutron_500_MeV_799.root): """
# if READ_N_500MEV_8:
# print("\nstart reading 500 MeV neutron files 8...")
# # file name:
# file_n_500MeV_8 = input_neutron + "user_neutron_500_MeV"
# # read info of all files of 500 MeV neutrons:
# number_pe_n_500MeV_8, momentum_init_n_500MeV_8, edep_n_500MeV_8, qedep_n_500MeV_8 = \
# get_info_from_file(start_number+700, stop_number_n+700, file_n_500MeV_8, Number_entries_input, r_cut)
# # save number of pe to txt file:
# save_array_to_file(number_pe_n_500MeV_8, output_path, "number_pe_n_500MeV_8", number_events_n)
# # save qedep to txt file:
# save_array_to_file(qedep_n_500MeV_8, output_path, "qedep_n_500MeV_8", number_events_n)
# else:
# # load number of pe and qedep array from txt file:
# number_pe_n_500MeV_8 = np.loadtxt(output_path + "number_pe_n_500MeV_8.txt")
# qedep_n_500MeV_8 = np.loadtxt(output_path + "qedep_n_500MeV_8.txt")
""" 1 GeV proton: """
if READ_P_1GEV:
print("\nstart reading 1 GeV proton files...")
# file name:
file_p_1GeV = input_proton + "user_proton_1000_MeV"
# read info of all files of 1 GeV protons:
number_pe_p_1GeV, momentum_init_p_1GeV, edep_p_1GeV, qedep_p_1GeV = \
get_info_from_file(start_number, stop_number_p, file_p_1GeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_p_1GeV, output_path, "number_pe_p_1GeV", number_events_p)
# save qedep to txt file:
save_array_to_file(qedep_p_1GeV, output_path, "qedep_p_1GeV", number_events_p)
else:
# load number of pe and qedep array from txt file:
number_pe_p_1GeV = np.loadtxt(output_path + "number_pe_p_1GeV.txt")
qedep_p_1GeV = np.loadtxt(output_path + "qedep_p_1GeV.txt")
""" 1 GeV neutron: """
if READ_N_1GEV:
print("\nstart reading 1 GeV neutron files...")
# file name:
file_n_1GeV = input_neutron + "user_neutron_1000_MeV"
# read info of all files of 1 GeV neutrons:
number_pe_n_1GeV, momentum_init_n_1GeV, edep_n_1GeV, qedep_n_1GeV = \
get_info_from_file(start_number, stop_number_n, file_n_1GeV, Number_entries_input, r_cut)
# save number of pe to txt file:
save_array_to_file(number_pe_n_1GeV, output_path, "number_pe_n_1GeV", number_events_n)
# save qedep to txt file:
save_array_to_file(qedep_n_1GeV, output_path, "qedep_n_1GeV", number_events_n)
else:
# load number of pe and qedep array from txt file:
number_pe_n_1GeV = np.loadtxt(output_path + "number_pe_n_1GeV.txt")
qedep_n_1GeV = np.loadtxt(output_path + "qedep_n_1GeV.txt")
""" linear fit to qedep vs. nPE diagram: """
# build one array for qedep:
qedep_total = np.concatenate((qedep_p_10MeV, qedep_n_10MeV, qedep_p_100MeV, qedep_n_100MeV, qedep_n_300MeV,
qedep_n_500MeV, qedep_n_500MeV_2, qedep_n_500MeV_3, qedep_n_500MeV_4, qedep_n_500MeV_5,
qedep_n_500MeV_6, qedep_n_500MeV_7, qedep_p_1GeV, qedep_n_1GeV))
# build one array for number of p.e.:
number_pe_total = np.concatenate((number_pe_p_10MeV, number_pe_n_10MeV, number_pe_p_100MeV, number_pe_n_100MeV,
number_pe_n_300MeV, number_pe_n_500MeV, number_pe_n_500MeV_2, number_pe_n_500MeV_3,
number_pe_n_500MeV_4, number_pe_n_500MeV_5, number_pe_n_500MeV_6,
number_pe_n_500MeV_7, number_pe_p_1GeV,
number_pe_n_1GeV))
""" take only values for qedep below max_evis: """
# preallocate arrays:
qedep_total_interesting = np.array([])
number_pe_total_interesting = np.array([])
# loop over qedep_total:
for index in range(len(qedep_total)):
if qedep_total[index] <= max_evis:
qedep_total_interesting = np.append(qedep_total_interesting, qedep_total[index])
number_pe_total_interesting = np.append(number_pe_total_interesting, number_pe_total[index])
""" do linear fit """
# do linear fit with np.linalg.lstsq:
# The model is y = a * x; x = number_pe_total_interesting, y = qedep_total_interesting
# x needs to be a column vector instead of a 1D vector for this, however.
number_pe_total_interesting_columnvector = number_pe_total_interesting[:, np.newaxis]
# first value of output is the slope of the linear fit (fit_result is an array with one entry):
fit_result = np.linalg.lstsq(number_pe_total_interesting_columnvector, qedep_total_interesting, rcond=None)[0]
# take first entry of fit_result:
fit_result = fit_result[0]
# set x axis for linear fit:
fit_x_axis = np.arange(0, max(number_pe_total_interesting), 100)
# set y axis for linear fit:
fit_y_axis = fit_result * fit_x_axis
print("n_500MeV = {0:d}".format(len(number_pe_n_500MeV) + len(number_pe_n_500MeV_2) +
len(number_pe_n_500MeV_3) + len(number_pe_n_500MeV_4) + len(number_pe_n_500MeV_5) +
len(number_pe_n_500MeV_6) + len(number_pe_n_500MeV_7)))
""" plot Qedep as function of nPE for all energies: """
h1 = plt.figure(1, figsize=(15, 8))
num_proton = len(number_pe_p_10MeV) + len(number_pe_p_100MeV) + len(number_pe_p_1GeV)
num_neutron = len(number_pe_n_10MeV) + len(number_pe_n_100MeV) + len(number_pe_n_300MeV) + len(number_pe_n_1GeV) + \
len(number_pe_n_500MeV) + len(number_pe_n_500MeV_2) + len(number_pe_n_500MeV_3) + \
len(number_pe_n_500MeV_4) + len(number_pe_n_500MeV_5) + len(number_pe_n_500MeV_6) + \
len(number_pe_n_500MeV_7)
plt.plot(number_pe_p_10MeV, qedep_p_10MeV, "rx", label="proton ({0:d} entries)".format(num_proton))
plt.plot(number_pe_n_10MeV, qedep_n_10MeV, "bx", label="neutron ({0:d} entries)".format(num_neutron))
plt.plot(number_pe_p_100MeV, qedep_p_100MeV, "rx")
plt.plot(number_pe_n_100MeV, qedep_n_100MeV, "bx")
plt.plot(number_pe_n_300MeV, qedep_n_300MeV, "bx")
plt.plot(number_pe_n_500MeV, qedep_n_500MeV, "bx")
plt.plot(number_pe_n_500MeV_2, qedep_n_500MeV_2, "bx")
plt.plot(number_pe_n_500MeV_3, qedep_n_500MeV_3, "bx")
plt.plot(number_pe_n_500MeV_4, qedep_n_500MeV_4, "bx")
plt.plot(number_pe_n_500MeV_5, qedep_n_500MeV_5, "bx")
plt.plot(number_pe_n_500MeV_6, qedep_n_500MeV_6, "bx")
plt.plot(number_pe_n_500MeV_7, qedep_n_500MeV_7, "bx")
# plt.plot(number_pe_n_500MeV_8, qedep_n_500MeV_8, "bx")
plt.plot(number_pe_p_1GeV, qedep_p_1GeV, "rx")
plt.plot(number_pe_n_1GeV, qedep_n_1GeV, "bx")
plt.xlabel("number of p.e.")
plt.ylabel("visible energy in JUNO detector (in MeV)")
plt.title("Visible energy vs. number of p.e.")
plt.legend()
plt.grid()
plt.savefig(output_path + "qedep_vs_nPE_all_energies.png")
""" plot Qedep as function of nPE for qedep <= max_evis: """
h3 = plt.figure(3, figsize=(15, 8))
plt.plot(number_pe_total_interesting, qedep_total_interesting, "rx",
label="{0:d} entries".format(len(number_pe_total_interesting)))
plt.xlabel("number of p.e.")
plt.ylabel("visible energy in JUNO detector (in MeV)")
plt.title("Visible energy vs. number of p.e.")
plt.legend()
plt.grid()
plt.savefig(output_path + "qedep_vs_nPE_interesting.png")
""" plot Qedep as function of nPE with fit for qedep <= max_evis: """
h4 = plt.figure(4, figsize=(15, 8))
plt.plot(number_pe_total_interesting, qedep_total_interesting, "rx",
label="{0:d} entries".format(len(number_pe_total_interesting)))
plt.plot(fit_x_axis, fit_y_axis, "b", label="linear fit: f(x) = {0:.3E} * x"
.format(fit_result))
plt.xlabel("number of p.e.")
plt.ylabel("visible energy in JUNO detector (in MeV)")
plt.title("Visible energy vs. number of p.e.\n(with linear fit)")
plt.legend()
plt.grid()
plt.savefig(output_path + "fit_qedep_vs_nPE_interesting.png")
""" display Qedep as function of nPE in 2D histogram for qedep <= max_evis: """
h5 = plt.figure(5, figsize=(15, 8))
bins_edges_nPE = np.arange(0, max(number_pe_total_interesting), 2000)
bins_edges_Qedep = np.arange(0, max_evis+2, 2)
plt.hist2d(number_pe_total_interesting, qedep_total_interesting, [bins_edges_nPE, bins_edges_Qedep], norm=LogNorm(),
cmap="rainbow")
plt.xlabel("number of p.e.")
plt.ylabel("visible energy in JUNO detector (in MeV)")
plt.title("Visible energy vs. number of p.e.")
plt.colorbar()
plt.legend()
plt.grid()
plt.savefig(output_path + "hist2d_Qedep_vs_nPE_interesting.png")
""" display Qedep as function of nPE in 2D histogram for qedep <= max_evis with fit: """
h6 = plt.figure(6, figsize=(15, 8))
bins_edges_nPE = np.arange(0, max(number_pe_total_interesting), 2000)
bins_edges_Qedep = np.arange(0, max_evis+2, 2)
plt.hist2d(number_pe_total_interesting, qedep_total_interesting, [bins_edges_nPE, bins_edges_Qedep], norm=LogNorm(),
cmap="rainbow")
plt.plot(fit_x_axis, fit_y_axis, "k", label="{1:d} entries\nlinear fit: f(x) = {0:.3E} * x"
.format(fit_result, len(number_pe_total_interesting)))
plt.xlabel("number of p.e.")
plt.ylabel("visible energy in JUNO detector (in MeV)")
plt.title("Visible energy vs. number of p.e.\nwith linear fit")
plt.colorbar()
plt.legend()
plt.grid()
plt.savefig(output_path + "hist2d_Qedep_vs_nPE_interesting_fit.png")
# plot initial energy of proton/neutron:
if PLOT_INITENERGY and READ_P_10MEV and READ_N_10MEV and READ_P_100MEV and READ_N_100MEV and READ_N_300MEV and \
READ_P_1GEV and READ_N_1GEV:
h3 = plt.figure(3, figsize=(15, 8))
bin_width = 0.1
Bins = np.arange(9.5, 1000.5, bin_width)
plt.hist(momentum_init_p_10MeV, bins=Bins, color='r', align='mid',
label="{0:d} protons".format(number_events_p) + " with $E_{kin}$ = 10 MeV")
plt.hist(momentum_init_n_10MeV, bins=Bins, color='b', align='mid',
label="{0:d} neutrons".format(number_events_n) + " with $E_{kin}$ = 10 MeV")
plt.hist(momentum_init_p_100MeV, bins=Bins, color='r', linestyle="--", align='mid',
label="{0:d} protons".format(number_events_p) + " with $E_{kin}$ = 100 MeV")
plt.hist(momentum_init_n_100MeV, bins=Bins, color='b', linestyle="--", align='mid',
label="{0:d} neutrons".format(number_events_n) + " with $E_{kin}$ = 100 MeV")
plt.hist(momentum_init_n_300MeV, bins=Bins, color='b', linestyle="-.", align='mid',
label="{0:d} neutrons".format(number_events_n) + " with $E_{kin}$ = 300 MeV")
plt.hist(momentum_init_p_1GeV, bins=Bins, color='r', linestyle=":", align='mid',
label="{0:d} protons".format(number_events_p) + " with $E_{kin}$ = 1 GeV")
plt.hist(momentum_init_n_1GeV, bins=Bins, color='b', linestyle=":", align='mid',
label="{0:d} neutrons".format(number_events_n) + " with $E_{kin}$ = 1 GeV")
plt.xlabel("initial kinetic energy in MeV", fontsize=13)
plt.ylabel("entries per bin (bin-width = {0:0.3f} MeV)".format(bin_width), fontsize=13)
plt.title("Initial neutron/proton energy", fontsize=18)
plt.legend()
plt.grid()
plt.savefig(output_path + "init_energy.png")
""" Efficiency of the conversion fit: """
# the prompt energy cut is defined by min_ecut and max_ecut in MeV:
min_ecut = 10.0
max_ecut = 100.0
# total number of simulated events:
number_entries = len(number_pe_total)
# preallocate number of 'real' entries inside energy window:
number_entries_real = 0
# preallocate number of 'calculated' entries inside energy window:
number_entries_calculated = 0
# preallocate number of events the fit undercounts (inside the window in the real data, but not in the calculated data):
number_too_less = 0
# preallocate number of events the fit overcounts (inside the window in the calculated data, but not in the real data):
number_too_much = 0
# calculate Qedep for each entry in number_pe_total with the function of the linear fit:
qedep_calculated = fit_result * number_pe_total
# loop over qedep_total (same as looping over qedep_calculated, since len(qedep_total) == len(qedep_calculated)).
# Therefore check the lengths beforehand:
if len(qedep_total) != len(qedep_calculated):
print("--------------------ERROR: len(qedep_total) != len(qedep_calculated)")
for index in range(len(qedep_total)):
    # flags to mark whether this entry falls inside the window for real/calculated data
    # (note: 0 doubles as the "unset" value here, so the entry at index 0 can never be flagged):
    index_real = 0
    index_calc = 0
# get the number of entries from the simulated data, where min_ecut <= Qedep <= max_ecut:
# check min_ecut <= Qedep <= max_ecut:
if min_ecut <= qedep_total[index] <= max_ecut:
# entry inside energy window:
number_entries_real += 1
# get index:
index_real = index
# get the number of entries from the calculated data, where min_ecut <= qedep_calc <= max_ecut:
# check min_ecut <= Qedep <= max_ecut:
if min_ecut <= qedep_calculated[index] <= max_ecut:
# entry inside energy window:
number_entries_calculated += 1
# get index:
index_calc = index
    # entry in the real data but not in the calculated data (the fit undercounts these events):
    if index_real != 0 and index_calc == 0:
        number_too_less += 1
print("\nindex_real = {0:d}, index_calc = {1:d}".format(index_real, index_calc))
print("qedep_total = {0:.2f} MeV".format(qedep_total[index]))
print("qedep_calculated = {0:.2f} MeV".format(qedep_calculated[index]))
    # entry in the calculated data but not in the real data (the fit overcounts these events):
    if index_real == 0 and index_calc != 0:
        number_too_much += 1
print("\nindex_real = {0:d}, index_calc = {1:d}".format(index_real, index_calc))
print("qedep_total = {0:.2f} MeV".format(qedep_total[index]))
print("qedep_calculated = {0:.2f} MeV".format(qedep_calculated[index]))
# calculate the efficiency of the prompt energy cut (describes the 'error', when using the conversion from nPE to Qedep)
# in percent:
efficiency_prompt_energy_cut = 100 + float(number_too_less - number_too_much) / float(number_entries_calculated) * 100
print("total number of simulated events = {0:d}\n".format(number_entries))
print("number of 'real' entries from simulated data with {0:.1f} MeV <= Qedep_real <= {1:.1f} MeV: {2:d}\n"
.format(min_ecut, max_ecut, number_entries_real))
print("number of entries calculated with linear fit with {0:.1f} MeV <= Qedep_calc <= {1:.1f} MeV: {2:d}\n"
.format(min_ecut, max_ecut, number_entries_calculated))
print("number of events counted too less = {0:d}".format(number_too_less))
print("number of events counted too much = {0:d}".format(number_too_much))
print("efficiency of prompt energy cut = {0:.4f} % (1 + (number_too_less - number_too_much) / number_calculated)"
.format(efficiency_prompt_energy_cut))
|
'''A module that contains classes and functions for using tensorflow.'''
from contextlib import contextmanager
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (Dense, Conv2D, MaxPooling2D, Flatten,
InputLayer)
def wrap_in_session(function, session=None):
'''
Wraps an object returned by a function in a SessionWrapper.
:param function: (callable) A callable that returns an object.
:param session: (None or tensorflow.Session) A session that is wrapped
over all methods and attributes of the returned object.
:return: (SessionWrapper) A session wrapped object.
'''
def _wrapped_function(*args, **kwargs):
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
graph = session.graph if session else tf.Graph()
with graph.as_default():
new_session = session or tf.Session(graph=graph, config=config)
with new_session.as_default():
returned_object = function(*args, **kwargs)
return SessionWrapper(returned_object, new_session)
return _wrapped_function
def call_in_session(function, session):
'''
Wraps a function call in a session scope.
:param function: (callable) A callable to call within a session scope.
:param session: (None or tensorflow.Session) A session that is scoped
over the function call.
:return: () Returns the result of function.
'''
def _wrapped_function(*args, **kwargs):
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
graph = session.graph
with graph.as_default():
with session.as_default():
return function(*args, **kwargs)
return _wrapped_function
@wrap_in_session
def create_conv_net(input_shape, output_size, kernel_sizes=(3, 3),
filter_sizes=(32, 64), layers=(256, 256),
activation='relu'):
'''
Create a wrapped keras convnet with its own private session.
:param input_shape: (Sequence) The shape of the expected input.
:param output_size: (int) The number of labels intended to be predicted.
:param kernel_sizes: (Sequence) Defines the sizes of the kernels.
:param filter_sizes: (Sequence) Defines the number of filters.
:param layers: (Sequence) Defines the number of hidden layers.
    :param activation: (str) Defines the activation function to use.
    :return: (SessionWrapper(tf.keras.Model)) A keras model
'''
model = Sequential()
model.add(InputLayer(input_shape))
for k_size, f_size in zip(kernel_sizes, filter_sizes):
model.add(Conv2D(
f_size, kernel_size=k_size, activation=activation, padding='same'
))
model.add(MaxPooling2D(2))
model.add(Flatten())
for hidden_units in layers:
model.add(Dense(hidden_units, activation=activation))
model.add(Dense(output_size, activation='softmax'))
model.compile(
optimizer="adam", loss='binary_crossentropy', metrics=['accuracy']
)
return model
@wrap_in_session
def create_neural_net(input_shape, output_size, layers=(256, 256),
activation='relu'):
'''
Create a wrapped keras neural network with its own private session.
:param input_shape: (Sequence) The shape of the expected input.
:param output_size: (int) The number of labels intended to be predicted.
:param layers: (Sequence) Defines the number of hidden layers.
    :param activation: (str) Defines the activation function to use.
    :return: (SessionWrapper(tf.keras.Model)) A keras model
'''
model = Sequential()
model.add(InputLayer(input_shape))
if len(input_shape) > 1:
model.add(Flatten())
for hidden_units in layers:
model.add(Dense(hidden_units, activation=activation))
model.add(Dense(output_size, activation='softmax'))
model.compile(
optimizer="adam", loss='binary_crossentropy', metrics=['accuracy']
)
return model
class SessionWrapper:
'''A class that encapsulates all methods of a class in a session.'''
def __init__(self, model, session):
'''
Create a session wrapper.
:param model: () An object that will have all its methods wrapped with
a session.
:param session: (tensorflow.Session) Used to wrap all method calls.
'''
self._wrapped_model = model
self._session = session
def __getattr__(self, attr):
if attr in self.__dict__:
return getattr(self, attr)
with self.with_scope():
returned_attr = getattr(self._wrapped_model, attr)
if callable(returned_attr):
return call_in_session(returned_attr, self._session)
return returned_attr
def __repr__(self):
return '<SessionWrapper<{!r}>>'.format(self._wrapped_model)
@contextmanager
def with_scope(self):
'''Enter into the owned session's scope.'''
with self._session.as_default(), self._session.graph.as_default():
yield
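# A minimal usage sketch (assumption: TensorFlow 1.x, where tf.Session and
# tf.ConfigProto exist). Each factory call builds its model inside a private
# session; the SessionWrapper then scopes calls such as fit/predict to it.
if __name__ == '__main__':
    import numpy as np
    net = create_neural_net(input_shape=(4,), output_size=3)
    x = np.random.rand(8, 4).astype('float32')
    y = np.eye(3)[np.random.randint(0, 3, size=8)].astype('float32')
    net.fit(x, y, epochs=1, verbose=0)   # runs inside the wrapped session
    print(net.predict(x).shape)          # -> (8, 3)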
|
import pkg_resources
default_app_config = "pinax.badges.apps.AppConfig"
__version__ = pkg_resources.get_distribution("pinax-badges").version
|
import os
import os.path as ops
import urllib.request
import gzip
import numpy as np
import pickle
def get_mnist_data(datadir):
dataroot = 'http://yann.lecun.com/exdb/mnist/'
key_file = {
'train_img': 'train-images-idx3-ubyte.gz',
'train_label': 'train-labels-idx1-ubyte.gz',
'test_img': 't10k-images-idx3-ubyte.gz',
'test_label': 't10k-labels-idx1-ubyte.gz'
}
os.makedirs(datadir, exist_ok=True)
for key, filename in key_file.items():
if ops.exists(ops.join(datadir, filename)):
print(f"already downloaded : {filename}")
else:
            # note: os.path.join must not be used on URLs (it would produce
            # backslashes on Windows); plain concatenation is correct here
            urllib.request.urlretrieve(dataroot + filename,
                                       ops.join(datadir, filename))
with gzip.open(ops.join(datadir, key_file["train_img"]), "rb") as f:
train_img = np.frombuffer(f.read(), np.uint8, offset=16)
train_img = train_img.reshape(-1, 784)
with gzip.open(ops.join(datadir, key_file["train_label"]), "rb") as f:
train_label = np.frombuffer(f.read(), np.uint8, offset=8)
with gzip.open(ops.join(datadir, key_file["test_img"]), "rb") as f:
test_img = np.frombuffer(f.read(), np.uint8, offset=16)
test_img = test_img.reshape(-1, 784)
with gzip.open(ops.join(datadir, key_file["test_label"]), "rb") as f:
test_label = np.frombuffer(f.read(), np.uint8, offset=8)
return train_img, train_label, test_img, test_label
def get_cifar10_data(datadir):
datadir = os.path.join(datadir, "cifar-10-batches-py")
# == train ==
train_img = []
train_label = []
for i in range(1, 6):
path = os.path.join(datadir, f"data_batch_{i}")
with open(path, 'rb') as f:
data = pickle.load(f, encoding="latin-1")
train_img.append(data['data'])
train_label.append(data['labels'])
train_img = np.concatenate(train_img, axis=0).reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)
train_label = np.concatenate(train_label, axis=0)
# == test ==
path = os.path.join(datadir, f"test_batch")
with open(path, 'rb') as f:
data = pickle.load(f, encoding="latin-1")
test_img = np.array(data['data']).reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)
test_label = np.array(data['labels'])
return train_img, train_label, test_img, test_label
def get_cifar100_data(datadir):
datadir = os.path.join(datadir, "cifar-100-python")
# == train =
path = os.path.join(datadir, "train")
with open(path, 'rb') as f:
data = pickle.load(f, encoding="latin-1")
train_img = np.array(data['data']).reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)
train_label = np.array(data['fine_labels'])
# == test ==
path = os.path.join(datadir, "test")
with open(path, 'rb') as f:
data = pickle.load(f, encoding="latin-1")
test_img = np.array(data['data']).reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)
test_label = np.array(data['fine_labels'])
return train_img, train_label, test_img, test_label
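# A minimal usage sketch (the data directory is an assumption, and the MNIST
# download URLs must still be reachable):
if __name__ == '__main__':
    train_img, train_label, test_img, test_label = get_mnist_data('./data/mnist')
    train_img = train_img.astype('float32') / 255.0  # normalise to [0, 1]
    print(train_img.reshape(-1, 28, 28).shape, train_label.shape)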
|
# -*- coding: utf-8 -*-
import torch
import os
from torchvision import transforms
from PIL import Image
import torch.nn as nn
from math import log10
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def calc_psnr(pred_path, gt_path, result_save_path, epoch):
if not os.path.exists(result_save_path):
os.makedirs(result_save_path)
transform = transforms.Compose([
transforms.ToTensor(),
])
criterionMSE = nn.MSELoss().to(device)
psnr, total_psnr, avg_psnr = 0.0, 0.0, 0.0
    epoch_result = os.path.join(result_save_path, 'PSNR_epoch_' + str(epoch) + '.csv')
epochfile = open(epoch_result, 'w')
epochfile.write('image_name' + ','+ 'psnr' + '\n')
    total_result = os.path.join(result_save_path, 'PSNR_total_results_epoch_avgpsnr.csv')
totalfile = open(total_result, 'a+')
print('======================= start to calculate PSNR =======================')
test_imgs = [f for f in os.listdir(pred_path)]
valid_i = 0
for i, img in enumerate(test_imgs):
pred_pil = Image.open(os.path.join(pred_path, img))
pred_tensor = transform(pred_pil)
pred = pred_tensor.to(device)
imgName, _, _ = img.rsplit('_', 2)
gt_imgName = imgName + '.bmp'
gt_pil = Image.open(os.path.join(gt_path, gt_imgName))
gt_tensor = transform(gt_pil)
gt = gt_tensor.to(device)
        gt = torch.cat([gt, gt, gt], dim=0)  # replicate the single-channel GT to 3 channels to match pred
mse = criterionMSE(pred, gt)
        # add a small eps so identical images (mse == 0) do not divide by zero
        eps = 0.00001
        psnr = 10 * log10(1 / (mse.item() + eps))
if mse.item() > eps:
total_psnr += psnr
valid_i += 1
epochfile.write(gt_imgName + ',' + str(round(psnr, 6)) + '\n')
if i % 200 == 0:
print("=== PSNR is processing {:>3d}-th image ===".format(i))
print("======================= Complete the PSNR test of {:>3d} images ======================= ".format(i+1))
    # average over the images that actually contributed a PSNR value
    avg_psnr = total_psnr / max(valid_i, 1)
epochfile.write('Average' + ',' + str(round(avg_psnr, 6)) + '\n')
epochfile.close()
totalfile.write(str(epoch) + ',' + str(round(avg_psnr, 6)) + '\n')
totalfile.close()
print('valid_i is ', valid_i)
return avg_psnr
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('basketball', '0017_auto_20150724_1914'),
]
operations = [
migrations.AlterField(
model_name='playbyplay',
name='primary_play',
field=models.CharField(max_length=30, choices=[('fgm', 'FGM'), ('fga', 'FGA'), ('threepm', '3PM'), ('threepa', '3PA'), ('blk', 'BLK'), ('to', 'TO'), ('pf', 'FOUL'), ('sub_out', 'OUT'), ('misc', 'Misc')]),
),
migrations.AlterField(
model_name='playbyplay',
name='top_play_rank',
field=models.CharField(max_length=30, choices=[('t01', 'T1'), ('t02', 'T2'), ('t03', 'T3'), ('t04', 'T4'), ('t05', 'T5'), ('t06', 'T6'), ('t07', 'T7'), ('t08', 'T8'), ('t09', 'T9'), ('t10', 'T10'), ('nt01', 'NT1'), ('nt02', 'NT2'), ('nt03', 'NT3'), ('nt04', 'NT4'), ('nt05', 'NT5'), ('nt06', 'NT6'), ('nt07', 'NT7'), ('nt08', 'NT8'), ('nt09', 'NT9'), ('nt10', 'NT10')], help_text='Refers to weekly rank', blank=True),
),
]
|
# Greedy algorithm: a Roman numeral for a large integer should never be a
# long run of I's added together -- the fewer numerals used, the better.
# This is analogous to making change with the largest coins first.
def intTOrome(num):
    # Put all Arabic values and their Roman counterparts (including the
    # subtractive forms like CM and IV) into two parallel arrays; a dict
    # is awkward here because ordered indexing is needed.
    # The values are sorted in descending order -- that is the greedy choice.
    nums = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1]
    romes = ['M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I']
n=len(nums)
res=''
index=0
while index<n:#不超过数组的长度
while num>=nums[index]:# 注意:这里是等于号,表示尽量使用大的"面值"
res+=romes[index]
num-=nums[index]
index+=1
return res
num=3
print(intTOrome(num))
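# A couple of quick sanity checks (values verified by hand):
assert intTOrome(3) == 'III'
assert intTOrome(9) == 'IX'
assert intTOrome(1994) == 'MCMXCIV'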
|
"""
route schema
"""
import typing
from vbml import Patcher, PatchedValidators
from vbml import Pattern
from kumquat.exceptions import KumquatException
from kumquat._types import Method
class Route:
"""
app route with path and func
"""
    def __init__(self, path: str, func: typing.Callable, methods: typing.Tuple[Method, ...]):
if not path.startswith("/"):
            raise KumquatException("Path must start with '/'")
self.methods = methods
self.path = path
self.func = func
def __repr__(self):
return f'Route("{self.path}", {self.func})'
class Validators(PatchedValidators):
"""
validator for routes paths
"""
def route(self, value):
if "/" not in value:
return value
return None
class RoutePattern(Pattern):
def __init__(
self, text: str = None, pattern: str = "{}$", lazy: bool = True, **context
):
super().__init__(text, pattern, lazy, **context)
def __repr__(self):
return f'RoutePattern("{self.text}")'
class RoutePatcher(Patcher):
def __init__(
self,
disable_validators: bool = False,
validators: typing.Type[PatchedValidators] = None,
**pattern_inherit_context,
):
super().__init__(disable_validators, validators, **pattern_inherit_context)
def pattern(self, _pattern: typing.Union[str, Pattern], **context):
context.update(self.pattern_context)
if isinstance(_pattern, Pattern):
return _pattern.context_copy(**context)
return RoutePattern(_pattern, **context)
class Router:
"""
class for saving all app routes
"""
def __init__(self):
self.patcher = RoutePatcher(validators=Validators, default_validators=["route"])
self.pattern = self.patcher.pattern
self.routes: typing.Dict[
            typing.Tuple[typing.Tuple[Method, ...], Pattern], Route
] = {}
def add_route(self, route: Route) -> None:
"""
add route with vbml pattern path to stack
:param route:
:return:
"""
self.routes[(route.methods, self.pattern(route.path))] = route
def get_route(
self, path: str, method: str
) -> typing.Tuple[typing.Dict[str, str], typing.Optional[Route]]:
"""
get route object from string path
:param method:
:param path:
:return:
"""
for route_methods, route_pattern in self.routes:
if path == route_pattern.text:
return {}, self.routes.get((route_methods, route_pattern))
if self.patcher.check(path, route_pattern):
return (
self.patcher.check(path, route_pattern),
self.routes.get((route_methods, route_pattern)),
)
return {}, None
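# A minimal usage sketch (assumption: the vbml package is installed, since
# Router compiles path strings into vbml patterns):
if __name__ == "__main__":
    router = Router()
    router.add_route(Route("/hello", lambda: "hi", ("GET",)))
    params, route = router.get_route("/hello", "GET")
    print(params, route)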
|
# Generated by Django 2.2.3 on 2020-09-12 09:12
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('reviewapp', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='Ticket',
new_name='image',
),
]
|
'''
Created on May 13, 2019
@author: ab75812
'''
print('DeeSub')
|
from .structs import Currency, Scope, Claim, ClaimStatus, Balance
from .errors import MissingScope, BadRequest, NotFound
from .client import VirtualCryptoClientBase, VIRTUALCRYPTO_TOKEN_ENDPOINT, VIRTUALCRYPTO_API
from typing import Optional, List
import datetime
import aiohttp
import asyncio
class AsyncVirtualCryptoClient(VirtualCryptoClientBase):
    def __init__(self, client_id: str, client_secret: str, scopes: List[Scope], loop=None):
        super().__init__(client_id, client_secret, scopes)
        # a default argument of asyncio.get_event_loop() would be evaluated
        # once at import time; resolve the loop per instance instead
        self.loop = loop if loop is not None else asyncio.get_event_loop()
        self.session = aiohttp.ClientSession(loop=self.loop)
        self.wait_ready = asyncio.Event(loop=self.loop)
async def wait_for_ready(self):
await self.wait_ready.wait()
async def start(self):
await self.set_token()
self.wait_ready.set()
async def close(self):
await self.session.close()
async def set_token(self):
body = {
'scope': ' '.join(map(lambda x: x.value, self.scopes)),
'grant_type': 'client_credentials'
}
async with self.session.post(
VIRTUALCRYPTO_TOKEN_ENDPOINT,
data=body,
auth=aiohttp.BasicAuth(self.client_id, self.client_secret)) as response:
data = await response.json()
self.token = data['access_token']
self.expires_in = data['expires_in']
self.token_type = data['token_type']
self.when_set_token = datetime.datetime.utcnow()
async def get_headers(self):
if (datetime.datetime.utcnow() - self.when_set_token).seconds >= self.expires_in:
await self.set_token()
return {
"Authorization": "Bearer " + self.token
}
async def get(self, path, params) -> aiohttp.ClientResponse:
headers = await self.get_headers()
return await self.session.get(VIRTUALCRYPTO_API + path, params=params, headers=headers)
async def post(self, path, data) -> aiohttp.ClientResponse:
headers = await self.get_headers()
return await self.session.post(VIRTUALCRYPTO_API + path, data=data, headers=headers)
async def patch(self, path, data) -> aiohttp.ClientResponse:
headers = await self.get_headers()
return await self.session.patch(VIRTUALCRYPTO_API + path, data=data, headers=headers)
async def get_currency_by_unit(self, unit: str) -> Optional[Currency]:
response = await self.get("/currencies", {"unit": unit})
return Currency.by_json(await response.json())
async def get_currency_by_guild(self, guild_id: int) -> Optional[Currency]:
response = await self.get("/currencies", {"guild": str(guild_id)})
return Currency.by_json(await response.json())
async def get_currency_by_name(self, name: str) -> Optional[Currency]:
response = await self.get("/currencies", {"name": name})
return Currency.by_json(await response.json())
async def get_currency(self, currency_id: int):
response = await self.get("/currencies/" + str(currency_id), {})
return Currency.by_json(await response.json())
async def create_user_transaction(self, unit: str, receiver_discord_id: int, amount: int) -> None:
if Scope.Pay not in self.scopes:
raise MissingScope("vc.pay")
response = await self.post(
"/users/@me/transactions",
{
"unit": unit,
"receiver_discord_id": str(receiver_discord_id),
"amount": str(amount)
}
)
if response.status == 400:
raise BadRequest((await response.json())["error_info"])
pay = create_user_transaction
async def get_claims(self):
if Scope.Claim not in self.scopes:
raise MissingScope("vc.claim")
response = await self.get(
"/users/@me/claims",
{}
)
return list(map(Claim.by_json, await response.json()))
async def get_claim(self, claim_id: int):
response = await self.get("/users/@me/claims/" + str(claim_id), {})
return Claim.by_json(await response.json())
async def update_claim(self, claim_id: int, status: ClaimStatus):
if status == ClaimStatus.Pending:
raise ValueError("can't update to pending")
response = await self.patch(
"/users/@me/claims/" + str(claim_id),
{"status": status.value}
)
if response.status == 404:
raise NotFound((await response.json())["error_description"])
elif response.status == 400:
raise BadRequest((await response.json())["error_info"])
return response
async def get_balances(self):
response = await self.get(
"/users/@me/balances",
{}
)
return list(map(Balance.by_json, await response.json()))
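# A minimal usage sketch (the credentials are placeholders):
async def _demo():
    client = AsyncVirtualCryptoClient("my-client-id", "my-client-secret", [Scope.Pay])
    await client.start()
    try:
        print(await client.get_balances())
    finally:
        await client.close()
# asyncio.get_event_loop().run_until_complete(_demo())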
|
#Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def getIntersectionNode(self, headA, headB):
#Determine lengths
len1, len2 = 0, 0
currA, currB = headA, headB
        while currA is not None:
            len1 += 1
            currA = currA.next
        while currB is not None:
            len2 += 1
            currB = currB.next
#Get to same Starting Point
currA, currB = headA, headB
while len1 > len2:
currA = currA.next
len1 -= 1
while len2 > len1:
currB = currB.next
len2 -= 1
#Loop until intersect is found
        while currA is not None and currB is not None:
            if currA is currB: return currA
currA = currA.next
currB = currB.next
return None
head1 = ListNode(1)
head1.next = ListNode(2)
intersect = ListNode(3)
head1.next.next = intersect
intersect.next = ListNode(4)
head2 = ListNode(1)
head2.next = intersect
print(Solution().getIntersectionNode(head1, head2).val) #3
print(Solution().getIntersectionNode(head1, head1).val) #1
head2.next = ListNode(5)
print(Solution().getIntersectionNode(head1, head2))
|
def computepay(h,r):
if h <= 40:
pay = r * h
else:
pay = r * 40 + r * 1.5 * (h-40)
return pay
h = float(input("Enter Hours:"))
r = float(input("Enter Rate:"))
p = computepay(h,r)
print("Pay",p)
|
import pandas as pd
import lightgbm as lgb
from datetime import timedelta
from tqdm import tqdm
from data import data_frames, optimize_df, melt_and_merge, features, lgb_dataset
# Global constants
MAX_LAG = timedelta(days=57)
def next_day_features(df, forecast_date):
"""
Create features of the next day to forecast.
Args:
df = [pd.DataFrame] long format dataframe
forecast_date = [datetime] forecast date
Returns [pd.DataFrame]:
Dataframe with features for the next day to forecast.
"""
forecast_df = df[ (df['date'] >= forecast_date - MAX_LAG) & (df['date'] <= forecast_date) ].copy()
forecast_df = features(forecast_df, submission=True)
return forecast_df
def make_submission(df, first_date):
"""
Create dataframe in the correct format for submission.
    Args:
        df = [pd.DataFrame] long format dataframe with the forecasts filled in
        first_date = [datetime] first forecast date
Returns [pd.DataFrame]:
Submission dataframe.
"""
cols = [f"F{i}" for i in range(1, 29)]
submission = df.loc[df['date'] >= first_date, ['id', 'sales']].copy()
submission['F'] = [f'F{rank}' for rank in submission.groupby('id')['id'].cumcount() + 1]
submission = submission.set_index(['id', 'F']).unstack()['sales'][cols].reset_index()
submission.fillna(0., inplace=True)
submission.sort_values("id", inplace=True)
submission.reset_index(drop=True, inplace=True)
# make a dummy evaluation forecast
submission_eval = submission.copy()
submission_eval['id'] = submission_eval['id'].str.replace('validation', 'evaluation')
submission = pd.concat([submission, submission_eval], axis=0, sort=False)
return submission
def infer(model, calendar, prices, sales, filename=''):
"""
Infer the unit sales with the model.
Args:
model = [lgb.Booster] trained LightGBM model
calendar = [pd.DataFrame] dates of product sales
prices = [pd.DataFrame] price of the products sold per store and date
        sales = [pd.DataFrame] historical daily unit sales data per product and store
        filename = [str] suffix for the submission file name
Returns [pd.DataFrame]:
Submission dataframe.
"""
# create test dataset for submission
df = melt_and_merge(calendar, prices, sales, submission=True)
# set first forecast date
first_date = df.date[pd.isnull(df.sales)].min().to_pydatetime()
# forecast the 28 days for validation
for day in tqdm(range(0, 28)):
forecast_date = first_date + timedelta(days=day)
forecast_df = next_day_features(df, forecast_date)
drop_cols = ['id', 'date', 'sales', 'd', 'wm_yr_wk', 'weekday']
keep_cols = forecast_df.columns[~forecast_df.columns.isin(drop_cols)]
forecast_df = forecast_df.loc[forecast_df['date'] == forecast_date, keep_cols]
df.loc[df['date'] == forecast_date, 'sales'] = model.predict(forecast_df)
# create the submission file
submission = make_submission(df, first_date)
submission.to_csv(f'submission{filename}.csv', index=False)
return submission
if __name__ == "__main__":
# Make 4 submission for the report
DATAPATH = '../kaggle/input/m5-forecasting-accuracy/'
calendar, prices, sales = data_frames(DATAPATH)
MODELPATH = '../models/'
runs = [(f'{MODELPATH}lgb_year.pt', 365), (f'{MODELPATH}lgb_all.pt', 1000)]
val_test = [('val', 28), ('test', 0)]
for model_file, days in runs:
print(f'Starting submissions for {model_file}')
model = lgb.Booster(model_file=model_file)
for label, val_days in val_test:
print(f'# {label} set')
calendar_opt, prices_opt, sales_opt = optimize_df(calendar.copy(),
prices.copy(),
sales.copy(),
days=days,
val_days=val_days)
sub_suffix = f'_lgb_{days}d_{label}'
submission = infer(model, calendar_opt, prices_opt, sales_opt, filename=sub_suffix)
|
print("Modular multiplicative inverse")
def modolu(a, m):
a = a % m
for x in range(1, m):
if (a * x) % m == 1:
return x
return 1
a = int(input("a = "))
m = int(input("m = "))
print(modolu(a, m))
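# A sketch of the O(log m) alternative based on the extended Euclidean
# algorithm (on Python 3.8+, pow(a, -1, m) gives the same result):
def extended_gcd(a, b):
    # returns (g, x, y) such that a*x + b*y == g == gcd(a, b)
    if b == 0:
        return a, 1, 0
    g, x, y = extended_gcd(b, a % b)
    return g, y, x - (a // b) * y

def mod_inverse_fast(a, m):
    g, x, _ = extended_gcd(a % m, m)
    return x % m if g == 1 else None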
|
import numpy as np
import os
os.system('cls')
class arrayRow_DataStructure():
def __init__(self, num_columns):
self.num_columns = num_columns
self.arr = np.empty((0, self.num_columns))
def append(self, record):
self.arr = np.append(self.arr, record, axis=0)
def delete(self, index):
self.arr = np.delete(self.arr, index, axis=0)
def row_size(self):
return len(self.arr)
people = arrayRow_DataStructure(4)
print(people.arr, people.row_size())
print()
# ==================================================
col_01_first_name = 'Joseph'
col_02_last_name = 'Fischetti'
col_03_eye_color = 'green'
col_04_age = 75
people.append(np.array([
[col_01_first_name, col_02_last_name, col_03_eye_color, col_04_age]]))
print(people.arr, people.row_size())
print()
# ==================================================
col_01_first_name = 'Mary'
col_02_last_name = 'Smith'
col_03_eye_color = 'blue'
col_04_age = 35
people.append(np.array([
[col_01_first_name, col_02_last_name, col_03_eye_color, col_04_age]]))
print(people.arr, people.row_size())
print()
# ==================================================
col_01_first_name = 'Susan'
col_02_last_name = 'Mosley'
col_03_eye_color = 'brown'
col_04_age = 16
people.append(np.array([
[col_01_first_name, col_02_last_name, col_03_eye_color, col_04_age]]))
print(people.arr, people.row_size())
print()
# ==================================================
people.delete((0, 2))
print(people.arr, people.row_size())
print()
|
from django.apps import AppConfig
class GetSkuConfig(AppConfig):
name = 'get_sku'
|
number = int(input())
last = []
# last[n] is the n-th centered hexagonal number (1, 7, 19, 37, ...):
# each new ring of a hexagonal grid adds 6*n cells
def geacha(n):
    if n == 0:
        return 1
    else:
        return 6 * n + last[n - 1]
i = 0
while True:
last.append(geacha(i))
if number <= last[-1]:
break
i += 1
print(i+1)
|
import botostubs
import logging
import boto3
from botocore.exceptions import ClientError
boto_session = boto3.Session(profile_name='default')
def does_the_bucket_exist(bucketname):
s3: botostubs.S3 = boto_session.client('s3')
try:
response = s3.head_bucket(Bucket=bucketname)
except ClientError as e:
logging.debug(e)
return False
return True
def main():
bucket_name = "testbucket"
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)s: %(asctime)s: %(message)s')
    if does_the_bucket_exist(bucket_name):
logging.info(
f'{bucket_name} exists and you have permission to access it.')
else:
logging.info(
            f"{bucket_name} does not exist or you don't have permission to access it.")
if __name__ == '__main__':
main()
|
# Justin J
# Fall 2017
# Computational Complexity
# Mapping SAT -> 3SAT
# Clause helper class
class Clause:
def __init__(self, a, b, c):
self.a = str(a)
self.b = str(b)
self.c = str(c)
def toString(self):
return '( ' + self.a + ' + ' + self.b + ' + ' + self.c + ')'
# Convert any given clause, convert it to string of clauses with length 3
# Recursively calls itself until 3CNF is satisfied
def convertTo3SAT(clauseArray, wCount):
# if clause has 3 variables, return its string representation
if len(clauseArray) == 3:
return Clause(clauseArray[0], clauseArray[1], clauseArray[2]).toString()
# if clause has more than 3 variables
# introduce new variable w s.t w = 1 iff c[0] and c[1] both equal 0
elif len(clauseArray) > 3:
c1 = clauseArray[0]
c2 = clauseArray[1]
w = 'w' + str(wCount)
# create a new clause with c1, c2, and our new w
clauseToAdd = Clause(c1, c2, w)
# insert !w into front of clauseArray
clauseArray = ['!w' + str(wCount)] + clauseArray[2:]
# return 3CNF as string and recurse on the long clauseArray
return clauseToAdd.toString() + convertTo3SAT(clauseArray, wCount + 1)
    # if clause has fewer than 3 variables, repeat a literal until length 3
    while len(clauseArray) < 3:
        clauseArray.append(clauseArray[0])
    # lists have no toString(); build a Clause and stringify that
    return Clause(clauseArray[0], clauseArray[1], clauseArray[2]).toString()
# For simplification, let our long clause be represented in an array
longClause = ['x', 'y', '!r', 's', '!t', 'a', '!f', '!d', 'q']
result = convertTo3SAT(longClause, 1)
print('\nConverting instance of SAT into instance of 3SAT recursively...\n')
print('\nOriginal disjunctive clause variables:\n ' + str(longClause))
print('\n3SAT Result: \n' + result + '\n')
|
import dash
import dash_bootstrap_components as dbc
from dash import Input, Output, State, html

# the callback below needs an app object; create one with the Bootstrap theme
app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP])
offcanvas = html.Div(
[
dbc.Button(
"Open scrollable offcanvas",
id="open-offcanvas-scrollable",
n_clicks=0,
),
dbc.Offcanvas(
html.P("The contents on the main page are now scrollable."),
id="offcanvas-scrollable",
scrollable=True,
title="Scrollable Offcanvas",
is_open=False,
),
]
)
@app.callback(
Output("offcanvas-scrollable", "is_open"),
Input("open-offcanvas-scrollable", "n_clicks"),
State("offcanvas-scrollable", "is_open"),
)
def toggle_offcanvas_scrollable(n1, is_open):
if n1:
return not is_open
return is_open
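# To run this snippet standalone (a sketch; the debug setting is arbitrary):
app.layout = offcanvas
if __name__ == "__main__":
    app.run_server(debug=True)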
|
name = input('please say something: ')
print('Hi', name)
|
"""Funcionality for representing a physical variable in aospy."""
import numpy as np
class Var(object):
"""An object representing a physical quantity to be computed.
Attributes
----------
name : str
The variable's name
alt_names : tuple of strings
All other names that the variable may be referred to in the input data
names : tuple of strings
The combination of `name` and `alt_names`
description : str
A description of the variable
func : function
The function with which to compute the variable
variables : sequence of aospy.Var objects
The variables passed to `func` to compute it
units : str
The variable's physical units
domain : str
The physical domain of the variable, e.g. 'atmos', 'ocean', or 'land'
def_time, def_vert, def_lat, def_lon : bool
Whether the variable is defined, respectively, in time, vertically, in
latitude, and in longitude
math_str : str
The mathematical representation of the variable
colormap : str
The name of the default colormap to be used in plots of this variable
valid_range : length-2 tuple
The range of values outside which to flag as unphysical/erroneous
"""
def __init__(self, name, alt_names=None, func=None, variables=None,
units='', plot_units='', plot_units_conv=1, domain='atmos',
description='', def_time=False, def_vert=False, def_lat=False,
def_lon=False, math_str=False, colormap='RdBu_r',
valid_range=None):
"""Instantiate a Var object.
Parameters
----------
name : str
The variable's name
alt_names : tuple of strings
All other names that the variable might be referred to in any input
data. Each of these should be unique to this variable in order to
avoid loading the wrong quantity.
description : str
A description of the variable
func : function
The function with which to compute the variable
variables : sequence of aospy.Var objects
The variables passed to `func` to compute it. Order matters:
whenever calculations are performed to generate data corresponding
to this Var, the data corresponding to the elements of `variables`
will be passed to `self.function` in the same order.
units : str
The variable's physical units
domain : str
The physical domain of the variable, e.g. 'atmos', 'ocean', or
'land'. This is only used by aospy by some types of `DataLoader`,
including `GFDLDataLoader`.
def_time, def_vert, def_lat, def_lon : bool
Whether the variable is defined, respectively, in time, vertically,
in latitude, and in longitude
math_str : str
The mathematical representation of the variable. This is typically
a raw string of LaTeX math-mode, e.g. r'$T_\mathrm{sfc}$' for
surface temperature.
colormap : str
(Currently not used by aospy) The name of the default colormap to
be used in plots of this variable.
valid_range : length-2 tuple
The range of values outside which to flag as unphysical/erroneous
""" # noqa: W605
self.name = name
        if alt_names is None:
            # always define alt_names so later attribute access cannot fail
            self.alt_names = tuple()
            self.names = tuple([name])
        else:
            self.alt_names = alt_names
            self.names = tuple([name] + list(alt_names))
if func is None:
self.func = lambda x: x
self.variables = None
else:
self.func = func
self.variables = variables
self.units = units
if not description:
if self.func.__doc__ is None:
self.description = ''
else:
self.description = self.func.__doc__
else:
self.description = description
self.domain = domain
self.def_time = def_time
self.def_vert = def_vert
self.def_lat = def_lat
self.def_lon = def_lon
self.math_str = math_str
self.colormap = colormap
self.valid_range = valid_range
def __str__(self):
return 'Var instance "' + self.name + '"'
__repr__ = __str__
def to_plot_units(self, data, dtype_vert=False):
"""Convert the given data to plotting units."""
if dtype_vert == 'vert_av' or not dtype_vert:
conv_factor = self.units.plot_units_conv
elif dtype_vert == ('vert_int'):
conv_factor = self.units.vert_int_plot_units_conv
else:
raise ValueError("dtype_vert value `{0}` not recognized. Only "
"bool(dtype_vert) = False, 'vert_av', and "
"'vert_int' supported.".format(dtype_vert))
if isinstance(data, dict):
return {key: val*conv_factor for key, val in data.items()}
return data*conv_factor
def mask_unphysical(self, data):
"""Mask data array where values are outside physically valid range."""
if not self.valid_range:
return data
else:
return np.ma.masked_outside(data, np.min(self.valid_range),
np.max(self.valid_range))
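# A minimal instantiation sketch (the names, units, and range are
# illustrative only):
if __name__ == '__main__':
    t_sfc = Var(
        name='t_surf',
        alt_names=('ts', 'sst'),
        units='K',
        description='Surface temperature.',
        def_time=True, def_lat=True, def_lon=True,
        valid_range=(150, 350),
    )
    print(t_sfc, t_sfc.names)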
|
# positional arguments may also be passed by keyword
def fun1(a):
    print(a)
fun1(a=1)
# **kwargs collects extra keyword arguments into a dict
def fun2(a, **kwargs):
    print(a)
    print(kwargs)
fun2(10, a1=1, b=2)
# *args collects extra positional arguments into a tuple
def fun3(a, *args):
    print(a)
    print(args)
fun3(12, 1, 3)
# a lambda is a single-expression anonymous function
f = lambda a1, a2: a1 + a2
print(f(1, 2))
# `global` lets a function rebind a module-level name
# (note: `sum` shadows the built-in of the same name)
sum = 0
def f1():
    global sum
    sum = sum + 1
    print(sum)
f1()
|
import enum
class AuthConstants(enum.Enum):
noMatch = "Wrong username and password combination"
noUser = "User does not exist"
    successLogout = "You have been successfully logged out"
passwordUpdated = "Successfully updated your password"
    codeMail = "A 6 digit verification code has been sent to your email address"
askUsername = "Please enter your username again for security purposes"
loginAgain = "Please login to your account again for security purposes"
sameUsername = "User with that username already exists"
class ImageConstant(enum.Enum):
defaultImage = "images/default/default_profile_img.jpg"
class ResetConstants(enum.Enum):
timeExceeded = "Time limit exceeded"
    newVerification = "A new verification code has been sent to your email"
noMatch = "Security code does not match"
|
# Generated by Django 2.0.7 on 2018-09-21 06:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rocky', '0007_auto_20180915_1454'),
]
operations = [
migrations.AddField(
model_name='book',
name='introduction',
            field=models.TextField(default='', max_length=1500, verbose_name='Introduction'),
),
]
|
"""add article page view
Revision ID: 2a1c4da978f8
Revises: 4058a1c2b44d
Create Date: 2015-11-26 10:34:54.369011
"""
# revision identifiers, used by Alembic.
revision = '2a1c4da978f8'
down_revision = '4058a1c2b44d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('articles', sa.Column('page_view', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('articles', 'page_view')
### end Alembic commands ###
|
from Node import Node
class BinaryTree:
    def __init__(self):
        # use an instance attribute; a class-level `root` would be shared
        # by every BinaryTree instance
        self.root = None
        print("Tree created")
    def __str__(self):
        # __str__ must return a string rather than print one
        if self.root is None:
            return "Tree is empty"
        return self.Print3()
    def Add(self, value):
        if self.root is None:
            self.root = Node(value)
            #print("Root was zero, new Node created")
        else:
            self.root.AddValue(value)
            #print("Root wasn't zero, other Node created")
def Print(self):
        if self.root is not None:
self.root.Print("")
else:
print("Tree is empty, nothing here to print")
def Print2(self):
if self.root != None:
data = ["└──── " + str(self.root.value)]
self.root.Print2(data, 0, 0)
for x in data:
print(x)
else:
print("Tree is empty, nothing here to print")
def display(self):
lines, *_ = self.root._display_aux()
for line in lines:
print(line)
def Print3(self):
data = []
        if self.root.left is not None:
self.root.left.Print3(data, 6, "┌──── ")
data.append("└──── " + str(self.root.value))
        if self.root.right is not None:
self.root.right.Print3(data, 6, "└──── ")
returnStr = ""
for x in data:
returnStr += x + "\n"
return returnStr
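# A usage sketch (assumption: the Node class provides the AddValue and
# Print* helpers used above):
if __name__ == "__main__":
    tree = BinaryTree()
    for v in (8, 3, 10, 1, 6):
        tree.Add(v)
    print(tree.Print3())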
|
from rest_framework import serializers
class PredictorSerializer(serializers.Serializer):
name = serializers.CharField(max_length=30)
|
import util
import os
tutorial_path = os.path.expandvars("$desktop/mans/manim_ce/example_scenes/tutorial_final.py")
lines = []
with open(tutorial_path, "r") as f:
lines = f.readlines()
modules = []
for aline in lines:
    # pick out scene names from lines like "class MyScene(Scene):"
    if aline.startswith('class '):
        modules.append(aline.split("(")[0][6:])
for amodule in modules:
cmd = "/home/peter/setup/bin/manim_with_scene {} {}".format(tutorial_path, amodule)
print("cmd : {}".format( cmd ))
os.system(cmd)
|
import time
from flask import request
from flask_restplus import Api, Resource
from server import db
from server.operation.register import Register
from .. import api
import server.document as document
ns = api.namespace('operation', description="User messages")
class Operation(Resource):
    """
    User message module
    """
def get(self):
"""
        Fetch one random message
"""
try:
data = db.query_one(
"SELECT content, FROM_UNIXTIME(create_time, '%Y-%m-%d %h') as create_time FROM `work_msg` ORDER BY RAND() DESC LIMIT 0, 1")
return {'status': 200, 'msg': data}
        except Exception as err:
            return {'status': 400, 'msg': 'Failed'}
    def post(self):
        "Post a user message"
        try:
            kwords = request.json
        except Exception as err:
            return {'status': 400, 'msg': 'Failed: invalid data format %s' % err}
        if not kwords:
            return {'status': 400, 'msg': 'Failed: invalid data format'}
kwargs = {}
try:
kwargs['msg'] = kwords['msg']
kwargs['mobile'] = kwords['mobile']
kwargs['name'] = kwords['name']
        except Exception as e:
            return {'status': 400, 'msg': 'Failed: invalid data format %s ' % e}
kwargs['create_time'] = int(time.time())
result = db.insert("""
insert into
work_msg (mobile, name, content, create_time)
value (:mobile, :name, :msg, :create_time)
""", kwargs)
        if result:
            return {'status': 200, 'msg': 'Success'}
        return {'status': 400, 'msg': 'Failed: invalid data format'}
class OperationAll(Resource):
    def get(self):
        "Fetch all user messages"
        try:
            data = db.query(
                "SELECT content, FROM_UNIXTIME(create_time, '%Y-%m-%d %h') as create_time FROM `work_msg` ORDER BY id DESC")
            return {'status': 200, 'msg': data}
        except Exception as err:
            return {'status': 400, 'msg': 'Failed'}
ns.add_resource(Operation, '/')
ns.add_resource(OperationAll, '/all/')
|
import unittest
from katas.beta.nothing_special import nothing_special
class NothingSpecialTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(nothing_special('Hello World!'), 'Hello World')
def test_equals_2(self):
self.assertEqual(nothing_special('%^Take le$ft ##quad%r&a&nt'),
'Take left quadrant')
def test_equals_3(self):
self.assertEqual(nothing_special('M$$$$$$$y ally!!!!!'), 'My ally')
def test_equals_4(self):
self.assertEqual(nothing_special(25), 'Not a string!')
|
# Dependencies
import tweepy
import json
import numpy as np
# Twitter API Keys. Place your keys here.
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
# Setup Tweepy API Authentication
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
# Target User Account
target_user = "@DalaiLama"
# Lists for holding sentiments
compound_list = []
positive_list = []
negative_list = []
neutral_list = []

# Sentiment analysis via VADER (assumption: the vaderSentiment package is
# installed; any sentence-level analyzer with a polarity_scores() API works)
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()

for page in tweepy.Cursor(api.user_timeline, id=target_user).pages(200):
    # page is a list of statuses
    # Loop through all tweets
    for tweet in page:
        # Parse the tweet to identify its text
        tweet_text = tweet._json["text"]
        # Analyze the sentiment of the tweet
        results = analyzer.polarity_scores(tweet_text)
        # Add the sentiment analyses to the respective lists
        compound_list.append(results["compound"])
        positive_list.append(results["pos"])
        negative_list.append(results["neg"])
        neutral_list.append(results["neu"])

# Print the average sentiments of the tweets
print(f"Compound: {np.mean(compound_list):.3f}")
print(f"Positive: {np.mean(positive_list):.3f}")
print(f"Negative: {np.mean(negative_list):.3f}")
print(f"Neutral:  {np.mean(neutral_list):.3f}")
|
# https://www.sqlite.org/json1.html
import sqlite3
import json
db_name = 'yt.db'
def open_db():
    # renamed from open() to avoid shadowing the builtin
    conn = sqlite3.connect(db_name)
    cursor = conn.cursor()
    # IF NOT EXISTS avoids catching OperationalError on re-runs
    sql = 'create table if not exists stats (ts varchar(64), video_id varchar(64), data json)'
    cursor.execute(sql)
    conn.commit()
    return conn
def put(conn, now, video_id, stats):
cursor = conn.cursor()
sql = 'insert into stats values (?,?,?)'
cursor.execute(sql, (now, video_id, json.dumps(stats)))
conn.commit()
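# Querying back with the JSON1 extension's json_extract(); the '$.viewCount'
# key is an assumption about what the stored stats dicts contain:
def view_counts(conn):
    cursor = conn.cursor()
    cursor.execute(
        "select ts, video_id, json_extract(data, '$.viewCount') from stats")
    return cursor.fetchall()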
|
from scripted import ScriptedJobPlugin
from staging import StagingJobPlugin
_jobplugins_by_name = {
'scripted': ScriptedJobPlugin,
'staging': StagingJobPlugin,
}
def register_jobplugin(cls):
_jobplugins_by_name[cls.name] = cls
def load_jobplugin(name):
return _jobplugins_by_name[name]
|
import numpy as np
from blob_mask import blob_mask, blob_mask_dim
from constants import image_height, image_width
def get_true_mask(data):
all_blobs = data["army"] + data["enemy"]
all_masks = []
for blob in all_blobs:
if (blob["alive"]):
            # np.int was removed in NumPy 1.24; plain int works the same here
            mask = np.zeros((image_height, image_width, 1), dtype=int) - 1
            # label +1 when the blob centre lies in the central 80% of the frame
            if (abs(blob["x"] - .5) < .4) and (abs(blob["y"] - .5) < .4):
                y_k = 1
            else:
                y_k = -1
            # map the normalised centre to pixel coordinates of the mask's
            # top-left corner (the 742/594 scales and 26/31 offsets appear
            # dataset-specific)
            x_init = int(blob["x"] * 742) - 26
            y_init = int(blob["y"] * 594) - 31
for i in range(blob_mask_dim[0]):
for j in range(blob_mask_dim[1]):
y = i + y_init
x = j + x_init
if (x >= 0) & (y >= 0) & (y < image_height) & (x < image_width):
if (blob_mask[i][j] == 1):
mask[y][x][0] = 1
all_masks.append((mask, y_k))
return all_masks
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys, os.path
sys.argv[1] = os.path.basename(sys.argv[1])
with open('msbuild_rule.out', 'w') as f:
f.write(' '.join(sys.argv))
|
# coding: utf-8
"""
AuthProvidersApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AuthProvidersApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_ads_provider_controllers(self, id, **kwargs):
"""
List all ADS controllers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ads_provider_controllers(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: (required)
:return: AdsProviderControllers
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ads_provider_controllers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_ads_provider_controllers`")
resource_path = '/platform/1/auth/providers/ads/{Id}/controllers'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['Id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdsProviderControllers',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_ads_provider_domain(self, ads_provider_domain_id, id, **kwargs):
"""
Retrieve the ADS domain information.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ads_provider_domain(ads_provider_domain_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str ads_provider_domain_id: Retrieve the ADS domain information. (required)
:param str id: (required)
:return: AdsProviderDomains
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ads_provider_domain_id', 'id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ads_provider_domain" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'ads_provider_domain_id' is set
if ('ads_provider_domain_id' not in params) or (params['ads_provider_domain_id'] is None):
raise ValueError("Missing the required parameter `ads_provider_domain_id` when calling `get_ads_provider_domain`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_ads_provider_domain`")
resource_path = '/platform/1/auth/providers/ads/{Id}/domains/{AdsProviderDomainId}'.replace('{format}', 'json')
path_params = {}
if 'ads_provider_domain_id' in params:
path_params['AdsProviderDomainId'] = params['ads_provider_domain_id']
if 'id' in params:
path_params['Id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdsProviderDomains',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_ads_provider_domains(self, id, **kwargs):
"""
List all ADS domains.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ads_provider_domains(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: (required)
:param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
:return: AdsProviderDomains
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'scope']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ads_provider_domains" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_ads_provider_domains`")
resource_path = '/platform/1/auth/providers/ads/{Id}/domains'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['Id'] = params['id']
query_params = {}
if 'scope' in params:
query_params['scope'] = params['scope']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdsProviderDomains',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_ads_provider_search(self, id, **kwargs):
"""
Retrieve search results.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ads_provider_search(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: (required)
:param str domain: The domain to search in.
:param str description: The user or group description to search for.
:param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
:param bool search_users: If true, search for users.
:param str filter: The LDAP filter to apply to the search.
:param int limit: Return no more than this many results at once (see resume).
:param str user: The user name for the domain if untrusted.
:param str password: The password for the domain if untrusted.
:param bool search_groups: If true, search for groups.
:return: AdsProviderSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'domain', 'description', 'resume', 'search_users', 'filter', 'limit', 'user', 'password', 'search_groups']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ads_provider_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_ads_provider_search`")
if 'limit' in params and params['limit'] < 1.0:
raise ValueError("Invalid value for parameter `limit` when calling `get_ads_provider_search`, must be a value greater than or equal to `1.0`")
resource_path = '/platform/1/auth/providers/ads/{Id}/search'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['Id'] = params['id']
query_params = {}
if 'domain' in params:
query_params['domain'] = params['domain']
if 'description' in params:
query_params['description'] = params['description']
if 'resume' in params:
query_params['resume'] = params['resume']
if 'search_users' in params:
query_params['search_users'] = params['search_users']
if 'filter' in params:
query_params['filter'] = params['filter']
if 'limit' in params:
query_params['limit'] = params['limit']
if 'user' in params:
query_params['user'] = params['user']
if 'password' in params:
query_params['password'] = params['password']
if 'search_groups' in params:
query_params['search_groups'] = params['search_groups']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdsProviderSearch',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
|
from django.db import models
from django.core.validators import RegexValidator
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator
class Customer(models.Model):
firstName = models.CharField (max_length=50, verbose_name="First Name")
lastName = models.CharField (max_length=50, verbose_name="Last Name")
# phone number validation
phoneMessage = 'Phone number must be 11 digits format \'00000000000\''
phone_regex = RegexValidator(regex='^\\d{11}$',message=phoneMessage)
number = models.CharField(max_length=11, validators=[phone_regex], verbose_name="Phone Number")
email = models.EmailField(max_length = 254, blank= True, null=True, verbose_name="Email Address")
address = models.TextField(blank=True, verbose_name="Address")
def __str__(self):
return self.firstName + ' ' + self.lastName
class Ticket(models.Model):
TICKET_STATUS = [('Open', 'Open'), ('Waiting on Customer', 'Waiting on Customer'), ('Waiting for Parts', 'Waiting for Parts'), ('Closed', 'Closed')]
DEVICE_TYPES = [('Mobile Phone', 'Mobile Phone'), ('Laptop', 'Laptop'), ('Desktop', 'Desktop'), ('Games Console', 'Games Console'), ('Tablet', 'Tablet'), ('Smart device', 'Smart Device'), ('Other', 'Other')]
ticketName = models.CharField(max_length=100, verbose_name="Ticket Name")
deviceMake = models.CharField (max_length=30, verbose_name="Device Make")
deviceModel = models.CharField(max_length=50, verbose_name="Device Model")
deviceType = models.CharField(max_length=15, choices=DEVICE_TYPES, verbose_name="Device Type")
customer = models.ForeignKey(Customer, on_delete=models.PROTECT, verbose_name="Customer")
assigned = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True, editable=True, related_name="assignedTechnician", verbose_name="Assigned")
ticketStatus = models.CharField(max_length=30, choices=TICKET_STATUS, verbose_name="Ticket Status")
createdDate = models.DateTimeField(auto_now_add=True, editable=False, verbose_name="Created On")
createdBy = models.ForeignKey(User, on_delete=models.DO_NOTHING, null=True, editable=False, related_name="createdByTechnician", verbose_name="createdBy")
lastUpdated = models.DateTimeField(auto_now=True, verbose_name="Last Update Date")
updatedBy = models.ForeignKey(User, on_delete=models.DO_NOTHING, null=True, editable=False, related_name="updatedByTechnician", verbose_name="Updated By")
ticketDescription = models.TextField(verbose_name="Ticket Description")
def __str__(self):
return self.ticketName
def getAssigned(self):
return self.assigned
    class Meta:
        # must be capitalised as `Meta`, otherwise Django ignores the options
        ordering = ['-id']
class inventoryItem(models.Model):
ITEM_TYPES = [('Mobile Phone', 'Mobile Phone'), ('Laptop', 'Laptop'), ('Desktop', 'Desktop'), ('Games Console', 'Games Console'),
('Tablet', 'Tablet'), ('Smart device', 'Smart Device'),('Monitor', 'Monitor'), ('Peripherals', 'Peripherals'), ('Component', 'Component'),('Accessory', 'Accessory'), ('Software', 'Software'), ('Other', 'Other')]
itemName = models.CharField(max_length=150, verbose_name="Item Name")
itemType = models.CharField(max_length = 50,choices=ITEM_TYPES,verbose_name="Item Type")
quantityInStock = models.PositiveIntegerField (verbose_name="In Stock")
price = models.DecimalField (decimal_places=2, validators=[MinValueValidator(0.00)], max_digits=9, verbose_name="Price")
orderLink = models.URLField(blank = True, verbose_name="Order Link")
lastOrdered = models.DateTimeField(auto_now=True, verbose_name="Last Ordered On")
def __str__(self):
return self.itemName + "(" + self.itemType + ")"
|
import pandas as pd
import numpy as np
import json
import time
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.corpus import stopwords
import re
import string
from nltk.stem import WordNetLemmatizer
from nltk import word_tokenize
df = pd.read_csv("IMDB_movies_big_dataset_clean.csv", low_memory=False, error_bad_lines=False)
def get_title_from_id(id):
return df[df.id == id]["original_title"].values[0]
def get_year_from_id(id):
return df[df.id == id]["year"].values[0]
def get_genre_from_id(id):
return df[df.id == id]["genre"].values[0]
def get_director_from_id(id):
return df[df.id == id]["director"].values[0]
def get_actors_from_id(id):
return df[df.id == id]["actors"].values[0]
def get_id_from_title(title):
return df[df.original_title == title]["id"].values[0]
def get_rating_from_id(id):
return df[df.id == id]["avg_vote"].values[0]
stop = stopwords.words('english')
stop_words_ = set(stopwords.words('english'))
wn = WordNetLemmatizer()
def black_txt(token):  # drop punctuation, stop words, and very short tokens
return token not in stop_words_ and token not in list(string.punctuation) and len(token)>2
def clean_txt(text):
clean_text = []
clean_text2 = []
text = re.sub("'", "",text)
text=re.sub("(\\d|\\W)+"," ",text)
text = text.replace("nbsp", "")
clean_text = [ wn.lemmatize(word, pos="v") for word in word_tokenize(text.lower()) if black_txt(word)]
clean_text2 = [word for word in clean_text if black_txt(word)]
return " ".join(clean_text2)
def get_recommended_movies(movie):
# features = ['original_title','description','actors','genre','director']
# for feature in features:
# df[feature] = df[feature].fillna('')
# df["combined_features"] = df.apply(combine_features,axis=1)
# df["combined_features"] = df["combined_features"].apply(clean_txt)
# tfidf_vectorizer = TfidfVectorizer()
# tfidf_matrix = tfidf_vectorizer.fit_transform((df['combined_features']))
# cv = CountVectorizer()
# count_matrix = cv.fit_transform(df["combined_features"])
# cosine_sim = cosine_similarity(tfidf_matrix)
# np.save('cosine_matrix.npy',cosine_sim)
cosine_sim=np.load('cosine_matrix.npy',allow_pickle=True)
movie_index = get_id_from_title(movie)
similar_movies = list(enumerate(cosine_sim[movie_index]))
sorted_similar_movies = sorted(similar_movies,key=lambda x:x[1],reverse=True)
final_list = []
i = 0
for element in sorted_similar_movies:
if i>0:
list_element = {
'title' : str(get_title_from_id(element[0])),
'year' : str(get_year_from_id(element[0])),
'genre' : get_genre_from_id(element[0]),
'director' : get_director_from_id(element[0]),
'actors' : get_actors_from_id(element[0]),
'rating' : get_rating_from_id(element[0])
}
final_list.append(list_element)
i=i+1
if i>20:
break
return json.dumps(final_list)
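# A minimal usage sketch (assumes cosine_matrix.npy has been precomputed as
# in the commented-out pipeline below):
if __name__ == "__main__":
    print(get_recommended_movies("Titanic"))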
# # Read the CSV file containing the movie data
# print("Enter the movie title and press ENTER: ")
# movie_user_likes = str(input())
# start_time = time.time()
# t1 = time.time()
# df = pd.read_csv("IMDB_movies_big_dataset_clean.csv", low_memory=False, error_bad_lines=False)
# print("The CSV file has been read ... (%.2f seconds)" % (time.time()-t1))
# # Select the features used to compute the similarity score
# t2 = time.time()
# features = ['original_title','description','actors','genre','director']
# for feature in features:
#     df[feature] = df[feature].fillna('')
# # Create a column in the DF object holding the features relevant for the similarity score
def combine_features(row):
try:
return row["original_title"] + " " + row["description"] + " " + row['actors'] +" " + row['genre'] + " " + row["director"]
except:
print("Error in combining features at :", row)
# df["combined_features"] = df.apply(combine_features,axis=1)
# print("S-a creat coloana ce contine caracteristicile pentru recomandare ...(%.2f secunde)" % (time.time()-t2))
# # Se creeaza matricea de frecvente pentru filme, luand in considerare caracteristicile extrase
# # Matrice ce contine pe linii filmele si pe coloane cuvintele unice de pe coloana "combined_features"
# t3 = time.time()
# cv = CountVectorizer()
# count_matrix = cv.fit_transform(df["combined_features"])
# # print(count_matrix.toarray().shape[0])
# # print(count_matrix.toarray().shape[1])
# print("S-a creat matricea de frecvente ...(%.2f secunde)" % (time.time()-t3))
# # Se calculeaza cosine similarity in functie de matricea de frecvente
# # Matrice patratica 16313 x 16313 ( = numarul de filme din fisierul CSV )
# t4 = time.time()
# cosine_sim = cosine_similarity(count_matrix)
# print(cosine_sim.shape)
# print("S-a efectuat cosine similarity pe matricea de frecvente ... (%.2f secunde)" % (time.time()-t4))
# # movie_user_likes = "The Avengers"
# # movie_user_likes = "Titanic"
# # movie_user_likes = "John Wick"
# # Se preia id-ul filmului din titlul oferit de catre utilizator
# movie_index = get_id_from_title(movie_user_likes)
# # Se preia randul din matricea de similaritate ce contine filmul oferit de utilizator si se creeaza o enumeratie de tipul
# # (id_film, scor de silimaritate) cu care se construieste lista de filme similare
# similar_movies = list(enumerate(cosine_sim[movie_index]))
# # Se preia o lista cu filme similare, sortata in ordine descrescatoare dupa scorul de similaritate
# # x[0] = id-ul filmului
# # x[1] = scorul de similaritate al filmului din enumeratie
# sorted_similar_movies = sorted(similar_movies,key=lambda x:x[1],reverse=True)
# # sorted_by_rating = sorted_similar_movies[0:14]
# # Se afiseaza in consola primele 30 de filme similare cu filmul introdus de utilizator
# i=0
# print("Top 30 filme similare cu "+movie_user_likes+" sunt :\n")
# for element in sorted_similar_movies:
# # Se extrage element[0] din enumeratie, ce reprezinta id-ul unui film din setul de date si se extrage apoi titlul filmului cu acest id
# print(get_title_from_id(element[0]))
# i=i+1
# if i>30:
# break
# total_time = time.time() - start_time
# print("--- Timpul total de executie al algoritmului : %.2f secunde ---" % (total_time))
|
# -*- test-case-name: mimic.test.test_ironic -*-
"""
API Mock for Ironic.
http://docs.openstack.org/developer/ironic/webapi/v1.html
"""
from __future__ import absolute_import, division, unicode_literals
from mimic.rest.mimicapp import MimicApp
class IronicApi(object):
"""
Rest endpoints for the Ironic API.
"""
app = MimicApp()
def __init__(self, core):
"""
:param MimicCore core: The core to which the Ironic Api will be
communicating.
"""
self.core = core
@app.route('/nodes', methods=['POST'])
def create_node(self, request):
"""
Responds with response code 201 and returns the newly created node.
"""
return self.core.ironic_node_store.create_node(request)
@app.route('/nodes/<string:node_id>', methods=['DELETE'])
def delete_node(self, request, node_id):
"""
Responds with response code 204 and deletes the node.
"""
return self.core.ironic_node_store.delete_node(request, node_id)
@app.route('/nodes', methods=['GET'])
def list_nodes(self, request):
"""
Responds with response code 200 with a list of nodes.
"""
return self.core.ironic_node_store.list_nodes(include_details=False)
@app.route('/nodes/detail', methods=['GET'])
def list_nodes_with_details(self, request):
"""
Responds with response code 200 with a list of nodes and their details.
"""
return self.core.ironic_node_store.list_nodes(include_details=True)
@app.route('/nodes/<string:node_id>', methods=['GET'])
def get_node_details(self, request, node_id):
"""
Responds with response code 200 with details of the node.
"""
return self.core.ironic_node_store.get_node_details(request, node_id)
@app.route('/nodes/<string:node_id>/states/provision', methods=['PUT'])
def set_node_provision_state(self, request, node_id):
"""
Responds with response code 202 and sets the provision state of
the node.
"""
return self.core.ironic_node_store.set_node_provision_state(
request, node_id)
@app.route('/nodes/<string:node_id>/vendor_passthru/<string:method>', methods=['POST'])
def vendor_passthru_cache_image(self, request, node_id, method):
"""
Responds with response code 202 and sets the :obj:`Node`'s cache_image_id
and cache_status.
Returns 400 if `node_id` does not exist or if the `method` is not `cache_image`
"""
return self.core.ironic_node_store.cache_image_using_vendor_passthru(
request, node_id, method)
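# A hedged client-side sketch of exercising this mock over HTTP. The base URL
# below is hypothetical -- where mimic actually mounts the Ironic mock depends
# on your mimic configuration, so adjust it before running:
#
#   import requests
#   BASE = 'http://localhost:8900/ironic/v1'        # hypothetical mount point
#   requests.post(BASE + '/nodes', json={})         # create_node   -> 201
#   requests.get(BASE + '/nodes').json()            # list_nodes    -> 200
#   requests.get(BASE + '/nodes/detail').json()     # with details  -> 200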
|
#!/usr/bin/env python
import os
import sys
from Tkinter import *
from tkMessageBox import *
from tkFileDialog import *
from reedsolo import RSCodec, ReedSolomonError
from simplecrypt import encrypt, decrypt, DecryptionException
class Notepad:
#variables
__root = Tk()
#Reed - Solomon codec for error detection and correction
#Enough to correct the file even if 15% of characters are corrupted
__rs = RSCodec(120)
#default window width and height
__thisWidth = 300
__thisHeight = 300
__thisFileFrame = Frame(__root, borderwidth=1)
__thisFileFrame.pack(fill=X)
__thisOpenFile = Button(__thisFileFrame, text="Select File", width=10, font=("TkDefaultFont", 10))
__thisOpenFile.pack(side=LEFT);
__thisClearFile = Button(__thisFileFrame, text="Clear", width=10, font=("TkDefaultFont", 10))
__thisClearFile.pack(side=LEFT);
__noFile = "[no file selected]"
__thisFileLabelText = StringVar();
__thisFileLabelText.set(__noFile)
__thisFileLabel = Label(__thisFileFrame, textvariable=__thisFileLabelText, font=("TkDefaultFont", 10))
__thisFileLabel.pack(side=LEFT, expand=True, fill=BOTH);
__thisCmdFrame = Frame(__root, borderwidth=1)
__thisCmdFrame.pack(fill=X)
__thisPassLabel = Label(__thisCmdFrame, text="Enter Key: ", width=13, font=("TkDefaultFont", 10))
__thisPassLabel.pack(side=LEFT);
__thisPassEntry = Entry(__thisCmdFrame, show='*', font=("TkFixedFont", 14))
__thisPassEntry.pack(side=LEFT, expand=True, fill=BOTH);
__thisSaveFile = Button(__thisCmdFrame, text="Save", font=("TkDefaultFont", 10))
__thisSaveFile.pack(side=LEFT);
__thisLoadFile = Button(__thisCmdFrame, text="Load", font=("TkDefaultFont", 10))
__thisLoadFile.pack(side=LEFT);
__thisTextArea = Text(__root, font=("TkFixedFont", 12), undo=TRUE)
__thisScrollBar = Scrollbar(__thisTextArea)
__file = None
def __init__(self,**kwargs):
#initialization
#set icon
try:
appdir = os.path.dirname(os.path.abspath(__file__))
self.__root.tk.call('wm','iconphoto',self.__root._w, PhotoImage(file=os.path.join(appdir, "icon.png")))
except:
pass
#set window size (the default is 300x300)
try:
self.__thisWidth = kwargs['width']
except KeyError:
pass
try:
self.__thisHeight = kwargs['height']
except KeyError:
pass
#set the window text
self.__root.title(self.__noFile + " - CryptoNotepad")
#center the window
screenWidth = self.__root.winfo_screenwidth()
screenHeight = self.__root.winfo_screenheight()
left = (screenWidth / 2) - (self.__thisWidth / 2)
top = (screenHeight / 2) - (self.__thisHeight /2)
self.__root.geometry('%dx%d+%d+%d' % (self.__thisWidth, self.__thisHeight, left, top))
#add controls (widget)
self.__thisTextArea.pack(fill=BOTH, expand=True)
self.__thisScrollBar.pack(side=RIGHT, fill=Y)
self.__thisScrollBar.config(command=self.__thisTextArea.yview)
self.__thisTextArea.config(yscrollcommand=self.__thisScrollBar.set, wrap=CHAR)
self.__thisOpenFile.config(command=self.__openFile)
self.__thisClearFile.config(command=self.__clearFile)
self.__thisSaveFile.config(command=self.__saveFile)
self.__thisLoadFile.config(command=self.__loadFile)
def __quitApplication(self):
self.__root.destroy()
#exit()
def __showAbout(self):
showinfo("CryptoNotepad", "Created by an unnamed programmer")
def __openFile(self):
self.__file = askopenfilename(defaultextension=".bin",filetypes=[("Encrypted Text Files","*.bin"),("All Files","*.*")])
if self.__file == "":
#no file to open
self.__file = None
else:
            self.__root.title(os.path.basename(self.__file) + " - CryptoNotepad")
self.__thisFileLabelText.set(self.__file)
def __clearFile(self):
self.__file = None
self.__root.title(self.__noFile + " - CryptoNotepad")
self.__thisFileLabelText.set(self.__noFile)
def __encodeFile(self, filestr):
#Encrypt with AES-256
encrypted = bytearray(encrypt(self.__getKey(), filestr))
#Encode with Reed-Solomon codec capable of correcting 15% of byte errors
#And test the decoding on encoded file to be sure
encoded = self.__rs.encode(encrypted)
try:
decoded = self.__rs.decode(encoded)
decrypted = decrypt(self.__getKey(), buffer(decoded)).decode('utf-8')
if decoded == encrypted and filestr == decrypted:
return encoded
else:
showerror("Weird Encoding Error", "This error should never happen. Sorry, cannot save your file, try other programs. There is something wrong with reedsolo.py or simplecrypt.py")
return None
except:
print sys.exc_info()[0]
return None
def __decodeFile(self, filestr):
#decode RS code
decodedText = ''
try:
decodedText = self.__rs.decode(bytearray(filestr))
except ReedSolomonError:
showerror("Decoding Error", "The program cannot decode your file because it is corrupted beyond repair. Be careful and don't forget to make backups")
return ''
#decrypt
try:
decryptedText = decrypt(self.__getKey(), buffer(decodedText))
except DecryptionException as d:
print "DecryptionException: ", d
showerror("Decryption Error", "The program cannot decrypt contents of your file. Either your key is invalid or the password is corrupted. There is no way to decrypt your data if you lose the password. Be careful and don't forget to make backups")
decryptedText = ''
return decryptedText
def __getKey(self):
return self.__thisPassEntry.get()
def __loadFile(self):
if (len(self.__getKey()) > 0 and self.__file):
file = open(self.__file,"rb")
ftext = file.read()
text = self.__decodeFile(ftext);
self.__thisTextArea.delete(1.0,END)
self.__thisTextArea.insert(1.0, text)
else:
if not self.__file:
showerror("Error", "No file selected.")
if len(self.__thisPassEntry.get()) == 0:
showerror("Error", "No key entered.")
def __saveFile(self):
if self.__file == None:
self.__file = asksaveasfilename(initialfile='text.bin',defaultextension=".bin",filetypes=[("Encrypted Text Files","*.bin"),("All Files","*.*")])
self.__thisFileLabelText.set(self.__file)
if (len(self.__getKey()) > 0):
            #try to save the file; note 'end-1c' instead of END to drop the trailing newline
text = self.__thisTextArea.get(1.0, 'end-1c')
            encodedText = self.__encodeFile(text)
            #only open (and truncate) the file once encoding succeeded, so a failed save cannot wipe the old contents
            if (encodedText != None):
                file = open(self.__file, "wb")
                file.write(encodedText)
                file.close()
#change the window title
self.__root.title(os.path.basename(self.__file) + " - CryptoNotepad")
else:
if not len(self.__thisPassEntry.get()) > 0:
showerror("Error", "No key entered.")
def __cut(self):
self.__thisTextArea.event_generate("<<Cut>>")
def __copy(self):
self.__thisTextArea.event_generate("<<Copy>>")
def __paste(self):
self.__thisTextArea.event_generate("<<Paste>>")
def run(self):
#run main application
self.__root.mainloop()
#run main application
notepad = Notepad(width=600,height=400)
notepad.run()
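# A minimal round-trip sketch (run separately from the GUI above) of the
# encode/decode layering used here: AES via simple-crypt wrapped in Reed-Solomon.
# Depending on the installed reedsolo version, decode() returns either a
# bytearray or a (message, full_message, errata_positions) tuple, so both
# cases are handled below.
def _roundtrip_demo():
    rs = RSCodec(120)  # can correct up to 60 corrupted bytes per 255-byte chunk
    ciphertext = encrypt('password', u'secret note')
    protected = rs.encode(bytearray(ciphertext))
    recovered = rs.decode(protected)
    if isinstance(recovered, tuple):  # newer reedsolo returns a tuple
        recovered = recovered[0]
    print decrypt('password', bytes(recovered))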
|
import pyglet
from pyglet.gl import *
#import random
#from random import uniform,randrange,choice
from numpy.random import uniform,randint,choice
import loaders
from pymunk import Vec2d
import PiTweener
import itertools
## http://stackoverflow.com/questions/14885349/how-to-implement-a-particle-engine
## Performance? http://docs.cython.org/src/userguide/tutorial.html
## or PyPy
def omni_spread(speed_x,speed_y):
def _omni_spread(particle):
particle.x += speed_x
particle.y += speed_y
return _omni_spread
def init_vel(x,y):
def _init_vel(particle):
particle.x += particle.vel[0]
particle.y += particle.vel[1]
particle.vel += Vec2d(x,y)
return _init_vel
def gravity(strength_x,strength_y):
def _gravity(particle):
particle.x += particle.vel[0]
particle.y += particle.vel[1]
particle.vel += Vec2d(strength_x,strength_y)
return _gravity
def scale(scale):
def _scale(particle):
particle.sprite.scale = scale
return _scale
def rotate(speed):
def _rotate(particle):
particle.sprite.rotation += speed
return _rotate
def sprite_color_overlay(color):
def _sprite_color_overlay(particle):
particle.sprite.color = color
return _sprite_color_overlay
class FadeToColor(object):
def __init__(self, color):
self.r,self.g,self.b = 255,255,255
self.tweener = PiTweener.Tweener()
self.tweener.add_tween(self,
r = color[0],
g = color[1],
b = color[2],
tween_time = uniform(.25,.5),
tween_type = self.tweener.LINEAR,)
def sprite_color_overlay_flash(color):
fader = FadeToColor(color)
def _sprite_color_overlay_flash(particle):
fader.tweener.update()
particle.sprite.color = fader.r,fader.g,fader.b
return _sprite_color_overlay_flash
def age(amount):
def _age(particle):
particle.alive += amount
return _age
class AgeDecay(object):
def __init__(self, age, fade=False):
self.tweenable = 1
self.opacity = 255
self.tweener = PiTweener.Tweener()
if not fade:
self.tweener.add_tween(self,
tweenable = 0,
tween_time = age,
tween_type = self.tweener.LINEAR,)
else:
self.tweener.add_tween(self,
tweenable = 0,
tween_time = age,
tween_type = self.tweener.OUT_CUBIC,
on_complete_function = self.fade)
def fade(self):
self.tweener.add_tween(self,
opacity = 0,
tween_time = .25,
tween_type = self.tweener.LINEAR)
def age_kill(age):
age_decay = AgeDecay(age)
def _age_kill(particle):
age_decay.tweener.update()
if age_decay.tweenable == 0:
particle.kill()
return _age_kill
def age_fade_kill(age):
age_decay = AgeDecay(age, fade=True)
def _age_fade_kill(particle):
age_decay.tweener.update()
particle.sprite.opacity = age_decay.opacity
if particle.sprite.opacity == 0:
particle.kill()
return _age_fade_kill
def age_scale_fade_kill(rate):
def _age_scale_fade_kill(particle):
particle.alive -= 1
if particle.alive < 13:
particle.sprite.opacity -= 15
particle.sprite.scale += rate
if particle.alive < 0:
particle.kill()
return _age_scale_fade_kill
def kill_at(max_x,max_y):
def _kill_at(particle):
if particle.x < -max_x or particle.x > max_x or particle.y < -max_y or particle.y > max_y:
particle.kill()
return _kill_at
def fade_kill_at(max_x,max_y):
def _kill_at(particle):
if particle.x < -max_x or particle.x > max_x or particle.y < -max_y or particle.y > max_y:
particle.sprite.opacity -= 15
if particle.sprite.opacity < 20:
particle.kill()
return _kill_at
def ascending(speed):
def _ascending(particle):
particle.y += speed
return _ascending
def fan_out(modifier):
def _fan_out(particle):
d = particle.alive / modifier
d += 1
particle.x += randint(int(-d),int(d))
return _fan_out
def wind(direction, strength):
def _wind(particle):
if randint(0,100) < strength:
particle.x += direction
return _wind
def fan(modifier):
def _fan(particle):
d = particle.alive / modifier
d += 1
particle.x += randint(-d, d)
return _fan
def spark_machine(age,img,batch,group):
def create():
for _ in range(choice([0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,3,4])):
behavior = (
omni_spread(uniform(0.4,-0.4),
uniform(0.2,-0.2)),
age_fade_kill(age+uniform(0,.5)),
#scale(uniform(2,5)),
#fade_kill_at(260,160),
rotate(uniform(0.2,-0.2)),
scale(uniform(2,5))
)
p = Particle(age,img,batch,group,*behavior)
yield p
while True:
yield create()
def powerup(age, i_vel, img, color_overlay=(0,0,0), batch=None, group=None):
def create():
for _ in range(20):
behavior = (gravity(0,-.08),
#sprite_color_overlay_flash(color_overlay),
age_fade_kill(age))
p = Particle(age,img,batch,group,*behavior)
p.sprite.color = color_overlay
p.sprite.scale = uniform(.5,1)
p.vel = (uniform(i_vel[0][0],i_vel[0][1]),
uniform(i_vel[1][0],i_vel[1][1]))
yield p
while True:
yield create()
def finish_confetti(age,
i_vel,
img,
batch=None,
group=None):
behavior = (fan(3),
gravity(0,-0.05),
age_kill(age+randint(0,2)),)
def create():
for _ in range(50):
p = Particle(age,img,batch,group,*behavior)
p.sprite.rotation = randint(-90,90)
p.sprite.scale = randint(3,4)
p.vel = (uniform(i_vel[0][0],i_vel[0][1]),
uniform(i_vel[1][0],i_vel[1][1]))
yield p
while True:
yield create()
class Spurt(object):
def __init__(self, emitter):
self.emitter = emitter
self.tweener = PiTweener.Tweener()
self.tweenable = 1
def update(self):
self.emitter.update()
self.tweener.update()
def add_factory(self, factory, duration):
self.factory = factory
self.emitter.add_factory(self.factory, pre_fill = 0)
self.tweener.add_tween(self,
tweenable = 0,
tween_time = duration,
tween_type = self.tweener.LINEAR,
on_complete_function = self.remove_factory)
def remove_factory(self):
self.emitter.factories.remove(self.factory)
class Particle():
def __init__(self,age,img,batch=None,group=None,*strategies,age_offset=(0,100)):
self.x,self.y = 0,0
self.vel = Vec2d(0,0)
self.sprite = loaders.image_sprite_loader(img,
pos = (self.x,self.y),
anchor = ('center', 'center'),
batch = batch,
group = group,
linear_interpolation = True)
self.age = age + randint(age_offset[0],age_offset[1])
self.alive = age
self.strategies = strategies
def set_scale(self, scale):
self.sprite.scale = scale
def kill(self):
self.alive = -1
def move(self):
for s in self.strategies:
s(self)
if self.alive > 0:
return self
class Emitter(object):
    def __init__(self, pos=(0,0), max_num=1500, *args, **kwargs):
self.particles = []
self.pos = pos
self.factories = []
self.max_num = max_num
def add_factory(self,factory,pre_fill=300):
self.factories.append(factory)
tmp = []
for _ in range(pre_fill):
n = next(factory)
tmp.extend(n)
for p in tmp:
p.move()
self.particles.extend(tmp)
def move(self, p):
p.sprite.x, p.sprite.y = self.pos[0]+p.x,self.pos[1]+p.y
return p
def update(self):
#if self.emit:
# for f in self.factories:
# if len(self.particles) < self.max_num:
# self.particles.extend(next(f))
# for p in self.particles[:]:
# p.move()
# if p.alive == -1:
# self.particles.remove(p)
tmp = itertools.chain(self.particles, *map(next, self.factories))
tmp2 = filter(Particle.move, tmp) # side effect!
self.particles = list(tmp2)
for p in self.particles:
p.sprite.x,p.sprite.y = self.pos[0]+p.x,self.pos[1]+p.y
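# A minimal usage sketch, assuming the project-local `loaders` module can load
# the (hypothetical) image name below; the wiring otherwise follows the classes above:
if __name__ == '__main__':
    window = pyglet.window.Window(512, 512)
    batch = pyglet.graphics.Batch()
    emitter = Emitter(pos=(256, 256))
    # spark_machine yields a fresh particle generator on every next() call
    emitter.add_factory(spark_machine(1.0, 'spark.png', batch, None), pre_fill=0)

    @window.event
    def on_draw():
        window.clear()
        batch.draw()

    pyglet.clock.schedule_interval(lambda dt: emitter.update(), 1 / 60.0)
    pyglet.app.run()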
|
# app building library
import streamlit as st
# dataframe libraries
import numpy as np
import pandas as pd
# model libraries
import gensim
from gensim.models import Doc2Vec
# miscellany
import pickle
import gzip
# custom functions for this app
from functions_app import *
# load poetry dataframe
with gzip.open('data/poems_df_rec_system.pkl', 'rb') as hello:
df = pickle.load(hello)
# load doc2vec dataframe
with gzip.open('data/features_doc2vec_df.pkl', 'rb') as hello:
df_docvec = pickle.load(hello)
# load doc2vec model
model = Doc2Vec.load('data/doc2vec_final.model')
# image of PO-REC
st.image('data/PO-REC.png', width=300)
# message from the recommender-bot
st.title('Greetings! It is I, PO-REC.')
st.header('I am designed to recommend poetry based on certain parameters.')
st.subheader('You can fiddle with my settings on the left of your screen.')
# number of poem recommendations in sidebar
# NOTE: text in separate markdown because couldn't figure out
# how to change font size within number_input
st.sidebar.markdown('#### How many poems shall I compute?')
num_option = st.sidebar.number_input(
'',
min_value=1,
max_value=len(df),
value=100)
# format blank space
st.sidebar.markdown('')
# select a function to run, word_similarity, text_similarity, or poem_similarity
st.sidebar.markdown('#### What method shall I use to compute?')
initialize_option = st.sidebar.radio(
'',
['word', 'phrase', 'poem'])
# format blank space
st.sidebar.markdown('')
# for word option
if initialize_option == 'word':
# format blank space
st.markdown('')
st.markdown('')
# format larger label
st.markdown('#### Give me a word.')
# ask user for a word
word_option = st.text_input('')
# upon user input
if word_option:
# determine if word (reformatted) in model's vocabulary
if word_option.lower() in model.wv.vocab.keys():
# message
st.sidebar.markdown(
'I merely vectorized the word and compared its alignment to all of the \
poems in my vast collection.')
# run function
similar_poems = word_similarity(word_option.lower(), df, model,
n=num_option)
# filter
filter_process(similar_poems, df)
# PO-REC's message if word not in model's vocabulary
else:
st.markdown(f'### It may surprise you to learn that I do not know the word\
***{word_option}***.')
st.markdown(f'### Please try another.')
# for text option
elif initialize_option == 'phrase':
# format blank space
st.markdown('')
st.markdown('')
# format larger label
st.markdown('#### Give me a phrase, or a bunch of words.')
# ask user for words
phrase_option = st.text_input('')
# upon user input
if phrase_option:
# message
st.sidebar.markdown(
'I merely processed the text, inferred its vector, and compared its \
alignment to all of the poems in my vast collection of poetry.')
# run function
similar_poems = phrase_similarity(phrase_option, df, model, n=num_option)
# filter
filter_process(similar_poems, df)
# for poem option
elif initialize_option == 'poem':
# format blank space
st.markdown('')
st.markdown('')
# initialize blank list
poets = ['']
# add all poets from dataframe
poets.extend(df['poet'].unique())
# format larger label
st.markdown('#### Pick a poet:')
# prompt user to select poet
poet_option = st.selectbox(
'',
poets)
# initialize blank list
poet_titles = ['']
# add all titles from that poet
poet_titles.extend(df[df.poet == poet_option].title.unique())
# prompt user to select title (only after poet is selected)
if poet_option:
# format blank space
st.markdown('')
# format larger label
st.markdown('#### Pick a poem:')
title_option = st.selectbox(
'',
poet_titles)
# upon title selection
if title_option:
# message
st.sidebar.markdown(
'I merely found the vector for this particular poem and compared its \
alignment to all of the other poems in my vast collection.')
# run function
similar_poems = poem_similarity(
title_option,
poet_option,
df,
df_docvec,
model,
n=num_option)
# filter
filter_process(similar_poems, df)
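# functions_app is project-local and not shown here; below is a hedged sketch of
# what word_similarity might look like under the gensim 3.x API, assuming the doc
# vectors were tagged with the dataframe's positional index (the real
# implementation may differ):
def _word_similarity_sketch(word, df, model, n=10):
    # rank every poem vector against the word's vector
    sims = model.docvecs.most_similar(positive=[model.wv[word]], topn=n)
    # sims is a list of (doc_tag, cosine similarity) pairs
    return [(df.iloc[int(tag)]['title'], score) for tag, score in sims]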
|
from django.http import HttpResponse, JsonResponse
from rest_framework import viewsets
from rest_framework.decorators import detail_route
from rest_framework.exceptions import ParseError
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from landscapesim import models
from landscapesim.report import Report
from landscapesim.serializers import projects, reports, scenarios, regions
class LibraryViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Library.objects.all()
serializer_class = projects.LibrarySerializer
class ProjectViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Project.objects.all()
serializer_class = projects.ProjectSerializer
@detail_route(methods=['get'])
def definitions(self, *args, **kwargs):
context = {'request': self.request}
return Response(projects.ProjectDefinitionsSerializer(self.get_object(), context=context).data)
@detail_route(methods=['get'])
def scenarios(self, *args, **kwargs):
context = {'request': self.request}
return Response(projects.ScenarioSerializer(
models.Scenario.objects.filter(project=self.get_object()), many=True, context=context
).data)
class ScenarioViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Scenario.objects.all()
serializer_class = projects.ScenarioSerializer
@detail_route(methods=['get'])
def project(self, *args, **kwargs):
context = {'request': self.request}
return Response(projects.ProjectSerializer(self.get_object().project, context=context).data)
@detail_route(methods=['get'])
def library(self, *args, **kwargs):
context = {'request': self.request}
return Response(projects.LibrarySerializer(self.get_object().project.library, context=context).data)
@detail_route(methods=['get'])
def reports(self, *args, **kwargs):
context = {'request': self.request}
return Response(reports.QueryScenarioReportSerializer(self.get_object(), context=context).data)
@detail_route(methods=['get'])
def config(self, *args, **kwargs):
context = {'request': self.request}
return Response(scenarios.ScenarioConfigSerializer(self.get_object(), context=context).data)
def get_queryset(self):
if not self.request.query_params.get('results_only'):
return self.queryset
else:
is_result = self.request.query_params.get('results_only')
if is_result not in ['true', 'false']:
                raise ParseError("results_only must be 'true' or 'false'.")
return self.queryset.filter(is_result=is_result == 'true')
class StratumViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Stratum.objects.all()
serializer_class = projects.StratumSerializer
def get_queryset(self):
pid = self.request.query_params.get('pid', None)
if pid is None:
return self.queryset
return self.queryset.filter(project__pid=pid)
class StateClassViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.StateClass.objects.all()
serializer_class = projects.StateClassSerializer
class SecondaryStratumViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.SecondaryStratum.objects.all()
serializer_class = projects.SecondaryStratumSerializer
class TransitionTypeViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionType.objects.all()
serializer_class = projects.TransitionTypeSerializer
@detail_route(methods=['get'])
def groups(self, *args, **kwargs):
tgrps = [
models.TransitionGroup.objects.get(pk=obj['transition_group'])
for obj in models.TransitionTypeGroup.objects.filter(
transition_type=self.get_object()).values('transition_group')
]
return Response(projects.TransitionGroupSerializer(tgrps, many=True).data)
class TransitionGroupViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionGroup.objects.all()
serializer_class = projects.TransitionGroupSerializer
@detail_route(methods=['get'])
def types(self, *args, **kwargs):
tts = [
models.TransitionType.objects.get(pk=obj['transition_type'])
for obj in models.TransitionTypeGroup.objects.filter(
transition_group=self.get_object()).values('transition_type')
]
return Response(projects.TransitionTypeSerializer(tts, many=True).data)
class TransitionTypeGroupViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionTypeGroup.objects.all()
serializer_class = projects.TransitionTypeGroupSerializer
class TransitionMultiplierTypeViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionMultiplierType.objects.all()
serializer_class = projects.TransitionMultiplierTypeSerializer
class AttributeGroupViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.AttributeGroup.objects.all()
serializer_class = projects.AttributeGroupSerializer
class StateAttributeTypeViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.StateAttributeType.objects.all()
serializer_class = projects.StateAttributeTypeSerializer
class TransitionAttributeTypeViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionAttributeType.objects.all()
serializer_class = projects.TransitionAttributeTypeSerializer
""" Scenario configuration viewsets """
class DeterministicTransitionViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.DeterministicTransition.objects.all()
serializer_class = scenarios.DeterministicTransitionSerializer
class TransitionViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Transition.objects.all()
serializer_class = scenarios.TransitionSerializer
class InitialConditionsNonSpatialViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.InitialConditionsNonSpatial.objects.all()
serializer_class = scenarios.InitialConditionsNonSpatialSerializer
class InitialConditionsNonSpatialDistributionViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.InitialConditionsNonSpatialDistribution.objects.all()
serializer_class = scenarios.InitialConditionsNonSpatialDistributionSerializer
class InitialConditionsSpatialViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.InitialConditionsSpatial.objects.all()
serializer_class = scenarios.InitialConditionsSpatialSerializer
class TransitionTargetViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionTarget.objects.all()
serializer_class = scenarios.TransitionTargetSerializer
class TransitionMultiplierValueViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionMultiplierValue.objects.all()
serializer_class = scenarios.TransitionMultiplierValueSerializer
class TransitionSizeDistributionViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionSizeDistribution.objects.all()
serializer_class = scenarios.TransitionSizeDistributionSerializer
class TransitionSizePrioritizationViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionSizePrioritization.objects.all()
serializer_class = scenarios.TransitionSizePrioritizationSerializer
class TransitionSpatialMultiplierViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionSpatialMultiplier.objects.all()
serializer_class = scenarios.TransitionSpatialMultiplierSerializer
class StateAttributeValueViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.StateAttributeValue.objects.all()
serializer_class = scenarios.StateAttributeValueSerializer
class TransitionAttributeValueViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionAttributeValue.objects.all()
serializer_class = scenarios.TransitionAttributeValueSerializer
class TransitionAttributeTargetViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionAttributeTarget.objects.all()
serializer_class = scenarios.TransitionAttributeTargetSerializer
""" Report viewsets """
class StateClassSummaryReportViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.StateClassSummaryReport.objects.all()
serializer_class = reports.StateClassSummaryReportSerializer
class TransitionSummaryReportViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionSummaryReport.objects.all()
serializer_class = reports.TransitionSummaryReportSerializer
class TransitionByStateClassSummaryReportViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionByStateClassSummaryReport.objects.all()
serializer_class = reports.TransitionByStateClassSummaryReportSerializer
class StateAttributeSummaryReportViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.StateAttributeSummaryReport.objects.all()
serializer_class = reports.StateAttributeSummaryReportSerializer
class TransitionAttributeSummaryReportViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.TransitionAttributeSummaryReport.objects.all()
serializer_class = reports.TransitionAttributeSummaryReportSerializer
class ReportViewBase(GenericAPIView):
serializer_class = reports.GenerateReportSerializer
def _response(self, report):
raise NotImplementedError
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
config = data['configuration']
name = config.pop('report_name')
return self._response(Report(name, config))
class GenerateCSVReportView(ReportViewBase):
def _response(self, report):
csv_data = report.get_csv_data()
response = HttpResponse(content=csv_data, content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename={}.csv'.format(report.report_name)
return response
class RequestSpatialDataView(ReportViewBase):
def _response(self, report):
return JsonResponse(report.request_zip_data())
class RequestPDFReportView(ReportViewBase):
def _response(self, report):
return JsonResponse(report.request_pdf_data())
class RegionViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.Region.objects.all()
serializer_class = regions.RegionSerializer
@detail_route(methods=['get'])
def reporting_units(self, *args, **kwargs):
context = {'request': self.request}
return Response({
'type': 'FeatureCollection',
'features': regions.ReportingUnitSerializer(
self.get_object().reporting_units.all(), many=True, context=context
).data
})
class ReportingUnitViewset(viewsets.ReadOnlyModelViewSet):
queryset = models.ReportingUnit.objects.all()
serializer_class = regions.ReportingUnitSerializer
|
from collections import defaultdict
from typing import List
class Solution:
def calcEquation(self, equations: List[List[str]], values: List[float], queries: List[List[str]]) -> List[float]:
graph = defaultdict(list)
for i in range(len(equations)):
graph[equations[i][0]].append([equations[i][1],values[i]])
graph[equations[i][1]].append([equations[i][0],(1/values[i])])
output_list = []
def dfs(curr_node,dst,val,visited):
if(curr_node in visited or curr_node not in graph):
return False
visited.add(curr_node)
if(curr_node==dst):
output_list.append(val)
return True
for node in graph[curr_node]:
if(dfs(node[0],dst,val*node[1],visited)):
return True
return False
for query in queries:
src = query[0]
dst = query[1]
visited = set()
if(not dfs(src,dst,1,visited)):
output_list.append(-1)
return output_list
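# Example run (the classic LeetCode case): given a / b = 2.0 and b / c = 3.0,
# the queries a/c, b/a, a/e, a/a, x/x evaluate to [6.0, 0.5, -1, 1, -1]:
if __name__ == '__main__':
    print(Solution().calcEquation(
        [["a", "b"], ["b", "c"]], [2.0, 3.0],
        [["a", "c"], ["b", "a"], ["a", "e"], ["a", "a"], ["x", "x"]]))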
|
"""quiz_app URL Configuration
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
    path('', include(('quizes.urls', 'quizes'), namespace='quizes')),
path('user/', include(('user.urls','user'), namespace='user')),
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from flask import Flask, request
from flask_restful import Resource, Api
import mysql.connector, json
app = Flask(__name__)
api = Api(app)
class Product(Resource):
#config for login credentials
config = {
'user':'root','password':'root',
'host': 'db-mysql','port':'3306',
'database':'products'
}
def get(self):
#accessing the config and connecting to the mysql database
connection = mysql.connector.connect(**self.config)
cursor = connection.cursor()
#to retrieve data from mysql table
cursor.execute("SELECT * FROM items_on_sale")
#making a json format of the data
results = [{"item":name, 'qty':qty,'price':price} for (name,qty,price) in cursor]
return {'products':results}
def post(self):
#to receive the form data from the php admin page
data = request.get_json()
#accessing the config and connecting to the mysql database
connection = mysql.connector.connect(**self.config)
        cursor = connection.cursor(buffered=True)
#checking if there is already an existing product name by checking the row number it returns
sql = "SELECT COUNT(*) FROM items_on_sale WHERE name = %s "
val = (str(data['user']['Pname']),)
cursor.execute(sql,val)
result=cursor.fetchone()
        #if no record with the entered name exists yet, insert a new one
if(result[0] == 0):
sql = "insert into items_on_sale (name,qty,price) values (%s,%s,%s) "
val = (str(data['user']['Pname']),str(data['user']['Quantity']),str(data['user']['Price']))
cursor.execute(sql,val)
connection.commit()
        #if a record already exists, just update its quantity and price
else:
sql = "UPDATE items_on_sale SET qty = %s, price= %s WHERE name = %s"
val = (str(data['user']['Quantity']),str(data['user']['Price']),str(data['user']['Pname']))
cursor.execute(sql,val)
connection.commit()
api.add_resource(Product, '/')
if __name__=='__main__':
app.run(host='0.0.0.0',port=80,debug=True)
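# A hedged client-side sketch; run it as a separate script against a running
# instance (the host/port are assumptions -- the service binds 0.0.0.0:80 above).
# The payload shape mirrors what post() reads: data['user']['Pname'|'Quantity'|'Price'].
#
#   import requests
#   requests.post('http://localhost:80/', json={
#       'user': {'Pname': 'apple', 'Quantity': '5', 'Price': '1.50'}})
#   print(requests.get('http://localhost:80/').json())  # -> {'products': [...]}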
|
import os
import math
from utct.common.data_source_template import DataSourceTemplate
class MnistDataSourceTemplate(DataSourceTemplate):
def __init__(self,
use_augmentation=True,
data_h5_path=None):
super(MnistDataSourceTemplate, self).__init__(use_augmentation)
self.param_bounds = {
#'dat_batch_size': (10, 200),
'dat_gaussian_blur_sigma_max': (0.0, 1.0),
'dat_gaussian_noise_sigma_max': (0.0, 0.05),
'dat_perspective_transform_max_pt_deviation': (0.0, 2.99),
'dat_max_scale_add': (0.0, 2.0 / (28.0 / 2)),
'dat_max_translate': (0.0, 3.0),
'dat_rotate_max_angle_rad': (0.0, math.pi / 12)}
#self.param_bounds = {
# 'dat_batch_size': (10, 200)}
self.params = {
'data_h5_path': '../TEMP/mnist/mnist.h5'}
self.use_augmentation = use_augmentation
if data_h5_path is not None:
self.params['data_h5_path'] = data_h5_path
self.n_dim = 10
self.img_h = 28
self.img_w = 28
self.cache_data_dirname = None
self.data_loaded = False
self.train_img = None
self.train_lbl = None
self.val_img = None
self.val_lbl = None
self.batch_size = None
def update_project_dirname(self, project_dirname):
self.cache_data_dirname = os.path.join(project_dirname, 'cache_data')
if not os.path.exists(self.cache_data_dirname):
os.makedirs(self.cache_data_dirname)
def update_cache_data_dirname(self, cache_data_dirname):
self.cache_data_dirname = cache_data_dirname
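# A minimal usage sketch (the h5 path is the class default and may not exist
# in your checkout; point it at a real file before running):
if __name__ == '__main__':
    source = MnistDataSourceTemplate(use_augmentation=True,
                                     data_h5_path='../TEMP/mnist/mnist.h5')
    source.update_project_dirname('./demo_project')  # creates ./demo_project/cache_data
    print(source.param_bounds['dat_max_translate'])  # (0.0, 3.0)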
|
#!/usr/bin/env python3
# -*- encoding:utf8 -*-
from shutil import *
import os,sys,datetime
import subprocess
from subprocess import PIPE
from pprint import pprint
from muninn_config import JUPYTER_NOTEBOOK_ROOT_FOLDER,HTML_DST_FOLDER
import pickle, traceback
SEP = os.path.sep
def transData(clist,from_source=JUPYTER_NOTEBOOK_ROOT_FOLDER,to_source="source"+SEP):
"""用来转移文件的胶水层,耦合fatch_data
使用clist提供的期望的地址来寻找"""
print("="*10,"正在迁移指定文件夹下的ipynb文件以及其附带的媒体文件","="*10)
def getAllowedFolders(clist):
af = []
res = ""
for course in clist:
af.append(course.sourceuri)
res += course.sourceuri + "::"
return res, af
def transfile(reset=False,fast=True,from_source="source/",to_source="new_source/",allowed_folder=[]):
"只拷贝出现在允许列表的文件夹以及其文件"
print("正在遍历文件树",from_source)
fast_copytree(src=from_source,dst=to_source,symlinks=False,allowed_folder=allowed_folder)
def fast_copytree(src,dst,symlinks=False,allowed_folder=[],needconvert=""):
"""从src文件夹拷贝数据到dst文件夹,这是一个copytree的简化版本"""
#首先遍历所有的src文件夹的子文件夹
names = os.listdir(src)
#遍历的文件中一定包含那些文件夹,因此-1可以取出这些文件夹,然后创建它们
if src.split(SEP)[-1] in allowed_folder:
# print("创建文件夹",src.split(SEP)[-1])
try: os.makedirs(dst)
except: pass
errors = []
        # for each entry under the folder
for name in names:
srcname = os.path.join(src,name)
dstname = os.path.join(dst,name)
try:
if os.path.isdir(srcname):
fast_copytree(srcname,dstname,symlinks,allowed_folder=allowed_folder)
else:
if src.split(SEP)[-1] in allowed_folder:
# print("现在正在处理:",src,dst)
srcname = srcname.replace(SEP,"/")
dstname = dstname.replace(SEP,"/")
#没有文件或者文件有更新
if not os.path.isfile(dstname) or os.stat(srcname).st_mtime > os.stat(dstname).st_mtime:
try:
if os.path.isfile(dstname):
os.remove(dstname)
print("发现旧文件,正在删除..")
except: print("删除旧文件出错",dstname)
print("正在复制文件到",dstname)
#dstname source/coursera_learn_computer/chapter2_x86_mips.ipynb
copy2(srcname,dstname)
except OSError as why:
errors.append((srcname,dstname,str(why)))
except Error as err:
errors.extend(err.args[0])
try:
copystat(src.replace(SEP,"/"), dst.replace(SEP,"/"))
except:
pass
if errors:
raise Error(errors)
res, JUPYTER_NOTEBOOK_ALLOWED_FOLDER = getAllowedFolders(clist)
print("Allowed Floder is",res)
transfile(from_source=from_source,to_source=to_source,allowed_folder=JUPYTER_NOTEBOOK_ALLOWED_FOLDER)
print("文件转移完毕")
def findIpynb(clist,from_source=JUPYTER_NOTEBOOK_ROOT_FOLDER,to_source="source"+SEP):
needfiles = []
try:
print("正在根据配置文件寻找需要进行转换的ipynb文件(ipynb文件日期新于html文件)")
count = 0
for course in clist:
for chapter in course.chapters:
# coursera_learn_models\WEEK2_model_thinking.html
address = chapter.sourceuri
# coursera_learn_models\WEEK2_model_thinking.ipynb
filename = address.replace(".html",".ipynb")
# C:\Users\Administrator\Desktop\jupyter\coursera_learn_models\WEEK2_model_thinking.ipynb
from_filename = os.path.join(from_source,filename)
# source\coursera_learn_models\WEEK2_model_thinking.ipynb
to_filename = os.path.join(to_source,filename)
to_filename_html = to_filename.replace(".ipynb",".html")
                # if the html file does not exist, or the ipynb file is newer, proceed to the next step
if not os.path.isfile(to_filename_html) or os.stat(from_filename).st_mtime > os.stat(to_filename_html).st_mtime:
count += 1
print("%s. 以下文件应该被找到并且更新"%count,to_filename)
needfiles.append(filename)
except:
print(traceback.format_exc())
return needfiles
def convertNotes(clist,chapter_dir,needfiles=[]):
"""对每一个Notebook,进行转换,胶水层,耦合fatch_data"""
print("="*10,"正在转换IPYNB文件","="*10)
# print("需要处理的文件为",needfiles)
print("更改CWD到",chapter_dir)
cwd = os.getcwd() #之后均在source目录下运行
os.chdir(chapter_dir)
def convert(filename,fname):
"""调用命令行工具对ipynb文件进行html转换,
放在其原始文件夹下,fname为其所在文件夹,filename为其文件名"""
current = os.getcwd()
os.chdir(fname)
c = "jupyter nbconvert %s"%filename
print("切换目录为: ",os.getcwd(),"正在执行指令:",c)
p = subprocess.Popen(c,shell=True,stdout=subprocess.PIPE,stdin=subprocess.PIPE)
p.wait()
if p.returncode != 0:
print("转换出错,错误原因为",str(p.communicate()[0],"utf-8"),p.communicate()[1])
os.chdir(current)
return 0
os.chdir(current)
return 1
co = ""
for course in clist:
co += str(course.name) + " :: "
print("课程列表为",co)
#获取需要转换的课程和笔记,这一步是因为需要在同一个目录下运行convert
conf = {}
for course in clist:
conf[(course.sourceuri,course.id)] = []
count = 0
for chapter in course.chapters:
filename = chapter.sourceuri
            # the object stores an html path, which is wrong before conversion, so map it back to the ipynb file type
if filename.endswith(".html"):
filename = filename.replace(".html",".ipynb")
            # if the file cannot be found, skip its conversion  xxx/xxx.ipynb
            if not os.path.isfile(filename):
                print(filename,"cannot be found but is listed in the configuration; please check manually. Skipping its conversion for now")
continue
if not filename in needfiles:
count += 1
continue
            # get the bare file name, without the path
name = filename.split(SEP)[-1]
conf[(course.sourceuri,course.id)].append(name)
    # iterate over the courses; notes from the same course are handled together (multiple note files)
    print("Chapters and courses to process:",conf)
for path,id in conf:
alist = conf[(path,id)]
if len(alist) == 0: continue
fnames = ""
for a in alist:
fnames += "%s"%a + " "
try:
convert(fnames,path)
except Exception as e:
print(traceback.format_exc())
print("转换 [%s] 此文件夹内容出错"%path,e)
os.chdir(cwd)
print("CWD切换回",cwd)
print("转换完毕")
if __name__ == "__main__":
clist = pickle.load(open("muninn_test_last.data","rb"))
transData(clist,from_source=JUPYTER_NOTEBOOK_ROOT_FOLDER,to_source="source"+SEP)
needfiles = findIpynb(clist,from_source=JUPYTER_NOTEBOOK_ROOT_FOLDER,to_source="source"+SEP)
convertNotes(clist,"source",needfiles=needfiles)
def get_status():
    c = "git status"
    process = subprocess.Popen(c,shell=True,stdout=PIPE,stdin=PIPE)
    out, _ = process.communicate() # communicate() waits for the process and may only be called once
    rc = process.returncode
    print("Checking repository status\n")
    print(out)
    if rc != 0:
        print("Initial status check failed:",out)
        return 0
    else: return 1
def add_stuff():
    c = "git add --all"
    process = subprocess.Popen(c,shell=True,stdout=PIPE,stdin=PIPE)
    print("Staging all changes\n")
    out, _ = process.communicate()
    print(out)
    if process.returncode != 0:
        print("Failed to stage changes",out)
        return 0
    else: return 1
def commit_stuff():
    # i = input("Enter a commit message: ____\b\b\b\b")
    i = datetime.datetime.today()
    c = "git commit -m last"
    process = subprocess.Popen(c,shell=True,stdout=PIPE,stdin=PIPE)
    print("Committing to the local repository...\n")
    out, _ = process.communicate()
    print(out)
    if process.returncode != 0:
        print("Commit to the local repository failed",out)
        return 0
    else: return 1
def pull_stuff():
    c = "git pull"
    process = subprocess.Popen(c,shell=True,stdout=PIPE,stdin=PIPE)
    print("Pulling remote code\n")
    out, _ = process.communicate()
    print(out)
    if process.returncode != 0:
        print("Failed to pull remote code",out)
        return 0
    else: return 1
def push_stuff():
    c = "git push"
    process = subprocess.Popen(c,shell=True,stdout=PIPE,stdin=PIPE)
    print("Pushing to the server\n")
    out, _ = process.communicate()
    print(out)
    if process.returncode != 0:
        print("Push to the remote server failed",out)
        return 0
    else: return 1
def submit():
    """Commit the current folder to the Git server"""
    if get_status():
        if add_stuff():
            if commit_stuff():
                if push_stuff():
                    print("Success!")
                    return 1
    print("Failed!")
    return 0
def get_file():
    """Fetch the Jupyter notebook folders and, per the allowed list, convert them to html automatically"""
    if transfile(from_source=JUPYTER_NOTEBOOK_ROOT_FOLDER,
            to_source=HTML_DST_FOLDER,
            allowed_folder=JUPYTER_NOTEBOOK_ALLOWED_FOLDER):
        if convert_all(root_folder=HTML_DST_FOLDER):
            print("Convert all done!")
# if __name__ == "__main__":
# # convert("week5_problem_soving.ipynb","notebook")
# # if transfile(fast=True,from_source=from_source,\
# # to_source=to_source): submit()
# # main()
# get_file()
# submit()
|
from keras.preprocessing.image import load_img, img_to_array, save_img
from keras.models import Sequential
from keras.layers import Conv2D, MaxPool2D, UpSampling2D
import numpy as np
import os
import argparse
import csv
import matplotlib.pyplot as plt
curdir = os.path.dirname(os.path.abspath(__file__))
parser = argparse.ArgumentParser()
parser.add_argument("--data", choices=['bottle','carpet'], default='bottle')
parser.add_argument('--optimizer', choices=['adam','sgd','adagrad','rmsprop'], default='adam')
parser.add_argument('--loss', choices=['mean_squared_error', 'binary_crossentropy'], default='binary_crossentropy')
parser.add_argument('--epochs', type=int, default=50)
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument('--test_samples', type=float, default=0.2)
# argparse compares raw command-line strings against choices, so choices=[True, False]
# can never match; parse booleans explicitly instead
def _str2bool(s):
    return str(s).lower() in ('true', '1', 'yes')
parser.add_argument('--training', type=_str2bool, default=False)
parser.add_argument('--saveweights', type=_str2bool, default=True)
parser.add_argument('--predict', type=_str2bool, default=True)
def load_data(data_set, target_size=None):
images = []
directory = './' + data_set + '/train/good/'
for filename in os.listdir(directory):
img = load_img(os.path.join(directory,filename), target_size = target_size)
img = img_to_array(img)
images.append(img)
images = np.stack(images)
return images
def load_model():
input_shape=(224,224,3)
n_channels = input_shape[-1]
model = Sequential()
model.add(Conv2D(32, (3,3), activation='relu', padding='same', input_shape=input_shape))
model.add(MaxPool2D(padding='same'))
model.add(Conv2D(16, (3,3), activation='relu', padding='same'))
model.add(MaxPool2D(padding='same'))
model.add(Conv2D(8, (3,3), activation='relu', padding='same'))
model.add(UpSampling2D())
model.add(Conv2D(16, (3,3), activation='relu', padding='same'))
model.add(UpSampling2D())
model.add(Conv2D(32, (3,3), activation='relu', padding='same'))
model.add(Conv2D(n_channels, (3,3), activation='sigmoid', padding='same'))
model.compile(optimizer=args.optimizer, loss=args.loss)
return model
def main(args):
# instantiate model
model = load_model()
train = load_data(args.data, (224,224))
train = train.astype('float32') / 255.0
model.summary()
if(args.training):
print('Training...')
model.fit(x=train, y=train, batch_size=args.batch_size, epochs=args.epochs, validation_split=args.test_samples)
if(args.saveweights):
print('Saving Model...')
model.save_weights('./models/cae/cae_'+ args.data + '_' + str(args.epochs) + '_' + args.loss + '_' + args.optimizer + '_weights.h5')
plt.plot(model.history.history['loss'], label = 'loss')
plt.plot(model.history.history['val_loss'], label='val_loss')
plt.legend()
plt.savefig('./images/cae/cae_loss_' + args.data + '_' + str(args.epochs) + '_' + args.loss + '_' + args.optimizer + '.png')
else:
        print('Loading weights...')
model.load_weights('./models/cae/cae_'+ args.data + '_' + str(args.epochs) + '_' + args.loss + '_' + args.optimizer + '_weights.h5')
print('Done')
if(args.predict):
print('Predicting...')
csv_name='losses.csv'
max_error = model.evaluate(train,train,batch_size=args.batch_size)
test_directory = './' + args.data + '/test/'
result_directory = './results/CAE/' + args.data + '/E' + str(args.epochs) + '_' + args.loss + '_' + args.optimizer + '/'
if not os.path.exists(result_directory):
os.makedirs(result_directory)
anomaly_list=list()
for dir in os.listdir(test_directory):
print(dir)
mse_list=list()
dir_list = list()
dir_list.append(dir)
if not os.path.exists(os.path.join(result_directory,dir)):
os.mkdir(os.path.join(result_directory,dir))
for filename in os.listdir(os.path.join(test_directory,dir)):
img = load_img(os.path.join(test_directory,dir,filename), target_size = (224,224))
img = img_to_array(img)
img = np.expand_dims(img,axis=0)
img = img.astype('float32') / 255.0
prediction = model.predict(img)
predict_name = filename + '_predict.png'
save_img(os.path.join(result_directory,dir,predict_name),prediction[0])
this_error = model.evaluate(img,img)
mse_list.append(this_error)
print('This error:' + str(this_error) + ', Max Error:' + str(max_error + max_error*0.05))
if(this_error < max_error + max_error*0.05):
dir_list.append(False)
else:
dir_list.append(True)
dir_accuracy = 0
if 'good' in dir_list:
dir_accuracy += dir_list.count(False)/(len(dir_list)-1)
else:
dir_accuracy += dir_list.count(True)/(len(dir_list)-1)
dir_accuracy_file = open(os.path.join(result_directory,dir,"accuracy.txt"), "w")
dir_accuracy_file.write(str(dir_accuracy))
dir_accuracy_file.close()
anomaly_list.append(dir_list)
with open(os.path.join(result_directory,dir,csv_name), 'w', newline='') as myfile:
wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
wr.writerow(mse_list)
# calculate accuracy
accuracy = 0
element_count = 0
for i in range(len(anomaly_list)):
element_count += len(anomaly_list[i]) - 1
if 'good' in anomaly_list[i]:
accuracy += anomaly_list[i].count(False)
else:
accuracy += anomaly_list[i].count(True)
print('Anomaly Detection Accuracy: ' + str(accuracy/element_count*100) + '%')
accuracy_file = open(os.path.join(result_directory,"accuracy.txt"), "w")
accuracy_file.write(str(accuracy/element_count))
accuracy_file.close()
del(model)
if __name__ == '__main__':
args = parser.parse_args()
main(args)
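# Assumed invocations (the script name is hypothetical; dataset folders such as
# ./bottle/train/good/ must exist locally, mirroring the layout implied above):
#   python cae_train.py --data bottle --training true --epochs 50
#   python cae_train.py --data bottle --predict true     # loads saved weights
# With --predict true the script writes reconstructions and per-image losses
# under ./results/CAE/<data>/..., flagging an image as anomalous when its
# reconstruction error exceeds the training error by more than 5%.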
|
def f(a, b): # a function that adds its two parameters
return a + b
print(f(3, 5))
print(f(2, 1))
|
__author__ = 'Justin'
import os
import networkx as nx
from datetime import datetime
from SetNetworkTime import set_network_time
from WeightFunction import weightfunction
from ZenScore import zenscore
from random import choice
from geopy.distance import vincenty as latlondist
import geojson
from DisplayNetwork import networkdisplay
# DESCRIPTION: This script will generate the ideal ZenRoute based on a user's desired factor weights
#
# INPUT: factor weights- [a,b,c] corresponding to [Zenness, time, distance]
#
# OUTPUT: ZenRoute (as a networkx object or geojson output)
# Load Network
cwd = os.getcwd()
filename = "OSMNetworkReducedSet.gexf"
filepath = os.path.abspath(os.path.join(cwd, '..', 'Project Data','Networks',filename))
print(filepath)
fh=open(filepath,'rb')
G = nx.read_gexf(fh)
fh.close()
# Update Time Segments
update_time = 0
if update_time == 1:
now = datetime.now()
G = set_network_time(G,'currenttime',now,1800)
# Update "Zenness"
update_zenness = 0
if update_zenness == 1:
for edge in G.edges():
nodeA = edge[0]
nodeB = edge[1]
G[nodeA][nodeB]['Zenness'] = zenscore(G[nodeA][nodeB])
# Update Total Edge Weights
weights = [1,1,1]
keys = ['Zenness','distance','currenttime']
for edge in G.edges():
nodeA = edge[0]
nodeB = edge[1]
    edge_attrs = G[nodeA][nodeB]
    G[nodeA][nodeB]['weight'] = weightfunction(weights,edge_attrs,keys)
# Save Network Graph
filename = "OSMNetworkReducedSet.gexf"
filepath = os.path.abspath(os.path.join(cwd, '..', 'Project Data','Networks',filename))
nx.write_gexf(G,filepath)
# Generate Source and Destination
distancelimit = 3 # distance in miles
lons = nx.get_node_attributes(G,'lon')
lats = nx.get_node_attributes(G,'lat')
nodesdist = 0
connected = False
while(nodesdist < distancelimit or not(connected)):
randomnodes = [choice(G.nodes()),choice(G.nodes())]
origin = randomnodes[0]
destination = randomnodes[1]
nodesdist = latlondist([lats[origin],lons[origin]],[lats[destination],lons[destination]]).miles
if nx.has_path(G,origin,destination):
connected = True
else:
connected = False
print('Source:',[lats[randomnodes[0]],lons[randomnodes[0]]])
print('Destination',[lats[randomnodes[1]],lons[randomnodes[1]]])
# Dijkstra's Shortest Path
path = nx.shortest_path(G,source = randomnodes[0],target = randomnodes[1],weight = 'weight')
# IV) Plot Network and Routes
routestyles = [{'color':' #ccffcc','width':12}] # greenish
zenMAX = max(nx.get_edge_attributes(G,'Zenness').values())
networkdisplay(G,routes=[path],graphstyle='RdYlBu_r',routestyles = routestyles,
weightstring='Zenness',maxValue=zenMAX, title='Example')
# Export Route
Features = []
for node in path:
Features.append(geojson.Feature(geometry=geojson.Point((lons[node], lats[node]))))
Collection = geojson.FeatureCollection(Features)
dump = geojson.dumps(Collection)
filename = "ShortestPath.txt"
filepath = os.path.abspath(os.path.join(cwd, '..', 'Project Data','Paths',filename))
text_file = open(filepath, "w")
text_file.write(dump)
text_file.close()
|
"""empty message
Revision ID: 430e1e04753b
Revises: e6d8ccbfb29d
Create Date: 2020-03-24 22:50:23.344042
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '430e1e04753b'
down_revision = 'e6d8ccbfb29d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('courseoffTest',
sa.Column('course_name', sa.String(length=256), nullable=False),
sa.Column('course_type', sa.String(length=256), nullable=False),
sa.Column('course_id', sa.Integer(), nullable=False),
sa.Column('time', sa.String(length=256), nullable=True),
sa.PrimaryKeyConstraint('course_name', 'course_type', 'course_id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('courseoffTest')
# ### end Alembic commands ###
|
class Stack:
def __init__(self):
self.__list = []
def __str__(self):
result = "Here is a stack: "
for item in self.__list:
result += str(item) + ", "
return result
def push(self, item):
self.__list.append(item)
    def pop(self):
        return self.__list.pop()
    def clear(self):
        self.__list.clear()
stack = Stack()
stack.push(1)
stack.push(2)
stack.push(3)
stack.push(4)
stack.push(5)
stack.pop()
stack.pop()
print(stack)
|
import os
import tensorflow as tf
import numpy as np
import preprocess_utils
def _preprocess_zero_mean_unit_range(inputs):
"""Map image values from [0, 255] to [-1, 1]."""
return (2.0 / 255.0) * tf.to_float(inputs) - 1.0
def preprocess_image(image,
crop_height,
crop_width,
min_resize_value=None,
max_resize_value=None,
resize_factor=None,
min_scale_factor=1.,
max_scale_factor=1.,
scale_factor_step_size=0,
is_training=True):
original_image = image
processed_image = tf.cast(image, tf.float32)
if (min_resize_value is not None or max_resize_value is not None):
[processed_image] = \
preprocess_utils.resize_to_range(
image=processed_image,
min_size=min_resize_value,
max_size=max_resize_value,
factor=resize_factor,
align_corners=True)
# The `original_image` becomes the resized image.
original_image = tf.identity(processed_image)
'''
# Data augmentation by randomly scaling the inputs.
scale = preprocess_utils.get_random_scale(
min_scale_factor, max_scale_factor, scale_factor_step_size)
processed_image = preprocess_utils.randomly_scale_image(
processed_image, scale)
processed_image.set_shape([None, None, 3])
# Pad image with mean pixel value.
if is_training:
# Pad image and label to have dimensions >= [crop_height, crop_width]
image_shape = tf.shape(processed_image)
image_height = image_shape[0] # vis 508
image_width = image_shape[1]
mean_pixel = tf.reshape([127.5, 127.5, 127.5], [1, 1, 3])
target_height = image_height + tf.maximum(crop_height - image_height, 0) # 448
target_width = image_width + tf.maximum(crop_width - image_width, 0) # 256
processed_image = preprocess_utils.pad_to_bounding_box(
processed_image, 0, 0, target_height, target_width, mean_pixel)
'''
# Randomly crop the image and label.
if is_training:
[processed_image] = preprocess_utils.random_crop(
[processed_image], crop_height, crop_width)
else:
processed_image = tf.image.resize_image_with_crop_or_pad(processed_image, crop_height, crop_width)
processed_image.set_shape([crop_height, crop_width, 3])
if is_training:
# Randomly left-right flip the image and label.
processed_image, _ = preprocess_utils.flip_dim(
[processed_image], 0.5, dim=1)
processed_image = _preprocess_zero_mean_unit_range(processed_image)
return processed_image
class ImageNetDataSet(object):
def __init__(self, data_dir, subset='train', use_distortion=True):
self.data_dir = data_dir
self.subset = subset
self.use_distortion = use_distortion
def get_filenames(self):
if self.subset in ['train', 'validation', 'eval']:
return [os.path.join(self.data_dir, self.subset + '.tfrecords')]
else:
raise ValueError('Invalid data subset "%s"' % self.subset)
def parser(self, serialized_example):
features = tf.parse_single_example(
serialized_example,
features={
'image': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.int64),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64)
})
image = tf.image.decode_jpeg(features['image'], channels=3)
height = tf.cast(features['height'], tf.int32)
width = tf.cast(features['width'], tf.int32)
image = tf.reshape(image, [height, width, 3])
label = tf.cast(features['label'], tf.int32)
image = tf.cast(image, tf.float32)
image = self.preprocess(image)
return image, label
def preprocess(self, image):
"""Preprocess a single image in [height, width, depth] layout."""
if self.subset == 'train' and self.use_distortion:
return preprocess_image(image, 224,224, 256,None,None,0.25,1,0,True)
else:
return preprocess_image(image, 224,224, 256,None,None,1.0,1.0,0,False)
def make_batch(self, batch_size):
"""Read the images and labels from 'filenames'."""
filenames = self.get_filenames()
# Repeat infinitely.
dataset = tf.data.TFRecordDataset(filenames)
# Parse records.
#dataset = dataset.map(
# self.parser, num_threads=batch_size, output_buffer_size=2 * batch_size)
dataset = dataset.map(self.parser, num_parallel_calls=batch_size)
dataset = dataset.repeat(None)
# Potentially shuffle records.
if self.subset == 'train':
min_queue_examples = int(
ImageNetDataSet.num_examples_per_epoch(self.subset) * 0.04)
# Ensure that the capacity is sufficiently large to provide good random
# shuffling.
#dataset = dataset.shuffle(buffer_size=min_queue_examples + 3 * batch_size)
dataset = dataset.shuffle(buffer_size=20480, reshuffle_each_iteration=True)
# Batch it up.
dataset = dataset.batch(batch_size)
iterator = dataset.make_one_shot_iterator()
image_batch, label_batch = iterator.get_next()
return image_batch, label_batch
@staticmethod
def num_examples_per_epoch(subset='train'):
if subset == 'train':
return 1281167
elif subset == 'validation':
return 50000
elif subset == 'eval':
return 50000
else:
raise ValueError('Invalid data subset "%s"' % subset)
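# A minimal TF1 graph-mode sketch of consuming this dataset (the tfrecords
# directory below is hypothetical; it must contain train.tfrecords):
if __name__ == '__main__':
    data = ImageNetDataSet('/path/to/tfrecords', subset='train')
    images, labels = data.make_batch(batch_size=64)
    with tf.Session() as sess:
        img_batch, lbl_batch = sess.run([images, labels])
        print(img_batch.shape, lbl_batch.shape)  # (64, 224, 224, 3) (64,)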
|
import turtle
import math
import random
bob = turtle.Turtle()
bob.speed(30)
turtle.getscreen().bgcolor("black")
turtle.hideturtle()
for i in range(100):
if i%2 == 0:
bob.hideturtle()
bob.circle(100)
bob.color("orange")
bob.left(25)
else:
bob.hideturtle()
bob.circle(-100)
bob.color("green")
bob.left(25)
turtle.done()
|
from flask import request, jsonify, make_response
from functools import wraps
from flask_restful import abort
from models import User
import jwt
import os
import sys
AUTH_ERROR_MESSAGE = "The server could not verify that you are authorized to access the URL requested. You either supplied the wrong credentials (e.g. a bad password), or your browser doesn't understand how to supply the credentials required."
def authenticate(function):
@wraps(function)
def wrapper(*args, **kwargs):
if request.headers.get('Authorization') is None:
abort(make_response(jsonify(error=AUTH_ERROR_MESSAGE), 401))
token = request.headers['Authorization'].split(',')[0]
try:
jwt_token = jwt.decode(token, os.environ['JWT_SECRET'], algorithms=['HS256'])
        except jwt.InvalidTokenError:
abort(make_response(jsonify(error=AUTH_ERROR_MESSAGE), 401))
user = User.query.filter_by(id=str(jwt_token['id'])).first()
if user is None:
abort(make_response(jsonify(error=AUTH_ERROR_MESSAGE), 401))
kwargs['user'] = user
return function(**kwargs)
return wrapper
def get_user(token: str):
try:
jwt_token = jwt.decode(token, os.environ['JWT_SECRET'], algorithms=['HS256'])
    except jwt.PyJWTError:  # invalid signature, expired token, malformed token
abort(make_response(jsonify(error=AUTH_ERROR_MESSAGE), 401))
user = User.query.filter_by(id=jwt_token['id']).first()
if user is None:
abort(make_response(jsonify(error=AUTH_ERROR_MESSAGE), 401))
return user
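# Hypothetical usage sketch (the Flask app and route are assumptions, not part
# of this module):
#   @app.route('/profile')
#   @authenticate
#   def profile(user):
#       return jsonify(id=user.id)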
|
# -*- coding: utf-8 -*-
import re
from typing import Iterable, Text
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import BaseHandler
import execjs
# noinspection PyProtectedMember
from bs4 import BeautifulSoup, SoupStrainer
from .base import FeedFetcher, Item
class IAppsFetcher(FeedFetcher):
DOMAIN = 'www.iapps.im'
FILTER = SoupStrainer('div', id='articleLeft')
def __init__(self):
super().__init__()
self.handler = BrowserHandler(self.DOMAIN)
self.fetcher.opener.add_handler(self.handler)
self.fetcher.browser = 'random'
self.fetcher.wait = 5
def fetch(self) -> Iterable[Item]:
try:
self.fetcher.fetch(self.url())
except HTTPError:
url = self.handler.url
if url:
self.fetcher.open(url).close()
finally:
self.handler.url = None
return super().fetch()
def url(self) -> Text:
return 'http://%s/feed' % self.DOMAIN
def description(self, url) -> Text:
data = ''
soup = self.fetcher.soup(url, parse_only=self.FILTER)
content = soup.find('div', 'entry-content')
a = content.find('a', 'chat-btn')
if a:
a.extract()
data += str(content)
carousel = soup.find('div', 'carousel')
if carousel:
data += str(carousel)
self.cache.set(url, data)
return data
# noinspection PyUnusedLocal
@staticmethod
def callback(result, item):
result['id'] = result['link'].split('/')[-1]
return True
class BrowserHandler(BaseHandler):
    """urllib handler for the site's JavaScript anti-bot check: on a 503
    response it evaluates the challenge script with execjs and stores the
    resulting verification URL in self.url for the caller to open."""
    handler_order = 999  # after all other processing
def __init__(self, domain):
self.domain = domain
self.url = None
def check(self, response):
soup = BeautifulSoup(response, 'lxml')
script = soup.find('script')
lines = ['function run() {', 'var a = {};']
for line in script.text.splitlines():
line = line.strip()
if re.match('^var [^a]', line):
lines.append(line)
elif line.startswith(';'):
lines.append('t = "%s";' % self.domain)
lines.append(line)
lines.append('return a.value;}')
script = '\n'.join(lines)
value = execjs.compile(script).call('run')
data = {}
form = soup.find('form')
for item in form.find_all('input'):
data[item['name']] = item.get('value', value)
return 'http://%s%s?%s' % (self.domain, form['action'], urlencode(data))
# noinspection PyUnusedLocal
def http_response(self, request, response):
if response.code == 503:
self.url = self.check(response)
return response
https_response = http_response
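# Minimal wiring sketch (standalone use outside FeedFetcher is an assumption):
#   from urllib.request import build_opener
#   opener = build_opener(BrowserHandler('www.iapps.im'))
#   opener.open('http://www.iapps.im/feed')  # a 503 populates handler.url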
|
from .bbox_3d import *
from .evaluation import *
|
# Reverse each line of input.txt and write the result to output.txt,
# e.g. "hello" becomes "olleh".
with open('input.txt', 'r') as input_object, \
        open('output.txt', 'w') as output_object:
    for line_str in input_object:
        line_str = line_str.strip()
        new_str = ''
        for char in line_str:
            new_str = char + new_str  # prepend each char to build the reversal
        print(new_str, file=output_object)
        print("Line: {:12s} reversed is: {:s}".format(line_str, new_str))
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 7 20:16:31 2018
@author: user
Matrix addition
"""
a=[]
b=[]
print("Enter matrix 1:")
for i in range(2):
a.append([])
for j in range(2):
print("[%d, %d]: " % (i+1, j+1), end = '')
a[i].append(int(input()))
print("Enter matrix 2:")
for i in range(2):
b.append([])
for j in range(2):
print("[%d, %d]: " % (i+1, j+1), end = '')
b[i].append(int(input()))
print("Matrix 1:")
for i in range(2):
for j in range(2):
print(a[i][j],end=" ")
print("")
print("Matrix 2:")
for i in range(2):
for j in range(2):
print(b[i][j],end=" ")
print("")
print("Sum of 2 matrices:")
a1=a[0][0]+b[0][0]
a2=a[0][1]+b[0][1]
a3=a[1][0]+b[1][0]
a4=a[1][1]+b[1][1]
print("{:} {:} ".format(a1,a2))
print("{:} {:} ".format(a3,a4))
|
a = [int(i) for i in input().split()]
b = int(input())
result = ''
for i, value in enumerate(a):
    if value == b:
        result += str(i) + " "
if result != '':
    print(result)
else:
    print("Not found")
|
print("Or "*100)
num1=28
print(num1)
num2=num1/2
print(num2)
kobi = [1,2,3]
for item in kobi:
    print(item)
|
a = [0,1,2,4,3]
# indexing
print(a[3])
# index
print(a.index(1))
# slice
print(a[1:3])
# append (note: '6' is the string '6', not the integer 6)
a.append('6')
print(a)
# insert
a.insert(0,'7')
print(a)
# del
del a[1]
print(a)
# remove
a.remove('6')
print(a)
# pop
b = a.pop(0)
print(a)
print(b)
# sort (works here because the string elements were removed above)
a.sort()
print(a)
# reverse
a.reverse()
print(a)
# count
print(a.count(2))
# clear
a.clear()
print(a)
# extend
a.extend([3,4])
print(a)
|
import json
import os.path
import secrets
from abc import ABC, abstractmethod
from csv import DictWriter
from datetime import datetime
from faker import Faker
from lib.common import FAKER_SEED
class FakeDataGenerator(ABC):
"""
Abstract Data Generator class, subclasses needs to implement
the generate_pipeline_row method specific for each customer.
"""
def __init__(self, number_records: int = 0):
self._records_count = number_records if number_records else 1
self._faker = Faker()
self._faker.random.seed(FAKER_SEED)
self._random = secrets.SystemRandom()
    @abstractmethod
    def generate_pipeline_row(self, row, file_size):
        pass
def generate_fake_data(self, file_type: str, path: str):
output_file = self.generate_output_file(path, self._records_count, file_type)
print(f"Start: {output_file}")
final_list = []
for i in range(self._records_count):
line = self.generate_pipeline_row(i, self._records_count)
final_list.append(line)
if file_type == "csv":
self.output_csv(final_list, output_file)
else:
self.output_json(final_list, output_file)
print(f"Done: {output_file}")
def generate_output_file(self, path: str, file_length: int, file_type: str) -> str:
pipeline = path.split('/')[-1]
file_name = f'{pipeline}_{file_length}.{file_type}'
output_file = ""
while not output_file:
# file = f'cloversub/{path}/input_files/{file_name}'
file = f'script/input_files/{file_name}'
if os.path.isfile(file):
print(f"File '{file}' already exists")
override = input("Do you want to override file (Y/N)? ")
if override.lower() in ("n", "no"):
file_name = input("Please give another file name: ")
else:
output_file = file
else:
output_file = file
return output_file
def output_json(self, final_list, output_file):
with open(output_file, "w") as handle:
for obj in final_list:
handle.write(json.dumps(obj))
handle.write('\n')
def output_csv(self, final_list, output_file):
with open(output_file, 'w') as handle:
writer = DictWriter(handle, fieldnames=final_list[0].keys())
writer.writeheader()
writer.writerows(final_list)
    def create_start_end_date(self):
        date = self._faker.date()  # 'YYYY-MM-DD' format
        year, month = int(date[:4]), int(date[5:7])
        if month < 12:
            end_date = f"{year}-{month + 1:02d}{date[7:]}"
        else:
            # Roll over into January of the next year.
            end_date = f"{year + 1}-01{date[7:]}"
        return date, end_date
def random_or_empty(self, element, empty=''):
return self._faker.random_element([element, empty])
def get_current_date(self):
return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
    def get_time_string(self):
        # Produce a valid HH:MM:SS time string (each field is zero-padded).
        return f"{self._random.randint(0, 23):02d}:{self._random.randint(0, 59):02d}:{self._random.randint(0, 59):02d}"
|
import datetime
import os
import random
import pandas as pd
random.seed(1)
import numpy as np
np.random.seed(1)
# import tensorflow as tf
# tf.random.set_random_seed(1)
from train_and_test import train_and_evaluate_dae_ff
# from train_fasttext import train_and_evaluate_fasttext
if __name__ == "__main__":
# results_file = "data/fasttext-abstract-time.tsv"
results_file = "data/dae-title-time.tsv"
result_dict = {}
# input_dir = "../CitationScreeningReplicability/data/processed/"
input_dir = "data/processed/"
    for infile in sorted(os.listdir(input_dir))[6:]:  # sort first: listdir order is arbitrary, so an unsorted slice is nondeterministic
print(infile)
if infile not in ["SkeletalMuscleRelaxants.tsv", "proton_beam.tsv"]:
continue
if infile[-3:] != "tsv":
continue
single_file_dict = train_and_evaluate_dae_ff(
input_data_file=f"{input_dir}/{infile}",
num_dae_epochs=150,
num_ff_epochs=100,
drop_out=0.7,
dae_minibatch=32,
ff_minibatch=128,
)
single_file_dict["date"] = datetime.datetime.now()
single_file_dict["pretrained"] = "no"
single_file_dict["model"] = "dae"
# single_dict = {
# "wss95": wss95,
# "wss100": wss100,
# "date": datetime.datetime.now(),
# "pretrained": "no",
# "model": "no undersampling, spacy tokenizer",
# }
result_dict[infile] = single_file_dict
if os.path.isfile(results_file):
df = pd.read_csv(results_file, sep="\t")
df = df.drop_duplicates()
        # DataFrame.append was removed in pandas 2.0; concat is the replacement.
        df = pd.concat(
            [df, pd.DataFrame.from_dict(result_dict).transpose().reset_index()],
            ignore_index=True,
        )
else:
df = pd.DataFrame.from_dict(result_dict).transpose().reset_index()
df.to_csv(results_file, sep="\t", index=False)
|
# -*- coding: utf-8 -*-
# Module author: @GovnoCodules
import requests
from .. import loader, utils
@loader.tds
class WeatherMod(loader.Module):
"""Weather Module"""
strings = {'name': 'Weather'}
    async def pwcmd(self, message):
        """Sends the weather as a picture.\nUsage: .pw <city>; or no argument."""
        args = utils.get_args_raw(message).replace(' ', '+')
        await message.edit("Fetching the weather...")
        city = requests.get(
            f"https://wttr.in/{args if args else ''}.png").content
        await message.client.send_file(message.to_id, city)
        await message.delete()
    async def awcmd(self, message):
        """Sends the weather as ASCII art.\nUsage: .aw <city>; or no argument."""
        city = utils.get_args_raw(message)
        await message.edit("Fetching the weather...")
        r = requests.get(
            f"https://wttr.in/{city if city else ''}?0?q?T&lang=ru")
        await message.edit(f"<code>City: {r.text}</code>")
    @loader.sudo
    async def wcmd(self, message):
        """.w <city>"""
        await message.edit("<b>Weather by wttr.in</b>")
        city = utils.get_args(message)
        msg = []
        if city:
            await message.edit("Processing the request...")
            for i in city:
                # wttr.in format codes: %l location, %c condition,
                # %t temperature, %w wind, %m moon phase
                r = requests.get(
                    "https://wttr.in/" + i + "?format=%l:+%c+%t,+%w+%m"
                )
                msg.append(r.text)
            await message.edit("".join(msg))
        else:
            await message.edit("Processing the request...")
            r = requests.get("https://wttr.in/?format=%l:+%c+%t,+%w+%m")
            await message.edit(r.text)
|
import matplotlib.pyplot as plt
import numpy as np

n1, n2, n10, n100 = np.loadtxt("standard.txt", usecols=(0, 1, 2, 3),
                               delimiter=' ', unpack=True)
n_bins = 50
# One histogram per column; only the x-range differs between the figures.
for data, hist_range in ((n1, (0, 1)), (n2, (0, 1)),
                         (n10, (0.1, 0.9)), (n100, (0.35, 0.65))):
    plt.figure()
    plt.hist(data, n_bins, range=hist_range)
    plt.xlabel('ciao')
    plt.ylabel('prova')
    plt.title('Histogram loaded from file!')
    plt.grid(True)
plt.show()
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from __future__ import division
import time
from inc import *
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
# TODO: add range-based alerting
# TODO: add failure handling
def SelectApplicationSql(module,today):
sql = "select sum(failureCount + successCount) as num from `avg_%s_%s`" % (module, today)
return sql
def UpdateApplicationSql(num,module):
sql = "update application set num = %s where name = '%s'" %(num,module)
#print sql
return sql
def TodayAvgFailSql(module,date,today):
sql = "select serviceInterface,method,failureCount,successCount from `avg_%s_%s` where failureCount > 0 and timestamp = '%s' order by id desc limit 1" % (module,today, date)
return sql
def UpdateApplication(module,today):
conn, cursor = Mysql()
cursor.execute(SelectApplicationSql(module,today))
res = cursor.fetchone()
#print res[0]
cursor.execute(UpdateApplicationSql(res[0],module))
conn.commit()
CloseMysql(conn, cursor)
def TodayAvgFail(module,date,today):
conn, cursor = Mysql()
cursor.execute(TodayAvgFailSql(module,date,today))
res = cursor.fetchall()
    if not res:  # fetchall() returns an empty sequence, never None
        return 0
    for row in res:
        serviceInterface, method, failureCount, successCount = row[0], row[1], row[2], row[3]
        per = round(failureCount * 100 / (failureCount + successCount), 2)
        print module, serviceInterface, method, failureCount, successCount, per
        if failureCount > 30:
            msg = 'Critical: [%s] %s: method %s of %s failed %s times' % (date, module, method, serviceInterface, failureCount)
        else:
            if per > 50:
                msg = 'Critical: [%s] %s: method %s of %s failed %s%%' % (date, module, method, serviceInterface, per)
            else:
                msg = 'Warning: [%s] %s: method %s of %s failed %s%%' % (date, module, method, serviceInterface, per)
        AlarmWeixin(msg)
CloseMysql(conn, cursor)
return 0
def loop(module,date,today):
TodayAvgFail(module, date, today)
def main():
date,today,timestamp = MinTime(300)
Modules = GetModules()
for m in Modules:
try:
UpdateApplication(m, today)
        except Exception:
            pass  # ignore per-module update failures and keep looping
#print m , date , today, timestamp
loop(m , date , today)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-24 05:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('msg', models.TextField(max_length=10000, verbose_name='Message')),
],
options={
                'verbose_name_plural': 'Messages',
                'verbose_name': 'Message',
},
),
migrations.CreateModel(
name='Theme',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200, verbose_name='Theme name')),
],
options={
                'verbose_name_plural': 'Themes',
                'verbose_name': 'Theme',
},
),
migrations.AddField(
model_name='message',
name='theme',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='iboard.Theme', verbose_name='Theme'),
),
]
|
import os
import re

# Placeholder: point this at the root music folder. The original script left
# `path` undefined, so it could not run as written.
path = '/path/to/music/library'
os.chdir(path)
for fo in os.listdir(): #get list of artist folders
if fo != '.DS_Store':
os.chdir(fo)
for al in os.listdir(): #for each artist folder, list of album folders
if al != '.DS_Store':
os.chdir(al)
for so in os.listdir(): #for each album, get names of files
                    ma = re.match('.*? - (.*)', so)
                    if ma:  # matches names like 'Artist - Song.mp3'
                        os.rename(so, ma.group(1))  # rename to 'Song.mp3'
os.chdir('..')
os.chdir('..')
|
from abc import abstractmethod
import copy
import pickle
import numpy as np
from scipy.stats import pearsonr
import matplotlib.pyplot as plt
import pandas as pd
from joblib import Parallel, delayed
from sklearn.model_selection import KFold, train_test_split
from sklearn.preprocessing import StandardScaler
class Model(object):
""" Modelクラス
X_obs, y_obs : 入力値そのまま
X, y : ndarrayになったデータフレーム
"""
def __init__(self):
self.layers = {}
self._n = 0
self._has_input = False
self._is_trained = False
def add_layer(self, layer):
if layer.layer_type == 'input':
assert not self._has_input, "Model already has Inputs Layer"
self._has_input = True
self.layers[self._n] = layer
self._n += 1
def train(self, X, y):
for idx, layer in self.layers.items():
layer.train(X, y)
X = layer.forward(X)
#: X, y are retained only for summary information
self.X = X
self.y = y
self._is_trained = True
def get_dataset(self):
if not self._is_trained:
print("Model is not already trained")
return
return self.X, self.y
def save(self, outpath):
with open(outpath, 'wb') as f:
pickle.dump(self, f)
def predict_proba(self, X):
for idx, layer in self.layers.items():
X = layer.forward(X)
return (X[:, 0], X[:, 1])
def predict(self, X):
for idx, layer in self.layers.items():
X = layer.forward(X)
return X[:, 0]
def score(self, X, y):
""" 入力も出力もリストに変換
"""
y_pred = list(self.predict(X).flatten())
return pearsonr(y_pred, list(y))[0]**2
def summary(self):
if not self._is_trained:
print("Model is not already trained")
return
info = []
info.append("__________"*6)
info.append(f"Input Dataset: X {self.X.shape}, y {self.y.shape}")
info.append("")
info.append("Layer(type)")
info.append("=========="*6)
for idx, layer in self.layers.items():
info.append(f"{idx}_{layer.name}({layer.layer_type})")
info.append(f"Output shape: {layer.outputs_shape}")
info.append(f"Description: {layer}")
info.append(" ")
info.append("=========="*6)
info.append(self.model_check())
info.append("__________"*6)
for line in info:
print(line)
def model_check(self):
layer_types = [layer.layer_type for layer in self.layers.values()]
if 'input' not in layer_types:
return "Invalid model structure: Missing Inputs layer"
if 'output' not in layer_types:
return "Invalid model structure: Missing Outputs layer"
if self.layers[self._n-1].outputs_shape[1] != 2:
return "Invalid outputs shape"
return "Model check: OK"
def valid(self, X, y, n=3, cv='KFold'):
"""
available cv : 'KFold' or 'random'
"""
if cv == 'random':
self.random_splitCV(X, y, n)
elif cv == 'KFold':
self.kfoldCV(X, y, n)
else:
            raise NotImplementedError(f"cv must be 'KFold' or 'random', got {cv!r}")
def random_splitCV(self, X, y, n):
test_size = 0.3
scores = []
y_train_preds = []
y_train_trues = []
y_test_preds = []
y_test_trues = []
for _ in range(n):
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=test_size)
self.train(X_train, y_train)
scores.append(self.score(X_test, y_test))
            y_train_preds += list(self.predict(X_train).reshape(1, -1)[0])
            y_train_trues += list(np.asarray(y_train).reshape(1, -1)[0])
            y_test_preds += list(self.predict(X_test).reshape(1, -1)[0])
            y_test_trues += list(np.asarray(y_test).reshape(1, -1)[0])
plt.scatter(y_train_trues, y_train_preds,
color='steelblue', alpha=0.7, label='Train')
plt.scatter(y_test_trues, y_test_preds,
color='darkred', alpha=0.7, label='Test')
plt.xlabel('y_obs')
plt.ylabel('y_pred')
plt.title(f'Train-Test-Split (test_size={test_size}) \
[R2={round(np.array(scores).mean(), 3)}]')
plt.legend()
plt.show()
def kfoldCV(self, X, y, n):
scores = []
y_train_preds = []
y_train_trues = []
y_test_preds = []
y_test_trues = []
kf = KFold(n_splits=n, shuffle=True)
for train, test in kf.split(X):
X_train, X_test = X.iloc[train], X.iloc[test]
y_train, y_test = y.iloc[train], y.iloc[test]
self.train(X_train, y_train)
scores.append(self.score(X_test, y_test))
            y_train_preds += list(self.predict(X_train).reshape(1, -1)[0])
            y_train_trues += list(np.asarray(y_train).reshape(1, -1)[0])
            y_test_preds += list(self.predict(X_test).reshape(1, -1)[0])
            y_test_trues += list(np.asarray(y_test).reshape(1, -1)[0])
plt.scatter(y_train_trues, y_train_preds,
color='steelblue', alpha=0.7, label='Train')
plt.scatter(y_test_trues, y_test_preds,
color='darkred', alpha=0.7, label='Test')
plt.xlabel('y_obs')
plt.ylabel('y_pred')
plt.title(f'Train-Test-Split (K={n}) \
[R2={round(np.array(scores).mean(), 3)}]')
plt.legend()
plt.show()
class Layer(object):
"""ベースレイヤクラス
train: モデルの構築
outputs: 予測値あるいはtransformの結果を返す
----------------------
Layer_type :'input'
self.X : ndarray
self.y : ndarray
"""
def __init__(self, name, layer_type):
self.name = name
self.layer_type = layer_type
if self.layer_type not in ['input', 'models', 'transformer', 'output']:
raise NotImplementedError('Invalid layer type')
self.outputs_shape = None
def __repr__(self):
return "No Desctiption"
@abstractmethod
def train(self, X, y):
""" Train
`input layer` :
check type and add outputs_shape
`models` :
train model and add outputs_shape
`transformer` :
check format and add outputs_shape
"""
raise NotImplementedError
def transform(self, X):
raise NotImplementedError
@abstractmethod
def forward(self, X):
""" Forward
`input layer` :
            check input type and shape, transform input to ndarray
        'transformer' :
            check input type and shape, transform input to ndarray
"""
raise NotImplementedError
class EnsembleBaseLayer(Layer):
def __init__(self, layer_name, layer_type, n_models,
row_ratio, col_ratio, scale):
super().__init__(layer_name, layer_type)
self.n_models = n_models
self.row_ratio = row_ratio
self.col_ratio = col_ratio
self.scale = scale
self.opt = None
def __repr__(self):
return ("Ensemble Base Layer")
def train(self, X, y):
self.model = self.get_basemodel()
self.model.fit(X, y)
self.outputs_shape = self.model.predict(X).shape
def forward(self, X):
return self.model.predict(X)
@abstractmethod
def get_basemodel(self):
""" Return basemodel
"""
raise NotImplementedError
class EnsembleBaseModel(object):
"""Ridge with Random Patches and Random Subspaces
抽象クラス:モデルの最小単位
"""
def __init__(self, n_models, col_ratio, row_ratio, scale):
self.n_models = n_models
self.col_size = col_ratio
self.row_size = row_ratio
self.scale = scale
def fit(self, X, y):
self.models = {}
self.scalers = {}
self.masks = {}
self.model_rprs(X, y)
def predict(self, X):
df_result = pd.DataFrame()
for i in range(self.n_models):
model = self.models[i]
mask = self.masks[i]
scaler = self.scalers[i]
X_ = X[:, mask]
if self.scale:
X_ = scaler.transform(X_)
y_pred = model.predict(X_)
df_result[i] = list(y_pred)
return df_result.values
def model_rprs(self, X, y):
results = Parallel(n_jobs=-1)(
[delayed(self._train)(X, y, i) for i in range(self.n_models)])
for i, (mask, model, scaler) in enumerate(results):
self.masks[i] = mask
self.models[i] = model
self.scalers[i] = scaler
    def _train(self, X, y, i):
        # Random Patches: subsample rows with row_size and features with col_size.
        sample_mask = [bool(np.random.binomial(1, self.row_size))
                       for _ in range(X.shape[0])]
        mask = [bool(np.random.binomial(1, self.col_size))
                for _ in range(X.shape[1])]
X_rprs = copy.deepcopy(X[:, mask][sample_mask])
if self.scale:
scaler = StandardScaler()
scaler.fit(X_rprs)
X_rprs = scaler.transform(X_rprs)
else:
scaler = None
y_rp = copy.deepcopy(y[sample_mask])
model = self.get_model()
model.fit(X_rprs, y_rp)
mask = copy.deepcopy(mask)
model = copy.deepcopy(model)
scaler = copy.deepcopy(scaler)
return mask, model, scaler
def get_model(self):
raise NotImplementedError
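# Hypothetical concrete subclass sketch (the Ridge choice follows the
# EnsembleBaseModel docstring; it is not defined in this module):
#   from sklearn.linear_model import Ridge
#
#   class RidgeEnsembleModel(EnsembleBaseModel):
#       def get_model(self):
#           return Ridge(alpha=1.0)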
|
#!/usr/bin/python
import time
import argparse
import requests
from prometheus_client import start_http_server
from prometheus_client.core import GaugeMetricFamily, REGISTRY
parser = argparse.ArgumentParser(description='K8S API Server exporter')
parser.add_argument('--master','-ip', type=str, help='K8S API Server IP', required=True)
parser.add_argument('--interval','-t',type=float, help='Interval between scrapes', required=True)
args = parser.parse_args()
class MicroServiceCollector(object):
def collect(self):
base_url = 'http://'+args.master+':8080'
yield GaugeMetricFamily('k8s_nodes', 'Total nodes in K8S cluster', value=getNodes(base_url))
yield GaugeMetricFamily('k8s_pods', 'Total pods in K8S cluster', value=getPods(base_url))
yield GaugeMetricFamily('k8s_running_pods' , 'Total pods in Running state' , value=totalRunningPods(base_url))
yield GaugeMetricFamily('k8s_rc', 'Total replication controllers in K8S cluster', value=getRCs(base_url))
yield GaugeMetricFamily('k8s_deployments', 'Total deployments in K8S cluster', value=getDeployments(base_url))
yield GaugeMetricFamily('k8s_version', 'Version of k8s cluster', value=getVersion(base_url))
        # Fetch the node list once instead of re-querying the API server for
        # every field of every node.
        node_url = base_url+'/api/v1/nodes'
        items = requests.get(node_url).json()['items']
        for item in items:
            ip = item['spec']['externalID']
            # Assumes the fixed condition order: OutOfDisk, MemoryPressure,
            # DiskPressure, Ready.
            conditions = item['status']['conditions']
            status = 1 if conditions[0]['status'] == 'False' else 0
            sufficient_disk_metric = GaugeMetricFamily('k8s_node_sufficient_disk', 'Disk Metrics', labels=['node'])
            sufficient_disk_metric.add_metric([ip], status)
            yield sufficient_disk_metric
            status = 1 if conditions[1]['status'] == 'False' else 0
            sufficient_memory_metric = GaugeMetricFamily('k8s_node_sufficient_memory', 'Node Memory Metrics', labels=['node'])
            sufficient_memory_metric.add_metric([ip], status)
            yield sufficient_memory_metric
            status = 1 if conditions[2]['status'] == 'False' else 0
            disk_pressure_metric = GaugeMetricFamily('k8s_node_disk_pressure', 'Node Disk Pressure Metric', labels=['node'])
            disk_pressure_metric.add_metric([ip], status)
            yield disk_pressure_metric
            status = 0 if conditions[3]['status'] == 'False' else 1
            node_ready_metric = GaugeMetricFamily('k8s_node_ready', 'Node Ready Metric', labels=['node'])
            node_ready_metric.add_metric([ip], status)
            yield node_ready_metric
def getNodes(base_url):
node_url = base_url+'/api/v1/nodes'
return len(requests.get(node_url).json()['items'])
def getDeployments(base_url):
dp_url = base_url+'/apis/extensions/v1beta1/deployments'
return len(requests.get(dp_url).json()['items'])
def getPods(base_url):
pod_url = base_url+'/api/v1/pods'
return len(requests.get(pod_url).json()['items'])
def totalRunningPods(base_url):
    pod_url = base_url+'/api/v1/pods'
    # Fetch the pod list once and count in memory.
    items = requests.get(pod_url).json()['items']
    count = 0
    for pod in items:
        if pod['status']['phase'] == 'Running':
            count += 1
    return count
def getRCs(base_url):
rc_url = base_url+'/api/v1/replicationcontrollers'
return len(requests.get(rc_url).json()['items'])
def getVersion(base_url):
    version_url = base_url+'/version'
    info = requests.get(version_url).json()  # fetch once, not twice
    # Some clusters report a minor version like '9+'; keep digits only so
    # float() does not raise.
    minor = ''.join(ch for ch in info['minor'] if ch.isdigit())
    return float(info['major']+'.'+minor)
if __name__ == "__main__":
REGISTRY.register(MicroServiceCollector())
start_http_server(9116)
while True: time.sleep(args.interval)
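# Usage sketch (the script filename is a placeholder; the flags come from the
# argparse definition above, and metrics are served on port 9116):
#   python exporter.py --master 10.0.0.1 --interval 15
#   curl http://localhost:9116/metrics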
|