blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
993fbbd0d87b184d4a9cc519c40fa9f0f4b4749b | 739e19aea52a747a818ccaa1e941f11328ca9783 | /DBMS_Simulate/json_test.py | e2b168bfa883fe178c75e7d3128857d072b3e6d3 | [] | no_license | MoCuishle28/python-practice | d12edb4866361f55354da53875475f05c209254c | cc557fcdd3fec2cb67efeb1f875b4d7d9d85b5b4 | refs/heads/master | 2020-03-28T03:52:36.060049 | 2019-01-19T11:53:15 | 2019-01-19T11:53:15 | 147,677,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | import json
from werkzeug.security import generate_password_hash, check_password_hash
# In-memory "user table": maps user name -> credentials record.
# The password is stored as a werkzeug hash, never as plain text.
user = {
    'root':{
        'password':generate_password_hash('123'),
        'permissions':'w-r-u'   # presumably write-read-update flags — TODO confirm
    },
}
# json_str = json.dumps(user)
# print(json_str)
# Read back an existing JSON file and dump its key/value pairs.
# NOTE(review): assumes test.json exists in the working directory,
# otherwise this raises FileNotFoundError.
with open('test.json', 'r') as f:
    data = json.load(f)
    print(data)
    for k,v in data.items():
        print(k, ' ', v)
# JSON file-related operations: persist the in-memory table to disk.
with open('user.json', 'w') as f:
    json.dump(user, f)
# with open('user.json', 'r') as f:
# 	data = json.load(f)
# 	print(data['root'])
# 	print(data['root']['password'])
# 	print(check_password_hash(data.get('root').get('password'), '123456'))
# 	print(data.get('root').get('permissions'))
| [
"20164706@s.hlju.edu.cn"
] | 20164706@s.hlju.edu.cn |
9dc2351568d50ed25f7911c779a43af5570c9354 | d007e87ae0e76035ddcb25959e74303c44fb2e5e | /service/server_v3.py | eff92d5e52ff6027076d5dc2f8d403c28e479659 | [] | no_license | nju-luke/Test | a52ac1390c241d42a950a5eea1175771259e87ba | a70754e7cc619ab6363a35c5940efd5f82f78e6e | refs/heads/master | 2020-07-12T22:56:02.517965 | 2017-09-28T06:31:15 | 2017-09-28T06:31:15 | 73,898,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,045 | py | # coding:utf-8
import textwrap
import tornado.ioloop
import tornado.httpserver
import tornado.web
import GetBrand
RequestHandler = tornado.web.RequestHandler
# test1 = GetBrand.GetBrand()
class MainHandler(RequestHandler):
    # GET /getBrands/<name>: resolve a brand via the GetBrand helper and
    # write the result back to the client.
    # def getBrand(self):
    #     return GetBrand.GetBrand().inBrands
    # Bound once at class-definition time, so a single shared GetBrand
    # instance serves every request — presumably intentional (caching);
    # confirm GetBrand.inBrands is safe to share across requests.
    getBrand = GetBrand.GetBrand().inBrands
    def get(self,name): ##post??????
        # `name` is captured by the URL regex (see the Application routes).
        name = self.getBrand(name)
        self.write(name)
class WrapHandler(tornado.web.RequestHandler):
    """POST /wrap: re-wrap the `text` argument to `width` columns."""

    def post(self):
        text = self.get_argument("text")
        # BUG FIX: get_argument() returns a *string* whenever the client
        # supplies the parameter; the original passed it straight to
        # textwrap.fill(), which needs an int width. Coerce explicitly —
        # the default 40 is only used when the argument is absent.
        width = int(self.get_argument("width", 40))
        self.write(textwrap.fill(text, width))
if __name__ == "__main__":
    # Route table: /getBrands/<name> -> MainHandler, /wrap -> WrapHandler.
    app = tornado.web.Application([tornado.web.url(r"/getBrands/(\w+)", MainHandler),
                                   tornado.web.url(r"/wrap", WrapHandler)
                                   ])
    # Serve on port 8888; start() blocks until the process is stopped.
    http_sever = tornado.httpserver.HTTPServer(app)
    http_sever.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
    # handle_request()
| [
"nju.hyhb@gmail.com"
] | nju.hyhb@gmail.com |
72c7d4ddf11b69872b6da5e79a8806b0914a3ab8 | ba5e590578a9be8f8942eade9a8466872bf4c2bb | /hezkuntza/__init__.py | e52340b819ca7e93a283445d2f7c4dadc819eacd | [] | no_license | babarlhr/hezkuntza_education-12.0 | e0c57285651659906fddf07701c60809f77fa9de | 6b0a75638a667961f7d3fabb294307ddc9a82dd5 | refs/heads/master | 2022-04-06T08:01:25.099591 | 2019-12-13T16:19:39 | 2019-12-13T16:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | # Copyright 2019 Oihane Crucelaegui - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import models
from . import wizard
| [
"rapidgrps@princegroup-bd.com"
] | rapidgrps@princegroup-bd.com |
2e2b84c7ab4ecc5e3e4b2d1618402b52b9907dbe | 257b9fb0d63582e100061d606087d66b4c5547a7 | /tests/Demo_Matplotlib_Animated_FuncAnimation.py | 17cb7c7cb0a2dc58c412e0d90b0d238b578dfbd9 | [
"MIT"
] | permissive | Venryx/RngKitPSG | b3c40e014d6acdb929bbdfe577f12fd37280f39b | a7488571ef76b8493d0263a43b6a29b2aabb3d30 | refs/heads/master | 2023-03-10T20:05:58.320806 | 2021-02-24T15:13:47 | 2021-02-24T15:13:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,082 | py | import PySimpleGUI as sg
import matplotlib.pyplot as plt
from matplotlib import style
import matplotlib.animation as animation
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from random import randint
import time
import threading
# Usage of MatPlotLib with matplotlib.animation for better performance
def main():
    """Build the PySimpleGUI window, embed a matplotlib figure in its
    canvas, and run the event loop that starts/stops the plot worker."""
    sg.theme('DarkBlue14')
    layout = [[sg.Text('Live Plot Matplotlib - FuncAnimation')],
              [sg.Canvas(k="-CANVAS-")],
              [sg.Button('Start'), sg.Button('Stop'), sg.Exit()]]
    window = sg.Window('Live Plot Matplotlib - FuncAnimation', layout, size=(640, 580),
                       location=(50, 50), finalize=True, element_justification="center", font="Calibri 18",
                       resizable=True)
    canvas_elem = window['-CANVAS-']
    canvas = canvas_elem.TKCanvas
    style.use("ggplot")
    # The axes and the data series are shared with animate() and
    # live_plotting() through module-level globals.
    global ax
    f, ax = plt.subplots(figsize=(10, 4.4), dpi=100)
    # Embed the matplotlib figure into the window's Tk canvas.
    canvas = FigureCanvasTkAgg(f, canvas)
    canvas.draw()
    canvas.get_tk_widget().pack(side='top', fill='both', expand=1)
    global xar
    global yar
    xar = [1, 2, 3, 4]
    yar = [10, 5, 3, 5]
    # Redraw via animate() once per second on the GUI thread.
    ani = animation.FuncAnimation(f, animate, interval=1000)
    while True:  # The Event Loop
        event, values = window.read()
        if event == sg.WIN_CLOSED or event == 'Exit':
            break
        elif event == "Start":
            # Launch a daemon worker that appends one sample per second.
            global thread
            thread = True
            x = threading.Thread(target=live_plotting, daemon=True)
            x.start()
        elif event == "Stop":
            # Clearing the flag makes live_plotting() exit its loop.
            thread = False
    window.close()
def animate(i):
    """Redraw callback for matplotlib.animation.FuncAnimation.

    Clears the shared axes and re-plots the module-level x/y series,
    then restores the title and axis labels that clear() wipes out.
    """
    global ax, xar, yar
    axes = ax
    axes.clear()
    axes.plot(xar, yar, color='orange')
    axes.set_title("Live Plot")
    axes.set_xlabel('X-Label', fontsize=10)
    axes.set_ylabel('Y-Label', fontsize='medium')
def live_plotting():
    """Worker-thread loop: extend the plotted series with one random
    sample per second until the module-level `thread` flag is cleared."""
    global xar, yar, thread
    while thread:
        xar.append(xar[-1] + 1)      # next x is the previous x + 1
        yar.append(randint(0, 10))   # random y sample in [0, 10]
        time.sleep(1)
if __name__ == '__main__':
main() | [
"tjm.plastica@gmail.com"
] | tjm.plastica@gmail.com |
81cf9d1643a136c21e64d24c6ef75a20c68454d6 | b87e9a1e5e3955c4ab16ae9498535f480fc3adbc | /models/__init__.py | 46a6effb3c035f5a7900b57bd3e0c95f9218c196 | [] | no_license | jarchv/ratio-log | 715019f6dc52d2d7bc089e5239ae7440bf19754f | 6e76e8786076a3c7e866e35823606865f68fe6bb | refs/heads/master | 2023-02-27T00:55:06.656167 | 2021-01-25T18:45:40 | 2021-01-25T18:45:40 | 292,714,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | from .models import *
from .nets import *
from .vgg import *
| [
"jose.chavez.alvarez@ucsp.edu.pe"
] | jose.chavez.alvarez@ucsp.edu.pe |
f395640444a4c3fc16625ab2f978c29b9f8d8e15 | 5cecdaaf03aa5b3fb2f8ae0332329a6b67c5df71 | /datestampergui.py | bbfa92bf78aaee8ffdd71befb190c7269a609d4e | [] | no_license | nuuk/tkinterPractice | 7e2e4c421edcc473f2df6f50c6fdb090d71273b5 | 40c51d3589b19129eeae77a5326d1453339448ea | refs/heads/master | 2021-01-01T19:21:09.695534 | 2014-03-28T03:53:17 | 2014-03-28T03:53:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,832 | py | #!/usr/bin/python
# -*- coding utf-8 -*-
# print("Hello World")
# """
# ZetCode Tkinter tutorial
# This script shows a simple window
# on the screen.
# autho: Jan Bodnar
# last modified: January 2011
# website: www.zetcode.com
# """
# from Tkinter import Tk, Frame, BOTH
# class Example(Frame):
# def __init__(self, parent):
# Frame.__init__(self, parent, background="white")
# self.parent = parent
# self.initUI()
# def initUI(self):
# self.parent.title("Date Stamper")
# self.pack(fill=BOTH, expand=1)
# def main():
# root = Tk()
# root.geometry("1000x400+300+300")
# app = Example(root)
# root.mainloop()
# if __name__== '__main__':
# main()
# import Tkinter, tkFileDialog, Tkconstants
# from Tkinter import *
# dirtext='Select your pictures folder'
# filetext= 'Select your watermark file'
# def openFile():
# filename = tkFileDialog.askopenfilename(parent=root,initialdir='/home/',title=filetext , filetypes=[('image files', '.png')]) ## filename not filehandle
# filebut["text"]= str(filename) if filename else filetext
# def openDirectory():
# dirname = tkFileDialog.askdirectory(parent=root, initialdir='/home/', title=dirtext)
# dirbut["text"] = str(dirname) if dirname else dirtext
# root = Tk()
# root.title('Watermark Image Processing 1.0b')
# #Options for buttons
# button_opt = {'fill': Tkconstants.BOTH, 'padx': 5, 'pady': 5}
# #Define asking directory button
# dirbut= Button(root, text = dirtext, fg = 'black', command= openDirectory)
# dirbut.pack(**button_opt) ## must pack separately to get the value to dirbut
# #Define asking watermark file button
# filebut = Button(root, text = filetext, fg = 'black', command= openFile)
# filebut.pack(**button_opt)
# root.mainloop()
"""
ZetCode Tkinter tutorial
This program draws three
rectangles filled with different
colors.
author: Jan Bodar
last modified: January 2011
website: www.zetcode.com
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Example(Frame):
    """Tkinter frame that paints three colored rectangles on a canvas
    (Python 2 — imports come from the `Tkinter` module)."""
    def __init__(self, parent):
        Frame.__init__(self, parent)
        self.parent = parent
        self.initUI()
    def initUI(self):
        """Set the window title and draw the three filled rectangles."""
        self.parent.title("Colors")
        # self.pack(fill=BOTH, expand=1)
        canvas = Canvas(self)
        # Each rectangle: (x1, y1, x2, y2) with matching outline/fill color.
        canvas.create_rectangle(30, 10, 120, 80,
            outline="#fb0", fill="#fb0")
        canvas.create_rectangle(150, 10, 240, 80,
            outline="#f50", fill="#f50")
        canvas.create_rectangle(270, 10, 370, 80,
            outline="#05f", fill="#05f")
        canvas.pack(fill=BOTH, expand=1)
def main():
    """Create the Tk root window, mount the Example frame, and block
    in the Tk main loop until the window is closed."""
    root = Tk()
    ex = Example(root)
    # 400x100 window placed at screen offset (300, 300).
    root.geometry("400x100+300+300")
    root.mainloop()

if __name__ == '__main__':
    main()
| [
"mitchell@Mitchells-MacBook-Air.local"
] | mitchell@Mitchells-MacBook-Air.local |
fe00a3e9a75c82c8b94feeb4790958a1ef14bdba | 881041fab1b4d05f1c5371efed2f9276037eb609 | /tasks/world-trade-center-wtc-patient-categories/depositor.py | 54c18eb67afad968051c189b011d8d76577e17a1 | [] | no_license | ResidentMario/urban-physiology-nyc-catalog | b568f3b6ee1a887a50c4df23c488f50c92e30625 | cefbc799f898f6cdf24d0a0ef6c9cd13c76fb05c | refs/heads/master | 2021-01-02T22:43:09.073952 | 2017-08-06T18:27:22 | 2017-08-06T18:27:22 | 99,377,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | import requests
# Download the raw "WTC Patient Categories" CSV from the NYC open-data portal.
r = requests.get("https://data.cityofnewyork.us/api/views/dgg9-jkx8/rows.csv?accessType=DOWNLOAD")
# NOTE(review): hard-coded absolute path (author's machine only) and no
# r.status_code check — an HTTP error body would be written verbatim.
with open("/home/alex/Desktop/urban-physiology-nyc-catalog/catalog/world-trade-center-wtc-patient-categories/data.csv", "wb") as f:
    f.write(r.content)
# Artifacts produced by this depositor — presumably read by the catalog
# tooling; confirm against the pipeline runner.
outputs = ["/home/alex/Desktop/urban-physiology-nyc-catalog/catalog/world-trade-center-wtc-patient-categories/data.csv"]
| [
"aleksey.bilogur@gmail.com"
] | aleksey.bilogur@gmail.com |
15b8c12e8874a215d641b7b3f45d0733887be3e4 | 275ac20e4fbd1ea75f938efa0135fabc132cc29a | /api_miasm/mqrt.dll.py | f7cd33b7b0ae9f67371c83a804a42e35938b3156 | [] | no_license | commial/temp | 67428ff12882cd478c1468a34527b15cb73e33e6 | 34f45aad07777c14778d6df747ae6ce20b61b130 | refs/heads/master | 2023-05-11T11:08:59.055588 | 2023-04-30T14:27:17 | 2023-04-30T14:27:17 | 88,650,298 | 2 | 2 | null | 2019-11-15T15:37:07 | 2017-04-18T17:05:35 | HTML | UTF-8 | Python | false | false | 14,807 | py | ###### Enums ######
###################
###### Types ######
QUEUEHANDLE = HANDLE
QUEUEHANDLE_PTR = Ptr("<I", QUEUEHANDLE())
QUEUEPROPID = PROPID
QUEUEPROPID_PTR = Ptr("<I", QUEUEPROPID())
QMPROPID = PROPID
QMPROPID_PTR = Ptr("<I", QMPROPID())
MGMTPROPID = PROPID
MGMTPROPID_PTR = Ptr("<I", MGMTPROPID())
MSGPROPID = PROPID
MSGPROPID_PTR = Ptr("<I", MSGPROPID())
PMQRECEIVECALLBACK = LPVOID
MQPROPVARIANT = PROPVARIANT
MQPROPVARIANT_PTR = Ptr("<I", MQPROPVARIANT())
MQPROPVARIANT__ = Ptr("<I", MQPROPVARIANT())
_MQPR_Rel_ = ULONG
class MQPROPERTYRESTRICTION(MemStruct):
    # Memory layout mirroring the Win32 MQPROPERTYRESTRICTION struct
    # (a single property predicate used by the MSMQ lookup APIs below).
    fields = [
        ("rel", _MQPR_Rel_()),        # comparison relation (ULONG)
        ("prop", PROPID()),           # property identifier being tested
        ("prval", MQPROPVARIANT()),   # value compared against — assumed; confirm with MSMQ docs
    ]
MQPROPERTYRESTRICTION_PTR = Ptr("<I", MQPROPERTYRESTRICTION())
class MQQUEUEPROPS(MemStruct):
fields = [
("cProp", DWORD()),
("aPropID", QUEUEPROPID_PTR()),
("aPropVar", MQPROPVARIANT_PTR()),
("aStatus", HRESULT_PTR()),
]
MQQUEUEPROPS_PTR = Ptr("<I", MQQUEUEPROPS())
class MQQMPROPS(MemStruct):
fields = [
("cProp", DWORD()),
("aPropID", QMPROPID_PTR()),
("aPropVar", MQPROPVARIANT_PTR()),
("aStatus", HRESULT_PTR()),
]
MQQMPROPS_PTR = Ptr("<I", MQQMPROPS())
class MQPRIVATEPROPS(MemStruct):
fields = [
("cProp", DWORD()),
("aPropID", QMPROPID_PTR()),
("aPropVar", MQPROPVARIANT_PTR()),
("aStatus", HRESULT_PTR()),
]
MQPRIVATEPROPS_PTR = Ptr("<I", MQPRIVATEPROPS())
class MQRESTRICTION(MemStruct):
fields = [
("cRes", ULONG()),
("paPropRes", MQPROPERTYRESTRICTION_PTR()),
]
MQRESTRICTION_PTR = Ptr("<I", MQRESTRICTION())
class MQCOLUMNSET(MemStruct):
fields = [
("cCol", ULONG()),
("aCol", PROPID_PTR()),
]
MQCOLUMNSET_PTR = Ptr("<I", MQCOLUMNSET())
class MQSORTKEY(MemStruct):
fields = [
("propColumn", PROPID()),
("dwOrder", ULONG()),
]
MQSORTKEY_PTR = Ptr("<I", MQSORTKEY())
class MQSORTSET(MemStruct):
fields = [
("cCol", ULONG()),
("aCol", MQSORTKEY_PTR()),
]
MQSORTSET_PTR = Ptr("<I", MQSORTSET())
class MQMGMTPROPS(MemStruct):
fields = [
("cProp", DWORD()),
("aPropID", MGMTPROPID_PTR()),
("aPropVar", MQPROPVARIANT_PTR()),
("aStatus", HRESULT_PTR()),
]
MQMGMTPROPS_PTR = Ptr("<I", MQMGMTPROPS())
class MQMSGPROPS(MemStruct):
fields = [
("cProp", DWORD()),
("aPropID", MSGPROPID_PTR()),
("aPropVar", MQPROPVARIANT_PTR()),
("aStatus", HRESULT_PTR()),
]
MQMSGPROPS_PTR = Ptr("<I", MQMSGPROPS())
###################
###### Functions ######
def mqrt_MQADsPathToFormatName(jitter):
"""
HRESULT MQADsPathToFormatName(
LPCWSTR lpwcsADsPath
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsADsPath"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQBeginTransaction(jitter):
    """
    HRESULT MQBeginTransaction(
        ITransaction** ppTransaction
    )
    """
    # Pop the stdcall return address and the single pointer argument
    # from the emulated stack.
    ret_ad, args = jitter.func_args_stdcall(["ppTransaction"])
    # Stub only: abort emulation if the sample actually calls this API.
    raise RuntimeError('API not implemented')
    # Unreachable template code (`ret_value` is never defined); left in
    # place for whoever implements the stub for real.
    jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQCloseCursor(jitter):
"""
HRESULT MQCloseCursor(
HANDLE hCursor
)
"""
ret_ad, args = jitter.func_args_stdcall(["hCursor"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQCloseQueue(jitter):
"""
HRESULT MQCloseQueue(
QUEUEHANDLE hQueue
)
"""
ret_ad, args = jitter.func_args_stdcall(["hQueue"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQCreateCursor(jitter):
"""
HRESULT MQCreateCursor(
QUEUEHANDLE hQueue,
PHANDLE phCursor
)
"""
ret_ad, args = jitter.func_args_stdcall(["hQueue", "phCursor"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQCreateQueue(jitter):
"""
HRESULT MQCreateQueue(
PSECURITY_DESCRIPTOR pSecurityDescriptor,
MQQUEUEPROPS* pQueueProps,
LPWSTR lpwcsFormatName,
LPDWORD lpdwFormatNameLength
)
"""
ret_ad, args = jitter.func_args_stdcall(["pSecurityDescriptor", "pQueueProps", "lpwcsFormatName", "lpdwFormatNameLength"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQDeleteQueue(jitter):
"""
HRESULT MQDeleteQueue(
LPCWSTR lpwcsFormatName
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQFreeMemory(jitter):
"""
VOID MQFreeMemory(
PVOID pvMemory
)
"""
ret_ad, args = jitter.func_args_stdcall(["pvMemory"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQFreeSecurityContext(jitter):
"""
VOID MQFreeSecurityContext(
HANDLE hSecurityContext
)
"""
ret_ad, args = jitter.func_args_stdcall(["hSecurityContext"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetMachineProperties(jitter):
"""
HRESULT MQGetMachineProperties(
LPCWSTR lpwcsMachineName,
const GUID* pguidMachineID,
MQQMPROPS* pQMProps
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsMachineName", "pguidMachineID", "pQMProps"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetOverlappedResult(jitter):
"""
HRESULT MQGetOverlappedResult(
LPOVERLAPPED lpOverlapped
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpOverlapped"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetPrivateComputerInformation(jitter):
"""
HRESULT MQGetPrivateComputerInformation(
LPCWSTR lpwcsComputerName,
MQPRIVATEPROPS* pPrivateProps
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsComputerName", "pPrivateProps"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetQueueProperties(jitter):
"""
HRESULT MQGetQueueProperties(
LPCWSTR lpwcsFormatName,
MQQUEUEPROPS* pQueueProps
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName", "pQueueProps"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetQueueSecurity(jitter):
"""
HRESULT MQGetQueueSecurity(
LPCWSTR lpwcsFormatName,
SECURITY_INFORMATION SecurityInformation,
PSECURITY_DESCRIPTOR pSecurityDescriptor,
DWORD nLength,
LPDWORD lpnLengthNeeded
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName", "SecurityInformation", "pSecurityDescriptor", "nLength", "lpnLengthNeeded"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetSecurityContext(jitter):
"""
HRESULT MQGetSecurityContext(
LPVOID lpCertBuffer,
DWORD dwCertBufferLength,
HANDLE* phSecurityContext
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpCertBuffer", "dwCertBufferLength", "phSecurityContext"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQGetSecurityContextEx(jitter):
"""
HRESULT MQGetSecurityContextEx(
LPVOID lpCertBuffer,
DWORD dwCertBufferLength,
HANDLE* phSecurityContext
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpCertBuffer", "dwCertBufferLength", "phSecurityContext"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQHandleToFormatName(jitter):
"""
HRESULT MQHandleToFormatName(
QUEUEHANDLE hQueue,
LPWSTR lpwcsFormatName,
LPDWORD lpdwCount
)
"""
ret_ad, args = jitter.func_args_stdcall(["hQueue", "lpwcsFormatName", "lpdwCount"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQInstanceToFormatName(jitter):
"""
HRESULT MQInstanceToFormatName(
GUID* pGUID,
LPWSTR lpwcsFormatName,
LPDWORD lpdwCount
)
"""
ret_ad, args = jitter.func_args_stdcall(["pGUID", "lpwcsFormatName", "lpdwCount"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQLocateBegin(jitter):
"""
HRESULT MQLocateBegin(
LPCWSTR lpwcsContext,
MQRESTRICTION* pRestriction,
MQCOLUMNSET* pColumns,
MQSORTSET* pSort,
PHANDLE phEnum
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsContext", "pRestriction", "pColumns", "pSort", "phEnum"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQLocateEnd(jitter):
"""
HRESULT MQLocateEnd(
HANDLE hEnum
)
"""
ret_ad, args = jitter.func_args_stdcall(["hEnum"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQLocateNext(jitter):
"""
HRESULT MQLocateNext(
HANDLE hEnum,
DWORD* pcProps,
MQPROPVARIANT[] aPropVar
)
"""
ret_ad, args = jitter.func_args_stdcall(["hEnum", "pcProps", "aPropVar"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQMarkMessageRejected(jitter):
"""
HRESULT MQMarkMessageRejected(
QUEUEHANDLE hQueue,
ULONGLONG ullLookupId
)
"""
ret_ad, args = jitter.func_args_stdcall(["hQueue", "ullLookupId"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQMgmtAction(jitter):
"""
HRESULT MQMgmtAction(
LPCWSTR pMachineName,
LPCWSTR pObjectName,
LPCWSTR pAction
)
"""
ret_ad, args = jitter.func_args_stdcall(["pMachineName", "pObjectName", "pAction"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQMgmtGetInfo(jitter):
"""
HRESULT MQMgmtGetInfo(
LPCWSTR pMachineName,
LPCWSTR pObjectName,
MQMGMTPROPS* pMgmtProps
)
"""
ret_ad, args = jitter.func_args_stdcall(["pMachineName", "pObjectName", "pMgmtProps"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQMoveMessage(jitter):
"""
HRESULT MQMoveMessage(
QUEUEHANDLE sourceQueue,
QUEUEHANDLE targetQueue,
ULONGLONG lookupID
)
"""
ret_ad, args = jitter.func_args_stdcall(["sourceQueue", "targetQueue", "lookupID"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQOpenQueue(jitter):
"""
HRESULT MQOpenQueue(
LPCWSTR lpwcsFormatName,
DWORD dwAccess,
DWORD dwShareMode,
QUEUEHANDLE* phQueue
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName", "dwAccess", "dwShareMode", "phQueue"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQPathNameToFormatName(jitter):
"""
HRESULT MQPathNameToFormatName(
LPCWSTR lpwcsPathName,
LPWSTR lpwcsFormatName,
LPDWORD lpdwCount
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsPathName", "lpwcsFormatName", "lpdwCount"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQPurgeQueue(jitter):
"""
HRESULT MQPurgeQueue(
HANDLE hQueue
)
"""
ret_ad, args = jitter.func_args_stdcall(["hQueue"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQReceiveMessage(jitter):
"""
HRESULT MQReceiveMessage(
QUEUEHANDLE hSource,
DWORD dwTimeout,
DWORD dwAction,
MQMSGPROPS* pMessageProps,
LPOVERLAPPED lpOverlapped,
PMQRECEIVECALLBACK fnReceiveCallback,
HANDLE hCursor,
ITransaction* pTransaction
)
"""
ret_ad, args = jitter.func_args_stdcall(["hSource", "dwTimeout", "dwAction", "pMessageProps", "lpOverlapped", "fnReceiveCallback", "hCursor", "pTransaction"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQReceiveMessageByLookupId(jitter):
"""
HRESULT MQReceiveMessageByLookupId(
QUEUEHANDLE hSource,
ULONGLONG ullLookupId,
DWORD dwLookupAction,
MQMSGPROPS* pMessageProps,
LPOVERLAPPED lpOverlapped,
PMQRECEIVECALLBACK fnReceiveCallback,
ITransaction* pTransaction
)
"""
ret_ad, args = jitter.func_args_stdcall(["hSource", "ullLookupId", "dwLookupAction", "pMessageProps", "lpOverlapped", "fnReceiveCallback", "pTransaction"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQRegisterCertificate(jitter):
"""
HRESULT MQRegisterCertificate(
DWORD dwFlags,
PVOID lpCertBuffer,
DWORD dwCertBufferLength
)
"""
ret_ad, args = jitter.func_args_stdcall(["dwFlags", "lpCertBuffer", "dwCertBufferLength"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQSendMessage(jitter):
"""
HRESULT MQSendMessage(
QUEUEHANDLE hDestinationQueue,
MQMSGPROPS* pMessageProps,
ITransaction* pTransaction
)
"""
ret_ad, args = jitter.func_args_stdcall(["hDestinationQueue", "pMessageProps", "pTransaction"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQSetQueueProperties(jitter):
"""
HRESULT MQSetQueueProperties(
LPCWSTR lpwcsFormatName,
MQQUEUEPROPS* pQueueProps
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName", "pQueueProps"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
def mqrt_MQSetQueueSecurity(jitter):
"""
HRESULT MQSetQueueSecurity(
LPCWSTR lpwcsFormatName,
SECURITY_INFORMATION SecurityInformation,
PSECURITY_DESCRIPTOR pSecurityDescriptor
)
"""
ret_ad, args = jitter.func_args_stdcall(["lpwcsFormatName", "SecurityInformation", "pSecurityDescriptor"])
raise RuntimeError('API not implemented')
jitter.func_ret_stdcall(ret_ad, ret_value)
| [
"commial@gmail.com"
] | commial@gmail.com |
0ca7552d269d0a2a17809b625f16696d6daae972 | 31e9ae0b5431fdb643f228713001d052e93b303d | /flask_unchained/bundles/sqlalchemy/alembic/templates/flask/env.py | 9d871918db94256ce571d7028b0989212bd24dc8 | [
"MIT"
] | permissive | briancappello/flask-unchained | b35215b6e150febf8d00fd0164b49d355eae66e6 | a1f1323f63f59760e430001efef43af9b829ebed | refs/heads/master | 2023-05-13T09:11:14.745523 | 2022-04-03T00:27:09 | 2022-04-03T00:27:09 | 115,440,840 | 77 | 9 | MIT | 2023-05-01T23:40:25 | 2017-12-26T17:13:24 | Python | UTF-8 | Python | false | false | 3,111 | py | import logging
import os
from alembic import context
from flask import current_app
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
# make sure the migrations versions directory exists
versions_dir = os.path.join(current_app.root_path,
current_app.config.ALEMBIC['script_location'],
'versions')
os.makedirs(versions_dir, exist_ok=True)
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url',
current_app.config.SQLALCHEMY_DATABASE_URI)
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just the database URL — no
    Engine (and hence no DBAPI) is required. context.execute() calls
    emit the generated SQL to the script output instead of a live
    connection.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.
    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        # Only intervene on `alembic revision --autogenerate` runs.
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # Emptying the directive list suppresses the revision file.
                directives[:] = []
                logger.info('No changes in schema detected.')
    # Build the engine from the ini section; NullPool because a migration
    # run needs exactly one short-lived connection.
    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)
    connection = engine.connect()
    # Flask-Migrate contributes extra configure kwargs through the
    # extension registry — assumed; confirm against Flask-Migrate docs.
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the DB connection, even if a migration fails.
        connection.close()
# Alembic entry point: offline mode emits SQL to the script output,
# online mode executes migrations over a live database connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
| [
"briancappello@gmail.com"
] | briancappello@gmail.com |
dd4be1f4a065e6792a0ca776169bae3c74ff8122 | 5b6c580667899c760caec3c2abc29ab69a90570a | /Examples/magnetTest.py | db9cc35e482d0cec5f7e085d7956f74a7b241a69 | [] | no_license | jonathanrichardl/micropython-GY85 | 06259e28220abec5189f19d890d02b367d9ffa74 | 6be38dd4d9874264d5faa4fc157b653c2257b88a | refs/heads/main | 2023-06-25T15:11:08.908092 | 2021-07-30T08:16:56 | 2021-07-30T08:16:56 | 376,460,231 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | from GY85 import GY85
import machine
import utime
# I2C wiring for the GY-85 breakout (pin numbers of the host board).
sclPin = 1
sdaPin = 0
i2cid = 0
magnet = GY85(scl = sclPin, sda = sdaPin, i2cid = i2cid, acc = False, magnet = True) # Magnetometer only mode, magnetometer and accelerometer configuration skipped
# Poll the magnetometer once per second until Ctrl-C, then release the bus.
try:
    while True:
        try:
            buffer = magnet.read_magnet()
            print("Magnet Value x = " + str(buffer[0])+", y = " + str(buffer[1])+", z = " + str(buffer[2]))
            utime.sleep(1)
        except:
            # NOTE(review): bare except retries after 10 s on *any* error,
            # silently swallowing failures unrelated to the sensor read.
            utime.sleep(10)
            pass
except KeyboardInterrupt:
    magnet.deinit()
| [
"noreply@github.com"
] | jonathanrichardl.noreply@github.com |
f88c17790331f692ffe96218dd5535bfa08275cc | 45e66074c81f422981b8dfa0848df44a86878b2e | /module1_13/module7_hw/task_5.py | a152f499c72a31012a1e08389d67fd65a58c27f8 | [] | no_license | aliakseik1993/skillbox_python_basic | 1f39644eff6ade4774e52503f88f5a190ef6e96a | be6c846c6b73b7054055808314d9bdaddc396a54 | refs/heads/main | 2023-06-03T03:09:38.169936 | 2021-06-14T11:34:10 | 2021-06-14T11:34:10 | 359,185,639 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 878 | py | print('Задача 5. Факториал')
# Мы всё ближе и ближе подбираемся к серьёзной математике.
# Одна из классических задач - задача на нахождение факториала числа.
# И в будущем мы с ней ещё встретимся.
#
# Дано натуральное число N. Напишите программу, которая находит N! (N факториал)
#
# Запись N! означает следующее:
#
# N! = 1 * 2 * 3 * 4 * 5 * … * N
#
# Пример:
#
# Введите число: 5
# Факториал числа 5 равен 120
def factorial(n):
    """Return n! computed iteratively (returns 1 for n <= 1)."""
    product = 1
    for factor in range(2, n + 1):
        product *= factor
    return product


if __name__ == "__main__":
    # Interactive entry point; prompt/output strings are kept exactly as
    # in the original exercise. The __main__ guard makes the module safe
    # to import (and to test) without blocking on input().
    number_int = int(input("Введите число: "))
    print("Факториал числа", number_int, "равен", factorial(number_int))
"koran.aliaksei@yandex.by"
] | koran.aliaksei@yandex.by |
d73b98a9fe32010f070208fdab50f093b7fc9de3 | 0f1443c6dad19a22990a2f74f21870f745667866 | /mlr.py | adf5626c2d148cc36a8fafab403ffe6e46edc2de | [
"Apache-2.0"
] | permissive | avinashshenoy97/bitcoin_predictor | 0effba304d9f768530e81751078d0c6fb4b9c275 | 5c2f9b501fcbfbe474387e4a33957279467392ad | refs/heads/master | 2021-04-25T15:56:31.713686 | 2018-01-13T05:35:45 | 2018-01-13T05:35:45 | 109,599,966 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,553 | py | '''
Using Multiple Linear Regression to predict the next day's closing values of Bitcoin's market price in USD.
'''
import pandas, sklearn, numpy
from sklearn import linear_model
from init import *
ipdata = init()
def correlation(dataset_arg, threshold, toprint = False):
    """Return a deep copy of `dataset_arg` with highly correlated columns removed.

    Walks the lower triangle of the correlation matrix (computed once,
    before any deletion); whenever corr(col_i, col_j) >= threshold for
    some j < i, column i is dropped from the copy. The input frame
    itself is never modified. When `toprint` is set, the pruned frame
    is printed before being returned.
    """
    pruned = dataset_arg.copy(deep = True)
    corr_matrix = pruned.corr()
    columns = corr_matrix.columns
    dropped = set()                    # names of the columns removed so far
    for i in range(len(columns)):
        for j in range(i):             # lower triangle only
            if corr_matrix.iloc[i, j] >= threshold:
                candidate = columns[i]
                dropped.add(candidate)
                if candidate in pruned.columns:
                    del pruned[candidate]
    if toprint:
        print(pruned)
    return pruned
# Drop columns with correlation > 0.75
# Drops 10 columns of 25
# Columns = ['Date', 'btc_market_price', 'btc_total_bitcoins', 'btc_trade_volume', 'btc_n_orphaned_blocks', 'btc_median_confirmation_time', 'btc_cost_per_transaction_percent', 'btc_cost_per_transaction', 'btc_output_volume', 'btc_estimated_transaction_volume']
# Prune highly correlated predictors; 'next' (next day's close) is the target.
data = correlation(ipdata, 0.75)
# Predictor variables
df = data.copy(deep = True)
del df['Date']
# Target
target = pandas.DataFrame(ipdata, columns = ["next"])
# Split into training and test sets; 80:20 split
row = round(0.8 * len(ipdata.index))
# BUG FIX: the original shuffled `df` alone (df.sample(frac=1)) after
# `target` was extracted in the original row order, so every feature row
# was paired with another row's label. Shuffle both frames with a single
# shared permutation to keep feature/label alignment.
perm = np.random.permutation(len(df))
df = df.iloc[perm].reset_index(drop=True)
target = target.iloc[perm].reset_index(drop=True)
trainX = df[:row]                # training features
trainY = df[row:]                # (misnamed) held-out features
testX = target[:row]['next']     # (misnamed) training targets
testY = target[row:]['next']     # held-out targets
X = trainX
y = testX
# Build model and make predictions
lm = linear_model.LinearRegression()
model = lm.fit(X, y)
predictions = lm.predict(trainY)
# Print stats
print("Accuracy stats of Multiple Linear Regression :")
accuracyStats(predictions, testY)
plot_results(predictions, testY, 'Multiple Linear Regression', 'Day', 'Price (in USD)')
errors = [math.fabs(x-y) for x,y in zip(predictions, testY)]
print("Average error : ", np.average(errors))
plt.plot(errors, label='Error')
plt.title('Days Ahead Vs. Error')
plt.xlabel('Day')
plt.ylabel('Price (in USD)')
plt.legend()
plt.show()
'''
Our results :
Accuracy stats of Multiple Linear Regression :
Accuracy with a margin of 100$ : 0.027149321266968326
Accuracy with a margin of 50$ : 0.013574660633484163
Accuracy with a margin of 25$ : 0.00904977375565611
Accuracy with a margin of 10$ : 0.0
Average error : 356.525268295
''' | [
"avi123shenoy@hotmail.com"
] | avi123shenoy@hotmail.com |
27a28d28322e0500b4a3598333a34ad162235c18 | 718a3a4d0a7a478ad879a474afe98cf6962313c8 | /selenium_scripts/FirstSeleniumScript.py | 3314198ea51f0ba6d76bc47ecb0dd01404895b1c | [] | no_license | RustAutomation/pythonTraining | fe691b1f66d5a586aa380d14f88dd5e89c52ddf6 | 0d7d48466116952148b77a1142fdc5081e503a82 | refs/heads/main | 2023-08-22T21:45:45.422623 | 2021-09-29T14:19:09 | 2021-09-29T14:19:09 | 411,702,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class SearchPage(object):
    """Page object wrapping a Selenium WebDriver for the Google start page.

    Provides explicit-wait element lookups and navigation to the base URL.
    """

    def __init__(self, driver):
        self.driver = driver
        self.base_url = "https://www.google.com/"

    def find_element(self, locator, time=10):
        """Wait up to *time* seconds for a single element matching *locator*."""
        waiter = WebDriverWait(self.driver, time)
        condition = EC.presence_of_element_located(locator)
        return waiter.until(condition,
                            message=f"Can't find element by locator {locator}")

    def find_elements(self, locator, time=10):
        """Wait up to *time* seconds for all elements matching *locator*."""
        waiter = WebDriverWait(self.driver, time)
        condition = EC.presence_of_all_elements_located(locator)
        return waiter.until(condition,
                            message=f"Can't find elements by locator {locator}")

    def go_to_site(self):
        """Open the base URL in the wrapped driver."""
        return self.driver.get(self.base_url)
"noreply@github.com"
] | RustAutomation.noreply@github.com |
198c8423003dfaddf05eeb4a8f038f3d8f1a2101 | fd62e4d805e16e2117d41d31fa8fd85d3ef6c98d | /knowledge_distillation/slim.py | b0cb61f91b668b5d5b4d356d0491d7a773c3c937 | [
"MIT"
] | permissive | CorentinChauvin/style_transfer_KD | fd2f2b8a605d199df580fc47822fa40052b75727 | 87bcb2963dbb8d09faf94c74a744f358cafe5427 | refs/heads/master | 2020-05-21T16:58:18.320460 | 2019-05-19T16:31:01 | 2019-05-19T16:31:01 | 186,113,222 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,987 | py | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
class ShallowConvNet(nn.Module):
    """Small CNN for 3x32x32 inputs: two conv/pool stages followed by one
    hidden fully-connected layer and a 10-way linear output."""

    def __init__(self, hidden=1000):
        super(ShallowConvNet, self).__init__()
        # Feature extractor: 3->6 then 6->16 channels, 5x5 kernels,
        # each followed by 2x2 max pooling in forward().
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Classifier head over the flattened 16x5x5 feature map.
        self.fc1 = nn.Linear(16 * 5 * 5, hidden)
        self.fc2 = nn.Linear(hidden, 10)

    def forward(self, x):
        """Return raw (un-softmaxed) class scores of shape (N, 10)."""
        feats = self.pool(F.relu(self.conv1(x)))
        feats = self.pool(F.relu(self.conv2(feats)))
        flat = feats.view(-1, 16 * 5 * 5)
        hidden = F.relu(self.fc1(flat))
        return self.fc2(hidden)
class SimpleConvNet(nn.Module):
    """LeNet-style CNN for 3x32x32 inputs: two conv/pool stages and a
    three-layer fully-connected classifier ending in 10 class scores."""

    def __init__(self, hidden=1000):
        super(SimpleConvNet, self).__init__()
        # Convolutional feature extractor (3 -> 6 -> 16 channels).
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Fully-connected head: hidden -> 84 -> 10.
        self.fc1 = nn.Linear(16 * 5 * 5, hidden)
        self.fc2 = nn.Linear(hidden, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        """Return raw class scores of shape (N, 10)."""
        feats = self.pool(F.relu(self.conv1(x)))
        feats = self.pool(F.relu(self.conv2(feats)))
        flat = feats.view(-1, 16 * 5 * 5)
        out = F.relu(self.fc1(flat))
        out = F.relu(self.fc2(out))
        return self.fc3(out)
class DeepConvNet(nn.Module):
    """Network-in-Network style all-convolutional model for 3x32x32 inputs.

    Class scores come from a final 10-channel 1x1 convolution followed by
    global 8x8 average pooling.
    """

    def __init__(self):
        super(DeepConvNet, self).__init__()
        # NOTE(review): the attribute keeps the original "classifer"
        # spelling so previously saved state_dicts remain loadable.
        self.classifer = nn.Sequential(
            # Block 1: 5x5 conv plus two 1x1 "mlpconv" layers.
            nn.Conv2d(3, 192, 5, padding=2),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 160, 1),
            nn.ReLU(inplace=True),
            nn.Conv2d(160, 96, 1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
            nn.Dropout(0.5),
            # Block 2.
            nn.Conv2d(96, 192, 5, padding=2),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 192, 1),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 192, 1),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(kernel_size=3, stride=2, padding=1),
            nn.Dropout(0.5),
            # Block 3: last 1x1 conv maps to the 10 class channels,
            # then 8x8 average pooling collapses them to 1x1.
            nn.Conv2d(192, 192, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 192, 1),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 10, 1),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(kernel_size=8, stride=1, padding=0),
        )

    def forward(self, x):
        """Return class scores of shape (N, 10)."""
        scores = self.classifer(x)
        return scores.view(scores.size(0), 10)
class AlexNet(nn.Module):
    """CIFAR-sized AlexNet: five conv layers with three 2x2 max-pools reduce
    a 3x32x32 image to a 256-dim vector, then one linear layer produces the
    class scores."""

    def __init__(self, num_classes=10):
        super(AlexNet, self).__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 64, 11, stride=4, padding=5),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(64, 192, 5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(192, 384, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(384, 256, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
        )
        # For 32x32 input the final feature map is 256x1x1.
        self.classifier = nn.Linear(256, num_classes)

    def forward(self, x):
        """Return class scores of shape (N, num_classes)."""
        feats = self.features(x)
        flat = feats.view(feats.size(0), -1)
        return self.classifier(flat)
def num_trainable_parameters(model):
    """Count the parameters of *model* that require gradients."""
    trainable = (p for p in model.parameters() if p.requires_grad)
    return sum(np.prod(p.size()) for p in trainable)
| [
"corentin.chauvin@gmail.com"
] | corentin.chauvin@gmail.com |
826acfa8e16b8fd664f0a129d86f0a409032f40a | ed41637c4e0eb0d4842814a1ad3f79fac24241a9 | /basicformtwo_app/views.py | 5442abc15d33f199e201da52ca28b0e368d024d8 | [] | no_license | beingvikasagain/django_project | 7cafcc7070d1ea8d20b7bfefe240c8ca09b7ea2d | 92d929e8bb37603852348891dd87308265b4c1dc | refs/heads/master | 2022-04-23T03:51:57.429029 | 2020-04-24T04:26:29 | 2020-04-24T04:26:29 | 258,401,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | from django.shortcuts import render
from basicformtwo_app.models import AccessRecord,Topic,Webpage
from basicformtwo_app import forms
# Create your views here.
def index(request):
    """Render the landing page listing all AccessRecord rows ordered by date."""
    records = AccessRecord.objects.order_by('date')
    context = {'access_record': records}
    return render(request, 'basicformtwo_app/index.html', context=context)
def form_name_view(request):
    """Show the name/email/text form; on POST, validate the submission and
    echo the cleaned values to stdout before re-rendering the page."""
    form = forms.FormName()
    if request.method == 'POST':
        # Re-bind the form to the submitted data so it can be validated.
        form = forms.FormName(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            print("validation successful!")
            print("name : " + cleaned['name'])
            print("Email: " + cleaned['email'])
            print("Text: " + cleaned['text'])
    return render(request, 'basicformtwo_app/form.html', {'insert_form': form})
| [
"57279623+beingvikasagain@users.noreply.github.com"
] | 57279623+beingvikasagain@users.noreply.github.com |
78f31e6a72b094fc49b16f54e05c6d02a1059dd2 | ac63f6324e1d73ac12803868db659218542fbd30 | /authapp/urls.py | 8a2a938d9a0ad3ac0a09fba7f4d406f3951677b3 | [] | no_license | Cerzon/geekbrains-django | 709273476531ee2bb07fa4d850120acb2c48f9c0 | c491f7fafed15b9f1bfecbcca77ac28d048c4ffb | refs/heads/master | 2020-06-30T20:18:04.327899 | 2019-09-23T12:06:27 | 2019-09-23T12:06:27 | 200,941,848 | 0 | 0 | null | 2019-09-23T12:06:29 | 2019-08-07T00:19:35 | Python | UTF-8 | Python | false | false | 358 | py | from django.urls import path
import authapp.views as authapp
app_name = 'authapp'
# Routes for the authentication app; reversed via the "authapp" namespace
# declared in app_name above.
urlpatterns = [
    path('login/', authapp.UserLoginView.as_view(), name='login'),
    path('logout/', authapp.logout, name='logout'),
    path('register/', authapp.register, name='register'),
    # Profile editing is keyed by the user's primary key.
    path('edit/<int:pk>/', authapp.EditUserProfileView.as_view(), name='edit'),
]
"kirill.tkachenko@gmail.com"
] | kirill.tkachenko@gmail.com |
ed87df5aa3f7ff0c1ea98fb4fa6a5c561166fa25 | 857fd2974ffb60d39ec4fef25a6f44f66966d482 | /code/chapter5/chapter5.5.py | d5e792b67ed97a245222a04f598351f56150ad59 | [] | no_license | DockerBB/pytorch-study | 6d6d1c5ffd0b72ca82eaa462976c1e5c4f98cebc | b61929082e4715c82678f86295ec237ee334fd6c | refs/heads/master | 2020-09-05T23:22:20.177361 | 2019-12-03T08:38:11 | 2019-12-03T08:38:11 | 220,242,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,268 | py | # 深度卷积神经网络LeNet
import time
import torch
from torchsummary import summary
from torch import nn, optim
import sys
sys.path.append("..")
import d2lzh_pytorch as d2l
print(torch.__version__)
# Prefer the GPU when available; the model and batches are moved here later.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class LeNet(nn.Module):
    """Classic LeNet-5 variant for 1x28x28 (Fashion-MNIST) images.

    Two sigmoid conv/pool stages followed by a three-layer fully-connected
    classifier producing 10 class scores.
    """

    def __init__(self):
        super(LeNet, self).__init__()
        # Convolutional feature extractor: 1 -> 6 -> 16 channels.
        self.conv = nn.Sequential(
            nn.Conv2d(1, 6, 5),   # in_channels, out_channels, kernel_size
            nn.Sigmoid(),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(6, 16, 5),
            nn.Sigmoid(),
            nn.MaxPool2d(2, 2),
        )
        # Classifier over the flattened 16x4x4 feature maps.
        self.fc = nn.Sequential(
            nn.Linear(16 * 4 * 4, 120),
            nn.Sigmoid(),
            nn.Linear(120, 84),
            nn.Sigmoid(),
            nn.Linear(84, 10),
        )

    def forward(self, img):
        """Return (N, 10) class scores for a batch of images."""
        feats = self.conv(img)
        return self.fc(feats.view(img.shape[0], -1))
# Instantiate the network and print a per-layer summary for a 1x28x28 input.
net = LeNet()
summary(net, (1, 28, 28))
print(net)
# Load Fashion-MNIST in mini-batches of 256 via the d2l helper.
batch_size = 256
train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size=batch_size)
def evaluate_accuracy(data_iter, net, device=None):
    """Compute classification accuracy of *net* over *data_iter*.

    Supports both ``nn.Module`` models (temporarily switched to eval mode,
    with batches moved to *device*) and plain callables (run on the CPU; an
    ``is_training=False`` keyword is passed if the callable accepts one).
    """
    if device is None and isinstance(net, torch.nn.Module):
        # Default to the device the model's parameters live on.
        device = list(net.parameters())[0].device
    correct, total = 0.0, 0
    with torch.no_grad():
        for X, y in data_iter:
            if isinstance(net, torch.nn.Module):
                net.eval()  # disable dropout for evaluation
                preds = net(X.to(device)).argmax(dim=1)
                correct += (preds == y.to(device)).float().sum().cpu().item()
                net.train()  # restore training mode
            elif 'is_training' in net.__code__.co_varnames:
                # Custom function that takes an is_training flag.
                correct += (net(X, is_training=False).argmax(dim=1) == y).float().sum().item()
            else:
                correct += (net(X).argmax(dim=1) == y).float().sum().item()
            total += y.shape[0]
    return correct / total
def train_ch5(net, train_iter, test_iter, batch_size, optimizer, device, num_epochs):
    """Train *net* with cross-entropy on *train_iter* for *num_epochs*,
    printing loss, train accuracy, test accuracy and wall time per epoch."""
    net = net.to(device)
    print("training on ", device)
    loss = torch.nn.CrossEntropyLoss()
    for epoch in range(num_epochs):
        epoch_start = time.time()
        loss_sum, correct, seen, batches = 0.0, 0.0, 0, 0
        for X, y in train_iter:
            X, y = X.to(device), y.to(device)
            y_hat = net(X)
            l = loss(y_hat, y)
            # Standard step: reset grads, backprop, update.
            optimizer.zero_grad()
            l.backward()
            optimizer.step()
            loss_sum += l.cpu().item()
            correct += (y_hat.argmax(dim=1) == y).sum().cpu().item()
            seen += y.shape[0]
            batches += 1
        test_acc = evaluate_accuracy(test_iter, net)
        print('epoch %d, loss %.4f, train acc %.3f, test acc %.3f, time %.1f sec'
              % (epoch + 1, loss_sum / batches, correct / seen, test_acc, time.time() - epoch_start))
# Train for 5 epochs with Adam at learning rate 1e-3.
lr, num_epochs = 0.001, 5
optimizer = torch.optim.Adam(net.parameters(), lr=lr)
train_ch5(net, train_iter, test_iter, batch_size, optimizer, device, num_epochs)
"837555220@qq.com"
] | 837555220@qq.com |
0c8201195e1daafea6aba55823cd0fc12745f23a | d91f0bbe7fd0a1c1c53d9d5b36d38fdb5c15e8c7 | /src/comment_me.py | 5ca7a60f629f7370d1e9271a7a11b02312499db1 | [] | no_license | Zero-ZxXxAngel/FBxMOD | c91e29d0632cca7d3de3414807a9087c7bd75758 | f0db38b0a4ccf4921d0ee0630891f73c834a64da | refs/heads/master | 2022-11-20T05:55:25.996441 | 2020-07-03T13:57:52 | 2020-07-03T13:57:52 | 272,920,531 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,618 | py | #BY: Zero & Sumarr ID
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s)\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x02\x00l\x01\x00m\x02\x00Z\x03\x00\x01d\x03\x00\x84\x00\x00Z\x04\x00d\x01\x00S(\x04\x00\x00\x00i\xff\xff\xff\xffN(\x01\x00\x00\x00t\r\x00\x00\x00BeautifulSoupc\x03\x00\x00\x00\r\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xff\x01\x00\x00y\xf1\x01d\x00\x00}\x03\x00d\x00\x00}\x04\x00d\x00\x00}\x05\x00t\x01\x00}\x06\x00|\x02\x00j\x02\x00|\x01\x00d\x01\x00\x17|\x00\x00\x83\x02\x00j\x03\x00d\x02\x00\x83\x01\x00}\x07\x00t\x04\x00|\x07\x00d\x03\x00\x83\x02\x00}\x08\x00xA\x00|\x08\x00j\x05\x00d\x04\x00\x83\x01\x00D]0\x00}\t\x00d\x05\x00t\x06\x00|\t\x00\x83\x01\x00k\x06\x00rY\x00|\x01\x00|\t\x00d\x06\x00\x19\x17}\n\x00t\x07\x00}\x06\x00PqY\x00qY\x00W|\x06\x00t\x07\x00k\x02\x00r\x04\x01|\x02\x00j\x02\x00|\n\x00|\x00\x00\x83\x02\x00}\x07\x00t\x04\x00|\x07\x00d\x03\x00\x83\x02\x00}\x0b\x00xG\x00|\x0b\x00j\x05\x00d\x04\x00\x83\x01\x00D]3\x00}\t\x00d\x07\x00t\x06\x00|\t\x00\x83\x01\x00k\x06\x00r\xca\x00|\x02\x00j\x02\x00|\x01\x00|\t\x00d\x06\x00\x19\x17|\x00\x00\x83\x02\x00\x01q\xca\x00q\xca\x00Wn\x00\x00x6\x00|\x08\x00d\x08\x00\x83\x01\x00D](\x00}\t\x00d\t\x00|\t\x00d\n\x00\x19k\x06\x00r\x11\x01|\x01\x00|\t\x00d\n\x00\x19\x17}\x03\x00Pq\x11\x01q\x11\x01WxR\x00|\x08\x00j\x08\x00d\x0b\x00\x83\x01\x00D]A\x00}\t\x00d\x0c\x00|\t\x00d\r\x00\x19k\x06\x00rp\x01|\t\x00d\x0e\x00\x19}\x04\x00n\x00\x00d\x0f\x00|\t\x00d\r\x00\x19k\x06\x00rM\x01|\t\x00d\x0e\x00\x19}\x05\x00PqM\x01qM\x01W|\x03\x00d\x00\x00k\x03\x00r\xf0\x01|\x04\x00d\x00\x00k\x03\x00r\xf0\x01|\x05\x00d\x00\x00k\x03\x00r\xf0\x01i\x03\x00|\x04\x00d\x0c\x006|\x05\x00d\x0f\x006t\t\x00j\n\x00d\x10\x00\x83\x01\x00d\x11\x006}\x0c\x00|\x02\x00j\x0b\x00|\x03\x00|\x00\x00|\x0c\x00\x83\x03\x00\x01n\x00\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x12\x00\x00\x00Ns\x11\x00\x00\x00/1145924768936987s\x05\x00\x00\x00utf-8s\x0b\x00\x00\x00html.parsert\x01\x00\x00\x00as!\x00\x00\x00/reactions/picker/?is
_permalink=1t\x04\x00\x00\x00hrefs\x0f\x00\x00\x00reaction_type=8t\x04\x00\x00\x00forms\x0f\x00\x00\x00/a/comment.php?t\x06\x00\x00\x00actions\x12\x00\x00\x00input[type=hidden]t\x07\x00\x00\x00fb_dtsgt\x04\x00\x00\x00namet\x05\x00\x00\x00valuet\x07\x00\x00\x00jazoests,\x00\x00\x008J+YuEhlbGxvIHNheWEgcGVuZ2d1bmEgTUJG8J+YuA==t\x0c\x00\x00\x00comment_text(\x0c\x00\x00\x00t\x04\x00\x00\x00Nonet\x05\x00\x00\x00Falset\x0b\x00\x00\x00httpRequestt\x06\x00\x00\x00encodet\x06\x00\x00\x00parsert\x08\x00\x00\x00find_allt\x03\x00\x00\x00strt\x04\x00\x00\x00Truet\x06\x00\x00\x00selectt\x06\x00\x00\x00base64t\t\x00\x00\x00b64decodet\x0f\x00\x00\x00httpRequestPost(\r\x00\x00\x00t\x06\x00\x00\x00cookiet\x03\x00\x00\x00urlt\x06\x00\x00\x00configR\x04\x00\x00\x00R\x05\x00\x00\x00R\x08\x00\x00\x00t\x06\x00\x00\x00statust\x08\x00\x00\x00responset\x04\x00\x00\x00htmlt\x01\x00\x00\x00xt\x0c\x00\x00\x00reaction_urlt\x05\x00\x00\x00angryt\x06\x00\x00\x00params(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00Sumarr IDt\x04\x00\x00\x00main\t\x00\x00\x00sF\x00\x00\x00\x00\x01\x03\x01\x06\x01\x06\x01\x06\x01\x06\x01\x1f\x01\x0f\x01\x16\x01\x12\x01\x0e\x01\x06\x01\x08\x01\x0c\x01\x12\x01\x0f\x01\x16\x01\x12\x01"\x01\x13\x01\x10\x01\x0e\x01\x08\x01\x16\x01\x10\x01\r\x01\x10\x01\n\x01\x08\x01$\x01\x03\x01\x0e\x01\x13\x02\x1a\x01\x03\x00(\x05\x00\x00\x00R\x13\x00\x00\x00t\x03\x00\x00\x00bs4R\x00\x00\x00\x00R\x0e\x00\x00\x00R \x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00Sumarr IDt\x08\x00\x00\x00<module>\x06\x00\x00\x00s\x04\x00\x00\x00\x0c\x01\x10\x02'))
| [
"noreply@github.com"
] | Zero-ZxXxAngel.noreply@github.com |
5e6d74ef170cbb7a115044facb313a0c5a68e6c5 | a52903d2e961c34cac5e2d9aa1318237c185a9d6 | /agriWeb/agriWeb/urls.py | 651df8ad564350f2736097c42798c904e98fa199 | [] | no_license | dawdiken/web_project | c1460498642cb6b3b017f647432bf1039aed3615 | d1b9bb57fe62636e1d26aad6dee0ed0b2e05ce3a | refs/heads/master | 2020-03-26T22:38:13.981653 | 2018-08-30T22:07:06 | 2018-08-30T22:07:06 | 145,475,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | """agriWeb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.urls import path
from django.views.generic.base import TemplateView
from agriApp import views
from .views import addCustomer, saveCust, success
# Project-level URL configuration; static() appends media/static serving
# routes for development.
urlpatterns = [
    # NOTE(review): the route *name* below contains a trailing '$' — this
    # looks unintentional and will make reverse('addCustomer') fail; confirm.
    url(r'^addcustomer/$', addCustomer, name='addCustomer$'),
    url(r'^savecust/success', success, name='success'),
    url(r'^savecust/$', saveCust.as_view(), name='saveCust'),
    path('admin/', admin.site.urls),
    path('', TemplateView.as_view(template_name='home.html'), name='home'),
    path('agriApp/', include('agriApp.urls')),
    path('accounts/', include('django.contrib.auth.urls')),
    # NOTE(review): path() matches literally, so the leading '^' here is part
    # of the URL, not a regex anchor — this route is likely unreachable; it
    # probably should use url()/re_path() or drop the '^'.
    path('^agriweb/addcustomer', views.cust, name='cust'),
]+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"dawdiken@gmail.com"
] | dawdiken@gmail.com |
b7c5504c0df86500e9a9c7bd23405cd3587aa509 | d5e7346f54687a5c65b6e41ed724a3c17415c07f | /code/rt-thread/components/drivers/watchdog/SConscript | a8061d606017706a2e6b5e334c868ec3ab1e4042 | [] | no_license | liu2guang/rt-thread_ck803s | 9216b2c8e93d8b271c7bc727c59565f894d4ded9 | 22d70ed865bc5e1d85d7d0fb0b00a1a3b21171dc | refs/heads/master | 2023-03-21T11:18:43.912214 | 2020-11-13T02:18:59 | 2020-11-13T02:18:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | from building import *
cwd = GetCurrentDir()
src = Glob('*.c')
CPPPATH = [cwd + '/../include']
group = DefineGroup('device', src, depend = ['RT_USING_WDT'], CPPPATH = CPPPATH)
Return('group')
| [
"hanguangxue_x@163.com"
] | hanguangxue_x@163.com | |
5afce9e08cd3f782a4575e5f0cae0383e08debac | 3f8928e194313212bb1696b88789bf2594687024 | /pyprol/storage/sqlite_storage.py | 7bc4570e8fc3bf79cf86f70f000e91e79eaa4abe | [] | no_license | skoenen/pyprol | 8151b8237b29c2b0194abd7ce760d08aa357dd22 | 32db12a84a0466537eca9a1c3b3dff2b76504302 | refs/heads/master | 2021-01-22T09:05:25.799938 | 2013-10-30T13:37:39 | 2013-10-30T13:37:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,891 | py | try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import sqlite3
import os
import re
import sys
import json
from collections import namedtuple
from logging import getLogger
# Public API of this module.
__all__ = ['SQLiteStorage']
log = getLogger(__name__)
# URL schemes accepted by this storage backend.
SCHEME = ('sqlite', 'sqlite3')
def encode_timing_stat_calls(calls):
    """Serialize a sequence of timing-call records to a JSON string.

    Each record becomes an array of [iso timestamp, name, code, call_count,
    recursive_call_count, time_total, time_function, calls]. A ``None``
    input serializes to the JSON literal ``null``.
    """
    if calls is None:
        return json.dumps(None)
    encoded = [
        (call.timestamp.isoformat(), call.name, call.code,
         call.call_count, call.recursive_call_count,
         call.time_total, call.time_function, call.calls)
        for call in calls
    ]
    return json.dumps(encoded)
class SQLiteStorage(object):
    """ Storage implementation to save measure values in a sqlite database.

        URL:
            SCHEME: PATH
            sqlite:// <sqlite db file path>

        PATH:
            In this section the environment variables will be expanded, like
            $HOME
    """

    # DDL executed on every construction; IF NOT EXISTS makes it idempotent.
    # `timings` holds one row per measured point, `timings_calls` its
    # per-sub-call breakdown keyed by the parent's measure_id.
    create_tables = [
            ("CREATE TABLE IF NOT EXISTS timings ("
                "measure_id INTEGER PRIMARY KEY AUTOINCREMENT, "
                "timestamp TEXT NOT NULL, "
                "measure_session VARCHAR(255) NOT NULL, "
                "measure_point VARCHAR(255) NOT NULL, "
                "code VARCHAR(255) NOT NULL, "
                "call_count INTEGER, "
                "recursive_call_count INTEGER, "
                "time_total REAL, "
                "time_function REAL)"),
            ("CREATE TABLE IF NOT EXISTS timings_calls ("
                "measure_id INTEGER NOT NULL,"
                "timestamp TEXT NOT NULL, "
                "measure_point VARCHAR(255) NOT NULL, "
                "code VARCHAR(255) NOT NULL, "
                "call_count INTEGER, "
                "recursive_call_count INTEGER, "
                "time_total REAL, "
                "time_function REAL, "
                "FOREIGN KEY (measure_id) REFERENCES "
                    "timings (measure_id)) ")]
            # Cascading was deliberately disabled; kept for reference.
            #"ON DELETE CASCADE "
            #"ON UPDATE CASCADE)")]

    # Parameterized inserts (8 placeholders each) used by save().
    insert_timing = ("INSERT INTO timings "
            "(timestamp, measure_session, measure_point, code, "
            "call_count, recursive_call_count, time_total, "
            "time_function) VALUES (?, ?, ?, ?, ?, ?, ?, ?)")

    insert_subcall = ("INSERT INTO timings_calls "
            "(measure_id, timestamp, measure_point, code, "
            "call_count, recursive_call_count, time_total, "
            "time_function) VALUES (?, ?, ?, ?, ?, ?, ?, ?)")

    def __init__(self, config):
        """Open (or create) the database file named by the configured
        endpoint URL, expanding $VARS, and ensure the tables exist.

        Raises RuntimeError if the database cannot be opened.
        """
        self.config = config

        # netloc + path together form the filesystem path of the db file.
        path = var_expand(self.config.storage_endpoint.netloc)
        path += var_expand(self.config.storage_endpoint.path)

        try:
            self.conn = sqlite3.connect(path)
        except sqlite3.OperationalError:
            _, error, _ = sys.exc_info()
            raise RuntimeError(
                    ("Can not open pyprol sqlite database '{0}', "
                        "because of '{1}'").format(path, error))

        for create_query in self.create_tables:
            self.conn.execute(create_query)

        self.conn.commit()

    def save(self, measure):
        """Persist a measure's timings (and their sub-calls) to the db.

        Measures without a `timings` attribute are silently ignored.
        One commit is issued per timing, after its sub-calls are inserted.
        """
        log.debug("Measure to save: {}".format(measure))
        if hasattr(measure, 'timings'):
            for timing in measure.timings:
                cursor = self.conn.cursor()
                timing_entry = (
                        timing.timestamp.isoformat(),
                        timing.session,
                        timing.name,
                        timing.code,
                        timing.call_count,
                        timing.recursive_call_count,
                        timing.time_total,
                        timing.time_function)
                cursor.execute(self.insert_timing, timing_entry)
                # lastrowid of the parent insert links the sub-call rows.
                measure_id = cursor.lastrowid

                if timing.calls is not None:
                    for call in timing.calls:
                        timing_call = (
                                measure_id,
                                call.timestamp.isoformat(),
                                call.name,
                                call.code,
                                call.call_count,
                                call.recursive_call_count,
                                call.time_total,
                                call.time_function)
                        cursor.execute(self.insert_subcall, timing_call)

                self.conn.commit()

    def close(self):
        """Close the connection if __init__ got far enough to open one."""
        if hasattr(self, "conn"):
            self.conn.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
def var_expand(string):
    """Expand ``$NAME`` references (upper-case environment-variable names of
    at least two characters) to their values from ``os.environ``.

    Raises KeyError if a referenced variable is not set.
    """
    def _lookup(match):
        return os.environ[match.group(1)]

    return re.sub(r"\$([A-Z_0-9]+[A-Z0-9])", _lookup, string)
IMPL = SQLiteStorage
| [
"stefan.koenen@uni-duesseldorf.de"
] | stefan.koenen@uni-duesseldorf.de |
30db46f9d4fa1329e875db1f91eea3c682a20523 | 7cfbbaa4205956c26685edb4b137e3b5fd3fc3af | /08_parse-jason5.py | 525b096e8eea990b9a9eb8c7750395e809bb09a2 | [] | no_license | andresbaldeon93/PythonCEC | b3582be57f0b4a437de1509bc68cd6e46afbff41 | f9ad115a51c33336f81740d6e9f75d0e7b608673 | refs/heads/master | 2021-01-09T16:37:35.402464 | 2020-03-07T18:03:30 | 2020-03-07T18:03:30 | 242,374,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,322 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 28 19:15:29 2020
@author: CEC
"""
import urllib.parse
import requests
# Interactively query the MapQuest Directions API for routes between two
# user-supplied locations until the user types "quit"/"q".
main_api = "https://www.mapquestapi.com/directions/v2/route?"
key = "6sHAI2zouPSmc6kOQzoPyki4190uSVAF"
# Miles -> kilometres conversion factor, used for every distance printed.
MILES_TO_KM = 1.61

while True:
    orig = input("Starting location:")
    if orig == "quit" or orig == "q":
        break
    dest = input("Destination: ")
    if dest == "quit" or dest == "q":
        break

    # Build the request URL with properly escaped query parameters.
    url = main_api + urllib.parse.urlencode({"key": key, "from": orig, "to": dest})
    print("URL: " + (url))
    json_data = requests.get(url).json()

    # statuscode 0 means MapQuest resolved both endpoints and found a route.
    json_status = json_data["info"]["statuscode"]
    if json_status == 0:
        print("API Status: " + str(json_status) + " = A successfull route call.\n")
        print("Directions from " + (orig) + " to " + (dest))
        print("Trip Duration: " + str(json_data["route"]["formattedTime"]))
        print("Kilometers: " + str(json_data["route"]["distance"] * MILES_TO_KM))
        print("Fuel (Gal): " + str(json_data["route"]["fuelUsed"]))
        print("=======================================")
        for each in json_data["route"]["legs"][0]["maneuvers"]:
            # BUG FIX: the original line was syntactically invalid (missing
            # '+' after "(", unbalanced parens) and multiplied by 1.161
            # instead of the 1.61 mi->km factor used above; reconstructed as
            # "<narrative> (<distance> km)".
            print(each["narrative"] + " (" + "{:.2f}".format(each["distance"] * MILES_TO_KM) + " km)")
        print("=======================================\n")
| [
"noreply@github.com"
] | andresbaldeon93.noreply@github.com |
b0e6c5aa9b04d50659c4fffc49eb2e5c9d8a2d53 | 99ee491a53aed1e8ee74205dd55295761ac85b69 | /python/back/persistence.py | 3e28a9f2540727894be398664ebdd1e39b31cf06 | [] | no_license | kronleuchter85/burritos | 2222d2544551fea2ba116a8c45343e4e767f9407 | 016794d16b2ab77ea672f307164c6762b9880e80 | refs/heads/master | 2020-03-07T13:01:51.915985 | 2018-02-04T17:02:15 | 2018-02-04T17:02:15 | 127,490,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | from commons.queries import QUERY_SELECT , QUERY_INSERT ,QUERY_CREATE_TABLE , QUERY_CHECK_IF_EXISTS
import psycopg2
class Persistence:
    """Thin wrapper around a psycopg2 connection to the local ``gtrader``
    database, executing the query templates from ``commons.queries`` against
    per-event tables (the ``@tableName`` placeholder is substituted)."""

    conn_string = "host='localhost' dbname='gtrader' user='postgres' password='postgres'"
    conn = None
    cursor = None

    def initialize(self):
        """Open the connection and create a reusable cursor."""
        self.conn = psycopg2.connect(self.conn_string)
        self.cursor = self.conn.cursor()

    def createTable(self, tablename):
        """Create *tablename* from the CREATE TABLE template and commit."""
        sql = QUERY_CREATE_TABLE.replace('@tableName', tablename)
        self.cursor.execute(sql)
        self.conn.commit()

    def getAll(self, tablename):
        """Return every row of *tablename*."""
        sql = QUERY_SELECT.replace('@tableName', tablename)
        self.cursor.execute(sql)
        return self.cursor.fetchall()

    def insert(self, tablename, event):
        """Insert *event* (via its getTuple()) into *tablename* and commit."""
        sql = QUERY_INSERT.replace('@tableName', tablename)
        self.cursor.execute(sql, event.getTuple())
        self.conn.commit()

    def existsTable(self, tablename):
        """Return whether *tablename* exists in the database."""
        sql = QUERY_CHECK_IF_EXISTS.replace('@tableName', tablename)
        self.cursor.execute(sql)
        return self.cursor.fetchone()[0]

    ## Sample
    def getAll_samples(self):
        """Return ten sample rows from the hard-coded demo table."""
        self.cursor.execute("select * from nymex_future_gc_201712 limit 10")
        return self.cursor.fetchall()
| [
"kronleuchter85@gmail.com"
] | kronleuchter85@gmail.com |
e2af25729d07e29e30b711b7f26e8a704f82a27d | 4213a03eae748f456283cb29f8fe0d8461afb14c | /feature_to_csv.py | 84ffd910cc9c6eaa71b9935a5087ebc5987a8c59 | [] | no_license | zapouria/COMP6321-ML-Project | 0513927b647b1cccf0bb3dbddd0d4821535d0aca | 663ff4e4323d84bfad8a72387cc035bb4ce31b71 | refs/heads/main | 2023-01-22T14:23:03.993073 | 2020-12-01T00:46:43 | 2020-12-01T00:46:43 | 311,476,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,647 | py | import librosa
import numpy as np
import math
from pysndfx import AudioEffectsChain
import python_speech_features
import matplotlib.pyplot as plt
import librosa.display
import pandas as pd
import IPython.display as ipd
import warnings
import sklearn
from sklearn.preprocessing import MinMaxScaler
import feature_extraction_util
def extract_features(dataset_dir, csv_dir):
    """Extract 13 MFCC means plus 13 MFCC-delta means per audio file in
    *dataset_dir* and write them, together with the speaker id and gender,
    to the CSV file *csv_dir*.

    Each file is denoised, vocal-enhanced and trimmed before MFCC extraction
    (25 ms analysis windows with a 10 ms hop).
    """
    files = feature_extraction_util.read_file(dataset_dir)
    directory = csv_dir
    file_names = []
    dataset_gender = []
    data = []
    # Gender of each speaker, keyed by the speaker id (the number before the
    # first '-' in the file name).
    gender = {'19': 'F', '26': 'M', '27': 'M', '32': 'F', '39': 'F', '40': 'F',
              '60': 'M', '78': 'M', '83': 'F', '87': 'F', '89': 'F', '103': 'F', '118': 'M',
              '125': 'F', '150': 'F', '163': 'M', '196': 'M', '198': 'F', '200': 'F'}
    for f in files:
        y, sr = librosa.load(f)
        # Reduce ambient noise.
        y_noise_reduced = feature_extraction_util.ambient_noise_reduction(y, sr)
        # Enhance the vocals.
        y_vocal_enhanced = feature_extraction_util.vocal_enhancement(y_noise_reduced, sr)
        # Trim leading/trailing silence.
        y_trimmed, _ = feature_extraction_util.audio_trimming(y_vocal_enhanced, sr)
        # 13 MFCCs over 25 ms windows with a 10 ms hop.
        mfcc_features = librosa.feature.mfcc(y=y_trimmed, sr=sr,
                                             n_mfcc=13, hop_length=int(0.010 * sr),
                                             n_fft=int(0.025 * sr))
        # Average each coefficient over time.
        mfcc_mean = mfcc_features.mean(axis=1)
        # First-order deltas of the MFCCs, also averaged over time.
        mfcc_delta = librosa.feature.delta(mfcc_features, order=1)
        mfcc_delta_mean = mfcc_delta.mean(axis=1)
        # One row per file: 13 MFCC means followed by 13 delta means.
        data.append(np.concatenate((mfcc_mean, mfcc_delta_mean)))
        # Speaker id is the number before the first '-' in the file name.
        file_names.append(f.split("/")[-1].split("-")[0])
        dataset_gender.append(gender[f.split("/")[-1].split("-")[0]])
        print("the speaker %s has been added to the list!" % f.split("/")[-1].split("-")[0])
    df = pd.DataFrame(data=data)
    # Prepend the speaker id and gender columns to the feature columns.
    df.insert(0, "file name", file_names, True)
    # BUG FIX: this column was also labelled "file name" (a duplicate header,
    # only possible because allow_duplicates=True was passed); it holds the
    # speaker gender, so label it accordingly.
    df.insert(1, "gender", dataset_gender, True)
    df.to_csv(directory, index=False)
| [
"spzahraei@yahoo.com"
] | spzahraei@yahoo.com |
fabd454eb845bf6af3c22c887e7c846a8afe525c | bbf23510da9f063fbc9b7334d2a938e1edfe314a | /examples/training/avg_word_embeddings/training_stsbenchmark_cnn.py | 1761eed4bf1f502aa00007ed181f98347deb1d9a | [
"Apache-2.0"
] | permissive | jonathansantilli/sentence-transformers | e2b2905997fef8df056aec63962d8697af66504c | 9433e9b3d4d88ab286f6108f1cd9ee966a263a35 | refs/heads/master | 2023-04-24T10:15:18.641775 | 2021-05-04T12:30:11 | 2021-05-04T12:30:11 | 364,264,204 | 0 | 0 | Apache-2.0 | 2021-05-04T13:25:04 | 2021-05-04T13:25:03 | null | UTF-8 | Python | false | false | 3,693 | py | """
This example runs a CNN after the word embedding lookup. The output of the CNN is than pooled,
for example with mean-pooling.
"""
import torch
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses, util
from sentence_transformers import LoggingHandler, SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
from sentence_transformers.readers import *
import logging
from datetime import datetime
import os
import csv
import gzip
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
# Read the dataset
batch_size = 32
# NOTE(review): the output directory says "bilstm" but a CNN encoder is
# trained below — the label looks stale; confirm before comparing runs.
model_save_path = 'output/training_stsbenchmark_bilstm-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
# Check if dataset exists. If not, download and extract it
sts_dataset_path = 'datasets/stsbenchmark.tsv.gz'
if not os.path.exists(sts_dataset_path):
    util.http_get('https://sbert.net/datasets/stsbenchmark.tsv.gz', sts_dataset_path)
logging.info("Read STSbenchmark train dataset")
train_samples = []
dev_samples = []
test_samples = []
# STS-B rows are tab-separated; QUOTE_NONE keeps embedded quotes intact.
with gzip.open(sts_dataset_path, 'rt', encoding='utf8') as fIn:
    reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE)
    for row in reader:
        score = float(row['score']) / 5.0 # Normalize score to range 0 ... 1
        inp_example = InputExample(texts=[row['sentence1'], row['sentence2']], label=score)
        if row['split'] == 'dev':
            dev_samples.append(inp_example)
        elif row['split'] == 'test':
            test_samples.append(inp_example)
        else:
            train_samples.append(inp_example)
# Map tokens to vectors using BERT
word_embedding_model = models.Transformer('bert-base-uncased')
# Run a multi-window CNN (kernel sizes 1, 3 and 5) over the token embeddings.
cnn = models.CNN(in_word_embedding_dimension=word_embedding_model.get_word_embedding_dimension(), out_channels=256, kernel_sizes=[1,3,5])
# Apply mean pooling to get one fixed sized sentence vector
pooling_model = models.Pooling(cnn.get_word_embedding_dimension(),
                               pooling_mode_mean_tokens=True,
                               pooling_mode_cls_token=False,
                               pooling_mode_max_tokens=False)
model = SentenceTransformer(modules=[word_embedding_model, cnn, pooling_model])
# Convert the dataset to a DataLoader ready for training
logging.info("Read STSbenchmark train dataset")
train_dataloader = DataLoader(train_samples, shuffle=True, batch_size=batch_size)
# Cosine-similarity regression against the normalized gold scores.
train_loss = losses.CosineSimilarityLoss(model=model)
logging.info("Read STSbenchmark dev dataset")
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(dev_samples, name='sts-dev')
# Configure the training
num_epochs = 10
warmup_steps = math.ceil(len(train_dataloader) * num_epochs * 0.1) #10% of train data for warm-up
logging.info("Warmup-steps: {}".format(warmup_steps))
# Train the model; the best checkpoint (by dev score) is written to
# model_save_path.
model.fit(train_objectives=[(train_dataloader, train_loss)],
          evaluator=evaluator,
          epochs=num_epochs,
          warmup_steps=warmup_steps,
          output_path=model_save_path
          )
##############################################################################
#
# Load the stored model and evaluate its performance on STS benchmark dataset
#
##############################################################################
# Reload the best saved checkpoint for the final evaluation.
model = SentenceTransformer(model_save_path)
test_evaluator = EmbeddingSimilarityEvaluator.from_input_examples(test_samples, name='sts-test')
# BUG FIX: the test evaluator was built but the dev `evaluator` was passed to
# model.evaluate(), so the reported figure was a dev-set score, not test.
model.evaluate(test_evaluator)
"rnils@web.de"
] | rnils@web.de |
510b69b892937b2ecbb532c19cc34f4795f82e5e | 60d209f361bf35ac764076100b51ae692c31c1b2 | /telnet-switch.py | a1015b32775a571e5bd5743d408c2c9b9f70a714 | [
"MIT"
] | permissive | noor-fernandez/MPLS-Network-Automation | d696115069a406669c4e7afebb0086f8f4247ccd | 9f6fb6a2ecf0cdd5917898c37deb5f084662bd68 | refs/heads/master | 2020-05-16T02:34:01.922376 | 2019-10-22T15:10:04 | 2019-10-22T15:10:04 | 182,633,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,349 | py | import getpass
import telnetlib
HOST = "localhost"
user = input("Enter your telnet username: ")
password = getpass.getpass()
IPs = open("myswitches")
for idxIP, IP in enumerate(IPs):
idxIP = idxIP + 1
idxIP_as_byte = str.encode(idxIP)
IP = IP.strip()
print("Configuring Switch " + (IP))
HOST = IP
tn = telnetlib.Telnet(HOST)
tn.read_until(b"Username: ")
tn.write(user.encode('ascii') + b"\n")
if password:
tn.read_until(b"Password: ")
tn.write(password.encode('ascii') + b"\n")
tn.write(b"en\n")
tn.write(b"cisco\n")
tn.write(b"conf t\n")
tn.write(b"int lo0\n")
tn.write(b"ip add") + idxIP.encode('ascii') + "." + idxIP.encode('ascii') + "." + idxIP.encode('ascii') + "." + idxIP.encode('ascii') + " 255.255.255.255\n")
tn.write(b"ip ospf 1 area 0\n")
tn.write(b"exit\n")
ports = ["0/0", "0/1", "0/2", "0/3", "1/0", "1/1", "1/2", "1/3", "2/0", "2/1"]
for idxPort, port in enumerate(ports):
tn.write(b"int g") + port.encode('ascii') + "\n")
tn.write(b"ip address 192.168.122.1") + idxIP_as_byte.encode('ascii') + idxIP_as_byte.encode('ascii') + "\n")
tn.write(b"no shut\n")
tn.write(b"ip ospf 1 area 0\n")
tn.write(b"exit\n")
tn.write(b"end\n")
tn.write(b"exit\n")
print(tn.read_all().decode('ascii'))
| [
"noreply@github.com"
] | noor-fernandez.noreply@github.com |
615618476768211b90173af1a529a1f04375dcc1 | 920b9cb23d3883dcc93b1682adfee83099fee826 | /itsm/sla/serializers/__init__.py | 3d55494a7d26dc78b8ca855461729f1a20246260 | [
"MIT",
"LGPL-2.1-or-later",
"LGPL-3.0-only"
] | permissive | TencentBlueKing/bk-itsm | f817fb166248d3059857b57d03e8b5ec1b78ff5b | 2d708bd0d869d391456e0fb8d644af3b9f031acf | refs/heads/master | 2023-08-31T23:42:32.275836 | 2023-08-22T08:17:54 | 2023-08-22T08:17:54 | 391,839,825 | 100 | 86 | MIT | 2023-09-14T08:24:54 | 2021-08-02T06:35:16 | Python | UTF-8 | Python | false | false | 1,767 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-ITSM 蓝鲸流程服务 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-ITSM 蓝鲸流程服务 is licensed under the MIT License.
License for BK-ITSM 蓝鲸流程服务:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from .basic import ModelSerializer # noqa
from .matrix import MatrixUpdateSerializer, PriorityMatrixSerializer # noqa
from .schedule import DaySerializer, ScheduleDayRelationSerializer, ScheduleSerializer # noqa
from .policy import ActionPolicySerializer, PriorityPolicySerializer, SlaSerializer, \
SlaTimerRuleSerializer, TicketHighlightSerializer
| [
"1758504262@qq.com"
] | 1758504262@qq.com |
a5952935415bf5278dae22b843c4de6b4d6ccc8d | f18c2e719076d3bc10dd8b4a266cc7b729d782fd | /TimePrediction.py | 819b478cf1110c81efbb77e3d8a1055df42e1ac9 | [] | no_license | alexcasella/Streamlit | 60ae03e5d3247a3954f4dac98e936f65e836a7e4 | 156452089894125889dcc3bf3814b9a1bd358e92 | refs/heads/main | 2023-05-06T04:53:40.405706 | 2021-05-30T08:46:49 | 2021-05-30T08:46:49 | 371,083,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,088 | py | # Import the relevant packages
import streamlit as st
import pandas as pd
import numpy as np
# Import the prediction function
from Joseph_alg import ult_pred
from Joseph_alg import get_values
# Import the dataframe
data = pd.read_csv('data18m.csv', index_col=0)
smooth = pd.read_csv('Smooth_data18m_pure.csv', index_col=0)
nonsmooth = pd.read_csv('nonsmooth_data18m_pure.csv', index_col=0)
smooth_data = get_values(smooth)
nonsmooth_data = get_values(nonsmooth)
# Write the headline
st.write("""
# Time Prediction Algorithm
Enter Brief Description Here
""")
## Set up the sidebar
# Form containing the inputs
form = st.sidebar.form('my_form')
# Header
form.header('User Input Values')
No_Of_Months = form.slider('Select No Of Months To Predict',1,12,6)
# Initalize Game_Num for the if statement
Game_Num = 0
Game_Num = int(form.text_input('Enter A Game Number From The List Below',3597,max_chars=4,help = 'Enter a number between 0 and ' + str(len(data)-1)))
Game_Name = data.iloc[Game_Num].Name
data_tmp = {'Game Number': Game_Num,
'Game': Game_Name,
'Number of Months': No_Of_Months}
df = pd.DataFrame(data_tmp, index=[0])
data_1 = pd.DataFrame(data,columns = ['Name']).sort_values('Name')
form.write(data_1.style.set_properties(**{'text-align': 'left'}))
# Now add a submit button to the form:
form.form_submit_button("Submit")
## Set up the output
# Print the choice made
st.subheader('User\'s choice:')
st.write(df)
# ### Imaginary Examples:
# In[188]:
name = df['Game'].values[0]
months = df['Number of Months'].values[0]
game = nonsmooth_data[data.loc[data['Name']==name].index[0]][:12]
real_data = nonsmooth_data[data.loc[data['Name']==name].index[0]][11:12+months]
subdata = nonsmooth_data.copy()
del subdata[data.loc[data['Name']==name].index[0]]
smooth_subdata = smooth_data.copy()
del smooth_subdata[data.loc[data['Name']==name].index[0]]
# Print the closest three games
[pred, close_index,close_games] = ult_pred(game, train = subdata, smooth_train = smooth_subdata, real_data = real_data, horizon = months)
| [
"noreply@github.com"
] | alexcasella.noreply@github.com |
0a70e9fe37406e80f5bbedab647d73c4114377f1 | 84ffcfdc27941b63ec86fd731dd7a78436eec7e2 | /Python/Assignment_2/cycling.py | a9352f5bb8de23b157cbda5c23987ffd965e3131 | [] | no_license | findingRellik/CSCI1300_intro_programming | 1b4955d81cf513d25a347ef3bdef244f635483a1 | ee0308b163faaa66e8d1f55d3e759fac04ed2848 | refs/heads/master | 2020-12-31T05:24:00.594659 | 2016-05-03T10:45:50 | 2016-05-03T10:45:50 | 57,960,575 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,326 | py | #Assignment 2 Football problem
#Recitation 10 am ECCR 235
#Austin Metzner
print ("Cycling: power and energy")
m = float(input("Enter mass of cyclist (kg): "))
mb = float(input("Enter mass of bike (kg): "))
v = float(input("Enter velocity (m/s): "))
g = 9.8
k = .18
cr = .001
cf = float(input("Enter CFdraft: "))
#assuming the avg. between .6 and .8
# Not sure what to do for CFdraft
#cfq = input("Is cyclist at front of pack?(y/n): ")
#if cfq == "y" or "Y":
# cf = 1.0
# elif cfq == "n" or "N":
# print (" ")
# else:
# print ("Not a valid input")
#Pair
pa = k*cf*v**3
#Proll
pr = cr*g*(m+mb)*v
#Psec
ps = pr+pa
####print ("Total power output per second: ", "%.2f" % ps)
d = float(input("Distance needed to travel (km): "))
#timeTravel in seconds
tt = ((d*1000)/v)
#Energytotal = Psec * timeTravel = et
et = round(ps*tt)
####print ("Total energy output is: ", et)
#Energy per minute
tt = int((d*1000)/(v*60))
#to get timeTraveled in minutes
et = 0
x = 0
import random
while x < tt:
cf = random.uniform(0.5,1)
pa = k*cf*v**3
#recalling pa to be inside while loop
pm = round(ps*60)
# energy = (pm*x)
#pm is ps per min
print ("Energy at ", x, " minute: ", energy)
# energy = round(pm * tt)
et += energy
print ("Total energy at ", x, " minute: ", et, "Joules")
x += 1
# if x == tt:
# pause ()
| [
"aume6638@colorado.edu"
] | aume6638@colorado.edu |
5c07ea3c97e275d3dfeb730a586531d4e2216def | 37fff9882c1dbaea8e1a18eeee9865b7f7f1fc12 | /Websockets/RT_Analytics/analytics/urls.py | b32993ebdbfd484ec5a5401c16cd7812aeedbc98 | [] | no_license | Addy209/GraphQL | f0e8297b7224a42eb68a5cc62434b9dd8034cf1f | 69c1dba3ee048f57dbe2ae8983519c86bdce03d9 | refs/heads/main | 2023-06-11T04:50:22.791456 | 2021-06-21T00:46:52 | 2021-06-21T00:46:52 | 378,825,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | from django.contrib import admin
from django.urls import path, include
from .views import GetTestData
urlpatterns = [
path('',GetTestData.as_view())
] | [
"46429259+Addy209@users.noreply.github.com"
] | 46429259+Addy209@users.noreply.github.com |
0b15c71947ca2f3e1c0bafc476731d0397432994 | 03820593096f960ad4e568339adbfeec596a3703 | /odoo13/openacademy/models/models.py | a8be4332ad90bb8907fb490c838dafd20da9765f | [] | no_license | Jeiikot/Odoo | 0f3cd1fbf4eb2fe5965ff8b8133018c3d20ddddd | 57245ebc9e0cc1fdbdae1e994e10f24526ef78fc | refs/heads/master | 2023-01-19T23:18:28.114722 | 2020-11-25T17:50:43 | 2020-11-25T17:50:43 | 263,752,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,428 | py | # -*- coding: utf-8 -*-
from datetime import timedelta
from odoo import models, fields, api, exceptions, _
class Course(models.Model):
_name = 'openacademy.course'
_description = "OpenAcademy Courses"
name = fields.Char(string="Title", required=True)
description = fields.Text()
responsible_id = fields.Many2one('res.users',
ondelete='set null', string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id',
string="Sessions")
def copy(self, default=None):
default = dict(default or {})
copied_count = self.search_count(
[('name', '=like', _(u"Copy of {}%").format(self.name))])
if not copied_count:
new_name = _(u"Copy of {}").format(self.name)
else:
new_name = _(u"Copy of {} ({})").format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
class Session(models.Model):
_name = 'openacademy.session'
_description = "OpenAcademy Sessions"
name = fields.Char(required=True)
start_date = fields.Date(default=fields.Date.today)
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
active = fields.Boolean(default=True)
color = fields.Integer()
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
end_date = fields.Date(string="End Date", store=True, compute='_get_end_date')
# , inverse='_set_end_date')
attendees_count = fields.Integer(
string="Attendees count", compute='_get_attendees_count', store=True)
@api.depends('seats', 'attendee_ids')
def _taken_seats(self):
for r in self:
if not r.seats:
r.taken_seats = 0.0
else:
r.taken_seats = 100.0 * len(r.attendee_ids) / r.seats
@api.onchange('seats', 'attendee_ids')
def _verify_valid_seats(self):
if self.seats < 0:
return {
'warning': {
'title': _("Incorrect 'seats' value"),
'message': _("The number of available seats may not be negative"),
},
}
if self.seats < len(self.attendee_ids):
return {
'warning': {
'title': _("Too many attendees"),
'message': _("Increase seats or remove excess attendees"),
},
}
@api.depends('start_date', 'duration')
def _get_end_date(self):
for r in self:
if not (r.start_date and r.duration):
r.end_date = r.start_date
continue
# Add duration to start_date, but: Monday + 5 days = Saturday, so
# subtract one second to get on Friday instead
duration = timedelta(days=r.duration, seconds=-1)
r.end_date = r.start_date + duration + timedelta(days=1)
# def _set_end_date(self):
# for r in self:
# if not (r.start_date and r.end_date):
# continue
# # Compute the difference between dates, but: Friday - Monday = 4 days,
# # so add one day to get 5 days instead
# r.duration = (r.end_date - r.start_date).days + 1
@api.depends('attendee_ids')
def _get_attendees_count(self):
for r in self:
r.attendees_count = len(r.attendee_ids)
@api.constrains('instructor_id', 'attendee_ids')
def _check_instructor_not_in_attendees(self):
for r in self:
if r.instructor_id and r.instructor_id in r.attendee_ids:
raise exceptions.ValidationError(_("A session's instructor can't be an attendee")) | [
"jeiikot21@gmail.com"
] | jeiikot21@gmail.com |
4c71c6e6836042b43c21a50c00f75b8b6bfa4e37 | 200de187f734493a026dba6df99eb53acd7ca789 | /node_modules/watchify/node_modules/chokidar/node_modules/fsevents/build/config.gypi | 0859da99c98c3344d77531bb949e0ad8373570cc | [
"MIT"
] | permissive | jeremybdavis/flux-todos-tutorial | 5515d892cd02e7dde794462bd2e48aa92c401eb9 | dd3e1678dc7bd44aadead0a93c8369defab11555 | refs/heads/master | 2021-01-15T12:26:20.012155 | 2015-08-20T21:15:48 | 2015-08-20T21:15:48 | 41,118,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,522 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 1,
"host_arch": "x64",
"icu_small": "false",
"node_install_npm": "false",
"node_prefix": "/usr/local/Cellar/node/0.12.5",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_mdb": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_no_asm": 0,
"python": "/usr/local/opt/python/bin/python2.7",
"target_arch": "x64",
"uv_library": "static_library",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"nodedir": "/Users/jeremybdavis/.node-gyp/0.12.5",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"shell": "/bin/zsh",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"npat": "",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"access": "",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/jeremybdavis/.npm-init.js",
"userconfig": "/Users/jeremybdavis/.npmrc",
"node_version": "0.12.5",
"user": "",
"editor": "vi",
"save": "",
"tag": "latest",
"global": "",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/Users/jeremybdavis/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/2.11.2 node/v0.12.5 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"init_version": "1.0.0",
"umask": "0022",
"git": "git",
"init_author_name": "",
"scope": "",
"onload_script": "",
"tmp": "/var/folders/7j/f9vzxbns0bl0bptvxwz8b13m0000gn/T",
"unsafe_perm": "true",
"prefix": "/usr/local",
"link": ""
}
}
| [
"jdavis129@gmail.com"
] | jdavis129@gmail.com |
78f63b79d9b2c2005e2d3836eeddcc6611c7ef1d | 07f80a079277a130bb4c7725ba6a220b16338f30 | /Python_Dojo/Movie_Picker.py | 664030eb16130eeb9897aec569deeeb22c6017bf | [] | no_license | jakeT93/improved-enigma | e910bf6c776e5fe30cce7f2200072f453d5af2f0 | d4167542ee4afa00082d810e6701204daa1ca969 | refs/heads/master | 2021-09-05T10:53:44.768552 | 2018-01-26T16:28:22 | 2018-01-26T16:28:22 | 115,924,086 | 0 | 0 | null | 2018-01-01T18:06:43 | 2018-01-01T14:25:04 | Python | UTF-8 | Python | false | false | 256 | py | #Movie_Picker
#Selects a random movie or video file within the given constraints
import os
videoFilesRoot = ""
videoFilesRoot = raw_input("Please enter general path : ")
videoFilesRoot = videoFilesRoot.replace("'", "")
print(os.listdir(videoFilesRoot)
| [
"kirenjacobthomas@gmail.com"
] | kirenjacobthomas@gmail.com |
e2ca6e0e138035c73bbf4db3ef1d749afc55e22a | 616c6c525b87281ee81aa3e1985947ead0da78ab | /assistant.py | 2a0f4b5da989138084a74452ef5dd391197d645e | [] | no_license | vedant2608/Devs-console | 14cbfcf98d98e564235ec36ccd527b311708429f | 1bab7dd4ffe433d9f5d0b9a05125f75ede3b9aa3 | refs/heads/main | 2023-02-18T20:49:21.113055 | 2021-01-22T06:07:24 | 2021-01-22T06:07:24 | 330,216,801 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 22,618 | py | import os
from datetime import *
import sys
import subprocess
import pyttsx3
import pyfiglet
import screen_brightness_control as sbc
engine = pyttsx3.init()
engine.setProperty('rate', 190)
def run(cmd):
completed = subprocess.run(["powershell", "-Command", f"{cmd} 2>$logs.exe"], capture_output=True)
return completed
def speak(audio):
engine.say(audio)
engine.runAndWait()
class command_list():
def __init__(self, command):
self.command = command
def execution(self):
# Sleep commands
sleep_commands = list()
with open("commands\\sleep_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
sleep_commands.append(i)
# Self introduction
introduction_commands = list()
with open("commands\\introduction_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
introduction_commands.append(i)
# greeting command
greeting_commands = list()
with open("commands\\greeting_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
greeting_commands.append(i)
# Date-asking command
date_commands = []
with open("commands\\date_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
date_commands.append(i)
# Open for camera
camera_commands = []
with open("commands\\camera_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
camera_commands.append(i)
# Open the browsers
browsers_command = []
with open("commands\\browsers_command.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
browsers_command.append(i)
# Opening notepad
open_file_commands = []
with open("commands\\file_opening_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
open_file_commands.append(i)
# To show list of connected wifi devices
connected_wifi_commands = []
with open("commands\\connected_wifi_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
connected_wifi_commands.append(i)
# To show the password of connected and selected wifi
password_of_connected = []
with open("commands\\connected_wifi_passwords_command.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
password_of_connected.append(i)
# To generate battery report
battery_report = []
with open("commands\\battery_report_commands.exe", "r") as command:
tmp_lst = [i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
battery_report.append(i)
#For changing brightness
brightness=[]
with open("commands\\brightness_commands.exe","r") as command:
tmp_lst=[i.split("\n")[0] for i in command.readlines()]
for i in tmp_lst:
brightness.append(i)
# Checking user interfaces or command execution
# sleep command
if self.command in sleep_commands:
return 'exit'
# introduction command
if self.command in introduction_commands:
return 'introduction'
# greeting command
if self.command in greeting_commands:
return 'greeted'
# date command
if self.command in date_commands:
return 'todays_date'
# open camera
if self.command in camera_commands:
return 'opened camera'
# browsers command
if self.command in browsers_command:
return "opening browser"
# notepad command
if self.command in open_file_commands:
return 'opening file'
# connected_wifi_commands
if self.command in connected_wifi_commands:
return 'listof_connected_wifi'
# Password_connected_wifi_command
if self.command in password_of_connected:
return 'connected_wifi_passwords'
# Battery report
if self.command in battery_report:
return 'generated_battery_report'
# Help command
if self.command == 'help-dev':
return 'helping'
#brightness command
if self.command in brightness:
return 'bright'
# LAST SCRIPT
else:
system_command_code = run(self.command)
if system_command_code.returncode == 0:
subprocess.call(f"powershell -command {self.command} 2>$logs.exe", shell=True)
return "sys_command_executed"
else:
while True:
choice = input("Is this a new keyword you want to add for specific task[y/n]:")
if choice == 'y':
print('''
Please Enter category number from for commands.
The categories are-:
1.To exit the console
2.To introduce dev to yourself
3.To greet dev
4.To ask for today's date
5.To open the camera
6.To open specific website in browser
7.To open notepad
8.To show list of previsiously connected network
9.To show password of previsiously connected network
10.To generate battery report
11.To alter brightness''')
category = input("Enter choice number:")
if category == '1':
sleep_coordinator = open("commands\\sleep_commands.exe", "a+")
sleep_coordinator.write(self.command + "\n")
sleep_coordinator.close()
speak(
f"OK I will remember this and You can now use {self.command} command for terminating dev")
engine.runAndWait()
return 'exit'
elif category == '2':
introduction_coordinator = open("commands\\introduction_commands.exe", "a+")
introduction_coordinator.write(self.command + "\n")
introduction_coordinator.close()
speak(
f"Ok I will remember this and You can now use {self.command} command for introducing dev to yourself")
return 'introduction'
elif category == '3':
greet_coordinator = open("commands\\greeting_commands.exe", "a+")
greet_coordinator.write(self.command + "\n")
greet_coordinator.close()
speak(f"Ok I will remember this ,You can now use {self.command} command for greeting dev")
return 'greeted'
elif category == '4':
date_coordinator = open("commands\\date_commands.exe", "a+")
date_coordinator.write(self.command + "\n")
date_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command for greeting time from dev")
return 'todays_date'
elif category == '5':
camera_coordinator = open("commands\\camera_commands.exe", "a+")
camera_coordinator.write(self.command + "\n")
camera_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command to open your camera")
return 'opened camera'
elif category == '6':
broswer_coordinator = open("commands\\browsers_command.exe", "a+")
broswer_coordinator.write(self.command + "\n")
broswer_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command to open your broswer")
return 'opening browser'
elif category == '7':
notepad_coordinator = open("commands/file_opening_commands.exe", "a+")
notepad_coordinator.write(self.command + "\n")
notepad_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command to open files")
return 'opening file'
elif category == '8':
network_coordinator = open("commands\\connected_wifi_commands.exe", "a+")
network_coordinator.write(self.command + "\n")
network_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command to Show the list of connected wifi networks")
return 'listof_connected_wifi'
elif category == '9':
password_coordinator = open("commands\\connected_wifi_passwords_command.exe", "a+")
password_coordinator.write(self.command + "\n")
password_coordinator.close()
speak(
f"Ok I will remember this ,You can now use {self.command} command to Show the password of selected wifi networks")
return "connected_wifi_passwords"
elif category == '10':
report_coordinator = open("commands\\battery_report_commands.exe", "a+")
report_coordinator.write(self.command + "\n")
report_coordinator.close()
speak(f"Ok I will remember this, You can now use {self.command} to generate battery report")
return "generated_battery_report"
elif category=='11':
battery_coordinator=open("commands\\brightness_commands.exe","a+")
battery_coordinator.write(self.command+"\n")
battery_coordinator.close()
speak(f"Ok I will remember this, You can now use {self.command} to alter brightness")
return 'bright'
elif choice == 'n':
return "sys_command_not_executed"
else:
continue
# Untill we get exit command or ctrl+c keyboard interript
try:
while True:
try:
instruction = input("<<DEV>>")
command = command_list(instruction)
output = command.execution()
# exit check
if 'exit' == output:
confirm = input("Terminate DEV's console[y/n]:")
if confirm.lower() == 'y':
speak("Thank you for using DEV's console")
hour = datetime.now().hour
if hour >= 6 and hour < 12:
try:
speak("Have a good day Buddy")
except:
break
elif hour >= 12 and hour <= 18:
try:
speak("Have a good noon Buddy")
except:
break
elif hour > 18:
try:
speak("Good night Buddy")
except:
break
break
elif output == 'introduction': # introduction check
print(pyfiglet.figlet_format("D E V"))
print('I am DEV. A Console made with python for all windows user')
elif output == "sys_command_not_executed": # INVALID system commands check
print("Sounds like entered system command in wrong way")
elif output == 'greeted': # greeting commands check
print("Hi Buddy")
elif output == 'todays_date': # date command check
tdy_date = date.today()
print(f"Today's date in yyyy-mm-dd format is {tdy_date}")
elif output == 'opened camera': # Camera command check
run("start microsoft.windows.camera:")
elif output == "opening browser": # Open browsers with choice
print("With which browser you want to open your website")
print("1.Microsoft Edge")
print("2.Google Chrome")
print("3.Firefox")
print("4.Internet Explorer")
browser_choice = int(input("Enter your choice number:"))
url_choice = input("Enter your url with perfect link:")
# Microsoft edge
if browser_choice == 1:
if url_choice.startswith('https:\\') or url_choice.startswith('http:\\'):
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('msedge','{url_choice}')'''],
capture_output=True)
else:
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('msedge','https:\\{url_choice}')'''],
capture_output=True)
# Google chrome
if browser_choice == 2:
if url_choice.startswith('https:\\') or url_choice.startswith('http:\\'):
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('chrome','{url_choice}')'''],
capture_output=True)
else:
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('chrome','https:\\{url_choice}')'''],
capture_output=True)
# Firefox
if browser_choice == 3:
if url_choice.startswith('https:\\') or url_choice.startswith('http:\\'):
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('firefox','{url_choice}')'''],
capture_output=True)
else:
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('firefox','https:\\{url_choice}')'''],
capture_output=True)
# Internet Explorer
if browser_choice == 4:
if url_choice.startswith('https:\\') or url_choice.startswith('http:\\'):
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('iexplore','{url_choice}')'''],
capture_output=True)
else:
completed = subprocess.run(["powershell", "-Command",
f'''[system.Diagnostics.Process]::Start('iexplore','https:\\{url_choice}')'''],
capture_output=True)
elif output == 'opening file': # open files with notepad
choice = input("Do you want to open a note with specific file_name[y/n]:")
if choice == 'y':
path = input("Enter the path[Leave blank if file is in Dev's console location]:")
file_name = input("Enter the file name with extension also [e.g=> myfile.txt]:")
if len(path) == 0:
path = "..\\"
complete_path = path + "\\" + file_name
run(f'''notepad {complete_path}''')
elif choice == 'n':
run('notepad')
elif output == 'listof_connected_wifi': # To show list of all connected wifi
subprocess.run("netsh wlan show profiles", capture_output=False)
elif output == 'connected_wifi_passwords': # To show the password of selected conneced wifi
print("Here is the list of connected wifi")
subprocess.run("netsh wlan show profiles", capture_output=False)
network_name = input('''
Enter the wifi name which is in user profiles [Enter any name which is infront of /"All users profile :/"].
Enter the name of your wifi: ''').strip()
try:
print("Password is =>", subprocess.check_output(
["netsh", "wlan", "show", "profiles", network_name, "key=clear"]).decode().replace('\r',
" ").split(
"\n")[32].split(':')[1].strip())
except subprocess.CalledProcessError as cpe:
print("Password is =>", subprocess.check_output(
['netsh', 'wlan', 'show', 'profiles', f'"{network_name}"', 'key=clear']).decode().replace('\r',
" ").split(
"\n")[32].split(':')[1].strip())
except Exception:
print("Password is =>", subprocess.check_output(
[f'netsh wlan show profiles {network_name} key=clear']).decode().replace('\r', " ").split(
"\n")[32].split(':')[1].strip())
elif output == "generated_battery_report": # To generate battery report
flag = os.path.isfile("battery-report.html")
if (flag == True):
speak("Opening the battery report file")
run("start battery-report.html")
else:
speak("Generating Battery report")
run("powercfg /batteryreport")
run("start battery-report.html")
elif output=='helping':
print("Do read the readme file from the repository to get the help")
speak("I am dev console , Which has the functionality to execute system commands in kind of human language. Currently I can execute only few tasks ,but with the help of contributors ,I will be able to execute more soon! Execute any system command or type hello here to get DEV in action!")
elif output=='bright':
current_brightness = sbc.get_brightness()
if 40<current_brightness[0]<=60: speak("Curent brightness is okay! Want to alter it?")
elif current_brightness[0]>=80: speak("Brightness is too much! Want to alter it?")
elif current_brightness[0]<=30:speak("Brightness is too low! Want to alter it?")
decision=input("Alter Brightness[y/n]:")
if(decision=='y'):
battery_percentage=input("Enter the brightness percentage. It will increased or decreased according to percenatge:")
if battery_percentage.endswith("%"):
battery_percentage=battery_percentage.replace("%"," ")
sbc.set_brightness(int(battery_percentage))
else:
pass
except subprocess.CalledProcessError as e:
speak("Dev can not work over this command!")
except KeyboardInterrupt as e:
confirm = input("Terminate DEV's console[y/n]:")
if confirm.lower() == 'y':
engine.say("Roger that!")
hour = datetime.now().hour
if hour >= 6 and hour < 12:
try:
speak("Have a good day buddy")
sys.exit(0)
except:
break
elif hour >= 12 and hour < 18:
try:
speak("Have a good noon buddy")
sys.exit(0)
except:
break
elif hour > 18:
try:
speak("Good night buddy! Sweet dreams")
sys.exit(0)
except:
break
except subprocess.CalledProcessError as e:
speak("Dev can not work over this command!")
except KeyboardInterrupt as e: # If pressed ctrl+c
confirm = input("Terminate DEV's console[y/n]:")
if confirm.lower() == 'y':
engine.say("Roger that!")
hour = datetime.now().hour
if hour >= 6 and hour < 12:
try:
speak("Have a good day buddy")
sys.exit(0)
except:
pass
elif hour >= 12 and hour < 18:
try:
speak("Have a good noon buddy")
sys.exit(0)
except:
pass
elif hour > 18:
try:
speak("Good night buddy! Sweet dreams")
sys.exit(0)
except:
pass
| [
"vedant.datey2001@gmail.com"
] | vedant.datey2001@gmail.com |
040f3f198d4bd164c0c895c5125770abc31e4f36 | e08ad592722f6c737551fb3d9b6fcf0b7bbeb00c | /data/process_data.py | 67b144adae437eba3f6af3c5a8098ba0afc94a61 | [] | no_license | DataWorks71/disaster_response_pipelines | 27dcb225404f45e37fe19dc59010376d3ce8ff7f | b439b035b3cf761d22650ac98b1f90c82a9aec9a | refs/heads/master | 2020-04-04T13:46:36.600898 | 2018-11-05T18:35:41 | 2018-11-05T18:35:41 | 155,532,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,096 | py | import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    """Load the messages and categories files and merge them on ``id``.

    Args:
        messages_filepath: str. The relative filepath to the message file.
        categories_filepath: str. The relative filepath to the categories file.

    Returns:
        df: pandas.DataFrame. Inner join of both files on the ``id`` column.
    """
    messages = pd.read_csv(messages_filepath)
    categories = pd.read_csv(categories_filepath)
    print(f'Number of records read...\n MESSAGES: {len(messages)}\n CATEGORIES: {len(categories)}')
    # An inner merge keeps only the ids present in both files.
    df = messages.merge(categories, how='inner', on='id')
    print(f'Number of merged records loaded...\n MERGED(messages & categories): {len(df)}')
    return df
def clean_data(df):
    """Clean the merged messages/categories dataset.

    Performs, in order:
      * expansion of the ``categories`` string into one numeric column
        per category,
      * removal of exact duplicate rows,
      * removal of rows whose message is the unusable ``#NAME?`` token,
      * removal of rows whose ``related`` flag is not 0 or 1,
      * removal of every row whose ``id`` appears more than once.

    Args:
        df: pandas.DataFrame. Dataset of messages and categories.

    Returns:
        df: pandas.DataFrame. Cleaned dataset of messages and categories.
    """
    # 'categories' holds strings such as "related-1;request-0;...".
    # Split on ';' so every "name-digit" token gets its own column.
    expanded = df['categories'].str.split(pat=';', n=-1, expand=True)
    # The text before the '-' in the first row supplies the column names.
    first_row = expanded.iloc[0]
    expanded.columns = [str(token).split(sep='-')[0] for token in first_row]

    # Keep only the trailing digit of each token and store it as an int
    # (e.g. "related-1" -> 1).
    for name in expanded:
        expanded[name] = expanded[name].str[-1].astype(int)

    # Replace the raw string column with the expanded numeric columns.
    df = df.drop(['categories'], axis=1).join(expanded, how='inner')
    print('`categories` was split into separate category columns')

    # ---- drop exact duplicate rows ------------------------------------
    n_duplicates = df.duplicated().sum()
    n_rows_total = len(df)
    df = df[~df.duplicated()]
    n_rows = len(df)
    # Sanity check that row counts add up after the deletion.
    if n_duplicates > 0:
        if n_duplicates + n_rows == n_rows_total:
            print('Delete duplicate rows: CONFIRMED')
        else:
            print('Delete duplicate rows: NOT CONFIRMED')
    else:
        print('No duplicate rows detected')
    print(f'Number of duplicate rows deleted...\n DUPLICATED ROWS: {n_duplicates} of {n_rows_total} deleted (rows left: {n_rows})')

    # ---- drop messages considered non-usable ('#NAME?' artefacts) -----
    n_rows_total = len(df)
    df = df[df['message'] != '#NAME?']
    n_rows = len(df)
    print(f'Number of rows deleted...\n `message`==`#NAME?`: {n_rows_total - n_rows} of {n_rows_total} deleted (rows left: {n_rows})')

    # ---- drop non-labeled observations ---------------------------------
    # related == 2 marks non-interpreted / non-translated messages that
    # carry no positive category labels.
    n_rows_total = len(df)
    df = df[df['related'].isin([0, 1])]
    n_rows = len(df)
    print(f'Number of rows deleted...\n `related` not in [0,1]: {n_rows_total - n_rows} of {n_rows_total} deleted (rows left: {n_rows})')

    # ---- drop duplicated ids entirely (keep none of the copies) --------
    n_duplicates = df.duplicated('id', keep=False).sum()
    n_rows = len(df)
    df = df[~df.duplicated('id', keep=False)]
    print(f'Number of rows deleted...\n DUPLICATED `id`s: {n_duplicates} of {n_rows} deleted (rows left: {n_rows - n_duplicates})')

    return df
def save_data(df, database_filename):
    """Persist the cleaned dataset to an SQLite database.

    The data is written to the ``ModelTrainData`` table. An existing table
    is replaced, so the pipeline can be re-run without ``to_sql`` raising
    ``ValueError: Table 'ModelTrainData' already exists`` (the previous
    behavior, since ``if_exists`` defaults to ``'fail'``).

    Args:
        df: pandas.DataFrame. Cleaned dataset to store.
        database_filename: str. Relative filepath of the SQLite database.
    """
    engine = create_engine('sqlite:///' + database_filename)
    my_table_name = 'ModelTrainData'
    # 'replace' keeps this function idempotent across pipeline re-runs.
    df.to_sql(my_table_name, engine, if_exists='replace')
def main():
    """Run the ETL pipeline: load, clean and store the disaster data.

    Expects exactly three positional command-line arguments: the messages
    CSV path, the categories CSV path and the SQLite database path.
    Prints a usage message when they are missing.
    """
    if len(sys.argv) != 4:
        # Guard clause: explain the expected invocation and bail out.
        print('Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')
        return

    messages_filepath, categories_filepath, database_filepath = sys.argv[1:]

    print(f'Loading data...\n MESSAGES: {messages_filepath}\n CATEGORIES: {categories_filepath}')
    df = load_data(messages_filepath, categories_filepath)

    print('Cleaning data...')
    df = clean_data(df)

    print(f'Saving data...\n DATABASE: {database_filepath}')
    save_data(df, database_filepath)

    print('Cleaned data saved to database!')


if __name__ == '__main__':
    main()
"dataworks@hafenbradl.com"
] | dataworks@hafenbradl.com |
6c25b7082d8b57a27da90bf5fec1dd716d6b95d2 | 85d93f1a93e33cc84f19c1c71bce67d34f15c4ad | /src/utils.py | d0d22e0f1fa395bef796dc6fd5cf24c50f324288 | [
"MIT"
] | permissive | tyburam/neural-cryptography-pytorch | a72cd96d9e8fc153e03aa7231baa1eb0427edb80 | 0e102e7199e196010ef6d9391b15d71b07897ad9 | refs/heads/master | 2020-04-19T16:17:30.838673 | 2019-03-23T18:22:19 | 2019-03-23T18:22:19 | 168,300,016 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | import numpy as np
import torch.nn as nn
from src.config import *
# Function to generate n random messages and keys
def gen_data(n=BATCH_SIZE, msg_len=MSG_LEN, key_len=KEY_LEN):
    """Generate a batch of random messages and keys.

    Every element is a random bit drawn from {0, 1} and remapped to
    {-1, 1} (the ``* 2 - 1`` transform).

    Args:
        n: number of message/key pairs to generate.
        msg_len: number of elements per message.
        key_len: number of elements per key.

    Returns:
        Tuple ``(messages, keys)`` of numpy arrays with shapes
        ``(n, msg_len)`` and ``(n, key_len)``.
    """
    messages = np.random.randint(0, 2, size=(n, msg_len)) * 2 - 1
    keys = np.random.randint(0, 2, size=(n, key_len)) * 2 - 1
    return messages, keys
def init_xavier(m):
    """Xavier-(Glorot-)normal weight initialisation hook.

    Intended for use with ``Module.apply``: re-initialises the weights of
    ``nn.Linear`` and ``nn.Conv1d`` layers in place and leaves every other
    module untouched. Only exact ``type`` matches are initialised
    (subclasses of these layers are not matched by the ``type`` test).
    """
    if type(m) not in (nn.Linear, nn.Conv1d):
        return
    nn.init.xavier_normal_(m.weight.data)
| [
"tyburam@hotmail.com"
] | tyburam@hotmail.com |
ab1f8c6225bdbd28011bbdd000a104075aa52bc0 | 532aab843a9b50cd216cf65846aa22f72377f5b4 | /grid_plume/scripts/merge_grids.py | 6fec8a74228ce888645d3db2cf619a3da175ff3e | [
"LicenseRef-scancode-public-domain",
"Unlicense"
] | permissive | lakiSL/GnomeTools | b3c3513823ebebcfb32a81d107a4395aab6dab2f | 1f3c635f8629a8c9119d422bf743091a647d8764 | refs/heads/master | 2020-05-20T16:14:08.627844 | 2019-04-20T00:08:45 | 2019-04-20T00:08:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,948 | py | #!usr/bin/env python
"""
Script to add up a bunch of plume files into one big file
Only handles "flat" grids at the moment
"""
from __future__ import division #Change the / operator to ensure true division throughout (Zelenke).
import sys, glob #Replacement for commented-out line below which imported unused modules (Zelenke).
#import sys, os, glob, shutil
import numpy as np
import netCDF4 as nc
def create_new_grid_file(infilename, outfilename):
"""
creates a new grid file that matches the one passed in.
"""
print "opening:", infilename
nc_old = nc.Dataset(infilename)
#print nc_old
nc_new = nc.Dataset(outfilename, "w", format = "NETCDF3_CLASSIC" )
nc_new.createDimension( "lon", len(nc_old.dimensions["lon"]) )
nc_new.createDimension( "lat", len(nc_old.dimensions["lat"]) )
nc_new.createDimension( "time", None )
##fixme: This should be copied, rather than hard coded!
nc_longitude = nc_new.createVariable(
"lon",
np.float32,
"lon",
zlib = True,
)
nc_longitude.long_name = "Longitude"
nc_longitude.units = "degrees_east"
nc_longitude.standard_name = "longitude"
nc_longitude[ : ] = nc_old.variables['lon'][:]
nc_latitude = nc_new.createVariable(
"lat",
np.float32,
"lat",
zlib = True,
)
nc_latitude.long_name = "Latitude"
nc_latitude.units = "degrees_north"
nc_latitude.standard_name = "latitude"
nc_latitude[ : ] = nc_old.variables['lat'][:]
nc_time = nc_new.createVariable(
"time",
np.float32,
"time",
zlib = True,
)
nc_time.long_name = "Time"
nc_time.units = nc_old.variables['time'].units
nc_time.base_date = nc_old.variables['time'].base_date
nc_time.standard_name = "time"
nc_time[ : ] = nc_old.variables['time'][:]
nc_grid = nc_new.createVariable(
"concentration",
np.float32,
( "time", "lat", "lon" ),
zlib = True,
fill_value = nc._default_fillvals[ "f4" ],
)
nc_grid.long_name = "Mass concentration of total hydrocarbons in seawater"
nc_grid.units = nc_old.variables['concentration'].units
nc_grid.standard_name = "mass_concentration_of_total_hydrocarbons_in_sea_water"
nc_grid[:] = nc_old.variables['concentration'][:]
setattr( nc_new, "Conventions", "CF-1.4" )
setattr( nc_new, "title", "Oil Plume Concentration Grid" )
setattr( nc_new, "institution", "NOAA/NOS/ERD/TSSB" )
setattr( nc_new, "references", "Chris.Barker@noaa.gov" )
return nc_new
if __name__ == "__main__":
    # Usage: merge_grids.py <input-glob-pattern> <output-file>
    outfile = sys.argv[2]
    files = glob.glob(sys.argv[1])
    # Seed the output file as a full copy of the first input file.
    # NOTE(review): files[0] is copied into the output here AND visited
    # again by the loop below, so its concentrations look double counted --
    # confirm whether the loop was meant to iterate files[1:].
    nc1 = create_new_grid_file(files[0], outfile)
    # create a numpy array:
    conc1 = nc1.variables['concentration'][:]
    time1 = nc1.variables['time']
    # round times to tenths of an hour!
    time1[:] = np.round(time1[:], 1)
    for infile in files:
        print "processing :", infile
        nc2 = nc.Dataset(infile)
        conc2 = nc2.variables['concentration'][:] # now a numpy array
        # loop through time:
        out_times = nc1.variables['time'][:]
        for i, t in enumerate(nc2.variables['time'][:]):
            t = round(t,1)
            # Indices of the output time axis matching this timestep.
            t_index = np.argwhere(t == out_times)
            if len(t_index) == 1: # there is a match: add it up
                conc1[t_index[0],:,:] = conc1[t_index[0],:,:] + conc2[i,:,:]
            else: #not a match add a time:
                # Append a brand-new timestep at the end of the grid.
                ind = conc1.shape[0]
                if t <= time1[-1]:
                    # New timesteps may only be appended in increasing
                    # time order; anything else is a fatal input error.
                    print "TIME OUT of ORDER!!!"
                    print time1[:]
                    print t
                    raise Exception("Time is out of order")
                time1[ind] = t
                conc1 = np.concatenate((conc1, conc2[i:i+1,:,:]), axis=0)
                conc1[ind,:,:] = conc2[i,:,:]
    # Write the accumulated concentrations back into the file and flush.
    conc1 = nc1.variables['concentration'][:] = conc1
    nc1.close()
| [
"jasmine.sandhu@noaa.gov"
] | jasmine.sandhu@noaa.gov |
eae46453ad899822fdd9abdd0e56f590c67f8a01 | c0258e00834a4e0963169156f6a3f7667d80c156 | /myApp/migrations/0014_auto_20200519_1829.py | 96e0bc35498c1772416660bd1422810975e04d0d | [] | no_license | wajeshubham/django-rest-framework-api | ad040978f886461f4314f52ff261d09eb3d8a1fe | 3154e735160709ed3f6cca3901d250e12bb405ba | refs/heads/master | 2022-08-09T15:17:55.905154 | 2020-05-19T14:02:05 | 2020-05-19T14:02:05 | 265,250,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 758 | py | # Generated by Django 3.0.5 on 2020-05-19 12:59
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Alter the ``default`` of ``activityperiod.start_time``/``end_time``.

    NOTE(review): both defaults are hard-coded timestamps
    (2020-05-19 12:59:00 UTC). Auto-generated migrations freeze whatever
    the model default evaluated to at ``makemigrations`` time; confirm
    whether the model intended a callable default such as ``timezone.now``.
    """

    # Applies on top of the previous auto-generated migration for myApp.
    dependencies = [
        ('myApp', '0013_auto_20200519_1804'),
    ]

    operations = [
        migrations.AlterField(
            model_name='activityperiod',
            name='end_time',
            field=models.DateTimeField(default=datetime.datetime(2020, 5, 19, 12, 59, 0, 129247, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='activityperiod',
            name='start_time',
            field=models.DateTimeField(default=datetime.datetime(2020, 5, 19, 12, 59, 0, 129247, tzinfo=utc)),
        ),
    ]
| [
"noreply@github.com"
] | wajeshubham.noreply@github.com |
0f924a8c650f964566ee201929a0a8d2e95ee9ec | aaafc4e18a8b54b388e199a42bd3ca2d59038a20 | /Regression/bigmart.py | 91eb3ca495fb9af7d04a6b5320c5e01970cf3f9d | [] | no_license | Mushtaq-D5037/Projects | 9aa2a804635bad6ed2a674fad075a170b325e84d | 549efb0ece9cad1490bea8adf3ebdecc7a29c8af | refs/heads/master | 2022-10-22T00:40:12.125944 | 2022-10-12T05:23:41 | 2022-10-12T05:23:41 | 203,940,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,240 | py | # -*- coding: utf-8 -*-
"""
@author: Mushtaq Mohammed
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# hypothesis
# item price > 0
# item sales > 0
# read csv files
train = pd.read_csv("store_Train.csv")
test = pd.read_csv("store_Test.csv")
# combine training and testing data into one and after data cleaning again split it
train['type'] = 'train'
test ['type'] = 'test'
# concatenating
df = pd.concat([train,test],axis = 0)
# some basic checks
df.head()
df.columns
df.info()
# statistical analysis
statistics = df.describe()
# histogram
df.hist()
# missing values
df.isnull().sum()
# Categoric and Numeric columns
cat = df.select_dtypes(exclude = [np.number])
num = df.select_dtypes(include = [np.number])
# checking all the categorical_data
for col in cat:
print('\n',col)
print(df[col].value_counts())
# observations
# in Item_Fat_Content Low Fat has a typo as LF and low fat and also Regular as reg
# replacing the typo errors in Item_fat_content
df['Item_Fat_Content'] = df['Item_Fat_Content'].replace({'LF' :'Low Fat',
'reg' :'Regular',
'low fat':'Low Fat'})
# Feature Engineering
# Creating a Column[Item_Type_ID] based on first two letter of [Item_Identifier]
df['Item_Type_ID'] = [letters[0:2] for letters in df['Item_Identifier']]
df['Item_Type_ID'] = df['Item_Type_ID'].map({'FD':'Food',
'DR':'Drink',
'NC':'Non-Consumable'}) # renaming them with more understandable names
df[df['Item_Type_ID']=='Non-Consumable']['Item_Fat_Content']
# observation
# Low Fat --> type id is Non-consumable ( which makes no sense)
# changing the value of a column based on another column
# so renaming mapping of non-consumable from low fat to non-edible
df['Item_Fat_Content'][df['Item_Type_ID']=='Non-Consumable'] = 'Non-Edible'
df['Item_Fat_Content'].value_counts()
# dropping item visibility with 0
df = df[~(df['Item_Visibility']==0)].reset_index(drop =True)
# finding missing values
df.isnull().sum()
# observation
# Item_Outlet_Sales (Dependent variable)
# Item_Weight
# Outlet_size has missing data
# Handling Missing Values
df[df['Item_Weight'].isna()]['Item_Type_ID'].value_counts()
# observation
# Item_Weight nan values are food drink and non-consumables
# so replacing Item_Weights with its respective mean values of food drink and non-consumables
# when replacing with mean check for outliers
# coz outliers effects the mean values
# Box-plot for outlier detection
df.boxplot(['Item_Weight'])
# first filling mean with 0 and then replacing it with its respective mean values
df['Item_Weight'] = df['Item_Weight'].fillna(0)
# cross checking
df[df['Item_Weight']==0]['Item_Weight'].value_counts()
df['Item_Type_ID'][df['Item_Weight']== 0 ].value_counts()
# calculating Mean of Item_Visibility with respect to Each Item_Type
Food = df['Item_Weight'][df['Item_Type_ID']=='Food']
Drink = df['Item_Weight'][df['Item_Type_ID']=='Drink']
NC = df['Item_Weight'][df['Item_Type_ID']=='Non-Consumable']
FMean = Food.mean()
DMean = Drink.mean()
NCMean= NC.mean()
# replacing 0 with its corresponding Mean
Food.replace(0,FMean,inplace = True)
Drink.replace(0,DMean,inplace = True)
NC.replace(0,NCMean,inplace = True)
df['Modified_Item_Weight'] = pd.concat([Food,Drink,NC],axis=0,ignore_index=False)
# Outlet_Size filling missing values
df['Outlet_Size'].value_counts()
df['Outlet_Size'].fillna('Unknown',inplace = True)
# creating one more column of outlet years
df['Outlet_Years'] = 2013 - df['Outlet_Establishment_Year']
df['Outlet_Years'].value_counts()
# cat columns
cat = df.select_dtypes(exclude = [np.number])
# one Hot Encoding Categorical columns
# 1.Label Encode
# 2.apply pd.dummies or OneHotEncoding()
from sklearn.preprocessing import LabelEncoder
lableEncoder = LabelEncoder()
for i in cat:
if i not in ['type']:
df[i] = lableEncoder.fit_transform(df[i])
# Variance Inflation Factor to remove collinearity between the variables
# threshold > 10 is considered a high collinearity,threshold = 5 as Medium (a thumsup rule)
from statsmodels.stats.outliers_influence import variance_inflation_factor
independent_variables = [col for col in df.columns if col not in ['Item_Outlet_Sales','Item_Identifier',
'Item_Weight','Outlet_Establishment_Year',
'type']]
X_vif = df[independent_variables]
thresh = 10
for i in np.arange(0,len(independent_variables)):
vif = [variance_inflation_factor(X_vif[independent_variables].values, ix) for ix in range(X_vif[independent_variables].shape[1])]
maxloc = vif.index(max(vif))
if (max(vif) > thresh):
print ("vif :", vif)
print('dropping:\n' + X_vif[independent_variables].columns[maxloc] + ' at index: ' + str(maxloc),'\n')
del independent_variables[maxloc]
else:
break
new_df = pd.concat([X_vif,df[['Item_Outlet_Sales','type']]],axis = 1)
#Divide into test and train:
new_train = df[df['type']=='train']
new_test = df[df['type']=='test']
#Drop unnecessary columns:
new_train = new_train.drop(['type'],axis=1)
new_test = new_test.drop(['Item_Outlet_Sales','type'],axis=1)
#Define target and ID columns:
from sklearn import metrics
predictors = [x for x in new_train.columns if x not in ['Item_Outlet_Sales']]
X = new_train[predictors]
y = new_train['Item_Outlet_Sales']
# Random Forest Model
from sklearn.ensemble import RandomForestRegressor
model = RandomForestRegressor(n_estimators=200,
max_depth=5 ,
min_samples_leaf=100,
n_jobs=4)
model.fit(X,y)
y_predict = model.predict(new_train[predictors])
print('RMSE:',round(np.sqrt(metrics.mean_squared_error(new_train['Item_Outlet_Sales'],y_predict)),2))
coef = pd.Series(model.feature_importances_, predictors).sort_values(ascending=False)
coef.plot(kind='barh', title='Feature Importances')
| [
"noreply@github.com"
] | Mushtaq-D5037.noreply@github.com |
268a3b1192dc668f432e6837975f82ba8346752f | 8ab0475048241daf0a094fda46799de3187f850b | /scripts/merge.py | c0778012b880cb4538717a05a56467dea97dbeec | [] | no_license | Rajan333/splitter | 283b38c95743bf3936153f4a013da1dae561d692 | f051c2d0b9ff5d09d709e3ec969ee41ac1e3da3b | refs/heads/master | 2021-08-08T22:54:32.580652 | 2018-06-15T18:59:48 | 2018-06-15T18:59:48 | 125,057,541 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 614 | py | import sys
"""Build an ffmpeg concat list from the chunk files in ``output_files``.

Writes ``sample_merge.txt`` in ffmpeg's concat-demuxer format and assembles
(but does not run) the merge command.
"""
import os
import subprocess

cmd = '/usr/local/bin/ffmpeg'
# Spawn ffmpeg with no arguments; its usage banner is captured (and
# discarded) through the pipes.
ffmpeg_p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
output = ffmpeg_p.communicate()
#print(output)
_input_path = '/Users/rajan/ns/splitter/scripts/output_files'
# Write the concat playlist. The directory listing is sorted so the chunks
# are merged in their original order (os.listdir returns an arbitrary
# order), and the handle is closed via the context manager even if a write
# fails. The old name `file` also shadowed the builtin -- renamed.
with open('sample_merge.txt', 'w+') as list_file:
    for chunk in sorted(os.listdir(_input_path)):
        list_file.write("file '" + _input_path + '/' + chunk + "'\n")
options = ' -f concat -safe 0 -i '
comm = cmd + options + 'sample_merge.txt ' + 'rajan.mp4'
#print comm
#os.system(comm)
#os.system(comm)
| [
"rajan.middha@innovaccer.com"
] | rajan.middha@innovaccer.com |
1f988e26451398adb5fc1c64a491840b860490d4 | 254d70889f0140861202e898c5a3e854aa7deb05 | /tests/settings.py | 7495221148093a4ce3984bb4f777660437fc563c | [
"MIT"
] | permissive | adamcharnock/django-pipeline | 17ced313a828eb4d8b645bc5d20b81c9b556dcc9 | 9bcbee0719f1866c47cd0eb07e74adfa2abb256a | refs/heads/master | 2021-01-23T20:50:00.426959 | 2013-06-20T22:58:09 | 2013-06-20T22:58:09 | 3,117,934 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | import os
def local_path(path):
    """Return *path* resolved relative to this settings module's directory."""
    return os.path.join(os.path.dirname(__file__), path)


# Ephemeral in-memory SQLite database keeps the test runs fast.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "TEST_NAME": ":memory:",
    }
}

SITE_ID = 1

# Minimal application set needed to exercise django-pipeline.
INSTALLED_APPS = [
    "django.contrib.contenttypes",
    "django.contrib.sites",
    "django.contrib.staticfiles",
    "django.contrib.auth",
    "django.contrib.admin",
    "pipeline",
    "tests",
]

# Media files.
MEDIA_URL = "/media/"
MEDIA_ROOT = local_path("media")

# Static files are served through pipeline's storage backend.
STATICFILES_STORAGE = "pipeline.storage.PipelineStorage"
STATIC_ROOT = local_path("static/")
STATIC_URL = "/static/"
STATICFILES_DIRS = (
    ("pipeline", local_path("assets/")),
    local_path("assets2/"),
)
STATICFILES_FINDERS = (
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
)

SECRET_KEY = "django-pipeline"

TEMPLATE_DIRS = (
    local_path("templates"),
)

# Asset groups compiled by django-pipeline during the tests.
PIPELINE_CSS = {
    "screen": {
        "source_filenames": (
            "pipeline/css/first.css",
            "pipeline/css/second.css",
            "pipeline/css/urls.css",
        ),
        "output_filename": "screen.css",
    }
}

PIPELINE_JS = {
    "scripts": {
        "source_filenames": (
            "pipeline/js/first.js",
            "pipeline/js/second.js",
            "pipeline/js/application.js",
            "pipeline/templates/**/*.jst",
        ),
        # NOTE(review): a JS group whose output ends in ".css" -- kept
        # verbatim for the existing tests, but confirm it is intentional.
        "output_filename": "scripts.css",
    }
}
"timothee.peignier@tryphon.org"
] | timothee.peignier@tryphon.org |
55e3a58b688cb028650fc9d133994735b94a7458 | 9f5b50fff3c494a5deab73599e0d5dd3983e3652 | /features/tongueBottom/__init__.py | 02c3c60b37d319f28ddda586e5c9af7722cab825 | [] | no_license | FlameSky-S/TCM-Backend | 893240e918452b0949deac36d17db31389071e98 | 9807cf58c0f786e6383d5f2fc9f62f7454f81ce0 | refs/heads/master | 2023-07-25T09:48:01.260300 | 2021-07-12T02:57:01 | 2021-07-12T02:57:01 | 381,318,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | from .TongueBottomSeg import TongueBottomSeg
from .VeinsColor import VeinsColor
from .VeinsIndex import VeinsIndex
from .VeinsSeg import VeinsSeg | [
"richandyuquan@live.com"
] | richandyuquan@live.com |
9a51215ba75438061eb7f89641fc6b928c17f61f | 24ca00779a928d87e3c00420423cd8d058bb8796 | /Funciones/Clases.py | a94f825dbf4af4e987de43bbc1a7274fb69da3fe | [] | no_license | DanielDubonDR/IPC2_Proyecto2_201901772 | 56a0659a42eee5183f5564d2d145282dcdbec97e | d3f55692cb67db07a393231f2eabd44f725a8a4c | refs/heads/main | 2023-05-05T06:36:13.045335 | 2021-06-01T00:47:56 | 2021-06-01T00:47:56 | 351,954,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,452 | py | class datos:
def __init__(self, id, nombre, nFila, nColumna, matriz):
self.id=id
self.nombre=nombre
self.nFila=nFila
self.nColumna=nColumna
self.matriz=matriz
def __str__(self):
string=str("id: ")+str(self.id)+str("\nNombre: ")+str(self.nombre)+str("\nFilas: ")+str(self.nFila)+str("\nColumnas: ")+str(self.nColumna)+str("\n")
return string
class dtIterar:
    """Record of a single matrix cell: row, column and the stored value."""

    def __init__(self, f, c, dt):
        self.f = f    # row index
        self.c = c    # column index
        self.dt = dt  # cell value

    def __str__(self):
        """Return a multi-line rendering of the coordinates and value."""
        return f"Fila: {self.f}\nColumna: {self.c}\nDato: {self.dt}\n"
class dtPanel:
    """Record of one panel row: an id, two operands and an operation."""

    def __init__(self, id, m1, m2, operacion):
        self.id = id
        self.m1 = m1
        self.m2 = m2
        self.operacion = operacion

    def __str__(self):
        """Return the four fields separated by single spaces."""
        fields = (self.id, self.m1, self.m2, self.operacion)
        return " ".join(str(field) for field in fields)
class reporte:
    """Record of a report entry: identification, counters and event details."""

    def __init__(self, id, nombre, ELlenos, EVacios, fecha, hora, operacion, descripcion):
        self.id = id
        self.nombre = nombre
        self.ELlenos = ELlenos
        self.EVacios = EVacios
        self.fecha = fecha
        self.hora = hora
        self.operacion = operacion
        self.descripcion = descripcion

    def __str__(self):
        """Return only id, name and the two counters, space separated."""
        summary = (self.id, self.nombre, self.ELlenos, self.EVacios)
        return " ".join(str(value) for value in summary)
"3179425811504@ingenieria.usac.edu.gt"
] | 3179425811504@ingenieria.usac.edu.gt |
220b49000230975a27b02bfe9f5b5058e4104058 | 04803c70bb97012b7d500a177ac0240fb2ddbe38 | /1heptane/pdep/network5103_1.py | de86f21a9e7594325e2ef9f2c28ecfb9bfcb0e1b | [] | no_license | shenghuiqin/chpd | 735e0415f6688d88579fc935459c1b0f53596d1d | 396ba54629036e3f2be0b3fabe09b78c90d56939 | refs/heads/master | 2023-03-01T23:29:02.118150 | 2019-10-05T04:02:23 | 2019-10-05T04:02:23 | 192,084,217 | 0 | 0 | null | 2019-06-18T18:33:13 | 2019-06-15T13:52:28 | HTML | UTF-8 | Python | false | false | 98,364 | py | species(
label = 'C=C(O)[C](O)C(C)C([O])=O(31589)',
structure = SMILES('[CH2]C(O)=C(O)C(C)C([O])=O'),
E0 = (-476.204,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2800,2850,1350,1500,750,1050,1375,1000,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.04862,0.108428,-0.000121936,7.0218e-08,-1.59357e-11,-57090,35.7591], Tmin=(100,'K'), Tmax=(1077.42,'K')), NASAPolynomial(coeffs=[20.2219,0.0294591,-1.19939e-05,2.18963e-09,-1.50545e-13,-61673.4,-68.4448], Tmin=(1077.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-476.204,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=C(O)CJ) + radical(CCOJ)"""),
)
species(
label = 'CO2(13)',
structure = SMILES('O=C=O'),
E0 = (-403.131,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([459.166,1086.67,1086.68,2300.05],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (44.0095,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(1622.99,'J/mol'), sigma=(3.941,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.2779,0.00275783,7.12787e-06,-1.07855e-08,4.14228e-12,-48475.6,5.97856], Tmin=(100,'K'), Tmax=(988.185,'K')), NASAPolynomial(coeffs=[4.55071,0.00290728,-1.14643e-06,2.25798e-10,-1.69526e-14,-48986,-1.45662], Tmin=(988.185,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-403.131,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(62.3585,'J/(mol*K)'), label="""CO2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C=C(O)C(O)=CC(5562)',
structure = SMILES('C=C(O)C(O)=CC'),
E0 = (-369.89,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,3580,3650,1210,1345,900,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,180],'cm^-1')),
HinderedRotor(inertia=(0.87493,'amu*angstrom^2'), symmetry=1, barrier=(20.1164,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.877997,'amu*angstrom^2'), symmetry=1, barrier=(20.1869,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.874595,'amu*angstrom^2'), symmetry=1, barrier=(20.1087,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.875946,'amu*angstrom^2'), symmetry=1, barrier=(20.1397,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (100.116,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(4302.09,'J/mol'), sigma=(6.83849,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=671.98 K, Pc=30.52 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.884099,0.0877395,-9.34163e-05,4.76671e-08,-9.0713e-12,-44294.8,25.8217], Tmin=(100,'K'), Tmax=(1473.72,'K')), NASAPolynomial(coeffs=[23.5689,0.00887539,-4.29798e-07,-1.49469e-10,1.60597e-14,-50145.5,-97.0299], Tmin=(1473.72,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-369.89,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(340.893,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-CdsHH)"""),
)
species(
label = '[CH2][C](O)C1(O)OC(=O)C1C(31775)',
structure = SMILES('[CH2][C](O)C1(O)OC(=O)C1C'),
E0 = (-382.63,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.921053,0.0862172,-2.59987e-05,-5.58638e-08,3.67329e-11,-45821.7,36.2805], Tmin=(100,'K'), Tmax=(895.881,'K')), NASAPolynomial(coeffs=[30.3719,0.00779795,2.66474e-06,-8.1721e-10,5.86876e-14,-53888.7,-124.979], Tmin=(895.881,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-382.63,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsOs) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + ring(Beta-Propiolactone) + radical(CJCO) + radical(C2CsJOH)"""),
)
species(
label = 'CC1C(O)=C(O)CC1([O])[O](31753)',
structure = SMILES('CC1C(O)=C(O)CC1([O])[O]'),
E0 = (-359.529,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.514074,0.0804685,-3.0479e-05,-2.98668e-08,2.02111e-11,-43061.9,32.3196], Tmin=(100,'K'), Tmax=(967.074,'K')), NASAPolynomial(coeffs=[24.6072,0.0209044,-6.86892e-06,1.27042e-09,-9.50766e-14,-49994.2,-98.7556], Tmin=(967.074,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-359.529,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(419.881,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsOsOs) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + ring(Cyclopentene) + radical(CC(C)(O)OJ) + radical(CC(C)(O)OJ)"""),
)
species(
label = '[CH2]C(O)=C(O)[CH]C(4609)',
structure = SMILES('[CH2]C(O)=C(O)[CH]C'),
E0 = (-122.846,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,3025,407.5,1350,352.5,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,322.653],'cm^-1')),
HinderedRotor(inertia=(0.00160507,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.208419,'amu*angstrom^2'), symmetry=1, barrier=(15.514,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.20977,'amu*angstrom^2'), symmetry=1, barrier=(15.5164,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.209392,'amu*angstrom^2'), symmetry=1, barrier=(15.5175,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.207973,'amu*angstrom^2'), symmetry=1, barrier=(15.5093,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (100.116,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.141743,0.0688338,-2.99459e-05,-3.072e-08,2.30897e-11,-14621,29.0351], Tmin=(100,'K'), Tmax=(905.863,'K')), NASAPolynomial(coeffs=[24.2058,0.00628117,1.26066e-06,-4.23747e-10,2.91719e-14,-20774,-94.5791], Tmin=(905.863,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-122.846,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + radical(C=C(O)CJ) + radical(CCJCO)"""),
)
species(
label = 'OH(5)',
structure = SMILES('[OH]'),
E0 = (28.372,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3287.46],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (17.0073,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.4858,0.00133397,-4.70043e-06,5.64379e-09,-2.06318e-12,3411.96,1.99788], Tmin=(100,'K'), Tmax=(1005.25,'K')), NASAPolynomial(coeffs=[2.88225,0.00103869,-2.35652e-07,1.40229e-11,6.34581e-16,3669.56,5.59053], Tmin=(1005.25,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(28.372,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""OH""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C=C=C(O)C(C)C([O])=O(31776)',
structure = SMILES('C=C=C(O)C(C)C([O])=O'),
E0 = (-243.958,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([540,610,2055,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,350,440,435,1725,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (127.118,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.385923,0.0843778,-8.88723e-05,5.2342e-08,-1.28976e-11,-29215.3,28.931], Tmin=(100,'K'), Tmax=(964.61,'K')), NASAPolynomial(coeffs=[11.3359,0.0389719,-1.82661e-05,3.54508e-09,-2.51032e-13,-31327.8,-23.5018], Tmin=(964.61,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-243.958,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(365.837,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(CCOJ)"""),
)
species(
label = 'CC([O])=C(O)C(C)C([O])=O(31777)',
structure = SMILES('CC([O])=C(O)C(C)C([O])=O'),
E0 = (-497.316,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.367291,0.100448,-0.000113404,7.05836e-08,-1.79998e-11,-59659.7,34.1379], Tmin=(100,'K'), Tmax=(944.64,'K')), NASAPolynomial(coeffs=[13.7704,0.0405832,-1.83438e-05,3.49566e-09,-2.44855e-13,-62330.7,-33.2631], Tmin=(944.64,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-497.316,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(CCOJ) + radical(C=C(C)OJ)"""),
)
species(
label = '[CH2]C(O)=C(O)[C](C)C(=O)O(31778)',
structure = SMILES('[CH2]C(O)=C(O)[C](C)C(=O)O'),
E0 = (-549.335,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.33959,0.10697,-0.000107274,4.77797e-08,-6.37184e-12,-65868.2,36.8261], Tmin=(100,'K'), Tmax=(974.005,'K')), NASAPolynomial(coeffs=[24.7398,0.0208301,-6.89712e-06,1.17076e-09,-7.98546e-14,-71942.7,-93.4089], Tmin=(974.005,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-549.335,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(403.252,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(CCJ(C)CO) + radical(C=C(O)CJ)"""),
)
species(
label = '[CH2]C(O)=C(O)C([CH2])C(=O)O(31779)',
structure = SMILES('[CH2]C(O)=C(O)C([CH2])C(=O)O'),
E0 = (-491.398,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,3580,3615,3650,1210,1277.5,1345,900,1000,1100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.86127,0.123193,-0.000152712,9.28611e-08,-2.1714e-11,-58885.5,38.855], Tmin=(100,'K'), Tmax=(1059.48,'K')), NASAPolynomial(coeffs=[25.4036,0.0202566,-6.97616e-06,1.15911e-09,-7.57004e-14,-64662.9,-94.2578], Tmin=(1059.48,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-491.398,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(403.252,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=C(O)CJ) + radical(CJC(C)C=O)"""),
)
species(
label = 'CC(O)=C([O])C(C)C([O])=O(31780)',
structure = SMILES('CC(O)=C([O])C(C)C([O])=O'),
E0 = (-497.316,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.367284,0.100448,-0.000113404,7.05833e-08,-1.79997e-11,-59659.7,34.1379], Tmin=(100,'K'), Tmax=(944.655,'K')), NASAPolynomial(coeffs=[13.7704,0.0405832,-1.83438e-05,3.49566e-09,-2.44854e-13,-62330.7,-33.2632], Tmin=(944.655,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-497.316,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=C(C)OJ) + radical(CCOJ)"""),
)
species(
label = 'C[C](C([O])=O)C(O)=C(C)O(31781)',
structure = SMILES('CC(O)=C(O)C(C)=C([O])[O]'),
E0 = (-491.151,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.34714,0.126184,-0.000152102,8.74912e-08,-1.90645e-11,-58831.7,34.9467], Tmin=(100,'K'), Tmax=(1187.63,'K')), NASAPolynomial(coeffs=[30.0338,0.0124766,-2.61932e-06,2.85799e-10,-1.39878e-14,-66195.3,-125.461], Tmin=(1187.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-491.151,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsOs) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-CdsCsCs) + radical(C=COJ) + radical(C=COJ)"""),
)
species(
label = '[CH2]C(O)=C([O])C(C)C(=O)O(31782)',
structure = SMILES('[CH2]C(O)=C([O])C(C)C(=O)O'),
E0 = (-564.104,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.19278,0.109012,-0.000123831,7.14503e-08,-1.60946e-11,-67654.6,37.6061], Tmin=(100,'K'), Tmax=(1091.67,'K')), NASAPolynomial(coeffs=[21.5092,0.0258295,-9.53508e-06,1.65192e-09,-1.10359e-13,-72611.2,-73.9092], Tmin=(1091.67,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-564.104,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=C(C)OJ) + radical(C=C(O)CJ)"""),
)
species(
label = '[CH2]C(C([O])=O)C(O)=C(C)O(31783)',
structure = SMILES('[CH2]C(C([O])=O)C(O)=C(C)O'),
E0 = (-424.61,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2800,2850,1350,1500,750,1050,1375,1000,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.05455,0.114866,-0.000143168,9.31939e-08,-2.41441e-11,-50889.9,35.453], Tmin=(100,'K'), Tmax=(942.355,'K')), NASAPolynomial(coeffs=[17.822,0.0347403,-1.5628e-05,2.96557e-09,-2.07089e-13,-54447.5,-54.4947], Tmin=(942.355,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-424.61,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(CJC(C)C=O) + radical(CCOJ)"""),
)
species(
label = '[CH2]C([O])=C(O)C(C)C(=O)O(31784)',
structure = SMILES('[CH2]C([O])=C(O)C(C)C(=O)O'),
E0 = (-564.104,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.19278,0.109012,-0.000123831,7.14503e-08,-1.60946e-11,-67654.6,37.6061], Tmin=(100,'K'), Tmax=(1091.67,'K')), NASAPolynomial(coeffs=[21.5092,0.0258295,-9.53508e-06,1.65192e-09,-1.10359e-13,-72611.2,-73.9092], Tmin=(1091.67,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-564.104,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=C(C)OJ) + radical(C=C(O)CJ)"""),
)
species(
label = 'C[CH]C([O])=O(2125)',
structure = SMILES('CC=C([O])[O]'),
E0 = (-76.8804,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,291.771,292.006],'cm^-1')),
HinderedRotor(inertia=(0.235844,'amu*angstrom^2'), symmetry=1, barrier=(14.259,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (72.0627,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.13898,0.0365285,-2.93484e-05,1.19926e-08,-1.96554e-12,-9175.97,16.3174], Tmin=(100,'K'), Tmax=(1450.68,'K')), NASAPolynomial(coeffs=[10.5535,0.013327,-5.35822e-06,9.67879e-10,-6.56299e-14,-11617.4,-27.4085], Tmin=(1450.68,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-76.8804,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsCsCs) + radical(C=COJ) + radical(C=COJ)"""),
)
species(
label = '[CH2]C(O)=[C]O(31700)',
structure = SMILES('[CH2]C(O)=[C]O'),
E0 = (-18.5667,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,1685,370,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.841623,'amu*angstrom^2'), symmetry=1, barrier=(19.3506,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.842368,'amu*angstrom^2'), symmetry=1, barrier=(19.3677,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.841193,'amu*angstrom^2'), symmetry=1, barrier=(19.3407,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (72.0627,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.351247,0.0579095,-6.81787e-05,3.58442e-08,-6.68351e-12,-2082.38,24.0014], Tmin=(100,'K'), Tmax=(1624.88,'K')), NASAPolynomial(coeffs=[17.4373,-0.00264896,4.80126e-06,-1.10446e-09,7.9234e-14,-5193.08,-59.2091], Tmin=(1624.88,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-18.5667,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(195.39,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + radical(C=C(O)CJ) + radical(C=CJO)"""),
)
species(
label = '[O][C]=O(669)',
structure = SMILES('[O][C]=O'),
E0 = (31.9507,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1855,455,950],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (44.0095,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.90478,-0.000175995,8.15126e-06,-1.13656e-08,4.4768e-12,3848.25,8.04855], Tmin=(100,'K'), Tmax=(975.388,'K')), NASAPolynomial(coeffs=[5.59398,-0.00122084,7.11747e-07,-9.7712e-11,3.97995e-15,3238.91,-1.49318], Tmin=(975.388,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(31.9507,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(103.931,'J/(mol*K)'), comment="""Thermo library: Klippenstein_Glarborg2016 + radical(OJC=O) + radical((O)CJOH)"""),
)
species(
label = 'CC(C([O])=O)C1(O)C[C]1O(31785)',
structure = SMILES('CC(C([O])=O)C1(O)C[C]1O'),
E0 = (-344.106,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.197615,0.0974865,-0.000107129,6.58549e-08,-1.67873e-11,-41239.6,34.0094], Tmin=(100,'K'), Tmax=(939.254,'K')), NASAPolynomial(coeffs=[12.6614,0.0427251,-1.96768e-05,3.78426e-09,-2.66469e-13,-43655.2,-27.2225], Tmin=(939.254,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-344.106,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-(Cds-O2d)H) + group(Cs-CsCsCsOs) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + ring(Cyclopropane) + radical(C2CsJOH) + radical(CCOJ)"""),
)
species(
label = '[CH2]C(O)=C(O)C(C)[C]1OO1(31786)',
structure = SMILES('[CH2]C(O)=C(O)C(C)[C]1OO1'),
E0 = (-115.101,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.39708,0.119439,-0.000127997,6.34904e-08,-1.14114e-11,-13539.8,45.365], Tmin=(100,'K'), Tmax=(1648.19,'K')), NASAPolynomial(coeffs=[31.5228,0.00440127,4.26471e-06,-1.15799e-09,8.59037e-14,-20936.4,-128.071], Tmin=(1648.19,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-115.101,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + ring(dioxirane) + radical(Cs_P) + radical(C=C(O)CJ)"""),
)
species(
label = '[CH2]C1(O)OC(=O)C(C)[C]1O(31643)',
structure = SMILES('[CH2]C1(O)OC(=O)C(C)[C]1O'),
E0 = (-445.568,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.848359,0.0849591,-2.3967e-05,-5.6665e-08,3.67442e-11,-53394.4,34.7883], Tmin=(100,'K'), Tmax=(895.081,'K')), NASAPolynomial(coeffs=[29.7032,0.00887189,2.24903e-06,-7.46974e-10,5.42562e-14,-61284.9,-122.744], Tmin=(895.081,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-445.568,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsOs) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + ring(butyrolactone) + radical(C2CsJOH) + radical(CJC(C)OC)"""),
)
species(
label = 'CC1[C](O)[C](O)COC1=O(31669)',
structure = SMILES('CC1[C](O)[C](O)COC1=O'),
E0 = (-455.15,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.131712,0.0838481,-6.35719e-05,2.34445e-08,-3.51572e-12,-54602.5,30.0682], Tmin=(100,'K'), Tmax=(1527.12,'K')), NASAPolynomial(coeffs=[17.939,0.0372054,-1.77576e-05,3.44419e-09,-2.41548e-13,-60041.2,-63.3809], Tmin=(1527.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-455.15,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(419.881,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + ring(Cyclohexanone) + radical(C2CsJOH) + radical(C2CsJOH)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C=C(O)[C](O)CC([O])=O(31774)',
structure = SMILES('[CH2]C(O)=C(O)CC([O])=O'),
E0 = (-439.346,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,325,375,415,465,420,450,1700,1750,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.099,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.376899,0.083783,-7.36527e-05,2.70796e-08,-2.68188e-12,-52672.9,31.467], Tmin=(100,'K'), Tmax=(1103.06,'K')), NASAPolynomial(coeffs=[22.4566,0.0182371,-7.98338e-06,1.57109e-09,-1.14516e-13,-58760,-85.6892], Tmin=(1103.06,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-439.346,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(CCOJ) + radical(C=C(O)CJ)"""),
)
species(
label = 'CC1C(=O)OCC(O)=C1O(31591)',
structure = SMILES('CC1C(=O)OCC(O)=C1O'),
E0 = (-776.471,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.0101657,0.0648627,1.49209e-05,-7.04123e-08,3.20838e-11,-93223.9,26.9667], Tmin=(100,'K'), Tmax=(990.534,'K')), NASAPolynomial(coeffs=[22.6214,0.0277191,-1.08565e-05,2.14289e-09,-1.61709e-13,-100361,-95.3177], Tmin=(990.534,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-776.471,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(419.881,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + ring(Cyclohexane)"""),
)
species(
label = '[CH2]C(O)C(=O)C(C)C([O])=O(31787)',
structure = SMILES('[CH2]C(O)C(=O)C(C)C([O])=O'),
E0 = (-393.925,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.455208,0.0810663,-6.38423e-05,2.61222e-08,-4.50745e-12,-47253.6,37.71], Tmin=(100,'K'), Tmax=(1313.64,'K')), NASAPolynomial(coeffs=[13.0652,0.0426689,-1.99972e-05,3.87078e-09,-2.72718e-13,-50566.6,-26.5657], Tmin=(1313.64,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-393.925,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-O2d)H) + group(Cs-CsCsCsH) + group(Cs-(Cds-O2d)CsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsCs) + group(Cds-OdCsOs) + radical(CJCO) + radical(CCOJ)"""),
)
species(
label = '[CH2]C(=O)C(O)C(C)C([O])=O(31788)',
structure = SMILES('C=C([O])C(O)C(C)C([O])=O'),
E0 = (-442.184,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.0665203,0.0910449,-9.02341e-05,4.96733e-08,-1.14724e-11,-53044.5,36.8957], Tmin=(100,'K'), Tmax=(1022.76,'K')), NASAPolynomial(coeffs=[12.3214,0.0431162,-1.99409e-05,3.854e-09,-2.72451e-13,-55551.3,-22.5028], Tmin=(1022.76,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-442.184,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCOJ) + radical(C=C(C)OJ)"""),
)
species(
label = '[CH2]C(O)=C(O)C=C([O])OC(31789)',
structure = SMILES('[CH2]C(O)=C(O)[CH]C(=O)OC'),
E0 = (-530.482,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,3025,407.5,1350,352.5,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.34588,0.101341,-8.14274e-05,2.00455e-08,2.46441e-12,-63595.5,34.6104], Tmin=(100,'K'), Tmax=(1026.18,'K')), NASAPolynomial(coeffs=[27.1368,0.0208682,-8.45515e-06,1.65029e-09,-1.22013e-13,-71049.8,-111.376], Tmin=(1026.18,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-530.482,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(403.252,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cs-OsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(C=CCJCO) + radical(C=C(O)CJ)"""),
)
species(
label = '[CH2]C(O)=C(O)OC([O])=CC(31790)',
structure = SMILES('[CH2]C(O)=C(O)OC(=O)[CH]C'),
E0 = (-424.388,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,3025,407.5,1350,352.5,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.4957,0.118444,-0.000145803,8.96245e-08,-2.14049e-11,-50842,37.3334], Tmin=(100,'K'), Tmax=(1032.12,'K')), NASAPolynomial(coeffs=[22.6627,0.0248193,-9.73974e-06,1.74089e-09,-1.1824e-13,-55829,-79.9815], Tmin=(1032.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-424.388,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(403.252,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-O2d)(Cds-Cd)) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsCs) + group(Cds-OdCsOs) + radical(CCJCO) + radical(C=C(O)CJ)"""),
)
species(
label = 'CH2(19)',
structure = SMILES('[CH2]'),
E0 = (381.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'CC(C([O])=O)C(O)=[C]O(31791)',
structure = SMILES('CC(C([O])=O)C(O)=[C]O'),
E0 = (-353.585,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2800,2850,1350,1500,750,1050,1375,1000,1685,370,3580,3650,1210,1345,900,1100,1380,1390,370,380,2900,435,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.099,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.130924,0.0903513,-0.000104281,6.18377e-08,-1.4495e-11,-42377.2,32.902], Tmin=(100,'K'), Tmax=(1042.08,'K')), NASAPolynomial(coeffs=[16.7993,0.0253649,-1.0738e-05,1.99372e-09,-1.3817e-13,-45905.7,-49.4745], Tmin=(1042.08,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-353.585,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsOsH) + radical(CCOJ) + radical(C=CJO)"""),
)
species(
label = 'O(4)',
structure = SMILES('[O]'),
E0 = (243.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29226.7,5.11107], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29226.7,5.11107], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = '[CH2]C(O)=C(O)C(C)[C]=O(31792)',
structure = SMILES('[CH2]C(O)=C(O)C(C)[C]=O'),
E0 = (-277.534,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3580,3650,1210,1345,900,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,1855,455,950,1380,1390,370,380,2900,435,325,375,415,465,420,450,1700,1750,3000,3100,440,815,1455,1000,200],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (128.126,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.42341,0.106345,-0.000122406,6.84843e-08,-1.45498e-11,-33173.3,36.7784], Tmin=(100,'K'), Tmax=(1239.13,'K')), NASAPolynomial(coeffs=[25.4666,0.0135902,-2.91783e-06,3.21685e-10,-1.5524e-14,-39380.3,-96.8719], Tmin=(1239.13,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-277.534,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(382.466,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsH) + radical(CC(C)CJ=O) + radical(C=C(O)CJ)"""),
)
species(
label = 'C=C(O)C1(O)C(C)C1([O])[O](31736)',
structure = SMILES('C=C(O)C1(O)C(C)C1([O])[O]'),
E0 = (-243.59,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.753105,0.0908814,-6.68614e-05,1.09382e-08,5.2458e-12,-29113.7,35.8469], Tmin=(100,'K'), Tmax=(981.199,'K')), NASAPolynomial(coeffs=[23.097,0.0230117,-7.9878e-06,1.43144e-09,-1.01644e-13,-35207.3,-85.9648], Tmin=(981.199,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-243.59,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsCsCsH) + group(Cs-CsCsOsOs) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + ring(Cyclopropane) + radical(CC(C)(O)OJ) + radical(CC(C)(O)OJ)"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C=C(O)C(=O)C(C)C([O])=O(31793)',
structure = SMILES('C=C(O)C(=O)C(C)C([O])=O'),
E0 = (-510.489,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,350,440,435,1725,375,552.5,462.5,1710,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (143.117,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.396358,0.0826309,-7.09674e-05,3.18022e-08,-5.96558e-12,-61271,31.5733], Tmin=(100,'K'), Tmax=(1229.91,'K')), NASAPolynomial(coeffs=[13.4329,0.0402322,-1.92576e-05,3.77295e-09,-2.68137e-13,-64477.7,-34.0183], Tmin=(1229.91,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-510.489,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(386.623,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-CsCsCsH) + group(Cs-CsHHH) + group(Cds-O2d(Cds-Cds)Cs) + group(Cds-Cds(Cds-O2d)O2s) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCOJ)"""),
)
species(
label = 'C=C(O)C(O)=C(C)C([O])=O(31794)',
structure = SMILES('C=C(O)C(O)=C(C)C([O])=O'),
E0 = (-447.497,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,350,375,415,440,465,420,435,450,1700,1725,1750,2950,3100,1380,975,1025,1650,3580,3650,1210,1345,900,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (143.117,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.03757,0.116933,-0.000166187,1.23839e-07,-3.65652e-11,-53645.6,32.708], Tmin=(100,'K'), Tmax=(831.44,'K')), NASAPolynomial(coeffs=[16.3356,0.033346,-1.53776e-05,2.90875e-09,-2.00891e-13,-56534.4,-47.8995], Tmin=(831.44,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-447.497,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(386.623,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-Cds)HHH) + group(Cd-CdCs(CO)) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-O2d(Cds-Cds)O2s) + group(Cds-CdsHH) + radical(CCOJ)"""),
)
species(
label = 'CH3(17)',
structure = SMILES('[CH3]'),
E0 = (136.188,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([604.263,1333.71,1492.19,2836.77,2836.77,3806.92],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (15.0345,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.65718,0.0021266,5.45839e-06,-6.6181e-09,2.46571e-12,16422.7,1.67354], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.97812,0.00579785,-1.97558e-06,3.07298e-10,-1.79174e-14,16509.5,4.72248], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(136.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""CH3""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C=C(O)C(O)=CC([O])=O(31795)',
structure = SMILES('C=C(O)C(O)=CC([O])=O'),
E0 = (-404.153,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,3580,3650,1210,1345,900,1100,3010,987.5,1337.5,450,1655,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (129.091,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.377518,0.100759,-0.000145557,1.06576e-07,-3.05569e-11,-48454.8,28.9829], Tmin=(100,'K'), Tmax=(859.172,'K')), NASAPolynomial(coeffs=[16.1968,0.0235959,-1.08415e-05,2.04542e-09,-1.4104e-13,-51302.9,-48.4631], Tmin=(859.172,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-404.153,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-Cds(Cds-Cds)O2s) + group(Cd-Cd(CO)H) + group(Cds-O2d(Cds-Cds)O2s) + group(Cds-CdsHH) + radical(CCOJ)"""),
)
species(
label = 'C=C(O)C([O])C(C)C([O])=O(31796)',
structure = SMILES('C=C(O)C([O])C(C)C([O])=O'),
E0 = (-349.628,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1277.5,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,200,800,914.286,1028.57,1142.86,1257.14,1371.43,1485.71,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.371887,0.0939521,-8.76293e-05,4.21238e-08,-8.19104e-12,-41891.4,36.7636], Tmin=(100,'K'), Tmax=(1225.87,'K')), NASAPolynomial(coeffs=[17.6519,0.0351409,-1.56669e-05,2.9885e-09,-2.09945e-13,-46310.3,-53.8614], Tmin=(1225.87,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-349.628,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCOJ) + radical(CC(C)OJ)"""),
)
species(
label = 'C=C(O)C(O)[C](C)C([O])=O(31797)',
structure = SMILES('C=C(O)C(O)[C](C)C([O])=O'),
E0 = (-427.415,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3580,3650,1210,1345,900,1100,2950,3100,1380,975,1025,1650,360,370,350,350,440,435,1725,1380,1390,370,380,2900,435,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.295686,0.0916886,-8.358e-05,3.92548e-08,-7.46542e-12,-51249.3,37.5698], Tmin=(100,'K'), Tmax=(1250.83,'K')), NASAPolynomial(coeffs=[17.4768,0.0348544,-1.54245e-05,2.92947e-09,-2.05187e-13,-55695.3,-52.15], Tmin=(1250.83,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-427.415,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCJ(C)CO) + radical(CCOJ)"""),
)
species(
label = '[CH2]C(C([O])=O)C(O)C(=C)O(31798)',
structure = SMILES('[CH2]C(C([O])=O)C(O)C(=C)O'),
E0 = (-369.478,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2950,3100,1380,975,1025,1650,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.631272,0.105557,-0.000120202,7.24038e-08,-1.76192e-11,-44274.2,38.2507], Tmin=(100,'K'), Tmax=(993.185,'K')), NASAPolynomial(coeffs=[16.2814,0.0374409,-1.7327e-05,3.3489e-09,-2.36819e-13,-47633.6,-43.2275], Tmin=(993.185,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-369.478,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CJC(C)C=O) + radical(CCOJ)"""),
)
species(
label = '[CH]=C(O)C(O)C(C)C([O])=O(31799)',
structure = SMILES('[CH]=C(O)C(O)C(C)C([O])=O'),
E0 = (-332.893,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2800,2850,1350,1500,750,1050,1375,1000,3120,650,792.5,1650,3580,3650,1210,1345,900,1100,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.496922,0.100082,-0.000105114,5.78088e-08,-1.28421e-11,-39876.7,37.8136], Tmin=(100,'K'), Tmax=(1083.19,'K')), NASAPolynomial(coeffs=[16.8771,0.0359222,-1.62634e-05,3.12312e-09,-2.20407e-13,-43640.5,-47.3942], Tmin=(1083.19,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-332.893,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCOJ) + radical(Cds_P)"""),
)
species(
label = '[CH]=C(O)[C](O)C(C)C(=O)O(31800)',
structure = SMILES('[CH]C(O)=C(O)C(C)C(=O)O'),
E0 = (-490.141,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.68855,0.114284,-0.000119196,6.2559e-08,-1.28169e-11,-58736.4,38.3048], Tmin=(100,'K'), Tmax=(1197.85,'K')), NASAPolynomial(coeffs=[24.4675,0.0269401,-9.82008e-06,1.68536e-09,-1.121e-13,-65002.6,-92.605], Tmin=(1197.85,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-490.141,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(403.252,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + radical(AllylJ2_triplet)"""),
)
species(
label = 'C=C(O)C(O)=C(C)C(=O)O(31598)',
structure = SMILES('C=C(O)C(O)=C(C)C(=O)O'),
E0 = (-673.202,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.42635,0.120064,-0.00015556,1.01332e-07,-2.5721e-11,-80772.6,33.9313], Tmin=(100,'K'), Tmax=(970.747,'K')), NASAPolynomial(coeffs=[21.2928,0.0264479,-1.09026e-05,1.98653e-09,-1.35732e-13,-85183.5,-75.0005], Tmin=(970.747,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-673.202,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-Cds)HHH) + group(Cd-CdCs(CO)) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-O2d(Cds-Cds)O2s) + group(Cds-CdsHH)"""),
)
species(
label = 'C=C(O)C(=O)C(C)C(=O)O(8754)',
structure = SMILES('C=C(O)C(=O)C(C)C(=O)O'),
E0 = (-736.195,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.536873,0.0920496,-8.17468e-05,3.63611e-08,-6.4461e-12,-88374.1,34.7587], Tmin=(100,'K'), Tmax=(1350.93,'K')), NASAPolynomial(coeffs=[20.3654,0.0301598,-1.3028e-05,2.44938e-09,-1.70506e-13,-94021.6,-72.3702], Tmin=(1350.93,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-736.195,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-CsCsCsH) + group(Cs-CsHHH) + group(Cds-O2d(Cds-Cds)Cs) + group(Cds-Cds(Cds-O2d)O2s) + group(Cds-OdCsOs) + group(Cds-CdsHH)"""),
)
species(
label = 'C=C(O)C(C)(O)[CH]C([O])=O(31801)',
structure = SMILES('C=C(O)C(C)(O)C=C([O])[O]'),
E0 = (-437.915,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,3580,3650,1210,1345,900,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,180,180,180,450.577,684.324,1600,1828.57,2971.43,3200],'cm^-1')),
HinderedRotor(inertia=(0.156068,'amu*angstrom^2'), symmetry=1, barrier=(3.58831,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156068,'amu*angstrom^2'), symmetry=1, barrier=(3.58831,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156068,'amu*angstrom^2'), symmetry=1, barrier=(3.58831,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156068,'amu*angstrom^2'), symmetry=1, barrier=(3.58831,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156068,'amu*angstrom^2'), symmetry=1, barrier=(3.58831,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.65334,0.109105,-0.000114424,5.81588e-08,-1.1358e-11,-52452.6,38.648], Tmin=(100,'K'), Tmax=(1265.42,'K')), NASAPolynomial(coeffs=[27.298,0.0175915,-5.94774e-06,1.01124e-09,-6.79415e-14,-59779.9,-107.841], Tmin=(1265.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-437.915,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(411.566,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)(Cds-Cds)CsOs) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(C=COJ) + radical(C=COJ)"""),
)
species(
label = 'C=C(O)C(O)([CH]C)C([O])=O(10239)',
structure = SMILES('C=C(O)C(O)([CH]C)C([O])=O'),
E0 = (-382.663,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2950,3100,1380,975,1025,1650,350,440,435,1725,3580,3650,1210,1345,900,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(5419.43,'J/mol'), sigma=(8.1159,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=846.50 K, Pc=23 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.768655,0.1043,-0.000114777,6.54846e-08,-1.48811e-11,-45851.4,36.8811], Tmin=(100,'K'), Tmax=(1069.36,'K')), NASAPolynomial(coeffs=[18.4527,0.0324013,-1.39237e-05,2.61007e-09,-1.81926e-13,-49962.3,-57.1397], Tmin=(1069.36,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-382.663,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(407.409,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsCsOs) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CCJCO) + radical(CCOJ)"""),
)
species(
label = 'C=C(O)C1(O)OC(=O)C1C(31686)',
structure = SMILES('C=C(O)C1(O)OC(=O)C1C'),
E0 = (-686.287,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (144.125,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.650197,0.0694387,3.78758e-05,-1.33324e-07,6.70774e-11,-82342.9,32.7562], Tmin=(100,'K'), Tmax=(899.713,'K')), NASAPolynomial(coeffs=[35.2256,-0.0014252,8.24813e-06,-1.87491e-09,1.27052e-13,-92385.9,-156.469], Tmin=(899.713,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-686.287,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsOs) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsOs) + group(Cds-CdsHH) + ring(Beta-Propiolactone)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (-476.204,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (-354.161,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (-359.529,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (-476.204,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (-215.586,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (-273.629,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (-353.121,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (-407.718,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (-317.168,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (-342.525,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (-438.497,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (-368.963,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (-397.514,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (-92.4851,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (-85.1313,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (-244.988,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (-115.101,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (-416.861,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (-377.17,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (-19.4842,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (-468.673,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (-333.09,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (-336.53,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (-216.682,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (-110.588,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (27.978,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS27',
E0 = (-34.5294,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS28',
E0 = (-243.59,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS29',
E0 = (-273.175,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS30',
E0 = (-224.885,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS31',
E0 = (-243.442,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS32',
E0 = (-332.341,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS33',
E0 = (-187.056,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS34',
E0 = (-243.319,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS35',
E0 = (-300.819,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS36',
E0 = (-196.913,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS37',
E0 = (-347.483,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS38',
E0 = (-412.804,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS39',
E0 = (-458.422,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS40',
E0 = (-243.15,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS41',
E0 = (-242.629,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS42',
E0 = (-467.92,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CO2(13)', 'C=C(O)C(O)=CC(5562)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2][C](O)C1(O)OC(=O)C1C(31775)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(5.448e+10,'s^-1','*|/',3), n=0.478, Ea=(122.043,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [R5_SS_D;doublebond_intra;radadd_intra_O]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Exocyclic"""),
)
reaction(
label = 'reaction3',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC1C(O)=C(O)CC1([O])[O](31753)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(3.49749e+08,'s^-1'), n=0.656505, Ea=(116.675,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6_SMS;multiplebond_intra;radadd_intra_cs2H] for rate rule [R6_SMS_CO;carbonylbond_intra;radadd_intra_cs2H]
Euclidian distance = 1.41421356237
family: Intra_R_Add_Exocyclic
Ea raised from 112.8 to 116.7 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction4',
reactants = ['CO2(13)', '[CH2]C(O)=C(O)[CH]C(4609)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(0.00188149,'m^3/(mol*s)'), n=2.445, Ea=(49.773,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cd_R;CsJ-CdCsH] for rate rule [CO2;CsJ-CdCsH]
Euclidian distance = 2.0
family: R_Addition_MultipleBond
Ea raised from 46.9 to 49.8 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction5',
reactants = ['OH(5)', 'C=C=C(O)C(C)C([O])=O(31776)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(41610,'cm^3/(mol*s)'), n=2.487, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 214 used for Ca_Cds-HH;OJ_pri
Exact match found for rate rule [Ca_Cds-HH;OJ_pri]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from -7.6 to 0 kJ/mol."""),
)
reaction(
label = 'reaction6',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC([O])=C(O)C(C)C([O])=O(31777)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(5.4947e+07,'s^-1'), n=1.58167, Ea=(202.575,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3H_SS_2Cd;C_rad_out_2H;XH_out] for rate rule [R3H_SS_2Cd;C_rad_out_2H;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction7',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C(O)=C(O)[C](C)C(=O)O(31778)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(82.8481,'s^-1'), n=3.49875, Ea=(123.083,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS;O_rad_out;Cs_H_out_noH]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['[CH2]C(O)=C(O)C([CH2])C(=O)O(31779)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(8.6e-09,'s^-1'), n=5.55, Ea=(83.68,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 340 used for R4H_SSS;C_rad_out_2H;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_2H;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC(O)=C([O])C(C)C([O])=O(31780)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(493835,'s^-1'), n=1.76395, Ea=(159.036,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_SDS;C_rad_out_2H;XH_out] for rate rule [R4H_SDS;C_rad_out_2H;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['C[C](C([O])=O)C(O)=C(C)O(31781)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(184752,'s^-1'), n=1.905, Ea=(133.679,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_SDS;C_rad_out_2H;Cs_H_out] for rate rule [R4H_SDS;C_rad_out_2H;Cs_H_out_noH]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C(O)=C([O])C(C)C(=O)O(31782)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(44439,'s^-1'), n=1.84103, Ea=(37.7069,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R5H_CCC;O_rad_out;XH_out] + [R5H_CCC(Cd);Y_rad_out;XH_out] for rate rule [R5H_CCC(Cd);O_rad_out;O_H_out]
Euclidian distance = 1.41421356237
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['[CH2]C(C([O])=O)C(O)=C(C)O(31783)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(121000,'s^-1'), n=1.9, Ea=(55.6472,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 92 used for R5H_SSMS;C_rad_out_2H;Cs_H_out_2H
Exact match found for rate rule [R5H_SSMS;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C([O])=C(O)C(C)C(=O)O(31784)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(293856,'s^-1'), n=1.47286, Ea=(78.6904,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R6H;O_rad_out;XH_out] + [R6H_RSSMS;Y_rad_out;XH_out] for rate rule [R6H_RSSMS;O_rad_out;O_H_out]
Euclidian distance = 1.41421356237
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['C[CH]C([O])=O(2125)', '[CH2]C(O)=[C]O(31700)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(1.59671e+07,'m^3/(mol*s)'), n=0.0113737, Ea=(2.96199,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;C_rad/H/OneDeC] for rate rule [Cd_rad;C_rad/H/OneDeC]
Euclidian distance = 1.0
family: R_Recombination"""),
)
reaction(
label = 'reaction15',
reactants = ['[O][C]=O(669)', '[CH2]C(O)=C(O)[CH]C(4609)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(2.71464e+07,'m^3/(mol*s)'), n=0.107721, Ea=(5.76381,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [C_rad/H/CdCs;Y_rad]
Euclidian distance = 0
family: R_Recombination"""),
)
reaction(
label = 'reaction16',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC(C([O])=O)C1(O)C[C]1O(31785)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(3.473e+12,'s^-1'), n=0.247, Ea=(231.216,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3_D;doublebond_intra_secNd;radadd_intra_cs] for rate rule [R3_D;doublebond_intra_secNd_NdNd;radadd_intra_cs2H]
Euclidian distance = 1.41421356237
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction17',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C(O)=C(O)C(C)[C]1OO1(31786)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(1.55936e+11,'s^-1'), n=0.551275, Ea=(361.103,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3_linear;multiplebond_intra;radadd_intra] for rate rule [R3_CO;carbonyl_intra_Nd;radadd_intra_O]
Euclidian distance = 2.44948974278
family: Intra_R_Add_Endocyclic
Ea raised from 359.8 to 361.1 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction18',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C1(O)OC(=O)C(C)[C]1O(31643)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(2.63144e+09,'s^-1'), n=0.656667, Ea=(59.3431,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5_SS_D;doublebond_intra_secNd;radadd_intra] for rate rule [R5_SS_D;doublebond_intra_secNd;radadd_intra_O]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction19',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC1[C](O)[C](O)COC1=O(31669)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(4.78145e+10,'s^-1'), n=0.346137, Ea=(99.0339,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6_SMS;multiplebond_intra;radadd_intra_cs2H] for rate rule [R6_SMS_CO;carbonyl_intra;radadd_intra_cs2H]
Euclidian distance = 1.41421356237
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction20',
reactants = ['CH2(S)(23)', 'C=C(O)[C](O)CC([O])=O(31774)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(873476,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;Cs_H] for rate rule [carbene;C/H2/TwoDe]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction21',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['CC1C(=O)OCC(O)=C1O(31591)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(4e+12,'s^-1'), n=0, Ea=(7.5312,'kJ/mol'), T0=(1,'K'), Tmin=(550,'K'), Tmax=(650,'K'), comment="""Estimated using template [R6_SSSDS;Y_rad_out;Cpri_rad_out_2H] for rate rule [R6_SSSDS;O_rad;Cpri_rad_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction22',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C(O)C(=O)C(C)C([O])=O(31787)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(4235.27,'s^-1'), n=2.8, Ea=(143.114,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R_ROR;R1_doublebond;R2_doublebond_CsC;R_O_H]
Euclidian distance = 0
family: ketoenol"""),
)
reaction(
label = 'reaction23',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH2]C(=O)C(O)C(C)C([O])=O(31788)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(1290.48,'s^-1'), n=2.90375, Ea=(139.674,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R_ROR;R1_doublebond;R2_doublebond;R_O_H]
Euclidian distance = 0
family: ketoenol"""),
)
reaction(
label = 'reaction24',
reactants = ['[CH2]C(O)=C(O)C=C([O])OC(31789)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(7040,'s^-1'), n=2.66, Ea=(313.8,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R_ROR;R1_doublebond;R2_doublebond;R_O_C] for rate rule [R_ROR;R1_doublebond_CHR;R2_doublebond;R_O_C]
Euclidian distance = 1.0
family: ketoenol"""),
)
reaction(
label = 'reaction25',
reactants = ['[CH2]C(O)=C(O)OC([O])=CC(31790)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(7040,'s^-1'), n=2.66, Ea=(313.8,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R_ROR;R1_doublebond;R2_doublebond;R_O_C] for rate rule [R_ROR;R1_doublebond_CHCH3;R2_doublebond;R_O_C]
Euclidian distance = 2.0
family: ketoenol"""),
)
reaction(
label = 'reaction26',
reactants = ['CH2(19)', 'CC(C([O])=O)C(O)=[C]O(31791)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_rad/NonDe;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction27',
reactants = ['O(4)', '[CH2]C(O)=C(O)C(C)[C]=O(31792)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(2085.55,'m^3/(mol*s)'), n=1.09077, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [Y_rad;O_birad] for rate rule [CO_rad/NonDe;O_birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -8.3 to 0 kJ/mol."""),
)
reaction(
label = 'reaction28',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['C=C(O)C1(O)C(C)C1([O])[O](31736)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(2.63856e+09,'s^-1'), n=0.755479, Ea=(232.615,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_S;multiplebond_intra;radadd_intra_cs] for rate rule [R4_S_CO;carbonylbond_intra;radadd_intra_csNdCd]
Euclidian distance = 2.44948974278
family: Intra_R_Add_Exocyclic
Ea raised from 230.0 to 232.6 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction29',
reactants = ['H(3)', 'C=C(O)C(=O)C(C)C([O])=O(31793)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(6.39e+06,'cm^3/(mol*s)'), n=2.09, Ea=(25.5224,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 2835 used for Od_CO-CdCs;HJ
Exact match found for rate rule [Od_CO-CdCs;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction30',
reactants = ['H(3)', 'C=C(O)C(O)=C(C)C([O])=O(31794)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(72.3521,'m^3/(mol*s)'), n=1.66655, Ea=(10.8198,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-OneDeCs_Cds;HJ] for rate rule [Cds-COCs_Cds;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction31',
reactants = ['CH3(17)', 'C=C(O)C(O)=CC([O])=O(31795)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS31',
kinetics = Arrhenius(A=(0.0143836,'m^3/(mol*s)'), n=2.40621, Ea=(24.5235,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-OneDeH_Cds;CsJ-HHH] for rate rule [Cds-COH_Cds;CsJ-HHH]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction32',
reactants = ['[O][C]=O(669)', 'C=C(O)C(O)=CC(5562)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS32',
kinetics = Arrhenius(A=(0.00423131,'m^3/(mol*s)'), n=2.43347, Ea=(5.59858,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-CsH_Cds-OneDe;CJ] for rate rule [Cds-CsH_Cds-CdOs;CJ]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction33',
reactants = ['C=C(O)C([O])C(C)C([O])=O(31796)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS33',
kinetics = Arrhenius(A=(1.52488e+09,'s^-1'), n=1.21745, Ea=(162.572,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_Cd] for rate rule [R2H_S;O_rad_out;Cs_H_out_Cd]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction34',
reactants = ['C=C(O)C(O)[C](C)C([O])=O(31797)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS34',
kinetics = Arrhenius(A=(3.62e+13,'s^-1'), n=-0.14, Ea=(184.096,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_noH] for rate rule [R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_Cd]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction35',
reactants = ['[CH2]C(C([O])=O)C(O)C(=C)O(31798)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS35',
kinetics = Arrhenius(A=(5.14e-16,'s^-1'), n=8.15, Ea=(68.6594,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""Estimated using template [R3H_SS;C_rad_out_2H;Cs_H_out_OneDe] for rate rule [R3H_SS_Cs;C_rad_out_2H;Cs_H_out_Cd]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction36',
reactants = ['[CH]=C(O)C(O)C(C)C([O])=O(31799)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS36',
kinetics = Arrhenius(A=(6.04e+10,'s^-1'), n=0.59, Ea=(135.98,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R3H_DS;Cd_rad_out_singleH;Cs_H_out_NonDe] for rate rule [R3H_DS;Cd_rad_out_singleH;Cs_H_out_NDMustO]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction37',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['[CH]=C(O)[C](O)C(C)C(=O)O(31800)'],
transitionState = 'TS37',
kinetics = Arrhenius(A=(6.854,'s^-1'), n=3.311, Ea=(128.721,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;O_rad_out;Cd_H_out_singleH] for rate rule [R6HJ_3;O_rad_out;Cd_H_out_singleH]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 4.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction38',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['C=C(O)C(O)=C(C)C(=O)O(31598)'],
transitionState = 'TS38',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad_NDe]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction39',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['C=C(O)C(=O)C(C)C(=O)O(8754)'],
transitionState = 'TS39',
kinetics = Arrhenius(A=(2.14e+09,'s^-1'), n=0.137, Ea=(17.782,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad_De] for rate rule [R5radEndo;Y_rad;XH_Rrad_De]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction40',
reactants = ['C=C(O)C(C)(O)[CH]C([O])=O(31801)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS40',
kinetics = Arrhenius(A=(5.59192e+09,'s^-1'), n=1.025, Ea=(194.765,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCsCJ;CsJ-OneDeH;CH3] for rate rule [cCs(-R!HR!H)CJ;CsJ-OneDeH;CH3]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction25',
reactants = ['C=C(O)C(O)([CH]C)C([O])=O(10239)'],
products = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
transitionState = 'TS41',
kinetics = Arrhenius(A=(1.08731e+10,'s^-1'), n=0.796, Ea=(140.034,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [cCsCJ;CsJ;CO] + [cCs(-R!HR!H)CJ;CsJ;C] for rate rule [cCs(-R!HR!H)CJ;CsJ-CsH;CO]
Euclidian distance = 1.41421356237
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction42',
reactants = ['C=C(O)[C](O)C(C)C([O])=O(31589)'],
products = ['C=C(O)C1(O)OC(=O)C1C(31686)'],
transitionState = 'TS42',
kinetics = Arrhenius(A=(3.24e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Ypri_rad_out] for rate rule [R4_SSS;C_rad_out_OneDe/O;Opri_rad]
Euclidian distance = 3.16227766017
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
network(
label = '5103',
isomers = [
'C=C(O)[C](O)C(C)C([O])=O(31589)',
],
reactants = [
('CO2(13)', 'C=C(O)C(O)=CC(5562)'),
],
bathGas = {
'N2': 0.5,
'Ne': 0.5,
},
)
pressureDependence(
label = '5103',
Tmin = (300,'K'),
Tmax = (2000,'K'),
Tcount = 8,
Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
Pmin = (0.01,'bar'),
Pmax = (100,'bar'),
Pcount = 5,
Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
| [
"qin.she@husky.neu.edu"
] | qin.she@husky.neu.edu |
1af5cd448e1e9c8e83bc546e3cbdb5ce3b00683c | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2387/60708/290720.py | ba2f48389061059797cdf4f0828fef6430847308 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | s1=input()
s2=input()
if(s1=='1'):
print(16)
else:
print(s2) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
932201f1f2a80402144846da9103a14f970583bc | 629968cbd491dcced1a64165523b7ebd8b548abb | /app/science/demo_image/sara_data_source/cleanup.py | a3429bf5322dd4768529c0ea765a24186d8c1169 | [] | no_license | countzen2000/face-service | 1bb121b55a534181431cdcca03ad9be61faaa45f | 51cecda39347d1bacd58e4abe0891c9e8bfdbe52 | refs/heads/master | 2021-06-21T01:59:16.614212 | 2017-08-15T01:09:14 | 2017-08-15T01:09:14 | 100,322,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | from __future__ import division, print_function, absolute_import
import cv2
import os
# Loading each image is very slow, but lets try one run
face_cascade = cv2.CascadeClassifier('classifier/haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('classifier/haarcascade_eye.xml')
# # Make sure the data is normalized
# img_prep = ImagePreprocessing()
# img_prep.add_featurewise_zero_center()
# img_prep.add_featurewise_stdnorm()
#
# # Create extra synthetic training data by flipping, rotating and blurring the
# # images on our data set.
# img_aug = ImageAugmentation()
# img_aug.add_random_flip_leftright()
# img_aug.add_random_rotation(max_angle=25.)
# img_aug.add_random_blur(sigma_max=3.)
# Scan every dumped frame, run the Haar frontal-face detector on it, and
# move frames in which no face is found into the "noface" subdirectory so
# they are excluded from later training.
for i in range(1, 7377):  # frames are numbered 0001-7376
    # Zero-pad the frame number to four digits (e.g. 7 -> "0007").
    count_name = str(i).zfill(4)
    # BUG FIX: cv2.imread does not raise on a missing or unreadable file --
    # it returns None -- so the previous bare try/except never triggered and
    # a missing frame crashed detectMultiScale instead.  Test the result.
    gray = cv2.imread("demo_image/sara_data_source/output-{0}.jpg".format(count_name), cv2.IMREAD_GRAYSCALE)
    if gray is None:
        continue
    # detect faces
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    # If no face was detected, report the frame and move it aside.
    if len(faces) <= 0:
        print ("noFaces")
        print(count_name)
        try:
            os.rename("demo_image/sara_data_source/output-{0}.jpg".format(count_name), "demo_image/sara_data_source/noface/output-{0}.jpg".format(count_name))
        except OSError:
            # Narrowed from a bare except: rename fails with OSError (e.g.
            # the file vanished or the "noface/" directory does not exist).
            print ("couldn't move")
            print(count_name)
# cv2.waitKey() | [
"billy.shin@bcgdv.com"
] | billy.shin@bcgdv.com |
4c862c1e3a54efb2995cba4d73ef0e5c4fbc6473 | ae2c38c8562ee9dbb7578e965ee1109043a25814 | /src/plot_end.py | 0a1ec75702dd656b55cb9bcaa40753c5cfb2e3d2 | [] | no_license | agilman0803/Main_PDE_Repo | 988dc1043eff528384eaae5f4743f21a30fbe815 | bc54f20c774a9cede482db11091a8ed6303438d8 | refs/heads/master | 2020-12-08T06:59:40.371291 | 2020-01-15T05:03:01 | 2020-01-15T05:03:01 | 232,919,761 | 0 | 0 | null | 2020-01-09T22:39:43 | 2020-01-09T22:39:42 | null | UTF-8 | Python | false | false | 3,120 | py | ## @file plot_end
# Visualizer sub class for plotting the first and last frames of the solution to a
# 1d and a 2d problem
# Uses pyplot.plot and pyplot.contourf respectively to generate the desired plots
from . import visualizer
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
## PLotEnd
# Implementation of plot_end package as a class
class PlotEnd(visualizer.Visualizer):
    ## Constructor
    #
    # @param outloc directory name for output to be saved to
    def __init__(self,outloc):
        self.outloc = outloc
    ## plot1d
    # plots the first and last frames of a 1d grid held in a logger
    #
    # plots a 1d line plot of the first and last frames of the PDE solution and saves it to the output directory
    # uses matplotlib to actually generate the plots
    # @param log a Logger() object
    # @param name a string containing the desired name of the file
    # @param **kwargs keyword arguments that will be passed to the plt.plot function
    def plot1d(self,log,name="",**kwargs):
        fext = "1d_end_"+name
        suffix = ".png"
        last_frame = log.get_frame(-1)
        initial_frame = log.get_frame(0)
        if last_frame[1].spec.ndim != 1:
            raise IndexError("Attempting to do a 1d plot of a multi dimensional grid")
        # Coordinates come from the frame's GridSpec; transpose so the
        # coordinate array lines up with the solution values for plotting.
        x = last_frame[1].spec.coords
        x = np.transpose(np.array(x))
        plt.figure()
        plt.subplot(211)
        plt.plot(x,initial_frame[1].grid,**kwargs)
        plt.subplot(212)
        plt.plot(x,last_frame[1].grid,**kwargs)
        plt.savefig(self.outloc+fext+suffix)
        plt.close(fig='all')
    ## plot2d
    # plots the first and last frames of a 2d grid held in a logger
    #
    # plots a 2d contour of the first and last frames of the PDE solution and saves it to the output directory
    # uses matplotlib to actually generate the plots
    # @param log a Logger() object
    # @param name a string containing the desired name of the file
    # @param **kwargs keyword arguments that will be passed to the plt.contourf function
    def plot2d(self,log,name="",**kwargs):
        fext = "2d_end_"+name
        suffix = ".png"
        last_frame = log.get_frame(-1)
        initial_frame = log.get_frame(0)
        if last_frame[1].spec.ndim != 2:
            raise IndexError("Attempting to do a 2d plot of a multi dimensional grid")
        x,y=log.get_frame(0)[1].spec.coords #sets up coordinates for plot using GridSpec coordinate tuple
        # (Removed an unused np.meshgrid call; contourf accepts the 1d
        # coordinate arrays directly.)
        levels1 = MaxNLocator(nbins=15).tick_values(initial_frame[1].grid.min(), initial_frame[1].grid.max())
        levels2 = MaxNLocator(nbins=15).tick_values(last_frame[1].grid.min(), last_frame[1].grid.max())
        cmap = plt.get_cmap('PiYG')
        fig, (ax0, ax1) = plt.subplots(nrows=2)
        cf0 = ax0.contourf(x,y,initial_frame[1].grid, levels=levels1,cmap=cmap,**kwargs)
        fig.colorbar(cf0, ax=ax0)
        # BUG FIX: the "Initial State" title was previously set on ax1 (and
        # then overwritten by "Final State"), leaving the top plot untitled.
        ax0.set_title('Initial State')
        cf1 = ax1.contourf(x,y,last_frame[1].grid, levels=levels2,cmap=cmap,**kwargs)
        fig.colorbar(cf1, ax=ax1)
        ax1.set_title('Final State')
        fig.tight_layout()
        plt.savefig(self.outloc+fext+suffix)
        plt.close(fig='all')
| [
"j.v.roggeveen@gmail.com"
] | j.v.roggeveen@gmail.com |
faaf74956ae15b83a8c7ac9a3bcd61b1ebb018f0 | 2434bad76814cb49a755c5f3a76060655903cab8 | /solid/ocp.py | b5efd158db05534fcd0828fddc43e9b3426859d9 | [] | no_license | chrismedrela/2020-01-27-tdd | 87208142a369f37db958c03e84f01c79424630a3 | b07fd7261ce6c8cd622bb477ba848dd1d05df15c | refs/heads/master | 2020-12-21T08:36:23.237118 | 2020-01-26T21:20:54 | 2020-01-26T21:20:54 | 236,375,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Optional
import pytest
@dataclass
class Customer:
    # Minimal customer record consumed by calculate_discount_percentage().
    first_purchase_date: Optional[datetime] # datetime or None
    birth_date: datetime
    is_veteran: bool
    # @dataclass decorator automagically generates the following constructor:
    # def __init__(self, first_purchase_date, birth_date, is_veteran):
    #     self.first_purchase_date = first_purchase_date
    #     self.birth_date = birth_date
    #     self.is_veteran = is_veteran
def calculate_discount_percentage(customer):
    """Return the discount percentage (0-20) for *customer*.

    Rules, in order of application:
    - seniors (65+ years old) start at 5%,
    - first-time buyers get a flat 15%,
    - returning buyers get a tenure tier: 10% after one year, 12% after
      five, 20% after ten,
    - veterans are guaranteed at least 10%.
    """
    now = datetime.now()
    year = timedelta(days=365)

    # Senior baseline (may be overridden by the purchase-history rules).
    discount = 5 if customer.birth_date <= now - 65 * year else 0

    if customer.first_purchase_date is None:
        # First-time purchase earns a flat 15%.
        discount = 15
    else:
        # Longest tenure first; stop at the first tier the customer reaches.
        # Customers with less than one year of history keep the baseline.
        tenure_tiers = ((10 * year, 20), (5 * year, 12), (year, 10))
        for min_tenure, tier_discount in tenure_tiers:
            if customer.first_purchase_date <= now - min_tenure:
                discount = tier_discount
                break

    if customer.is_veteran:
        discount = max(discount, 10)
    return discount
"chris.medrela+github@gmail.com"
] | chris.medrela+github@gmail.com |
8ce2af89ed640e6aa1387411389a00fc631f3498 | d9b03a4cc29e9bebf78c0be3066f9e78203af22e | /PID_pwm0.py | bd3fb875ac92645a3e20350da46f3fe08a2b625c | [] | no_license | simketejong/aquadouwe | 2b6d3df7d738313730125777e9610a822a313049 | e52f62a4f8e99c7218fe23b52d48ae03229e4319 | refs/heads/master | 2021-01-10T03:20:40.335896 | 2015-10-24T18:49:46 | 2015-10-24T18:49:46 | 43,691,379 | 1 | 1 | null | 2015-10-25T19:51:43 | 2015-10-05T15:00:58 | JavaScript | UTF-8 | Python | false | false | 2,626 | py | #The recipe gives simple implementation of a Discrete Proportional-Integral-Derivative (PID) controller. PID controller gives output value for error between desired reference input and measurement feedback to minimize error value.
#More information: http://en.wikipedia.org/wiki/PID_controller
#
#cnr437@gmail.com
#
####### Example #########
#
#p=PID(3.0,0.4,1.2)
#p.setPoint(5.0)
#while True:
# pid = p.update(measurement_value)
#
#
import os
class PID:
    """
    Discrete PID control.

    Keeps the integrator and the previous error ("Derivator") between
    calls, so update() is meant to be invoked once per measurement sample.
    The last P/I/D contributions are exposed as the ``P_value``,
    ``I_value`` and ``D_value`` attributes after each update.
    """

    def __init__(self, P=2.0, I=0.0, D=1.0, Derivator=0, Integrator=0, Integrator_max=500, Integrator_min=-500):
        self.Kp = P
        self.Ki = I
        self.Kd = D
        self.Derivator = Derivator
        self.Integrator = Integrator
        self.Integrator_max = Integrator_max
        self.Integrator_min = Integrator_min
        self.set_point = 0.0
        self.error = 0.0

    def update(self, current_value):
        """Calculate the PID output for the given feedback sample."""
        self.error = self.set_point - current_value

        # Proportional term.
        self.P_value = self.Kp * self.error

        # Derivative term: change of the error since the previous sample.
        self.D_value = self.Kd * (self.error - self.Derivator)
        self.Derivator = self.error

        # Integral term with anti-windup clamping.
        accumulated = self.Integrator + self.error
        accumulated = min(accumulated, self.Integrator_max)
        accumulated = max(accumulated, self.Integrator_min)
        self.Integrator = accumulated
        self.I_value = self.Integrator * self.Ki

        return self.P_value + self.I_value + self.D_value

    def setPoint(self, set_point):
        """Set a new target value and reset the controller history."""
        self.set_point = set_point
        self.Integrator = 0
        self.Derivator = 0

    # --- simple accessors, kept for API compatibility ------------------

    def setIntegrator(self, Integrator):
        self.Integrator = Integrator

    def setDerivator(self, Derivator):
        self.Derivator = Derivator

    def setKp(self, P):
        self.Kp = P

    def setKi(self, I):
        self.Ki = I

    def setKd(self, D):
        self.Kd = D

    def getPoint(self):
        return self.set_point

    def getError(self):
        return self.error

    def getIntegrator(self):
        return self.Integrator

    def getDerivator(self):
        return self.Derivator
# --- control loop ------------------------------------------------------
# Drive the fan PWM duty-cycle file from the CPU temperature.  The PID
# output (pid_echt) is only logged; the duty cycle itself is adjusted by a
# simple +/-1 bang-bang rule clamped to [15, 100].
p=PID(1.0,0.00001,0,Derivator=0,Integrator=0,Integrator_max=500,Integrator_min=-500)
p.setPoint(30.0)
pid=100
while True:
    # Read the CPU temperature (reported in millidegrees C) from sysfs.
    # Using a context manager closes the popen handle each iteration.
    with os.popen('/bin/cat /sys/class/thermal/thermal_zone0/temp') as f:
        measurement_value=int(f.read())
    measurement_value=measurement_value/1000
    pid_echt = p.update(measurement_value)
    print ("pid1=%f value=%f error=%f"%(pid_echt,measurement_value,p.getError()))
    if measurement_value > 34:
        pid=pid+1
        print ("+pid=%d"%pid)
    else:
        pid=pid-1
        print ("-pid=%d"%pid)
    # Clamp the duty cycle to the supported range.
    if pid > 100:
        pid = 100
    if pid < 15:
        pid = 15
    # BUG FIX: the loop previously ended with `fh.close` (no parentheses),
    # which never closed the file and leaked a handle per iteration; a
    # context manager guarantees the write is flushed and the file closed.
    with open("/tmp/pwm0.duty", "w") as fh:
        fh.write("%d" %pid)
    print ("pid=%d"%pid)
| [
"simketejong@gmail.com"
] | simketejong@gmail.com |
2c693c0650b6399755d44d77ec37321c6343f7ae | 56b135591185acd69e93d2ad0683a94d90201511 | /py/55.py | 3358d2b56b53e3611f8fc76b5ef86bf04c0edc4c | [] | no_license | threeifbywhiskey/houston | 568e95efb521042b9f6695a26cb09eff1286821d | da9647106109a1c45cce61261791026216f8bc11 | refs/heads/master | 2021-01-20T06:59:49.771724 | 2014-03-16T16:16:28 | 2014-03-16T16:16:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | def lychrel(n):
for i in xrange(50):
n += int(str(n)[::-1])
if str(n) == str(n)[::-1]:
return False
return True
print len(filter(lychrel, xrange(10000)))
| [
"3ifbyw@gmail.com"
] | 3ifbyw@gmail.com |
e0d09522cbd1931a1ae6ab337616cd4f3dbb95c5 | 0f9b8f4c735036cbeafee3a169152aff92796892 | /simulacion/venv/Scripts/easy_install-3.6-script.py | f3a22db2c49fc50ff1a861e63ae48df0d505d388 | [] | no_license | fabarros/CAPSTONE | 922bb3f4d791e54e82bc7c9a6bddf0b9996e3fae | d3fb965f66ca64e2948bf705806b08a0ce5e8e96 | refs/heads/master | 2020-07-31T13:44:20.264071 | 2019-09-26T14:13:48 | 2019-09-26T14:13:48 | 210,619,240 | 0 | 0 | null | null | null | null | ISO-8859-2 | Python | false | false | 516 | py | #!"C:\Users\fabar\OneDrive - uc.cl\Universidad\2019-2\Investigación de Operaciones\simulacion\venv\Scripts\python.exe" -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.6'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.6')()
)
| [
"fabarros@uc.cl"
] | fabarros@uc.cl |
0c39eaa9a35b6eee008799c9dcf1c6b2829403cb | c4ae91e31a13865df38994c831cc891833fcb270 | /users/migrations/0008_rename_commmented_customuser_commmented_on.py | 0d232e9cc28cbee04e74a75d2c1b8d770602ce50 | [] | no_license | Beefy-py/drf-react-youpost | e8eae87fb01b5e2f9e50f048563952f8899d2c17 | 241abed96a9539b42d7cd264caa08e4ee445ff04 | refs/heads/main | 2023-07-11T16:00:48.924071 | 2021-08-31T23:35:26 | 2021-08-31T23:35:26 | 387,951,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | # Generated by Django 3.2.6 on 2021-08-21 20:49
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration (3.2.6): renames CustomUser.commmented
    # to CustomUser.commmented_on.  The triple-"m" spelling is preserved from
    # the model definition.
    dependencies = [
        ('users', '0007_auto_20210818_2129'),
    ]
    operations = [
        migrations.RenameField(
            model_name='customuser',
            old_name='commmented',
            new_name='commmented_on',
        ),
    ]
| [
"hoftkenny@gmail.com"
] | hoftkenny@gmail.com |
338659e0b3650f32f043d3dddbc2609ce451583f | 1d204a27ca5b798b557c546a26acf29575261bd9 | /bcbio/upload/__init__.py | 2ec90ac6f67878a29dbb32eab5c7e6e2bc2881cf | [] | no_license | kevyin/bcbio-nextgen | 8b9bf91cba9be52dae8625589985a2642b83ebb2 | 6b05823d1b5983c4d6e2a9d9a48ba72a7fad30b6 | refs/heads/master | 2021-01-15T20:19:09.572940 | 2013-02-24T05:35:21 | 2013-02-24T05:35:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,769 | py | """Handle extraction of final files from processing pipelines into storage.
"""
import datetime
import os
from bcbio.upload import shared, filesystem, galaxy
_approaches = {"filesystem": filesystem,
"galaxy": galaxy}
def from_sample(sample):
    """Upload results of processing from an analysis pipeline sample.

    Does nothing unless the sample's info block carries an "upload"
    configuration; otherwise every output file is pushed through the
    configured upload approach (filesystem by default).
    """
    upload_config = sample["info"].get("upload")
    if not upload_config:
        return
    method = upload_config.get("method", "filesystem")
    approach = _approaches[method]
    for finfo in _get_files(sample):
        approach.update_file(finfo, sample["info"], upload_config)
# ## File information from sample
def _get_files(sample):
    """Retrieve files for the sample, dispatching by analysis type.

    Each file is a dictionary containing the path plus associated
    metadata about the file and pipeline versions.  Unknown analysis
    types yield no files.
    """
    variant_analyses = ("variant", "SNP calling")
    if sample["info"].get("analysis") in variant_analyses:
        return _get_files_variantcall(sample)
    return []
def _add_meta(xs, sample):
    """Attach shared metadata to each file dictionary, mutating in place.

    Adds the file's modification timestamp and the sample name, then
    returns the annotated dictionaries as a list.
    """
    annotated = []
    sample_name = sample["name"][-1]
    for finfo in xs:
        finfo["mtime"] = shared.get_file_timestamp(finfo["path"])
        finfo["sample"] = sample_name
        annotated.append(finfo)
    return annotated
def _get_files_variantcall(sample):
    """Return output files for the variant calling pipeline.

    Includes the summary PDF, the prepared BAM, and one VCF per variant
    caller, all annotated with shared metadata.
    """
    files = [
        {"path": sample["summary"]["pdf"], "type": "pdf", "ext": "summary"},
        {"path": sample["work_bam"], "type": "bam", "ext": "ready"},
    ]
    files.extend(
        {"path": variant["vrn_file"],
         "type": "vcf",
         "ext": variant["variantcaller"],
         "variantcaller": variant["variantcaller"]}
        for variant in sample["variants"])
    return _add_meta(files, sample)
| [
"chapmanb@50mail.com"
] | chapmanb@50mail.com |
f6f49e460639a36eed8f533cb21319c96f7010cf | 7f1ea035ef62640554052168aaa46427a97adbd4 | /metagraph_cuda/plugins/cudf/types.py | e14bc1997f071b98ba63623396d8668a1ab7bdf7 | [
"Apache-2.0"
] | permissive | paul-tqh-nguyen/metagraph-cuda | e330b41c4b0ec49746997d3e1c52606d4441b6f2 | b9a7a83ed6f1f841a37ee7a77531d4dad82b4ea3 | refs/heads/master | 2023-01-29T06:36:12.598906 | 2020-12-16T18:38:15 | 2020-12-16T18:38:15 | 286,832,050 | 0 | 0 | Apache-2.0 | 2020-08-11T19:34:16 | 2020-08-11T19:34:15 | null | UTF-8 | Python | false | false | 11,495 | py | import numpy as np
from metagraph.wrappers import (
NodeSetWrapper,
NodeMapWrapper,
EdgeSetWrapper,
EdgeMapWrapper,
)
from metagraph import ConcreteType, Wrapper, dtypes
from metagraph.types import (
DataFrame,
Vector,
NodeSet,
NodeMap,
EdgeSet,
EdgeMap,
)
from .. import has_cudf
from typing import Set, List, Dict, Any
if has_cudf:
import cudf
import cupy
    class CuDFType(ConcreteType, abstract=DataFrame):
        """Concrete metagraph type whose value is a raw ``cudf.DataFrame``."""
        value_type = cudf.DataFrame
        @classmethod
        def assert_equal(
            cls,
            obj1,
            obj2,
            aprops1,
            aprops2,
            cprops1,
            cprops2,
            *,
            rel_tol=1e-9,
            abs_tol=0.0,
        ):
            # Equality checking for raw cudf DataFrames is not supported yet.
            raise NotImplementedError
    class CuDFVector(Wrapper, abstract=Vector):
        """
        CuDFVector stores data in format where the index is the vector position and the values are the values.
        """
        def __init__(self, data):
            # data must be a cudf.Series with an int64 positional index;
            # positions missing from the index are gaps in the vector.
            self._assert_instance(data, cudf.Series)
            self._assert(
                data.index.dtype == np.dtype("int64"),
                f"{data} does not have an integer index.",
            )
            self.value = data
        def __contains__(self, node_id):
            # NOTE(review): membership delegates to the cudf.Series itself;
            # confirm whether this tests against the index or the values.
            return node_id in self.value
        def __getitem__(self, position):
            # Positional access (iloc), not index-label access.
            return self.value.iloc[position]
        def __len__(self):
            # Logical length: one past the largest index label, which can
            # exceed the number of stored entries if the index has gaps.
            return self.value.index.max() + 1
        class TypeMixin:
            @classmethod
            def _compute_abstract_properties(
                cls, obj, props: Set[str], known_props: Dict[str, Any]
            ) -> Dict[str, Any]:
                ret = known_props.copy()
                # fast properties
                for prop in {"dtype"} - ret.keys():
                    if prop == "dtype":
                        ret[prop] = dtypes.dtypes_simplified[obj.value.dtype]
                return ret
            @classmethod
            def assert_equal(
                cls,
                obj1,
                obj2,
                aprops1,
                aprops2,
                cprops1,
                cprops2,
                *,
                rel_tol=1e-9,
                abs_tol=0.0,
            ):
                # Compare lengths, abstract properties, then element-wise
                # equality.  (rel_tol/abs_tol are currently unused: exact.)
                assert len(obj1.value) == len(
                    obj2.value
                ), f"{len(obj1.value)} != {len(obj2.value)}"
                assert (
                    aprops1 == aprops2
                ), f"abstract property mismatch: {aprops1} != {aprops2}"
                assert (obj1.value == obj2.value).all()
    class CuDFNodeMap(NodeMapWrapper, abstract=NodeMap):
        """
        CuDFNodeMap stores data in a cudf.Series where the index corresponds to the node ids
        and the entries correspond to the mapped values.
        """
        def __init__(self, data):
            self._assert_instance(data, cudf.Series)
            self.value = data
        def __contains__(self, node_id):
            return node_id in self.value.index
        def __getitem__(self, node_id):
            # NOTE(review): the chained ``.loc[self.value]`` looks wrong -- a
            # plain ``self.value.loc[node_id]`` would be expected here; verify
            # against cudf semantics and the test suite.
            return self.value.loc[node_id].loc[self.value]
        def __len__(self):
            return len(self.value)
        def copy(self):
            return CuDFNodeMap(self.value.copy())
        @property
        def num_nodes(self):
            # Every entry of the series represents one node.
            return len(self.value)
        class TypeMixin:
            @classmethod
            def _compute_abstract_properties(
                cls, obj, props: List[str], known_props: Dict[str, Any]
            ) -> Dict[str, Any]:
                ret = known_props.copy()
                # fast properties
                for prop in props - ret.keys():
                    if prop == "dtype":
                        ret[prop] = dtypes.dtypes_simplified[obj.value.dtype]
                return ret
            @classmethod
            def assert_equal(
                cls,
                obj1,
                obj2,
                aprops1,
                aprops2,
                cprops1,
                cprops2,
                *,
                rel_tol=1e-9,
                abs_tol=0.0,
            ):
                assert (
                    aprops1 == aprops2
                ), f"abstract property mismatch: {aprops1} != {aprops2}"
                # Floats are compared approximately on-device via cupy;
                # everything else is compared exactly.
                if aprops1.get("dtype") == "float":
                    assert (cupy.isclose(obj1.value, obj2.value)).all()
                else:
                    assert (obj1.value == obj2.value).all()
class CuDFEdgeSet(EdgeSetWrapper, abstract=EdgeSet):
def __init__(
self, df, src_label="source", dst_label="target", *, is_directed=True
):
self._assert_instance(df, cudf.DataFrame)
self.value = df
self.is_directed = is_directed
self.src_label = src_label
self.dst_label = dst_label
self._assert(src_label in df, f"Indicated src_label not found: {src_label}")
self._assert(dst_label in df, f"Indicated dst_label not found: {dst_label}")
# Build the MultiIndex representing the edges
self.index = df.set_index([src_label, dst_label]).index
def copy(self):
return CuDFEdgeSet(
self.value.copy(),
self.src_label,
self.dst_label,
bool(self.is_directed),
)
@property
def num_nodes(self):
src_nodes, dst_nodes = self.index.levels
return len(src_nodes | dst_nodes)
class TypeMixin:
@classmethod
def _compute_abstract_properties(
cls, obj, props: List[str], known_props: Dict[str, Any]
) -> Dict[str, Any]:
ret = known_props.copy()
# fast properties
for prop in {"is_directed"} - ret.keys():
if prop == "is_directed":
ret[prop] = obj.is_directed
return ret
@classmethod
def assert_equal(
cls,
obj1,
obj2,
aprops1,
aprops2,
cprops1,
cprops2,
*,
rel_tol=None,
abs_tol=None,
):
assert (
aprops1 == aprops2
), f"abstract property mismatch: {aprops1} != {aprops2}"
g1 = obj1.value
g2 = obj2.value
assert len(g1) == len(g2), f"{len(g1)} != {len(g2)}"
assert g1.index.equals(g2.index), f"{g1.index} != {g2.index}"
    class CuDFEdgeMap(EdgeMapWrapper, abstract=EdgeMap):
        """Weighted edge map backed by a ``cudf.DataFrame``.

        One row per edge: source/target ids in ``src_label``/``dst_label``
        and the edge weight in ``weight_label``.  ``self.index`` is a
        MultiIndex over (source, target) pairs.
        """
        def __init__(
            self,
            df,
            src_label="source",
            dst_label="target",
            weight_label="weight",
            *,
            is_directed=True,
        ):
            self._assert_instance(df, cudf.DataFrame)
            self.value = df
            self.is_directed = is_directed
            self.src_label = src_label
            self.dst_label = dst_label
            self.weight_label = weight_label
            self._assert(src_label in df, f"Indicated src_label not found: {src_label}")
            self._assert(dst_label in df, f"Indicated dst_label not found: {dst_label}")
            self._assert(
                weight_label in df, f"Indicated weight_label not found: {weight_label}"
            )
            # Build the MultiIndex representing the edges
            self.index = df.set_index([src_label, dst_label]).index
        @property
        def num_nodes(self):
            # Union of the distinct source and target ids.
            src_nodes, dst_nodes = self.index.levels
            return len(src_nodes | dst_nodes)
        class TypeMixin:
            @classmethod
            def _compute_abstract_properties(
                cls, obj, props: List[str], known_props: Dict[str, Any]
            ) -> Dict[str, Any]:
                ret = known_props.copy()
                # fast properties
                for prop in {"is_directed", "dtype"} - ret.keys():
                    if prop == "is_directed":
                        ret[prop] = obj.is_directed
                    if prop == "dtype":
                        ret[prop] = dtypes.dtypes_simplified[
                            obj.value[obj.weight_label].dtype
                        ]
                # slow properties, only compute if asked
                for prop in props - ret.keys():
                    if prop == "has_negative_weights":
                        # Undefined for boolean weights.
                        if ret["dtype"] == "bool":
                            ret[prop] = None
                        else:
                            ret[prop] = obj.value[obj.weight_label].lt(0).any()
                return ret
            @classmethod
            def assert_equal(
                cls,
                obj1,
                obj2,
                aprops1,
                aprops2,
                cprops1,
                cprops2,
                *,
                rel_tol=1e-9,
                abs_tol=0.0,
            ):
                assert (
                    aprops1 == aprops2
                ), f"abstract property mismatch: {aprops1} != {aprops2}"
                g1 = obj1.value
                g2 = obj2.value
                assert len(g1) == len(g2), f"{len(g1)} != {len(g2)}"
                assert (
                    g1.index.isin(g2.index).all() and g2.index.isin(g1.index).all()
                ), f"obj1 and obj2 are indexed differently."
                # Ensure dataframes are indexed the same
                if not (g1.index == g2.index).values.all():
                    g2 = (
                        g2.set_index(obj2.index)
                        .reindex(obj1.index)
                        .reset_index(drop=True)
                    )
                # Compare
                v1 = g1[obj1.weight_label]
                v2 = g2[obj2.weight_label]
                # Floats use tolerance-based comparison; everything else exact.
                if issubclass(v1.dtype.type, np.floating):
                    assert np.isclose(v1, v2, rtol=rel_tol, atol=abs_tol).all()
                else:
                    assert (v1 == v2).all()
    class CuDFNodeSet(NodeSetWrapper, abstract=NodeSet):
        """Set of node ids stored as a deduplicated, self-indexed cudf.Series."""
        def __init__(self, data):
            self._assert_instance(data, cudf.Series)
            # Deduplicate and index the series by its own values so that
            # membership tests can use the index.
            unique_values = data.unique()
            self.value = cudf.Series(unique_values).set_index(unique_values)
        def copy(self):
            return CuDFNodeSet(self.value.copy())
        @property
        def num_nodes(self):
            return len(self.value)
        def __len__(self):
            return len(self.value)
        def __iter__(self):
            # Iterate on the host (values_host copies device data to the CPU).
            return iter(self.value.values_host)
        def __contains__(self, item):
            return item in self.value.index
        class TypeMixin:
            @classmethod
            def assert_equal(
                cls,
                obj1,
                obj2,
                aprops1,
                aprops2,
                cprops1,
                cprops2,
                *,
                rel_tol=None,
                abs_tol=None,
            ):
                assert (
                    aprops1 == aprops2
                ), f"abstract property mismatch: {aprops1} != {aprops2}"
                v1, v2 = obj1.value, obj2.value
                assert len(v1) == len(v2), f"size mismatch: {len(v1)} != {len(v2)}"
                assert (v1 == v2).all(), f"node sets do not match"
| [
"paul.tqh.nguyen@gmail.com"
] | paul.tqh.nguyen@gmail.com |
d82ecb057f1699e2b2ef12d6e86d8b251fb119c2 | efda10ac2e2f029a3d75e35101445d508eb6c4b8 | /scenario-3.1/monitor.py | 36d7d525d91dd1a8f164bf1277f54860c992c362 | [] | no_license | miaomiaovivi/packet | 550b7fd13b180785b547d01c539869d1d1350107 | f8033c361b19309cd83cfeeae3163107c211421c | refs/heads/master | 2021-09-06T16:57:27.924535 | 2018-02-08T20:22:21 | 2018-02-08T20:22:21 | 113,625,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,772 | py | import getopt
import json
import os
import requests
import sys
import constant as const
from server import Server, RequestHandler
from criu import CRIU
class Monitor(object):
    """Skeleton of the active/standby monitoring service.

    Methods marked ``#TODO COMPLETE ME`` are intentionally unimplemented
    scaffolding; each docstring below describes the behavior expected of
    them.  The stubbed handlers currently return (STATUS_OK, method name).
    """
    # Local compute server endpoint (port is presumably filled in when the
    # compute server registers -- TODO confirm).
    _COMPUTE_PORT = None
    _COMPUTE_HOST = "http://127.0.0.1"
    # Directories for the current and previous CRIU checkpoint dumps.
    _CRIU_DIR = "criu_dump"
    _PREV_CRIU_DIR = "criu_prev_dump"
    # Pair machine IP and current role (ACTIVE/STANDBY), set from CLI args.
    _PAIR = ""
    _ROLE = ""
    def __init__(self):
        """
        initialize this method with a CRIU object which will be used for
        checkpointing and restoring.
        """
        #TODO COMPLETE ME
    def _dump_and_sync(self, pid):
        """
        you should run a dump on the currently running process and sync the
        contents of this folder with that of the pair machine. You could choose
        to delegate the synchronization task to an external command "rsync",
        or write your own python helper methods to sync the dir contents.
        """
        #TODO COMPLETE ME
    def compute(self, payload):
        """
        the expected behavior of this method is, it should not accept any
        request if the role is not ACTIVE, or if the compute server is not
        running.
        It should hold the request if it is currently syncing the criu dump.
        Then, it should attempt to contact with the compute server in the local
        machine. Return the response as is from the local machine if it
        received a response. Otherwise, tell the scheduler that it should
        switch roles of the machines from ACTIVE to STANDBY and STANDBY to
        ACTIVE.
        should return a tuple with status code as first argument and message
        as the second
        """
        #TODO COMPLETE ME
        return const.STATUS_OK, self.compute.__name__
    def register(self, payload):
        """
        the server that computes will register itself with the monitoring
        service in this method. Please make sure that this compute service can
        only run when the monitoring service has an ACTIVE role.
        if this is an ACTIVE monitoring service, and this is the first time you
        have got a call to register the compute server, you could choose to
        store the CRIU pre dump contents in this path.
        should return a tuple with status code as first argument and message
        as the second
        """
        #TODO COMPLETE ME
        return const.STATUS_OK, self.register.__name__
    def active(self):
        """
        if this function is called, the scheduler has assigned the role to the
        current node to be ACTIVE. Since this service didn't start as ACTIVE,
        and has been constantly syncing the criu dump content locally, this
        service is made ACTIVE by the scheduler, hence it should restore the
        service from the current dump content.
        should return a tuple with status code as first argument and message
        as the second
        """
        #TODO COMPLETE ME
        return const.STATUS_OK, self.active.__name__
    def standby(self):
        """
        if this function is called, the scheduler has assigned the role to the
        current node to be STANDBY
        should return a tuple with status code as first argument and message
        as the second
        """
        #TODO COMPLETE ME
        return const.STATUS_OK, self.standby.__name__
    def dumping(self, payload):
        """
        sets the dump status sent from the the peer monitor service.
        should return a tuple with status code as first argument and message
        as the second
        """
        #TODO COMPLETE ME
        return const.STATUS_OK, self.dumping.__name__
class MonRequestHandler(RequestHandler):
    """
    An HTTP request handler that inherits RequestHandler and adds custom
    functionality to it that is useful for monitoring service
    """
    _monitor = Monitor()
    def process_args(self):
        """
        process the path and parameters passed in the GET request and
        dispatch to the matching Monitor handler.
        should return a tuple with status code as first argument and message
        as the second
        """
        path, query_args = self.parse_get_args()
        if path == const.REGISTER_PATH:
            return self._monitor.register(query_args)
        elif path == const.ACTIVE_PATH:
            return self._monitor.active()
        elif path == const.STANDBY:
            # NOTE(review): the other branches use *_PATH constants; confirm
            # that const.STANDBY (vs. a STANDBY_PATH constant) is intentional.
            return self._monitor.standby()
        elif path == const.COMPUTE_PATH:
            return self._monitor.compute(query_args)
        elif path == const.DUMPING_PATH:
            return self._monitor.dumping(query_args)
        else:
            return self.NOT_FOUND, "Error 404: Unknown path - %s" % path
    def do_GET(self):
        """
        GET requests land in this method; process_args handles dispatching
        and returns the (status, message) pair to send back.
        """
        # BUG FIX: process_args() was previously invoked a second time for
        # the write, re-triggering the handler's side effects, and the whole
        # (status, message) tuple was written instead of the message body.
        status_code, response = self.process_args()
        self._set_headers(status_code)
        self.wfile.write(response)
def parse_args():
    """Parse -p/--pair and -r/--role from sys.argv.

    Prints a usage message and exits unless both options are supplied;
    otherwise returns the (pair, role) tuple.
    """
    options, _remainder = getopt.getopt(sys.argv[1:],
                                        "p:r:", ["pair=", "role="])
    parsed = {"pair": None, "role": None}
    for flag, value in options:
        if flag in ('-p', '--pair'):
            parsed["pair"] = value
        elif flag in ('-r', '--role'):
            parsed["role"] = value
    if not (parsed["pair"] and parsed["role"]):
        print ("Usage: %s [OPTIONS]\n"
               "Where options are:\n"
               "  -p|--pair\t- the IP of the pair machine\n"
               "  -r|--role\t- the role of the current service\n"
               % sys.argv[0])
        sys.exit(const.ERROR)
    return (parsed["pair"], parsed["role"])
if __name__ == "__main__":
    # Wire the CLI options into the Monitor class state, then start the HTTP
    # server on all interfaces, port 80 (binding to 80 requires privileges).
    pair, role = parse_args()
    Monitor._PAIR = pair
    Monitor._ROLE = role
    Server(MonRequestHandler, '0.0.0.0', 80)
| [
"xiaowei1@andrew.cmu.edu"
] | xiaowei1@andrew.cmu.edu |
53c42adfacc6ce3edf509f3185acb6127e1bb751 | 3c8246c84f94ead6703eff7729013777aa344388 | /find_street_mention.py | c4852c465fb4ef50082498922cce664cd6d2e36d | [] | no_license | NATALIAGR/Twitter_location_mention_detection | 39a018d474386df583187039abe431006875df6e | 52fa244238ec8a996e2b56cc99543c2094b20801 | refs/heads/master | 2020-07-07T00:26:16.741996 | 2019-08-19T16:01:49 | 2019-08-19T16:01:49 | 203,185,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,388 | py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
"""
@author: Natalia Grion
"""
import re
import nltk
from nltk import word_tokenize
nltk.download('averaged_perceptron_tagger')
nltk.download('vader_lexicon')
# Regex street loop
street_compiler= re.compile(r'\w+\s([Rr](oa)?d|[Ss]tr(eet)?|[Aa]v(enue)?|[Ll](a)?n(e)?)')
def find_street_mention(text):
    """Return the first street-like mention in *text*, or None.

    The module-level regex ``street_compiler`` finds
    "<word> road/street/avenue/lane" candidates; nltk POS tagging then
    discards candidates whose first token is not noun-like (determiner,
    verb, preposition, conjunction or possessive pronoun), which filters
    out matches such as "the road".

    requirements: import nltk
    from nltk import word_tokenize
    nltk.download('averaged_perceptron_tagger')
    nltk.download('vader_lexicon')
    """
    match = street_compiler.search(text)
    if match is None:
        return None
    candidate = match.group(0)
    # POS-tag the candidate and inspect the tag of its first token.
    first_tag = nltk.pos_tag(nltk.word_tokenize(candidate))[0][1]
    rejected_prefixes = ('DT', 'VB', 'IN', 'CC', 'PRP$')
    if first_tag.startswith(rejected_prefixes):
        return None
    return candidate
| [
"noreply@github.com"
] | NATALIAGR.noreply@github.com |
926f98e8a68c4c9d69298173b68e6e7d6c48d97f | b9330bc0e333e7a1bb5905ba5d1039ff42cb110b | /tests/testapp/urls.py | 9fdaa721dc76ea19e5675ad4f44e198575dfac9d | [
"MIT"
] | permissive | coldrye-collaboration/django-authlib | 865c853ce75543bc64ad86f53d0703f0db879f53 | 6589316159bb0c2dace29d35334e4f930353eb88 | refs/heads/main | 2023-07-26T05:05:53.157272 | 2021-09-07T14:31:13 | 2021-09-07T14:31:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,511 | py | from django.contrib import admin
from django.shortcuts import render
from django.urls import include, re_path
from testapp.views import custom_verification, custom_verification_code
from authlib import views
from authlib.facebook import FacebookOAuth2Client
from authlib.google import GoogleOAuth2Client
from authlib.twitter import TwitterOAuthClient
# URL routes for the authlib test application: admin OAuth hooks, the Django
# admin, session login/logout, three OAuth providers, e-mail based
# registration/confirmation, and the custom verification endpoints.
urlpatterns = [
    re_path(r"", include("authlib.admin_oauth.urls")),
    re_path(r"^admin/", admin.site.urls),
    re_path(r"^404/$", lambda request: render(request, "404.html")),
    re_path(r"^login/$", views.login, name="login"),
    re_path(
        r"^oauth/facebook/$",
        views.oauth2,
        {"client_class": FacebookOAuth2Client},
        name="accounts_oauth_facebook",
    ),
    re_path(
        r"^oauth/google/$",
        views.oauth2,
        {"client_class": GoogleOAuth2Client},
        name="accounts_oauth_google",
    ),
    re_path(
        r"^oauth/twitter/$",
        views.oauth2,
        {"client_class": TwitterOAuthClient},
        name="accounts_oauth_twitter",
    ),
    re_path(r"^email/$", views.email_registration, name="email_registration"),
    re_path(
        r"^email/(?P<code>[^/]+)/$",
        views.email_registration,
        name="email_registration_confirm",
    ),
    re_path(r"^logout/$", views.logout, name="logout"),
    re_path(r"^custom/$", custom_verification),
    re_path(
        r"^custom/(?P<code>[^/]+)/$",
        custom_verification_code,
        name="custom_verification_code",
    ),
]
| [
"mk@feinheit.ch"
] | mk@feinheit.ch |
9c4d4b914e4b146d879626d485aac23f0d61c0af | 98e820b4aaecea6509b8ae3147d31583732a38c2 | /Lab2-RGandFA/FiniteAutomaton.py | f35c1faace51a51c8112e58543eff581ab9500ea | [] | no_license | ancapatriciastegerean/LFTC | 175bb4ead47976fd321157699901b1e000bc3b6a | 906593825e73e2e186ced61b2a31454f5dfada15 | refs/heads/master | 2022-04-04T13:04:17.168032 | 2020-01-13T21:44:52 | 2020-01-13T21:44:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,368 | py | class FiniteAutomaton:
def __init__(self, Q, E, S, q0, F):
self.Q = Q
self.E = E
self.S = S
self.q0 = q0
self.F = F
def is_state(self, value):
return value in self.Q
@staticmethod
def line_parsed(line):
# returns after the equal sign and the element before and after the coma
return [element.strip() for element in line.strip().split('=')[1].strip()[1:-1].strip().split(',')]
@staticmethod
def console_parsed(line):
# returns the element before and after the coma
return [element.strip() for element in line.strip()[1:-1].strip().split(',')]
@staticmethod
def read_from_file(fileName):
    """Load an automaton definition from *fileName*.

    Expected line order: Q, E, q0, F, then the transitions S, which may
    span all remaining lines of the file.
    """
    with open(fileName) as f:
        Q = FiniteAutomaton.line_parsed(f.readline())
        E = FiniteAutomaton.line_parsed(f.readline())
        q0 = f.readline().split('=')[1].strip()
        F = FiniteAutomaton.line_parsed(f.readline())
        # Transitions may span several lines, so join the remainder of the
        # file before parsing.
        S = FiniteAutomaton.transactions_parsed(FiniteAutomaton.line_parsed(''.join([line for line in f])))
        return FiniteAutomaton(Q, E, S, q0, F)
@staticmethod
def read_from_console():
    """Read an automaton definition interactively (Q, E, q0, F, then S)."""
    Q = FiniteAutomaton.console_parsed(input('Q = '))
    E = FiniteAutomaton.console_parsed(input('E = '))
    q0 = input('q0 = ')
    F = FiniteAutomaton.console_parsed(input('F = '))
    S = FiniteAutomaton.transactions_parsed(FiniteAutomaton.console_parsed(input('S = ')))
    return FiniteAutomaton(Q, E, S, q0, F)
@staticmethod
def transactions_parsed(parts):
result = []
transitions = []
index = 0
while index < len(parts):
transitions.append(parts[index] + ',' + parts[index + 1])
index += 2
for transition in transitions:
lhs, rhs = transition.split('->')
state2 = rhs.strip()
state1, route = [value.strip() for value in lhs.strip()[1:-1].split(',')]
result.append(((state1, route), state2))
return result
@staticmethod
def get_fa_from_regular_grammar(rg):
    """Convert a regular grammar *rg* into an equivalent finite automaton.

    A fresh final state 'K' is added.  Production A -> aB becomes the
    transition ((A, a), B); A -> a becomes ((A, a), K); and S -> E
    (epsilon) marks the start state itself as accepting.
    """
    Q = rg.N + ['K']
    E = rg.E
    q0 = rg.S
    F = ['K']
    S = []
    for production in rg.P:
        state2 = 'K'
        state1, rhs = production
        # Epsilon production on the start symbol: no transition, the start
        # state becomes final.
        if state1 == q0 and rhs[0] == 'E':
            F.append(q0)
            continue
        route = rhs[0]
        if len(rhs) == 2:
            state2 = rhs[1]
        S.append(((state1, route), state2))
    return FiniteAutomaton(Q, E, S, q0, F)
def get_transactions_for(self, state):
if not self.is_state(state):
raise Exception('Can only get transitions for states')
return [trans for trans in self.S if trans[0][0] == state]
def show_transitions_for(self, state):
    """Print the transitions leaving *state* as '{ (s, a) -> t ... }'."""
    transitions = self.get_transactions_for(state)
    print('{ ' + ' '.join([' -> '.join([str(part) for part in trans]) for trans in transitions]) + ' }')
def __str__(self):
return 'Q = { ' + ', '.join(self.Q) + ' }\n' \
+ 'E = { ' + ', '.join(self.E) + ' }\n' \
+ 'F = { ' + ', '.join(self.F) + ' }\n' \
+ 'S = { ' + ', '.join([' -> '.join([str(part) for part in trans]) for trans in self.S]) + ' }\n' \
+ 'q0 = ' + str(self.q0) + '\n'
| [
"tpie2451@scs.ubbcluj.ro"
] | tpie2451@scs.ubbcluj.ro |
e977127dda8ac2f2c9a41bf57af3bd7e54a7ce47 | 83bc96df34fc2311a33a68e8e79af802d84370b9 | /vmraid/model/sync.py | 603a5c359111066b36066fc1ab17e48fec1d725d | [
"MIT"
] | permissive | sowrisurya/vmraid | 001072130ac6be5a3ef5a84523d8949d891e6954 | f833e00978019dad87af80b41279c0146c063ed5 | refs/heads/main | 2023-05-05T13:52:45.386039 | 2021-05-31T10:23:56 | 2021-05-31T10:23:56 | 372,466,378 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,264 | py | # Copyright (c) 2015, VMRaid Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
"""
Sync's doctype and docfields from txt files to database
perms will get synced only if none exist
"""
import vmraid
import os
from vmraid.modules.import_file import import_file_by_path
from vmraid.modules.patch_handler import block_user
from vmraid.utils import update_progress_bar
def sync_all(force=0, verbose=False, reset_permissions=False):
    """Sync DocType JSON files into the database for every installed app.

    Users are blocked for the duration of the sync so schema changes do not
    race with normal requests; the cache is cleared afterwards.
    """
    block_user(True)
    for app in vmraid.get_installed_apps():
        sync_for(app, force, verbose=verbose, reset_permissions=reset_permissions)
    block_user(False)
    vmraid.clear_cache()
def sync_for(app_name, force=0, sync_everything = False, verbose=False, reset_permissions=False):
    """Import all DocType JSON files of *app_name* into the database.

    For the core "vmraid" app a fixed bootstrap list of doctypes is queued
    first, because everything imported later depends on those tables.
    NOTE(review): the *sync_everything* parameter is accepted but unused in
    this function body -- confirm whether callers rely on it.
    """
    files = []
    if app_name == "vmraid":
        # these need to go first at time of install
        for d in (("core", "docfield"),
            ("core", "docperm"),
            ("core", "doctype_action"),
            ("core", "doctype_link"),
            ("core", "role"),
            ("core", "has_role"),
            ("core", "doctype"),
            ("core", "user"),
            ("custom", "custom_field"),
            ("custom", "property_setter"),
            ("website", "web_form"),
            ("website", "web_template"),
            ("website", "web_form_field"),
            ("website", "portal_menu_item"),
            ("data_migration", "data_migration_mapping_detail"),
            ("data_migration", "data_migration_mapping"),
            ("data_migration", "data_migration_plan_mapping"),
            ("data_migration", "data_migration_plan"),
            ("desk", "number_card"),
            ("desk", "dashboard_chart"),
            ("desk", "dashboard"),
            ("desk", "onboarding_permission"),
            ("desk", "onboarding_step"),
            ("desk", "onboarding_step_map"),
            ("desk", "module_onboarding"),
            ("desk", "workspace_link"),
            ("desk", "workspace_chart"),
            ("desk", "workspace_shortcut"),
            ("desk", "workspace")):
            files.append(os.path.join(vmraid.get_app_path("vmraid"), d[0],
                "doctype", d[1], d[1] + ".json"))
    # Collect the document files of every module in the app.
    for module_name in vmraid.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(vmraid.get_module(app_name + "." + module_name).__file__)
        get_doc_files(files, folder)
    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            # Commit after every file so a failure does not roll back the
            # documents already imported.
            import_file_by_path(doc_path, force=force, ignore_version=True,
                reset_permissions=reset_permissions, for_sync=True)
            vmraid.db.commit()
            # show progress bar
            update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)
        # print each progress bar on new line
        print()
def get_doc_files(files, start_path):
    """Walk *start_path* and append (in place) the document JSON files found.

    For each known document type, looks for the layout
    ``<start_path>/<doctype>/<docname>/<docname>.json`` and appends the path
    to *files*, skipping duplicates.  The order of *document_types* matters:
    documents are imported in this sequence (warning for devs).
    """
    # load in sequence - warning for devs
    document_types = ['doctype', 'page', 'report', 'dashboard_chart_source', 'print_format',
        'website_theme', 'web_form', 'web_template', 'notification', 'print_style',
        'data_migration_mapping', 'data_migration_plan', 'workspace',
        'onboarding_step', 'module_onboarding']

    for doctype in document_types:
        doctype_path = os.path.join(start_path, doctype)
        if not os.path.exists(doctype_path):
            continue
        for docname in os.listdir(doctype_path):
            if not os.path.isdir(os.path.join(doctype_path, docname)):
                continue
            doc_path = os.path.join(doctype_path, docname, docname) + ".json"
            # `not in` replaces the non-idiomatic `not doc_path in files`;
            # the de-duplication behaviour is unchanged.
            if os.path.exists(doc_path) and doc_path not in files:
                files.append(doc_path)
| [
"sowrisurya@outlook.com"
] | sowrisurya@outlook.com |
4a0b8e945cc87af0984f292a9d3a7cf7c5c27d23 | bc7927288ada439b7d52d2246f5415497651f3f4 | /tools/mp_graph.py | 2b5f12f544438cbc58066741d29de3f04f7cdc90 | [
"Apache-2.0"
] | permissive | sailfish009/cgnn | 2ccea385224fc384c8462f97cde9759c3dc81410 | 29d6341af2cacb1f738c3cf184c4d21b69600a29 | refs/heads/master | 2022-04-19T19:06:52.746653 | 2020-04-10T06:16:39 | 2020-04-10T06:16:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,065 | py | # Copyright 2019 Takenori Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A crystal graph coordinator for CGNN."""
import numpy as np
import pandas as pd
import pbc
from pymatgen.core.periodic_table import Element
from sklearn.cluster import KMeans
import sys
import os
import glob
from joblib import Parallel, delayed
import warnings
# Minimum separation between the two k-means cluster centres for the split
# into "near" vs "far" neighbour candidates to be considered meaningful.
CC_CUTOFF = 0.03
# Multiplier applied on top of the volume-scaled atomic-radius cutoff.
RADIUS_FACTOR = 1.2
def get_nbrs(crystal_xyz, crystal_lat, R_max):
    """Return neighbour candidates for every atom within the R_max cutoffs.

    Converts Cartesian coordinates to fractional (reduced) coordinates and
    delegates the periodic-boundary distance search to ``pbc``.
    """
    lat_cols = np.transpose(crystal_lat)
    lat_cols_inv = np.linalg.inv(lat_cols)
    frac_coords = np.matmul(crystal_xyz, np.transpose(lat_cols_inv))
    return pbc.get_shortest_distances(frac_coords, lat_cols, R_max,
                                      crdn_only=True)
def get_radius(e):
    """Return an atomic radius for element *e*.

    Prefers the calculated radius, falls back to the tabulated one, and
    raises NameError when neither is available.
    """
    radius = e.atomic_radius_calculated
    if radius is None:
        radius = e.atomic_radius
    if radius is None:
        raise NameError('Not found the atomic radius for the element: {}'.format(e))
    return radius
def get_nn_cluster(X):
    """Split candidate distances into two clusters and keep the nearer one.

    If the two cluster centres are closer than CC_CUTOFF the split is
    considered meaningless and every index is returned unchanged.
    """
    with warnings.catch_warnings():
        # KMeans can warn on tiny/degenerate inputs; silence it here.
        warnings.simplefilter("ignore")
        kmeans = KMeans(n_clusters=2, random_state=0).fit(X)
    cc_distance = abs(kmeans.cluster_centers_[0,0]-kmeans.cluster_centers_[1,0])
    if cc_distance > CC_CUTOFF:
        # Keep only the members of the cluster with the smaller centre
        # (i.e. the nearer neighbours).
        nnc_index = np.argmin(kmeans.cluster_centers_)
        nnc_nbrs = [i for i in range(len(X)) if kmeans.labels_[i] == nnc_index]
        return np.array(nnc_nbrs)
    else:
        return np.arange(len(X))
def get_nnc_loop(X):
    """Iteratively shrink the candidate set to its nearest-neighbour cluster.

    Repeats the 2-means split (at most 10 rounds) until a round removes
    nothing or only one candidate remains; returns the surviving indices
    into the original X.
    """
    indices = np.arange(len(X))
    rounds = 0
    while rounds < 10 and len(indices) > 1:
        nnc = get_nn_cluster(X)
        if len(nnc) == len(indices):
            break
        indices = indices[nnc]
        X = X[nnc]
        rounds += 1
    return indices
def get_neighbors(geom):
    """Return, for each site of *geom*, the indices of its bonded neighbours.

    A per-pair distance cutoff is built from the atomic radii, scaled by the
    cell volume, and candidate neighbours are then pruned to the nearest
    cluster via repeated 2-means splits.
    """
    elems = [Element.from_Z(z) for z in geom.atomic_numbers]
    radii = np.array([get_radius(e) for e in elems])
    # Pairwise cutoff matrix: sum of the two atomic radii ...
    cutoff = radii[:,np.newaxis] + radii[np.newaxis, :]
    # ... scaled so the cutoffs adapt to how loosely packed the cell is.
    vol_atom = (4 * np.pi / 3) * np.array([r**3 for r in radii]).sum()
    factor_vol = (geom.volume / vol_atom)**(1.0/3.0)
    factor = factor_vol * RADIUS_FACTOR
    cutoff *= factor
    candidates = get_nbrs(geom.cart_coords, geom.lattice.matrix, cutoff)
    neighbors = []
    for j in range(len(candidates)):
        dists = []
        for nbr in candidates[j]:
            # Each candidate is (index, distance, r); nbr[2] is unused here.
            i = nbr[0]
            d = nbr[1]
            r = nbr[2]
            # Normalise each distance by its pair-specific cutoff.
            dists.append(d / cutoff[j,i])
        X = np.array(dists).reshape((-1, 1))
        nnc_nbrs = get_nnc_loop(X)
        neighbors.append([candidates[j][i][0] for i in nnc_nbrs])
    return neighbors
def get_structure(m):
    """Return the structure of material record *m* with the expected site count.

    Repeatedly reduces to the primitive structure (up to 10 times) until the
    number of sites matches m['nsites']; raises NameError if it never does.
    """
    if m['nsites'] == len(m['structure']):
        return m['structure']
    else:
        s = m['structure'].get_primitive_structure()
        for _ in range(10):
            if m['nsites'] == len(s):
                return s
            else:
                s = s.get_primitive_structure()
        raise NameError('The primitive structure could not be got for {}'.format(m['material_id']))
def load_materials(filepath):
    """Load the 'materials' array from an .npz archive.

    Falls back to latin1 decoding for archives pickled under Python 2.
    """
    try:
        data = np.load(filepath)['materials']
    except UnicodeError:
        data = np.load(filepath, encoding='latin1')['materials']
    return data
def process(data_path):
    """Build the crystal graph for one data shard and save it.

    Reads an mp_data_*.npz shard, extracts per-material atomic numbers
    (graph nodes) and neighbour lists (graph edges), and writes the
    matching mp_graph_*.npz file next to the input.
    """
    materials = load_materials(data_path)
    material_ids = [m['material_id'] for m in materials]
    structures = [get_structure(m) for m in materials]
    data_ac = []
    data_nbr = []
    for geom in structures:
        neighbors = get_neighbors(geom)
        data_ac.append(geom.atomic_numbers)
        data_nbr.append(neighbors)
    graph_path = data_path.replace('mp_data', 'mp_graph')
    np.savez_compressed(graph_path, graph_names=material_ids,
        graph_nodes=data_ac, graph_edges=data_nbr)
def main(data_dir, num_cpus):
    """Process every mp_data_*.npz shard in *data_dir* in parallel."""
    if not os.path.isdir(data_dir):
        print('Not found the data directory: {}'.format(data_dir))
        exit(1)
    data_files = sorted(glob.glob(os.path.join(data_dir, 'mp_data_*.npz')))
    # One joblib job per shard; num_cpus follows joblib's n_jobs semantics
    # (-1 means "use all cores").
    Parallel(n_jobs=num_cpus, verbose=10)([delayed(process)(path) for path in data_files])
if __name__ == '__main__':
    import argparse
    # CLI: python mp_graph.py --data_dir data --num_cpus -1
    parser = argparse.ArgumentParser(description='Crystal Graph Coordinator.')
    parser.add_argument('--data_dir', metavar='PATH', type=str, default='data',
        help='The path to a data directory (default: data)')
    parser.add_argument('--num_cpus', metavar='N', type=int, default=-1,
        help='The number of CPUs used for processing (default: -1)')
    options = vars(parser.parse_args())
    main(**options)
| [
"11532812+Tony-Y@users.noreply.github.com"
] | 11532812+Tony-Y@users.noreply.github.com |
9df52bdddb088e60b3d737e0b79c9023234d8230 | e49eaa8a2de83ba5abad230b41effd1f19a70369 | /db_credentials.py | 35ef0c18118d8d50ab73fa4b78ead4b2bbc3a761 | [] | no_license | NobleCactus/CS_340_Portfolio | f73b568cf474248830bc5b1c4cca2feb0c957a37 | 3bd6e082ccc08f300cff63f6455b42438f066971 | refs/heads/master | 2023-03-24T11:17:27.031211 | 2021-03-14T17:22:49 | 2021-03-14T17:22:49 | 333,941,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | # To actually have your app use this file, you need to RENAME the file to db_credentials.py
# You will find details about your CS340 database credentials on Canvas.
# The values below are read by db_connector.py and, through it, by all the
# Python code in this codebase that talks to the database.
host = 'classmysql.engr.oregonstate.edu' #DON'T CHANGE ME UNLESS THE INSTRUCTIONS SAY SO
# MySQL account name and password -- fill these in with your own credentials.
user = ''
passwd = ''
# Schema (database) name to connect to.
db = 'cs340_eppingea'
"andrew.eppinger@yahoo.com"
] | andrew.eppinger@yahoo.com |
be379de4863ae31f20197b96ce02d6fba18d36d5 | fb5b2b1adde15cfb43de697a9eb0815d326ac504 | /python语言程序设计/第五部分-函数定义/FiveAngle.py | b99bd7579b1b0d665a79827044350a7c6b0bb300 | [] | no_license | wangyongguang/Python | 9d45a64e85cf4dfd2a1787954bab44ee7035f38c | 298986b0234b1b17d823f70f1b0acc0cece8f447 | refs/heads/master | 2021-09-10T08:40:01.723028 | 2018-03-23T03:22:52 | 2018-03-23T03:22:52 | 114,337,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | from turtle import Turtle
# Draw a filled five-pointed star: five equal segments with a 144-degree
# right turn after each (5 * 144 = 720, i.e. two full rotations).
p = Turtle()
p.speed(3)
p.pensize(5)
# Pen colour blue; the yellow fill set here is overridden by the next line.
p.color("blue",'yellow')
p.fillcolor("red")
p.begin_fill()
for i in range(5):
    p.forward(200)
    p.right(144)
p.end_fill()
| [
"549257430@qq.com"
] | 549257430@qq.com |
9df84799211740c7f4e37ba828a3043d278eb843 | 989e55fc48ba8fc37b1677231382b8efd001585f | /acm_placement_app/users/tests/test_tasks.py | a69167c34bda005933b93f271580014f23ed9635 | [
"MIT"
] | permissive | mrklees/acm-placement-app | 222b8810775c35937243d81bcd4becc0c68b378e | 0ecc20861fa5b44580dec12986446e7c84537281 | refs/heads/master | 2020-05-24T02:44:27.725919 | 2019-06-05T02:17:15 | 2019-06-05T02:17:15 | 187,058,889 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | import pytest
from celery.result import EagerResult
from acm_placement_app.users.tasks import get_users_count
from acm_placement_app.users.tests.factories import UserFactory
@pytest.mark.django_db
def test_user_count(settings):
"""A basic test to execute the get_users_count Celery task."""
UserFactory.create_batch(3)
settings.CELERY_TASK_ALWAYS_EAGER = True
task_result = get_users_count.delay()
assert isinstance(task_result, EagerResult)
assert task_result.result == 3
| [
"keyvan@keyvanm.com"
] | keyvan@keyvanm.com |
9979ba39da4d5323f6ed7358fab546d7d3b827a1 | 08e65977cbdca21613763fff6a721c7ffb315e4b | /apply-gain.py | 02ea0fcceaea2ff65936164b2de5c9bef22ebf50 | [] | no_license | paulomouat/AudioUtils | eb56810fe141a46cca19ef3a28d92d819f40d980 | 03585898b14d70986f802f7a7abd6e5ac0818ff3 | refs/heads/main | 2023-07-10T22:50:09.194870 | 2021-08-01T20:47:05 | 2021-08-01T20:47:05 | 387,241,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,315 | py | #!/usr/bin/env python3
import sys, getopt
import numpy as np
from wavfile import read, write
from wavfileinfo import outputinfo
def usage():
    """Print the command-line synopsis and exit with status 2."""
    synopsis = 'apply-gain.py -i <inputfile> -o <outputfile> -g <gain>'
    print(synopsis)
    sys.exit(2)
def db_to_float(db, using_amplitude=True):
db = float(db)
if using_amplitude:
return 10 ** (db / 20)
else: # using power
return 10 ** (db / 10)
def main(argv):
    """Apply a decibel gain to a WAV file.

    Parses -i/-o/-g options, scales the samples by the linear gain factor,
    writes the result, and prints file info for both input and output.
    """
    inputfile = ''
    outputfile = ''
    gainopt = ''
    try:
        opts, args = getopt.getopt(argv, "i:o:g:", ["inputfile=", "outputfile=", "gain="])
    except getopt.GetoptError:
        usage()
    for opt, arg in opts:
        if opt in ("-i", "--inputfile"):
            inputfile = arg
        elif opt in ("-o", "--outputfile"):
            outputfile = arg
        elif opt in ("-g", "--gain"):
            gainopt = arg
    # Both file names are mandatory.
    if inputfile == '' or outputfile == '':
        usage()
    # Default gain of 0 dB leaves the samples unchanged (factor 1.0).
    gain = 0.0
    if gainopt != '':
        gain = float(gainopt)
    gain_float = db_to_float(gain)
    # wavfile.read returns (rate, data, bits, ...); extras are ignored here.
    (rate, data, bits, *other) = read(inputfile)
    print('input file:')
    outputinfo(inputfile)
    scaled_data = data * gain_float
    write(outputfile, rate, scaled_data, bits)
    print('output file:')
    outputinfo(outputfile)

if __name__ == "__main__":
    main(sys.argv[1:])
"paulo.mouat@gmail.com"
] | paulo.mouat@gmail.com |
246d6219b180cf29e8e3fbfa058381452ac03983 | d4d1e0b19498484136268c7761eb524f3d9d9252 | /azeezllica/polls/views.py | 5b395b93a992e18fbf9455e63ca210747670b2cb | [] | no_license | azeez13-meet/MEET-YL2 | 1dd443d357e094d452d8e47e462bdc8d0af013b6 | b56991f0144464f25432aa33386ea1dd63ea544e | refs/heads/master | 2020-05-20T13:04:49.890485 | 2014-12-11T17:47:01 | 2014-12-11T17:47:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from polls.models import Choice, Poll
def index(request):
    """Show the five most recently published polls."""
    latest_poll_list = Poll.objects.order_by('-pub_date')[:5]
    context = { 'latest_poll_list': latest_poll_list }
    return render(request, 'index.html', context)
def detail(request, poll_id):
    """Show the voting form for one poll (404 if it does not exist)."""
    poll = get_object_or_404(Poll, pk=poll_id)
    return render(request, 'detail.html', {'poll': poll})
def results(request, poll_id):
    """Show the vote tallies for one poll (404 if it does not exist)."""
    poll = get_object_or_404(Poll, pk=poll_id)
    return render(request, 'results.html', {'poll': poll})
def vote(request, poll_id):
    """Record a vote for the submitted choice and redirect to the results.

    Re-renders the detail page with an error message when no choice was
    submitted or the selected choice no longer exists.  The trailing
    tutorial placeholder response was unreachable (both branches above it
    return) and has been removed.
    """
    p = get_object_or_404(Poll, pk=poll_id)
    try:
        selected_choice = p.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        return render(request, 'detail.html', {
            'poll': p,
            'error_message': "You didn't select a choice."
        })
    else:
        # NOTE(review): read-modify-write is racy under concurrent votes; an
        # F() expression would make the increment atomic -- confirm before
        # changing behaviour.
        selected_choice.votes += 1
        selected_choice.save()
        return HttpResponseRedirect(reverse('results', args=(p.id,)))
def my_page(request):
    """Render the static 'my page' template."""
    return render(request, 'my_page.html')

def help(request):
    """Render the static help page.  NOTE: shadows the builtin help()."""
    return render(request, 'help.html')

def about(request):
    """Render the static about page."""
    return render(request, 'about.html')
| [
"guest-wlpeBm@meet.mit.edu"
] | guest-wlpeBm@meet.mit.edu |
10c1fdfae2274a01c1e017d2d8b28cb745bf1285 | 98b38056b8134f9faca12bd055787ced4d20e47b | /Q2/code.py | 0098b1dc52c07acd837f7ca39768c02f5f6d4679 | [] | no_license | ShiyuLi1997/Hw4 | 29f20b71f2ef1a87a53c02ad964a27af8cc20cef | 4533aedb9e1c2fd26af384236b6c05d3b47d41e9 | refs/heads/main | 2023-01-05T09:52:03.782961 | 2020-11-10T12:19:26 | 2020-11-10T12:19:26 | 311,585,750 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,222 | py | import random
from time import time
from matplotlib import pyplot as plt
import math
from statistics import mean
"""
The merge sort implementation is taken from website:
URL: https://realpython.com/sorting-algorithms-python/#the-merge-sort-algorithm-in-python
"""
def merge(left, right):
    """Merge two sorted lists into one sorted list (stable).

    When either input is empty the other is returned as-is; otherwise the
    two are combined with a classic two-pointer walk, taking from *left*
    on ties to preserve stability.
    """
    if not left:
        return right
    if not right:
        return left
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # One of the inputs is exhausted; the other's tail is already sorted.
    if i < len(left):
        merged.extend(left[i:])
    else:
        merged.extend(right[j:])
    return merged
def merge_sort(array,):
    """Return a sorted copy of *array* using top-down merge sort."""
    # Arrays of fewer than two elements are already sorted.
    if len(array) < 2:
        return array
    half = len(array) // 2
    left_sorted = merge_sort(array[:half])
    right_sorted = merge_sort(array[half:])
    return merge(left=left_sorted, right=right_sorted)
def timSort(array,k):
    """Simplified hybrid merge/insertion sort.

    Sub-arrays of length <= k are sorted in place with insertion sort;
    longer ones are split in half, sorted recursively, and merged.
    """
    if len(array) < 2:
        return array
    if len(array) <= k:
        insertionSort(array)
        return array
    midpoint = len(array) // 2
    return merge(
        left=timSort(array[:midpoint],k),
        right=timSort(array[midpoint:],k))
"""
The insertion Sort algorithm is taken from website:
URL: https://runestone.academy/runestone/books/published/pythonds/SortSearch/TheInsertionSort.html
"""
def insertionSort(alist):
    """Sort *alist* in place (ascending) using insertion sort."""
    for i in range(1, len(alist)):
        key = alist[i]
        j = i - 1
        # Shift larger elements one slot right, then drop the key in.
        while j >= 0 and alist[j] > key:
            alist[j + 1] = alist[j]
            j -= 1
        alist[j + 1] = key
def findFirst(l, text):
    """Return the index of the first occurrence of *text* in *l*, or None.

    Bug fix: the scan previously started at index 1, so a match in the
    first position was never found (findLast, by contrast, covers index 0).
    """
    for i, item in enumerate(l):
        if item == text:
            return i
    return None
def findLast(l, text):
    """Return the index of the last occurrence of *text* in *l*, or None."""
    last = None
    for i, item in enumerate(l):
        if item == text:
            last = i
    return last
N = 200 # number of input sizes benchmarked (1 .. N-1)
k = 40 # each input size is run k times and the mean time is taken
cap = 38 # run length at or below which timSort falls back to insertion sort
if __name__ == "__main__":
    # Benchmark merge sort, insertion sort and the hybrid timSort over
    # input sizes 1..N-1, averaging k runs per size, then plot the curves.
    mergeTime = []
    insertionTime = []
    inpSize = []
    winnerList = []
    TimTime = []
    for n in range(1,N):
        subMerge = []
        subInser = []
        subTim = []
        for j in range(k):
            # Random array of n integers in [0, 1000]; copied so each
            # algorithm sorts identical input.
            array = [random.randint(0, 1000) for i in range(n)]
            a1 = array.copy()
            a2 = array.copy()
            a3 = array.copy()
            t0 = time()
            merge_sort(a1)
            t1 = time()
            ta = time()
            insertionSort(a2)
            tb = time()
            tx = time()
            timSort(a3,cap)
            ty = time()
            # Accumulate per-run wall-clock times.
            subMerge.append(t1-t0)
            subInser.append(tb-ta)
            subTim.append(ty-tx)
        inpSize.append(n)
        mergeTime.append(mean(subMerge))
        insertionTime.append(mean(subInser))
        TimTime.append(mean(subTim))
        print("n: ",n," ",end="")
        # NOTE(review): the winner is decided from the timings of the *last*
        # repetition only (t0/t1/ta/tb leak out of the j loop), not from the
        # means computed above -- looks unintended.
        winner = "Merge" if (t1-t0)/n <= (tb-ta)/n else "Insertion"
        if (t1-t0)/n <= (tb-ta)/n:
            winnerList.append("Merge")
        else:
            winnerList.append("Insertion")
        print("insertion:{}, merge:{}, {} wins ".format((tb-ta)/n,(t1-t0)/n,winner))
    # Report the crossover range where merge sort beats insertion sort.
    print("From range {} to {} that merge sort is quicker than insertion sort"
        .format(findFirst(winnerList,"Merge"),findLast(winnerList,"Insertion")))
    # Overall time ratios for the chosen cap (smaller is better).
    print("For cap: {}, Tim/Merge: {} Tim/Insertion:{} ".format(cap,sum(TimTime)/sum(mergeTime),sum(TimTime)/sum(insertionTime)))
    print(" smaller is better")
    plt.plot(inpSize, mergeTime, label='MergeSortTimeCost')
    plt.plot(inpSize, insertionTime, label='InsertionSortTimeCost')
    plt.plot(inpSize, TimTime, label='TimSortTimeCost')
    plt.xlabel("Input size (n)")
    plt.ylabel("Cost of time(s)")
    plt.title("cap = {}".format(cap))
    plt.legend()
    plt.show()
| [
"lishiyu6@msu.edu"
] | lishiyu6@msu.edu |
a3e4403580bd3cf7ad4cc7dc5c9cb90c0cf3f184 | 977f7a7386899a5d0152b29b57ec26682b430437 | /data_managers/data_manager_malt_index_builder/data_manager/malt_index_builder.py | 7ebf885fd08201668a570bb1e1566b3996ae4c65 | [
"MIT"
] | permissive | galaxyproject/tools-iuc | 0b87e21e1cb075ca6dc6b12622bc4e538a7c6507 | 96f8a533278b4b6394aebd7a8f537513b0d29b1a | refs/heads/main | 2023-08-31T16:14:34.563541 | 2023-08-31T04:31:22 | 2023-08-31T04:31:22 | 23,992,530 | 164 | 508 | MIT | 2023-09-13T19:41:14 | 2014-09-13T11:18:49 | HTML | UTF-8 | Python | false | false | 4,530 | py | #!/usr/bin/env python
import json
import optparse
import os
import subprocess
import sys
def get_id_name(params, dbkey, fasta_description=None):
    """Resolve the sequence id and display name for a data table entry.

    Falls back from the user-supplied values to *fasta_description* (name
    only) and finally to *dbkey* when a value is empty or missing.
    """
    param_dict = params['param_dict']
    sequence_id = param_dict['sequence_id'] or dbkey
    sequence_name = param_dict['sequence_name'] or fasta_description or dbkey
    return sequence_id, sequence_name
def build_malt_index(data_manager_dict, fasta_filename, params, target_directory, dbkey, sequence_id, sequence_name, sequence_type, shapes, max_hits_per_seed, protein_reduct):
    """Run malt-build over *fasta_filename* and register the result.

    The malt-build program produces a directory of files, so the data
    table path entry refers to a directory rather than a single index file.
    Optional malt-build flags are only passed when a value was supplied.
    """
    fasta_base_name = os.path.split(fasta_filename)[-1]
    # Symlink the FASTA into the work directory so malt-build's outputs
    # land there.
    sym_linked_fasta_filename = os.path.join(target_directory, fasta_base_name)
    os.symlink(fasta_filename, sym_linked_fasta_filename)
    args = ['malt-build', '--input', sym_linked_fasta_filename, '--sequenceType', sequence_type, '--index', target_directory]
    # Galaxy exposes the allotted thread count via GALAXY_SLOTS.
    threads = os.environ.get('GALAXY_SLOTS')
    if threads:
        args.extend(['--threads', threads])
    if shapes is not None:
        args.extend(['--shapes', shapes])
    if max_hits_per_seed is not None:
        args.extend(['--maxHitsPerSeed', max_hits_per_seed])
    if protein_reduct is not None:
        args.extend(['--proteinReduct', protein_reduct])
    proc = subprocess.Popen(args=args, shell=False, cwd=target_directory)
    return_code = proc.wait()
    if return_code:
        sys.exit('Error building index, return_code: %d' % return_code)
    # Remove unwanted files from the output directory.
    os.remove(sym_linked_fasta_filename)
    # The path entry is the directory where the index files are located,
    # not a single index file (malt-build produces a directory of files,
    # which together form the index).
    data_table_entry = dict(value=sequence_id, dbkey=dbkey, name=sequence_name, path=None)
    _add_data_table_entry(data_manager_dict, data_table_entry)
def _add_data_table_entry(data_manager_dict, data_table_entry):
    """Append *data_table_entry* to the 'malt_indices' data table,
    creating the intermediate containers on first use, and return the
    (mutated) *data_manager_dict*."""
    data_table_name = "malt_indices"
    tables = data_manager_dict.setdefault('data_tables', {})
    tables.setdefault(data_table_name, []).append(data_table_entry)
    return data_manager_dict
def main():
    """Data-manager entry point: parse options, build the index, write JSON."""
    parser = optparse.OptionParser()
    parser.add_option('-f', '--fasta_filename', dest='fasta_filename', action='store', type="string", help='fasta filename')
    parser.add_option('-d', '--fasta_dbkey', dest='fasta_dbkey', action='store', type="string", help='fasta dbkey')
    parser.add_option('-t', '--fasta_description', dest='fasta_description', action='store', type="string", default=None, help='fasta description')
    parser.add_option('-e', '--sequence_type', dest='sequence_type', action='store', type="string", help='DNA or Protein sequences')
    parser.add_option('-p', '--shapes', dest='shapes', action='store', type="string", default=None, help='Comma-separated list of seed shapes')
    parser.add_option('-m', '--max_hits_per_seed', dest='max_hits_per_seed', action='store', type="string", default=None, help='Maximum number of hits per seed')
    parser.add_option('-r', '--protein_reduct', dest='protein_reduct', action='store', type="string", default=None, help='Name or definition of protein alphabet reduction')
    (options, args) = parser.parse_args()
    # Galaxy passes the data-manager JSON file as the sole positional arg.
    filename = args[0]
    with open(filename) as fh:
        params = json.load(fh)
    target_directory = params['output_data'][0]['extra_files_path']
    os.mkdir(target_directory)
    data_manager_dict = {}
    dbkey = options.fasta_dbkey
    if dbkey in [None, '', '?']:
        raise Exception('"%s" is not a valid dbkey. You must specify a valid dbkey.' % (dbkey))
    sequence_id, sequence_name = get_id_name(params, dbkey=dbkey, fasta_description=options.fasta_description)
    # Build the index.
    build_malt_index(data_manager_dict, options.fasta_filename, params, target_directory, dbkey, sequence_id, sequence_name, options.sequence_type, options.shapes, options.max_hits_per_seed, options.protein_reduct)
    # Save info to json file.
    with open(filename, 'w') as fh:
        json.dump(data_manager_dict, fh, sort_keys=True)

if __name__ == "__main__":
    main()
| [
"noreply@github.com"
] | galaxyproject.noreply@github.com |
0b0715f158e2c44f0d69e82bbb870580eb41b7b7 | 87442943f7f9a833253c31a9dab71c8e5ed910df | /mysqlconnection.py | 44d728176909c9e453b770b5e418333c3c111a02 | [] | no_license | Codingmamba/Python-login-forum | 5dacd86486a151ba9ad768ed8f01ae4cab19ea82 | 649483397db7106c60228b03986037707b68db9a | refs/heads/master | 2020-03-08T08:06:33.812280 | 2018-04-04T05:43:21 | 2018-04-04T05:43:21 | 128,012,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,981 | py | """ import the necessary modules """
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.sql import text
# Create a class that will give us an object that we can use to connect to a database
class MySQLConnection(object):
    """Thin wrapper around Flask-SQLAlchemy for raw-SQL access to one schema."""

    def __init__(self, app, db):
        # NOTE(review): the credentials are hard-coded here; move them to
        # configuration before sharing or deploying this file.
        config = {
            'host': 'localhost',
            'database': db, # we got db as an argument
            'user': 'root',
            'password': 'Applejuice18',
            'port': '3306' # change the port to match the port your SQL server is running on
        }
        # this will use the above values to generate the path to connect to your sql database
        DATABASE_URI = "mysql://{}:{}@127.0.0.1:{}/{}".format(config['user'], config['password'], config['port'], config['database'])
        app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
        app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
        # establish the connection to database
        self.db = SQLAlchemy(app)

    # this is the method we will use to query the database
    def query_db(self, query, data=None):
        """Execute *query* with bound parameters *data*.

        SELECT returns a list of dicts, INSERT commits and returns the new
        row id, anything else commits and returns None.
        """
        result = self.db.session.execute(text(query), data)
        # NOTE(review): the statement kind is sniffed from the first six
        # characters only, so a query with leading whitespace falls through
        # to the generic commit branch -- confirm callers never do that.
        if query[0:6].lower() == 'select':
            # convert the result to a list of dictionaries
            list_result = [dict(r) for r in result]
            return list_result
        elif query[0:6].lower() == 'insert':
            # commit changes, then return the id of the row that was inserted
            self.db.session.commit()
            return result.lastrowid
        else:
            # if the query was an update or delete, return nothing and commit changes
            self.db.session.commit()
# This is the module method to be called by the user in server.py. Make sure to provide the db name!
def MySQLConnector(app, db):
    """Factory returning a MySQLConnection bound to *app* and schema *db*."""
    return MySQLConnection(app, db)
| [
"darnwi@amazon.com"
] | darnwi@amazon.com |
43ace6831b2886e6fc2011556b593bda0552e970 | faca9214e8c0a0d45696d88daaad8e086b2a7488 | /app/api/fuzzy.py | dc6a2e603672648712ae48be5449c4f217abcb16 | [] | no_license | GabrielFalcom/flask_vue_oracle11HR | 5f088d0ebc369f626202203dc96e58b3d5044f2f | 54d9e1481c555495998abbdc5cbec7b9b83cf69e | refs/heads/master | 2021-06-26T12:33:09.354843 | 2019-12-12T04:02:41 | 2019-12-12T04:02:41 | 227,514,248 | 1 | 0 | null | 2021-05-06T20:30:58 | 2019-12-12T03:43:40 | HTML | UTF-8 | Python | false | false | 3,414 | py | import json
from flask import jsonify, views
from app import db
from models import Country, Department, Location, Employee
class FuzzyApi(views.MethodView):
    """Read-only endpoint joining countries, locations, departments and
    employees, optionally filtered by country, department and city.

    The previous implementation duplicated the ORM query construction and
    the row serialization in three near-identical branches; both are now
    built once, with the optional filters appended incrementally.  The
    branch dispatch is behaviour-equivalent: a city filter is only honoured
    together with a department filter, and without a country the raw SQL
    join over all rows is returned, exactly as before.
    """

    def get(self, country_id=None, department_id=None, city=None):
        if country_id:
            query = db.session.query(Department, Location, Country, Employee) \
                .filter(Country.country_id == Location.country_id) \
                .filter(Department.location_id == Location.location_id) \
                .filter(Employee.department_id == Department.department_id) \
                .filter(Country.country_id == country_id)
            if department_id:
                query = query.filter(Department.department_id == department_id)
                # City only narrows the result when a department is given,
                # matching the original branch structure.
                if city:
                    query = query.filter(Location.city == city)
            results = []
            for department, location, country, employee in query.all():
                row = {}
                row.update(department.serialize)
                row.update(location.serialize)
                row.update(country.serialize)
                row.update(employee.serialize)
                results.append(row)
            return jsonify(results)
        # No country filter: fall back to the raw join over every table.
        result = db.session.execute("SELECT COUNTRY_NAME, CITY, DEPARTMENT_NAME, FIRST_NAME FROM HR.COUNTRIES JOIN HR.LOCATIONS USING (COUNTRY_ID) JOIN HR.DEPARTMENTS USING (LOCATION_ID) JOIN HR.EMPLOYEES E on HR.DEPARTMENTS.DEPARTMENT_ID = E.DEPARTMENT_ID")
        return jsonify([dict(row.items()) for row in result])
"Gabriel Paixao"
] | Gabriel Paixao |
d3359c699b2abfc093f3fe1941057b16f9e57524 | 188b46e77d511ad381acabf8eeecfdd5f43f5f94 | /venv/bin/pip3.8 | 215392558d9a3ce5e21e6b53cbeca98a46921c63 | [] | no_license | mooncinnamon/pipelinetest | 1289ef90a369b5d3c9ae800a1c6d4f65472ac2ca | e4c1fb06a0ae994dc5ea25af57c34a89e183a0c6 | refs/heads/master | 2023-06-19T13:40:25.131928 | 2021-07-19T08:05:40 | 2021-07-19T08:05:40 | 375,726,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | 8 | #!/Users/mooncinnamon/Project/shutle/venv/bin/python
# -*- coding: utf-8 -*-
# Auto-generated pip console-script shim: normalise argv[0] (strip the
# Windows wrapper suffixes) and hand off to pip's internal CLI entry point.
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"moon.pinnamon@gmail.com"
] | moon.pinnamon@gmail.com |
66b2f7a53d577a25631242cf7bf300eeff0b13e4 | ea96bdf0b7fb55ed7cfcbdc9eaa2de81035db105 | /leadmanager/leads/api.py | a73b649cf2d84570729f7d06b612356b19f1e32f | [] | no_license | nabilhesham/Lead-Manager-React-Django | 55c57e9712c97e6d26e153f8a50b9a411bc8a83b | 0978b7b9ea70b6d785d0bb05a1d7033f0406faa7 | refs/heads/master | 2023-01-07T20:15:55.986252 | 2020-03-25T23:37:09 | 2020-03-25T23:37:09 | 247,982,708 | 0 | 0 | null | 2023-01-07T16:03:25 | 2020-03-17T13:53:37 | Python | UTF-8 | Python | false | false | 487 | py | from leads.models import Lead
from rest_framework import viewsets, permissions
from .serializers import LeadSerializer
# Lead ViewSet
class LeadViewSet(viewsets.ModelViewSet):
# queryset = Lead.objects.all()
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = LeadSerializer
def get_queryset(self):
return self.request.user.leads.all()
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
| [
"nabilhz1994@gmail.com"
] | nabilhz1994@gmail.com |
f2734cd5b6c1d4547346763a7be1798180f76933 | 93951b1949407098d8a46c0ef059869df744b821 | /product_herb_e/wizard/__init__.py | d8a95b30cbd8e6048f674d43f53fd42c664c76db | [] | no_license | eyedz9/herb-e | 9b3cb386bbe4067c7780a591a07d921e191e91a6 | ea5671d6c18e3c8b33a6683a098690de56ecaa1c | refs/heads/master | 2020-04-06T07:08:16.640841 | 2016-09-10T16:45:29 | 2016-09-10T16:45:29 | 63,463,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py |
import new_product | [
"root@vultr.guest"
] | root@vultr.guest |
aad1005c74f5ba2b3ccf74f2f00e84c320fa4a15 | b90ca63620dc438f4003f6f508c7a978b8ba78ac | /6/rehab_arfan/python/first.py | e4b4dfcd1030a5b479586f6a04fd6a7919b111e7 | [] | no_license | aqueed-shaikh/submissions | 1718d08a97bcf70b0a78900ba6b6470cd77ffba7 | 9d4562de4ddae6f5f682a1a210d9e62a3c30c3f7 | refs/heads/master | 2021-05-27T15:02:46.924276 | 2013-12-17T04:03:40 | 2013-12-17T04:03:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | #random functions
def bigger(a,b):
if a > b:
return a
else:
return b
def biggest(a,b,c):
return bigger(a,bigger(b,c))
def median(a,b,c):
if bigger(a,b) < c:
return bigger(a,b)
else:
if bigger(a,c) < b:
return bigger(a,c)
else:
return bigger(b,c)
def print_multiplication_table(n):
i = 1
while i <= n:
j = 1
while j <= n:
print str(i) + " * " + str(j) + " = " + str(i*j)
j = j + 1
i = i + 1 | [
"areha308@gmail.com"
] | areha308@gmail.com |
6fe9f60a6e047c4ddbc46c705e2af0f9e2a4b2a7 | 0ec9e67e3d11982e1c6eb23375490217f9960dd3 | /UCB_Python/week04_day02_Python3/06-Stu_Shifty/Solved/shifty_solved_Bonus.py | 0a89cc1fa73b1703985d4ccfb23b6d4e3e29084b | [] | no_license | yamscha/repo_class | 53fb317394b3a469c1b8f1d5dfbcf89982b4f0c4 | d8080ea15c2387789f89292412a7a7a047df1a21 | refs/heads/master | 2020-05-02T10:59:29.839497 | 2019-03-27T03:54:50 | 2019-03-27T03:54:50 | 177,914,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,643 | py | # List of letters in the alphabet
regularAlphabet = ['a', 'b', 'c', 'd', 'e', 'f',
'g', 'h', 'i', 'j', 'k', 'l',
'm', 'n', 'o', 'p', 'q', 'r',
's', 't', 'u', 'v', 'w', 'x',
'y', 'z']
# Empty list for the shifted letters of the cipher
cipherAlphabet = []
# Sentence to encode
sentence = input("Enter a sentence to be shifted: ")
# Number of letters we will be shifting to create our cipher
shiftNumber = int(input("Enter a shift number: "))
# Variable to hold our encoded sentence
newSentence = ""
# Loop through the alphabet (26 minus the shiftNumber times to account for overage)
for i in range(0, len(regularAlphabet)-shiftNumber):
# Position each letter shifted from its original position in the alphabet
cipherAlphabet.append(regularAlphabet[i+shiftNumber])
# Loop through the beginning part of the alphabet and separately add it to the cipher list
for i in range(0, shiftNumber):
# Position each letter shifted from its original position in the alphabet
cipherAlphabet.append(regularAlphabet[i])
# Loops through each character in the sentence string
for i in sentence:
# Handle the space by skipping it
if i != " ":
# Determine the index location of the letter in the alphabet
letterPosition = regularAlphabet.index(i)
# Add the encoded letter to the new sentence
newSentence = newSentence + cipherAlphabet[letterPosition]
# If the character is a space, immediately incorporate it.
else:
newSentence = newSentence+" "
# Print the sentence to the screen
print("Your ciphered sentence is: " + newSentence)
| [
"yamini@github.com"
] | yamini@github.com |
6fe4d48456469dcfa58a924d7e50b66908cadf95 | 0abd8f28aa103342ba9a1e48aabdd8ebba262ae9 | /simannealsolve.py | 02a3d593df44b98c6df3669dd08ccc544b2be635 | [] | no_license | con-ji/cs170proj | 6851a31e6bf37d0d1a79719ea7d445f9f88797d9 | 3c60b53674c801135825addcf5c9c710f9d8f4c6 | refs/heads/master | 2021-08-22T18:34:35.815544 | 2017-12-01T00:04:28 | 2017-12-01T00:04:28 | 112,406,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,538 | py | '''
Uses simulated annealing to solve this. For realsies this time.
Simulated Annealing Python Library: https://github.com/perrygeo/simanneal
TODO:
input20_3.in
input35_4.in
'''
import simannealsolve
import sys
from simanneal import Annealer
import math
import random
class WizardConstraints(Annealer):
def __init__(self, state, num_vars, constraints):
self.num_vars = num_vars
self.curr_errors = [0] * num_vars
self.constraints = constraints
super(WizardConstraints, self).__init__(state)
def move(self):
# random swaps now xd
swaps = random.sample(range(self.num_vars), 2)
self.state[swaps[0]], self.state[swaps[1]] = self.state[swaps[1]], self.state[swaps[0]]
# objective function - minimize total sum of errors
def energy(self):
# current location of each wizard
curr_loc = {}
for i in range(len(self.state)):
curr_loc[self.state[i]] = i
# iterate through constraints, checking each
errors = 0
for c in self.constraints:
w1, w2, w3 = c[0], c[1], c[2]
# check if w3 out of w1, w2 range
if (curr_loc[w3] > curr_loc[w1] and \
curr_loc[w3] < curr_loc[w2]):
errors += 1
elif (curr_loc[w3] < curr_loc[w1] and \
curr_loc[w3] > curr_loc[w2]):
errors += 1
return errors
def get_wizards(num_vars, cs):
wizards = set()
for c in cs:
wizards.add(c[0])
wizards.add(c[1])
wizards.add(c[2])
return list(wizards)
def solve(num_vars, constraints):
init_state = get_wizards(num_vars, constraints)
wizard_solver = WizardConstraints(init_state, num_vars, constraints)
auto_schedule = wizard_solver.auto(minutes=15,steps=150000)
wizard_solver.set_schedule(auto_schedule)
wizard_solver.copy_strategy = "slice"
state, x = wizard_solver.anneal()
print(wizard_solver.energy())
return state
'''
Parse the input file, call the methods and return the result.
'''
def parse(input_file):
inputs = open(input_file, "r")
result = open("output" + str(input_file[-7:-3]) + ".in", "w")
input_list = inputs.readlines()
inputs.close()
num_vars = int(input_list[0])
num_constraints = int(input_list[1])
input_list = [line.split() for line in input_list[2:]]
for s in simannealsolve.solve(num_vars, input_list):
result.write(s + " ")
result.close()
if __name__ == '__main__':
parse(sys.argv[1])
| [
"jason-ji@berkeley.edu"
] | jason-ji@berkeley.edu |
fc49c1c9eae4f3ff37984f06b97134064b5b424a | fa181e5dddf9505ccf51cbc7a883bd5b1d30bdd2 | /almacenes/models.py | 63fa237d961b5a26c49b8b11b33673e9c53c13b2 | [] | no_license | tachuelota/automotriz | a39e10d8bb686d0268464149fddc575de364a8a0 | 0ca1c70d0579a6822e83a36861213eaae655a3d7 | refs/heads/master | 2021-01-24T03:43:06.279169 | 2013-10-21T13:55:41 | 2013-10-21T13:55:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,518 | py | # -*- coding: utf-8 -*-
from django.db import models
from common.models import TipoMoneda
class Marca(models.Model):
"""
"""
descripcion = models.CharField(max_length=100, verbose_name=u"descripción")
abreviatura = models.CharField(max_length=5)
def __unicode__(self):
return u"%s" % self.descripcion
class Unidad(models.Model):
"""
"""
descripcion = models.CharField(max_length=20, verbose_name=u"descripción")
abreviatura = models.CharField(max_length=5)
def __unicode__(self):
return u"%s" % self.abreviatura
class Meta:
verbose_name_plural = u"Unidades"
class Producto(models.Model):
"""
"""
codigo = models.CharField(max_length=20, verbose_name=u"código")
descripcion = models.CharField(max_length=100, verbose_name=u"descripción")
abreviatura = models.CharField(max_length=5)
unidad = models.ForeignKey(Unidad, null=True, blank=True)
medida = models.CharField(max_length=10, null=True, blank=True)
aro = models.CharField(max_length=5, null=True, blank=True)
pr = models.CharField(max_length=10, null=True, blank=True,
verbose_name=u"PR")
uso = models.CharField(max_length=10, null=True, blank=True)
def __unicode__(self):
return u"%s" % self.descripcion
class PrecioProducto(models.Model):
"""
"""
descripcion = models.CharField(max_length=20, verbose_name=u"descripción")
#TODO: revisar como truncar a dos decimales al guardar
tipo_moneda = models.ForeignKey(TipoMoneda, verbose_name=u"Tipo de moneda")
precio = models.FloatField()
producto = models.ForeignKey(Producto)
TIPO_PRECIO_CHOICES = (
(u"C", u"Compra"),
(u"V", u"Venta"),
)
tipo_precio = models.CharField(max_length=1, choices=TIPO_PRECIO_CHOICES,
verbose_name=u"tipo de precio")
def __unicode__(self):
return u"%s - %s" % (self.descripcion, self.precio)
class Meta:
verbose_name = u"Precio por producto"
verbose_name_plural = u"Precios por producto"
class Sucursal(models.Model):
"""
"""
descripcion = models.CharField(max_length=100, verbose_name=u"descripción")
direccion = models.CharField(max_length=150, verbose_name=u"dirección")
telefono = models.CharField(max_length=30, verbose_name=u"teléfono",
null=True, blank=True)
def __unicode__(self):
return u"%s" % self.descripcion
class Meta:
verbose_name_plural = u"Sucursales"
class Almacen(models.Model):
"""
"""
descripcion = models.CharField(max_length=100, verbose_name=u"descripción")
sucursal = models.ForeignKey(Sucursal)
productos = models.ManyToManyField(Producto, through='ProductoAlmacen')
def __unicode__(self):
return u"%s" % self.descripcion
class Meta:
verbose_name = u"Almacén"
verbose_name_plural = u"Almacenes"
class ProductoAlmacen(models.Model):
"""
"""
producto = models.ForeignKey(Producto)
almacen = models.ForeignKey(Almacen, verbose_name=u"almacén")
unidades = models.FloatField()
fecha_ultimo_movimiento = models.DateField(
null=True, blank=True, verbose_name=u"fecha de último movimiento")
def __unicode__(self):
return u"%s - %s" % (self.producto, self.almacen)
class Meta:
verbose_name = u"Producto por almacén"
verbose_name_plural = u"Productos por almacén" | [
"luisjarufe@gmail.com"
] | luisjarufe@gmail.com |
628d2152aa36cbc45f0bf606e330b8fc8bb790a2 | 716903317278fcaa1d2c01980c13bf214145773e | /IA-KNN-IRIS.py | 3bcc7863eae7b00c978caa97e52c13a5a8c0ae05 | [] | no_license | JkevinX23/intro-IA | 9a6e28cd27253c72469aa62e9d3a4d505c170fbe | 1257ec1141ac0c2cccfb2aea7f40166f10cc7685 | refs/heads/master | 2020-05-20T23:42:13.904480 | 2019-05-16T18:37:39 | 2019-05-16T18:37:39 | 185,808,229 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,588 | py | from sklearn.datasets import load_iris #Carrega o dataset de dados da iris
from random import randint
from math import sqrt
from operator import itemgetter
from numpy import array
from numpy import choose
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
rp_global = []
testes_global = []
'''
Kill é a quantidade de acertos
Miss é a quantidade de erros
previsão, são as classificações geradas pelo algoritmo
rp_global é a classificação correta
se a previsão for igual a classificação correta, incremento kill (acertos)
se não, incremento miss(erro)
a função retorna a porcentagem de acertos, que é dada por kill(acertos) /kill+miss(total) *100 (para devolver a porcentagem)
'''
def calculaPrecisao(previsao):
kill = 0
miss = 0
for i in range (len(previsao)):
if previsao[i] == rp_global[i]:
kill = kill + 1
else: miss = miss + 1
taxaPrecisao = (kill/(kill + miss))*100
return taxaPrecisao
'''
Função Processa resultados
recebe o retorno de controleTestes(uma lista de lista com todos os k individuos mais próximos para cada teste)
faz uma votação para classificar os testes. Ou seja:
é observado a classificação de cada um dos k individuos próximos
se for 0, v1 é incrementado
se for 1, v2 é incrementado
se for 2, v3 é incrementado
No final, os individuos são classificados de acordo com o tipo que mais apareceu (O mais votado)
'''
def processaResultados(results):
conclusao = []
for x in range (len(results)):
v1 = 0
v2 = 0
v3 = 0
for y in range(len(results[x])):
if results[x][y][4] == 0 :
v1 = v1 + 1
elif results[x][y][4] == 1:
v2 = v2 + 1
elif results[x][y][4] == 2:
v3 = v3 + 1
if v2 > v1 and v2 > v3:
conclusao.append(1)
elif v3>v1 and v3 > v2:
conclusao.append(2)
elif v1 > v2 and v1 > v3:
conclusao.append(0)
return conclusao
'''
Função controleTestes
a função controleTestes, passa todos os testes na função KNN e armazena seu resultado
retorno, uma lista de lista com todos os k individuos mais próximos para cada teste
'''
def controleTestes(treinos,testes,k):
resultados = []
for i in range(len(testes)):
resultados.append(KNN(treinos,testes[i],k))
return resultados
def distanciaEuclidiana(i1, i2, lenght): #Calcula a distância euclidiana entre i1 e i2
distance = 0
for x in range(lenght):
distance += pow((i1[x] - i2[x]), 2) #Pow vem de potenciação, assim, i1 - i2 está sendo elevado ao quadrado
return sqrt(distance)#Retorna a raiz do quadrado da subtração de i1 e i2 (valor da distância euclidiana)
'''
Função KNN
Classificados são os dados de treino, e teste os dados a serem classifiicados
e k é a quantidade de elementos proximos/classificados que serão retornados.
o retorno é uma lista com os k elementos proximos para cada individuo
'''
def KNN(classificados,teste,k):
testes_global.append(teste)
distancias = []
lenght = len(teste) - 1
for x in range(len(classificados)):
dist = distanciaEuclidiana(teste,classificados[x],lenght)
distancias.append((classificados[x], dist))
distancias.sort(key = itemgetter(1))
knn = []
for x in range(k):
knn.append(distancias[x][0])
return knn
'''
SelecionaDadosTreino irá separar os dados em treino e teste.
SelecionaDadosTreino recebe o ['data'] que são os dados na variavel iris
e typeFlor são as classificações desses dados (tipo da flor para cada individuo de data)
x é a quantidade de elementos que irão pro treinamento, o restante servirá para os testes
iris e typeflor são arrays, assim o tolist() transforma-o em lista (Não consigo remover elementos de um array)
n é uma variavel de controle
a função seleciona os x individuos aleatoriamente, insere o tipo dela no final da linha e
deleta da lista para que não possa ser escolhido novamente
os valores que sobram na lista serão os dados de teste, assim, não é anexado a resposta.. estas serão armazenadas
em rp_global para que a taxa de acertos seja gerada futuramente.
'''
def selecionaDadosTreino(iris,typeFlor,k,x):
iris = iris.tolist()
typeFlor = typeFlor.tolist()
n=0
treino = [] # Treino recebe 'x' individuos
tipos=[]
t = len(iris)-1
for i in range (x):
c = randint(0,t-n)
linha=[]
for j in range (4):
linha.append(iris[c][j])
linha.append(typeFlor[c])
treino.append(linha)
del iris[c]
del typeFlor[c]
n=n+1
for i in range((t+1)-n):
rp_global.append(typeFlor[i])
return controleTestes(treino,iris,k)
'''
os treinos serão chamados na função controleTestes(), pois a função KNN recebe todos os dados de treino
e um dado de teste para classifica-lo
'''
def printPrecisao(real, esperado):
for i in range (len(real)):
if real[i] == esperado [i]:
print("[",real[i]," : ",esperado[i],"]", "Acertou ")
else: print ("[",real[i]," : ",esperado[i],"]","Errou ")
if __name__ == "__main__":
pass
mediaPrecisao = []
k=5 #Quantidade de individuos próximos
x= 100 #Quantidade de individuos para treino - 100/150 -> 66.67%
votos = []
iris = load_iris() #Carrega o dataset com os dados da iris
resultados = selecionaDadosTreino(iris['data'],iris['target'],k,x)
#divide treino e teste e passa os resultados no algoritmo knn retornando os k mais proximos
votos = processaResultados(resultados)
#avalia os mais proximos e decide de qual tipo são as flores
precisao = calculaPrecisao(votos)
printPrecisao(votos,rp_global)
#print("Taxa de acerto: ", precisao)
#avalia a taxa de acerto
total = 0
#for i in range(100):
#total+=mediaPrecisao[i]
#print("PRECISAO MEDIA: ",total/100 )
#print(array(testes_global[0][3]))
#c =testes_global[:len(testes_global)][0]
print(len(testes_global))
x = pd.DataFrame(testes_global, columns=['Sepal Length', 'Sepal Width', 'Petal Length', 'Petal Width'])
y = pd.DataFrame(votos, columns=['Target'])
x2 = pd.DataFrame(testes_global, columns=['Sepal Length', 'Sepal Width', 'Petal Length', 'Petal Width'])
y2 = pd.DataFrame(rp_global, columns=['Target'])
plt.figure(figsize=(12,3))
colors = np.array(['red', 'green', 'blue'])
#nrows=1, ncols=2, plot_number=1
plt.subplot(1, 2, 1)
plt.scatter(x['Sepal Length'], x['Sepal Width'], c=colors[y['Target']], s=40, alpha=0.8)
plt.xlabel("Sepal Length (cm)")
plt.ylabel("Sepal Width (cm)")
plt.title('Resultado obtido')
plt.subplot(1,2,2)
plt.scatter(x2['Sepal Length'], x2['Sepal Width'], c= colors[y2['Target']], s=40, alpha= 0.8)
plt.xlabel("Sepal Length (cm)")
plt.ylabel("Sepal Width (cm)")
plt.title('Resultado esperado')
plt.show()
colors = np.array(['red', 'green', 'blue'])
#nrows=1, ncols=2, plot_number=1
plt.subplot(1, 2, 1)
plt.scatter(x['Petal Length'], x['Petal Width'], c=colors[y['Target']], s=40, alpha= 0.8)
plt.xlabel("Petal Length (cm)")
plt.ylabel("Petal Width (cm)")
plt.title('Resultados obtidos ')
plt.subplot(1,2,2)
plt.scatter(x2['Petal Length'], x2['Petal Width'], c= colors[y2['Target']], s=40, alpha= 0.8)
plt.xlabel("Petal Length (cm)")
plt.ylabel("Petal Width (cm)")
plt.title('Resultados esperados')
plt.show() | [
"kevinmira12@gmail.com"
] | kevinmira12@gmail.com |
b563c65b4bfd34cd2ea3ebc55a52c57631c4aa1c | 73d9d5e1618b207429aac4ce260292e8f7b0c33d | /agent.py | 9e865aecb493c16f941f2090d2417b62edcf1e2f | [] | no_license | saidulislam/deep-reinforcement-taxi-v2 | 5aa152904e81e5b891b6e94365880ac8724a6fe9 | d08eb1a14c1023a0f3ba15d3d6042207207291bf | refs/heads/master | 2021-02-14T12:05:02.484583 | 2020-03-04T04:07:49 | 2020-03-04T04:07:49 | 244,802,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,994 | py | import numpy as np
import gym
import random
from collections import defaultdict
class Agent:
def __init__(self, nA=6):
""" Initialize agent.
Params
======
- nA: number of actions available to the agent
"""
self.nA = nA
self.Q = defaultdict(lambda: np.zeros(self.nA))
self.epsilon = 1 # Exploration rate
self.decay = 0.5
self.gamma = 0.99 # Discount rate
self.alpha = 0.5
self.learning_method = "EXPECTED_SARSA" # implemented methods are SARSA, SARSA_MAX, EXPECTED_SARSA
self.i_episode = 0
self.epsilon_function = self.decayed_epsilon
def calculate(next_state):
return self.Q[next_state][self.select_action(next_state)]
self.next_value_function = calculate
def decayed_epsilon(self):
self.epsilon *= self.decay
def select_action(self, state):
""" Given the state, select an action.
Params
======
- state: the current state of the environment
Returns
=======
- action: an integer, compatible with the task's action space
"""
probs = np.ones(self.nA) * self.epsilon /self.nA
probs[np.argmax(self.Q[state])] += 1 - self.epsilon
return np.random.choice(np.arange(self.nA), p=probs)
def step(self, state, action, reward, next_state, done):
""" Update the agent's knowledge, using the most recently sampled tuple.
Params
======
- state: the previous state of the environment
- action: the agent's previous choice of action
- reward: last reward received
- next_state: the current state of the environment
- done: whether the episode is complete (True or False)
"""
if (done == False):
self.epsilon = 1.0 / (1.0 + self.i_episode)
if(self.learning_method == "SARSA"):
next_action = self.select_action(state)
self.Q[state][action] += self.alpha * (reward + self.gamma * self.Q[next_state][next_action] - self.Q[state][action])
elif(self.learning_method == "SARSA_MAX"):
next_action = self.select_action(state)
self.Q[state][action] += self.alpha * (reward + self.gamma * np.max(self.Q[next_state]) - self.Q[state][action])
elif(self.learning_method == "EXPECTED_SARSA"):
probs = np.ones(self.nA) * self.epsilon /self.nA
probs[np.argmax(self.Q[state])] += 1 - self.epsilon
next_action = np.random.choice(np.arange(self.nA), p=probs)
self.Q[state][action] += self.alpha * (reward + self.gamma * np.sum(self.Q[next_state] * probs) - self.Q[state][action])
else:
self.Q[state][action] += self.alpha * (reward - self.Q[state][action])
self.i_episode += 1 | [
"noreply@github.com"
] | saidulislam.noreply@github.com |
769f55187c8980e95ac6b89b8f5d6894dfbc8721 | ea9d660733a645330949a80fc50a6313b234619c | /src/user/views.py | 8d1e017ab76d21e9281bb5a633e025dec0e70e42 | [] | no_license | saurabh1e/payNudge | 4cc680cd9d7618df51c50a5da7af3b08c45365ce | a2ea3c82203a908c15a4f1a0f945dd0cd8feb901 | refs/heads/master | 2022-12-14T13:18:07.684321 | 2019-07-14T09:16:29 | 2019-07-14T09:16:29 | 196,816,495 | 1 | 1 | null | 2022-09-16T18:05:37 | 2019-07-14T09:16:22 | HTML | UTF-8 | Python | false | false | 6,588 | py | from random import randint
from datetime import timedelta
from flask import request, jsonify, make_response, redirect, json
from flask_jwt_extended import (create_access_token, jwt_required)
from flask_restful import Resource
from flask_security.utils import verify_and_update_password, login_user
from flask_security import current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from src import BaseView, limiter, db, redis_store, sms
from src import api
from src.user.schemas import UserSchema
from src.utils.api import set_user
from src.utils.methods import List, Fetch, Create, Update
from .models import User, UserToUser
from .resources import UserResource
@api.register()
class UserView(BaseView):
api_methods = [List, Fetch, Create, Update]
@classmethod
def get_resource(cls):
return UserResource
class UserLoginResource(Resource):
model = User
decorators = [limiter.limit("300/day;30/hour;5/minute;2/second")]
def post(self):
if request.json:
data = request.json
print(data)
user = self.model.query.filter(self.model.mobile_number == data['mobile_number']).first()
print(user)
if user and verify_and_update_password(data['password'], user) and login_user(user):
expires = timedelta(days=365)
return make_response(
jsonify({'id': user.id,
'user': UserSchema(only=('id', 'email', 'first_name', 'last_name', 'roles', 'business_name')).dump(user).data,
'authentication_token': create_access_token(identity=user.id, expires_delta=expires)}), 200)
else:
return make_response(jsonify({'meta': {'code': 403}}), 403)
else:
data = request.form
user = self.model.query.filter(self.model.email == data['email']).first()
if user and verify_and_update_password(data['password'], user) and login_user(user):
return make_response(redirect('/admin/', 302))
else:
return make_response(redirect('/api/v1/login', 403))
class UserRegisterResource(Resource):
model = User
schema = UserSchema
def post(self):
data = request.json
user = User.query.filter(User.mobile_number == data['mobile_number']).first()
if user:
return make_response(jsonify({}), 400)
user, errors = self.schema().load(data)
if errors:
return make_response(jsonify(errors), 400)
# try:
# db.session.add(user)
# db.session.commit()
# except (IntegrityError, InvalidRequestError) as e:
# print(e)
# db.session.rollback()
# return make_response(jsonify(str(e)), 400)
redis_store.setex('user:' + data['mobile_number'], 10 * 600, json.dumps(data))
send_otp(user.mobile_number, 'Your otp to sign up at zoPay is {0}. Valid for 10 minutes.')
return make_response(jsonify({}), 200)
def send_otp(phone: str, content) -> bool:
otp = randint(100000, 999999)
redis_store.setex(phone, 10 * 600, otp)
try:
business_name = current_user.business_name
except AttributeError:
business_name = ''
content = [dict(message=content.format(otp, business_name), to=[phone])]
sms.send_sms(content=content)
return True
class UserVerifyResource(Resource):
model = User
schema = UserSchema
def post(self):
data = request.json
if redis_store.get('user:' + data['mobile_number']) and redis_store.get(data['mobile_number']).decode('utf-8') == str(data['otp']):
user, errors = self.schema().load(json.loads(redis_store.get('user:' + data['mobile_number']).decode('utf-8')))
if errors:
return make_response(jsonify(errors), 400)
try:
db.session.add(user)
db.session.commit()
except (IntegrityError, InvalidRequestError) as e:
print(e)
db.session.rollback()
return make_response(jsonify({}), 400)
expires = timedelta(days=365)
return make_response(
jsonify({'id': user.id,
'user': UserSchema().dump(user, only=('id', 'email', 'first_name', 'last_name', 'roles', 'business_name')),
'authentication_token': create_access_token(identity=user.id,
expires_delta=expires)}), 200)
else:
return make_response(jsonify({'meta': {'code': 403}}), 403)
class CustomerRegistrationResource(Resource):
model = User
method_decorators = [set_user, jwt_required]
def post(self):
data = request.json
user = self.model.query.filter(self.model.mobile_number == data['mobile_number']).first()
if not user:
user_data = dict(mobile_number=data['mobile_number'], first_name=data['first_name'])
user, errors = UserSchema().load(user_data)
if errors:
return make_response(jsonify(errors), 400)
db.session.add(user)
db.session.commit()
send_otp(data['mobile_number'],
'Your otp to verify your number at {1} is {0}. Please share your otp with {1}')
return make_response(jsonify({}), 200)
class CustomerVerifyResource(Resource):
model = User
method_decorators = [set_user, jwt_required]
def post(self):
data = request.json
user = self.model.query.filter(self.model.mobile_number == data['mobile_number']).first()
if user and redis_store.get(data['mobile_number']).decode('utf-8') == data['otp']:
utu = UserToUser()
utu.business_owner_id = current_user.id
utu.customer_id = user.id
db.session.add(utu)
db.session.commit()
return make_response(jsonify({'id': user.id, 'first_name': user.first_name}), 200)
else:
return make_response(jsonify({'meta': {'code': 403}}), 403)
api.add_resource(UserLoginResource, '/login/', endpoint='login')
api.add_resource(UserRegisterResource, '/register/', endpoint='register')
api.add_resource(UserVerifyResource, '/verify/', endpoint='verify')
api.add_resource(CustomerRegistrationResource, '/customer_register/', endpoint='customer_register')
api.add_resource(CustomerVerifyResource, '/customer_verify/', endpoint='customer_verify') | [
"saurabh.1e1@gmail.com"
] | saurabh.1e1@gmail.com |
eb51bd2537abf626faae83f8b8dc0d660c4f5fc0 | 485b6a0816a28e84211d06ea78bf056d5d1759b5 | /cat_vs_dog_app/views.py | 73449a690a13f4cc28455c565a6ced3f931cff4f | [] | no_license | AlekhyaD/cat-vs-dog--django | cb2950ef2c1af807182a3abf52d3c017b24dacba | d812d0c3dfa2dd8cd52c807c9ba376987ccfba95 | refs/heads/main | 2023-01-21T01:12:57.431477 | 2020-12-06T17:04:46 | 2020-12-06T17:04:46 | 319,074,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,973 | py | from django.shortcuts import render
# Create your views here.
from tensorflow import keras
from keras.models import load_model
import numpy as np
#import cv2
from keras.preprocessing import image
from PIL import Image
from django.shortcuts import render
from keras.preprocessing import image
import numpy as np
import tensorflow as tf
from keras.models import load_model
from keras.preprocessing.image import load_img
global graph,model
from PIL import Image
from django.core.files.uploadedfile import InMemoryUploadedFile
import io
model = load_model('catvsdog_classification_app/dogvscat.model')
model.summary()
class_dict = {'cat': 0, 'dog': 1}
class_names = list(class_dict.keys())
def prediction(request):
if request.method == 'POST' and request.FILES['myfile']:
post = request.method == 'POST'
myfile = request.FILES['myfile']
# dimensions of our images.
#img = Image.open(myfile)
#img_width, img_height = 150, 150
#img = image.load_img(myfile, target_size = (img_width, img_height))
#img = image.img_to_array(img)
#img = np.expand_dims(img, axis = 0)
img = Image.open(myfile)
print("**************************************************************")
print(type(img))
#img = img.convert('RGB')
img = img.resize((150, 150))
img = image.img_to_array(img)
#img = image.load_img(myfile, target_size=(224, 224))
#img = image.img_to_array(img)
img = np.expand_dims(img, axis=0)
#img = img/255
predictions = model.predict(img)
print(predictions)
if predictions == 0:
predictions = 'Cat'
elif predictions == 1:
predictions = 'Dog'
print( "Prediction completed: this is a", predictions)
return render(request, "catvsdog_classification_app/prediction.html", {
'result': predictions})
else:
return render(request, "catvsdog_classification_app/prediction.html")
'''
img = cv2.imread('myfile')
img = cv2.resize(img, (150, 150))
img = np.reshape(img, [1, 150, 150, 3])
predictions = model.predict_classes(img)
preds = preds.flatten()
m = max(preds)
for index, item in enumerate(preds):
if item == m:
result = class_names[index]
return render(request, "catvsdog_classification_app/prediction.html", {
'result': result})
else:
return render(request, "catvsdog_classification_app/prediction.html")
'''
'''
if predictions == 0:
predictions = 'Cat'
elif predictions == 1:
predictions = 'Dog'
print( "Prediction completed: this is a", predictions)
return render(request, "catvsdog_classification_app/prediction.html", {
'result': predictions})
else:
return render(request, "catvsdog_classification_app/prediction.html")
''' | [
"noreply@github.com"
] | AlekhyaD.noreply@github.com |
ad360f168ac25df6f7cb52dbb954cb8fb3f80db5 | f9f74f44f3e81bbe2ee70dcba36a2285037eeeb7 | /App/models.py | 288f62093395f271b873bcab3c1205fdae8b78d4 | [] | no_license | sujits-hub/UserActivity | 9b9425fe209cbd20e44d342ea914c54942584aca | 8de69b9eb3bce99276bb1b64d604fa97f5651e07 | refs/heads/master | 2022-09-06T04:44:24.790104 | 2020-05-27T08:07:39 | 2020-05-27T08:07:39 | 267,040,757 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,131 | py | from django.db import models
from django.utils import timezone
class User(models.Model):
'''
This Model is used to store information about the user.
attributes:
user_id: unique CharField() which contains the id of the user
real_name: CharField() which contains the name of the user
tz: DateTimeField() which contains the timezone
'''
user_id = models.CharField(max_length=30)
real_name = models.CharField(max_length=30)
tz = models.CharField(max_length=30)
def __str__(self):
return self.user_id
class ActivityPeriods(models.Model):
'''
This Model is used to store the session of the user that is activity periods of the user.
attributes:
user: ForeignKey attribute that contains User information
start_time: DateTimeField() which contains the start_time of the activity of the user
end_time: DateTimeField() which contains the end_time of the activity of the user
'''
user = models.ForeignKey(User, on_delete=models.CASCADE)
start_time = models.DateTimeField(default=timezone.now())
end_time = models.DateTimeField(default=timezone.now())
def __str__(self):
return self.user.real_name
| [
"sujitcpatil1851@gmail.com"
] | sujitcpatil1851@gmail.com |
f69f5d8ce76527e186cccc6f06e466798fe7b256 | ef1bf421aca35681574c03014e0c2b92da1e7dca | /pyqode/core/modes/matcher.py | ae2b73693ace03cec917c4572e642bde46af1ca6 | [
"MIT"
] | permissive | pyQode/pyqode.core | 74e67f038455ea8cde2bbc5bd628652c35aff6eb | 0ffabebe4f0397d53429024f6f44db3fe97b0828 | refs/heads/master | 2020-04-12T06:36:33.483459 | 2020-01-18T14:16:08 | 2020-01-18T14:16:08 | 7,739,074 | 24 | 25 | MIT | 2020-01-18T14:16:10 | 2013-01-21T19:46:41 | Python | UTF-8 | Python | false | false | 9,836 | py | # -*- coding: utf-8 -*-
"""
This module contains the symbol matcher mode
"""
from pyqode.core.api import get_block_symbol_data
from pyqode.core.api.decoration import TextDecoration
from pyqode.core.api.mode import Mode
from pyqode.qt import QtGui
#: symbols indices in SymbolMatcherMode.SYMBOLS map
PAREN = 0
SQUARE = 1
BRACE = 2
#: character indices in SymbolMatcherMode.SYMBOLS map
OPEN = 0
CLOSE = 1
class SymbolMatcherMode(Mode):
    """ Highlights matching symbols (parentheses, braces,...)

    .. note:: This mode requires the document to be filled with
        :class:`pyqode.core.api.TextBlockUserData`, i.e. a
        :class:`pyqode.core.api.SyntaxHighlighter` must be installed on
        the editor instance.
    """
    #: known symbols {SYMBOL: (OPEN, CLOSE)}, you can customise this map to
    #: add support for other symbols
    SYMBOLS = {
        PAREN: ('(', ')'),
        SQUARE: ('[', ']'),
        BRACE: ('{', '}')
    }

    @property
    def match_background(self):
        """
        Background color of matching symbols.
        """
        return self._match_background

    @match_background.setter
    def match_background(self, value):
        self._match_background = value
        self._refresh_decorations()
        # Propagate the new style to every clone (split view) of the editor.
        if self.editor:
            for clone in self.editor.clones:
                try:
                    clone.modes.get(self.__class__).match_background = value
                except KeyError:
                    # this should never happen since we're working with clones
                    pass

    @property
    def match_foreground(self):
        """
        Foreground color of matching symbols.
        """
        return self._match_foreground

    @match_foreground.setter
    def match_foreground(self, value):
        self._match_foreground = value
        self._refresh_decorations()
        # Propagate the new style to every clone (split view) of the editor.
        if self.editor:
            for clone in self.editor.clones:
                try:
                    clone.modes.get(self.__class__).match_foreground = value
                except KeyError:
                    # this should never happen since we're working with clones
                    pass

    @property
    def unmatch_background(self):
        """
        Background color of non-matching symbols.
        """
        return self._unmatch_background

    @unmatch_background.setter
    def unmatch_background(self, value):
        self._unmatch_background = value
        self._refresh_decorations()
        # Propagate the new style to every clone (split view) of the editor.
        if self.editor:
            for clone in self.editor.clones:
                try:
                    clone.modes.get(self.__class__).unmatch_background = value
                except KeyError:
                    # this should never happen since we're working with clones
                    pass

    @property
    def unmatch_foreground(self):
        """
        Foreground color of non-matching symbols.
        """
        return self._unmatch_foreground

    @unmatch_foreground.setter
    def unmatch_foreground(self, value):
        self._unmatch_foreground = value
        self._refresh_decorations()
        # Propagate the new style to every clone (split view) of the editor.
        if self.editor:
            for clone in self.editor.clones:
                try:
                    clone.modes.get(self.__class__).unmatch_foreground = value
                except KeyError:
                    # this should never happen since we're working with clones
                    pass

    def __init__(self):
        super(SymbolMatcherMode, self).__init__()
        #: decorations currently applied to the editor by this mode
        self._decorations = []
        self._match_background = QtGui.QBrush(QtGui.QColor('#B4EEB4'))
        self._match_foreground = QtGui.QColor('red')
        self._unmatch_background = QtGui.QBrush(QtGui.QColor('transparent'))
        self._unmatch_foreground = QtGui.QColor('red')

    def _clear_decorations(self):
        # Remove every decoration this mode added and forget about them.
        for deco in self._decorations:
            self.editor.decorations.remove(deco)
        self._decorations[:] = []

    def symbol_pos(self, cursor, character_type=OPEN, symbol_type=PAREN):
        """
        Find the corresponding symbol position (line, column) of the specified
        symbol. If symbol type is PAREN and character_type is OPEN, the
        function will look for '('.

        :param cursor: QTextCursor
        :param character_type: character type to look for (open or close char)
        :param symbol_type: symbol type (index in the SYMBOLS map).
        """
        retval = None, None
        original_cursor = self.editor.textCursor()
        # Temporarily move the editor cursor so _match runs from *cursor*.
        self.editor.setTextCursor(cursor)
        block = cursor.block()
        data = get_block_symbol_data(self.editor, block)
        self._match(symbol_type, data, block.position())
        for deco in self._decorations:
            if deco.character == self.SYMBOLS[symbol_type][character_type]:
                retval = deco.line, deco.column
                break
        # Restore the user's cursor and drop the temporary decorations.
        self.editor.setTextCursor(original_cursor)
        self._clear_decorations()
        return retval

    def _refresh_decorations(self):
        # Re-apply the existing decorations with the current color settings.
        for deco in self._decorations:
            self.editor.decorations.remove(deco)
            if deco.match:
                deco.set_foreground(self._match_foreground)
                deco.set_background(self._match_background)
            else:
                deco.set_foreground(self._unmatch_foreground)
                deco.set_background(self._unmatch_background)
            self.editor.decorations.append(deco)

    def on_state_changed(self, state):
        # (Dis)connect symbol matching from cursor moves when the mode is
        # enabled/disabled.
        if state:
            self.editor.cursorPositionChanged.connect(self.do_symbols_matching)
        else:
            self.editor.cursorPositionChanged.disconnect(
                self.do_symbols_matching)

    def _match(self, symbol, data, cursor_pos):
        # Highlight the pair for *symbol* if the text cursor sits right
        # before an opening char or right after a closing char.
        symbols = data[symbol]
        for i, info in enumerate(symbols):
            pos = (self.editor.textCursor().position() -
                   self.editor.textCursor().block().position())
            if info.character == self.SYMBOLS[symbol][OPEN] and \
                    info.position == pos:
                self._create_decoration(
                    cursor_pos + info.position,
                    self._match_left(
                        symbol, self.editor.textCursor().block(), i + 1, 0))
            elif info.character == self.SYMBOLS[symbol][CLOSE] and \
                    info.position == pos - 1:
                self._create_decoration(
                    cursor_pos + info.position,
                    self._match_right(
                        symbol, self.editor.textCursor().block(), i - 1, 0))

    def _match_left(self, symbol, current_block, i, cpt):
        # Scan forward (towards the end of the document) for the closing
        # character that balances an opening one; *cpt* counts nested opens.
        # Returns True and decorates the closing char when found.
        while current_block.isValid():
            data = get_block_symbol_data(self.editor, current_block)
            parentheses = data[symbol]
            for j in range(i, len(parentheses)):
                info = parentheses[j]
                if info.character == self.SYMBOLS[symbol][OPEN]:
                    cpt += 1
                    continue
                if info.character == self.SYMBOLS[symbol][CLOSE] and cpt == 0:
                    self._create_decoration(current_block.position() +
                                            info.position)
                    return True
                elif info.character == self.SYMBOLS[symbol][CLOSE]:
                    cpt -= 1
            current_block = current_block.next()
            i = 0
        return False

    def _match_right(self, symbol, current_block, i, nb_right_paren):
        # Scan backward (towards the start of the document) for the opening
        # character that balances a closing one; *nb_right_paren* counts
        # intervening closings. Returns True and decorates it when found.
        while current_block.isValid():
            data = get_block_symbol_data(self.editor, current_block)
            parentheses = data[symbol]
            for j in range(i, -1, -1):
                if j >= 0:
                    info = parentheses[j]
                if info.character == self.SYMBOLS[symbol][CLOSE]:
                    nb_right_paren += 1
                    continue
                if info.character == self.SYMBOLS[symbol][OPEN]:
                    if nb_right_paren == 0:
                        self._create_decoration(
                            current_block.position() + info.position)
                        return True
                    else:
                        nb_right_paren -= 1
            current_block = current_block.previous()
            data = get_block_symbol_data(self.editor, current_block)
            parentheses = data[symbol]
            i = len(parentheses) - 1
        return False

    def do_symbols_matching(self):
        """
        Performs symbols matching.
        """
        self._clear_decorations()
        current_block = self.editor.textCursor().block()
        data = get_block_symbol_data(self.editor, current_block)
        pos = self.editor.textCursor().block().position()
        # Try every known symbol family at the current cursor position.
        for symbol in [PAREN, SQUARE, BRACE]:
            self._match(symbol, data, pos)

    def _create_decoration(self, pos, match=True):
        # Decorate the single character at *pos*, colored as matched or
        # unmatched depending on *match*; remembers line/column/character
        # on the decoration so symbol_pos can read them back.
        cursor = self.editor.textCursor()
        cursor.setPosition(pos)
        cursor.movePosition(cursor.NextCharacter, cursor.KeepAnchor)
        deco = TextDecoration(cursor, draw_order=10)
        deco.line = cursor.blockNumber()
        deco.column = cursor.columnNumber()
        deco.character = cursor.selectedText()
        deco.match = match
        if match:
            deco.set_foreground(self._match_foreground)
            deco.set_background(self._match_background)
        else:
            deco.set_foreground(self._unmatch_foreground)
            deco.set_background(self._unmatch_background)
        self._decorations.append(deco)
        self.editor.decorations.append(deco)
        return cursor

    def clone_settings(self, original):
        # Copy the color settings from another instance (used when cloning
        # an editor).
        self.match_background = original.match_background
        self.match_foreground = original.match_foreground
        self.unmatch_background = original.unmatch_background
        self.unmatch_foreground = original.unmatch_foreground
| [
"colin.duquesnoy@gmail.com"
] | colin.duquesnoy@gmail.com |
afe320205a6db04fd97ad178bf72c89d401dc959 | c2a892b2ab87897e648d141500d04d4024a89252 | /mlpipe/cli/interface.py | 088651cd6fb99fd0ced6046de8ab05a4b82af13d | [] | no_license | robie2011/mlpipe | 52e01002ecdb8553e83dc54d582df2bdefd42892 | d6b0c803535c07afc389749c5833e43f58e9deeb | refs/heads/master | 2021-07-13T04:19:42.618649 | 2020-03-30T16:09:14 | 2020-03-30T16:09:14 | 247,807,021 | 1 | 0 | null | 2021-06-02T01:13:03 | 2020-03-16T20:09:12 | Python | UTF-8 | Python | false | false | 258 | py | from dataclasses import dataclass
from datetime import datetime
@dataclass
class ModelLocation:
    # Descriptor of a trained model stored on disk, together with the
    # training parameters it was produced with.
    name: str
    session_id: str
    path: str  # presumably the filesystem location of the saved model -- confirm
    sizeBytes: int  # size in bytes (camelCase kept: public field name)
    epochs: int
    batch_size: int
    samples: int
    metrics: str
    datetime: datetime  # NOTE: field name shadows the imported ``datetime`` class
| [
"robert.rajakone@fhnw.ch"
] | robert.rajakone@fhnw.ch |
e5f9eafb72a9cb05aa3946670ed8698211e70df4 | 96e05939da42a4366ee221e3b6282cbfe9d8b530 | /urls.py | 2c1719446150cda6823016476929a65f8bab672a | [] | no_license | drewp/photo | 522045f02f6977ef559afb8ea00a7adc38c4e609 | 8f7e7504f89dbc35855ccfd5e0df269f7ad306af | refs/heads/master | 2021-04-12T05:35:03.045418 | 2018-09-18T08:22:23 | 2018-09-18T08:22:23 | 1,566,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | import urllib, logging, os
from ns import SITE
log = logging.getLogger()
def localSite(url):
    """Return *url* as a site-relative path (always starting with '/').

    Already-relative urls are returned unchanged; absolute urls must start
    with SITE, otherwise ValueError is raised.
    """
    if url.startswith('/'):
        return url
    if not url.startswith(SITE):
        raise ValueError("%s not on site" % url)
    return url[len(SITE)-1:]
# (host, port) this site is served from; overridable through the
# environment so the same code can point at a local dev instance.
absSiteHost = (
    os.environ.get("PHOTO_SITE_HOST", "photo.bigasterisk.com"),
    int(os.environ.get("PHOTO_SITE_PORT", "80")))
def absoluteSite(url):
    """Return an absolute http url for *url* on this site's configured host.

    NOTE: marked "not correctly implemented yet" by the original author --
    the host comes from absSiteHost (env/default), which may not match the
    host the request actually arrived on (a vhost-aware context would be
    needed for that).
    """
    host, port = absSiteHost
    suffix = "" if port == 80 else ":%d" % port
    return "http://" + host + suffix + localSite(url)
def photoUri(filename):
    """Map an absolute picture path under /my/pic/ to its site URI."""
    prefix = '/my/pic/'
    assert filename.startswith(prefix)
    rel = filename[len(prefix):]
    return SITE[urllib.quote(rel)]
def relPath(uri):
    """Strip the SITE prefix from *uri*, keeping the leading slash.

    >>> relPath('http://photo.bigasterisk.com/foo')
    '/foo'
    """
    assert uri.startswith(SITE)
    cut = len(SITE) - 1
    return uri[cut:]
| [
"drewp@bigasterisk.com"
] | drewp@bigasterisk.com |
927cc866efe9fa8f8e2e5e77a5ef3af51c28806d | 4f212c4b20581b2303398e8d77573177a2f65744 | /q1.py | 297a598b1bcf0961c75f85afb2134c4fca6066b8 | [] | no_license | shivam15s/assignment-3-shivam15s | 156840f505144a128934ec375ebdd14f9e3fd8b0 | b5310ed4fd604db1372d832846d350430918ccd5 | refs/heads/main | 2023-04-10T22:29:26.912683 | 2021-04-22T16:20:10 | 2021-04-22T16:20:10 | 358,378,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,434 | py | import numpy as np
from numpy.lib.npyio import load
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.datasets import load_breast_cancer
from sklearn.preprocessing import MinMaxScaler
from logisticRegression.logisticRegression import LogisticRegression
from metrics import *
np.random.seed(42)

# Load the breast-cancer dataset and min-max scale every feature to [0, 1].
data = load_breast_cancer()
X = pd.DataFrame(data.data, columns=data.feature_names)
scaler = MinMaxScaler()
X = pd.DataFrame(scaler.fit_transform(X))
y = pd.Series(data.target)

# Split the rows into 3 contiguous folds for cross-validation.
folds = 3
fold_size = X.shape[0]//folds
datasets = [X.iloc[fold_size*i: fold_size*(i+1)] for i in range(folds)]

print("\n----------Gradient Descent (Formula vs Autograd)----------")
# Fit on the full dataset twice: once with the hand-derived gradient and
# once with autograd, and compare training accuracies.
for fit_intercept in [True]:
    LR = LogisticRegression(fit_intercept=fit_intercept)
    #LR = LogisticRegression(fit_intercept=fit_intercept, regularization='L1', reg_lambda=5)
    #LR = LogisticRegression(fit_intercept=fit_intercept, regularization='L2', reg_lambda=0.5)
    LR.fit_vectorised(X, y, X.shape[0], n_iter = 100, lr=1)
    y_hat = LR.predict(X)
    print("Gradient Descent using Formula: ", accuracy(y_hat, y))
    LR.fit_autograd(X, y, X.shape[0], n_iter = 100, lr=1)
    y_hat = LR.predict(X)
    print("Gradient Descent using Autograd", accuracy(y_hat, y))

#3 folds cross validation
print("\n----------3 Folds Accuracy----------")
fold_acc = []
for itr1 in range(folds):
    test = datasets[itr1]
    frames1 = []
    for j in range(folds):
        if j!=itr1:
            frames1.append(datasets[j])
    """
    Divides dataset into folds_outer parts. Takes folds - 1 parts for Training
    and 1 part for Testing
    """
    # Concatenate the folds - 1 training parts and reindex from zero.
    X_t = pd.concat(frames1).sort_index()
    y_t = y[X_t.index].reset_index(drop=True)
    X_t = X_t.reset_index(drop=True)
    LR = LogisticRegression()
    LR.fit_vectorised(X_t, y_t, X_t.shape[0], n_iter = 100, lr=2)
    y_hat = LR.predict(datasets[itr1])
    curr_acc = accuracy(y_hat, y[datasets[itr1].index])
    fold_acc.append(curr_acc)
    print("Test fold {}: ".format(itr1+1), curr_acc)
print("Average accuracy: ", np.mean(fold_acc))

print("\n----------Decision Boundary----------")
# Refit on the first two features only so the boundary can be plotted in 2D.
X_small = X.iloc[:, :2]
LR = LogisticRegression()
LR.fit_vectorised(X_small, y, X_small.shape[0], n_iter=200, lr=2)
y_hat = LR.predict(X_small)
print("Accuracy:", accuracy(y_hat, y))
fig = LR.plot_decision_boundary()
fig.savefig("plots/q1_d.png")
"shivam15800@gmail.com"
] | shivam15800@gmail.com |
5aca83650438f40e55367ff75028af33c6310e46 | 7525eb86c374201ffcc6cc767a1bc0e4aeff9168 | /examples/timeout/server.py | e83007c17f7fa98e891798cc7c1d01526521bd44 | [
"MIT"
] | permissive | irmen/Pyro5 | ed386d028ab36c40003e75f5d4dce725151d468c | c9eb525b3b1700bd16405499d874be93f5802144 | refs/heads/master | 2023-08-22T10:16:22.222288 | 2023-06-04T16:37:51 | 2023-06-04T16:37:51 | 83,903,998 | 271 | 34 | MIT | 2023-02-23T18:19:34 | 2017-03-04T15:39:14 | Python | UTF-8 | Python | false | false | 730 | py | import time
from Pyro5.api import expose, locate_ns, Daemon, config
@expose
class TimeoutServer(object):
    """Pyro remote object whose single call blocks for a while before replying."""
    def delay(self, amount):
        """Sleep for *amount* seconds, then report how long was slept."""
        print("sleeping %d" % amount)
        time.sleep(amount)
        print("done.")
        return "slept %d seconds" % amount
config.COMMTIMEOUT = 0  # the server won't be using timeouts
# Register two server objects, each in its own daemon, under the name server.
ns = locate_ns()
daemon = Daemon()
daemon2 = Daemon()
obj = TimeoutServer()
obj2 = TimeoutServer()
uri = daemon.register(obj)
uri2 = daemon2.register(obj2)
ns.register("example.timeout", uri)
ns.register("example.timeout.frozendaemon", uri2)
print("Server ready.")
# Note that we're only starting one of the 2 daemons.
# daemon2 is not started, to simulate connection timeouts.
daemon.requestLoop()
| [
"irmen@razorvine.net"
] | irmen@razorvine.net |
4d1e5d9ad529462c01deed24dec8ef3a4826c2cd | 24b33a4a70223ce22e3583dc6d8196e22da5057d | /examples/analysis/OpticalImageTest.py | a57389c38a5c2f584d5c9814edb65d5a0da88540 | [
"MIT"
] | permissive | will-hossack/Poptics | 4b4fcd1c596d257835ba2e448a05b5904bfa20e5 | 4093876e158eb16421dfd4e57818210b11381429 | refs/heads/master | 2022-11-28T02:45:57.542189 | 2020-08-03T08:47:26 | 2020-08-03T08:47:26 | 265,811,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | """
Example that builds a test grid image and forms it through a simple singlet
with default parameters.
"""
import matplotlib.pyplot as plt
from poptics.analysis import OpticalImage
from poptics.lens import SimpleSinglet
from poptics.tio import tprint
def main():
    """Build a grid test target, image it through a simple singlet, and show
    target and image side by side."""
    # Make a test image of 100 x 100 pixels and size 300 x 300 mm
    oi = OpticalImage(0,100,100,300,300)
    oi.addTestGrid(8,8)        # Add a 8 x 8 test grid
    # Make plot area and plot target on the left
    plt.subplot(1,2,1)
    oi.draw()
    # Simple singlet of focal length 80mm, radius 10 mm at location 200mm
    lens = SimpleSinglet(200,80,10)
    # Get a system image with a -0.2 magnification
    im = oi.getSystemImage(lens,-0.2)
    tprint(repr(oi))
    tprint(repr(im))
    # Plot output image on right.
    plt.subplot(1,2,2)
    im.draw()
    plt.show()

main()
| [
"will.hossack@gmail.com"
] | will.hossack@gmail.com |
788198d754bec62231b412ac94e7031913e5149d | 1705f13c70fa76cc400da2c3e84da6a4fac7f1a2 | /Factory.py | 347e8bfae15cfe67d735555bbd05e57fc369adb6 | [] | no_license | Rur1k/Pattern | 62a3dcfe263451727b4ff3f3f5713c8038e66ee1 | 0ce70189c62f4f5cec13aed3e41f850cc2db0018 | refs/heads/master | 2023-05-04T11:29:58.940033 | 2021-05-26T14:28:37 | 2021-05-26T14:28:37 | 360,499,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 966 | py | class Transport:
def CreateTransport(self):
pass
class AirTransport(Transport):
    """Concrete product: air vehicles."""

    def CreateTransport(self):
        message = 'Create air transport'
        return message
class SeaTransport(Transport):
    """Concrete product: sea vehicles."""

    def CreateTransport(self):
        message = 'Create sea transport'
        return message
class LandTransport(Transport):
    """Concrete product: land vehicles."""

    def CreateTransport(self):
        message = 'Create land transport'
        return message
class Creator:
    """Factory interface: subclasses decide which Transport to build."""

    def create_transport(self, type_):
        """Return a Transport for *type_*; the base class builds nothing."""
        return None
class MyCreator(Creator):
    """Concrete factory: maps a type tag to a Transport subclass."""

    _PRODUCTS = {
        'Air': AirTransport,
        'Sea': SeaTransport,
        'Land': LandTransport,
    }

    def create_transport(self, type_):
        """Instantiate the product registered for *type_*.

        Raises ValueError for an unknown tag (same as the original chain).
        """
        product_cls = self._PRODUCTS.get(type_)
        if product_cls is None:
            raise ValueError()
        return product_cls()
# Exercise the factory: one creator produces each kind of transport.
CreatorTransport = MyCreator()
print(CreatorTransport.create_transport('Air').CreateTransport())
print(CreatorTransport.create_transport('Sea').CreateTransport())
print(CreatorTransport.create_transport('Land').CreateTransport())
| [
"falgerok.master@gmail.com"
] | falgerok.master@gmail.com |
b3a34414131f70d1385b8b9595851efaa7c0a956 | 1f1405d2321b15666c1d88170820b2c4023cfe21 | /list_vs_array_comparisons/test.py | 18b6457b7b41b245786041c40ee90c2bec99f6b9 | [] | no_license | mehmetegeacican/Data-Structures-Python | e3aa5ec32a0c8f6711faaed85ece5fca345aefa1 | cfdb9f6f02d4041e1ab8d5da21afa384fdbc8967 | refs/heads/main | 2023-02-02T22:04:23.797009 | 2020-12-22T09:18:58 | 2020-12-22T09:18:58 | 309,906,454 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,927 | py | from linked_list import *
from linked_list_sort import *
import random
from math import ceil, floor
# Interactive benchmark comparing a plain Python list against a custom
# LinkedList (helpers such as timer/float_round and the array operations
# come from the star-imported linked_list modules).
print("HELLO WELCOME TO LIST VS ARRAY COMPARISONS")
size = input("ENTER THE SIZE OF ARRAY (SIZES OF 100 TO 5000 IS ADVISED)")
llist = LinkedList()
A = [None]*int(size)
# Fill both containers with random integers in [0, len(A)].
for i in range(len(A)):
    A[i] = i + random.randint(0,len(A))
    llist.insertfromTail(random.randint(0,len(A)))
decision = True
# Menu loop: each option times the same operation on both structures.
while(decision):
    print("What do you want to compare?")
    compDec = 0
    print("1-INSERTION\n")
    print("2-DELETION\n")
    print("3-SORTING\n")
    print("4-MERGE SORT\n")
    print("5-FIND INDEX\n")
    print("6-QUIT\n")
    compDec = int(input("Your decision: "))
    if compDec == 1:
        #HEAD INSERTION
        insertToArrayHead(A,random.randint(0,len(A)))
        start = timer()
        llist.insertfromHead(random.randint(0,len(A)))
        end = timer()
        print("the insertion to the Linked List Head took", float_round(1000*(end-start), 5, ceil))
        #TAIL INSERTION
        insertToTailArray(A,random.randint(0,len(A)))
        start2 = timer()
        llist.insertfromTail(random.randint(0,len(A)))
        end2 = timer()
        print("the insertion to the Linked List Tail took", float_round(1000*(end2-start2), 5, ceil))
    elif compDec == 2:
        #HEAD DELETION
        deleteFromHead(A)
        start = timer()
        llist.deleteHead()
        end = timer()
        print("the deletion of the Linked List Head took", float_round(1000*(end-start), 5, ceil))
        #TAIL DELETION
        deleteFromEnd(A)
        start = timer()
        llist.deleteTail()
        end = timer()
        print("the deletion of the Linked List Tail took", float_round(1000*(end-start), 5, ceil))
    elif compDec == 3:
        #SORTING ARRAY
        A = insertionSort(A)
        #SORTING LIST
        llist2 = linked_list_sort(llist)
    elif compDec == 4:
        #MERGE SORT
        A = mergeSort(A)
        startMergeSort = timer()
        llist2 = llist.mergeSort_linked_list(llist.head)
        endMergeSort = timer()
        print("the merge sort of the Linked List Tail took", float_round(1000*(endMergeSort-startMergeSort), 5, ceil))
    elif compDec == 5:
        #SEARCHING AN INDEX IN ARRAY
        i = random.randint(0,len(A))
        start = timer()
        searchArrayIndex(A,i)
        end = timer()
        print("the search in Array took", float_round(1000*(end-start),5,ceil))
        start_list = timer()
        llist.findNodeByIndex(i)
        end_list = timer()
        print("The search in Linked List took",float_round(1000*(end_list-start_list),5,ceil))
    elif compDec == 6:
        decision = False
    elif compDec > 6 or compDec < 1:
        # Fix: the menu and the range check above cover 1..6, but the old
        # message still said "between 1 and 4".
        print("Please Enter a Number between 1 and 6")
    print("*************************************")
| [
"noreply@github.com"
] | mehmetegeacican.noreply@github.com |
961ccea223bfcaa1eec7d10fa51f4b9f665092d8 | b4a90da211c58228fdb66ed7d25d481326842852 | /testing/unittests/test-generators.py | 7e811b1ab8a5fa25f66ea004b105bff40bdca330 | [] | no_license | like2000/PyHEADTAIL | 6c943d1a9fe72ab04a87568e96c8d2f8c40f2ba2 | 3ecfc9d6325381450a5c808abf834e931f8e5933 | refs/heads/master | 2020-05-29T18:56:37.479678 | 2019-06-06T10:01:57 | 2019-06-06T10:01:57 | 16,484,363 | 3 | 2 | null | 2014-03-18T10:24:58 | 2014-02-03T16:05:59 | Python | UTF-8 | Python | false | false | 3,777 | py | #!/usr/bin/python
from __future__ import division
import sys, os
BIN = os.path.dirname(__file__) # ./PyHEADTAIL/testing/unittests/
BIN = os.path.abspath( BIN ) # absolute path to unittests
BIN = os.path.dirname( BIN ) # ../ --> ./PyHEADTAIL/testing/
BIN = os.path.dirname( BIN ) # ../ --> ./PyHEADTAIL/
BIN = os.path.dirname( BIN ) # ../ --> ./
sys.path.append(BIN)
import unittest
import numpy as np
from scipy.constants import c, e, m_p
from PyHEADTAIL.trackers.rf_bucket import RFBucket
from PyHEADTAIL.trackers.transverse_tracking import TransverseMap
from PyHEADTAIL.trackers.simple_long_tracking import LinearMap, RFSystems
from PyHEADTAIL.particles.generators import (
ParticleGenerator,
Gaussian6D
)
class TestParticleGenerators(unittest.TestCase):
    """Unit tests for PyHEADTAIL's particle generators (Gaussian6D)."""

    def setUp(self):
        # general simulation parameters
        self.macroparticlenumber = 10000
        self.n_segments = 1

        # machine parameters
        self.C = 157.
        self.inj_alpha_x = 0
        self.inj_alpha_y = 0
        self.inj_beta_x = 5.9  # in [m]
        self.inj_beta_y = 5.7  # in [m]
        self.Qx = 5.1
        self.Qy = 6.1
        self.gamma_tr = 4.05
        self.alpha_array = [self.gamma_tr**-2]
        self.V_rf = 8e3  # in [V]
        self.harmonic = 1
        self.phi_offset = 0  # measured from aligned focussing phase (0 or pi)

        # beam parameters
        self.Ekin = 1.4e9  # in [eV]
        self.intensity = 1.684e12
        self.epsn_x = 2.5e-6  # in [m*rad]
        self.epsn_y = 2.5e-6  # in [m*rad]
        self.epsn_z = 1.2  # 4pi*sig_z*sig_dp (*p0/e) in [eVs]

        # calculations: derive relativistic and longitudinal quantities
        # from the parameters above.
        self.gamma = 1 + e * self.Ekin / (m_p * c**2)
        self.beta = np.sqrt(1 - self.gamma**-2)
        self.betagamma = np.sqrt(self.gamma**2 - 1)
        self.eta = self.alpha_array[0] - self.gamma**-2
        # Below transition (eta < 0) the stable RF phase flips.
        if self.eta < 0:
            self.phi_offset = np.pi - self.phi_offset
        self.Etot = self.gamma * m_p * c**2 / e
        self.p0 = np.sqrt(self.gamma**2 - 1) * m_p * c
        self.Qs = np.sqrt(np.abs(self.eta) * self.V_rf /
                          (2 * np.pi * self.beta**2 * self.Etot))
        self.beta_z = np.abs(self.eta) * self.C / (2 * np.pi * self.Qs)
        self.turn_period = self.C / (self.beta * c)
        # RMS beam sizes and divergences from the normalized emittances.
        self.sigma_x = np.sqrt(self.epsn_x * self.inj_beta_x/ self.betagamma)
        self.sigma_y = np.sqrt(self.epsn_y * self.inj_beta_y/ self.betagamma)
        self.sigma_z = np.sqrt(self.epsn_z * self.beta_z /
                               (4 * np.pi * self.p0/e))
        self.sigma_xp = self.sigma_x / self.inj_beta_x
        self.sigma_yp = self.sigma_y / self.inj_beta_y
        self.sigma_dp = self.sigma_z / self.beta_z

        # BETATRON
        # Loop on number of segments and create the TransverseSegmentMap
        # for each segment.
        self.s = np.arange(0, self.n_segments + 1) * self.C / self.n_segments
        self.alpha_x = self.inj_alpha_x * np.ones(self.n_segments)
        self.beta_x = self.inj_beta_x * np.ones(self.n_segments)
        self.D_x = np.zeros(self.n_segments)
        self.alpha_y = self.inj_alpha_y * np.ones(self.n_segments)
        self.beta_y = self.inj_beta_y * np.ones(self.n_segments)
        self.D_y = np.zeros(self.n_segments)

    def create_Gaussian6D(self):
        # Build (but do not yet generate) a 6D Gaussian bunch generator
        # using the beam parameters prepared in setUp.
        return Gaussian6D(
            self.macroparticlenumber, self.intensity, e, m_p,
            self.C, self.gamma,
            self.sigma_x, self.sigma_xp,
            self.sigma_y, self.sigma_yp,
            self.sigma_z, self.sigma_dp
        )

    def test_Gaussian6D_macroparticlenumber(self):
        # The generated bunch must contain exactly the requested number of
        # macro-particles.
        bunch = self.create_Gaussian6D().generate()
        self.assertTrue(bunch.macroparticlenumber == self.macroparticlenumber)


if __name__ == '__main__':
    # Run the unittest CLI when executed as a script.
    unittest.main()
| [
"oeftiger@cern.ch"
] | oeftiger@cern.ch |
99071ef7a104489568cf83e9b6a02c2b55966809 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2021_07_01/aio/operations/_iot_hub_resource_operations.py | 0a935f1474514c5eabfe0dce67fd6d5241828a2a | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 128,318 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._iot_hub_resource_operations import (
build_check_name_availability_request,
build_create_event_hub_consumer_group_request,
build_create_or_update_request,
build_delete_event_hub_consumer_group_request,
build_delete_request,
build_export_devices_request,
build_get_endpoint_health_request,
build_get_event_hub_consumer_group_request,
build_get_job_request,
build_get_keys_for_key_name_request,
build_get_quota_metrics_request,
build_get_request,
build_get_stats_request,
build_get_valid_skus_request,
build_import_devices_request,
build_list_by_resource_group_request,
build_list_by_subscription_request,
build_list_event_hub_consumer_groups_request,
build_list_jobs_request,
build_list_keys_request,
build_test_all_routes_request,
build_test_route_request,
build_update_request,
)
# ``Literal`` joined the stdlib ``typing`` module in Python 3.8; fall back
# to ``typing_extensions`` on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
else:
    from typing_extensions import Literal  # type: ignore  # pylint: disable=ungrouped-imports
T = TypeVar("T")
# Type of the optional ``cls`` response-customization callback accepted by
# every operation in this client.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IotHubResourceOperations: # pylint: disable=too-many-public-methods
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.iothub.v2021_07_01.aio.IotHubClient`'s
:attr:`iot_hub_resource` attribute.
"""
models = _models
    def __init__(self, *args, **kwargs) -> None:
        # AutoRest-generated wiring: the client injects its pipeline client,
        # config, serializer and deserializer either positionally or by
        # keyword -- do not instantiate this class directly.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace_async
    async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.IotHubDescription:
        """Get the non-security related metadata of an IoT hub.

        Get the non-security related metadata of an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IotHubDescription or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # NOTE: AutoRest-generated; edits here are lost on regeneration.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)

        # Build the GET request from this operation's URL template.
        request = build_get_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize("IotHubDescription", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
    }
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        resource_name: str,
        iot_hub_description: Union[_models.IotHubDescription, IO],
        if_match: Optional[str] = None,
        **kwargs: Any
    ) -> _models.IotHubDescription:
        """Send the initial PUT of the create-or-update long-running
        operation and deserialize the immediate 200/201 response; the public
        ``begin_create_or_update`` wraps this call in an LRO poller.
        (AutoRest-generated; edits here are lost on regeneration.)
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _json = None
        _content = None
        # The body may be a raw stream (sent as-is) or a model (serialized
        # to JSON).
        if isinstance(iot_hub_description, (IO, bytes)):
            _content = iot_hub_description
        else:
            _json = self._serialize.body(iot_hub_description, "IotHubDescription")

        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            if_match=if_match,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._create_or_update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize("IotHubDescription", pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize("IotHubDescription", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    _create_or_update_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
    }
@overload
async def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_description: _models.IotHubDescription,
    if_match: Optional[str] = None,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Create or update the metadata of an IoT hub.

    Create or update the metadata of an Iot hub. The usual pattern to modify a property is to
    retrieve the IoT hub metadata and security metadata, and then combine them with the modified
    values in a new body to update the IoT hub. If certain properties are missing in the JSON,
    updating IoT Hub may cause these values to fallback to default, which may lead to unexpected
    behavior.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :param iot_hub_description: The IoT hub metadata and security metadata. Required.
    :type iot_hub_description: ~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription
    :param if_match: ETag of the IoT Hub. Do not specify for creating a brand new IoT Hub. Required
     to update an existing IoT Hub. Default value is None.
    :type if_match: str
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Typing-only @overload stub for the model-object body; the executable
    # implementation is the undecorated begin_create_or_update defined below.
@overload
async def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_description: IO,
    if_match: Optional[str] = None,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Create or update the metadata of an IoT hub.

    Create or update the metadata of an Iot hub. The usual pattern to modify a property is to
    retrieve the IoT hub metadata and security metadata, and then combine them with the modified
    values in a new body to update the IoT hub. If certain properties are missing in the JSON,
    updating IoT Hub may cause these values to fallback to default, which may lead to unexpected
    behavior.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :param iot_hub_description: The IoT hub metadata and security metadata. Required.
    :type iot_hub_description: IO
    :param if_match: ETag of the IoT Hub. Do not specify for creating a brand new IoT Hub. Required
     to update an existing IoT Hub. Default value is None.
    :type if_match: str
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Typing-only @overload stub for a raw (IO/bytes) body; the executable
    # implementation is the undecorated begin_create_or_update defined below.
@distributed_trace_async
async def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_description: Union[_models.IotHubDescription, IO],
    if_match: Optional[str] = None,
    **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Create or update the metadata of an IoT hub.

    The usual pattern to modify a property is to retrieve the IoT hub metadata and security
    metadata, merge in the modified values, and submit the combined body. Properties omitted
    from the JSON may fall back to their defaults, which can lead to unexpected behavior.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :param iot_hub_description: The IoT hub metadata and security metadata. Is either a
     IotHubDescription type or a IO type. Required.
    :type iot_hub_description: ~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription or IO
    :param if_match: ETag of the IoT Hub. Do not specify for creating a brand new IoT Hub. Required
     to update an existing IoT Hub. Default value is None.
    :type if_match: str
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)
    polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
    retry_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)

    if cont_token is None:
        # Fresh poller: fire the initial request and keep the raw pipeline response.
        raw_result = await self._create_or_update_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            iot_hub_description=iot_hub_description,
            if_match=if_match,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x, y, z: x,
            headers=_headers,
            params=_params,
            **kwargs
        )
        kwargs.pop("error_map", None)

    def deserialize_final_state(pipeline_response):
        # Shared final-state deserializer for both fresh and resumed pollers.
        result = self._deserialize("IotHubDescription", pipeline_response)
        return cls(pipeline_response, result, {}) if cls else result

    if polling is True:
        polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(retry_delay, **kwargs))
    elif polling is False:
        polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
    else:
        # Caller supplied a pre-initialized polling strategy.
        polling_method = polling

    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=deserialize_final_state,
        )
    return AsyncLROPoller(self._client, raw_result, deserialize_final_state, polling_method)  # type: ignore

begin_create_or_update.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
}
async def _update_initial(
    self, resource_group_name: str, resource_name: str, iot_hub_tags: Union[_models.TagsResource, IO], **kwargs: Any
) -> _models.IotHubDescription:
    """Initial tags-update call backing :meth:`begin_update`.

    Serializes ``iot_hub_tags`` (model or raw IO/bytes), issues the update request through
    the client pipeline, and deserializes the resulting IotHubDescription.

    :param resource_group_name: Resource group identifier. Required.
    :type resource_group_name: str
    :param resource_name: Name of iot hub to update. Required.
    :type resource_name: str
    :param iot_hub_tags: Updated tag information. TagsResource model or raw IO body. Required.
    :type iot_hub_tags: ~azure.mgmt.iothub.v2021_07_01.models.TagsResource or IO
    :return: IotHubDescription or the result of cls(response)
    :rtype: ~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw IO/bytes bodies pass through untouched; models are serialized to JSON.
    if isinstance(iot_hub_tags, (IO, bytes)):
        _content = iot_hub_tags
    else:
        _json = self._serialize.body(iot_hub_tags, "TagsResource")

    request = build_update_request(
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._update_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # Consistency fix: deserialize the service's ErrorDetails payload into the raised
        # error, matching every sibling operation in this client (e.g. the
        # create-or-update initial call); previously the model was dropped here.
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize("IotHubDescription", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_update_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
}
@overload
async def begin_update(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_tags: _models.TagsResource,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Update an existing IoT Hubs tags.

    Update an existing IoT Hub tags. to update other fields use the CreateOrUpdate method.

    :param resource_group_name: Resource group identifier. Required.
    :type resource_group_name: str
    :param resource_name: Name of iot hub to update. Required.
    :type resource_name: str
    :param iot_hub_tags: Updated tag information to set into the iot hub instance. Required.
    :type iot_hub_tags: ~azure.mgmt.iothub.v2021_07_01.models.TagsResource
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Typing-only @overload stub for the TagsResource model body; the executable
    # implementation is the undecorated begin_update defined below.
@overload
async def begin_update(
    self,
    resource_group_name: str,
    resource_name: str,
    iot_hub_tags: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Update an existing IoT Hubs tags.

    Update an existing IoT Hub tags. to update other fields use the CreateOrUpdate method.

    :param resource_group_name: Resource group identifier. Required.
    :type resource_group_name: str
    :param resource_name: Name of iot hub to update. Required.
    :type resource_name: str
    :param iot_hub_tags: Updated tag information to set into the iot hub instance. Required.
    :type iot_hub_tags: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Typing-only @overload stub for a raw (IO/bytes) body; the executable
    # implementation is the undecorated begin_update defined below.
@distributed_trace_async
async def begin_update(
    self, resource_group_name: str, resource_name: str, iot_hub_tags: Union[_models.TagsResource, IO], **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Update an existing IoT Hubs tags.

    Update an existing IoT Hub tags. to update other fields use the CreateOrUpdate method.

    :param resource_group_name: Resource group identifier. Required.
    :type resource_group_name: str
    :param resource_name: Name of iot hub to update. Required.
    :type resource_name: str
    :param iot_hub_tags: Updated tag information to set into the iot hub instance. Is either a
     TagsResource type or a IO type. Required.
    :type iot_hub_tags: ~azure.mgmt.iothub.v2021_07_01.models.TagsResource or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)
    polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
    retry_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)

    if cont_token is None:
        # Fresh poller: fire the initial request and keep the raw pipeline response.
        raw_result = await self._update_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            iot_hub_tags=iot_hub_tags,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x, y, z: x,
            headers=_headers,
            params=_params,
            **kwargs
        )
        kwargs.pop("error_map", None)

    def deserialize_final_state(pipeline_response):
        # Shared final-state deserializer for both fresh and resumed pollers.
        result = self._deserialize("IotHubDescription", pipeline_response)
        return cls(pipeline_response, result, {}) if cls else result

    if polling is True:
        polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(retry_delay, **kwargs))
    elif polling is False:
        polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
    else:
        # Caller supplied a pre-initialized polling strategy.
        polling_method = polling

    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=deserialize_final_state,
        )
    return AsyncLROPoller(self._client, raw_result, deserialize_final_state, polling_method)  # type: ignore

begin_update.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
}
async def _delete_initial(
    self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> Union[_models.IotHubDescription, _models.ErrorDetails]:
    """Initial delete call backing :meth:`begin_delete`.

    Issues the delete request and deserializes per status code: 200/202 yield an
    IotHubDescription, 404 yields an ErrorDetails model, 204 yields None.
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[Union[_models.IotHubDescription, _models.ErrorDetails]] = kwargs.pop("cls", None)

    request = _convert_request(
        build_delete_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._delete_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
    )
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200, 202, 204, 404]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # 204 has no body to deserialize, so it falls through as None.
    model_by_status = {200: "IotHubDescription", 202: "IotHubDescription", 404: "ErrorDetails"}
    deserialized = None
    model_name = model_by_status.get(response.status_code)
    if model_name is not None:
        deserialized = self._deserialize(model_name, pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_delete_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
}
@distributed_trace_async
async def begin_delete(
    self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> AsyncLROPoller[_models.IotHubDescription]:
    """Delete an IoT hub.

    Delete an IoT hub.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either IotHubDescription or An instance of
     AsyncLROPoller that returns either ErrorDetails or the result of cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription] or
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2021_07_01.models.ErrorDetails]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[_models.IotHubDescription] = kwargs.pop("cls", None)
    polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
    retry_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)

    if cont_token is None:
        # Fresh poller: fire the initial delete and keep the raw pipeline response.
        raw_result = await self._delete_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            api_version=api_version,
            cls=lambda x, y, z: x,
            headers=_headers,
            params=_params,
            **kwargs
        )
        kwargs.pop("error_map", None)

    def deserialize_final_state(pipeline_response):
        # Shared final-state deserializer for both fresh and resumed pollers.
        result = self._deserialize("IotHubDescription", pipeline_response)
        return cls(pipeline_response, result, {}) if cls else result

    if polling is True:
        polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(retry_delay, **kwargs))
    elif polling is False:
        polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
    else:
        # Caller supplied a pre-initialized polling strategy.
        polling_method = polling

    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=deserialize_final_state,
        )
    return AsyncLROPoller(self._client, raw_result, deserialize_final_state, polling_method)  # type: ignore

begin_delete.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}"
}
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.IotHubDescription"]:
    """Get all the IoT hubs in a subscription.

    Get all the IoT hubs in a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IotHubDescription or the result of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[_models.IotHubDescriptionListResult] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # Builds either the first-page request (from operation metadata) or a
        # follow-up request for the service-provided next_link URL.
        if not next_link:

            request = build_list_by_subscription_request(
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_by_subscription.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            # Override whatever api-version the next_link carried with the client's.
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # Paging continuations are always GETs regardless of the original verb.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Splits one page into (continuation token, async iterable of items).
        deserialized = self._deserialize("IotHubDescriptionListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetches one page through the pipeline and maps HTTP errors to exceptions.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Devices/IotHubs"}
@distributed_trace
def list_by_resource_group(
    self, resource_group_name: str, **kwargs: Any
) -> AsyncIterable["_models.IotHubDescription"]:
    """Get all the IoT hubs in a resource group.

    Get all the IoT hubs in a resource group.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IotHubDescription or the result of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.IotHubDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[_models.IotHubDescriptionListResult] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # Builds either the first-page request (from operation metadata) or a
        # follow-up request for the service-provided next_link URL.
        if not next_link:

            request = build_list_by_resource_group_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_by_resource_group.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            # Override whatever api-version the next_link carried with the client's.
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # Paging continuations are always GETs regardless of the original verb.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Splits one page into (continuation token, async iterable of items).
        deserialized = self._deserialize("IotHubDescriptionListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetches one page through the pipeline and maps HTTP errors to exceptions.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_by_resource_group.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs"
}
@distributed_trace_async
async def get_stats(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.RegistryStatistics:
    """Get the statistics from an IoT hub.

    Get the statistics from an IoT hub.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: RegistryStatistics or the result of cls(response)
    :rtype: ~azure.mgmt.iothub.v2021_07_01.models.RegistryStatistics
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[_models.RegistryStatistics] = kwargs.pop("cls", None)

    stats_request = _convert_request(
        build_get_stats_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get_stats.metadata["url"],
            headers=_headers,
            params=_params,
        )
    )
    stats_request.url = self._client.format_url(stats_request.url)

    pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
        stats_request, stream=False, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize("RegistryStatistics", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_stats.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubStats"
}
@distributed_trace
def get_valid_skus(
    self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> AsyncIterable["_models.IotHubSkuDescription"]:
    """Get the list of valid SKUs for an IoT hub.

    Get the list of valid SKUs for an IoT hub.

    :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
    :type resource_group_name: str
    :param resource_name: The name of the IoT hub. Required.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either IotHubSkuDescription or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.IotHubSkuDescription]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
    cls: ClsType[_models.IotHubSkuDescriptionListResult] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # Builds either the first-page request (from operation metadata) or a
        # follow-up request for the service-provided next_link URL.
        if not next_link:

            request = build_get_valid_skus_request(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.get_valid_skus.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            # Override whatever api-version the next_link carried with the client's.
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # Paging continuations are always GETs regardless of the original verb.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Splits one page into (continuation token, async iterable of items).
        deserialized = self._deserialize("IotHubSkuDescriptionListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetches one page through the pipeline and maps HTTP errors to exceptions.
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
get_valid_skus.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/skus"
}
    @distributed_trace
    def list_event_hub_consumer_groups(
        self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.EventHubConsumerGroupInfo"]:
        """Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an
        IoT hub.

        Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an
        IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint. Required.
        :type event_hub_endpoint_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either EventHubConsumerGroupInfo or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Per-call header/query overrides; "api-version" may be supplied through params.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.EventHubConsumerGroupsListResult] = kwargs.pop("cls", None)
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # Build the initial page request, or a follow-up request from the service-returned next_link.
            if not next_link:
                request = build_list_event_hub_consumer_groups_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    event_hub_endpoint_name=event_hub_endpoint_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_event_hub_consumer_groups.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page: returns (continuation link or None, async iterable of items).
            deserialized = self._deserialize("EventHubConsumerGroupsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page, raising HttpResponseError for non-200 responses.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    list_event_hub_consumer_groups.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups"
    }
    @distributed_trace_async
    async def get_event_hub_consumer_group(
        self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, name: str, **kwargs: Any
    ) -> _models.EventHubConsumerGroupInfo:
        """Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub.

        Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
         Required.
        :type event_hub_endpoint_name: str
        :param name: The name of the consumer group to retrieve. Required.
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EventHubConsumerGroupInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.EventHubConsumerGroupInfo] = kwargs.pop("cls", None)
        # Build the GET request from the operation's URL template, then normalise it
        # for the shared pipeline.
        request = build_get_event_hub_consumer_group_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            event_hub_endpoint_name=event_hub_endpoint_name,
            name=name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get_event_hub_consumer_group.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("EventHubConsumerGroupInfo", pipeline_response)
        if cls:
            # Let a caller-supplied callback shape the final return value.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_event_hub_consumer_group.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}"
    }
    # Typing-only overload: modelled request body (serialized to JSON at call time).
    @overload
    async def create_event_hub_consumer_group(
        self,
        resource_group_name: str,
        resource_name: str,
        event_hub_endpoint_name: str,
        name: str,
        consumer_group_body: _models.EventHubConsumerGroupBodyDescription,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EventHubConsumerGroupInfo:
        """Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
         Required.
        :type event_hub_endpoint_name: str
        :param name: The name of the consumer group to add. Required.
        :type name: str
        :param consumer_group_body: The consumer group to add. Required.
        :type consumer_group_body:
         ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupBodyDescription
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EventHubConsumerGroupInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # Typing-only overload: raw IO request body passed through unmodified.
    @overload
    async def create_event_hub_consumer_group(
        self,
        resource_group_name: str,
        resource_name: str,
        event_hub_endpoint_name: str,
        name: str,
        consumer_group_body: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EventHubConsumerGroupInfo:
        """Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
         Required.
        :type event_hub_endpoint_name: str
        :param name: The name of the consumer group to add. Required.
        :type name: str
        :param consumer_group_body: The consumer group to add. Required.
        :type consumer_group_body: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EventHubConsumerGroupInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def create_event_hub_consumer_group(
        self,
        resource_group_name: str,
        resource_name: str,
        event_hub_endpoint_name: str,
        name: str,
        consumer_group_body: Union[_models.EventHubConsumerGroupBodyDescription, IO],
        **kwargs: Any
    ) -> _models.EventHubConsumerGroupInfo:
        """Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
         Required.
        :type event_hub_endpoint_name: str
        :param name: The name of the consumer group to add. Required.
        :type name: str
        :param consumer_group_body: The consumer group to add. Is either a
         EventHubConsumerGroupBodyDescription type or a IO type. Required.
        :type consumer_group_body:
         ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupBodyDescription or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EventHubConsumerGroupInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.EventHubConsumerGroupInfo] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        # Raw IO/bytes bodies are passed through unchanged; model instances are
        # serialized to JSON. Exactly one of _json/_content ends up non-None.
        _json = None
        _content = None
        if isinstance(consumer_group_body, (IO, bytes)):
            _content = consumer_group_body
        else:
            _json = self._serialize.body(consumer_group_body, "EventHubConsumerGroupBodyDescription")
        request = build_create_event_hub_consumer_group_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            event_hub_endpoint_name=event_hub_endpoint_name,
            name=name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.create_event_hub_consumer_group.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("EventHubConsumerGroupInfo", pipeline_response)
        if cls:
            # Let a caller-supplied callback shape the final return value.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_event_hub_consumer_group.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}"
    }
@distributed_trace_async
async def delete_event_hub_consumer_group( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, name: str, **kwargs: Any
) -> None:
"""Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.
Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub. Required.
:type resource_group_name: str
:param resource_name: The name of the IoT hub. Required.
:type resource_name: str
:param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
Required.
:type event_hub_endpoint_name: str
:param name: The name of the consumer group to delete. Required.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_event_hub_consumer_group_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
event_hub_endpoint_name=event_hub_endpoint_name,
name=name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete_event_hub_consumer_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_event_hub_consumer_group.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}"
}
    @distributed_trace
    def list_jobs(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.JobResponse"]:
        """Get a list of all the jobs in an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        Get a list of all the jobs in an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either JobResponse or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.JobResponse]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Per-call header/query overrides; "api-version" may be supplied through params.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.JobResponseListResult] = kwargs.pop("cls", None)
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # Build the initial page request, or a follow-up request from the service-returned next_link.
            if not next_link:
                request = build_list_jobs_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_jobs.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page: returns (continuation link or None, async iterable of items).
            deserialized = self._deserialize("JobResponseListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page, raising HttpResponseError for non-200 responses.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    list_jobs.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs"
    }
@distributed_trace_async
async def get_job(
self, resource_group_name: str, resource_name: str, job_id: str, **kwargs: Any
) -> _models.JobResponse:
"""Get the details of a job from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
Get the details of a job from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
:param resource_group_name: The name of the resource group that contains the IoT hub. Required.
:type resource_group_name: str
:param resource_name: The name of the IoT hub. Required.
:type resource_name: str
:param job_id: The job identifier. Required.
:type job_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobResponse or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
cls: ClsType[_models.JobResponse] = kwargs.pop("cls", None)
request = build_get_job_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
job_id=job_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_job.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("JobResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_job.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs/{jobId}"
}
    @distributed_trace
    def get_quota_metrics(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.IotHubQuotaMetricInfo"]:
        """Get the quota metrics for an IoT hub.

        Get the quota metrics for an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either IotHubQuotaMetricInfo or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.IotHubQuotaMetricInfo]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Per-call header/query overrides; "api-version" may be supplied through params.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.IotHubQuotaMetricInfoListResult] = kwargs.pop("cls", None)
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # Build the initial page request, or a follow-up request from the service-returned next_link.
            if not next_link:
                request = build_get_quota_metrics_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.get_quota_metrics.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page: returns (continuation link or None, async iterable of items).
            deserialized = self._deserialize("IotHubQuotaMetricInfoListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page, raising HttpResponseError for non-200 responses.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_quota_metrics.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/quotaMetrics"
    }
    @distributed_trace
    def get_endpoint_health(
        self, resource_group_name: str, iot_hub_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.EndpointHealthData"]:
        """Get the health for routing endpoints.

        Get the health for routing endpoints.

        :param resource_group_name: Required.
        :type resource_group_name: str
        :param iot_hub_name: Required.
        :type iot_hub_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either EndpointHealthData or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.EndpointHealthData]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Per-call header/query overrides; "api-version" may be supplied through params.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.EndpointHealthDataListResult] = kwargs.pop("cls", None)
        # Map well-known error status codes to azure-core exception types; callers
        # can extend or override the mapping through the "error_map" keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # Build the initial page request, or a follow-up request from the service-returned next_link.
            if not next_link:
                request = build_get_endpoint_health_request(
                    resource_group_name=resource_group_name,
                    iot_hub_name=iot_hub_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.get_endpoint_health.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page: returns (continuation link or None, async iterable of items).
            deserialized = self._deserialize("EndpointHealthDataListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page, raising HttpResponseError for non-200 responses.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_endpoint_health.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routingEndpointsHealth"
    }
    # Typing-only overload: modelled request body (serialized to JSON at call time).
    @overload
    async def check_name_availability(
        self, operation_inputs: _models.OperationInputs, *, content_type: str = "application/json", **kwargs: Any
    ) -> _models.IotHubNameAvailabilityInfo:
        """Check if an IoT hub name is available.

        Check if an IoT hub name is available.

        :param operation_inputs: Set the name parameter in the OperationInputs structure to the name of
         the IoT hub to check. Required.
        :type operation_inputs: ~azure.mgmt.iothub.v2021_07_01.models.OperationInputs
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IotHubNameAvailabilityInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.IotHubNameAvailabilityInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # Typing-only overload: raw IO request body passed through unmodified.
    @overload
    async def check_name_availability(
        self, operation_inputs: IO, *, content_type: str = "application/json", **kwargs: Any
    ) -> _models.IotHubNameAvailabilityInfo:
        """Check if an IoT hub name is available.

        Check if an IoT hub name is available.

        :param operation_inputs: Set the name parameter in the OperationInputs structure to the name of
         the IoT hub to check. Required.
        :type operation_inputs: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IotHubNameAvailabilityInfo or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.IotHubNameAvailabilityInfo
        :raises ~azure.core.exceptions.HttpResponseError:
        """
@distributed_trace_async
async def check_name_availability(
self, operation_inputs: Union[_models.OperationInputs, IO], **kwargs: Any
) -> _models.IotHubNameAvailabilityInfo:
"""Check if an IoT hub name is available.
Check if an IoT hub name is available.
:param operation_inputs: Set the name parameter in the OperationInputs structure to the name of
the IoT hub to check. Is either a OperationInputs type or a IO type. Required.
:type operation_inputs: ~azure.mgmt.iothub.v2021_07_01.models.OperationInputs or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IotHubNameAvailabilityInfo or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2021_07_01.models.IotHubNameAvailabilityInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.IotHubNameAvailabilityInfo] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(operation_inputs, (IO, bytes)):
_content = operation_inputs
else:
_json = self._serialize.body(operation_inputs, "OperationInputs")
request = build_check_name_availability_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("IotHubNameAvailabilityInfo", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {
"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Devices/checkNameAvailability"
}
    @overload
    async def test_all_routes(
        self,
        iot_hub_name: str,
        resource_group_name: str,
        input: _models.TestAllRoutesInput,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TestAllRoutesResult:
        """Test all routes.
        Test all routes configured in this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Input for testing all routes. Required.
        :type input: ~azure.mgmt.iothub.v2021_07_01.models.TestAllRoutesInput
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestAllRoutesResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestAllRoutesResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def test_all_routes(
        self,
        iot_hub_name: str,
        resource_group_name: str,
        input: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TestAllRoutesResult:
        """Test all routes.
        Test all routes configured in this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Input for testing all routes. Required.
        :type input: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestAllRoutesResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestAllRoutesResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def test_all_routes(
        self, iot_hub_name: str, resource_group_name: str, input: Union[_models.TestAllRoutesInput, IO], **kwargs: Any
    ) -> _models.TestAllRoutesResult:
        """Test all routes.
        Test all routes configured in this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Input for testing all routes. Is either a TestAllRoutesInput type or a IO type.
         Required.
        :type input: ~azure.mgmt.iothub.v2021_07_01.models.TestAllRoutesInput or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestAllRoutesResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestAllRoutesResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> exception mapping; caller-supplied entries take precedence.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        # Pop operation kwargs before **kwargs is forwarded to the pipeline.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.TestAllRoutesResult] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Streams/bytes are sent verbatim; models are serialized to JSON.
        if isinstance(input, (IO, bytes)):
            _content = input
        else:
            _json = self._serialize.body(input, "TestAllRoutesInput")
        request = build_test_all_routes_request(
            iot_hub_name=iot_hub_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.test_all_routes.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("TestAllRoutesResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    test_all_routes.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routing/routes/$testall"
    }
    @overload
    async def test_route(
        self,
        iot_hub_name: str,
        resource_group_name: str,
        input: _models.TestRouteInput,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TestRouteResult:
        """Test the new route.
        Test the new route for this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Route that needs to be tested. Required.
        :type input: ~azure.mgmt.iothub.v2021_07_01.models.TestRouteInput
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestRouteResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestRouteResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def test_route(
        self,
        iot_hub_name: str,
        resource_group_name: str,
        input: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TestRouteResult:
        """Test the new route.
        Test the new route for this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Route that needs to be tested. Required.
        :type input: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestRouteResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestRouteResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def test_route(
        self, iot_hub_name: str, resource_group_name: str, input: Union[_models.TestRouteInput, IO], **kwargs: Any
    ) -> _models.TestRouteResult:
        """Test the new route.
        Test the new route for this Iot Hub.
        :param iot_hub_name: IotHub to be tested. Required.
        :type iot_hub_name: str
        :param resource_group_name: resource group which Iot Hub belongs to. Required.
        :type resource_group_name: str
        :param input: Route that needs to be tested. Is either a TestRouteInput type or a IO type.
         Required.
        :type input: ~azure.mgmt.iothub.v2021_07_01.models.TestRouteInput or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TestRouteResult or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.TestRouteResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> exception mapping; caller-supplied entries take precedence.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        # Pop operation kwargs before **kwargs is forwarded to the pipeline.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.TestRouteResult] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Streams/bytes are sent verbatim; models are serialized to JSON.
        if isinstance(input, (IO, bytes)):
            _content = input
        else:
            _json = self._serialize.body(input, "TestRouteInput")
        request = build_test_route_request(
            iot_hub_name=iot_hub_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.test_route.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("TestRouteResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    test_route.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routing/routes/$testnew"
    }
    @distributed_trace
    def list_keys(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.SharedAccessSignatureAuthorizationRule"]:
        """Get the security metadata for an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
        Get the security metadata for an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SharedAccessSignatureAuthorizationRule or the
         result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2021_07_01.models.SharedAccessSignatureAuthorizationRule]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Pop operation-level kwargs before **kwargs is forwarded to the pipeline.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        cls: ClsType[_models.SharedAccessSignatureAuthorizationRuleListResult] = kwargs.pop("cls", None)
        # Status-code -> exception mapping; caller-supplied entries override.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        def prepare_request(next_link=None):
            # First page: build the request from this operation's metadata URL.
            if not next_link:
                request = build_list_keys_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_keys.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                # Re-encode the next_link's query string and force the client's
                # api-version, since the continuation URL may carry a different one.
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # Continuation pages are always fetched with GET.
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize("SharedAccessSignatureAuthorizationRuleListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem) # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Run one paged request through the pipeline and validate the status.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    list_keys.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/listkeys"
    }
@distributed_trace_async
async def get_keys_for_key_name(
self, resource_group_name: str, resource_name: str, key_name: str, **kwargs: Any
) -> _models.SharedAccessSignatureAuthorizationRule:
"""Get a shared access policy by name from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
Get a shared access policy by name from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
:param resource_group_name: The name of the resource group that contains the IoT hub. Required.
:type resource_group_name: str
:param resource_name: The name of the IoT hub. Required.
:type resource_name: str
:param key_name: The name of the shared access policy. Required.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SharedAccessSignatureAuthorizationRule or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2021_07_01.models.SharedAccessSignatureAuthorizationRule
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
cls: ClsType[_models.SharedAccessSignatureAuthorizationRule] = kwargs.pop("cls", None)
request = build_get_keys_for_key_name_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
key_name=key_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_keys_for_key_name.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("SharedAccessSignatureAuthorizationRule", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_keys_for_key_name.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubKeys/{keyName}/listkeys"
}
    @overload
    async def export_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        export_devices_parameters: _models.ExportDevicesRequest,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.JobResponse:
        """Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param export_devices_parameters: The parameters that specify the export devices operation.
         Required.
        :type export_devices_parameters: ~azure.mgmt.iothub.v2021_07_01.models.ExportDevicesRequest
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def export_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        export_devices_parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.JobResponse:
        """Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param export_devices_parameters: The parameters that specify the export devices operation.
         Required.
        :type export_devices_parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def export_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        export_devices_parameters: Union[_models.ExportDevicesRequest, IO],
        **kwargs: Any
    ) -> _models.JobResponse:
        """Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param export_devices_parameters: The parameters that specify the export devices operation. Is
         either a ExportDevicesRequest type or a IO type. Required.
        :type export_devices_parameters: ~azure.mgmt.iothub.v2021_07_01.models.ExportDevicesRequest or
         IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> exception mapping; caller-supplied entries take precedence.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        # Pop operation kwargs before **kwargs is forwarded to the pipeline.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.JobResponse] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Streams/bytes are sent verbatim; models are serialized to JSON.
        if isinstance(export_devices_parameters, (IO, bytes)):
            _content = export_devices_parameters
        else:
            _json = self._serialize.body(export_devices_parameters, "ExportDevicesRequest")
        request = build_export_devices_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.export_devices.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("JobResponse", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    export_devices.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/exportDevices"
    }
    @overload
    async def import_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        import_devices_parameters: _models.ImportDevicesRequest,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.JobResponse:
        """Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param import_devices_parameters: The parameters that specify the import devices operation.
         Required.
        :type import_devices_parameters: ~azure.mgmt.iothub.v2021_07_01.models.ImportDevicesRequest
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def import_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        import_devices_parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.JobResponse:
        """Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param import_devices_parameters: The parameters that specify the import devices operation.
         Required.
        :type import_devices_parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def import_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        import_devices_parameters: Union[_models.ImportDevicesRequest, IO],
        **kwargs: Any
    ) -> _models.JobResponse:
        """Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        Import, update, or delete device identities in the IoT hub identity registry from a blob. For
        more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
        :param resource_group_name: The name of the resource group that contains the IoT hub. Required.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub. Required.
        :type resource_name: str
        :param import_devices_parameters: The parameters that specify the import devices operation. Is
         either a ImportDevicesRequest type or a IO type. Required.
        :type import_devices_parameters: ~azure.mgmt.iothub.v2021_07_01.models.ImportDevicesRequest or
         IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2021_07_01.models.JobResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status-code -> exception mapping; caller-supplied entries take precedence.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        # Pop operation kwargs before **kwargs is forwarded to the pipeline.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.JobResponse] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Streams/bytes are sent verbatim; models are serialized to JSON.
        if isinstance(import_devices_parameters, (IO, bytes)):
            _content = import_devices_parameters
        else:
            _json = self._serialize.body(import_devices_parameters, "ImportDevicesRequest")
        request = build_import_devices_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.import_devices.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("JobResponse", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    import_devices.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/importDevices"
    }
| [
"noreply@github.com"
] | Azure.noreply@github.com |
1a7ba9922dc7ad5edb07f0927418e6314f3beab2 | 07524e9ec69f339dc4898564ecdf9dc5496555a6 | /Ervany Septa Prawara.A-10-4C-Tugas P5/Menubutton - TugasP5.py | 35b11ac5b1f08aff5cd72dfde0ffe7258f65dece | [] | no_license | ervanyspa/python | fe0a01c9645e75a5dc6754df8fc4d9399ed7f22e | d894d8030b714db51dee7da949366214db532c04 | refs/heads/master | 2023-03-12T02:23:36.291350 | 2021-03-03T02:05:57 | 2021-03-03T02:05:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | from tkinter import *
# Tkinter demo: a Menubutton with a checkbutton menu of programming languages.
# (UI strings are Indonesian: "Pilih Bahasa Pemrograman" = "choose a
# programming language"; left as-is because they are user-facing text.)
mainform = Tk()
mainform.geometry('500x200')
mainform.wm_title("Menubutton")
mainform['background']="#bbdfc8"
# Prompt label above the menu button.
lbl = Label(mainform)
lbl['text']="Pilih Bahasa Pemrograman"
lbl.pack()
# The Menubutton itself; options set item-style instead of via constructor.
mbt = Menubutton(mainform)
mbt['text']="Bahasa Pemrograman"
mbt['activebackground']="#75cfb8"
mbt['activeforeground']="red"
mbt['bg'] = "#fde8cd"
mbt['bd']=2
mbt['cursor']="star"
mbt['direction']="right"   # menu pops up to the right of the button
mbt['fg']="navy"
mbt['height']=2
mbt.pack()
# Attach a tear-off-disabled menu with one checkbutton per language.
mbt.menu = Menu(mbt, tearoff=0)
mbt['menu'] = mbt.menu
mbt.menu.add_checkbutton(label="Python")
mbt.menu.add_checkbutton(label="Java")
mbt.menu.add_checkbutton(label="C++")
# Blocks until the window is closed.
mainform.mainloop()
"ti.ervany@gmail.com"
] | ti.ervany@gmail.com |
6eaf25242a314c9c3bd54e3421c93441b06cfdc2 | fa9bd18aa442e58da6647a3087e5cdb43718c777 | /md5fun.py | 1c016fd5f44091bc0f24a7394c4d221f3d5ee4e4 | [] | no_license | kuiqejw/Security | 5830dd1b04fb2c4a5a3bf05502a5fed50e2ba1ce | a245a4a53c3bdac6d96fec71d8ab7b6d8ce4a379 | refs/heads/master | 2020-04-22T21:28:45.680659 | 2019-05-06T01:31:02 | 2019-05-06T01:31:02 | 170,674,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,969 | py | import hashlib
import itertools
import random
import time
from string import ascii_lowercase
#read and write
def getFileContent(filename):
    """Read *filename* and return its non-blank lines, stripped of whitespace.

    Args:
        filename: Path of a UTF-8 text file.

    Returns:
        list[str]: Stripped, non-empty lines in file order.
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(filename, mode='r', encoding='utf-8', newline='\n') as file:
        return [line.strip() for line in file if line.strip()]
def getFileContentDict(filename):
    """Read *filename* and return its non-blank lines, stripped of whitespace.

    NOTE(review): this is a verbatim duplicate of getFileContent(); it is kept
    as a separate function so the module's public interface is unchanged.

    Args:
        filename: Path of a UTF-8 text file.

    Returns:
        list[str]: Stripped, non-empty lines in file order.
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(filename, mode='r', encoding='utf-8', newline='\n') as file:
        return [line.strip() for line in file if line.strip()]
def writeFile(filename, content):
    """Write the string *content* to *filename* and echo it to stdout.

    Args:
        filename: Destination path (overwritten if it exists).
        content: Full text to write (despite the old comment, a str, not a list).
    """
    # 'with' flushes and closes even if write() raises (original relied on an
    # explicit close() that was skipped on error).
    with open(filename, mode='w', encoding='utf-8', newline='\n') as file:
        file.write(content)
    print(content)  # kept: callers rely on the console echo for progress
    return
def printList(list):
    """Concatenate the string items of *list*, terminating each with '\n'."""
    return ''.join(item + '\n' for item in list)
#brute force
def getPasswords(words, hashes):
    """Crack MD5 digests using a word list, falling back to brute force.

    For each dictionary word: if its MD5 digest is in *hashes*, record the
    word and remove the digest.  Otherwise brute-force 5-character words
    built from the word's distinct characters plus digits (each distinct
    character set is tried only once, tracked in *s*).

    Side effects: mutates *hashes* (matched digests removed) and writes the
    cracked passwords to 'pass2.txt'.

    Args:
        words: Candidate dictionary words.
        hashes: Mutable collection of hex MD5 digests (supports `in`/remove).
    """
    passwords = []
    s = set()   # character sets already brute-forced, to avoid repeats
    for i in range(len(words)):
        print(i)  # progress indicator: index of the word being tried
        hashed = hashlib.md5(words[i].encode('utf-8')).hexdigest()
        if hashed in hashes:
            passwords.append(words[i])
            hashes.remove(hashed)
        else:
            #grab string: distinct characters of the word (order arbitrary)
            x = ''.join(set(words[i]))
            if x not in s:
                s.add(x)
                x += '0123456789'
                editedBruteForce(hashes, x, passwords)
        # else:
        # 	uniq_str = '0123456789'.join(set(words[i]))
        # 	editedBruteForce(hashes, uniq_str, passwords)
        # if len(passwords) == 15:
        # 	break
    writeFile('pass2.txt', printList(passwords))
def editedBruteForce(hashes, string, passwords):
    """Try every 5-character word over the alphabet *string* against *hashes*.

    Each candidate is MD5-hashed; when the hex digest is found in *hashes*,
    the plaintext is recorded and the digest removed so it cannot match twice.

    Args:
        hashes: Mutable collection of hex MD5 digests (mutated: matches are
            removed via hashes.remove()).
        string: Characters allowed in the password.
        passwords: List that cracked plaintexts are appended to (mutated).

    Returns:
        The *passwords* list; returns early once 15 passwords are collected.
    """
    # itertools.product(..., repeat=5) replaces the original five nested index
    # loops and enumerates candidates in the same lexicographic order.
    for combo in itertools.product(string, repeat=5):
        word = ''.join(combo)
        digest = hashlib.md5(word.encode('utf-8')).hexdigest()
        if digest in hashes:
            passwords.append(word)
            hashes.remove(digest)
            if len(passwords) == 15:
                return passwords
    return passwords
def bruteForce(hashes):
    """Brute-force up to 15 five-character passwords over [a-z0-9].

    Args:
        hashes: Collection of hex MD5 digests to crack (not mutated).

    Returns:
        list[str]: Cracked plaintexts in generation order, at most 15
        (returns early once 15 are found).
    """
    alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
    passwords = []
    # itertools.product replaces the original five nested index loops;
    # candidates are produced in the same lexicographic order.
    for combo in itertools.product(alphabet, repeat=5):
        word = ''.join(combo)
        if hashlib.md5(word.encode('utf-8')).hexdigest() in hashes:
            passwords.append(word)
            if len(passwords) == 15:
                return passwords
    return passwords
def Part3():
    """Brute-force the digests in the module-global *hash5* and time the run.

    Prints the cracked passwords and the elapsed wall time, then writes the
    results to 'pass5.txt'.  Relies on `hash5` being defined at module scope
    (set in the __main__ block).
    """
    startTime = time.perf_counter()
    passwords = bruteForce(hash5)
    endTime = time.perf_counter()
    print(passwords)
    print('Elapsed time = ', endTime - startTime)
    writeFile('pass5.txt', printList(passwords))
    return
def salting():
    """Return one random lowercase ASCII letter to use as a salt.

    Uses random.choice, which samples uniformly; the original
    ``int(random.random()*100) % 26`` scheme was biased toward the
    earlier letters of the alphabet.  (Note: ``random`` is not a
    cryptographic source — fine for this exercise.)
    """
    return random.choice(ascii_lowercase)
def Part5():
    """Salt each password from 'pass5.txt' and write salted results to disk.

    For every password p: append a random lowercase letter, collect the
    salted plaintexts into 'pass6.txt' and their MD5 digests into
    'salted6.txt' (one entry per line).
    """
    passwords = getFileContent('pass5.txt')
    passwordsSalted = []   # (password, salt) pairs; built but only used locally
    outPass6 = ''
    outSalt = ''
    for p in passwords:
        salt = salting()
        passwordsSalted.append((p, salt))
        newPassword = p + salt
        outPass6 += newPassword + '\n'
        outSalt += hashlib.md5(newPassword.encode('utf-8')).hexdigest() +'\n'
    writeFile('pass6.txt', outPass6)
    writeFile('salted6.txt',outSalt)
def Part6():
    """Split 'hashes.txt' into one hash file per difficulty section.

    The input contains 'Weak'/'Moderate'/'Strong' header lines; each
    following line holds a hash in its second whitespace-separated column.
    Writes 'hashes_weak.txt', 'hashes_moderate.txt' and 'hashes_strong.txt'.
    """
    hashesContent = getFileContent('hashes.txt')
    hashes = []
    difficulty = -1
    for line in hashesContent:
        if 'Weak' in line or 'Moderate' in line or 'Strong' in line:
            # Section header: start a fresh bucket.  (The original also
            # reassigned hashes[difficulty] = [] right after append — redundant.)
            hashes.append([])
            difficulty += 1
        else:
            print(difficulty)  # progress/debug: current section index
            hashes[difficulty].append(line.split()[1])
    # BUG FIX: the original called the undefined name genFile(), which raised
    # NameError at runtime; writeFile() (defined above) is the intended helper.
    writeFile('hashes_weak.txt', printList(hashes[0]))
    writeFile('hashes_moderate.txt', printList(hashes[1]))
    writeFile('hashes_strong.txt', printList(hashes[2]))
    return
# Script entry point: load the word list and target digests, then run the
# dictionary/brute-force attack (getPasswords).  The commented-out sections
# are earlier experiments (argparse CLI, Part3, Part5) kept with their
# measured timings.
if __name__=="__main__":
    # parser=argparse.ArgumentParser(description='Brute force.')
    # parser.add_argument('-i', dest='infile',help='Input file')
    # parser.add_argument('-d', dest='dictfile',help='Dictionary file')
    # parser.add_argument('-o', dest='outfile',help='Output file')
    # args=parser.parse_args()
    # infile=args.infile
    # dictfile=args.dictfile
    # outfile=args.outfile
    # if infile==None or outfile==None or dictfile==None:
    # 	print 'Missing infile, outfile, or dictfile'
    # 	printusage();
    # 	sys.exit(1)
    # print 'Reading from: ',infile
    # print 'Dictionary from: ',dictfile
    # print 'Writing to: ',outfile
    words5 = getFileContent('words5.txt')
    hash5 = getFileContent('hash5.txt')
    # start_time = time.time()
    # print('start')
    # Part3()
    # print('---%s seconds ----' %(time.time() - start_time))
    #230.76921701 seconds
    start_time = time.time()
    print('start', start_time)
    getPasswords(words5, hash5)
    print('---%s seconds ----' %(time.time() - start_time))
    # start_time = time.time()
    # print('start')
    # Part5()
    # print('---%s seconds ----' %(time.time() - start_time))
    #0.04687643 seconds
    # getPasswords(words5, hash5)
"noreply@github.com"
] | kuiqejw.noreply@github.com |
1556b0892916ec6c428405d5f4b71791ac399917 | 0c355ffd8815a4da5e08a50840b2949f7f4c53b7 | /8단계_문자열/study.py | 8e03c457ac1bd30d3c0af544d9854b5a6071a906 | [] | no_license | alsgur6180/baekjun | 81fcd1e5b5eb0ebbc41bd52753b6b7949fba85c4 | 6584c065689d312f711dc9a5220797ca6b1a1907 | refs/heads/master | 2023-02-04T21:41:32.362614 | 2020-12-27T12:11:55 | 2020-12-27T12:11:55 | 290,671,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | s = input().upper()
# Find the most frequent character of s (uppercased input, read on the line
# above); print '?' if several characters tie for the highest count.
setList = list(set(s))  # distinct characters only, so each is counted once
best = 0                # highest occurrence count seen so far
result = ''             # current winner; becomes '?' on a tie
for i in setList:
    if best<s.count(i):
        best = s.count(i)
        result = i
    elif best == s.count(i) and result != i:
        # Another character matches the current best count -> ambiguous.
        result = '?'
print(result)
"alsgur6180@naver.com"
] | alsgur6180@naver.com |
3d85728869a6a5f2b3254a2625613e36d3bf0311 | 2394a7ace0398485d0c851136efd402e5b7718e4 | /run_game.py | 4958496168c29cc5670ea22ec88a6675887f2d50 | [
"Apache-2.0"
] | permissive | TheBigKahuna353/ScratchGameEngine | e4546b8e74eba28a1ff4fe69d78ed897a6334299 | 7330e3f7a95eade9ad219bdc397f9f9050847b79 | refs/heads/master | 2023-01-10T06:25:17.491865 | 2020-11-12T07:37:37 | 2020-11-12T07:37:37 | 290,346,249 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | import pygame
from threading import Thread
def run(engine):
    """Start the game loop (start()) on a new thread so the caller returns.

    Args:
        engine: Passed through to start() unchanged.
    """
    Thread(target=start, args=(engine,)).start()
def start(engine):
    """Initialise pygame, open a 500x500 window, then spin forever.

    NOTE(review): the ``while True: pass`` busy loop never pumps pygame
    events and burns a CPU core — presumably a placeholder for the real
    game loop.  ``engine`` is accepted but not used yet.
    """
    pygame.init()
    screen2 = pygame.display.set_mode((500, 500))
    while True:
        pass
"65262579+TheBigKahuna353@users.noreply.github.com"
] | 65262579+TheBigKahuna353@users.noreply.github.com |
df6e2204bce9b1bbe8cbdea4fabaf6ddeeb0ef70 | 45d955ba10d0478bc3bb4e04c5ff4e5778343d77 | /env/bin/django-admin | 652d7d3b462788b5f3fff54d8c31f2831827a156 | [] | no_license | GennadyBLack/Django_social | b3b318f08b4bb4a7b8c3a0f830220d1760fe4923 | c0507cbfd035fd3d488f3ad98d1c9c820fe28d49 | refs/heads/master | 2022-11-28T04:27:19.796729 | 2020-04-15T19:08:51 | 2020-04-15T19:08:51 | 253,865,617 | 0 | 0 | null | 2022-11-22T05:28:51 | 2020-04-07T17:28:55 | JavaScript | UTF-8 | Python | false | false | 284 | #!/home/she/wowchat/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
# Console-script entry point generated for django-admin: normalise argv[0]
# (strip the '-script.py'/'.exe' suffix some installers append) and hand
# control to Django's command-line dispatcher.
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
| [
"richard.black183@gmail.com"
] | richard.black183@gmail.com | |
9db4c5ea883e33030a8c2dd4c0af0434a3be59e8 | fbb643aa6044af78697b7274cf2a507437987db6 | /test/serialbox-python/serialbox/test_serializer.py | 17ac2c9cae210e838319672ee2a870ca221b08d2 | [
"BSD-2-Clause"
] | permissive | mbianco/serialbox2 | c164875640ca5d2f4eac6e0d1036e9ed1678f395 | 1d5fdcf10c21f88dcf0b1779b3a00cbe384a55bf | refs/heads/master | 2021-01-12T08:25:20.104433 | 2016-12-15T15:17:01 | 2016-12-15T15:17:01 | 67,140,472 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 18,230 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
##===-----------------------------------------------------------------------------*- Python -*-===##
##
## S E R I A L B O X
##
## This file is distributed under terms of BSD license.
## See LICENSE.txt for more information.
##
##===------------------------------------------------------------------------------------------===##
##
## Unittest of the serializer.
##
##===------------------------------------------------------------------------------------------===##
import logging
import os
import shutil
import unittest
import numpy as np
from serialbox import *
class TestSerializer(unittest.TestCase):
    """Unit tests for serialbox.Serializer using the Binary archive.

    Each test runs in its own temporary directory (see set_path/setUp) that
    is removed again in tearDown.
    """
    # Archive backend used by every test in this class.
    archive = "Binary"
    def set_path(self):
        """Compute a per-test temporary directory path into self.path."""
        self.path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "unittest-tmp-dir",
                                 "TestSerializer", self._testMethodName)
    def setUp(self):
        """Create the per-test temporary directory."""
        self.set_path()
        logging.debug("Creating: %s" % self.path)
        os.makedirs(self.path, exist_ok=True)
    def tearDown(self):
        """Delete the per-test temporary directory and its contents."""
        logging.debug("Deleting: %s" % self.path)
        shutil.rmtree(self.path)
    def test_init(self):
        """Serializer construction: error cases and the three open modes."""
        #
        # Directory does not exist -> Error
        #
        self.assertRaises(SerialboxError, Serializer, OpenModeKind.Read, self.path, "field",
                          self.archive)
        #
        # Archive not registered -> Error
        #
        self.assertRaises(SerialboxError, Serializer, OpenModeKind.Write, self.path, "field",
                          "wrong-archive")
        #
        # Write
        #
        ser = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        self.assertEqual(ser.mode, OpenModeKind.Write)
        self.assertEqual(ser.prefix, "field")
        self.assertEqual(ser.directory, self.path)
        #
        # Write (OpenModeKind as int)
        #
        ser = Serializer(OpenModeKind.Write.value, self.path, "field", self.archive)
        self.assertEqual(ser.mode, OpenModeKind.Write)
        self.assertEqual(ser.prefix, "field")
        self.assertEqual(ser.directory, self.path)
        ser.update_meta_data()
        #
        # Read
        #
        ser = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        self.assertEqual(ser.mode, OpenModeKind.Read)
        #
        # Append
        #
        ser = Serializer(OpenModeKind.Append, self.path, "field", self.archive)
        self.assertEqual(ser.mode, OpenModeKind.Append)
    def test_global_meta_info(self):
        """Global meta-info written by one serializer is visible to another."""
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        ser_write.global_metainfo.insert("key1", 5.0)
        ser_write.global_metainfo.insert("key2", 1)
        ser_write.global_metainfo.insert("key3", "str")
        ser_write.update_meta_data()
        ser_append = Serializer(OpenModeKind.Append, self.path, "field", self.archive)
        self.assertEqual(ser_append.global_metainfo["key1"], 5.0)
        self.assertEqual(ser_append.global_metainfo["key2"], 1)
        self.assertEqual(ser_append.global_metainfo["key3"], "str")
    def test_serialization_status(self):
        """Process-wide enable/disable switch is reflected by status()."""
        self.assertGreaterEqual(Serializer.status(), 0)
        #
        # Disable serialization
        #
        Serializer.disable()
        self.assertEqual(Serializer.status(), Serializer.Disabled)
        #
        # Enable serialization
        #
        Serializer.enable()
        self.assertEqual(Serializer.status(), Serializer.Enabled)
    def test_savepoint_list(self):
        """Savepoint registration, lookup by name, and list caching on read."""
        ser = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        #
        # Add savepoints
        #
        sp1 = Savepoint('s1')
        ser.register_savepoint(sp1)
        # Registering the same savepoint twice is an error.
        self.assertRaises(SerialboxError, ser.register_savepoint, sp1)
        sp2 = Savepoint('sp', {"key1": 1.0})
        ser.register_savepoint(sp2)
        sp3 = Savepoint('sp', {"key1": 2.0})
        ser.register_savepoint(sp3)
        #
        # Get savepoint vector
        #
        savepoints = ser.savepoint_list()
        self.assertEqual(savepoints[0], sp1)
        self.assertEqual(savepoints[1], sp2)
        self.assertEqual(savepoints[2], sp3)
        self.assertTrue(ser.has_savepoint(sp1))
        self.assertTrue(ser.has_savepoint(sp2))
        self.assertTrue(ser.has_savepoint(sp3))
        #
        # Get all savepoints with `name`
        #
        savepoints_with_name = ser.get_savepoint("sp")
        self.assertTrue(sp2 in savepoints_with_name)
        self.assertTrue(sp3 in savepoints_with_name)
        ser.update_meta_data()
        #
        # Check savepoint list gets cached (same object identity on re-query)
        #
        ser = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        savepoint_list_1 = ser.savepoint_list()
        savepoint_list_2 = ser.savepoint_list()
        self.assertEqual(id(savepoint_list_1), id(savepoint_list_2))
    def test_savepoint(self):
        """SavepointCollection queries via attribute and item access."""
        ser = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        ser.register_savepoint(Savepoint('sp', {"key": 1}))
        ser.register_savepoint(Savepoint('sp', {"key": 2}))
        ser.register_savepoint(Savepoint('sp', {"key": 2, "key2": 5.0}))
        ser.register_savepoint(Savepoint('sp-2', {"key": "value", "meta info": True}))
        ser.register_savepoint(Savepoint('sp-2', {"key": "value", "1": 1}))
        # Check type
        self.assertTrue(isinstance(ser.savepoint, SavepointCollection))
        #
        # Query savepoint: Case 1 : Access via __getattr__
        # (note: '-' and ' ' in names map to '_' in attribute access)
        #
        # Access sp
        self.assertEqual(ser.savepoint.sp.key[1].as_savepoint(), Savepoint('sp', {"key": 1}))
        self.assertEqual(ser.savepoint.sp.key[2].key2[5.0].as_savepoint(),
                         Savepoint('sp', {"key": 2, "key2": 5.0}))
        self.assertEqual(ser.savepoint[0], Savepoint('sp', {"key": 1}))
        # Access sp-2
        self.assertEqual(ser.savepoint.sp_2.key["value"].meta_info[True].as_savepoint(),
                         Savepoint('sp-2', {"key": "value", "meta info": True}))
        self.assertEqual(ser.savepoint.sp_2.key["value"]._1[1].as_savepoint(),
                         Savepoint('sp-2', {"key": "value", "1": 1}))
        # Mixed
        self.assertEqual(ser.savepoint['sp-2'].key["value"]["1"][1].as_savepoint(),
                         Savepoint('sp-2', {"key": "value", "1": 1}))
        #
        # Query savepoint: Case 2: Access only with __getitem__
        #
        # Access sp
        self.assertEqual(ser.savepoint['sp']['key'][1].as_savepoint(), Savepoint('sp', {"key": 1}))
        self.assertEqual(ser.savepoint['sp']['key'][2]['key2'][5.0].as_savepoint(),
                         Savepoint('sp', {"key": 2, "key2": 5.0}))
        self.assertEqual(ser.savepoint[0], Savepoint('sp', {"key": 1}))
        # Access sp-2
        self.assertEqual(ser.savepoint['sp-2']['key']['value']['meta info'][True].as_savepoint(),
                         Savepoint('sp-2', {"key": "value", "meta info": True}))
        self.assertEqual(ser.savepoint['sp-2']['key']['value']['1'][1].as_savepoint(),
                         Savepoint('sp-2', {"key": "value", "1": 1}))
        #
        # Errors
        #
        # Ambiguous match
        self.assertRaises(SerialboxError, ser.savepoint.sp.key[2].as_savepoint)
        self.assertEqual(len(ser.savepoint.sp.key[2].savepoints()), 2)
        # TopCollection not unqique -> Error
        self.assertRaises(SerialboxError, ser.savepoint.as_savepoint)
        # NamedCollection not unqique -> Error
        self.assertRaises(SerialboxError, ser.savepoint.sp.as_savepoint)
        # Savepoint no existing -> Error
        self.assertRaises(SerialboxError, ser.savepoint.__getattr__, 'spX')
        # Metainfo key not existing -> Error
        self.assertRaises(SerialboxError, ser.savepoint.sp.__getattr__, 'keyX')
        # Metainfo value not existing -> Error
        self.assertRaises(SerialboxError, ser.savepoint.sp.key.__getitem__, 3)
        # Indexing not supported -> Error
        self.assertRaises(SerialboxError, ser.savepoint.sp.key[1].__getitem__, 3)
    def test_field(self):
        """Field registration, name queries, and meta-info round trip."""
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        field1 = FieldMetainfo(TypeID.Float64, [12, 13, 14])
        field2 = FieldMetainfo(TypeID.Float32, [1024, 1024])
        field3 = FieldMetainfo(TypeID.Int32, [4096])
        #
        # Add fields
        #
        ser_write.register_field("field1", field1)
        self.assertTrue(ser_write.has_field("field1"))
        ser_write.register_field("field2", field2)
        self.assertTrue(ser_write.has_field("field2"))
        ser_write.register_field("field3", field3)
        self.assertTrue(ser_write.has_field("field3"))
        # Add existing field -> Error
        self.assertRaises(SerialboxError, ser_write.register_field, "field1", field1)
        # Query non-existing field
        self.assertFalse(ser_write.has_field("non-existing"))
        #
        # Fieldnames
        #
        fieldnames = ser_write.fieldnames()
        self.assertTrue("field1" in fieldnames)
        self.assertTrue("field2" in fieldnames)
        self.assertTrue("field3" in fieldnames)
        #
        # Write meta-data to disk
        #
        ser_write.update_meta_data()
        #
        # Query field-meta-info
        #
        ser_read = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        self.assertEqual(ser_read.get_field_metainfo("field1"), field1)
        self.assertEqual(ser_read.get_field_metainfo("field2"), field2)
        # NOTE(review): likely intended to check "field3"/field3 here — verify.
        self.assertEqual(ser_read.get_field_metainfo("field2"), field2)
    def test_write_and_read_implicit(self):
        """write()/read() with implicit field and savepoint registration."""
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        #
        # Setup fields
        #
        field1_input = np.random.rand(16)
        field2_input = np.random.rand(4, 4)
        field3_input = np.random.rand(2, 2, 2)
        field3_output_allocted = np.random.rand(2, 2, 2)
        sp = Savepoint("sp")
        #
        # Write fields (implicitly register savepoint and fields)
        #
        ser_write.write("field1", sp, field1_input)
        ser_write.write("field2", sp, field2_input)
        ser_write.write("field3", sp, field3_input)
        #
        # Read fields
        #
        ser_read = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        field1_output = ser_read.read("field1", sp)
        field2_output = ser_read.read("field2", sp)
        field3_output = ser_read.read("field3", ser_read.savepoint.sp)
        # Read into a pre-allocated array instead of allocating a new one.
        ser_read.read("field3", sp, field3_output_allocted)
        #
        # Validate
        #
        self.assertTrue(np.allclose(field1_input, field1_output))
        self.assertTrue(np.allclose(field2_input, field2_output))
        self.assertTrue(np.allclose(field3_input, field3_output))
        self.assertTrue(np.allclose(field3_input, field3_output_allocted))
        #
        # Failures
        #
        # Reading with OpenModeKind.Write -> Error
        self.assertRaises(SerialboxError, ser_write.read, "field1", sp)
        # Writing with OpenModeKind.Read -> Error
        self.assertRaises(SerialboxError, ser_read.write, "field1", field1_input, sp)
        # Read non-existent field -> Error
        self.assertRaises(SerialboxError, ser_read.read, "field-non-existent", sp)
        # Read at non-existent savepoint -> Error
        self.assertRaises(SerialboxError, ser_read.read, "field1", Savepoint('sp2'))
        # Read with field but wrong dimensions -> Error
        self.assertRaises(SerialboxError, ser_read.read, "field1", sp,
                          np.ndarray(shape=[16, 15], dtype=np.float64))
        # Read with field but wrong type -> Error
        self.assertRaises(SerialboxError, ser_read.read, "field1", sp,
                          np.ndarray(shape=[16], dtype=np.int32))
    def test_write_and_read_explict(self):
        """Round trip of every supported element type with explicit
        registration (name keeps its historical 'explict' typo — callers may
        select tests by name)."""
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        N = 5
        #
        # Setup fields
        #
        # NOTE(review): np.bool is the deprecated alias removed in NumPy 1.24+;
        # this line may need plain `bool` on modern NumPy.
        field_bool = np.ndarray(dtype=np.bool, shape=[N, N, N])
        field_int32 = np.ndarray(dtype=np.int32, shape=[N, N, N])
        field_int64 = np.ndarray(dtype=np.int64, shape=[N, N, N])
        field_float32 = np.ndarray(dtype=np.float32, shape=[N, N, N])
        field_float64 = np.ndarray(dtype=np.float64, shape=[N, N, N])
        for i in range(N):
            for j in range(N):
                for k in range(N):
                    rnd = np.random.rand(1)
                    field_bool[i, j, k] = True if rnd > 0.5 else False
                    field_int32[i, j, k] = 100 * rnd
                    field_int64[i, j, k] = 100 * rnd
                    field_float32[i, j, k] = rnd
                    field_float64[i, j, k] = rnd
        #
        # Register field
        #
        register_field = lambda n, t, f: ser_write.register_field(n, FieldMetainfo(t, f.shape))
        register_field("field_bool", TypeID.Boolean, field_bool)
        register_field("field_int32", TypeID.Int32, field_int32)
        register_field("field_int64", TypeID.Int64, field_int64)
        register_field("field_float32", TypeID.Float32, field_float32)
        register_field("field_float64", TypeID.Float64, field_float64)
        #
        # Register savepoint
        #
        sp_bool = Savepoint("sp_bool")
        sp_ints = Savepoint("sp_ints")
        sp_floats = Savepoint("sp_floats")
        ser_write.register_savepoint(sp_bool)
        ser_write.register_savepoint(sp_ints)
        ser_write.register_savepoint(sp_floats)
        #
        # Write (last argument False disables implicit registration)
        #
        ser_write.write("field_bool", sp_bool, field_bool, False)
        ser_write.write("field_int32", sp_ints, field_int32, False)
        ser_write.write("field_int64", sp_ints, field_int64, False)
        ser_write.write("field_float32", sp_floats, field_float32, False)
        ser_write.write("field_float64", sp_floats, field_float64, False)
        #
        # Read fields
        #
        ser_read = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        field_bool_output = ser_read.read("field_bool", sp_bool)
        field_int32_output = ser_read.read("field_int32", sp_ints)
        field_int64_output = ser_read.read("field_int64", sp_ints)
        field_float32_output = ser_read.read("field_float32", sp_floats)
        field_float64_output = ser_read.read("field_float64", sp_floats)
        #
        # Validate
        #
        self.assertTrue(np.allclose(field_bool_output, field_bool))
        self.assertTrue(np.allclose(field_int32_output, field_int32))
        self.assertTrue(np.allclose(field_int64_output, field_int64))
        self.assertTrue(np.allclose(field_float32_output, field_float32))
        self.assertTrue(np.allclose(field_float64_output, field_float64))
    def test_write_and_read_async(self):
        """read_async() + wait_for_all() return the same data as read()."""
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        #
        # Setup fields
        #
        field = np.random.rand(5, 6, 7)
        sp = Savepoint("sp")
        #
        # Write field
        #
        ser_write.write("field", sp, field)
        #
        # Read fields asynchronously
        #
        ser_read = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        field_1 = ser_read.read_async("field", sp)
        field_2 = ser_read.read_async("field", sp)
        field_3 = ser_read.read_async("field", sp)
        ser_read.wait_for_all()
        #
        # Validate
        #
        self.assertTrue(np.allclose(field, field_1))
        self.assertTrue(np.allclose(field, field_2))
        self.assertTrue(np.allclose(field, field_3))
    def test_write_and_read_sliced(self):
        """read_slice() returns the requested sub-region of a stored field."""
        field_input = np.random.rand(10, 15, 20)
        #
        # Write
        #
        ser_write = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
        ser_write.write("field", Savepoint("sp"), field_input)
        #
        # Read
        #
        ser_read = Serializer(OpenModeKind.Read, self.path, "field", self.archive)
        field_output = ser_read.read_slice("field", Savepoint("sp"), Slice[:])
        self.assertTrue(np.allclose(field_output[:], field_input[:]))
        field_output = ser_read.read_slice("field", Savepoint("sp"), Slice[:, :, 0])
        self.assertTrue(np.allclose(field_output[:, :, 0], field_input[:, :, 0]))
        field_output = ser_read.read_slice("field", Savepoint("sp"), Slice[2:, 1:-1:2, 1:5])
        self.assertTrue(np.allclose(field_output[2:, 1:-1:2, 1:5], field_input[2:, 1:-1:2, 1:5]))
        field_output = ser_read.read_slice("field", Savepoint("sp"), Slice[:-1, 1::2])
        self.assertTrue(np.allclose(field_output[:-1, 1::2],
                                    field_input[:-1, 1::2]))
        #
        # To many slices -> Error
        #
        self.assertRaises(SerialboxError, ser_read.read_slice, "field", Savepoint("sp"),
                          Slice[:, :, :, :])
    def test_write_and_read_stateless(self):
        """Static to_file()/from_file() round trip without a Serializer."""
        field_input = np.random.rand(2, 2, 2)
        field_output = np.random.rand(2, 2, 2)
        #
        # Read & write from file (Binary archive)
        #
        Serializer.to_file("field", field_input, os.path.join(self.path, "test.dat"))
        Serializer.from_file("field", field_output, os.path.join(self.path, "test.dat"))
        self.assertTrue(np.allclose(field_input, field_output))
        #
        # Read & write from file (NetCDF archive) — only if built with NetCDF
        #
        if "SERIALBOX_HAS_NETCDF" in Config().compile_options:
            Serializer.to_file("field", field_input, os.path.join(self.path, "test.nc"))
            Serializer.from_file("field", field_output, os.path.join(self.path, "test.nc"))
            self.assertTrue(np.allclose(field_input, field_output))
        #
        # Invalid file extension
        #
        self.assertRaises(SerialboxError, Serializer.to_file, "field", field_input, "test.X")
    def test_to_string(self):
        """Construct a serializer for string conversion.

        NOTE(review): appears unfinished — it builds a Serializer but asserts
        nothing (presumably str(ser) was meant to be exercised).
        """
        ser = Serializer(OpenModeKind.Write, self.path, "field", self.archive)
# Allow running this test module directly; test runners import it instead.
if __name__ == "__main__":
    unittest.main()
| [
"fabian_thuering@hotmail.com"
] | fabian_thuering@hotmail.com |
611e8b71aaa47a96f014a44dca6f7f079c89b447 | d856c9f4682b2d32d6f2571ba925b6c8fe4f7572 | /lib/test/__init__.py | 7bc6eac05f393c752827ab5bdf85f25d8de04d05 | [] | no_license | vsraptor/bbhtm | 4a2d5a263a7dbd8c1c5a4eb30f78b4960572414d | 8c051d118c9a35743fcbb435adc6985b50979d5b | refs/heads/master | 2020-12-24T19:46:20.460406 | 2019-01-31T22:17:12 | 2019-01-31T22:17:12 | 58,018,682 | 16 | 6 | null | null | null | null | UTF-8 | Python | false | false | 38 | py |
#__all__ = ['data','test','results']
| [
"me@me.com"
] | me@me.com |
60ffedaa00bbf00d1911ac30a215e3866a5a5dab | 754a53b8da48a5920aef5221ba4f4503828efcb9 | /test/unit/test_postgres_adapter.py | c5b6d05b39cf58cfde3abe135515401a467cabda | [
"Apache-2.0"
] | permissive | tjengel/dbt | a88f35fbf13ddddef856af0e87eab0a94fe4d290 | 6a62ec43e946547ee7ee934138422bd40a9d2cdc | refs/heads/development | 2020-09-03T01:50:18.896130 | 2018-09-07T16:03:17 | 2018-09-07T16:03:17 | 219,355,230 | 0 | 0 | Apache-2.0 | 2019-11-03T19:39:54 | 2019-11-03T19:39:54 | null | UTF-8 | Python | false | false | 2,662 | py | import mock
import unittest
import dbt.flags as flags
import dbt.adapters
from dbt.adapters.postgres import PostgresAdapter
from dbt.exceptions import ValidationException
from dbt.logger import GLOBAL_LOGGER as logger # noqa
class TestPostgresAdapter(unittest.TestCase):
    """Unit tests for dbt's PostgresAdapter.acquire_connection().

    The keepalive tests mock out psycopg2 and assert on the exact keyword
    arguments forwarded to psycopg2.connect().
    """
    def setUp(self):
        """Enable strict mode and build a baseline connection profile."""
        flags.STRICT_MODE = True
        self.profile = {
            'dbname': 'postgres',
            'user': 'root',
            'host': 'database',
            'pass': 'password',
            'port': 5432,
            'schema': 'public'
        }
    def test_acquire_connection_validations(self):
        """Profile validation passes and the connection reports type 'postgres'."""
        try:
            connection = PostgresAdapter.acquire_connection(self.profile,
                                                            'dummy')
            self.assertEquals(connection.get('type'), 'postgres')
        except ValidationException as e:
            self.fail('got ValidationException: {}'.format(str(e)))
        except BaseException as e:
            self.fail('validation failed with unknown exception: {}'
                      .format(str(e)))
    def test_acquire_connection(self):
        """An acquired connection is open and carries a live handle."""
        connection = PostgresAdapter.acquire_connection(self.profile, 'dummy')
        self.assertEquals(connection.get('state'), 'open')
        self.assertNotEquals(connection.get('handle'), None)
    @mock.patch('dbt.adapters.postgres.impl.psycopg2')
    def test_default_keepalive(self, psycopg2):
        """Without keepalives_idle in the profile, none is passed to connect()."""
        connection = PostgresAdapter.acquire_connection(self.profile, 'dummy')
        psycopg2.connect.assert_called_once_with(
            dbname='postgres',
            user='root',
            host='database',
            password='password',
            port=5432,
            connect_timeout=10)
    @mock.patch('dbt.adapters.postgres.impl.psycopg2')
    def test_changed_keepalive(self, psycopg2):
        """A positive keepalives_idle is forwarded to psycopg2.connect()."""
        self.profile['keepalives_idle'] = 256
        connection = PostgresAdapter.acquire_connection(self.profile, 'dummy')
        psycopg2.connect.assert_called_once_with(
            dbname='postgres',
            user='root',
            host='database',
            password='password',
            port=5432,
            connect_timeout=10,
            keepalives_idle=256)
    @mock.patch('dbt.adapters.postgres.impl.psycopg2')
    def test_set_zero_keepalive(self, psycopg2):
        """keepalives_idle == 0 is treated as unset (omitted from connect())."""
        self.profile['keepalives_idle'] = 0
        connection = PostgresAdapter.acquire_connection(self.profile, 'dummy')
        psycopg2.connect.assert_called_once_with(
            dbname='postgres',
            user='root',
            host='database',
            password='password',
            port=5432,
            connect_timeout=10)
| [
"noreply@github.com"
] | tjengel.noreply@github.com |
27f7598d4ad87002d9ba329193fc4307bf8a0533 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03243/s829842448.py | 787222bd819c81ea3a40fa9d036778147e46371b | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | N = int(input())
# Print the smallest multiple of 111 that is >= N (N read on the line above).
i = N // 111
if N%111 != 0:
    # N is not an exact multiple: round the quotient up.
    i += 1
print(i*111)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.