hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4ca8d914db8f80f1a127210560f44c395e212d78 | 1,213 | py | Python | setup.py | avmaint/obs-ws-rc | 8ff2c36bdd2ac1636feabb356864b9ebb20e9b30 | [
"MIT"
] | 38 | 2017-08-07T04:30:28.000Z | 2021-11-03T08:30:47.000Z | setup.py | avmaint/obs-ws-rc | 8ff2c36bdd2ac1636feabb356864b9ebb20e9b30 | [
"MIT"
] | 10 | 2017-09-20T11:21:41.000Z | 2021-09-27T22:56:22.000Z | setup.py | avmaint/obs-ws-rc | 8ff2c36bdd2ac1636feabb356864b9ebb20e9b30 | [
"MIT"
] | 13 | 2017-10-28T20:41:39.000Z | 2020-12-28T02:51:03.000Z | from pathlib import Path
from setuptools import setup
root = Path(__file__).parent.absolute()
with open(str(root / 'README.rst')) as f:
long_description = f.read()
setup(
name='obs-ws-rc',
version='2.3.0',
description=("asyncio-based Python 3.5+ client to obs-websocket "
"plugin for OBS Studio"),
long_description=long_description,
url="https://github.com/KirillMysnik/obs-ws-rc",
author="Kirill Mysnik",
author_email = "kirill@mysnik.com",
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Video',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='obs obs-websocket',
packages=['obswsrc', ],
install_requires=['websockets', ],
python_requires='>=3.5',
package_data={
'obswsrc': ["protocol.json", ],
}
)
| 21.280702 | 71 | 0.602638 | from pathlib import Path
from setuptools import setup
root = Path(__file__).parent.absolute()
with open(str(root / 'README.rst')) as f:
long_description = f.read()
setup(
name='obs-ws-rc',
version='2.3.0',
description=("asyncio-based Python 3.5+ client to obs-websocket "
"plugin for OBS Studio"),
long_description=long_description,
url="https://github.com/KirillMysnik/obs-ws-rc",
author="Kirill Mysnik",
author_email = "kirill@mysnik.com",
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Video',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='obs obs-websocket',
packages=['obswsrc', ],
install_requires=['websockets', ],
python_requires='>=3.5',
package_data={
'obswsrc': ["protocol.json", ],
}
)
| 0 | 0 | 0 |
db570ba3a1be2917db42a3105f080646ef9b30bc | 192 | py | Python | ida/daenerys_utils.py | daenerys-sre/source | e9175407d4f43b33e20a5d7060480a7659535874 | [
"MIT"
] | 296 | 2019-03-19T04:02:41.000Z | 2021-12-15T08:06:35.000Z | ida/daenerys_utils.py | daenerys-sre/source | e9175407d4f43b33e20a5d7060480a7659535874 | [
"MIT"
] | 1 | 2019-04-01T14:13:56.000Z | 2019-04-01T14:14:18.000Z | ida/daenerys_utils.py | daenerys-sre/source | e9175407d4f43b33e20a5d7060480a7659535874 | [
"MIT"
] | 24 | 2019-03-19T09:48:11.000Z | 2021-12-10T14:35:57.000Z | # Daenerys IDA/Ghidra interop framework
# by Elias Bachaalany <elias.bachaalany@gmail.com>
#
# Python utility functions
import numbers | 21.333333 | 50 | 0.786458 | # Daenerys IDA/Ghidra interop framework
# by Elias Bachaalany <elias.bachaalany@gmail.com>
#
# Python utility functions
import numbers
def is_number(n):
    """Return True if *n* is any numeric type (int, float, complex, ...)."""
    return isinstance(n, numbers.Number)
ee0c68f70bcd626146ccf821e62ba39daab4b397 | 8,242 | py | Python | py_everything/sencrypt.py | tsukuyomm/py_everything | 1d0578670d5ae28f16711b92a9fc0f2ff5bf6897 | [
"MIT"
] | null | null | null | py_everything/sencrypt.py | tsukuyomm/py_everything | 1d0578670d5ae28f16711b92a9fc0f2ff5bf6897 | [
"MIT"
] | null | null | null | py_everything/sencrypt.py | tsukuyomm/py_everything | 1d0578670d5ae28f16711b92a9fc0f2ff5bf6897 | [
"MIT"
] | null | null | null | '''
Super Encrypt - Encryption Algorithm with 4 key encryption and decryption
__author__ = "PyBash"
__version__ = "v1.0.0"
'''
import random
from . import error
BASE_LETTERS = 'abcdefghijklmnopqrstuvwxyz'
BASE_SYMBOLS = ' 1234567890!@#$%^&*()-_=+[{]};:\'"<,>.?/`~|\\'
| 35.07234 | 77 | 0.545377 | '''
Super Encrypt - Encryption Algorithm with 4 key encryption and decryption
__author__ = "PyBash"
__version__ = "v1.0.0"
'''
import random
from . import error
BASE_LETTERS = 'abcdefghijklmnopqrstuvwxyz'
BASE_SYMBOLS = ' 1234567890!@#$%^&*()-_=+[{]};:\'"<,>.?/`~|\\'
def genCharKeys():
    """Generate the four letter-substitution keys for SuperEncrypt.

    Each key is an independent random permutation of BASE_LETTERS.

    Returns:
        list[str]: four shuffled 26-letter key strings.
    """
    base = list(BASE_LETTERS)
    keyList = []
    # Re-shuffling the same working list four times yields four
    # independent uniform permutations, exactly as the unrolled
    # shuffle/join/shuffle/join sequence did.
    for _ in range(4):
        random.shuffle(base)
        keyList.append(''.join(base))
    return keyList
def genSymKey():
    """Return one random permutation of BASE_SYMBOLS as a string."""
    chars = list(BASE_SYMBOLS)
    random.shuffle(chars)
    return ''.join(chars)
def checkCharKeys(keyList):
    """Validate the four character keys used by SuperEncrypt.

    Each of the first four entries of *keyList* must be a permutation of
    BASE_LETTERS; embedded newlines are stripped before checking (keys may
    have been read back from a file).

    Args:
        keyList: sequence of at least four key strings.

    Raises:
        error.InvalidKeyListError: if any key is not a permutation of
            BASE_LETTERS.
        IndexError: if fewer than four keys are supplied (same as before).
    """
    base = "".join(sorted(BASE_LETTERS))
    # Indexing 0..3 explicitly (rather than slicing) keeps the original
    # IndexError behavior for under-sized key lists.
    for i in range(4):
        key = keyList[i].replace("\n", '')
        if "".join(sorted(key)) != base:
            raise error.InvalidKeyListError()
def checkSymKey(symKey: str):
    """Raise InvalidSymbolKeyError unless *symKey* is a permutation of BASE_SYMBOLS."""
    # Two sorted character lists are equal exactly when the strings are
    # permutations of each other.
    if sorted(symKey) != sorted(BASE_SYMBOLS):
        raise error.InvalidSymbolKeyError()
class SuperEncrypt():
    """Substitution cipher: four letter rounds plus one symbol round.

    Encryption applies key1..key4 (each a permutation of BASE_LETTERS) in
    order, then key5 (a permutation of BASE_SYMBOLS). Decryption applies the
    inverse letter substitutions in reverse order (key4..key1) and then the
    inverse symbol substitution; because the letter and symbol alphabets are
    disjoint, the symbol round commutes with the letter rounds, so this
    matches the original round ordering exactly.
    """

    def __init__(self, keyCharList, keySym: str):
        """Store the four character keys and the symbol key.

        Args:
            keyCharList: list of four BASE_LETTERS permutations.
            keySym: one BASE_SYMBOLS permutation.
        """
        self.keyCharList = keyCharList
        self.key1 = self.keyCharList[0]
        self.key2 = self.keyCharList[1]
        self.key3 = self.keyCharList[2]
        self.key4 = self.keyCharList[3]
        self.key5 = keySym

    @staticmethod
    def _substitute(text: str, src: str, dst: str) -> str:
        """Map each character of *text* found in *src* to the character at
        the same index in *dst*, preserving upper/lower case.

        Characters not present in *src* (after lowercasing) pass through
        unchanged — this is what lets letter rounds ignore symbols and the
        symbol round ignore letters.
        """
        result = ''
        for char in text:
            lower = char.lower()
            if lower not in src:
                result += char
            else:
                mapped = dst[src.find(lower)]
                result += mapped.upper() if char.isupper() else mapped
        return result

    def encrypt(self, msg: str) -> str:
        """Encrypt *msg*: four letter rounds, then the symbol round.

        Raises:
            error.InvalidKeyListError / error.InvalidSymbolKeyError: if the
                stored keys are not valid permutations.
        """
        checkCharKeys(self.keyCharList)
        checkSymKey(self.key5)
        out = msg
        for key in (self.key1, self.key2, self.key3, self.key4):
            out = self._substitute(out, BASE_LETTERS, key)
        return self._substitute(out, BASE_SYMBOLS, self.key5)

    def decrypt(self, msg: str) -> str:
        """Decrypt *msg*: inverse letter rounds in reverse order, then the
        inverse symbol round (substituting from key back to base alphabet).

        Raises:
            error.InvalidKeyListError / error.InvalidSymbolKeyError: if the
                stored keys are not valid permutations.
        """
        checkCharKeys(self.keyCharList)
        checkSymKey(self.key5)
        out = msg
        for key in (self.key4, self.key3, self.key2, self.key1):
            out = self._substitute(out, key, BASE_LETTERS)
        return self._substitute(out, self.key5, BASE_SYMBOLS)
| 7,773 | 0 | 199 |
b4b57c521467495188566db029e82728527ac2ce | 21 | py | Python | devel/lib/python2.7/dist-packages/rosserial_mbed/srv/__init__.py | ankit131199/Rebellious-Cowards | 56ec395147f2fc59a26669a74a04fe02227bc7b7 | [
"BSD-2-Clause"
] | 3 | 2021-08-20T03:25:37.000Z | 2022-03-31T02:47:28.000Z | devel/lib/python2.7/dist-packages/rosserial_mbed/srv/__init__.py | ankit131199/Rebellious-Cowards | 56ec395147f2fc59a26669a74a04fe02227bc7b7 | [
"BSD-2-Clause"
] | 30 | 2020-11-27T23:12:12.000Z | 2021-04-25T15:37:42.000Z | devel/lib/python2.7/dist-packages/rosserial_mbed/srv/__init__.py | ankit131199/Rebellious-Cowards | 56ec395147f2fc59a26669a74a04fe02227bc7b7 | [
"BSD-2-Clause"
] | 3 | 2020-10-01T15:22:00.000Z | 2020-10-01T17:06:55.000Z | from ._Test import *
| 10.5 | 20 | 0.714286 | from ._Test import *
| 0 | 0 | 0 |
39862178fd723b90f84b5890435898f345b96a59 | 4,479 | py | Python | Exercises.py | cnaseeb/Pythonify | c4f2206e660f5edf2b069b6cfb9a4b504cb6c685 | [
"Apache-2.0"
] | null | null | null | Exercises.py | cnaseeb/Pythonify | c4f2206e660f5edf2b069b6cfb9a4b504cb6c685 | [
"Apache-2.0"
] | null | null | null | Exercises.py | cnaseeb/Pythonify | c4f2206e660f5edf2b069b6cfb9a4b504cb6c685 | [
"Apache-2.0"
] | null | null | null | # The code below almost works
name = input("Enter your name")
print("Hello", name)
##2
# This first line is provided for you
hrs = input("Enter Hours:")
# hrs = float(hrs) #use the one in line 9 instead
ratePerHour = input("Enter rate per hour:")
# rateperHour = float(ratePerHour) #use the one in line 9 instead
# you will hit the following error, if you don't convert the inputs to float
# TypeError: can't multiply sequence by non-int of type 'str' on line 5
grossPay = float(hrs) * float(ratePerHour)
print("Pay:", grossPay)
##3.1
hrs = input("Enter Hours:")
ratePerHour = input("Enter rate per hour:")
try:
fhrs = float(hrs)
fratePerHour = float(ratePerHour)
except:
print("Error, please enter numeric input")
quit()
#print(fhrs, fratePerHour)
#Pay = float(hrs) * float(ratePerHour)
if fhrs > 40:
PayNormal = fhrs * fratePerHour
PayExtra = (fhrs - 40) * (fratePerHour * 0.5)
Pay = PayExtra + PayNormal
else:
Pay = fhrs * fratePerHour
print(Pay)
### 3.3
"""3.3 Write a program to prompt for a score between 0.0 and 1.0. If the score is out of range, print an error. If the score is between 0.0 and 1.0, print a grade using the following table:
Score Grade
>= 0.9 A
>= 0.8 B
>= 0.7 C
>= 0.6 D
< 0.6 F
If the user enters a value out of range, print a suitable error message and exit. For the test, enter a score of 0.85."""
### done
score = input("Enter Score: ")
score = float(score)
#if score in range(0.0,0.9): #only ints
if 0.0 <= score and score <= 1.0:
if score >= 0.9 and score <= 1.0:
print("A")
elif score >= 0.8 and score < 0.9:
print("B")
elif score >= 0.7 and score < 0.8:
print("C")
elif score >= 0.6 and score < 0.7:
print("D")
elif score < 0.6:
print("F")
else:
print("Error, please enter the score in the range of 0.0 and 1.0")
quit()
### 4.6
"""
4.6 Write a program to prompt the user for hours and rate per hour using input to compute gross pay. Pay should be the normal rate for
hours up to 40 and time-and-a-half for the hourly rate for all hours worked above 40 hours. Put the logic to do the computation of pay in a
function called computepay() and use the function to do the computation. The function should return a value. Use 45 hours and a rate of
10.50 per hour to test the program (the pay should be 498.75). You should use input to read a string and float() to convert the string to a
number. Do not worry about error checking the user input unless you want to - you can assume the user types numbers properly. Do not name
your variable sum or use the sum() function."""
p = computepay(10,20)
print("Pay", p)
### 5.1
# print Total, count of numbers, and their average entered by a user
# also do verification
num = 0
total = 0
while True :
sval = input('Enter a number')
if sval == 'done' :
break
try:
fval = float(sval)
except:
print('Invalid input')
continue
num = num + 1
total = total + fval
print(total, num, total/num)
### 5.2
"""
5.2 Write a program that repeatedly prompts a user for integer numbers until the user enters 'done'. Once 'done' is entered, print out the
largest and smallest of the numbers. If the user enters anything other than a valid number catch it with a try/except and put out an
appropriate message and ignore the number. Enter 7, 2, bob, 10, and 4 and match the output below.
"""
largest = None
smallest = None
count = 0
while True:
num = input("Enter a number: ")
if num == "done" : break
else:
try:
ival = int(num)
except:
print('Invalid input')
continue
if count == 0 and smallest is None:
smallest = ival
if ival < smallest:
smallest = ival
elif largest is None or ival > largest:
largest = ival
count = count + 1
#print(num)
print("Maximum is", largest)
print("Minimum is", smallest)
| 29.084416 | 189 | 0.652154 | # The code below almost works
name = input("Enter your name")
print("Hello", name)
##2
# This first line is provided for you
hrs = input("Enter Hours:")
# hrs = float(hrs) #use the one in line 9 instead
ratePerHour = input("Enter rate per hour:")
# rateperHour = float(ratePerHour) #use the one in line 9 instead
# you will hit the following error, if you don't convert the inputs to float
# TypeError: can't multiply sequence by non-int of type 'str' on line 5
grossPay = float(hrs) * float(ratePerHour)
print("Pay:", grossPay)
##3.1
hrs = input("Enter Hours:")
ratePerHour = input("Enter rate per hour:")
try:
fhrs = float(hrs)
fratePerHour = float(ratePerHour)
except:
print("Error, please enter numeric input")
quit()
#print(fhrs, fratePerHour)
#Pay = float(hrs) * float(ratePerHour)
if fhrs > 40:
PayNormal = fhrs * fratePerHour
PayExtra = (fhrs - 40) * (fratePerHour * 0.5)
Pay = PayExtra + PayNormal
else:
Pay = fhrs * fratePerHour
print(Pay)
### 3.3
"""3.3 Write a program to prompt for a score between 0.0 and 1.0. If the score is out of range, print an error. If the score is between 0.0 and 1.0, print a grade using the following table:
Score Grade
>= 0.9 A
>= 0.8 B
>= 0.7 C
>= 0.6 D
< 0.6 F
If the user enters a value out of range, print a suitable error message and exit. For the test, enter a score of 0.85."""
### done
score = input("Enter Score: ")
score = float(score)
#if score in range(0.0,0.9): #only ints
if 0.0 <= score and score <= 1.0:
if score >= 0.9 and score <= 1.0:
print("A")
elif score >= 0.8 and score < 0.9:
print("B")
elif score >= 0.7 and score < 0.8:
print("C")
elif score >= 0.6 and score < 0.7:
print("D")
elif score < 0.6:
print("F")
else:
print("Error, please enter the score in the range of 0.0 and 1.0")
quit()
### 4.6
"""
4.6 Write a program to prompt the user for hours and rate per hour using input to compute gross pay. Pay should be the normal rate for
hours up to 40 and time-and-a-half for the hourly rate for all hours worked above 40 hours. Put the logic to do the computation of pay in a
function called computepay() and use the function to do the computation. The function should return a value. Use 45 hours and a rate of
10.50 per hour to test the program (the pay should be 498.75). You should use input to read a string and float() to convert the string to a
number. Do not worry about error checking the user input unless you want to - you can assume the user types numbers properly. Do not name
your variable sum or use the sum() function."""
def computepay(hrs, ratePerHour):
    """Compute gross pay from hours worked and hourly rate.

    Hours up to 40 are paid at the normal rate; hours beyond 40 earn an
    extra half-rate on top (time-and-a-half), so computepay(45, 10.50)
    returns 498.75.

    Bug fixed: the previous version ignored both parameters and re-prompted
    the user with input(), so callers like computepay(10, 20) never used
    their arguments.

    Args:
        hrs: hours worked (number or numeric string).
        ratePerHour: pay rate per hour (number or numeric string).

    Returns:
        float: the gross pay.

    Raises:
        ValueError: if either argument cannot be converted to float.
    """
    fhrs = float(hrs)
    fratePerHour = float(ratePerHour)
    if fhrs > 40:
        # All hours at the normal rate, plus a 0.5x premium on overtime.
        pay = fhrs * fratePerHour + (fhrs - 40) * (fratePerHour * 0.5)
    else:
        pay = fhrs * fratePerHour
    return pay
print("Pay", p)
### 5.1
# print Total, count of numbers, and their average entered by a user
# also do verification
num = 0
total = 0
while True :
sval = input('Enter a number')
if sval == 'done' :
break
try:
fval = float(sval)
except:
print('Invalid input')
continue
num = num + 1
total = total + fval
print(total, num, total/num)
### 5.2
"""
5.2 Write a program that repeatedly prompts a user for integer numbers until the user enters 'done'. Once 'done' is entered, print out the
largest and smallest of the numbers. If the user enters anything other than a valid number catch it with a try/except and put out an
appropriate message and ignore the number. Enter 7, 2, bob, 10, and 4 and match the output below.
"""
largest = None
smallest = None
count = 0
while True:
num = input("Enter a number: ")
if num == "done" : break
else:
try:
ival = int(num)
except:
print('Invalid input')
continue
if count == 0 and smallest is None:
smallest = ival
if ival < smallest:
smallest = ival
elif largest is None or ival > largest:
largest = ival
count = count + 1
#print(num)
print("Maximum is", largest)
print("Minimum is", smallest)
| 520 | 0 | 23 |
9a50bec54ba634d2c7dab08f7f2245948c044388 | 5,352 | py | Python | setup.py | kenzanlabs/spinikube | 07850a30769139c846e187aeeee9f0d24dce897e | [
"Apache-2.0"
] | 113 | 2016-09-03T02:53:37.000Z | 2021-04-20T00:18:49.000Z | setup.py | kenzanlabs/spinikube | 07850a30769139c846e187aeeee9f0d24dce897e | [
"Apache-2.0"
] | 27 | 2016-08-04T20:28:41.000Z | 2018-08-22T10:04:15.000Z | setup.py | kenzanlabs/spinikube | 07850a30769139c846e187aeeee9f0d24dce897e | [
"Apache-2.0"
] | 47 | 2016-08-16T15:34:17.000Z | 2020-06-13T17:57:37.000Z | #!/usr/local/bin/python
import os
import time
import collections
import subprocess
o("minikube delete")
o("minikube start --memory 10000 --cpus 4 --disk-size=60g")
#o("kubectl delete namespace spinnaker")
#time.sleep(30)
o("kubectl create namespace spinnaker")
c("applications/kubedash/bundle.yaml")
c("applications/tectonic/pull.yml")
c("applications/tectonic/tectonic-console.yaml")
c("applications/tectonic/tectonic.json")
components = ('jenkins', 'registry', 'registryui', 'debweb')
for component in components:
c("applications/" + component + "/deployment.yml")
c("applications/" + component + "/service.json")
c("applications/kubeproxy/pod.yml")
components = ('cassandra', 'redis')
for component in components:
c("applications/spinnaker/" + component + "/deployment.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
os.system("rm -rf minikube")
os.system("mkdir minikube")
os.system("cp ~/.minikube/apiserver.crt minikube/apiserver.crt")
os.system("cp ~/.minikube/apiserver.key minikube/apiserver.key")
os.system("cp ~/.minikube/ca.crt minikube/ca.crt")
ip = os.popen('minikube ip').read().strip()
kubeConfig = """
apiVersion: v1
clusters:
- cluster:
certificate-authority: /root/.kube/ca.crt
server: https://""" + ip + """:8443
name: minikube
contexts:
- context:
cluster: minikube
user: minikube
name: minikube
current-context: minikube
kind: Config
preferences: {}
users:
- name: minikube
user:
client-certificate: /root/.kube/apiserver.crt
client-key: /root/.kube/apiserver.key
"""
with open("minikube/config", "w") as text_file:
text_file.write(kubeConfig)
time.sleep(1)
os.system("kubectl create secret generic spinnaker-config --from-file=./config/echo.yml --from-file=./config/igor.yml --from-file=./config/gate.yml --from-file=./config/orca.yml --from-file=./config/rosco.yml --from-file=./config/front50.yml --from-file=./config/clouddriver.yml --namespace spinnaker")
os.system("kubectl create secret generic minikube-config --from-file=./minikube/config --from-file=./minikube/ca.crt --from-file=./minikube/apiserver.crt --from-file=./minikube/apiserver.key --namespace spinnaker")
os.system("rm -rf minikube")
#print "seeding spinnaking images"
o("./podexec jenkins /usr/local/jenkins/jobs/seed.sh")
components = ('front50', 'clouddriver', 'rosco', 'orca', 'igor', 'gate', 'deck')
for component in components:
c("applications/spinnaker/" + component + "/controller.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
time.sleep(2)
services = '''
{
"services" : [
{
"title": "Spinnaker Dashboard",
"description": "Spinnaker UI",
"link": "''' + cmdOut("minikube service spinnaker-deck --namespace spinnaker --url") + '''"
},
{
"title": "Kubernetes Dashboard",
"description": "Management UI",
"link": "''' + cmdOut("minikube service kubernetes-dashboard --namespace kube-system --url") + '''"
},
{
"title": "Tectonic Console",
"description": "Alternative management UI",
"link": "''' + cmdOut("minikube service tectonic --namespace spinnaker --url") + '''"
},
{
"title": "Jenkins",
"description": "Automation Server",
"link": "''' + cmdOut("minikube service spinnaker-jenkins --namespace spinnaker --url") + '''"
},
{
"title": "Cluster Performace",
"description": "Performance analytics UI",
"link": "''' + cmdOut("minikube service kubedash --namespace spinnaker --url") + '''"
},
{
"title": "Container Image Registry",
"description": "Local image repository",
"link": "''' + cmdOut("minikube service spinnaker-registryui --namespace spinnaker --url") + '''"
},
{
"title": "Apt Repository",
"description": "Local apt repository",
"link": "''' + cmdOut("minikube service spinnaker-debweb --namespace spinnaker --url") + '''"
}
]
}
'''
os.system("rm -f applications/start/services.json")
with open("applications/start/services.json", "w") as text_file:
text_file.write(services)
os.system("kubectl create secret generic start-config --from-file=./applications/start/index.html --from-file=./applications/start/services.json --namespace spinnaker")
#cqlsh -e "COPY front50.pipeline TO '/front50.pipeline.csv' WITH HEADER = 'true'"
c("applications/start/deployment.yml")
c("applications/start/service.json")
poll()
#add example pipeline
o("./podexec spinnaker apt-get update")
o("./podexec spinnaker apt-get install -y git")
o("./podexec spinnaker git clone git@github.com:moondev/SpiniKube.git /SpiniKube")
o("./podexec spinnaker cqlsh -e 'COPY front50.pipeline FROM \'/SpiniKube/pipelines/pipelines.csv\' WITH HEADER = \'true\';'")
o("minikube service spinnaker-start -n spinnaker") | 28.92973 | 302 | 0.678812 | #!/usr/local/bin/python
import os
import time
import collections
import subprocess
def cmdOut(cmd):
    """Run *cmd* through the shell and return its stdout, stripped."""
    output = subprocess.check_output(cmd, shell=True)
    return output.strip()
def poll():
    """Block until no pod (in any namespace) reports ContainerCreating,
    redrawing the current pod table every 2 seconds."""
    creating = "ContainerCreating"  # seed value so the loop body runs at least once
    while creating.find("ContainerCreating") != -1:
        creating = cmdOut("kubectl get pods --all-namespaces")
        os.system("clear")
        print creating
        print "\nwaiting for pods to start..."
        time.sleep(2)
def o(cmd):
    """Echo *cmd*, execute it via the shell, then sleep 2s to let it settle."""
    print "Running: " + cmd
    os.system(cmd)
    time.sleep(2)
def k(cmd):
    """Run a kubectl subcommand against the spinnaker namespace."""
    full_cmd = "kubectl {0} --namespace spinnaker".format(cmd)
    o(full_cmd)
    time.sleep(2)
def c(cmd):
    """kubectl-create the given manifest file in the spinnaker namespace."""
    create_cmd = "kubectl create -f {0} --namespace spinnaker".format(cmd)
    o(create_cmd)
    time.sleep(2)
o("minikube delete")
o("minikube start --memory 10000 --cpus 4 --disk-size=60g")
#o("kubectl delete namespace spinnaker")
#time.sleep(30)
o("kubectl create namespace spinnaker")
c("applications/kubedash/bundle.yaml")
c("applications/tectonic/pull.yml")
c("applications/tectonic/tectonic-console.yaml")
c("applications/tectonic/tectonic.json")
components = ('jenkins', 'registry', 'registryui', 'debweb')
for component in components:
c("applications/" + component + "/deployment.yml")
c("applications/" + component + "/service.json")
c("applications/kubeproxy/pod.yml")
components = ('cassandra', 'redis')
for component in components:
c("applications/spinnaker/" + component + "/deployment.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
os.system("rm -rf minikube")
os.system("mkdir minikube")
os.system("cp ~/.minikube/apiserver.crt minikube/apiserver.crt")
os.system("cp ~/.minikube/apiserver.key minikube/apiserver.key")
os.system("cp ~/.minikube/ca.crt minikube/ca.crt")
ip = os.popen('minikube ip').read().strip()
kubeConfig = """
apiVersion: v1
clusters:
- cluster:
certificate-authority: /root/.kube/ca.crt
server: https://""" + ip + """:8443
name: minikube
contexts:
- context:
cluster: minikube
user: minikube
name: minikube
current-context: minikube
kind: Config
preferences: {}
users:
- name: minikube
user:
client-certificate: /root/.kube/apiserver.crt
client-key: /root/.kube/apiserver.key
"""
with open("minikube/config", "w") as text_file:
text_file.write(kubeConfig)
time.sleep(1)
os.system("kubectl create secret generic spinnaker-config --from-file=./config/echo.yml --from-file=./config/igor.yml --from-file=./config/gate.yml --from-file=./config/orca.yml --from-file=./config/rosco.yml --from-file=./config/front50.yml --from-file=./config/clouddriver.yml --namespace spinnaker")
os.system("kubectl create secret generic minikube-config --from-file=./minikube/config --from-file=./minikube/ca.crt --from-file=./minikube/apiserver.crt --from-file=./minikube/apiserver.key --namespace spinnaker")
os.system("rm -rf minikube")
#print "seeding spinnaking images"
o("./podexec jenkins /usr/local/jenkins/jobs/seed.sh")
components = ('front50', 'clouddriver', 'rosco', 'orca', 'igor', 'gate', 'deck')
for component in components:
c("applications/spinnaker/" + component + "/controller.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
time.sleep(2)
services = '''
{
"services" : [
{
"title": "Spinnaker Dashboard",
"description": "Spinnaker UI",
"link": "''' + cmdOut("minikube service spinnaker-deck --namespace spinnaker --url") + '''"
},
{
"title": "Kubernetes Dashboard",
"description": "Management UI",
"link": "''' + cmdOut("minikube service kubernetes-dashboard --namespace kube-system --url") + '''"
},
{
"title": "Tectonic Console",
"description": "Alternative management UI",
"link": "''' + cmdOut("minikube service tectonic --namespace spinnaker --url") + '''"
},
{
"title": "Jenkins",
"description": "Automation Server",
"link": "''' + cmdOut("minikube service spinnaker-jenkins --namespace spinnaker --url") + '''"
},
{
"title": "Cluster Performace",
"description": "Performance analytics UI",
"link": "''' + cmdOut("minikube service kubedash --namespace spinnaker --url") + '''"
},
{
"title": "Container Image Registry",
"description": "Local image repository",
"link": "''' + cmdOut("minikube service spinnaker-registryui --namespace spinnaker --url") + '''"
},
{
"title": "Apt Repository",
"description": "Local apt repository",
"link": "''' + cmdOut("minikube service spinnaker-debweb --namespace spinnaker --url") + '''"
}
]
}
'''
os.system("rm -f applications/start/services.json")
with open("applications/start/services.json", "w") as text_file:
text_file.write(services)
os.system("kubectl create secret generic start-config --from-file=./applications/start/index.html --from-file=./applications/start/services.json --namespace spinnaker")
#cqlsh -e "COPY front50.pipeline TO '/front50.pipeline.csv' WITH HEADER = 'true'"
c("applications/start/deployment.yml")
c("applications/start/service.json")
poll()
#add example pipeline
o("./podexec spinnaker apt-get update")
o("./podexec spinnaker apt-get install -y git")
o("./podexec spinnaker git clone git@github.com:moondev/SpiniKube.git /SpiniKube")
o("./podexec spinnaker cqlsh -e 'COPY front50.pipeline FROM \'/SpiniKube/pipelines/pipelines.csv\' WITH HEADER = \'true\';'")
o("minikube service spinnaker-start -n spinnaker") | 457 | 0 | 115 |
e9a7d2f66b4f8dbaa2eb22e345ef51c2d6c7fe14 | 2,360 | py | Python | src/Line.py | npanuhin/BIOCAD-BWA | 50f56fd7d08b8ad1247934c902fb137f3c28cdf8 | [
"MIT"
] | null | null | null | src/Line.py | npanuhin/BIOCAD-BWA | 50f56fd7d08b8ad1247934c902fb137f3c28cdf8 | [
"MIT"
] | null | null | null | src/Line.py | npanuhin/BIOCAD-BWA | 50f56fd7d08b8ad1247934c902fb137f3c28cdf8 | [
"MIT"
] | null | null | null | from typing import List
from collections import deque
class Line:
"""
Properties:
start_x {0}
start_y {1}
end_x {2}
end_y {3}
dots = [dot1, ..., dotN] {4}
coords = (start_x, start_y, end_x, end_y)
"""
@property
# @property
# def x1(self):
# return self.start_x
# @property
# def y1(self):
# return self.start_y
# @property
# def x2(self):
# return self.end_x
# @property
# def y2(self):
# return self.end_y
@property
@property
@property
@property
| 25.106383 | 84 | 0.555932 | from typing import List
from collections import deque
class Line:
"""
Properties:
start_x {0}
start_y {1}
end_x {2}
end_y {3}
dots = [dot1, ..., dotN] {4}
coords = (start_x, start_y, end_x, end_y)
"""
def __init__(self, start_x=None, start_y=None, end_x=None, end_y=None, dots=[]):
self.start_x = start_x
self.start_y = start_y
self.end_x = end_x
self.end_y = end_y
self.dots = dots
def __repr__(self):
return "Line(start_x={}, start_y={}, end_x={}, end_y={}, dots=[{}])".format(
self.start_x, self.start_y, self.end_x, self.end_y, len(self.dots)
)
@property
def coords(self):
return self.start_x, self.start_y, self.end_x, self.end_y
# @property
# def x1(self):
# return self.start_x
# @property
# def y1(self):
# return self.start_y
# @property
# def x2(self):
# return self.end_x
# @property
# def y2(self):
# return self.end_y
@property
def center_x(self):
return (self.start_x + self.end_x) // 2
@property
def center_y(self):
return (self.start_y + self.end_y) // 2
def isTiltedCorrectly(self):
return self.start_y <= self.end_y
@property
def k(self):
return (self.end_y - self.start_y) / (self.end_x - self.start_x)
@property
def b(self):
return self.end_y - self.end_x * self.k
def copyCoords(self):
return Line(self.start_x, self.start_y, self.end_x, self.end_y, dots=[])
def shift(self, dx=0, dy=0):
self.start_x += dx
self.start_y += dy
self.end_x += dx
self.end_y += dy
for i in range(len(self.dots)):
self.dots[i][0] += dx
self.dots[i][1] += dy
def rotateY(self, rotation_center, line=True, dots=False):
if line:
self.start_y -= (self.start_y - rotation_center) * 2
self.end_y -= (self.end_y - rotation_center) * 2
if dots:
for i in range(len(self.dots)):
self.dots[i][1] -= (self.dots[i][1] - rotation_center) * 2
def shiftLines(lines, count) -> List[Line]:
result = deque(lines)
for _ in range(count):
result.append(result.popleft())
return list(result)
| 1,434 | 0 | 315 |
84581387c769a73a11b21a07890f917ea1153434 | 11,312 | py | Python | Interfaz/UIPyfiles/VistaAnalisisDatos.py | Dumaru/ExcelReports | 989d8864d77e2b84dc73f16205894a6afa8e11ab | [
"MIT"
] | 1 | 2020-10-16T19:54:17.000Z | 2020-10-16T19:54:17.000Z | Interfaz/UIPyfiles/VistaAnalisisDatos.py | Dumaru/ExcelReports | 989d8864d77e2b84dc73f16205894a6afa8e11ab | [
"MIT"
] | null | null | null | Interfaz/UIPyfiles/VistaAnalisisDatos.py | Dumaru/ExcelReports | 989d8864d77e2b84dc73f16205894a6afa8e11ab | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\VistaAnalisisDatos.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from UI.Recursos import images_rc
# import DATOS IMAGENES_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
VistaAnalisisDatos = QtWidgets.QMainWindow()
ui = Ui_VistaAnalisisDatos()
ui.setupUi(VistaAnalisisDatos)
VistaAnalisisDatos.show()
sys.exit(app.exec_())
| 57.714286 | 144 | 0.726397 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\VistaAnalisisDatos.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from UI.Recursos import images_rc
class Ui_VistaAnalisisDatos(object):
def setupUi(self, VistaAnalisisDatos):
VistaAnalisisDatos.setObjectName("VistaAnalisisDatos")
VistaAnalisisDatos.resize(1280, 720)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(VistaAnalisisDatos.sizePolicy().hasHeightForWidth())
VistaAnalisisDatos.setSizePolicy(sizePolicy)
VistaAnalisisDatos.setMinimumSize(QtCore.QSize(1280, 720))
VistaAnalisisDatos.setStyleSheet("background-color: rgb(255, 255, 255);")
self.centralwidget = QtWidgets.QWidget(VistaAnalisisDatos)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout_2.setObjectName("gridLayout_2")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setStyleSheet("image: url(:/Imagenes/Logo.png);")
self.label.setText("")
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 3, 2, 1, 1)
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 0, 0, 1, 1)
self.gridLayout_4 = QtWidgets.QGridLayout()
self.gridLayout_4.setObjectName("gridLayout_4")
self.gridLayout.addLayout(self.gridLayout_4, 0, 1, 1, 1)
self.label_2 = QtWidgets.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(20)
self.label_2.setFont(font)
self.label_2.setStyleSheet("background-color: rgb(255, 255, 255);")
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1, QtCore.Qt.AlignHCenter)
self.gridLayout_5 = QtWidgets.QGridLayout()
self.gridLayout_5.setObjectName("gridLayout_5")
self.pushButtonIMSIS = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Corbel")
font.setPointSize(10)
self.pushButtonIMSIS.setFont(font)
self.pushButtonIMSIS.setStyleSheet("background-color: rgb(255, 255, 127);\n"
"")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/Iconos/icons8-marcar-como-no-oculto-96.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButtonIMSIS.setIcon(icon)
self.pushButtonIMSIS.setObjectName("pushButtonIMSIS")
self.gridLayout_5.addWidget(self.pushButtonIMSIS, 2, 1, 1, 1)
self.pushButtonGraficar = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Corbel")
font.setPointSize(10)
self.pushButtonGraficar.setFont(font)
self.pushButtonGraficar.setStyleSheet("background-color: rgb(255, 255, 127);\n"
"")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/Iconos/graficas.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButtonGraficar.setIcon(icon1)
self.pushButtonGraficar.setIconSize(QtCore.QSize(30, 30))
self.pushButtonGraficar.setObjectName("pushButtonGraficar")
self.gridLayout_5.addWidget(self.pushButtonGraficar, 4, 1, 1, 1)
self.tableWidgetVerDatosFiltrados = QtWidgets.QTableWidget(self.centralwidget)
self.tableWidgetVerDatosFiltrados.setStyleSheet("background-color: rgb(255, 255, 255);")
self.tableWidgetVerDatosFiltrados.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
self.tableWidgetVerDatosFiltrados.setTextElideMode(QtCore.Qt.ElideRight)
self.tableWidgetVerDatosFiltrados.setObjectName("tableWidgetVerDatosFiltrados")
self.tableWidgetVerDatosFiltrados.setColumnCount(8)
self.tableWidgetVerDatosFiltrados.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidgetVerDatosFiltrados.setHorizontalHeaderItem(7, item)
self.gridLayout_5.addWidget(self.tableWidgetVerDatosFiltrados, 4, 0, 1, 1)
self.pushButtonGuardarReporte = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Corbel")
font.setPointSize(10)
self.pushButtonGuardarReporte.setFont(font)
self.pushButtonGuardarReporte.setStyleSheet("background-color: rgb(255, 255, 127);")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/Iconos/icons8-hand-box-256.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButtonGuardarReporte.setIcon(icon2)
self.pushButtonGuardarReporte.setIconSize(QtCore.QSize(30, 30))
self.pushButtonGuardarReporte.setObjectName("pushButtonGuardarReporte")
self.gridLayout_5.addWidget(self.pushButtonGuardarReporte, 1, 0, 1, 1)
self.pushButtonReporte = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Corbel")
font.setPointSize(10)
self.pushButtonReporte.setFont(font)
self.pushButtonReporte.setStyleSheet("background-color: rgb(255, 255, 127);")
self.pushButtonReporte.setIcon(icon)
self.pushButtonReporte.setObjectName("pushButtonReporte")
self.gridLayout_5.addWidget(self.pushButtonReporte, 1, 1, 1, 1)
self.pushButtonGuardarDatosIMISISIMEI = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setFamily("Corbel")
font.setPointSize(10)
self.pushButtonGuardarDatosIMISISIMEI.setFont(font)
self.pushButtonGuardarDatosIMISISIMEI.setStyleSheet("background-color: rgb(255, 255, 127);")
self.pushButtonGuardarDatosIMISISIMEI.setIcon(icon2)
self.pushButtonGuardarDatosIMISISIMEI.setIconSize(QtCore.QSize(30, 30))
self.pushButtonGuardarDatosIMISISIMEI.setObjectName("pushButtonGuardarDatosIMISISIMEI")
self.gridLayout_5.addWidget(self.pushButtonGuardarDatosIMISISIMEI, 2, 0, 1, 1)
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setObjectName("label_4")
self.gridLayout_5.addWidget(self.label_4, 5, 0, 1, 1)
self.gridLayout.addLayout(self.gridLayout_5, 3, 0, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 0, 1, 1, 1)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem, 0, 0, 1, 1)
VistaAnalisisDatos.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(VistaAnalisisDatos)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1280, 21))
self.menubar.setObjectName("menubar")
self.menuVolver_A_Filtros = QtWidgets.QMenu(self.menubar)
self.menuVolver_A_Filtros.setObjectName("menuVolver_A_Filtros")
VistaAnalisisDatos.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(VistaAnalisisDatos)
self.statusbar.setObjectName("statusbar")
VistaAnalisisDatos.setStatusBar(self.statusbar)
self.actionVer_Filtros = QtWidgets.QAction(VistaAnalisisDatos)
self.actionVer_Filtros.setObjectName("actionVer_Filtros")
self.menuVolver_A_Filtros.addAction(self.actionVer_Filtros)
self.menubar.addAction(self.menuVolver_A_Filtros.menuAction())
self.retranslateUi(VistaAnalisisDatos)
QtCore.QMetaObject.connectSlotsByName(VistaAnalisisDatos)
def retranslateUi(self, VistaAnalisisDatos):
_translate = QtCore.QCoreApplication.translate
VistaAnalisisDatos.setWindowTitle(_translate("VistaAnalisisDatos", "MainWindow"))
self.label_3.setText(_translate("VistaAnalisisDatos", "<html><head/><body><p><img src=\":/Iconos/logoRaptor.png\"/></p></body></html>"))
self.label_2.setText(_translate("VistaAnalisisDatos", "REPORTE FINAL "))
self.pushButtonIMSIS.setText(_translate("VistaAnalisisDatos", "Ver IMSIS.IMEI"))
self.pushButtonGraficar.setText(_translate("VistaAnalisisDatos", "Graficar Datos"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(0)
item.setText(_translate("VistaAnalisisDatos", "RAT"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(1)
item.setText(_translate("VistaAnalisisDatos", "Operator"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(2)
item.setText(_translate("VistaAnalisisDatos", "IMEI"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(3)
item.setText(_translate("VistaAnalisisDatos", "IMSI"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(4)
item.setText(_translate("VistaAnalisisDatos", "TA"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(5)
item.setText(_translate("VistaAnalisisDatos", "MS Power"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(6)
item.setText(_translate("VistaAnalisisDatos", "Last Lac"))
item = self.tableWidgetVerDatosFiltrados.horizontalHeaderItem(7)
item.setText(_translate("VistaAnalisisDatos", "Hits"))
self.pushButtonGuardarReporte.setText(_translate("VistaAnalisisDatos", "Generar Reporte Final"))
self.pushButtonReporte.setText(_translate("VistaAnalisisDatos", "Ver Reporte"))
self.pushButtonGuardarDatosIMISISIMEI.setText(_translate("VistaAnalisisDatos", "Generar Datos IMSIS-IMEI"))
self.label_4.setText(_translate("VistaAnalisisDatos", "<html><head/><body><p><img src=\":/Iconos/Iocom.png\"/></p></body></html>"))
self.menuVolver_A_Filtros.setTitle(_translate("VistaAnalisisDatos", "Volver"))
self.actionVer_Filtros.setText(_translate("VistaAnalisisDatos", "Ver Filtros"))
# import DATOS IMAGENES_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
VistaAnalisisDatos = QtWidgets.QMainWindow()
ui = Ui_VistaAnalisisDatos()
ui.setupUi(VistaAnalisisDatos)
VistaAnalisisDatos.show()
sys.exit(app.exec_())
| 10,647 | 15 | 76 |
0e510ebbd43fbd50d21e92622e59d0b9da6eb08e | 120 | py | Python | mtproxy/proxy/streams/wappers/__init__.py | i-Naji/mtproxy | b8fe68023cf35bc6bc8e96d797f1f39304d4e5fa | [
"MIT"
] | 16 | 2019-06-21T11:26:26.000Z | 2021-03-14T08:06:36.000Z | mtproxy/proxy/streams/wappers/__init__.py | WhymustIhaveaname/mtproxy | b8fe68023cf35bc6bc8e96d797f1f39304d4e5fa | [
"MIT"
] | 2 | 2020-02-27T16:25:41.000Z | 2021-03-14T08:31:14.000Z | mtproxy/proxy/streams/wappers/__init__.py | WhymustIhaveaname/mtproxy | b8fe68023cf35bc6bc8e96d797f1f39304d4e5fa | [
"MIT"
] | 4 | 2019-06-21T15:59:56.000Z | 2021-03-17T04:54:42.000Z | from .crypto_stream_reader import CryptoWrappedStreamReader
from .crypto_stream_writer import CryptoWrappedStreamWriter
| 40 | 59 | 0.916667 | from .crypto_stream_reader import CryptoWrappedStreamReader
from .crypto_stream_writer import CryptoWrappedStreamWriter
| 0 | 0 | 0 |
541297fe7105352dad73ba97488ae9bb7f7fcee2 | 947 | py | Python | docs/plots/extended/stereographic/mobius_add.py | SsnL/geoopt | 47e7e6b79c177e3172161afaef8424d61b917a7b | [
"Apache-2.0"
] | 438 | 2019-03-05T11:24:03.000Z | 2022-03-31T14:46:42.000Z | docs/plots/extended/stereographic/mobius_add.py | SsnL/geoopt | 47e7e6b79c177e3172161afaef8424d61b917a7b | [
"Apache-2.0"
] | 98 | 2019-03-07T21:38:24.000Z | 2022-03-25T10:48:45.000Z | docs/plots/extended/stereographic/mobius_add.py | SsnL/geoopt | 47e7e6b79c177e3172161afaef8424d61b917a7b | [
"Apache-2.0"
] | 58 | 2019-04-13T04:52:16.000Z | 2022-03-14T09:26:00.000Z | from geoopt import Stereographic
import torch
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib import rcParams
import shutil
if shutil.which("latex") is not None:
rcParams["text.latex.preamble"] = r"\usepackage{amsmath}"
rcParams["text.usetex"] = True
sns.set_style("white")
x = torch.tensor((-0.25, -0.75)) / 2
y = torch.tensor((0.65, -0.55)) / 2
manifold = Stereographic(-1)
x_plus_y = manifold.mobius_add(x, y)
circle = plt.Circle((0, 0), 1, fill=False, color="b")
plt.gca().add_artist(circle)
plt.xlim(-1.1, 1.1)
plt.ylim(-1.1, 1.1)
plt.gca().set_aspect("equal")
plt.annotate("x", x - 0.09, fontsize=15)
plt.annotate("y", y - 0.09, fontsize=15)
plt.annotate(r"$x\oplus y$", x_plus_y - torch.tensor([0.1, 0.15]), fontsize=15)
plt.arrow(0, 0, *x, width=0.01, color="r")
plt.arrow(0, 0, *y, width=0.01, color="g")
plt.arrow(0, 0, *x_plus_y, width=0.01, color="b")
plt.title(r"Addition $x\oplus y$")
plt.show()
| 27.057143 | 79 | 0.673706 | from geoopt import Stereographic
import torch
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib import rcParams
import shutil
if shutil.which("latex") is not None:
rcParams["text.latex.preamble"] = r"\usepackage{amsmath}"
rcParams["text.usetex"] = True
sns.set_style("white")
x = torch.tensor((-0.25, -0.75)) / 2
y = torch.tensor((0.65, -0.55)) / 2
manifold = Stereographic(-1)
x_plus_y = manifold.mobius_add(x, y)
circle = plt.Circle((0, 0), 1, fill=False, color="b")
plt.gca().add_artist(circle)
plt.xlim(-1.1, 1.1)
plt.ylim(-1.1, 1.1)
plt.gca().set_aspect("equal")
plt.annotate("x", x - 0.09, fontsize=15)
plt.annotate("y", y - 0.09, fontsize=15)
plt.annotate(r"$x\oplus y$", x_plus_y - torch.tensor([0.1, 0.15]), fontsize=15)
plt.arrow(0, 0, *x, width=0.01, color="r")
plt.arrow(0, 0, *y, width=0.01, color="g")
plt.arrow(0, 0, *x_plus_y, width=0.01, color="b")
plt.title(r"Addition $x\oplus y$")
plt.show()
| 0 | 0 | 0 |
3c6171802f81af4f59089aacf6f1c0531e686ce7 | 8,224 | py | Python | project/forms.py | Make-Munich/SaBoT | cabc7e2f5e0f7166d94d2ef683f75d8d3be02834 | [
"MIT"
] | null | null | null | project/forms.py | Make-Munich/SaBoT | cabc7e2f5e0f7166d94d2ef683f75d8d3be02834 | [
"MIT"
] | null | null | null | project/forms.py | Make-Munich/SaBoT | cabc7e2f5e0f7166d94d2ef683f75d8d3be02834 | [
"MIT"
] | null | null | null | from django import forms
from models import Project
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, Submit, Div, HTML, ButtonHolder
from crispy_forms.bootstrap import FormActions, StrictButton, TabHolder, Tab, PrependedText, InlineCheckboxes, InlineField
import requests
from django.contrib.auth.models import User
from django_currentuser.middleware import (
get_current_user, get_current_authenticated_user)
| 33.843621 | 301 | 0.680691 | from django import forms
from models import Project
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, Submit, Div, HTML, ButtonHolder
from crispy_forms.bootstrap import FormActions, StrictButton, TabHolder, Tab, PrependedText, InlineCheckboxes, InlineField
import requests
from django.contrib.auth.models import User
from django_currentuser.middleware import (
get_current_user, get_current_authenticated_user)
class ProjectGeneralForm(forms.ModelForm):
class Meta:
model = Project
fields = (
"projectName",
"firstname",
"lastname",
"email",
"phone",
"homepage",
"projecttype",
"language",
"hear",
"recommendation",
"generalComment"
)
def __init__(self, *args, **kwargs):
super(ProjectGeneralForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
#self.helper.form_class='form-horizontal'
self.helper.layout = Layout(
Field("projectName"),
Div(
Div('firstname', css_class='col-md-6',),
Div('lastname', css_class='col-md-6',),
css_class='row',
),
Field("email"),
Field("phone"),
Field("homepage"),
Field("projecttype"),
Field("language"),
Field("hear"),
Field("recommendation"),
Field("generalComment"),
# FormActions(Submit("Save", "Save changes"))
)
#current_user = get_current_user()
#print "user: %s" % current_user
#print User.objects.all()
#user = User.objects.get(username=get_current_user())
#print user.is_staff
#self.helper[1:3].wrap_together(Div, css_class="name-wrapper")
#self.helper['firstname'].wrap(Field, css_class="col-md-6", wrapper_class="firstname")
#self.helper['lastname'].wrap(Field, css_class="col-md-6", wrapper_class="lastname")
if self.instance is not None and self.instance.id is not None:
self.helper.add_input(Submit("Save", "Save changes"))
else:
self.helper.add_input(Submit("Save", "Register"))
class ProjectDescriptionForm(forms.ModelForm):
class Meta:
model = Project
fields = (
"descriptionDE",
"descriptionEN",
"projectArea",
"logoOrg",
"logo",
"logoTeam",
"video"
)
widgets = {
"logoOrg" : forms.widgets.FileInput,
"logo" : forms.widgets.FileInput,
"logoTeam" : forms.widgets.FileInput,
}
def __init__(self, *args, **kwargs):
super(ProjectDescriptionForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Div(HTML("<p>Think of it as a letter of introduction, your chance to grab a reader's interest. Tell them why they should care about your work. Preferably you keep it short (300 and 900 characters) You can change the text later.</p>")),
Field("descriptionDE"),
Field("descriptionEN"),
Field("projectArea"),
Div(HTML("<p>Please provide us a picture or logo of your project. We would like to promote your participation on our website.</p>"))
)
if self.instance and self.instance.logoOrg:
self.helper.layout.extend([
Field("logoOrg"),
Div(HTML("<p>Current logo:</p><img src=\"{{object.logoOrg.url}}\" style=\"max-height:200px\"/>"), css_class = "control-group"),
])
else:
self.helper.layout.append(
Div(Div(Field("logoOrg"),css_class = "col-md-2"), css_class = "row"),
)
if self.instance and self.instance.logo:
self.helper.layout.extend([
Field("logo"),
Div(HTML("<p>Current logo:</p><img src=\"{{object.logo.url}}\" style=\"max-height:200px\"/>"), css_class = "control-group"),
])
else:
self.helper.layout.append(
Div(Div(Field("logo"),css_class = "col-md-2"), css_class = "row"),
)
if self.instance and self.instance.logoTeam:
self.helper.layout.extend([
Field("logoTeam"),
Div(HTML("<p>Current logo:</p><img src=\"{{object.logoTeam.url}}\" style=\"max-height:200px\"/>"), css_class = "control-group"),
])
else:
self.helper.layout.append(
Div(Div(Field("logoTeam"),css_class = "col-md-2"), css_class = "row"),
)
self.helper.add_input(Submit("Save", "Save changes"))
class ProjectBoothForm(forms.ModelForm):
class Meta:
model = Project
fields = (
"boothDescription",
"boothTables",
"boothChairs",
"boothBenches",
"boothPower",
"boothExtras",
"boothExtrasComment",
"boothOwn",
"boothSafety"
)
def __init__(self, *args, **kwargs):
super(ProjectBoothForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Div(HTML("<p>Maker booths are the core of Make Munich. Now we need some information from you to get an idea in what you are planning.</p><p>Maker Booth: A standard booth consists of 2x2.2m of space, 1 table (0.5x2.2m), and 1 chair. Most stands will have fence back walls.</p>")),
Div(HTML("<p>Describe what you will bring to Make Munich. Do provide any hands-on activities at your booth? (Note: There is a special form for workshops)</p>")),
Field("boothDescription"),
Div(
Div('boothTables', css_class='col-md-4',),
Div('boothChairs', css_class='col-md-4',),
Div('boothBenches', css_class='col-md-4',),
css_class='row',
),
Field("boothPower"),
Field("boothExtras"),
Field("boothExtrasComment"),
Field("boothOwn"),
Field("boothSafety"),
# FormActions(Submit("Save", "Save changes"))
)
self.helper.add_input(Submit("Save","Save changes"))
class ProjectServiceForm(forms.ModelForm):
class Meta:
model = Project
fields = (
"serviceTickets",
"serviceParking",
)
def __init__(self, *args, **kwargs):
super(ProjectServiceForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Div(HTML("<p>Team members: Admission tickets for 2 makers are included with your booth. More tickets can be ordered</p><p>Exhibitor parking tickets: On-site parking at the venue is possible and there are 3-day parking permits available (valid from Friday to Sunday) for 10 Euro per vehicle.</p>")),
Div(HTML("<p>Please select your total(!) team size</p>")),
Field("serviceTickets"),
Div(HTML("<p>Please select the number of desired 3-day parking tickets</p>")),
Field("serviceParking"),
# FormActions(Submit("Save", "Save changes"))
)
self.helper.add_input(Submit("Save","Save changes"))
class ProjectTalkForm(forms.ModelForm):
class Meta:
model = Project
fields = ("talkComment",)
#print('ip: %s' % geodata['ip'])
#print('country: %s' % geodata['country_name'])
#talk_user = User.objects.get(email=username)
def __init__(self, *args, **kwargs):
#print kwargs['instance']
self.user = kwargs.pop('user', None)
#kwargs['instance'] = user
print('user: %s' % self.user)
#super().__init__(*args, **kwargs)
#self.talk_user = User.get_username()
#self.user = kwargs.pop('user')
#self.endpoint = 'https://pretalx.mm.derchris.eu/api/events/mm2018/speakers/?q={user_email}'
#self.talk_user = self.user
#self.url = self.endpoint.format(user_email=self.talk_user)
#self.headers = {'Authorization': 'Token b81068d5c94911ac8df1a0ff9d095decde1ced1a', 'Accept': 'application/json'}
#self.response = requests.get(self.url, headers=self.headers)
#if self.response.status_code == 200: # SUCCESS
# self.talksdata = self.response.json()
# print self.talksdata
super(ProjectTalkForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
ButtonHolder(
HTML("<a class='btn btn-primary' href='https://pretalx.mm.derchris.eu/mm2018/me/submissions'>View or add submissions</a>"),
),
Field("talkComment"),
# FormActions(Submit("Save", "Save changes"))
)
self.helper.add_input(Submit("Save","Save changes"))
class ProjectWorkshopForm(forms.ModelForm):
class Meta:
model = Project
fields = ("workshopComment",)
def __init__(self, *args, **kwargs):
super(ProjectWorkshopForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
ButtonHolder(
HTML("<a class='btn btn-primary'href='https://pretalx.mm.derchris.eu/mm19w/me/submissions'>View or add submissions</a>"),
),
Field("workshopComment"),
# FormActions(Submit("Save", "Save changes"))
)
self.helper.add_input(Submit("Save","Save changes"))
| 6,259 | 1,364 | 138 |
4d52788cd7cfccd02577ac558846c01bff961277 | 1,560 | py | Python | 2/CH2_NN.py | ys19931127/TensorFlow | 814ae734db5c252ff8dc5a503523b23ea0661ee3 | [
"MIT"
] | 270 | 2016-11-21T13:54:44.000Z | 2022-02-18T01:50:49.000Z | 2/CH2_NN.py | ys19931127/TensorFlow | 814ae734db5c252ff8dc5a503523b23ea0661ee3 | [
"MIT"
] | 12 | 2016-12-28T00:06:09.000Z | 2021-09-29T06:19:24.000Z | 2/CH2_NN.py | ys19931127/TensorFlow | 814ae734db5c252ff8dc5a503523b23ea0661ee3 | [
"MIT"
] | 192 | 2016-11-21T13:54:47.000Z | 2021-11-25T08:33:18.000Z | import tensorflow as tf
import numpy as np
import time
import matplotlib
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_circles
N=210
K=2
# Maximum number of iterations, if the conditions are not met
MAX_ITERS = 1000
cut=int(N*0.7)
start = time.time()
data, features = make_circles(n_samples=N, shuffle=True, noise= 0.12, factor=0.4)
tr_data, tr_features= data[:cut], features[:cut]
te_data,te_features=data[cut:], features[cut:]
fig, ax = plt.subplots()
ax.scatter(tr_data.transpose()[0], tr_data.transpose()[1], marker = 'o', s = 100, c = tr_features, cmap=plt.cm.coolwarm )
plt.plot()
points=tf.Variable(data)
cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))
sess = tf.Session()
sess.run(tf.initialize_all_variables())
test=[]
for i, j in zip(te_data, te_features):
distances = tf.reduce_sum(tf.square(tf.sub(i , tr_data)),reduction_indices=1)
neighbor = tf.arg_min(distances,0)
#print tr_features[sess.run(neighbor)]
#print j
test.append(tr_features[sess.run(neighbor)])
print test
fig, ax = plt.subplots()
ax.scatter(te_data.transpose()[0], te_data.transpose()[1], marker = 'o', s = 100, c = test, cmap=plt.cm.coolwarm )
plt.plot()
#rep_points_v = tf.reshape(points, [1, N, 2])
#rep_points_h = tf.reshape(points, [N, 2])
#sum_squares = tf.reduce_sum(tf.square(rep_points - rep_points), reduction_indices=2)
#print(sess.run(tf.square(rep_points_v - rep_points_h)))
end = time.time()
print ("Found in %.2f seconds" % (end-start))
print "Cluster assignments:", test
| 27.857143 | 121 | 0.721154 | import tensorflow as tf
import numpy as np
import time
import matplotlib
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_circles
N=210
K=2
# Maximum number of iterations, if the conditions are not met
MAX_ITERS = 1000
cut=int(N*0.7)
start = time.time()
data, features = make_circles(n_samples=N, shuffle=True, noise= 0.12, factor=0.4)
tr_data, tr_features= data[:cut], features[:cut]
te_data,te_features=data[cut:], features[cut:]
fig, ax = plt.subplots()
ax.scatter(tr_data.transpose()[0], tr_data.transpose()[1], marker = 'o', s = 100, c = tr_features, cmap=plt.cm.coolwarm )
plt.plot()
points=tf.Variable(data)
cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))
sess = tf.Session()
sess.run(tf.initialize_all_variables())
test=[]
for i, j in zip(te_data, te_features):
distances = tf.reduce_sum(tf.square(tf.sub(i , tr_data)),reduction_indices=1)
neighbor = tf.arg_min(distances,0)
#print tr_features[sess.run(neighbor)]
#print j
test.append(tr_features[sess.run(neighbor)])
print test
fig, ax = plt.subplots()
ax.scatter(te_data.transpose()[0], te_data.transpose()[1], marker = 'o', s = 100, c = test, cmap=plt.cm.coolwarm )
plt.plot()
#rep_points_v = tf.reshape(points, [1, N, 2])
#rep_points_h = tf.reshape(points, [N, 2])
#sum_squares = tf.reduce_sum(tf.square(rep_points - rep_points), reduction_indices=2)
#print(sess.run(tf.square(rep_points_v - rep_points_h)))
end = time.time()
print ("Found in %.2f seconds" % (end-start))
print "Cluster assignments:", test
| 0 | 0 | 0 |
1fdce70173b0951074f03a0d68e77c0cc775a0fc | 2,936 | py | Python | ds/graph.py | BaiqiangGit/Data-Structure-and-Algorithms---Python3 | 964a41f210356b8d7b192bf54a2a1eb9a754b19e | [
"Apache-2.0"
] | 2 | 2021-01-14T08:58:05.000Z | 2021-11-25T21:25:16.000Z | ds/graph.py | BaiqiangGit/Data-Structure-and-Algorithms---Python3 | 964a41f210356b8d7b192bf54a2a1eb9a754b19e | [
"Apache-2.0"
] | null | null | null | ds/graph.py | BaiqiangGit/Data-Structure-and-Algorithms---Python3 | 964a41f210356b8d7b192bf54a2a1eb9a754b19e | [
"Apache-2.0"
] | 1 | 2021-01-14T08:58:06.000Z | 2021-01-14T08:58:06.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
## Baiqiang XIA implementation of data structures
## Graph represented as Adjacency List
## init
## add Edge
## print
## bread first search (traversal), assuming all node are connected
## the task is to visit all the nodes
## bread first search
## https://www.geeksforgeeks.org/breadth-first-search-or-bfs-for-a-graph/
### depth first search
| 31.913043 | 92 | 0.531676 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
## Baiqiang XIA implementation of data structures
## Graph represented as Adjacency List
class directedGraph(object):
    """Directed graph stored as an adjacency list (dict: vertex -> list of successors).

    Fixes over the original version:
      * ``breadFirst_geek4geek`` marks the entry vertex as visited before the
        loop starts, so a cycle leading back to it can no longer print it twice.
      * Traversals use ``dict.get`` for neighbour lookups, so sink vertices
        (vertices with no outgoing edges, hence absent from the dict) no
        longer raise KeyError.
      * Visited bookkeeping uses sets keyed by vertex label instead of a
        boolean list indexed by label, so labels need not be 0..n-1 integers.
    """

    def __init__(self):
        # adjacency dict: vertex -> list of direct successors
        self.graph = {}

    def addEdge(self, fromVertex, toVertex):
        """Add a directed edge fromVertex ---> toVertex."""
        # setdefault initialises the adjacency list on first use
        self.graph.setdefault(fromVertex, []).append(toVertex)

    def __str__(self):
        """One line per source vertex: 'u--->v u--->w ...'."""
        output = []
        for each in self.graph:
            output.append(' '.join([str(each) + '--->' + str(f) for f in self.graph[each]]))
        return '\n'.join(output)

    def breadFirst_baiqiang(self, entryVertex):
        """Level-order traversal: prints one BFS level per line.

        Note: each level is held in a set, so print order *within* a line
        depends on set iteration order.
        """
        visited = set()
        this_level = set([entryVertex])
        while this_level:
            next_level = set()
            for vertex in this_level:
                if vertex not in visited:
                    visited.add(vertex)
                    print(vertex, end=' ')
                    # .get handles sink vertices that never appear as keys
                    next_level = next_level.union(set(self.graph.get(vertex, [])))
            print()
            this_level = next_level

    def breadFirst_geek4geek(self, entryVertex):
        """Strict FIFO breadth-first traversal, printed on one line.

        https://www.geeksforgeeks.org/breadth-first-search-or-bfs-for-a-graph/
        """
        # Mark the start vertex up-front (bug fix: previously it could be
        # re-enqueued and printed again when a cycle led back to it).
        visited = {entryVertex}
        queue = [entryVertex]
        while queue:
            ## dequeue from the front
            cur = queue.pop(0)
            print(cur, end=' ')
            ## check connected vertices
            for nxt in self.graph.get(cur, []):
                if nxt not in visited:
                    visited.add(nxt)
                    queue.append(nxt)  ## enqueue at the tail

    def DFS(self, start):
        """Depth-first traversal starting at *start*, printed on one line."""
        self.__dfs(start, set())

    def __dfs(self, cur, visited):
        ## print and mark current, then recur into unvisited successors
        print(cur, end=' ')
        visited.add(cur)
        for ver in self.graph.get(cur, []):
            if ver not in visited:
                self.__dfs(ver, visited)
| 2,220 | 7 | 213 |
a84fec674e866f8507cbef9561c482b7154ff4c0 | 244 | py | Python | codeforces/math数学/1000/1359A摸大王.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | codeforces/math数学/1000/1359A摸大王.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | codeforces/math数学/1000/1359A摸大王.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# https://codeforces.com/problemset/problem/1359/A
# Reads t test cases; each case is one line of three integers n, m, k.
t = int(input())
for _ in range(t):
    n,m,k = list(map(int,input().split()))
    # Winner holds min(n//k, m) jokers: capped by the m jokers available
    # and by n//k -- presumably the per-player hand size (see problem link).
    winner = min(n//k,m)
    # Best any other player can hold: the remaining m-winner jokers spread
    # over the other k-1 players; (x+k-2)//(k-1) computes ceil(x/(k-1)).
    second = (m-winner+k-2)//(k-1)
    # Answer: the winner's margin over the runner-up.
    print(winner-second)
| 22.181818 | 50 | 0.598361 | #!/usr/bin/env python3
# https://codeforces.com/problemset/problem/1359/A
t = int(input())
for _ in range(t):
n,m,k = list(map(int,input().split()))
winner = min(n//k,m)
second = (m-winner+k-2)//(k-1)
print(winner-second)
| 0 | 0 | 0 |
bc3140a00caacc8db57f94c6a0db831bee8d9301 | 244 | py | Python | p16.py | nymoral/euler | 9dcc1bb6c733e3164e06e97f2363993fb932f5fc | [
"MIT"
] | null | null | null | p16.py | nymoral/euler | 9dcc1bb6c733e3164e06e97f2363993fb932f5fc | [
"MIT"
] | null | null | null | p16.py | nymoral/euler | 9dcc1bb6c733e3164e06e97f2363993fb932f5fc | [
"MIT"
] | null | null | null | """
2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
What is the sum of the digits of the number 2^1000?
"""
if __name__ == "__main__":
print(sum_digits(2**1000))
| 22.181818 | 65 | 0.618852 | """
2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
What is the sum of the digits of the number 2^1000?
"""
def sum_digits(n):
    """Return the sum of the decimal digits of n."""
    return sum(map(int, str(n)))
if __name__ == "__main__":
print(sum_digits(2**1000))
| 36 | 0 | 23 |
8d1c1db8400e459c9493cf2d5adb601c5b5910b4 | 2,083 | py | Python | examples/fitbit_interday/sleep_dataset.py | qcri/tasrif | 327bc1eccb8f8e11d8869ba65a7c72ad038aa094 | [
"BSD-3-Clause"
] | 20 | 2021-12-06T10:41:54.000Z | 2022-03-13T16:25:43.000Z | examples/fitbit_interday/sleep_dataset.py | qcri/tasrif | 327bc1eccb8f8e11d8869ba65a7c72ad038aa094 | [
"BSD-3-Clause"
] | 33 | 2021-12-06T08:27:18.000Z | 2022-03-14T05:07:53.000Z | examples/fitbit_interday/sleep_dataset.py | qcri/tasrif | 327bc1eccb8f8e11d8869ba65a7c72ad038aa094 | [
"BSD-3-Clause"
] | 2 | 2022-02-07T08:06:48.000Z | 2022-02-14T07:13:42.000Z | import os
from tasrif.data_readers.fitbit_interday_dataset import FitbitInterdayDataset
from tasrif.processing_pipeline import SequenceOperator
from tasrif.processing_pipeline.custom import AggregateOperator, CreateFeatureOperator
from tasrif.processing_pipeline.pandas import (
    ConvertToDatetimeOperator,
    SetIndexOperator,
    FillNAOperator
)
# Folder holding the Fitbit interday export (overridable via env var).
interday_folder_path = os.environ.get("FITBIT_INTERDAY_PATH", "/mnt/data/fitbit-data/")
df = FitbitInterdayDataset(interday_folder_path, table_name="Sleep").process()[0]
# Pipeline: parse timestamps -> backfill missing "End Time" with the row's
# "Start Time" -> derive a calendar Date from the end time -> sum the sleep
# metrics per day -> index the result by Date.
pipeline = SequenceOperator(
    [
        ConvertToDatetimeOperator(
            feature_names=["Start Time", "End Time"], infer_datetime_format=True
        ),
        FillNAOperator(values={'End Time': df['Start Time']}),
        CreateFeatureOperator(
            feature_name="Date", feature_creator=lambda df: df["End Time"].dt.date
        ),
        AggregateOperator(
            groupby_feature_names="Date",
            aggregation_definition={
                "Minutes Asleep": "sum",
                "Minutes Awake": "sum",
                "Number of Awakenings": "sum",
                "Time in Bed": "sum",
                "Minutes REM Sleep": "sum",
                "Minutes Light Sleep": "sum",
                "Minutes Deep Sleep": "sum",
            },
        ),
        SetIndexOperator("Date"),
    ]
)
df = pipeline.process()
print(df)
# Second run of an equivalent pipeline, this time built from a YAML description.
import yaml
import tasrif.yaml_parser as yaml_parser
# This is done because this file is executed within a unit test from a different directory
# The relative path would not work in that case.
# __file__ is not defined in iPython interactive shell
try:
    yaml_config_path = os.path.join(
        os.path.dirname(__file__), "yaml_config/sleep_dataset.yaml"
    )
except NameError:  # narrowed from a bare except: only the __file__ lookup can fail here
    yaml_config_path = "yaml_config/sleep_dataset.yaml"
with open(yaml_config_path, "r") as stream:
    try:
        # print(json.dumps(yaml.safe_load(stream), indent=4, sort_keys=True))
        p = yaml_parser.from_yaml(stream)
    except yaml.YAMLError as exc:
        print(exc)
df = p.process()
print(df)
| 30.188406 | 90 | 0.662506 | import os
from tasrif.data_readers.fitbit_interday_dataset import FitbitInterdayDataset
from tasrif.processing_pipeline import SequenceOperator
from tasrif.processing_pipeline.custom import AggregateOperator, CreateFeatureOperator
from tasrif.processing_pipeline.pandas import (
ConvertToDatetimeOperator,
SetIndexOperator,
FillNAOperator
)
interday_folder_path = os.environ.get("FITBIT_INTERDAY_PATH", "/mnt/data/fitbit-data/")
df = FitbitInterdayDataset(interday_folder_path, table_name="Sleep").process()[0]
pipeline = SequenceOperator(
[
ConvertToDatetimeOperator(
feature_names=["Start Time", "End Time"], infer_datetime_format=True
),
FillNAOperator(values={'End Time': df['Start Time']}),
CreateFeatureOperator(
feature_name="Date", feature_creator=lambda df: df["End Time"].dt.date
),
AggregateOperator(
groupby_feature_names="Date",
aggregation_definition={
"Minutes Asleep": "sum",
"Minutes Awake": "sum",
"Number of Awakenings": "sum",
"Time in Bed": "sum",
"Minutes REM Sleep": "sum",
"Minutes Light Sleep": "sum",
"Minutes Deep Sleep": "sum",
},
),
SetIndexOperator("Date"),
]
)
df = pipeline.process()
print(df)
import yaml
import tasrif.yaml_parser as yaml_parser
# This is done because this file is executed within a unit test from a different directory
# The relative path would not work in that case.
# __file__ is not defined in iPython interactive shell
try:
yaml_config_path = os.path.join(
os.path.dirname(__file__), "yaml_config/sleep_dataset.yaml"
)
except:
yaml_config_path = "yaml_config/sleep_dataset.yaml"
with open(yaml_config_path, "r") as stream:
try:
# print(json.dumps(yaml.safe_load(stream), indent=4, sort_keys=True))
p = yaml_parser.from_yaml(stream)
except yaml.YAMLError as exc:
print(exc)
df = p.process()
print(df)
| 0 | 0 | 0 |
c3a0dd529b678d2984acd9b3350424464c379f44 | 311 | py | Python | landing/serializers.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | landing/serializers.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | landing/serializers.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from landing.models import Contact
import re
from rest_framework.response import Response
#from store.models import Store
| 28.272727 | 54 | 0.755627 | from rest_framework import serializers
from landing.models import Contact
import re
from rest_framework.response import Response
#from store.models import Store
class ContactSerializer(serializers.ModelSerializer):
class Meta:
model = Contact
fields = ("name", "email", "phone", "website") | 0 | 127 | 23 |
635c5fa8734b68f1c1a855e1daaac1237bea340e | 17,944 | py | Python | app/searchcom/callbacks.py | tg2648/cu-reports-app | 851388320be9382d56c0dbdc89c26f761b4e3e7e | [
"MIT"
] | null | null | null | app/searchcom/callbacks.py | tg2648/cu-reports-app | 851388320be9382d56c0dbdc89c26f761b4e3e7e | [
"MIT"
] | 1 | 2021-04-20T14:56:31.000Z | 2021-04-20T14:56:31.000Z | app/searchcom/callbacks.py | tg2648/cu-reports-app | 851388320be9382d56c0dbdc89c26f761b4e3e7e | [
"MIT"
] | null | null | null | """
Dash callbacks
Only applicant and posting data depend on the requisition number value
Pipeline and subfield data depend on the posting data
Callback chain:
Requisition number dropdown value changes
-> applicant data and posting data load
---> pipeline and subfield data load
-----> charts and footers load
"""
# Third party imports
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import plotly.graph_objs as go
from flask import current_app
from boto3.dynamodb.conditions import Key
# Local application imports
from app.extensions import dynamo
from app.searchcom.chart_config.styling import axes
from app.searchcom.chart_config.colors import colors
# Crosstab table outputs
## Category values as they are entered in HTML id fields
gen_cat_html = ['fem', 'male', 'na']
ethn_cat_html = ['amind', 'asian', 'black', 'pacific', 'white', 'na']
hisp_cat_html = ['hisp', 'nonhisp', 'na']
## Build a list of outputs to all combinations of those fields
xtab_output_list = [Output('searchcom-xtab-table', 'style'), Output('searchcom-xtab-threshold-warning', 'style')]
for i in ethn_cat_html:
for j in hisp_cat_html:
for k in gen_cat_html:
xtab_output_list.append(Output(f"{i}-{j}-{k}", 'children'))
# Category values as they are in the xtab data
# The order should be the same as in HTML categories so that the callback output matches the output list (Male-White-Yes matches male-white-hisp, etc.)
gen_cat = ['Female', 'Male', 'Blank']
ethn_cat = ['American Indian or Alaska Native', 'Asian', 'Black or African American', 'Native Hawaiian or Other Pacific Islander', 'White', 'Blank']
hisp_cat = ['Yes', 'No', 'Blank']
| 41.730233 | 170 | 0.534329 | """
Dash callbacks
Only applicant and posting data depend on the requisition number value
Pipeline and subfield data depend on the posting data
Callback chain:
Requisition number dropdown value changes
-> applicant data and posting data load
---> pipeline and subfield data load
-----> charts and footers load
"""
# Third party imports
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import plotly.graph_objs as go
from flask import current_app
from boto3.dynamodb.conditions import Key
# Local application imports
from app.extensions import dynamo
from app.searchcom.chart_config.styling import axes
from app.searchcom.chart_config.colors import colors
# Crosstab table outputs
## Category values as they are entered in HTML id fields
gen_cat_html = ['fem', 'male', 'na']
ethn_cat_html = ['amind', 'asian', 'black', 'pacific', 'white', 'na']
hisp_cat_html = ['hisp', 'nonhisp', 'na']
## Build a list of outputs to all combinations of those fields
xtab_output_list = [Output('searchcom-xtab-table', 'style'), Output('searchcom-xtab-threshold-warning', 'style')]
for i in ethn_cat_html:
for j in hisp_cat_html:
for k in gen_cat_html:
xtab_output_list.append(Output(f"{i}-{j}-{k}", 'children'))
# Category values as they are in the xtab data
# The order should be the same as in HTML categories so that the callback output matches the output list (Male-White-Yes matches male-white-hisp, etc.)
gen_cat = ['Female', 'Male', 'Blank']
ethn_cat = ['American Indian or Alaska Native', 'Asian', 'Black or African American', 'Native Hawaiian or Other Pacific Islander', 'White', 'Blank']
hisp_cat = ['Yes', 'No', 'Blank']
def register_searchcom_callbacks(dashapp):
posting_table = dynamo.tables[current_app.config['DB_SEARCHCOM_POSTING']]
applicant_table = dynamo.tables[current_app.config['DB_SEARCHCOM_APPLICANT']]
pipeline_table = dynamo.tables[current_app.config['DB_SEARCHCOM_PIPELINE']]
subfields_table = dynamo.tables[current_app.config['DB_SEARCHCOM_SUBFIELDS']]
# DATA
## POSTING DATA
@dashapp.callback(Output('searchcom-session-data-posting', 'data'),
[Input('req-num-dropdown', 'value')],
[State('searchcom-session-data-posting', 'data')])
def load_posting_data(req_num, posting_data):
"""
Loads posting data for the selected job requisition number into session data
"""
if (req_num == '') or (req_num is None):
raise PreventUpdate
response = posting_table.query(KeyConditionExpression=Key('req_num').eq(req_num))
return response['Items'][0] # DynamoDB query returns a list, since we are querying on a unique requisition number, we need the first and only element of the list
## APPLICANT DATA
@dashapp.callback(Output('searchcom-session-data-applicant', 'data'),
[Input('req-num-dropdown', 'value')],
[State('searchcom-session-data-applicant', 'data')])
def load_applicant_data(req_num, applicant_data):
"""
Loads applicant data for the selected job requisition number into session data
"""
if (req_num == '') or (req_num is None):
raise PreventUpdate
response = applicant_table.query(KeyConditionExpression=Key('req_num').eq(req_num))
applicant_data = response['Items'][0]
CHART_THRESHOLD_FAIL = applicant_data['agg']['person_id_count'] < 3
CROSSTAB_THRESHOLD_FAIL = (applicant_data['agg']['gender_Female_sum'] < 5) or (applicant_data['agg']['gender_Male_sum'] < 5)
if CHART_THRESHOLD_FAIL:
applicant_data['agg'] = {}
if CROSSTAB_THRESHOLD_FAIL:
applicant_data['xtab'] = {}
return applicant_data
## PIPELINE DATA
@dashapp.callback(Output('searchcom-session-data-pipeline', 'data'),
[Input('searchcom-session-data-posting', 'modified_timestamp')],
[State('searchcom-session-data-posting', 'data')])
def load_pipeline_data(ts, posting_data):
"""
Loads the pipeline based on the department code of the selected job requisition number
"""
if (ts is None) or (ts == -1):
raise PreventUpdate
dept = posting_data['dept_code']
response = pipeline_table.query(KeyConditionExpression=Key('Dept').eq(dept))
return response['Items'][0]
# LAYOUT
## BASIC SEARCH INFO
@dashapp.callback([Output('search-info-dept', 'children'),
Output('search-info-title', 'children'),
Output('search-info-open-date', 'children'),
Output('search-info-start-date', 'children'),
Output('search-info-field', 'children'),
Output('search-info-data-refresh', 'children')],
[Input('searchcom-session-data-posting', 'modified_timestamp'),
Input('searchcom-session-data-applicant', 'modified_timestamp')],
[State('searchcom-session-data-posting', 'data'),
State('searchcom-session-data-applicant', 'data')])
def populate_search_info(posting_ts, applicant_ts, posting_data, applicant_data):
"""
Populates basic search info from the selected job requisition number's posting data
"""
if (posting_ts is None) or (posting_ts == -1) or (applicant_ts is None) or (applicant_ts == -1):
raise PreventUpdate
return posting_data['dept_name'], \
posting_data['position_title'], \
posting_data['open_date'], \
posting_data['start_date'], \
posting_data['field'], \
applicant_data['refresh_date']
## FOOTER
@dashapp.callback(Output('searchcom-search-subfields', 'children'),
[Input('searchcom-session-data-posting', 'modified_timestamp')],
[State('searchcom-session-data-posting', 'data')])
def populate_footer(ts, posting_data):
"""
Populates the subfield info based on the department code of the selected job requisition number
"""
if (ts is None) or (ts == -1):
raise PreventUpdate
dept = posting_data['dept_code']
response = subfields_table.query(KeyConditionExpression=Key('Dept').eq(dept))
return response['Items'][0]['Subfield']
## TABLE
    @dashapp.callback(xtab_output_list,
                      [Input('searchcom-session-data-applicant', 'modified_timestamp')],
                      [State('searchcom-session-data-applicant', 'data')])
    def build_crosstab_table(applicant_ts, applicant_data):
        """Populate the gender x ethnicity x hispanic crosstab table.

        Returns a list matching ``xtab_output_list``: two style dicts that
        toggle visibility of the table vs. the small-count warning, followed
        by one cell value per (ethnicity, hispanic, gender) combination, in
        the same ethnicity -> hispanic -> gender order the output list was
        built in. ``applicant_data['xtab']`` is emptied upstream (see
        ``load_applicant_data``) when either gender count is below the
        reporting threshold, so a falsy value means "suppress the table".
        """
        # Session store not populated yet -- leave the layout untouched.
        if (applicant_ts is None) or (applicant_ts == -1):
            raise PreventUpdate
        if applicant_data['xtab']:
            applicant_data_xtab = applicant_data['xtab']
            # Show the table, hide the threshold warning.
            return_list = [
                {'display': 'inline'},
                {'display': 'none'}
            ]
            # Loop ethnicity, then hispanic, then gender so entries line up
            # with the output list (Male-White-Yes matches male-white-hisp, etc.)
            for j in ethn_cat:
                for k in hisp_cat:
                    for i in gen_cat:
                        return_value = applicant_data_xtab[i][j][k]
                        if return_value == 0:
                            # Zero counts are rendered as a dash.
                            return_list.append('-')
                        else:
                            return_list.append(return_value)
            return return_list
        else:
            # Below-threshold data: hide the table, show the warning instead.
            return_list = [
                {'display': 'none'},
                {'display': 'inline'}
            ]
            # Fill every cell with blanks
            for j in ethn_cat:
                for k in hisp_cat:
                    for i in gen_cat:
                        return_list.append('-')
            return return_list
## CHARTS
@dashapp.callback([Output('searchcom-applicant-chart', 'figure'),
Output('searchcom-applicant-chart', 'style'),
Output('searchcom-chart-threshold-warning', 'style'),
Output('searchcom-subfields-footer', 'style')],
[Input('searchcom-session-data-applicant', 'modified_timestamp'),
Input('searchcom-session-data-pipeline', 'modified_timestamp')],
[State('searchcom-session-data-applicant', 'data'),
State('searchcom-session-data-pipeline', 'data')])
def build_applicant_chart(applicant_ts, pipeline_ts, applicant_data, pipeline_data):
if (applicant_ts is None) or (applicant_ts == -1) or (pipeline_ts is None) or (pipeline_ts == -1):
raise PreventUpdate
if applicant_data['agg']:
applicant_data_agg = applicant_data['agg']
x_axis = ['Female', 'URM', 'Asian', 'White']
chart_data = [
go.Bar(
name='Combined availability 1993-2012',
x=x_axis,
y=[
pipeline_data['combined_1993-2012_women'],
pipeline_data['combined_1993-2012_urm'],
pipeline_data['combined_1993-2012_asian'],
pipeline_data['combined_1993-2012_white'],
],
text=[
str(pipeline_data['combined_1993-2012_women']) + '%',
str(pipeline_data['combined_1993-2012_urm']) + '%',
str(pipeline_data['combined_1993-2012_asian']) + '%',
str(pipeline_data['combined_1993-2012_white']) + '%',
],
hoverinfo='text',
marker=dict(
color=colors.get('blue1'),
)
),
go.Bar(
name='Tenured availability 1993-2007',
x=x_axis,
y=[
pipeline_data['tenured_1993-2007_women'],
pipeline_data['tenured_1993-2007_urm'],
pipeline_data['tenured_1993-2007_asian'],
pipeline_data['tenured_1993-2007_white'],
],
text=[
str(pipeline_data['tenured_1993-2007_women']) + '%',
str(pipeline_data['tenured_1993-2007_urm']) + '%',
str(pipeline_data['tenured_1993-2007_asian']) + '%',
str(pipeline_data['tenured_1993-2007_white']) + '%',
],
hoverinfo='text',
marker=dict(
color=colors.get('blue2'),
)
),
go.Bar(
name='Untenured availability 2008-2012',
x=x_axis,
y=[
pipeline_data['untenured_2008-2012_women'],
pipeline_data['untenured_2008-2012_urm'],
pipeline_data['untenured_2008-2012_asian'],
pipeline_data['untenured_2008-2012_white'],
],
text=[
str(pipeline_data['untenured_2008-2012_women']) + '%',
str(pipeline_data['untenured_2008-2012_urm']) + '%',
str(pipeline_data['untenured_2008-2012_asian']) + '%',
str(pipeline_data['untenured_2008-2012_white']) + '%',
],
hoverinfo='text',
marker=dict(
color=colors.get('blue3'),
)
),
go.Bar(
name='Untenured availability 2013-2016',
x=x_axis,
y=[
pipeline_data['untenured_2013-2016_women'],
pipeline_data['untenured_2013-2016_urm'],
pipeline_data['untenured_2013-2016_asian'],
pipeline_data['untenured_2013-2016_white'],
],
text=[
str(pipeline_data['untenured_2013-2016_women']) + '%',
str(pipeline_data['untenured_2013-2016_urm']) + '%',
str(pipeline_data['untenured_2013-2016_asian']) + '%',
str(pipeline_data['untenured_2013-2016_white']) + '%',
],
hoverinfo='text',
marker=dict(
color=colors.get('blue4'),
)
),
go.Bar(
name='Applicants',
x=x_axis,
y=[
applicant_data_agg['gender_Female_pcnt'],
applicant_data_agg['ethnicity_URM_pcnt'],
applicant_data_agg['ethnicity_Asian_pcnt'],
applicant_data_agg['ethnicity_White_pcnt'],
],
text=[
f"{applicant_data_agg['gender_Female_pcnt']}%<br>n={applicant_data_agg['gender_Female_sum']}",
f"{applicant_data_agg['ethnicity_URM_pcnt']}%<br>n={applicant_data_agg['ethnicity_URM_sum']}",
f"{applicant_data_agg['ethnicity_Asian_pcnt']}%<br>n={applicant_data_agg['ethnicity_Asian_sum']}",
f"{applicant_data_agg['ethnicity_White_pcnt']}%<br>n={applicant_data_agg['ethnicity_White_sum']}",
],
textposition='outside',
hovertext=[
f"{applicant_data_agg['gender_Female_pcnt']}% (n={applicant_data_agg['gender_Female_sum']})",
f"{applicant_data_agg['ethnicity_URM_pcnt']}% (n={applicant_data_agg['ethnicity_URM_sum']})",
f"{applicant_data_agg['ethnicity_Asian_pcnt']}% (n={applicant_data_agg['ethnicity_Asian_sum']})",
f"{applicant_data_agg['ethnicity_White_pcnt']}% (n={applicant_data_agg['ethnicity_White_sum']})",
],
hoverinfo='text',
marker=dict(
color=colors.get('orange1'),
)
),
# go.Scatter(
# name='dept',
# x=x_axis,
# y=[10, 20, 30, 40],
# mode='markers',
# marker=dict(
# symbol='diamond',
# size=8,
# color='#D585E9'
# )
# )
]
chart_layout = go.Layout(
barmode='group',
xaxis=axes(),
yaxis=axes(
title='%',
range=[0, 110]
)
)
return {'data': chart_data, 'layout': chart_layout}, \
{'display': 'inline'}, \
{'display': 'none'}, \
{'display': 'inline'}
else:
return {'data': [], 'layout': {}}, \
{'display': 'none'}, \
{'display': 'inline'}, \
{'display': 'none'}
# NAVBAR
## Navbar Collapse Toggle
@dashapp.callback(Output("navbar-collapse", "is_open"),
[Input("navbar-toggler", "n_clicks")],
[State("navbar-collapse", "is_open")])
def toggle_navbar_collapse(n, is_open):
if n:
return not is_open
return is_open
## Button popups
@dashapp.callback(
Output("changelog-popup", "is_open"),
[Input("changelog-popup-button", "n_clicks"), Input("close-changelog", "n_clicks")],
[State("changelog-popup", "is_open")],
)
def toggle_changelog_modal(n1, n2, is_open):
if n1 or n2:
return not is_open
return is_open
@dashapp.callback(
Output("notes-popup", "is_open"),
[Input("notes-popup-button", "n_clicks"), Input("close-notes", "n_clicks")],
[State("notes-popup", "is_open")],
)
def toggle_notes_modal(n1, n2, is_open):
if n1 or n2:
return not is_open
return is_open
@dashapp.callback(
Output("contact-popup", "is_open"),
[Input("contact-popup-button", "n_clicks"), Input("close-contact", "n_clicks")],
[State("contact-popup", "is_open")],
)
def toggle_modal(n1, n2, is_open):
if n1 or n2:
return not is_open
return is_open
# # TOASTS
# @dashapp.callback([Output("changelog-popup", "is_open"),
# Output("notes-popup", "is_open")],
# [Input("changelog-popup-button", "n_clicks"),
# Input("notes-popup-button", "n_clicks")],
# [State("changelog-popup", "is_open"),
# State("notes-popup", "is_open")])
# def open_toast(n_changelog, n_notes, state_changelog, state_notes):
# # "How do I determine which Input has changed?"
# # https://dash.plot.ly/faqs
# ctx = dash.callback_context
# if n_changelog or n_notes:
# # Determine which button was triggered last
# triggered_button = ctx.triggered[0]['prop_id'].split('.')[0]
# if triggered_button == 'changelog-popup-button': # If changelog is pressed
# return [not state_changelog, False] # Flip state of changelog and close notes
# elif triggered_button == 'notes-popup-button':
# return [False, not state_notes]
# else:
# return [False, False]
| 16,228 | 0 | 23 |
6e1f0cddd752d2501ab71db2277d2b19c88f128c | 656 | py | Python | tracks/views.py | daatrujillopu/Sfotipy | b77466b0b04b1f47b02b66a51b00df0be9b136bc | [
"MIT"
] | null | null | null | tracks/views.py | daatrujillopu/Sfotipy | b77466b0b04b1f47b02b66a51b00df0be9b136bc | [
"MIT"
] | null | null | null | tracks/views.py | daatrujillopu/Sfotipy | b77466b0b04b1f47b02b66a51b00df0be9b136bc | [
"MIT"
] | null | null | null | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
import json
# Create your views here.
from .models import Track | 32.8 | 67 | 0.664634 | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
import json
# Create your views here.
from .models import Track
def track_view(request, title):
    """Return a single track, looked up by title, as a JSON response.

    Raises Http404 (via get_object_or_404) when no Track matches *title*.
    """
    track = get_object_or_404(Track, title=title)
    # Plain-dict snapshot of the track and its artist for serialization.
    data = {
        'title': track.title,
        'order': track.order,
        # NOTE(review): json.dumps below will raise TypeError unless
        # track.album is already a JSON-serializable value -- verify.
        'album': track.album,
        'artist': {
            # NOTE(review): first_name+last_name concatenates with no
            # separator ("JaneDoe") -- confirm this is intended.
            'name':track.artist.first_name+track.artist.last_name,
            'bio': track.artist.biography,
        }
    }
    json_data = json.dumps(data)
    return HttpResponse(json_data, content_type='application/json')
#return render(request, 'track.html', {'track':track}) | 469 | 0 | 23 |
f67f538e988db0543b692f179b047d468a68118b | 3,132 | py | Python | apps/cp2k/reframe_cp2k.py | stackhpc/nrel-benchmarks | 07e1078590c1668d3d9da6ab98eccfbbf9c5310a | [
"Apache-2.0"
] | null | null | null | apps/cp2k/reframe_cp2k.py | stackhpc/nrel-benchmarks | 07e1078590c1668d3d9da6ab98eccfbbf9c5310a | [
"Apache-2.0"
] | null | null | null | apps/cp2k/reframe_cp2k.py | stackhpc/nrel-benchmarks | 07e1078590c1668d3d9da6ab98eccfbbf9c5310a | [
"Apache-2.0"
] | null | null | null | """ Performance test using CP2K quantum chemistry and solid state physics software package for atomistic simulations.
See README.md for details.
NB:
- The executable is either cp2k.popt (for MPI only) or cp2k.psmp (for MPI + OpenMP).
- Only the former is currently implemented here.
"""
import reframe as rfm
import reframe.utility.sanity as sn
from reframe.utility.sanity import defer
from pprint import pprint
import sys, os
from collections import namedtuple
from reframe.core.logging import getlogger
sys.path.append('.')
from reframe_extras import sequence, Scheduler_Info, CachedRunTest
from modules.utils import parse_time_cmd
# CSCS include a CP2k test which provides the input file we need, so find that test:
RFM_CP2K_PATH = os.path.join(os.path.dirname(rfm.__path__[0]), 'cscs-checks', 'apps', 'cp2k')
node_seq = sequence(1, Scheduler_Info().num_nodes + 1, 2)
@rfm.parameterized_test(*[[n_nodes] for n_nodes in node_seq]) | 41.76 | 125 | 0.604087 | """ Performance test using CP2K quantum chemistry and solid state physics software package for atomistic simulations.
See README.md for details.
NB:
- The executable is either cp2k.popt (for MPI only) or cp2k.psmp (for MPI + OpenMP).
- Only the former is currently implemented here.
"""
import reframe as rfm
import reframe.utility.sanity as sn
from reframe.utility.sanity import defer
from pprint import pprint
import sys, os
from collections import namedtuple
from reframe.core.logging import getlogger
sys.path.append('.')
from reframe_extras import sequence, Scheduler_Info, CachedRunTest
from modules.utils import parse_time_cmd
# CSCS include a CP2k test which provides the input file we need, so find that test:
RFM_CP2K_PATH = os.path.join(os.path.dirname(rfm.__path__[0]), 'cscs-checks', 'apps', 'cp2k')
node_seq = sequence(1, Scheduler_Info().num_nodes + 1, 2)
@rfm.parameterized_test(*[[n_nodes] for n_nodes in node_seq])
class Cp2k_H2O_256(rfm.RunOnlyRegressionTest):
def __init__(self, num_nodes):
self.valid_systems = ['*']
self.valid_prog_environs = ['*']
self.modules = ['cp2k']
self.extra_resources = {}
self.prerun_cmds = ['time \\']
self.executable = 'cp2k.popt'
self.executable_opts = ['H2O-256.inp']
self.sourcesdir = os.path.join(os.path.abspath(RFM_CP2K_PATH), 'src')
self.num_nodes = num_nodes
# these are the ones reframe uses:
self.num_tasks_per_node = Scheduler_Info().pcores_per_node
self.num_tasks = self.num_nodes * self.num_tasks_per_node
self.tags = {'num_procs=%i' % self.num_tasks, 'num_nodes=%i' % self.num_nodes}
self.exclusive_access = True
self.time_limit = None
# 'Sanity checks' based on included CSCS CP2K test, but format seems to have changed slightly for step count & energy
energy = sn.extractsingle(r'\s+ENERGY\| Total FORCE_EVAL \( QS \) '
r'energy \[a\.u\.\]:\s+(?P<energy>\S+)', # note change to [a.u.] rather than (a.u.)
self.stdout, 'energy', float, item=-1)
energy_reference = -4404.2323
energy_diff = sn.abs(energy-energy_reference)
self.sanity_patterns = sn.all([
sn.assert_found(r'PROGRAM STOPPED IN', self.stdout),
sn.assert_eq(sn.count(sn.extractall(
r'Step number',
self.stdout)), 10),
sn.assert_lt(energy_diff, 1e-4)
])
self.perf_patterns = {
# from cp2k output:
'cp2k_time': sn.extractsingle(r'^ CP2K\s+\d+\s+[\d.]+\s+[\d.]+\s+[\d.]+\s+[\d.]+\s+([\d.]+)',
self.stdout, 1, float), # "Total Max" time for CP2K subroutine
# from `time`:
'runtime_real': sn.extractsingle(r'^real\s+(\d+m[\d.]+s)$', self.stderr, 1, parse_time_cmd),
}
self.reference = {
'*': {
'cp2k_time': (0, None, None, 's'),
'runtime_real': (0, None, None, 's'),
}
} | 2,105 | 25 | 49 |
9c93d549439c3cccb498a651bfc4df35dba94430 | 1,817 | py | Python | base/config.py | northwestern-mti/CeNDR-1 | 9d8f9d9a9fba7f4304202094c48bc5f80baf8e71 | [
"MIT"
] | null | null | null | base/config.py | northwestern-mti/CeNDR-1 | 9d8f9d9a9fba7f4304202094c48bc5f80baf8e71 | [
"MIT"
] | 2 | 2021-04-21T15:34:04.000Z | 2021-05-04T20:29:39.000Z | base/config.py | northwestern-mti/CeNDR-1 | 9d8f9d9a9fba7f4304202094c48bc5f80baf8e71 | [
"MIT"
] | null | null | null | # Application Configuration
import os
import yaml
from base.utils.data_utils import json_encoder
# CeNDR Version
APP_CONFIG, CENDR_VERSION = os.environ['GAE_VERSION'].split("-", 1)
if APP_CONFIG not in ['development', 'master']:
APP_CONFIG = 'development'
CENDR_VERSION = CENDR_VERSION.replace("-", '.')
# BUILDS AND RELEASES
# The first release is the current release
# (RELEASE, ANNOTATION_GENOME)
RELEASES = [("20210121", "WS276"),
("20200815", "WS276"),
("20180527", "WS263"),
("20170531", "WS258"),
("20160408", "WS245")]
# The most recent release
DATASET_RELEASE, WORMBASE_VERSION = RELEASES[0]
# SQLITE DATABASE
SQLITE_PATH = f"base/cendr.{DATASET_RELEASE}.{WORMBASE_VERSION}.db"
# CONFIG
def get_config(APP_CONFIG):
"""Load all configuration information including
constants defined above.
(BASE_VARS are the same regardless of whether we are debugging or in production)
"""
config = dict()
BASE_VARS = load_yaml("env_config/base.yaml")
APP_CONFIG_VARS = load_yaml(f"env_config/{APP_CONFIG}.yaml")
config.update(BASE_VARS)
config.update(APP_CONFIG_VARS)
# Add configuration variables
# Remove base prefix for SQLAlchemy as it is loaded
# from application folder
config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{SQLITE_PATH}".replace("base/", "")
config['json_encoder'] = json_encoder
config.update({"CENDR_VERSION": CENDR_VERSION,
"APP_CONFIG": APP_CONFIG,
"DATASET_RELEASE": DATASET_RELEASE,
"WORMBASE_VERSION": WORMBASE_VERSION,
"RELEASES": RELEASES})
return config
# Generate the configuration
config = get_config(APP_CONFIG)
| 30.79661 | 87 | 0.681343 | # Application Configuration
import os
import yaml
from base.utils.data_utils import json_encoder
# CeNDR Version
APP_CONFIG, CENDR_VERSION = os.environ['GAE_VERSION'].split("-", 1)
if APP_CONFIG not in ['development', 'master']:
APP_CONFIG = 'development'
CENDR_VERSION = CENDR_VERSION.replace("-", '.')
# BUILDS AND RELEASES
# The first release is the current release
# (RELEASE, ANNOTATION_GENOME)
RELEASES = [("20210121", "WS276"),
("20200815", "WS276"),
("20180527", "WS263"),
("20170531", "WS258"),
("20160408", "WS245")]
# The most recent release
DATASET_RELEASE, WORMBASE_VERSION = RELEASES[0]
# SQLITE DATABASE
SQLITE_PATH = f"base/cendr.{DATASET_RELEASE}.{WORMBASE_VERSION}.db"
def load_yaml(path):
    """Parse the YAML file at *path* with the safe loader and return its contents.

    Uses a context manager so the file handle is closed deterministically;
    the original passed a bare ``open(path)`` whose handle was only
    reclaimed whenever the garbage collector got to it.
    """
    with open(path) as stream:
        return yaml.load(stream, Loader=yaml.SafeLoader)
# CONFIG
def get_config(APP_CONFIG):
    """Assemble the application configuration dict.

    Merges the environment-independent base settings with the settings for
    the given APP_CONFIG environment, then layers in the constants derived
    at module level.
    """
    base_vars = load_yaml("env_config/base.yaml")
    env_vars = load_yaml(f"env_config/{APP_CONFIG}.yaml")
    config = {**base_vars, **env_vars}
    # SQLAlchemy is initialised from inside the application folder, so the
    # "base/" prefix must be stripped from the sqlite path.
    config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{SQLITE_PATH}".replace("base/", "")
    config['json_encoder'] = json_encoder
    config["CENDR_VERSION"] = CENDR_VERSION
    config["APP_CONFIG"] = APP_CONFIG
    config["DATASET_RELEASE"] = DATASET_RELEASE
    config["WORMBASE_VERSION"] = WORMBASE_VERSION
    config["RELEASES"] = RELEASES
    return config
# Generate the configuration
config = get_config(APP_CONFIG)
| 56 | 0 | 23 |
777beac9ae08c62f39158657df84701c5b5b92be | 1,477 | py | Python | recipes/blood_cells/object_detection/train.py | lbolanos/HugsVision | c9b1708928916bc413a795590d731308ea8c582b | [
"MIT"
] | 154 | 2021-08-14T00:09:15.000Z | 2022-03-16T02:42:01.000Z | recipes/blood_cells/object_detection/train.py | lbolanos/HugsVision | c9b1708928916bc413a795590d731308ea8c582b | [
"MIT"
] | 36 | 2021-08-13T23:42:15.000Z | 2022-03-31T03:57:23.000Z | recipes/blood_cells/object_detection/train.py | lbolanos/HugsVision | c9b1708928916bc413a795590d731308ea8c582b | [
"MIT"
] | 12 | 2021-09-04T00:49:35.000Z | 2022-02-24T13:20:54.000Z | import argparse
from hugsvision.nnet.ObjectDetectionTrainer import ObjectDetectionTrainer
parser = argparse.ArgumentParser(description='Object Detection')
parser.add_argument('--name', type=str, default="MyDETRModel", help='The name of the model')
parser.add_argument('--train', type=str, default="./BCCD_COCO/train/", help='The directory of the train folder containing the _annotations.coco.json')
parser.add_argument('--dev', type=str, default="./BCCD_COCO/valid/", help='The directory of the dev folder containing the _annotations.coco.json')
parser.add_argument('--test', type=str, default="./BCCD_COCO/test/", help='The directory of the test folder containing the _annotations.coco.json')
parser.add_argument('--output', type=str, default="./out/", help='The output directory of the model')
parser.add_argument('--epochs', type=int, default=1, help='Number of Epochs')
parser.add_argument('--batch_size', type=int, default=4, help='Batch size')
args = parser.parse_args()
huggingface_model = "facebook/detr-resnet-50"
# huggingface_model = "facebook/detr-resnet-101"
# Train the model
trainer = ObjectDetectionTrainer(
model_name = args.name,
output_dir = args.output,
train_path = args.train,
dev_path = args.dev,
test_path = args.test,
model_path = huggingface_model,
max_epochs = args.epochs,
batch_size = args.batch_size,
)
# Test on a single image
trainer.testing(img_path='../../../samples/blood_cells/42.jpg') | 43.441176 | 151 | 0.738659 | import argparse
from hugsvision.nnet.ObjectDetectionTrainer import ObjectDetectionTrainer
parser = argparse.ArgumentParser(description='Object Detection')
parser.add_argument('--name', type=str, default="MyDETRModel", help='The name of the model')
parser.add_argument('--train', type=str, default="./BCCD_COCO/train/", help='The directory of the train folder containing the _annotations.coco.json')
parser.add_argument('--dev', type=str, default="./BCCD_COCO/valid/", help='The directory of the dev folder containing the _annotations.coco.json')
parser.add_argument('--test', type=str, default="./BCCD_COCO/test/", help='The directory of the test folder containing the _annotations.coco.json')
parser.add_argument('--output', type=str, default="./out/", help='The output directory of the model')
parser.add_argument('--epochs', type=int, default=1, help='Number of Epochs')
parser.add_argument('--batch_size', type=int, default=4, help='Batch size')
args = parser.parse_args()
huggingface_model = "facebook/detr-resnet-50"
# huggingface_model = "facebook/detr-resnet-101"
# Train the model
trainer = ObjectDetectionTrainer(
model_name = args.name,
output_dir = args.output,
train_path = args.train,
dev_path = args.dev,
test_path = args.test,
model_path = huggingface_model,
max_epochs = args.epochs,
batch_size = args.batch_size,
)
# Test on a single image
trainer.testing(img_path='../../../samples/blood_cells/42.jpg') | 0 | 0 | 0 |
673d9575322508e179fee0b5f4ca442afccabd22 | 16,575 | py | Python | montepython/PolyChord.py | fkoehlin/montepython_2cosmos_public | 9901eaad8fb2497eb8fd899ec2a40c405e421d98 | [
"MIT"
] | 2 | 2019-02-14T18:44:52.000Z | 2020-04-06T10:25:31.000Z | montepython/PolyChord.py | fkoehlin/montepython_2cosmos_public | 9901eaad8fb2497eb8fd899ec2a40c405e421d98 | [
"MIT"
] | 2 | 2019-07-10T13:49:38.000Z | 2019-08-07T16:00:31.000Z | montepython/PolyChord.py | fkoehlin/montepython_2cosmos_public | 9901eaad8fb2497eb8fd899ec2a40c405e421d98 | [
"MIT"
] | null | null | null | """
.. module:: PolyChord
:synopsis: Interface the PolyChord program with MontePython
This implementation relies heavily on the existing Python wrapper for
PolyChord, called PyPolyChord, which comes with the PolyChord code.
To install PolyChord, download it from
`its GitHub repo <https://github.com/PolyChord/PolyChordLite>`_
and follow `these instructions <https://github.com/PolyChord/PolyChordLite#python-likelihoods-pypolychord>`_.
The main routine, :func:`run`, truly interfaces the two codes. It takes for
input the cosmological module, data and command line. It then defines
internally two functions, :func:`prior() <PolyChord.prior>` and
:func:`loglike` that will serve as input for the run function of PyPolyChord.
.. moduleauthor:: Will Handley <wh260@cam.ac.uk>
"""
from __future__ import print_function
from pypolychord import run_polychord as polychord_run
from pypolychord.settings import PolyChordSettings as PC_Settings
import numpy as np
import os
#from copy import copy
import warnings
import io_mp
import sampler
# Data on file names and PolyChord options, that may be called by other modules
# PolyChord subfolder and name separator
PC_subfolder = 'PC'
name_rejected = '_dead-birth.txt' # rejected points
name_post = '.txt' # accepted points
name_stats = '.stats' # summarized information, explained
# New files
name_paramnames = '.paramnames' # in the PC/ subfolder
name_arguments = '.arguments' # in the PC/ subfolder
name_chain_acc = 'chain_PC__accepted.txt' # in the chain root folder
name_chain_rej = 'chain_PC__rejected.txt' # in the chain root folder
# Log.param name (ideally, we should import this one from somewhere else)
name_logparam = 'log.param'
# PolyChord option prefix
PC_prefix = 'PC_'
# User-defined arguments of PyPolyChord, and 'argparse' keywords
# First: basic string -> bool type conversion:
str2bool = lambda s: True if s.lower() == 'true' else False
PC_user_arguments = {
'nlive':
{'type': int,
'help':(
'(Default: nDims*25)\n'
'The number of live points.\n'
'Increasing nlive increases the accuracy of posteriors and evidences,\n'
'and proportionally increases runtime ~ O(nlive).'
)
},
'num_repeats' :
{'type': int,
'help':(
'(Default: nDims*5)\n'
'The number of slice slice-sampling steps to generate a new point.\n'
'Increasing num_repeats increases the reliability of the algorithm.\n'
'Typically\n'
'* for reliable evidences need num_repeats ~ O(5*nDims).\n'
'* for reliable posteriors need num_repeats ~ O(nDims)'
)
},
'do_clustering' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Whether or not to use clustering at run time.'
)
},
'feedback' :
{'type': int,
'help':(
'(Default: 1)\n'
'How much command line feedback to give\n'
'[0,1,2,3]'
)
},
'precision_criterion' :
{'type': float,
'help':(
'(Default: 0.001)\n'
'Termination criterion. Nested sampling terminates when the evidence\n'
'contained in the live points is precision_criterion fraction of the\n'
'total evidence.'
)
},
'max_ndead' :
{'type': int,
'help':(
'(Default: -1)\n'
'Alternative termination criterion. Stop after max_ndead iterations.\n'
'Set negative to ignore (default).'
)
},
'boost_posterior' :
{'type': float,
'help':(
'(Default: 0.0)\n'
'Increase the number of posterior samples produced. This can be set\n'
'arbitrarily high, but you won\'t be able to boost by more than\n'
'num_repeats\n'
'Warning: in high dimensions PolyChord produces _a lot_ of posterior\n'
'samples. You probably don\'t need to change this'
)
},
'posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (weighted) posterior samples. Stored in <root>.txt.'
)
},
'equals' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (equally weighted) posterior samples. Stored in\n'
'<root>_equal_weights.txt'
)
},
'cluster_posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce posterior files for each cluster?\n'
'Does nothing if do_clustering=False.'
)
},
'write_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Create a resume file.'
)
},
'read_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Read from resume file.'
)
},
'write_stats' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write an evidence statistics file.'
)
},
'write_live' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a live points file.'
)
},
'write_dead' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a dead points file.'
)
},
'compression_factor' :
{'type': float,
'help':(
'(Default: exp(-1))\n'
'How often to update the files and do clustering.'
)
}
}
# Automatically-defined arguments of PyMultiNest, type specified
PC_auto_arguments = {
'file_root': {'type': str},
'base_dir': {'type': str},
'grade_dims': {'type': list},
'grade_frac': {'type': list}
}
# FK: changes for 2cosmos capability
def initialise(cosmo1, cosmo2, data, command_line):
    """
    Prepare the folder layout and arguments for a PolyChord run.

    (The original docstring said "MultiNest"; this module drives PolyChord.)

    Populates ``data.PC_arguments`` (automatic + user-supplied options) and
    ``data.PC_param_names``, creates the ``PC/`` and ``PC/clusters/``
    sub-folders, and records the arguments and parameter ordering to disk.

    Parameters
    ----------
    cosmo1, cosmo2 :
        The two cosmological modules (2cosmos variant of Monte Python).
        Not used here directly, kept for a uniform sampler interface.
    data :
        Monte Python data container; mutated in place.
    command_line :
        Parsed command-line options; the ``PC_``-prefixed flags are read.

    Raises
    ------
    io_mp.ConfigurationError
        If any varying parameter has a non-flat prior or is unbounded;
        nested sampling requires flat, bounded priors.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])
    # Slow parameters: both cosmologies; fast parameters: nuisance.
    nslow = len(data.get_mcmc_parameters(['varying', 'cosmo1'])) + len(data.get_mcmc_parameters(['varying', 'cosmo2']))
    nfast = len(data.get_mcmc_parameters(['varying', 'nuisance']))
    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible ' +
            'with flat priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible ' +
            'for bound parameters. Set reasonable bounds for them in the ' +
            '".param" file.')
    # If absent, create the sub-folder PC
    PC_folder = os.path.join(command_line.folder, PC_subfolder)
    if not os.path.exists(PC_folder):
        os.makedirs(PC_folder)
    # If absent, create the sub-folder PC/clusters
    PC_clusters_folder = os.path.join(PC_folder,'clusters')
    if not os.path.exists(PC_clusters_folder):
        os.makedirs(PC_clusters_folder)
    # Use chain name as a base name for PolyChord files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(PC_folder, chain_name)
    # Prepare arguments for PyPolyChord
    # -- Automatic arguments
    data.PC_arguments['file_root'] = chain_name
    data.PC_arguments['base_dir'] = PC_folder
    data.PC_arguments['grade_dims'] = []
    data.PC_arguments['grade_frac'] = []
    # Spend 75% of the time on slow (cosmology) parameters, 25% on fast
    # (nuisance) ones, when both groups are present.
    if nslow:
        data.PC_arguments['grade_dims'].append(nslow)
        data.PC_arguments['grade_frac'].append(0.75)
    if nfast:
        data.PC_arguments['grade_dims'].append(nfast)
        data.PC_arguments['grade_frac'].append(0.25)
    # Two slice-sampling repeats per dimension of the first (slow) group.
    data.PC_arguments['num_repeats'] = data.PC_arguments['grade_dims'][0] * 2
    # -- User-defined arguments
    for arg in PC_user_arguments:
        value = getattr(command_line, PC_prefix+arg)
        # -1 is the sentinel meaning "not set on the command line".
        if value != -1:
            data.PC_arguments[arg] = value
        # else: don't define them -> use PyPolyChord default value
    data.PC_param_names = varying_param_names
    # Write the PolyChord arguments and parameter ordering
    with open(base_name+name_arguments, 'w') as afile:
        for arg in data.PC_arguments:
            afile.write(' = '.join(
                [str(arg), str(data.PC_arguments[arg])]))
            afile.write('\n')
    with open(base_name+name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(data.PC_param_names+derived_param_names))
# FK: changes for 2cosmos capability
def run(cosmo1, cosmo2, data, command_line):
    """
    Main call to run the PolyChord sampler.
    Note the unusual set-up here, with the two following functions, `prior` and
    `loglike` having their docstrings written in the encompassing function.
    This trick was necessary as PolyChord required these two functions to be
    defined with a given number of parameters, so we could not add `data`. By
    defining them inside the run function, this problem was by-passed.
    .. function:: prior
        Generate the prior function for PolyChord
        It should transform the input unit cube into the parameter cube. This
        function actually wraps the method :func:`map_from_unit_interval()
        <prior.Prior.map_from_unit_interval>` of the class :class:`Prior
        <prior.Prior>`.
        Parameters
        ----------
        cube : list
            Contains the current point in unit parameter space that has been
            selected within the PolyChord part.
        Returns
        -------
        theta : list
            The transformed physical parameters
    .. function:: loglike
        Generate the Likelihood function for PolyChord
        Parameters
        ----------
        theta : array
            Contains the current point in the correct parameter space after
            transformation from :func:`prior`.
        Returns
        -------
        logl : float
            The loglikelihood of theta
        phi : list
            The derived parameters
    """
    # Convenience variables
    derived_param_names = data.get_mcmc_parameters(['derived'])
    nDims = len(data.PC_param_names)
    nDerived = len(derived_param_names)
    # Function giving the prior probability
    def prior(hypercube):
        """
        Please see the encompassing function docstring
        """
        theta = [0.0] * nDims
        for i, name in enumerate(data.PC_param_names):
            theta[i] = data.mcmc_parameters[name]['prior']\
                .map_from_unit_interval(hypercube[i])
        return theta
    # Function giving the likelihood probability
    def loglike(theta):
        """
        Please see the encompassing function docstring
        """
        # Updates values: theta --> data
        try:
            data.check_for_slow_step(theta)
        except KeyError:
            pass
        for i, name in enumerate(data.PC_param_names):
            data.mcmc_parameters[name]['current'] = theta[i]
        data.update_cosmo1_arguments()
        data.update_cosmo2_arguments()
        # Compute likelihood
        logl = sampler.compute_lkl(cosmo1, cosmo2, data)
        # Compute derived parameters and pass them back
        phi = [0.0] * nDerived
        for i, name in enumerate(derived_param_names):
            phi[i] = float(data.mcmc_parameters[name]['current'])
        return logl, phi
    # Pass over the settings
    settings = PC_Settings(nDims, nDerived)
    # dict.items() instead of the Python-2-only dict.iteritems(), which
    # raises AttributeError under Python 3.
    for arg, val in data.PC_arguments.items():
        setattr(settings, arg, val)
    # Launch PolyChord
    polychord_run(loglike, nDims, nDerived, settings, prior)
    # FK: write out the warning message below also as a file in the PC-subfolder
    # so that there's a clear indication for convergence instead of just looking at
    # the STDOUT-log!
    text = 'The sampling with PolyChord is done.\n' + \
           'You can now analyse the output calling Monte Python ' + \
           'with the -info flag in the chain_name/PC subfolder.'
    warnings.warn(text)
    fname = os.path.join(data.PC_arguments['base_dir'], 'convergence.txt')
    with open(fname, 'w') as afile:
        afile.write(text)
def from_PC_output_to_chains(folder):
    """
    Translate the output of PolyChord into readable output for Monte Python
    This routine will be called by the module :mod:`analyze`.
    If mode separation has been performed (i.e., multimodal=True), it creates
    'mode_#' subfolders containing a chain file with the corresponding samples
    and a 'log.param' file in which the starting point is the best fit of the
    nested sampling, and the same for the sigma. The minimum and maximum value
    are cropped to the extent of the modes in the case of the parameters used
    for the mode separation, and preserved in the rest.
    The mono-modal case is treated as a special case of the multi-modal one.
    """
    # Chain name is the parent folder of the PC/ sub-folder.
    chain_name = [a for a in folder.split(os.path.sep) if a][-2]
    base_name = os.path.join(folder, chain_name)
    # Read the arguments of the PC run
    # This file is intended to be machine generated: no "#" ignored or tests
    # done
    PC_arguments = {}
    with open(base_name+name_arguments, 'r') as afile:
        for line in afile:
            arg = line.split('=')[0].strip()
            value = line.split('=')[1].strip()
            # Recover the declared type of each argument from the dicts at
            # module scope, then cast the string value back.
            arg_type = (PC_user_arguments[arg]['type']
                        if arg in PC_user_arguments else
                        PC_auto_arguments[arg]['type'])
            value = arg_type(value)
            if arg == 'clustering_params':
                value = [a.strip() for a in value.split()]
            PC_arguments[arg] = value
    # NOTE(review): 'multimodal' is not declared in PC_user_arguments or
    # PC_auto_arguments, so this .get() always returns None here -- confirm
    # whether PolyChord runs are intended to set it.
    multimodal = PC_arguments.get('multimodal')
    # Read parameters order
    PC_param_names = np.loadtxt(base_name+name_paramnames, dtype='str').tolist()
    # In multimodal case, if there were no clustering params specified, ALL are
    if multimodal and not PC_arguments.get('clustering_params'):
        PC_arguments['clustering_params'] = PC_param_names
    # Extract the necessary information from the log.param file
    # Including line numbers of the parameters
    with open(os.path.join(folder, '..', name_logparam), 'r') as log_file:
        log_lines = log_file.readlines()
    # Number of the lines to be changed
    param_names = []
    param_lines = {}
    param_data = {}
    pre, pos = 'data.parameters[', ']'
    for i, line in enumerate(log_lines):
        if pre in line:
            if line.strip()[0] == '#':
                continue
            # These lines allow PolyChord to deal with fixed nuisance parameters
            sigma = float(line.split(',')[3].strip())
            if sigma == 0.0:
                # If derived parameter, keep it, else discard it:
                paramtype = line.split(',')[5].strip()[1:-2]
                if paramtype != 'derived':
                    continue
            param_name = line.split('=')[0][line.find(pre)+len(pre):
                                            line.find(pos)]
            param_name = param_name.replace('"','').replace("'",'').strip()
            param_names.append(param_name)
            param_data[param_name] = [a.strip() for a in
                                      line.split('=')[1].strip('[]').split(',')]
            param_lines[param_name] = i
    # Create the mapping from PC ordering to log.param ordering
    columns_reorder = [PC_param_names.index(param) for param in param_names]
    # Open the '.stats' file to see what happened and retrieve some info.
    # Context manager closes the handle even if parsing below raises
    # (the original used bare open()/close()).
    with open(base_name+name_stats, 'r') as stats_file:
        lines = stats_file.readlines()
    for line in lines:
        if 'log(Z) =' in line:
            global_logZ, global_logZ_err = [float(a.strip()) for a in
                                            line.split('=')[1].split('+/-')]
    # Prepare the accepted-points file -- modes are separated by 2 line breaks
    accepted_name = base_name + name_post
    data = np.loadtxt(accepted_name)
    # PolyChord stores -2*loglike in column 1; Monte Python expects -loglike.
    data[:, 1] = data[:, 1] / 2.
    np.savetxt(os.path.join(folder, '..', name_chain_acc), data, fmt='%.6e')
| 34.245868 | 119 | 0.614057 | """
.. module:: PolyChord
:synopsis: Interface the PolyChord program with MontePython
This implementation relies heavily on the existing Python wrapper for
PolyChord, called PyPolyChord, which comes with the PolyChord code.
To install PolyChord, download it from
`its GitHub repo <https://github.com/PolyChord/PolyChordLite>`_
and follow `these instructions <https://github.com/PolyChord/PolyChordLite#python-likelihoods-pypolychord>`_.
The main routine, :func:`run`, truly interfaces the two codes. It takes for
input the cosmological module, data and command line. It then defines
internally two functions, :func:`prior() <PolyChord.prior>` and
:func:`loglike` that will serve as input for the run function of PyPolyChord.
.. moduleauthor:: Will Handley <wh260@cam.ac.uk>
"""
from __future__ import print_function
from pypolychord import run_polychord as polychord_run
from pypolychord.settings import PolyChordSettings as PC_Settings
import numpy as np
import os
#from copy import copy
import warnings
import io_mp
import sampler
# Data on file names and PolyChord options, that may be called by other modules
# PolyChord subfolder and name separator
PC_subfolder = 'PC'
name_rejected = '_dead-birth.txt' # rejected points
name_post = '.txt' # accepted points
name_stats = '.stats' # summarized information, explained
# New files
name_paramnames = '.paramnames' # in the PC/ subfolder
name_arguments = '.arguments' # in the PC/ subfolder
name_chain_acc = 'chain_PC__accepted.txt' # in the chain root folder
name_chain_rej = 'chain_PC__rejected.txt' # in the chain root folder
# Log.param name (ideally, we should import this one from somewhere else)
name_logparam = 'log.param'
# PolyChord option prefix
PC_prefix = 'PC_'
# User-defined arguments of PyPolyChord, and 'argparse' keywords
# First: basic string -> bool type conversion:
str2bool = lambda s: True if s.lower() == 'true' else False
PC_user_arguments = {
'nlive':
{'type': int,
'help':(
'(Default: nDims*25)\n'
'The number of live points.\n'
'Increasing nlive increases the accuracy of posteriors and evidences,\n'
'and proportionally increases runtime ~ O(nlive).'
)
},
'num_repeats' :
{'type': int,
'help':(
'(Default: nDims*5)\n'
'The number of slice slice-sampling steps to generate a new point.\n'
'Increasing num_repeats increases the reliability of the algorithm.\n'
'Typically\n'
'* for reliable evidences need num_repeats ~ O(5*nDims).\n'
'* for reliable posteriors need num_repeats ~ O(nDims)'
)
},
'do_clustering' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Whether or not to use clustering at run time.'
)
},
'feedback' :
{'type': int,
'help':(
'(Default: 1)\n'
'How much command line feedback to give\n'
'[0,1,2,3]'
)
},
'precision_criterion' :
{'type': float,
'help':(
'(Default: 0.001)\n'
'Termination criterion. Nested sampling terminates when the evidence\n'
'contained in the live points is precision_criterion fraction of the\n'
'total evidence.'
)
},
'max_ndead' :
{'type': int,
'help':(
'(Default: -1)\n'
'Alternative termination criterion. Stop after max_ndead iterations.\n'
'Set negative to ignore (default).'
)
},
'boost_posterior' :
{'type': float,
'help':(
'(Default: 0.0)\n'
'Increase the number of posterior samples produced. This can be set\n'
'arbitrarily high, but you won\'t be able to boost by more than\n'
'num_repeats\n'
'Warning: in high dimensions PolyChord produces _a lot_ of posterior\n'
'samples. You probably don\'t need to change this'
)
},
'posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (weighted) posterior samples. Stored in <root>.txt.'
)
},
'equals' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (equally weighted) posterior samples. Stored in\n'
'<root>_equal_weights.txt'
)
},
'cluster_posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce posterior files for each cluster?\n'
'Does nothing if do_clustering=False.'
)
},
'write_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Create a resume file.'
)
},
'read_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Read from resume file.'
)
},
'write_stats' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write an evidence statistics file.'
)
},
'write_live' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a live points file.'
)
},
'write_dead' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a dead points file.'
)
},
'compression_factor' :
{'type': float,
'help':(
'(Default: exp(-1))\n'
'How often to update the files and do clustering.'
)
}
}
# Automatically-defined arguments of PyMultiNest, type specified
PC_auto_arguments = {
'file_root': {'type': str},
'base_dir': {'type': str},
'grade_dims': {'type': list},
'grade_frac': {'type': list}
}
# FK: changes for 2cosmos capability
def initialise(cosmo1, cosmo2, data, command_line):
"""
Main call to prepare the information for the MultiNest run.
"""
# Convenience variables
varying_param_names = data.get_mcmc_parameters(['varying'])
derived_param_names = data.get_mcmc_parameters(['derived'])
nslow = len(data.get_mcmc_parameters(['varying', 'cosmo1'])) + len(data.get_mcmc_parameters(['varying', 'cosmo2']))
nfast = len(data.get_mcmc_parameters(['varying', 'nuisance']))
# Check that all the priors are flat and that all the parameters are bound
is_flat, is_bound = sampler.check_flat_bound_priors(
data.mcmc_parameters, varying_param_names)
if not is_flat:
raise io_mp.ConfigurationError(
'Nested Sampling with PolyChord is only possible ' +
'with flat priors. Sorry!')
if not is_bound:
raise io_mp.ConfigurationError(
'Nested Sampling with PolyChord is only possible ' +
'for bound parameters. Set reasonable bounds for them in the ' +
'".param" file.')
# If absent, create the sub-folder PC
PC_folder = os.path.join(command_line.folder, PC_subfolder)
if not os.path.exists(PC_folder):
os.makedirs(PC_folder)
# If absent, create the sub-folder PC/clusters
PC_clusters_folder = os.path.join(PC_folder,'clusters')
if not os.path.exists(PC_clusters_folder):
os.makedirs(PC_clusters_folder)
# Use chain name as a base name for PolyChord files
chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
base_name = os.path.join(PC_folder, chain_name)
# Prepare arguments for PyPolyChord
# -- Automatic arguments
data.PC_arguments['file_root'] = chain_name
data.PC_arguments['base_dir'] = PC_folder
data.PC_arguments['grade_dims'] = []
data.PC_arguments['grade_frac'] = []
if nslow:
data.PC_arguments['grade_dims'].append(nslow)
data.PC_arguments['grade_frac'].append(0.75)
if nfast:
data.PC_arguments['grade_dims'].append(nfast)
data.PC_arguments['grade_frac'].append(0.25)
data.PC_arguments['num_repeats'] = data.PC_arguments['grade_dims'][0] * 2
# -- User-defined arguments
for arg in PC_user_arguments:
value = getattr(command_line, PC_prefix+arg)
if value != -1:
data.PC_arguments[arg] = value
# else: don't define them -> use PyPolyChord default value
data.PC_param_names = varying_param_names
# Write the PolyChord arguments and parameter ordering
with open(base_name+name_arguments, 'w') as afile:
for arg in data.PC_arguments:
afile.write(' = '.join(
[str(arg), str(data.PC_arguments[arg])]))
afile.write('\n')
with open(base_name+name_paramnames, 'w') as pfile:
pfile.write('\n'.join(data.PC_param_names+derived_param_names))
# FK: changes for 2cosmos capability
def run(cosmo1, cosmo2, data, command_line):
"""
Main call to run the PolyChord sampler.
Note the unusual set-up here, with the two following functions, `prior` and
`loglike` having their docstrings written in the encompassing function.
This trick was necessary as PolyChord required these two functions to be
defined with a given number of parameters, so we could not add `data`. By
defining them inside the run function, this problem was by-passed.
.. function:: prior
Generate the prior function for PolyChord
It should transform the input unit cube into the parameter cube. This
function actually wraps the method :func:`map_from_unit_interval()
<prior.Prior.map_from_unit_interval>` of the class :class:`Prior
<prior.Prior>`.
Parameters
----------
cube : list
Contains the current point in unit parameter space that has been
selected within the PolyChord part.
Returns
-------
theta : list
The transformed physical parameters
.. function:: loglike
Generate the Likelihood function for PolyChord
Parameters
----------
theta : array
Contains the current point in the correct parameter space after
transformation from :func:`prior`.
Returns
-------
logl : float
The loglikelihood of theta
phi : list
The derived parameters
"""
# Convenience variables
derived_param_names = data.get_mcmc_parameters(['derived'])
nDims = len(data.PC_param_names)
nDerived = len(derived_param_names)
# Function giving the prior probability
def prior(hypercube):
"""
Please see the encompassing function docstring
"""
theta = [0.0] * nDims
for i, name in enumerate(data.PC_param_names):
theta[i] = data.mcmc_parameters[name]['prior']\
.map_from_unit_interval(hypercube[i])
return theta
# Function giving the likelihood probability
def loglike(theta):
"""
Please see the encompassing function docstring
"""
# Updates values: theta --> data
try:
data.check_for_slow_step(theta)
except KeyError:
pass
for i, name in enumerate(data.PC_param_names):
data.mcmc_parameters[name]['current'] = theta[i]
data.update_cosmo1_arguments()
data.update_cosmo2_arguments()
# Compute likelihood
#logl = sampler.compute_lkl(cosmo1, cosmo2, data)[0,0]
# FK: index to scalar variable error...
logl = sampler.compute_lkl(cosmo1, cosmo2, data)
# Compute derived parameters and pass them back
phi = [0.0] * nDerived
for i, name in enumerate(derived_param_names):
phi[i] = float(data.mcmc_parameters[name]['current'])
return logl, phi
# Pass over the settings
settings = PC_Settings(nDims,nDerived)
for arg, val in data.PC_arguments.iteritems():
setattr(settings, arg, val)
# Launch PolyChord
polychord_run(loglike, nDims, nDerived, settings, prior)
# FK: write out the warning message below also as a file in the PC-subfolder
# so that there's a clear indication for convergence instead of just looking at
# the STDOUT-log!
text = 'The sampling with PolyChord is done.\n' + \
'You can now analyse the output calling Monte Python ' + \
'with the -info flag in the chain_name/PC subfolder.'
warnings.warn(text)
fname = os.path.join(data.PC_arguments['base_dir'], 'convergence.txt')
with open(fname, 'w') as afile:
afile.write(text)
def from_PC_output_to_chains(folder):
"""
Translate the output of PolyChord into readable output for Monte Python
This routine will be called by the module :mod:`analyze`.
If mode separation has been performed (i.e., multimodal=True), it creates
'mode_#' subfolders containing a chain file with the corresponding samples
and a 'log.param' file in which the starting point is the best fit of the
nested sampling, and the same for the sigma. The minimum and maximum value
are cropped to the extent of the modes in the case of the parameters used
for the mode separation, and preserved in the rest.
The mono-modal case is treated as a special case of the multi-modal one.
"""
chain_name = [a for a in folder.split(os.path.sep) if a][-2]
base_name = os.path.join(folder, chain_name)
# Read the arguments of the PC run
# This file is intended to be machine generated: no "#" ignored or tests
# done
PC_arguments = {}
with open(base_name+name_arguments, 'r') as afile:
for line in afile:
arg = line.split('=')[0].strip()
value = line.split('=')[1].strip()
arg_type = (PC_user_arguments[arg]['type']
if arg in PC_user_arguments else
PC_auto_arguments[arg]['type'])
value = arg_type(value)
if arg == 'clustering_params':
value = [a.strip() for a in value.split()]
PC_arguments[arg] = value
multimodal = PC_arguments.get('multimodal')
# Read parameters order
PC_param_names = np.loadtxt(base_name+name_paramnames, dtype='str').tolist()
# In multimodal case, if there were no clustering params specified, ALL are
if multimodal and not PC_arguments.get('clustering_params'):
PC_arguments['clustering_params'] = PC_param_names
# Extract the necessary information from the log.param file
# Including line numbers of the parameters
with open(os.path.join(folder, '..', name_logparam), 'r') as log_file:
log_lines = log_file.readlines()
# Number of the lines to be changed
param_names = []
param_lines = {}
param_data = {}
pre, pos = 'data.parameters[', ']'
for i, line in enumerate(log_lines):
if pre in line:
if line.strip()[0] == '#':
continue
# These lines allow PolyChord to deal with fixed nuisance parameters
sigma = float(line.split(',')[3].strip())
if sigma == 0.0:
#If derived parameter, keep it, else discard it:
paramtype = line.split(',')[5].strip()[1:-2]
if paramtype != 'derived':
continue
param_name = line.split('=')[0][line.find(pre)+len(pre):
line.find(pos)]
param_name = param_name.replace('"','').replace("'",'').strip()
param_names.append(param_name)
param_data[param_name] = [a.strip() for a in
line.split('=')[1].strip('[]').split(',')]
param_lines[param_name] = i
# Create the mapping from PC ordering to log.param ordering
columns_reorder = [PC_param_names.index(param) for param in param_names]
# Open the 'stats.dat' file to see what happened and retrieve some info
stats_file = open(base_name+name_stats, 'r')
lines = stats_file.readlines()
stats_file.close()
for line in lines:
if 'log(Z) =' in line:
global_logZ, global_logZ_err = [float(a.strip()) for a in
line.split('=')[1].split('+/-')]
# Prepare the accepted-points file -- modes are separated by 2 line breaks
accepted_name = base_name + name_post
data = np.loadtxt(accepted_name)
data[:, 1] = data[:, 1] / 2.
np.savetxt(os.path.join(folder, '..', name_chain_acc), data, fmt='%.6e')
| 0 | 0 | 0 |
71f3a33777560c44b8e78c3aa4322f9ac1433b34 | 1,226 | py | Python | services/ldnormaliser/servicer.py | SailSlick/rabble | 8da1705d15d265abf8c3173851d8894a309c955b | [
"MIT"
] | null | null | null | services/ldnormaliser/servicer.py | SailSlick/rabble | 8da1705d15d265abf8c3173851d8894a309c955b | [
"MIT"
] | null | null | null | services/ldnormaliser/servicer.py | SailSlick/rabble | 8da1705d15d265abf8c3173851d8894a309c955b | [
"MIT"
] | null | null | null | import json
from pyld import jsonld
from services.proto import ldnorm_pb2_grpc
from services.proto import ldnorm_pb2 as lpb2
from services.proto import general_pb2
| 35.028571 | 78 | 0.655791 | import json
from pyld import jsonld
from services.proto import ldnorm_pb2_grpc
from services.proto import ldnorm_pb2 as lpb2
from services.proto import general_pb2
class LDNormServicer(ldnorm_pb2_grpc.LDNormServicer):
def __init__(self, logger):
self._logger = logger
requests = jsonld.requests_document_loader(timeout=10)
jsonld.set_document_loader(requests)
def _norm(self, ld):
j = json.loads(ld)
flat = jsonld.flatten(j)
return json.dumps(flat)
def Normalise(self, request, context):
self._logger.debug('Got normalise request for: %s', request.json)
resp = lpb2.NormaliseResponse(
result_type=general_pb2.ResultType.OK
)
try:
resp.normalised = self._norm(request.json)
except (Exception, jsonld.JsonLdError) as e:
# For some reason JsonLdError doesn't inherit from Exception so it
# has to be caught seperately, it also has a super long message
# (~20 lines) so I truncate it.
self._logger.error(
"JSON-LD could not be normalised: %s", str(e)[:50])
resp.result_type = general_pb2.ResultType.ERROR
return resp
| 925 | 32 | 103 |
6ce0196b579de3a6a9f29c8aeebefeb7079eb496 | 607 | py | Python | GameOfLife/__init__.py | JnyJny/GameOfLife | 5df27fa0b288466b779a9a89590b3e9b022583d2 | [
"MIT"
] | 21 | 2016-06-29T20:50:01.000Z | 2022-03-08T03:38:30.000Z | GameOfLife/__init__.py | JnyJny/GameOfLife | 5df27fa0b288466b779a9a89590b3e9b022583d2 | [
"MIT"
] | null | null | null | GameOfLife/__init__.py | JnyJny/GameOfLife | 5df27fa0b288466b779a9a89590b3e9b022583d2 | [
"MIT"
] | 5 | 2016-06-29T20:50:16.000Z | 2021-06-02T10:10:12.000Z | '''Conway's Game of Life
See: https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life
This particular Game of Life is implemented as a two dimensional
world populated with cells.
'''
__author__ = '\n'.join(["Erik O'Shaughnessy",
'erik.oshaughnessy@gmail.com',
'https://github.com/JnyJny/GameOfLife'])
__version__ = "0.1.3"
from .cell import Cell as Cell
from .world import OptimizedWorld as World
from .world import OptimizedNumpyWorld as NumpyWorld
from .patterns import Patterns
__all__ = ['Cell', 'World', 'Patterns', 'tests',
'NumpyWorld']
'''Conway's Game of Life
See: https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life
This particular Game of Life is implemented as a two dimensional
world populated with cells.
'''
# Package metadata: author contact details, one item per line.
__author__ = '\n'.join(["Erik O'Shaughnessy",
                        'erik.oshaughnessy@gmail.com',
                        'https://github.com/JnyJny/GameOfLife'])
__version__ = "0.1.3"
# Re-export the public API under shorter, stable names.
from .cell import Cell as Cell
from .world import OptimizedWorld as World
from .world import OptimizedNumpyWorld as NumpyWorld
from .patterns import Patterns
# Names exported by "from GameOfLife import *".
# NOTE(review): 'tests' is listed but never imported above -- presumably a
# subpackage; confirm it exists before relying on the star-import.
__all__ = ['Cell', 'World', 'Patterns', 'tests',
           'NumpyWorld']
| 0 | 0 | 0 |
c31e92f37d0b6f7546db17b2925dd74a6087d199 | 8,109 | py | Python | mybitbank/libs/entities/coinaccount.py | kahazazzu/mybitbank | 945e604e5fee3914c7c98a25c2c34831ba0ad946 | [
"MIT"
] | 15 | 2015-08-29T12:35:59.000Z | 2018-02-06T06:26:26.000Z | mybitbank/libs/entities/coinaccount.py | FireWalkerX/mybitbank | 945e604e5fee3914c7c98a25c2c34831ba0ad946 | [
"MIT"
] | null | null | null | mybitbank/libs/entities/coinaccount.py | FireWalkerX/mybitbank | 945e604e5fee3914c7c98a25c2c34831ba0ad946 | [
"MIT"
] | 19 | 2015-02-03T21:32:51.000Z | 2021-11-06T12:08:26.000Z | """
The MIT License (MIT)
Copyright (c) 2016 Stratos Goudelis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import hashlib
from cacher import Cacher
from coinaddress import CoinAddress
from cointransaction import CoinTransaction
from mybitbank.libs import misc
from mybitbank.libs.connections import connector
class CoinAccount(object):
'''
Class for an account
'''
@property
def provider_id(self):
'''
Property for the provider id
'''
return self.get('provider_id', None)
def __getitem__(self, key):
'''
Getter for dictionary-line behavior
'''
if key == "addresses":
return self.getAddresses()
elif key == "last_activity":
return self.getLastActivity()
elif key == "currency_symbol":
return self.getCurrencySymbol()
elif key == "currency_code":
return self.getCurrencyCode()
elif key == 'identifier':
return self.getIdentifier()
account = getattr(self, '_account')
return account.get(key, None)
def __setitem__(self, key, value):
'''
Setter for dictionary-line behavior
'''
account = getattr(self, '_account')
account[key] = value
return setattr(self, '_account', account)
def get(self, key, default=False):
'''
Getter for dictionary-line behavior
'''
if self._account.get(key, False):
return self._account.get(key, False)
else:
return default
def haskey(self, key):
'''
Check the existence of key
'''
if key in self._account.keys():
return True
else:
return False
def getParamHash(self, param=""):
'''
This function takes a string and calculates a sha224 hash out of it.
It is used to hash the input parameters of functions/method in order to
uniquely identify a cached result based only on the input parameters of
the function/method call.
'''
cache_hash = hashlib.sha224(param).hexdigest()
return cache_hash
def getIdentifier(self):
'''
There is no unique identifier for an account in a xxxcoind daemon
so lets make one. Hopefully the below hashing method will uniquely
identify an account for us
'''
unique_string = "provider_id=%s&name=%s¤cy=%s" % (self['provider_id'], self['name'], self['currency'])
identifier = hashlib.sha1(unique_string).hexdigest()
return identifier
def isDefault(self):
'''
Return bool whether this is a default account or not
'''
if self._account['name'] == u"":
self._hidden = True
return True
else:
return False
def getBalance(self):
'''
Return the account balance
'''
balance = connector.getBalance(self.provider_id, self['name'])
return misc.longNumber(balance)
def isHidden(self):
'''
Return bool if this account is hidden
'''
return self._hidden or self._account['hidden'] or self.isDefault()
def getAddresses(self):
'''
Get the address for an account name
'''
# check for cached data, use that or get it again
cache_hash = self.getParamHash("name=%s" % (self['name']))
cached_object = self._cache.fetch('addressesbyaccount', cache_hash)
if cached_object:
return cached_object
addresses = connector.getAddressesByAccount(self['name'], self.provider_id)
addresses_list = []
for address in addresses:
coinaddr = CoinAddress(address, self)
addresses_list.append(coinaddr)
# cache the result
self._cache.store('addressesbyaccount', cache_hash, addresses_list)
return addresses_list
def getAddressesCount(self):
'''
Return the number of address under this account
'''
addresses = self.getAddresses()
return len(addresses)
def getLastActivity(self):
'''
Return the date of the last activity
'''
last_transaction = self.listTransactions(1, 0)
if last_transaction:
last_activity = misc.twitterizeDate(last_transaction[0]['time'])
else:
last_activity = "never"
self['last_activity'] = last_activity
return last_activity
def getCurrencySymbol(self):
'''
Return the Unicode currency symbol
'''
return misc.getCurrencySymbol(connector, self.getCurrencyCode())
def getCurrencyCode(self):
'''
Return the currency code
'''
return self.get('currency', "").lower()
def listTransactions(self, limit=100000, start=0, orderby='time', reverse=True):
'''
Get a list of transactions by account name and provider_id
'''
cache_hash = self.getParamHash("limit=%s&start=%sorderby=%s&reverse=%s" % (limit, start, orderby, reverse))
cached_object = self._cache.fetch('transactions', cache_hash)
if cached_object:
return cached_object
transactions = []
transaction_list = connector.listTransactionsByAccount(self['name'], self['provider_id'], limit, start)
for entry in transaction_list:
if entry.get('address', False):
entry['address'] = CoinAddress(entry['address'], self)
# give out a provider id and a currency code to the transaction dict
entry['provider_id'] = self.provider_id
entry['currency'] = self['currency']
if entry['category'] == 'receive':
entry['source_address'] = CoinAddress(entry.get('details', {}).get('sender_address', False), "This is a sender address!")
elif entry['category'] == 'send':
entry['source_addresses'] = self['wallet'].getAddressesByAccount(entry['account'])
entry['wallet'] = self['wallet']
coin_transaction = CoinTransaction(entry)
transactions.append(coin_transaction)
# sort result
transactions = sorted(transactions, key=lambda transaction: transaction[orderby], reverse=reverse)
# cache the result
self._cache.store('transactions', cache_hash, transactions)
return transactions
| 33.7875 | 137 | 0.60735 | """
The MIT License (MIT)
Copyright (c) 2016 Stratos Goudelis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import hashlib
from cacher import Cacher
from coinaddress import CoinAddress
from cointransaction import CoinTransaction
from mybitbank.libs import misc
from mybitbank.libs.connections import connector
class CoinAccount(object):
    '''
    A single wallet account exposed by a coin daemon (xxxcoind).

    Wraps the raw account dict with dictionary-like access, lazy
    balance/address/transaction lookups through the global connector,
    and per-instance caching of the expensive daemon calls.
    '''
    def __init__(self, accountDetails):
        '''
        Keyword arguments:
        accountDetails -- dict of account fields; must contain 'provider_id'
        '''
        self._errors = []
        self._account = {}
        self._hidden = False
        # Cache buckets for expensive connector calls, keyed by a hash of
        # the call parameters (see getParamHash()).
        self._cache = Cacher({
            'transactions': {},
            'balances': {},
            'addressesbyaccount': {},
        })
        if type(accountDetails) is dict:
            self._account = accountDetails
            self._provider_id = accountDetails['provider_id']

    @property
    def provider_id(self):
        '''
        Property for the provider id
        '''
        return self.get('provider_id', None)

    def __getitem__(self, key):
        '''
        Getter for dictionary-like behavior; several keys are computed
        lazily instead of being read from the underlying account dict.
        '''
        if key == "addresses":
            return self.getAddresses()
        elif key == "last_activity":
            return self.getLastActivity()
        elif key == "currency_symbol":
            return self.getCurrencySymbol()
        elif key == "currency_code":
            return self.getCurrencyCode()
        elif key == 'identifier':
            return self.getIdentifier()
        account = getattr(self, '_account')
        return account.get(key, None)

    def __setitem__(self, key, value):
        '''
        Setter for dictionary-like behavior
        '''
        account = getattr(self, '_account')
        account[key] = value
        return setattr(self, '_account', account)

    def get(self, key, default=False):
        '''
        Getter for dictionary-like behavior.

        NOTE: returns `default` whenever the stored value is falsy (0, "",
        None, ...), not only when the key is missing -- callers rely on
        this, so the behavior is preserved.
        '''
        if self._account.get(key, False):
            return self._account.get(key, False)
        else:
            return default

    def haskey(self, key):
        '''
        Check the existence of key
        '''
        # Membership test on the dict itself; avoids building a key list.
        return key in self._account

    def getParamHash(self, param=""):
        '''
        This function takes a string and calculates a sha224 hash out of it.
        It is used to hash the input parameters of functions/methods in order
        to uniquely identify a cached result based only on the input
        parameters of the function/method call.
        '''
        # hashlib requires bytes on Python 3; on Python 2 a plain str is
        # already bytes and is passed through unchanged.
        if not isinstance(param, bytes):
            param = param.encode('utf-8')
        cache_hash = hashlib.sha224(param).hexdigest()
        return cache_hash

    def getIdentifier(self):
        '''
        There is no unique identifier for an account in a xxxcoind daemon
        so lets make one. Hopefully the below hashing method will uniquely
        identify an account for us
        '''
        unique_string = "provider_id=%s&name=%s&currency=%s" % (self['provider_id'], self['name'], self['currency'])
        # Same bytes guard as getParamHash(), for Python 3 compatibility.
        if not isinstance(unique_string, bytes):
            unique_string = unique_string.encode('utf-8')
        identifier = hashlib.sha1(unique_string).hexdigest()
        return identifier

    def isDefault(self):
        '''
        Return bool whether this is a default (unnamed) account or not.
        Default accounts are also marked hidden as a side effect.
        '''
        if self._account['name'] == u"":
            self._hidden = True
            return True
        else:
            return False

    def getBalance(self):
        '''
        Return the account balance
        '''
        balance = connector.getBalance(self.provider_id, self['name'])
        return misc.longNumber(balance)

    def isHidden(self):
        '''
        Return bool if this account is hidden
        '''
        return self._hidden or self._account['hidden'] or self.isDefault()

    def getAddresses(self):
        '''
        Get the addresses for this account name, caching the result.
        '''
        # check for cached data, use that or get it again
        cache_hash = self.getParamHash("name=%s" % (self['name']))
        cached_object = self._cache.fetch('addressesbyaccount', cache_hash)
        if cached_object:
            return cached_object
        addresses = connector.getAddressesByAccount(self['name'], self.provider_id)
        addresses_list = []
        for address in addresses:
            coinaddr = CoinAddress(address, self)
            addresses_list.append(coinaddr)
        # cache the result
        self._cache.store('addressesbyaccount', cache_hash, addresses_list)
        return addresses_list

    def getAddressesCount(self):
        '''
        Return the number of addresses under this account
        '''
        addresses = self.getAddresses()
        return len(addresses)

    def getLastActivity(self):
        '''
        Return the (humanized) date of the last transaction, or "never".
        '''
        last_transaction = self.listTransactions(1, 0)
        if last_transaction:
            last_activity = misc.twitterizeDate(last_transaction[0]['time'])
        else:
            last_activity = "never"
        self['last_activity'] = last_activity
        return last_activity

    def getCurrencySymbol(self):
        '''
        Return the Unicode currency symbol
        '''
        return misc.getCurrencySymbol(connector, self.getCurrencyCode())

    def getCurrencyCode(self):
        '''
        Return the currency code
        '''
        return self.get('currency', "").lower()

    def listTransactions(self, limit=100000, start=0, orderby='time', reverse=True):
        '''
        Get a list of transactions by account name and provider_id.
        Results are cached per (limit, start, orderby, reverse) tuple.
        '''
        # Bug fix: the original key format was "start=%sorderby=%s" (missing
        # '&'), which could let distinct parameter sets collide in the cache.
        cache_hash = self.getParamHash("limit=%s&start=%s&orderby=%s&reverse=%s" % (limit, start, orderby, reverse))
        cached_object = self._cache.fetch('transactions', cache_hash)
        if cached_object:
            return cached_object
        transactions = []
        transaction_list = connector.listTransactionsByAccount(self['name'], self['provider_id'], limit, start)
        for entry in transaction_list:
            if entry.get('address', False):
                entry['address'] = CoinAddress(entry['address'], self)
            # give out a provider id and a currency code to the transaction dict
            entry['provider_id'] = self.provider_id
            entry['currency'] = self['currency']
            if entry['category'] == 'receive':
                entry['source_address'] = CoinAddress(entry.get('details', {}).get('sender_address', False), "This is a sender address!")
            elif entry['category'] == 'send':
                entry['source_addresses'] = self['wallet'].getAddressesByAccount(entry['account'])
            entry['wallet'] = self['wallet']
            coin_transaction = CoinTransaction(entry)
            transactions.append(coin_transaction)
        # sort result
        transactions = sorted(transactions, key=lambda transaction: transaction[orderby], reverse=reverse)
        # cache the result
        self._cache.store('transactions', cache_hash, transactions)
        return transactions
| 383 | 0 | 31 |
ced18a94b7867a0d537b815e9378c0be5f8d23c8 | 1,226 | py | Python | week_3/POO_livros.py | angelitabrg/lih_lab_python2 | 88d19d20704552e1a82c29793073e59dcf4b78c6 | [
"MIT"
] | null | null | null | week_3/POO_livros.py | angelitabrg/lih_lab_python2 | 88d19d20704552e1a82c29793073e59dcf4b78c6 | [
"MIT"
] | null | null | null | week_3/POO_livros.py | angelitabrg/lih_lab_python2 | 88d19d20704552e1a82c29793073e59dcf4b78c6 | [
"MIT"
] | null | null | null |
def main():
    """Create a few Livros_lidos records and print a verdict for each.

    Bug fix: this was previously declared as ``main(self)`` even though it
    is a module-level function invoked as ``main()``, which raised a
    TypeError at runtime.
    """
    livro1 = Livros_lidos('Eleanor & Park', 'Rainbow Rowell', 2012, 5, 3.99)
    livro2 = Livros_lidos('O mito da beleza', 'Naomi Wolf', 1990, 4.5, 3.90)
    livro3 = Livros_lidos('21 Lessons for the 21st Century', 'Yuval Noah Harari', 2018, 4.75, 4.17)
    livro4 = Livros_lidos('In the Penal Colony', 'Franz Kafka', 1919, 3, 3.97)
    livro1.imprima()
    livro2.imprima()
    livro3.imprima()
    livro4.imprima()


class Livros_lidos:
    """A book the user has read: identification plus a personal rating
    (nota) and the public Goodreads average (goodreads)."""

    def __init__(self, titulo, autor, ano, nota, aval_publica):
        self.titulo = titulo
        self.autor = autor
        self.ano = ano
        self.nota = nota
        self.goodreads = aval_publica

    def imprima(self):
        """Print a one-line verdict comparing my rating to the public one."""
        if self.goodreads < 1:  # almost nobody liked the book
            print('Quase ninguém gostou do livro %s do(a) autor(a) %s escrito em %d.'%(self.titulo, self.autor, self.ano))
        elif self.nota > self.goodreads:
            print('Eu gostei do livro %s do(a) autor(a) %s escrito em %d mais do que a maioria das pessoas.'%(self.titulo, self.autor, self.ano))
        else:
            print('O livro %s do(a) autor(a) %s escrito em %d é um livro bem avaliado.'%(self.titulo, self.autor, self.ano))


main()
0d0d20b8d1fe4a06cb81c806169aa0a616284346 | 382 | py | Python | school/simpleApi/migrations/0007_alter_gradeable_student.py | kiarashplusplus/PaperPileSchool | 40f91eea15d743bd22f918cec42e9c778b3d6d7d | [
"MIT"
] | null | null | null | school/simpleApi/migrations/0007_alter_gradeable_student.py | kiarashplusplus/PaperPileSchool | 40f91eea15d743bd22f918cec42e9c778b3d6d7d | [
"MIT"
] | null | null | null | school/simpleApi/migrations/0007_alter_gradeable_student.py | kiarashplusplus/PaperPileSchool | 40f91eea15d743bd22f918cec42e9c778b3d6d7d | [
"MIT"
] | null | null | null | # Generated by Django 3.2.4 on 2021-07-07 22:45
from django.db import migrations, models
| 20.105263 | 49 | 0.596859 | # Generated by Django 3.2.4 on 2021-07-07 22:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('simpleApi', '0006_auto_20210707_2119'),
]
operations = [
migrations.AlterField(
model_name='gradeable',
name='student',
field=models.IntegerField(),
),
]
| 0 | 268 | 23 |
552249907b4b1b3ba5dc30b9255b8c99440d938a | 51 | py | Python | python/191108_python_review/datatype.py | hbyyy/TIL | e89ae2913a8a38eb7f480a9ec2324c3ac11e309e | [
"MIT"
] | null | null | null | python/191108_python_review/datatype.py | hbyyy/TIL | e89ae2913a8a38eb7f480a9ec2324c3ac11e309e | [
"MIT"
] | 1 | 2022-03-26T07:50:54.000Z | 2022-03-26T07:50:54.000Z | python/191108_python_review/datatype.py | hbyyy/TIL | e89ae2913a8a38eb7f480a9ec2324c3ac11e309e | [
"MIT"
] | null | null | null | a = {'a': 1, 'b' : 2}
b = list(a.items())
print(b)
| 12.75 | 21 | 0.431373 | a = {'a': 1, 'b' : 2}
b = list(a.items())
print(b)
| 0 | 0 | 0 |
944fe4b4d75c9a0c4096ba02d7a1fe92bd4513de | 2,968 | py | Python | main.py | manacute/Runtime-Terror-C4 | 6c9d9246090c32077bc4425c75e80f485037fe3e | [
"Apache-2.0"
] | null | null | null | main.py | manacute/Runtime-Terror-C4 | 6c9d9246090c32077bc4425c75e80f485037fe3e | [
"Apache-2.0"
] | null | null | null | main.py | manacute/Runtime-Terror-C4 | 6c9d9246090c32077bc4425c75e80f485037fe3e | [
"Apache-2.0"
] | null | null | null | import pygame, sys
from Move import Move
from BoardModel import BoardModel
from MoveController import MoveController
from HelpModel import HelpModel
from MenuModel import MenuModel
from Model import Model
'''
CSC290 Group Project
C4: Four In A Row
University of Toronto Mississauga
'''
class Game():
'''
Base game instance, which handles game states and transitions
between the different game screens. The play() method serves
as the main gameplay loop.
Structure of state machine inspired by iminurnamez:
https://gist.github.com/iminurnamez/8d51f5b40032f106a847
Licensed under CC0 1.0 Universal.
'''
def __init__(self, display, screens, model_name):
'''
Initialize the Game object.
Keyword arguments:
display -- the display Surface used to draw the game
screens -- a dict mapping names of models to their Model objects
model_name -- the name of the model of the first game screen
'''
self.playing = True
self.fps = 60
self.screen = display
self.screens = screens
self.model_name = model_name
self.model = screens[self.model_name]
self.clock = pygame.time.Clock()
self.controller = MoveController(screens["board"])
def event_loop(self):
'''Pass pygame events to current model to handle current game state.'''
for event in pygame.event.get():
if self.model_name == "board":
self.model.get_event(event, self.controller)
else:
self.model.get_event(event)
def draw(self):
'''Pass screen to current model to draw current game state.'''
self.model.draw(self.screen)
def update(self, frame_time):
'''
Update current model if there is a change, it
signals for a change in models, or if there is
a game quit event.
Keyword arguments:
frame_time -- milliseconds since last frame
'''
if self.model.quit:
self.playing = False
elif self.model.done:
self.change_screen()
def change_screen(self):
'''Change the model being used according to next_model.'''
self.model.done = False
self.model_name = self.model.next_model
self.model = screens[self.model_name]
def play(self):
'''The main game loop. Halts upon game exit.'''
while self.playing:
frame_time = self.clock.tick(self.fps)
self.event_loop()
self.update(frame_time)
self.draw()
pygame.display.update()
if __name__ == '__main__':
pygame.init()
pygame.font.init()
screen = pygame.display.set_mode((900, 700))
screens = {"menu": MenuModel(), "help": HelpModel(), "board": BoardModel()}
game = Game(screen, screens, "menu")
game.play()
pygame.quit()
sys.exit()
| 31.242105 | 79 | 0.618598 | import pygame, sys
from Move import Move
from BoardModel import BoardModel
from MoveController import MoveController
from HelpModel import HelpModel
from MenuModel import MenuModel
from Model import Model
'''
CSC290 Group Project
C4: Four In A Row
University of Toronto Mississauga
'''
class Game():
    '''
    Base game instance, which handles game states and transitions
    between the different game screens. The play() method serves
    as the main gameplay loop.
    Structure of state machine inspired by iminurnamez:
    https://gist.github.com/iminurnamez/8d51f5b40032f106a847
    Licensed under CC0 1.0 Universal.
    '''
    def __init__(self, display, screens, model_name):
        '''
        Initialize the Game object.
        Keyword arguments:
        display -- the display Surface used to draw the game
        screens -- a dict mapping names of models to their Model objects
        model_name -- the name of the model of the first game screen
        '''
        self.playing = True
        self.fps = 60
        self.screen = display
        self.screens = screens
        self.model_name = model_name
        self.model = screens[self.model_name]
        self.clock = pygame.time.Clock()
        self.controller = MoveController(screens["board"])

    def event_loop(self):
        '''Pass pygame events to current model to handle current game state.'''
        for event in pygame.event.get():
            if self.model_name == "board":
                # The board model additionally needs the move controller to
                # translate input events into game moves.
                self.model.get_event(event, self.controller)
            else:
                self.model.get_event(event)

    def draw(self):
        '''Pass screen to current model to draw current game state.'''
        self.model.draw(self.screen)

    def update(self, frame_time):
        '''
        Update current model if there is a change, it
        signals for a change in models, or if there is
        a game quit event.
        Keyword arguments:
        frame_time -- milliseconds since last frame
        '''
        if self.model.quit:
            self.playing = False
        elif self.model.done:
            self.change_screen()

    def change_screen(self):
        '''Change the model being used according to next_model.'''
        self.model.done = False
        self.model_name = self.model.next_model
        # Bug fix: look the next model up in self.screens rather than the
        # module-level 'screens' global, which only exists when this file
        # is run as a script (NameError/wrong dict otherwise).
        self.model = self.screens[self.model_name]

    def play(self):
        '''The main game loop. Halts upon game exit.'''
        while self.playing:
            frame_time = self.clock.tick(self.fps)
            self.event_loop()
            self.update(frame_time)
            self.draw()
            pygame.display.update()
if __name__ == '__main__':
    # Bootstrap pygame, build the three screens, and run the game.
    pygame.init()
    pygame.font.init()
    screen = pygame.display.set_mode((900, 700))
    screens = {"menu": MenuModel(), "help": HelpModel(), "board": BoardModel()}
    game = Game(screen, screens, "menu")
    game.play()
    # Shut pygame down cleanly before exiting the interpreter.
    pygame.quit()
    sys.exit()
| 0 | 0 | 0 |
bb981e8b64818ad0fa1689a93c7582162a9771aa | 2,326 | py | Python | cauldron/test/cli/commands/test_steps_insert.py | JohnnyPeng18/cauldron | 09120c2a4cef65df46f8c0c94f5d79395b3298cd | [
"MIT"
] | 90 | 2016-09-02T15:11:10.000Z | 2022-01-02T11:37:57.000Z | cauldron/test/cli/commands/test_steps_insert.py | JohnnyPeng18/cauldron | 09120c2a4cef65df46f8c0c94f5d79395b3298cd | [
"MIT"
] | 86 | 2016-09-23T16:52:22.000Z | 2022-03-31T21:39:56.000Z | cauldron/test/cli/commands/test_steps_insert.py | JohnnyPeng18/cauldron | 09120c2a4cef65df46f8c0c94f5d79395b3298cd | [
"MIT"
] | 261 | 2016-12-22T05:36:48.000Z | 2021-11-26T12:40:42.000Z | import cauldron
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestStepsInsert(scaffolds.ResultsTest):
"""..."""
def test_before(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candice')
support.add_step(self)
support.add_step(self, position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
def test_multiple_file_types(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candy')
support.add_step(self)
support.add_step(self, name='.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
def test_multiple_file_types_many(self):
"""Should properly rename default filenames."""
support.create_project(self, 'candy')
support.add_step(self)
support.add_step(self)
support.add_step(self)
support.add_step(self, name='.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01'))
self.assertTrue(steps[1].filename.startswith('S02'))
self.assertTrue(steps[2].filename.startswith('S03'))
self.assertTrue(steps[3].filename.startswith('S04'))
def test_multiple_file_types_named(self):
"""Should properly rename customized filenames."""
support.create_project(self, 'candera')
support.add_step(self, name='A')
support.add_step(self, name='B')
support.add_step(self, name='C')
support.add_step(self, name='D.md', position='0')
project = cauldron.project.get_internal_project()
steps = project.steps
self.assertTrue(steps[0].filename.startswith('S01-D'))
self.assertTrue(steps[1].filename.startswith('S02'))
self.assertTrue(steps[2].filename.startswith('S03'))
self.assertTrue(steps[3].filename.startswith('S04'))
| 34.205882 | 62 | 0.662511 | import cauldron
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestStepsInsert(scaffolds.ResultsTest):
    """Tests for inserting steps at explicit positions within a project."""

    def _assert_prefixes(self, steps, prefixes):
        """Assert that each step filename starts with its expected prefix."""
        for step, prefix in zip(steps, prefixes):
            self.assertTrue(step.filename.startswith(prefix))

    def test_before(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candice')
        support.add_step(self)
        support.add_step(self, position='0')
        steps = cauldron.project.get_internal_project().steps
        self._assert_prefixes(steps, ['S01', 'S02'])

    def test_multiple_file_types(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candy')
        support.add_step(self)
        support.add_step(self, name='.md', position='0')
        steps = cauldron.project.get_internal_project().steps
        self._assert_prefixes(steps, ['S01', 'S02'])

    def test_multiple_file_types_many(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candy')
        for _ in range(3):
            support.add_step(self)
        support.add_step(self, name='.md', position='0')
        steps = cauldron.project.get_internal_project().steps
        self._assert_prefixes(steps, ['S01', 'S02', 'S03', 'S04'])

    def test_multiple_file_types_named(self):
        """Should properly rename customized filenames."""
        support.create_project(self, 'candera')
        for step_name in ('A', 'B', 'C'):
            support.add_step(self, name=step_name)
        support.add_step(self, name='D.md', position='0')
        steps = cauldron.project.get_internal_project().steps
        self._assert_prefixes(steps, ['S01-D', 'S02', 'S03', 'S04'])
| 0 | 0 | 0 |
b5a9c66b94d0c7027220e3333efdb755dfe9e90b | 284 | py | Python | grb/defense/base.py | Stanislas0/grb | 96fc521f57fdb06ab6a3c442fcf4a8bc97894829 | [
"MIT"
] | null | null | null | grb/defense/base.py | Stanislas0/grb | 96fc521f57fdb06ab6a3c442fcf4a8bc97894829 | [
"MIT"
] | null | null | null | grb/defense/base.py | Stanislas0/grb | 96fc521f57fdb06ab6a3c442fcf4a8bc97894829 | [
"MIT"
] | null | null | null | from abc import ABCMeta, abstractmethod
| 17.75 | 54 | 0.56338 | from abc import ABCMeta, abstractmethod
class Defense(metaclass=ABCMeta):
    # Abstract base class for defense methods; subclasses must implement
    # ``defense``.
    @abstractmethod
    def defense(self, model, adj, features, **kwargs):
        '''
        Apply the defense to a model.

        :param model: the model being defended.
        :param adj: adjacency input -- presumably the graph's adjacency
            matrix; confirm against concrete subclasses.
        :param features: node feature input -- TODO confirm shape/type.
        :param kwargs: extra, subclass-specific options.
        :return: implementation-defined; see concrete subclasses.
        '''
| 0 | 220 | 23 |
230a51a7ddf70165298befece4dc77ccda252454 | 24,386 | py | Python | bin/FMU-QSS.gen.py | NREL/SOEP-QSS | 354df458f6eaf13e9f0271eccd747047ab4f0f71 | [
"BSD-3-Clause"
] | 13 | 2017-08-04T15:15:59.000Z | 2022-03-24T06:02:22.000Z | bin/FMU-QSS.gen.py | NREL/SOEP-QSS | 354df458f6eaf13e9f0271eccd747047ab4f0f71 | [
"BSD-3-Clause"
] | 4 | 2021-08-19T01:56:30.000Z | 2021-08-23T01:41:36.000Z | bin/FMU-QSS.gen.py | NREL/SOEP-QSS | 354df458f6eaf13e9f0271eccd747047ab4f0f71 | [
"BSD-3-Clause"
] | 1 | 2017-09-15T17:14:57.000Z | 2017-09-15T17:14:57.000Z | #!/usr/bin/env python
# FMU-QSS Generator
#
# Project: QSS Solver
#
# Language: Python 2.7 and 3.x
#
# Developed by Objexx Engineering, Inc. (https://objexx.com) under contract to
# the National Renewable Energy Laboratory of the U.S. Department of Energy
#
# Copyright (c) 2017-2021 Objexx Engineering, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER, THE UNITED STATES
# GOVERNMENT, OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Notes
# Generates an FMU-QSS from an FMU-ME with specified QSS options
# ElementTree reorders attributes lexicographically and omits comments so we are using lxml
# lxml should be faster than ElementTree other than initial parsing
# lxml is not included with most Python distributions but can be installed with pip or, on Linux, from a package
# Discrete variables that aren't output variables are elided from the FMU-QSS XML
# Zero-crossing variables (__zc_*) are output variables in our FMU-ME for now but are elided from the FMU-QSS XML
# Do
# Add more QSS options->annotations as needed
# Imports
import argparse, errno, os, platform, shutil, subprocess, sys
from zipfile import ZipFile
from collections import OrderedDict
from lxml import etree
def fmu_qss_gen():
	'''Generate an FMU-QSS from an FMU-ME

	Reads an FMU-ME .fmu (or just its modelDescription.xml), rewrites the XML
	for FMU-QSS use (model renamed with a _QSS suffix, non-I/O variables pruned,
	continuous states exposed as outputs, QSS options added as vendor
	annotations), generates the model-specific C++ headers (options, GUID,
	sizing) and, when an FMU-ME .fmu was given, builds and zips the FMU-QSS .fmu.

	Exits the process with status 1 on any unrecoverable error.
	'''
	# Process arguments
	parser = argparse.ArgumentParser()
	parser.add_argument( 'ME', help = 'FMU-ME fmu or xml file', default = 'modelDescription.xml' )
	parser.add_argument( '--qss', help = 'QSS method (x)(LI)QSS(1|2|3) [QSS2]', default = 'QSS2' )
	parser.add_argument( '--rTol', help = 'relative tolerance [FMU]', type = float )
	parser.add_argument( '--aTol', help = 'absolute tolerance [1e-6]', type = float, default = 1.0e-6 )
	parser.add_argument( '--tEnd', help = 'simulation end time [FMU]', type = float )
	args = parser.parse_args()
	# Normalize the QSS method name case-insensitively to its canonical capitalization
	# Bug fix: upper-casing args.qss before comparing could never match the mixed-case xQSS* names,
	# making the xQSS methods unreachable; canonical names are needed downstream (annotations, C++ enum)
	qss_methods = ( 'QSS1', 'QSS2', 'QSS3', 'LIQSS1', 'LIQSS2', 'LIQSS3', 'xQSS1', 'xQSS2', 'xQSS3' )
	qss_canonical = { m.upper(): m for m in qss_methods }
	try:
		args.qss = qss_canonical[ args.qss.upper() ]
	except KeyError:
		print( '\nUnsupported QSS method: ' + args.qss + ': Must be one of QSS1, QSS2, QSS3, LIQSS1, LIQSS2, LIQSS3, xQSS1, xQSS2, xQSS3' )
		sys.exit( 1 )
	if ( args.rTol is not None ) and ( args.rTol < 0.0 ): # Negative => unspecified (FMU value used)
		print( '\nNegative rTol: ' + "{:.16f}".format( args.rTol ) )
		sys.exit( 1 )
	if args.aTol <= 0.0:
		print( '\nNonpositive aTol: ' +"{:.16f}".format( args.aTol ) )
		sys.exit( 1 )
	if ( args.tEnd is not None ) and ( args.tEnd < 0.0 ): # Negative => unspecified (FMU value used)
		print( '\nNegative tEnd: ' +"{:.16f}".format( args.tEnd ) )
		sys.exit( 1 )
	ME_lower = args.ME.lower()
	if ME_lower.endswith( '.xml' ): # XML input
		me_fmu_name = me_name = None
		me_xml_name = args.ME
	elif ME_lower.endswith( '.fmu' ): # FMU input
		me_fmu_name = args.ME
		me_name = os.path.splitext( os.path.basename( me_fmu_name ) )[ 0 ]
		me_xml_name = 'modelDescription.xml'
	else:
		print( '\nFMU-ME input is not a .fmu or .xml file: ' + args.ME )
		sys.exit( 1 )
	# Extract modelDescription.xml from FMU-ME
	if me_fmu_name:
		try:
			zip_file = ZipFile( me_fmu_name )
			zip_file.extract( 'modelDescription.xml' )
			zip_file.close()
		except Exception: # Narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
			print( '\nExtracting modelDescription.xml from FMU-ME fmu failed' )
			sys.exit( 1 )
	# Read FMU-ME xml file
	try:
		parser = etree.XMLParser( remove_blank_text = True )
		tree = etree.parse( me_xml_name, parser )
		root = tree.getroot()
	except Exception:
		print( '\nFMU-ME XML open and parse failed: ' + me_xml_name )
		sys.exit( 1 )
	# fmiModelDescription changes
	if root.tag != "fmiModelDescription":
		print( '\nRoot is not fmiModelDescription in FMU-ME XML: ' + me_xml_name )
		sys.exit( 1 )
	fmiModelDescription = root
	if 'modelName' in fmiModelDescription.attrib:
		fmiModelDescription.attrib[ 'modelName' ] = fmiModelDescription.attrib[ 'modelName' ] + '_QSS' # FMU-QSS model name gets _QSS suffix
	if 'numberOfEventIndicators' in fmiModelDescription.attrib:
		fmiModelDescription.attrib[ 'numberOfEventIndicators' ] = '0' # FMU-QSS has no event indicators
	guid_placeholder = '@FMU-QSS_GUID@'
	fmiModelDescription.attrib[ 'guid' ] = guid_placeholder # Placeholder for running fmu-uuid
	# ModelExchange changes
	ModelExchange = root.find( 'ModelExchange' )
	if ModelExchange is None:
		print( '\nModelExchange not found in ' + me_xml_name )
		sys.exit( 1 )
	if 'modelIdentifier' in ModelExchange.attrib:
		ModelExchange.attrib[ 'modelIdentifier' ] = ModelExchange.attrib[ 'modelIdentifier' ] + '_QSS' # FMU-QSS model identifier gets _QSS suffix
	# Find ModelVariables
	ModelVariables = root.find( 'ModelVariables' )
	if ModelVariables is None:
		print( '\nModelVariables not found in ' + me_xml_name )
		sys.exit( 1 )
	# Add QSS annotations
	VendorAnnotations = root.find( 'VendorAnnotations' )
	if VendorAnnotations is None:
		VendorAnnotations = etree.Element( 'VendorAnnotations' )
		ModelVariables.addprevious( VendorAnnotations ) # VendorAnnotations must precede ModelVariables per the FMI schema
	QSS = etree.SubElement( VendorAnnotations, 'Tool', attrib = { 'name': 'QSS' } )
	Annotations = etree.SubElement( QSS, 'Annotations' )
	etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'qss' ), ( 'value', args.qss ) ] ) )
	if args.rTol is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'rTol' ), ( 'value', "{:.16f}".format( args.rTol ) ) ] ) )
	etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'aTol' ), ( 'value', "{:.16f}".format( args.aTol ) ) ] ) )
	if args.tEnd is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'tEnd' ), ( 'value', "{:.16f}".format( args.tEnd ) ) ] ) )
	#Do Add other annotations as needed
	# Generate model-specific QSS header
	try: # Create QSS options header
		QSS_option_name = 'FMU_QSS_options.hh'
		if sys.version_info >= ( 3, 0 ): # Python 3: newline control on text files; Python 2: binary mode gives LF line endings
			QSS_option_file = open( QSS_option_name, 'w', newline = '\n' )
		else:
			QSS_option_file = open( QSS_option_name, 'wb' )
	except Exception:
		print( '\nQSS options header open failed: ' + QSS_option_name )
		sys.exit( 1 )
	try: # Write QSS_option header
		QSS_option_file.write( '#ifndef FMU_QSS_options_hh_INCLUDED\n' )
		QSS_option_file.write( '#define FMU_QSS_options_hh_INCLUDED\n' )
		QSS_option_file.write( 'QSS::options::QSS const fmu_qss_qss( QSS::options::QSS::' + args.qss + ' );\n' )
		if args.rTol is not None:
			QSS_option_file.write( 'double const fmu_qss_rTol( ' + "{:.16f}".format( args.rTol ) + ' );\n' )
		else:
			QSS_option_file.write( 'double const fmu_qss_rTol( -1.0 ); // Negative => Unspecified\n' )
		QSS_option_file.write( 'double const fmu_qss_aTol( ' + "{:.16f}".format( args.aTol ) + ' );\n' )
		if args.tEnd is not None:
			QSS_option_file.write( 'double const fmu_qss_tEnd( ' + "{:.16f}".format( args.tEnd ) + ' );\n' )
		else:
			QSS_option_file.write( 'double const fmu_qss_tEnd( -1.0 ); // Negative => Unspecified\n' )
		QSS_option_file.write( '#endif\n' )
		QSS_option_file.close()
	except Exception as err:
		print( '\nQSS options header write failed: ' + QSS_option_name + ': ' + str( err ) )
		sys.exit( 1 )
	# Find ScalarVariables
	ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable
	# Identify state variables
	try:
		ModelStructure = root.find( 'ModelStructure' )
		Derivatives = ModelStructure.find( 'Derivatives' ) # AttributeError here if ModelStructure is None
		Unknowns = Derivatives.findall( 'Unknown' )
	except Exception:
		Unknowns = [] # No state variable derivatives
	derivatives_indexes = set() # State variable derivative indexes
	for Unknown in Unknowns:
		try:
			derivatives_indexes.add( int( Unknown.attrib[ 'index' ] ) )
		except Exception:
			pass # Missing or non-integer index: skip this Unknown
	is_state = { i: False for i in range( 1, len( ScalarVariables ) + 1 ) } # 1-based FMU variable index -> is a continuous state
	for i in range( len( ScalarVariables ) ):
		v = ScalarVariables[ i ]
		Real = v.find( 'Real' )
		if Real is not None:
			derivative_of = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
			if derivative_of is not None:
				try:
					derivative_of_index = int( derivative_of )
					if i + 1 in derivatives_indexes: is_state[ derivative_of_index ] = True # Otherwise could be a non-state (internal or input) variable with a derivative
				except Exception:
					name = v.attrib[ 'name' ] if 'name' in v.attrib else ''
					print( 'Non-integer derivative in ' + name + ': ' + str( derivative_of ) )
	# Migrate variables and map indexes
	io = {} # FMU-ME to FMU-QSS variable index map
	# oi = {} # FMU-QSS to FMU-ME variable index map
	i = o = 0
	outputs = [] # FMU-QSS output variable indexes
	n_real = n_integer = n_boolean = n_string = 0
	n_input_real = n_output_real = 0
	try:
		n_input_real_max_order = n_output_real_max_order = int( args.qss[ -1 ] ) # QSS method order is the last character of the method name
	except Exception as err:
		print( '\nFMU-QSS XML generation failed: QSS method order not identified from last character of qss argument: ' + str( args.qss ) )
		sys.exit( 1 )
	for v in ScalarVariables:
		i += 1 # FMU-ME variable index
		a = v.attrib
		name = a[ 'name' ] if 'name' in a else ''
		causality = a[ 'causality' ] if 'causality' in a else 'local' # FMI default causality
		variability = a[ 'variability' ] if 'variability' in a else 'continuous' # FMI default variability
		previous = v.getprevious()
		comment = previous if ( previous is not None ) and ( previous.tag is etree.Comment ) and str( previous ).startswith( ( '<!-- Variable with index #', '<!-- Index for next variable = ' ) ) else None
		if causality in ( 'input', 'output' ) and not ( ( causality == 'output' ) and name.startswith( '__zc_' ) ): # Keep (except zero-crossing output variables)
			o += 1 # FMU-QSS variable index
			io[ i ] = o
			# oi[ o ] = i
			Real = v.find( 'Real' )
			Integer = v.find( 'Integer' )
			Boolean = v.find( 'Boolean' )
			String = v.find( 'String' )
			if Real is not None:
				n_real += 1
			elif Integer is not None:
				n_integer += 1
			elif Boolean is not None:
				n_boolean += 1
			elif String is not None:
				n_string += 1
			if causality == 'output':
				outputs.append( o )
				if Real is not None:
					n_output_real += 1
			else: # Input
				if Real is not None:
					n_input_real += 1
			set_comment = True
		elif ( causality == 'local' ) and ( variability == 'continuous' ) and is_state[ i ]: # State to output variable
			a[ 'causality' ] = 'output'
			o += 1 # FMU-QSS variable index
			io[ i ] = o
			# oi[ o ] = i
			outputs.append( o )
			if 'initial' in a: del a[ 'initial' ] # Drop initial spec
			set_comment = True
			Real = v.find( 'Real' )
			if Real is not None:
				n_real += 1
				n_output_real += 1
				if 'start' in Real.attrib: del Real.attrib[ 'start' ] # Remove start spec
			else:
				print( '\nFMU-ME (continuous) state variable is not Real: ' + name )
				sys.exit( 1 )
		else: # Remove
			ModelVariables.remove( v )
			if comment is not None: ModelVariables.remove( comment )
			set_comment = False
		if set_comment:
			if comment is not None: # Adjust variable index in comment
				# comment.text = ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' # Dymola format
				comment.text = ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' # OCT format
			else: # Insert comment
				# v.addprevious( etree.Comment( ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' ) ) # Dymola format
				v.addprevious( etree.Comment( ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' ) ) # OCT format
	# Re-index derivatives
	ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable after above pruning
	for v in ScalarVariables:
		Real = v.find( 'Real' )
		if Real is not None:
			derivative = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
			if derivative is not None:
				try:
					derivative_index = int( derivative )
					try:
						Real.attrib[ 'derivative' ] = str( io[ derivative_index ] )
					except Exception as err:
						print( 'Derivative re-indexing failed for ' + ( v.attrib[ 'name' ] if 'name' in v.attrib else '' ) + ': ' + str( err ) )
				except Exception:
					pass # Already reported this above
	# Migrate ModelStructure
	ModelStructure = root.find( 'ModelStructure' )
	if ModelStructure is None:
		ModelStructure = etree.Element( 'ModelStructure' )
		ModelVariables.addnext( ModelStructure ) # ModelStructure follows ModelVariables per the FMI schema
	for g in ( 'Derivatives', 'DiscreteStates', 'InitialUnknowns' ): # Remove these sections
		e = ModelStructure.find( g )
		if e is not None:
			ModelStructure.remove( e )
	Outputs = ModelStructure.find( 'Outputs' )
	if Outputs is None:
		Outputs = etree.SubElement( ModelStructure, 'Outputs' )
	Unknowns = Outputs.findall( 'Unknown' )
	for u in Unknowns: # Remove previous entries
		Outputs.remove( u )
	for o in outputs:
		etree.SubElement( Outputs, 'Unknown', attrib = OrderedDict( [ ( 'index', str( o ) ), ( 'dependencies', '' ) ] ) )
	# Write FMU-QSS xml file
	#print( etree.tostring( root, pretty_print=True, encoding='unicode' ) ) #Debug#####
	try:
		qss_xml_name = 'FMU-QSS_' + os.path.basename( me_xml_name )
		tree.write( qss_xml_name, encoding = 'UTF-8', xml_declaration = True, pretty_print = True )
	except Exception as err:
		print( '\nFMU-QSS XML write failed: ' + qss_xml_name + ': ' + str( err ) )
		sys.exit( 1 )
	# Add GUID to FMU-QSS xml file and generate GUID header
	try:
		subprocess.call( [ 'fmu-uuid', qss_xml_name, guid_placeholder, qss_xml_name, 'FMU_QSS_GUID.hh', 'FMU_QSS_GUID' ] )
	except OSError as e:
		if e.errno == errno.ENOENT:
			print( '\nFMU-QSS XML GUID computation failed: fmu-uuid program not in PATH' )
		else:
			print( '\nFMU-QSS XML GUID computation failed: ' + str( e ) )
		print( 'Generic no-check GUID header generated' )
		try: # Fall back to a GUID header that disables the GUID check
			guid_name = 'FMU_QSS_GUID.hh'
			if sys.version_info >= ( 3, 0 ):
				guid_file = open( guid_name, 'w', newline = '\n' )
			else:
				guid_file = open( guid_name, 'wb' )
		except Exception:
			print( '\nGUID header open failed: ' + guid_name )
			sys.exit( 1 )
		try:
			guid_file.write( '#ifndef FMU_QSS_GUID\n' )
			guid_file.write( '#define FMU_QSS_GUID "FMU-QSS_GUID" // No-check value\n' )
			guid_file.write( '#endif\n' )
			guid_file.close()
		except Exception:
			print( '\nGUID header write failed: ' + guid_name )
			sys.exit( 1 )
	# Generate model-specific size definitions header
	try: # Create sizing header
		sizing_name = 'FMU_QSS_defines.hh'
		if sys.version_info >= ( 3, 0 ):
			sizing_file = open( sizing_name, 'w', newline = '\n' )
		else:
			sizing_file = open( sizing_name, 'wb' )
	except Exception:
		print( '\nSizing header open failed: ' + sizing_name )
		sys.exit( 1 )
	try: # Write sizing header: Sizes >=1 to avoid illegal 0-sized arrays
		sizing_file.write( '#ifndef FMU_QSS_defines_hh_INCLUDED\n' )
		sizing_file.write( '#define FMU_QSS_defines_hh_INCLUDED\n' )
		sizing_file.write( '// Note: Sizes are >=1 to avoid illegal 0-sized arrays\n' )
		sizing_file.write( '#define BUFFER 1024\n' )
		sizing_file.write( '#define N_REAL ' + str( max( n_real, 1 ) ) + '\n' )
		sizing_file.write( '#define N_INTEGER ' + str( max( n_integer, 1 ) ) + '\n' )
		sizing_file.write( '#define N_BOOLEAN ' + str( max( n_boolean, 1 ) ) + '\n' )
		sizing_file.write( '#define N_STRING ' + str( max( n_string, 1 ) ) + '\n' )
		sizing_file.write( '#define N_INPUT_REAL ' + str( max( n_input_real, 1 ) ) + '\n' )
		sizing_file.write( '#define N_INPUT_REAL_MAX_ORDER ' + str( max( n_input_real_max_order, 1 ) ) + '\n' )
		sizing_file.write( '#define N_OUTPUT_REAL ' + str( max( n_output_real, 1 ) ) + '\n' )
		sizing_file.write( '#define N_OUTPUT_REAL_MAX_ORDER ' + str( max( n_output_real_max_order, 1 ) ) + '\n' )
		sizing_file.write( '#endif\n' )
		sizing_file.close()
	except Exception as err:
		print( '\nSizing header write failed: ' + sizing_name + ': ' + str( err ) )
		sys.exit( 1 )
	# Generate FMU-QSS
	if me_fmu_name:
		try: # Directory tree setup
			qss_name = me_name + '_QSS'
			if os.path.exists( qss_name ): # Clear any stale staging tree
				if os.path.isdir( qss_name ):
					shutil.rmtree( qss_name )
				elif os.path.isfile( qss_name ):
					os.remove( qss_name )
			os.mkdir( qss_name )
			os.mkdir( os.path.join( qss_name, 'binaries' ) )
			if not platform.machine().endswith( '64' ):
				print( '\nFMU-QSS generation only supports 64-bit OS at this time' )
				sys.exit( 1 )
			if sys.platform.startswith( 'linux' ):
				binaries_dir = qss_name + '/binaries/linux64'
			elif sys.platform.startswith( 'win' ):
				binaries_dir = qss_name + '\\binaries\\win64'
			else:
				print( '\nPlatform is not supported for FMU-QSS generation' )
				sys.exit( 1 )
			os.mkdir( binaries_dir )
			os.mkdir( qss_name + os.sep + 'resources' )
			#os.mkdir( qss_name + os.sep + 'sources' )
		except Exception as err:
			print( '\nFMU-QSS directory tree setup failed: ' + str( err ) )
			sys.exit( 1 )
		try: # Build FMU-QSS library
			QSS = os.environ.get( 'QSS' )
			QSS_bin = os.environ.get( 'QSS_bin' )
			PlatformCompiler = os.environ.get( 'PlatformCompiler' )
			if QSS and QSS_bin and PlatformCompiler: # Configured for building
				QSS_src = os.path.join( QSS, 'src', 'QSS' )
				fmu_src = os.path.join( QSS_src, 'fmu' )
				if os.path.exists( 'src' ): # Clear any stale build tree
					if os.path.isdir( 'src' ):
						shutil.rmtree( 'src' )
					elif os.path.isfile( 'src' ):
						os.remove( 'src' )
				src_bld = 'src'
				fmu_bld = os.path.join( 'src', 'QSS', 'fmu' )
				os.makedirs( fmu_bld )
				shutil.copy( 'FMU_QSS_defines.hh', fmu_bld )
				shutil.copy( 'FMU_QSS_GUID.hh', fmu_bld )
				shutil.copy( os.path.join( fmu_src, PlatformCompiler, 'GNUmakefile' ), src_bld )
				cwd = os.getcwd()
				os.chdir( src_bld )
				with open( 'GNUmakefile', 'r' ) as sources: # Retarget the library output from $(BIN_PATH) to the build directory
					lines = sources.readlines()
				with open( 'GNUmakefile', 'w' ) as sources:
					for line in lines:
						if line.startswith( 'DLB := $(BIN_PATH)' + os.sep ):
							sources.write( line.replace( '$(BIN_PATH)' + os.sep, '' ) )
						else:
							sources.write( line )
				try:
					import psutil
					n_processors = psutil.cpu_count()
				except Exception:
					print( '\nNon-parallel make used: psutil processor count lookup failed' )
					n_processors = 1
				try:
					subprocess.call( [ 'make', '-j', str( n_processors ) ] )
					try:
						if sys.platform.startswith( 'linux' ):
							qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.so' )
							if os.path.isfile( qss_lib ): os.remove( qss_lib )
							os.rename( 'libFMU-QSS.so', qss_lib )
						elif sys.platform.startswith( 'win' ):
							qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.dll' )
							if os.path.isfile( qss_lib ): os.remove( qss_lib )
							os.rename( 'libFMU-QSS.dll', qss_lib )
					except Exception as err:
						print( '\nFMU-QSS library move into staging directory failed: ' + str( err ) )
				except Exception as err:
					print( '\nFMU-QSS library make failed: ' + str( err ) )
				os.chdir( cwd )
				shutil.rmtree( src_bld )
			else:
				print( '\nFMU-QSS library can\'t be built: QSS and QSS_bin environment variables are not set' )
		except Exception as err:
			print( '\nFMU-QSS library build failed: ' + str( err ) )
		try: # File setup
			shutil.copyfile( qss_xml_name, qss_name + os.sep + 'modelDescription.xml' )
			shutil.copy( me_fmu_name, qss_name + os.sep + 'resources' ) # FMU-ME is packaged as a resource of the FMU-QSS
		except Exception as err:
			print( '\nFMU-QSS file setup failed: ' + str( err ) )
			sys.exit( 1 )
		try: # Zip FMU-QSS
			qss_fmu_name = qss_name + '.fmu'
			if os.path.exists( qss_fmu_name ): # Clear any stale FMU
				if os.path.isfile( qss_fmu_name ):
					os.remove( qss_fmu_name )
				elif os.path.isdir( qss_fmu_name ):
					shutil.rmtree( qss_fmu_name )
			zip_file = ZipFile( qss_fmu_name, mode = 'w' )
			os.chdir( qss_name )
			for root, dirs, files in os.walk( '.' ): # Sorted walk for reproducible archive ordering
				dirs.sort()
				for dir in dirs:
					zip_file.write( os.path.join( root, dir ) )
				files.sort()
				for file in files:
					zip_file.write( os.path.join( root, file ) )
			os.chdir( '..' )
			zip_file.close()
		except Exception as err:
			print( '\nFMU-QSS zip into .fmu failed: ' + str( err ) )
			sys.exit( 1 )
# Script entry point: run the generator only when executed directly, not on import
if __name__ == '__main__':
	fmu_qss_gen()
| 49.464503 | 204 | 0.580005 | #!/usr/bin/env python
# FMU-QSS Generator
#
# Project: QSS Solver
#
# Language: Python 2.7 and 3.x
#
# Developed by Objexx Engineering, Inc. (https://objexx.com) under contract to
# the National Renewable Energy Laboratory of the U.S. Department of Energy
#
# Copyright (c) 2017-2021 Objexx Engineering, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER, THE UNITED STATES
# GOVERNMENT, OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Notes
# Generates an FMU-QSS from an FMU-ME with specified QSS options
# ElementTree reorders attributes lexicographically and omits comments so we are using lxml
# lxml should be faster than ElementTree other than initial parsing
# lxml is not included with most Python distributions but can be installed with pip or, on Linux, from a package
# Discrete variables that aren't output variables are elided from the FMU-QSS XML
# Zero-crossing variables (__zc_*) are output variables in our FMU-ME for now but are elided from the FMU-QSS XML
# Do
# Add more QSS options->annotations as needed
# Imports
import argparse, errno, os, platform, shutil, subprocess, sys
from zipfile import ZipFile
from collections import OrderedDict
from lxml import etree
def fmu_qss_gen():
'''Generate an FMU-QSS from an FMU-ME'''
# Process arguments
parser = argparse.ArgumentParser()
parser.add_argument( 'ME', help = 'FMU-ME fmu or xml file', default = 'modelDescription.xml' )
parser.add_argument( '--qss', help = 'QSS method (x)(LI)QSS(1|2|3) [QSS2]', default = 'QSS2' )
parser.add_argument( '--rTol', help = 'relative tolerance [FMU]', type = float )
parser.add_argument( '--aTol', help = 'absolute tolerance [1e-6]', type = float, default = 1.0e-6 )
parser.add_argument( '--tEnd', help = 'simulation end time [FMU]', type = float )
args = parser.parse_args()
args.qss = args.qss.upper()
if args.qss not in ( 'QSS1', 'QSS2', 'QSS3', 'LIQSS1', 'LIQSS2', 'LIQSS3', 'xQSS1', 'xQSS2', 'xQSS3' ):
print( '\nUnsupported QSS method: ' + args.qss + ': Must be one of QSS1, QSS2, QSS3, LIQSS1, LIQSS2, LIQSS3, xQSS1, xQSS2, xQSS3' )
sys.exit( 1 )
if ( args.rTol is not None ) and ( args.rTol < 0.0 ):
print( '\nNegative rTol: ' + "{:.16f}".format( args.rTol ) )
sys.exit( 1 )
if args.aTol <= 0.0:
print( '\nNonpositive aTol: ' +"{:.16f}".format( args.aTol ) )
sys.exit( 1 )
if ( args.tEnd is not None ) and ( args.tEnd < 0.0 ):
print( '\nNegative tEnd: ' +"{:.16f}".format( args.tEnd ) )
sys.exit( 1 )
ME_lower = args.ME.lower()
if ME_lower.endswith( '.xml' ): # XML input
me_fmu_name = me_name = None
me_xml_name = args.ME
elif ME_lower.endswith( '.fmu' ): # FMU input
me_fmu_name = args.ME
me_name = os.path.splitext( os.path.basename( me_fmu_name ) )[ 0 ]
me_xml_name = 'modelDescription.xml'
else:
print( '\nFMU-ME input is not a .fmu or .xml file: ' + args.ME )
sys.exit( 1 )
# Extract modelDescription.xml from FMU-ME
if me_fmu_name:
try:
zip_file = ZipFile( me_fmu_name )
zip_file.extract( 'modelDescription.xml' )
zip_file.close()
except:
print( '\nExtracting modelDescription.xml from FMU-ME fmu failed' )
sys.exit( 1 )
# Read FMU-ME xml file
try:
parser = etree.XMLParser( remove_blank_text = True )
tree = etree.parse( me_xml_name, parser )
root = tree.getroot()
except:
print( '\nFMU-ME XML open and parse failed: ' + me_xml_name )
sys.exit( 1 )
# fmiModelDescription changes
if root.tag != "fmiModelDescription":
print( '\nRoot is not fmiModelDescription in FMU-ME XML: ' + me_xml_name )
sys.exit( 1 )
fmiModelDescription = root
if 'modelName' in fmiModelDescription.attrib:
fmiModelDescription.attrib[ 'modelName' ] = fmiModelDescription.attrib[ 'modelName' ] + '_QSS' # FMU-QSS model name gets _QSS suffix
if 'numberOfEventIndicators' in fmiModelDescription.attrib:
fmiModelDescription.attrib[ 'numberOfEventIndicators' ] = '0' # FMU-QSS has no event indicators
guid_placeholder = '@FMU-QSS_GUID@'
fmiModelDescription.attrib[ 'guid' ] = guid_placeholder # Placeholder for running fmu-uuid
# ModelExchange changes
ModelExchange = root.find( 'ModelExchange' )
if ModelExchange is None:
print( '\nModelExchange not found in ' + me_xml_name )
sys.exit( 1 )
if 'modelIdentifier' in ModelExchange.attrib:
ModelExchange.attrib[ 'modelIdentifier' ] = ModelExchange.attrib[ 'modelIdentifier' ] + '_QSS' # FMU-QSS model identifier gets _QSS suffix
# Find ModelVariables
ModelVariables = root.find( 'ModelVariables' )
if ModelVariables is None:
print( '\nModelVariables not found in ' + me_xml_name )
sys.exit( 1 )
# Add QSS annotations
VendorAnnotations = root.find( 'VendorAnnotations' )
if VendorAnnotations is None:
VendorAnnotations = etree.Element( 'VendorAnnotations' )
ModelVariables.addprevious( VendorAnnotations )
QSS = etree.SubElement( VendorAnnotations, 'Tool', attrib = { 'name': 'QSS' } )
Annotations = etree.SubElement( QSS, 'Annotations' )
etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'qss' ), ( 'value', args.qss ) ] ) )
if args.rTol is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'rTol' ), ( 'value', "{:.16f}".format( args.rTol ) ) ] ) )
etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'aTol' ), ( 'value', "{:.16f}".format( args.aTol ) ) ] ) )
if args.tEnd is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'tEnd' ), ( 'value', "{:.16f}".format( args.tEnd ) ) ] ) )
#Do Add other annotations as needed
# Generate model-specific QSS header
try: # Create QSS options header
QSS_option_name = 'FMU_QSS_options.hh'
if sys.version_info >= ( 3, 0 ):
QSS_option_file = open( QSS_option_name, 'w', newline = '\n' )
else:
QSS_option_file = open( QSS_option_name, 'wb' )
except:
print( '\nQSS options header open failed: ' + QSS_option_name )
sys.exit( 1 )
try: # Write QSS_option header
QSS_option_file.write( '#ifndef FMU_QSS_options_hh_INCLUDED\n' )
QSS_option_file.write( '#define FMU_QSS_options_hh_INCLUDED\n' )
QSS_option_file.write( 'QSS::options::QSS const fmu_qss_qss( QSS::options::QSS::' + args.qss + ' );\n' )
if args.rTol is not None:
QSS_option_file.write( 'double const fmu_qss_rTol( ' + "{:.16f}".format( args.rTol ) + ' );\n' )
else:
QSS_option_file.write( 'double const fmu_qss_rTol( -1.0 ); // Negative => Unspecified\n' )
QSS_option_file.write( 'double const fmu_qss_aTol( ' + "{:.16f}".format( args.aTol ) + ' );\n' )
if args.tEnd is not None:
QSS_option_file.write( 'double const fmu_qss_tEnd( ' + "{:.16f}".format( args.tEnd ) + ' );\n' )
else:
QSS_option_file.write( 'double const fmu_qss_tEnd( -1.0 ); // Negative => Unspecified\n' )
QSS_option_file.write( '#endif\n' )
QSS_option_file.close()
except Exception as err:
print( '\nQSS options header write failed: ' + QSS_option_name + ': ' + str( err ) )
sys.exit( 1 )
# Find ScalarVariables
ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable
# Identify state variables
try:
ModelStructure = root.find( 'ModelStructure' )
Derivatives = ModelStructure.find( 'Derivatives' )
Unknowns = Derivatives.findall( 'Unknown' )
except:
Unknowns = [] # No state variable derivatives
derivatives_indexes = set() # State variable derivative indexes
for Unknown in Unknowns:
try:
derivatives_indexes.add( int( Unknown.attrib[ 'index' ] ) )
except:
pass
is_state = { i: False for i in range( 1, len( ScalarVariables ) + 1 ) }
for i in range( len( ScalarVariables ) ):
v = ScalarVariables[ i ]
Real = v.find( 'Real' )
if Real is not None:
derivative_of = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
if derivative_of is not None:
try:
derivative_of_index = int( derivative_of )
if i + 1 in derivatives_indexes: is_state[ derivative_of_index ] = True # Otherwise could be a non-state (internal or input) variable with a derivative
except Exception as err:
name = v.attrib[ 'name' ] if 'name' in v.attrib else ''
print( 'Non-integer derivative in ' + name + ': ' + str( derivative_of ) )
# Migrate variables and map indexes
io = {} # FMU-ME to FMU-QSS variable index map
# oi = {} # FMU-QSS to FMU-ME variable index map
i = o = 0
outputs = []
n_real = n_integer = n_boolean = n_string = 0
n_input_real = n_output_real = 0
try:
n_input_real_max_order = n_output_real_max_order = int( args.qss[ -1 ] )
except Exception as err:
print( '\nFMU-QSS XML generation failed: QSS method order not identified from last character of qss argument: ' + str( args.qss ) )
sys.exit( 1 )
for v in ScalarVariables:
i += 1 # FMU-ME variable index
a = v.attrib
name = a[ 'name' ] if 'name' in a else ''
causality = a[ 'causality' ] if 'causality' in a else 'local'
variability = a[ 'variability' ] if 'variability' in a else 'continuous'
previous = v.getprevious()
comment = previous if ( previous is not None ) and ( previous.tag is etree.Comment ) and str( previous ).startswith( ( '<!-- Variable with index #', '<!-- Index for next variable = ' ) ) else None
if causality in ( 'input', 'output' ) and not ( ( causality == 'output' ) and name.startswith( '__zc_' ) ): # Keep (except zero-crossing output variables)
o += 1 # FMU-QSS variable index
io[ i ] = o
# oi[ o ] = i
Real = v.find( 'Real' )
Integer = v.find( 'Integer' )
Boolean = v.find( 'Boolean' )
String = v.find( 'String' )
if Real is not None:
n_real += 1
elif Integer is not None:
n_integer += 1
elif Boolean is not None:
n_boolean += 1
elif String is not None:
n_string += 1
if causality == 'output':
outputs.append( o )
if Real is not None:
n_output_real += 1
else: # Input
if Real is not None:
n_input_real += 1
set_comment = True
elif ( causality == 'local' ) and ( variability == 'continuous' ) and is_state[ i ]: # State to output variable
a[ 'causality' ] = 'output'
o += 1 # FMU-QSS variable index
io[ i ] = o
# oi[ o ] = i
outputs.append( o )
if 'initial' in a: del a[ 'initial' ] # Drop initial spec
set_comment = True
Real = v.find( 'Real' )
if Real is not None:
n_real += 1
n_output_real += 1
if 'start' in Real.attrib: del Real.attrib[ 'start' ] # Remove start spec
else:
print( '\nFMU-ME (continuous) state variable is not Real: ' + name )
sys.exit( 1 )
else: # Remove
ModelVariables.remove( v )
if comment is not None: ModelVariables.remove( comment )
set_comment = False
if set_comment:
if comment is not None: # Adjust variable index in comment
# comment.text = ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' # Dymola format
comment.text = ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' # OCT format
else: # Insert comment
# v.addprevious( etree.Comment( ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' ) ) # Dymola format
v.addprevious( etree.Comment( ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' ) ) # OCT format
# Re-index derivatives
ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable after above pruning
for v in ScalarVariables:
Real = v.find( 'Real' )
if Real is not None:
derivative = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
if derivative is not None:
try:
derivative_index = int( derivative )
try:
Real.attrib[ 'derivative' ] = str( io[ derivative_index ] )
except Exception as err:
print( 'Derivative re-indexing failed for ' + ( v.attrib[ 'name' ] if 'name' in v.attrib else '' ) + ': ' + str( err ) )
except:
pass # Already reported this above
# Migrate ModelStructure
ModelStructure = root.find( 'ModelStructure' )
if ModelStructure is None:
ModelStructure = etree.Element( 'ModelStructure' )
ModelVariables.addnext( ModelStructure )
for g in ( 'Derivatives', 'DiscreteStates', 'InitialUnknowns' ): # Remove these sections
e = ModelStructure.find( g )
if e is not None:
ModelStructure.remove( e )
Outputs = ModelStructure.find( 'Outputs' )
if Outputs is None:
Outputs = etree.SubElement( ModelStructure, 'Outputs' )
Unknowns = Outputs.findall( 'Unknown' )
for u in Unknowns: # Remove previous entries
Outputs.remove( u )
for o in outputs:
etree.SubElement( Outputs, 'Unknown', attrib = OrderedDict( [ ( 'index', str( o ) ), ( 'dependencies', '' ) ] ) )
# Write FMU-QSS xml file
#print( etree.tostring( root, pretty_print=True, encoding='unicode' ) ) #Debug#####
try:
qss_xml_name = 'FMU-QSS_' + os.path.basename( me_xml_name )
tree.write( qss_xml_name, encoding = 'UTF-8', xml_declaration = True, pretty_print = True )
except Exception as err:
print( '\nFMU-QSS XML write failed: ' + qss_xml_name + ': ' + str( err ) )
sys.exit( 1 )
# Add GUID to FMU-QSS xml file and generate GUID header
try:
subprocess.call( [ 'fmu-uuid', qss_xml_name, guid_placeholder, qss_xml_name, 'FMU_QSS_GUID.hh', 'FMU_QSS_GUID' ] )
except OSError as e:
if e.errno == errno.ENOENT:
print( '\nFMU-QSS XML GUID computation failed: fmu-uuid program not in PATH' )
else:
print( '\nFMU-QSS XML GUID computation failed: ' + str( e ) )
print( 'Generic no-check GUID header generated' )
try:
guid_name = 'FMU_QSS_GUID.hh'
if sys.version_info >= ( 3, 0 ):
guid_file = open( guid_name, 'w', newline = '\n' )
else:
guid_file = open( guid_name, 'wb' )
except:
print( '\nGUID header open failed: ' + guid_name )
sys.exit( 1 )
try:
guid_file.write( '#ifndef FMU_QSS_GUID\n' )
guid_file.write( '#define FMU_QSS_GUID "FMU-QSS_GUID" // No-check value\n' )
guid_file.write( '#endif\n' )
guid_file.close()
except:
print( '\nGUID header write failed: ' + guid_name )
sys.exit( 1 )
# Generate model-specific size definitions header
try: # Create sizing header
sizing_name = 'FMU_QSS_defines.hh'
if sys.version_info >= ( 3, 0 ):
sizing_file = open( sizing_name, 'w', newline = '\n' )
else:
sizing_file = open( sizing_name, 'wb' )
except:
print( '\nSizing header open failed: ' + sizing_name )
sys.exit( 1 )
try: # Write sizing header: Sizes >=1 to avoid illegal 0-sized arrays
sizing_file.write( '#ifndef FMU_QSS_defines_hh_INCLUDED\n' )
sizing_file.write( '#define FMU_QSS_defines_hh_INCLUDED\n' )
sizing_file.write( '// Note: Sizes are >=1 to avoid illegal 0-sized arrays\n' )
sizing_file.write( '#define BUFFER 1024\n' )
sizing_file.write( '#define N_REAL ' + str( max( n_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_INTEGER ' + str( max( n_integer, 1 ) ) + '\n' )
sizing_file.write( '#define N_BOOLEAN ' + str( max( n_boolean, 1 ) ) + '\n' )
sizing_file.write( '#define N_STRING ' + str( max( n_string, 1 ) ) + '\n' )
sizing_file.write( '#define N_INPUT_REAL ' + str( max( n_input_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_INPUT_REAL_MAX_ORDER ' + str( max( n_input_real_max_order, 1 ) ) + '\n' )
sizing_file.write( '#define N_OUTPUT_REAL ' + str( max( n_output_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_OUTPUT_REAL_MAX_ORDER ' + str( max( n_output_real_max_order, 1 ) ) + '\n' )
sizing_file.write( '#endif\n' )
sizing_file.close()
except Exception as err:
print( '\nSizing header write failed: ' + sizing_name + ': ' + str( err ) )
sys.exit( 1 )
# Generate FMU-QSS
if me_fmu_name:
try: # Directory tree setup
qss_name = me_name + '_QSS'
if os.path.exists( qss_name ):
if os.path.isdir( qss_name ):
shutil.rmtree( qss_name )
elif os.path.isfile( qss_name ):
os.remove( qss_name )
os.mkdir( qss_name )
os.mkdir( os.path.join( qss_name, 'binaries' ) )
if not platform.machine().endswith( '64' ):
print( '\nFMU-QSS generation only supports 64-bit OS at this time' )
sys.exit( 1 )
if sys.platform.startswith( 'linux' ):
binaries_dir = qss_name + '/binaries/linux64'
elif sys.platform.startswith( 'win' ):
binaries_dir = qss_name + '\\binaries\\win64'
else:
print( '\nPlatform is not supported for FMU-QSS generation' )
sys.exit( 1 )
os.mkdir( binaries_dir )
os.mkdir( qss_name + os.sep + 'resources' )
#os.mkdir( qss_name + os.sep + 'sources' )
except Exception as err:
print( '\nFMU-QSS directory tree setup failed: ' + str( err ) )
sys.exit( 1 )
try: # Build FMU-QSS library
QSS = os.environ.get( 'QSS' )
QSS_bin = os.environ.get( 'QSS_bin' )
PlatformCompiler = os.environ.get( 'PlatformCompiler' )
if QSS and QSS_bin and PlatformCompiler: # Configured for building
QSS_src = os.path.join( QSS, 'src', 'QSS' )
fmu_src = os.path.join( QSS_src, 'fmu' )
if os.path.exists( 'src' ):
if os.path.isdir( 'src' ):
shutil.rmtree( 'src' )
elif os.path.isfile( 'src' ):
os.remove( 'src' )
src_bld = 'src'
fmu_bld = os.path.join( 'src', 'QSS', 'fmu' )
os.makedirs( fmu_bld )
shutil.copy( 'FMU_QSS_defines.hh', fmu_bld )
shutil.copy( 'FMU_QSS_GUID.hh', fmu_bld )
shutil.copy( os.path.join( fmu_src, PlatformCompiler, 'GNUmakefile' ), src_bld )
cwd = os.getcwd()
os.chdir( src_bld )
with open( 'GNUmakefile', 'r' ) as sources:
lines = sources.readlines()
with open( 'GNUmakefile', 'w' ) as sources:
for line in lines:
if line.startswith( 'DLB := $(BIN_PATH)' + os.sep ):
sources.write( line.replace( '$(BIN_PATH)' + os.sep, '' ) )
else:
sources.write( line )
try:
import psutil
n_processors = psutil.cpu_count()
except:
print( '\nNon-parallel make used: psutil processor count lookup failed' )
n_processors = 1
try:
subprocess.call( [ 'make', '-j', str( n_processors ) ] )
try:
if sys.platform.startswith( 'linux' ):
qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.so' )
if os.path.isfile( qss_lib ): os.remove( qss_lib )
os.rename( 'libFMU-QSS.so', qss_lib )
elif sys.platform.startswith( 'win' ):
qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.dll' )
if os.path.isfile( qss_lib ): os.remove( qss_lib )
os.rename( 'libFMU-QSS.dll', qss_lib )
except Exception as err:
print( '\nFMU-QSS library move into staging directory failed: ' + str( err ) )
except Exception as err:
print( '\nFMU-QSS library make failed: ' + str( err ) )
os.chdir( cwd )
shutil.rmtree( src_bld )
else:
print( '\nFMU-QSS library can\'t be built: QSS and QSS_bin environment variables are not set' )
except Exception as err:
print( '\nFMU-QSS library build failed: ' + str( err ) )
try: # File setup
shutil.copyfile( qss_xml_name, qss_name + os.sep + 'modelDescription.xml' )
shutil.copy( me_fmu_name, qss_name + os.sep + 'resources' )
except Exception as err:
print( '\nFMU-QSS file setup failed: ' + str( err ) )
sys.exit( 1 )
try: # Zip FMU-QSS
qss_fmu_name = qss_name + '.fmu'
if os.path.exists( qss_fmu_name ):
if os.path.isfile( qss_fmu_name ):
os.remove( qss_fmu_name )
elif os.path.isdir( qss_fmu_name ):
shutil.rmtree( qss_fmu_name )
zip_file = ZipFile( qss_fmu_name, mode = 'w' )
os.chdir( qss_name )
for root, dirs, files in os.walk( '.' ):
dirs.sort()
for dir in dirs:
zip_file.write( os.path.join( root, dir ) )
files.sort()
for file in files:
zip_file.write( os.path.join( root, file ) )
os.chdir( '..' )
zip_file.close()
except Exception as err:
print( '\nFMU-QSS zip into .fmu failed: ' + str( err ) )
sys.exit( 1 )
if __name__ == '__main__':
    # Script entry point: run the FMU-QSS generation pipeline defined above.
    fmu_qss_gen()
| 0 | 0 | 0 |
c31cb5968a3e41dcb813778db8984573df0ddd4c | 9,324 | py | Python | src/Mapping/gridmap.py | josiahw/pyrover | c73aabb4b05d047c4a10b832627a0eae70cbe157 | [
"BSD-2-Clause"
] | 6 | 2017-05-23T10:34:07.000Z | 2018-11-27T09:41:13.000Z | src/Mapping/gridmap.py | josiahw/pyrover | c73aabb4b05d047c4a10b832627a0eae70cbe157 | [
"BSD-2-Clause"
] | null | null | null | src/Mapping/gridmap.py | josiahw/pyrover | c73aabb4b05d047c4a10b832627a0eae70cbe157 | [
"BSD-2-Clause"
] | 1 | 2019-07-23T02:36:44.000Z | 2019-07-23T02:36:44.000Z | """
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>
@author: Josiah Walker
"""
import numpy,random
from BlockSparseMatrix import BlockSparseMatrix
from BresenhamAlgorithms import BresenhamLine,BresenhamTriangle,BresenhamPolygon
#ranges all given in cm
SonarSensor = {"spread": 15.*numpy.pi/180., "range": 500., "phitfree": -0.3, "phitoccupied": 3.}
class GridMap:
    """
    Sparse gridmap for 2D mapping.

    Cell values are accumulated hit/free scores held in a BlockSparseMatrix
    and clipped to the range [-20, 120] by update().
    """
    def __init__(self, scale=1.0):
        """
        @brief Initialise a sparse block grid-map with arc-based sensor updates.
        @param scale The multiplier to rescale from input units to map cell size.
        """
        self._scale = scale
        self._map = BlockSparseMatrix()

    def update(self, position, distance, sensorangle, sensor):
        """
        @brief Update the map with a sensor reading.
        @param position The robot's current position given as (x,y,theta) for the robot's position and angle.
        @param distance The distance measurement from the sensor.
        @param sensorangle The current angle from the robot's forward direction to the sensor.
        @param sensor A dict holding sensor-specific hardware data (see SonarSensor in this file).
        """
        # Generate the angle positions (change angleUpdates for a finer
        # polygonal approximation of the sensor arc).
        angleUpdates = 4
        thetas = []
        for i in xrange(angleUpdates - 1):
            thetas.append(position[2] + i * sensor["spread"] / angleUpdates - sensor["spread"] / 2. + sensorangle)
        thetas.append(position[2] + sensor["spread"] / 2. + sensorangle)
        # Robot cell followed by the arc sample points, all in map-cell units.
        positions = [numpy.array(position[:2]) * self._scale]
        for t in thetas:
            positions.append(
                numpy.round(
                    numpy.array([numpy.cos(t), numpy.sin(t)]) *
                    distance *
                    self._scale + positions[0]
                ).astype(numpy.int64)
            )
        positions[0] = numpy.round(positions[0]).astype(numpy.int64)
        # Fill the empty (swept) arc area of the sensor as an approximate polygon.
        emptyVal = sensor["phitfree"]
        for cell in BresenhamPolygon(positions):
            self._map[cell[0], cell[1]] = max(emptyVal + self._map[cell[0], cell[1]], -20.)  # clip to -20
        # Rasterise the arc edge segment-by-segment and mark object hits.
        # (The previous redundant pre-loop BresenhamLine call was dead code
        # and has been removed; solidVal is loop-invariant so it is set once.)
        solidVal = sensor["phitoccupied"]
        startpt = 0
        for i in xrange(1, len(positions) - 1):
            hitVals = BresenhamLine(positions[i], positions[i + 1])
            for h in hitVals[startpt:]:
                self._map[h[0], h[1]] = min(solidVal + self._map[h[0], h[1]], 120.)  # clip to 120
            startpt = 1  # skip the shared first cell of all following line segments

    def get(self, location):
        """
        @brief Get the value at a certain x,y location.
        @param location A location in the form [x,y] (external units).
        """
        location = numpy.round(location * self._scale).astype(numpy.int64)
        # Bug fix: the map is subscripted ([]) everywhere else in this class;
        # it was previously *called* here (self._map(...)), which is
        # inconsistent with the BlockSparseMatrix usage in update()/getRange().
        return self._map[location[0], location[1]]

    def getRange(self, topleft, bottomright):
        """
        @brief Get the values for a range of locations as a matrix.
        Note: this returns at the internal scale, not the external scale.
        @param topleft A location in the form [x,y] in external units designating the top left of the area.
        @param bottomright A location in the form [x,y] in external units designating the bottom right of the area.
        """
        # Convert into map scale.
        topleft = numpy.round(numpy.array(topleft) * self._scale).astype(numpy.int64)
        bottomright = numpy.round(numpy.array(bottomright) * self._scale).astype(numpy.int64)
        # Fill in the output cell-by-cell from the sparse map.
        result = numpy.zeros((bottomright[0] - topleft[0], bottomright[1] - topleft[1]))
        for i in xrange(topleft[0], bottomright[0]):
            ival = numpy.round(i).astype(numpy.int64)
            for j in xrange(topleft[1], bottomright[1]):
                jval = numpy.round(j).astype(numpy.int64)
                result[i - topleft[0], j - topleft[1]] = self._map[ival, jval]
        return result
if __name__ == '__main__':
    """
    Do validation test
    """
    import time,os
    from matplotlib import pyplot
    #set this true and have mencoder to create a video of the test
    makevideo = True
    #set up the map and scale
    scale = 100.0
    # Ground-truth occupancy grid: 1 = wall cell. World coordinates are divided
    # by `scale` to index this 5x5 grid, so the simulated world is 500x500 units.
    groundtruth = ((1,1,1,1,1),
                   (1,0,0,0,1),
                   (1,0,1,0,1),
                   (1,0,0,0,1),
                   (1,1,1,1,1))
    gridScale = 0.5
    #set up the grid map on a 2cm scale (half the input resolution)
    estmap = GridMap(scale=gridScale)
    #this is the set of positions the rover moves between
    tour = ((150.0,150.0,0.0),(350.0,150.0,0.0),
            (350.0,150.0,numpy.pi/2.0),(350.0,350.0,numpy.pi/2.0),
            (350.0,350.0,numpy.pi),(150.0,350.0,numpy.pi),
            (150.0,350.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*2))
    #this is the number of steps along each part of the tour
    divs =100
    vals = []  # per-call timings of estmap.update, reported at the end
    for i in xrange(len(tour)-1):
        for j in xrange(divs):
            # Linear interpolation between consecutive (x, y, heading) waypoints.
            position = numpy.array(tour[i])*(1.-j/float(divs))+numpy.array(tour[(i+1)%len(tour)])*(j/float(divs))
            p = position[:2]
            a = -position[2]+numpy.pi
            offset = numpy.array([numpy.sin(a),numpy.cos(a)])*20.  # only used below to draw the robot triangle
            for k in xrange(4):
                #simulate each of the sonar sensor sweeps and see if we hit anything.
                sensor = SonarSensor
                sensorangle = numpy.pi/2*k
                thetamax = position[2] + sensor["spread"]/2. + sensorangle
                thetamin = position[2] - sensor["spread"]/2. + sensorangle
                baseB = numpy.array([numpy.cos(thetamax),numpy.sin(thetamax)])
                baseC = numpy.array([numpy.cos(thetamin),numpy.sin(thetamin)])
                hit = False
                # March the sensor cone outward one unit at a time until the
                # chord between the two cone edges crosses a wall cell.
                for distance in xrange(int(sensor["range"])):
                    B = numpy.round(baseB*distance + position[:2]).astype(numpy.int32)
                    C = numpy.round(baseC*distance + position[:2]).astype(numpy.int32)
                    for pos in BresenhamLine(B,C):
                        if groundtruth[int((pos[0]/scale))][int((pos[1]/scale))] == 1:
                            # Replace the integer probe distance with the exact
                            # float distance to the struck cell.
                            distance = numpy.linalg.norm(position[:2] - pos) #add noise in here if you want noise
                            hit = True
                            break
                    if hit:
                        t0 = time.time()
                        estmap.update(position,distance,sensorangle,sensor)
                        vals.append(time.time()-t0)
                        break
                if not hit:
                    # Nothing in range: update with the last probed distance
                    # (sensor range - 1, left over from the loop above).
                    t0 = time.time()
                    estmap.update(position,distance,sensorangle,sensor)
                    vals.append(time.time()-t0)
            if makevideo: #save out png's for the video
                fname = '_tmp%05d.png'%(i*divs+j)
                tl = (95,95)  # top-left corner (world units) of the rendered window
                print (i*divs+j)
                # Triangle marking the robot pose, in rendered-window cell coords.
                robot = (numpy.array([p+offset,p-offset,p+numpy.array([-offset[1],offset[0]])])*gridScale-numpy.array(tl)*gridScale).astype(numpy.int64)
                emap = numpy.clip(estmap.getRange(tl,(405,405)), -1000,1000 )
                for cell in BresenhamTriangle(robot[0],robot[1],robot[2]):
                    emap[cell[0],cell[1]] = 120
                pyplot.imsave(fname,emap)
                pyplot.clf()
    print "Mean Sensor Update Time:", numpy.mean(vals)
    if makevideo: #convert png's to video
        #recent ubuntu versions use avconv
        os.system("avconv -r 30 -i _tmp%05d.png -b:v 1000k rovertest.mp4")
        #os.system("mencoder 'mf://*.png' -mf type=png:fps=30 -ovc lavc -lavcopts vcodec=wmv2 -oac copy -o rovertest.avi")
        os.system("rm -f _tmp*.png")
| 43.570093 | 152 | 0.593844 | """
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>
@author: Josiah Walker
"""
import numpy,random
from BlockSparseMatrix import BlockSparseMatrix
from BresenhamAlgorithms import BresenhamLine,BresenhamTriangle,BresenhamPolygon
#ranges all given in cm
SonarSensor = {"spread": 15.*numpy.pi/180., "range": 500., "phitfree": -0.3, "phitoccupied": 3.}
class GridMap:
    """
    Sparse gridmap for 2D mapping.

    Cell values are accumulated hit/free scores held in a BlockSparseMatrix
    and clipped to the range [-20, 120] by update().
    """
    def __init__(self, scale=1.0):
        """
        @brief Initialise a sparse block grid-map with arc-based sensor updates.
        @param scale The multiplier to rescale from input units to map cell size.
        """
        self._scale = scale
        self._map = BlockSparseMatrix()

    def update(self, position, distance, sensorangle, sensor):
        """
        @brief Update the map with a sensor reading.
        @param position The robot's current position given as (x,y,theta) for the robot's position and angle.
        @param distance The distance measurement from the sensor.
        @param sensorangle The current angle from the robot's forward direction to the sensor.
        @param sensor A dict holding sensor-specific hardware data (see SonarSensor in this file).
        """
        # Generate the angle positions (change angleUpdates for a finer
        # polygonal approximation of the sensor arc).
        angleUpdates = 4
        thetas = []
        for i in xrange(angleUpdates - 1):
            thetas.append(position[2] + i * sensor["spread"] / angleUpdates - sensor["spread"] / 2. + sensorangle)
        thetas.append(position[2] + sensor["spread"] / 2. + sensorangle)
        # Robot cell followed by the arc sample points, all in map-cell units.
        positions = [numpy.array(position[:2]) * self._scale]
        for t in thetas:
            positions.append(
                numpy.round(
                    numpy.array([numpy.cos(t), numpy.sin(t)]) *
                    distance *
                    self._scale + positions[0]
                ).astype(numpy.int64)
            )
        positions[0] = numpy.round(positions[0]).astype(numpy.int64)
        # Fill the empty (swept) arc area of the sensor as an approximate polygon.
        emptyVal = sensor["phitfree"]
        for cell in BresenhamPolygon(positions):
            self._map[cell[0], cell[1]] = max(emptyVal + self._map[cell[0], cell[1]], -20.)  # clip to -20
        # Rasterise the arc edge segment-by-segment and mark object hits.
        # (The previous redundant pre-loop BresenhamLine call was dead code
        # and has been removed; solidVal is loop-invariant so it is set once.)
        solidVal = sensor["phitoccupied"]
        startpt = 0
        for i in xrange(1, len(positions) - 1):
            hitVals = BresenhamLine(positions[i], positions[i + 1])
            for h in hitVals[startpt:]:
                self._map[h[0], h[1]] = min(solidVal + self._map[h[0], h[1]], 120.)  # clip to 120
            startpt = 1  # skip the shared first cell of all following line segments

    def get(self, location):
        """
        @brief Get the value at a certain x,y location.
        @param location A location in the form [x,y] (external units).
        """
        location = numpy.round(location * self._scale).astype(numpy.int64)
        # Bug fix: the map is subscripted ([]) everywhere else in this class;
        # it was previously *called* here (self._map(...)), which is
        # inconsistent with the BlockSparseMatrix usage in update()/getRange().
        return self._map[location[0], location[1]]

    def getRange(self, topleft, bottomright):
        """
        @brief Get the values for a range of locations as a matrix.
        Note: this returns at the internal scale, not the external scale.
        @param topleft A location in the form [x,y] in external units designating the top left of the area.
        @param bottomright A location in the form [x,y] in external units designating the bottom right of the area.
        """
        # Convert into map scale.
        topleft = numpy.round(numpy.array(topleft) * self._scale).astype(numpy.int64)
        bottomright = numpy.round(numpy.array(bottomright) * self._scale).astype(numpy.int64)
        # Fill in the output cell-by-cell from the sparse map.
        result = numpy.zeros((bottomright[0] - topleft[0], bottomright[1] - topleft[1]))
        for i in xrange(topleft[0], bottomright[0]):
            ival = numpy.round(i).astype(numpy.int64)
            for j in xrange(topleft[1], bottomright[1]):
                jval = numpy.round(j).astype(numpy.int64)
                result[i - topleft[0], j - topleft[1]] = self._map[ival, jval]
        return result
if __name__ == '__main__':
    """
    Do validation test
    """
    import time,os
    from matplotlib import pyplot
    #set this true and have mencoder to create a video of the test
    makevideo = True
    #set up the map and scale
    scale = 100.0
    # Ground-truth occupancy grid: 1 = wall cell. World coordinates are divided
    # by `scale` to index this 5x5 grid, so the simulated world is 500x500 units.
    groundtruth = ((1,1,1,1,1),
                   (1,0,0,0,1),
                   (1,0,1,0,1),
                   (1,0,0,0,1),
                   (1,1,1,1,1))
    gridScale = 0.5
    #set up the grid map on a 2cm scale (half the input resolution)
    estmap = GridMap(scale=gridScale)
    #this is the set of positions the rover moves between
    tour = ((150.0,150.0,0.0),(350.0,150.0,0.0),
            (350.0,150.0,numpy.pi/2.0),(350.0,350.0,numpy.pi/2.0),
            (350.0,350.0,numpy.pi),(150.0,350.0,numpy.pi),
            (150.0,350.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*2))
    #this is the number of steps along each part of the tour
    divs =100
    vals = []  # per-call timings of estmap.update, reported at the end
    for i in xrange(len(tour)-1):
        for j in xrange(divs):
            # Linear interpolation between consecutive (x, y, heading) waypoints.
            position = numpy.array(tour[i])*(1.-j/float(divs))+numpy.array(tour[(i+1)%len(tour)])*(j/float(divs))
            p = position[:2]
            a = -position[2]+numpy.pi
            offset = numpy.array([numpy.sin(a),numpy.cos(a)])*20.  # only used below to draw the robot triangle
            for k in xrange(4):
                #simulate each of the sonar sensor sweeps and see if we hit anything.
                sensor = SonarSensor
                sensorangle = numpy.pi/2*k
                thetamax = position[2] + sensor["spread"]/2. + sensorangle
                thetamin = position[2] - sensor["spread"]/2. + sensorangle
                baseB = numpy.array([numpy.cos(thetamax),numpy.sin(thetamax)])
                baseC = numpy.array([numpy.cos(thetamin),numpy.sin(thetamin)])
                hit = False
                # March the sensor cone outward one unit at a time until the
                # chord between the two cone edges crosses a wall cell.
                for distance in xrange(int(sensor["range"])):
                    B = numpy.round(baseB*distance + position[:2]).astype(numpy.int32)
                    C = numpy.round(baseC*distance + position[:2]).astype(numpy.int32)
                    for pos in BresenhamLine(B,C):
                        if groundtruth[int((pos[0]/scale))][int((pos[1]/scale))] == 1:
                            # Replace the integer probe distance with the exact
                            # float distance to the struck cell.
                            distance = numpy.linalg.norm(position[:2] - pos) #add noise in here if you want noise
                            hit = True
                            break
                    if hit:
                        t0 = time.time()
                        estmap.update(position,distance,sensorangle,sensor)
                        vals.append(time.time()-t0)
                        break
                if not hit:
                    # Nothing in range: update with the last probed distance
                    # (sensor range - 1, left over from the loop above).
                    t0 = time.time()
                    estmap.update(position,distance,sensorangle,sensor)
                    vals.append(time.time()-t0)
            if makevideo: #save out png's for the video
                fname = '_tmp%05d.png'%(i*divs+j)
                tl = (95,95)  # top-left corner (world units) of the rendered window
                print (i*divs+j)
                # Triangle marking the robot pose, in rendered-window cell coords.
                robot = (numpy.array([p+offset,p-offset,p+numpy.array([-offset[1],offset[0]])])*gridScale-numpy.array(tl)*gridScale).astype(numpy.int64)
                emap = numpy.clip(estmap.getRange(tl,(405,405)), -1000,1000 )
                for cell in BresenhamTriangle(robot[0],robot[1],robot[2]):
                    emap[cell[0],cell[1]] = 120
                pyplot.imsave(fname,emap)
                pyplot.clf()
    print "Mean Sensor Update Time:", numpy.mean(vals)
    if makevideo: #convert png's to video
        #recent ubuntu versions use avconv
        os.system("avconv -r 30 -i _tmp%05d.png -b:v 1000k rovertest.mp4")
        #os.system("mencoder 'mf://*.png' -mf type=png:fps=30 -ovc lavc -lavcopts vcodec=wmv2 -oac copy -o rovertest.avi")
        os.system("rm -f _tmp*.png")
| 0 | 0 | 0 |
40229217811bbae1abcba8967ec11cb22fe0716e | 4,766 | py | Python | database/scores.py | Ollie-Hooper/SpotifySentiment | bafdacd3a6f668c708c67dab40055a3b5629aa87 | [
"MIT"
] | 10 | 2020-05-23T12:01:20.000Z | 2021-07-23T12:58:29.000Z | database/scores.py | Ollie-Hooper/SpotifySentiment | bafdacd3a6f668c708c67dab40055a3b5629aa87 | [
"MIT"
] | 1 | 2021-03-31T19:38:42.000Z | 2021-03-31T19:38:42.000Z | database/scores.py | Ollie-Hooper/SpotifySentiment | bafdacd3a6f668c708c67dab40055a3b5629aa87 | [
"MIT"
] | 4 | 2020-06-19T20:52:04.000Z | 2021-02-11T17:07:06.000Z | import pandas as pd
from database.tools import Database, str_list
from app.functions import time_method
audio_features = ['danceability', 'energy', 'key', 'loudness', 'mode', 'speechiness', 'acousticness',
'instrumentalness', 'liveness', 'valence', 'tempo', 'duration_ms', 'time_signature']
@time_method
| 34.788321 | 102 | 0.626311 | import pandas as pd
from database.tools import Database, str_list
from app.functions import time_method
audio_features = ['danceability', 'energy', 'key', 'loudness', 'mode', 'speechiness', 'acousticness',
'instrumentalness', 'liveness', 'valence', 'tempo', 'duration_ms', 'time_signature']
def init_scores_db(charts, time_frames):
    """Create the (empty) score tables, one per chart / time-frame pair.

    Each table stores the raw per-feature averages plus their standardized
    ('s_'-prefixed) counterparts, keyed by a text 'id' column.
    """
    # Base columns first, then one 'integer' column per audio feature and one
    # per standardized feature, in audio_features order (same layout as the
    # rows produced by run_scores_db / update_standardized_scores).
    fields = {'id': 'text', 'date': 'text', 'country': 'text'}
    for feature in audio_features:
        fields[feature] = 'integer'
    for feature in audio_features:
        fields['s_' + feature] = 'integer'
    with Database('scores') as db:
        for chart in charts:
            for time_frame in time_frames:
                table = f"{chart.replace(' ', '').lower()}_{time_frame[0]}"
                db.create(table, fields, 'id')
@time_method
def run_scores_db(chart='Top 200', time_frame='weekly'):
    """Compute and upload score rows for any chart dates not yet processed.

    Pulls the outstanding chart entries and their track audio features,
    averages them per (date, country), uploads the result and refreshes the
    standardized columns. Prints a status message either way.
    """
    pending = get_dates(chart, time_frame)
    if not pending:
        print('Scores db already up to date!')
        return
    charts_df = get_charts_df(pending, chart, time_frame)
    tracks_df = get_tracks_df(charts_df['track_id'].unique())
    merged_df = get_merged_df(charts_df, tracks_df)
    # Free the large intermediates as soon as they are no longer needed.
    del charts_df
    del tracks_df
    scores_df = format_scores_df(calculate_scores(merged_df))
    del merged_df
    upload_scores_df(scores_df, chart, time_frame)
    del scores_df
    update_standardized_scores(chart, time_frame)
    print('Finished updating scores db!')
def update_standardized_scores(chart, time_frame):
    """Recompute the 's_'-prefixed (standardized) columns of a score table."""
    table = f"{chart.replace(' ', '').lower()}_{time_frame[0]}"
    with Database('scores') as db:
        raw = db.select(table, ['id', 'country'] + audio_features)
        standardized = standardize_scores(raw)
        db.update(table, standardized[['s_' + name for name in audio_features]])
def standardize_scores(df):
    """Z-score every audio feature within its country.

    Returns a frame indexed by 'id' whose feature columns are renamed with
    an 's_' prefix; mean and standard deviation are computed per country.
    """
    per_country = df.groupby('country')
    mean = per_country.mean()
    std = per_country.std()
    features = df.reset_index().set_index(['id', 'country'])[audio_features]
    z_scores = ((features - mean) / std).reset_index().set_index('id')
    return z_scores.rename(columns={name: 's_' + name for name in audio_features})
def get_charts_df(dates, chart, time_frame):
    """Load chart entries for the given dates, indexed by (date, country)."""
    table = f"{chart.replace(' ', '').lower()}_{time_frame[0]}"
    with Database('charts') as db:
        frame = db.select(table, ['date', 'country', 'track_id'], {'date': str_list(dates)})
    # Dates come back as text; parse them so downstream groupbys sort properly.
    frame['date'] = pd.to_datetime(frame['date'])
    frame.set_index(['date', 'country'], inplace=True)
    return frame
def get_tracks_df(tracks):
    """Load the audio-feature rows for the given track ids."""
    with Database('tracks') as db:
        return db.select('audio_features', filters={'id': str_list(tracks)})
def get_merged_df(charts_df, tracks_df):
    """Inner-join chart rows (by their 'track_id' column) with the
    audio-feature frame (by its index)."""
    return charts_df.merge(tracks_df, how="inner", left_on='track_id', right_index=True)
def calculate_scores(merged_df):
    """Average every numeric column per (date, country) group."""
    grouped = merged_df.groupby(['date', 'country'])
    return grouped.mean()
def format_scores_df(scores_df):
    """Turn a (date, country)-indexed frame into one keyed by 'MM/DD/YYYY|country'.

    Note: the input frame is modified in place (its index is reset and the
    'date' column is rewritten as a formatted string) before re-indexing.
    """
    scores_df.reset_index(inplace=True)
    scores_df['date'] = [stamp.strftime('%m/%d/%Y') for stamp in scores_df['date']]
    scores_df['id'] = scores_df['date'] + '|' + scores_df['country']
    return scores_df.set_index('id')
def upload_scores_df(scores_df, chart, time_frame):
    """Insert the computed score rows into the matching scores table."""
    table = f"{chart.replace(' ', '').lower()}_{time_frame[0]}"
    with Database('scores') as db:
        db.insert(table, scores_df)
def get_dates(chart, time_frame):
    """Return chart dates that have no score rows yet, in chart-table order."""
    table = f"{chart.replace(' ', '').lower()}_{time_frame[0]}"
    with Database('scores') as scores_db:
        scores_db.cur.execute(f"SELECT DISTINCT date FROM {table}")
        already_scored = {row[0] for row in scores_db.cur.fetchall()}
    with Database('charts') as charts_db:
        charts_db.cur.execute(f"SELECT DISTINCT date FROM {table}")
        available = [row[0] for row in charts_db.cur.fetchall()]
    # Preserve the chart table's ordering while filtering via a set lookup.
    return [date for date in available if date not in already_scored]
| 4,178 | 0 | 252 |
2a6c9ecec46d0b8f0cd8c8ef989149930ee5589a | 4,698 | py | Python | 2021/Day5_argparse_makefile_docker/Makefile/4_paper2_bib/Fig1gen.py | afarnudi/ScientificSoftwareDevelopment | c70f8b1c80d24dbcca12dbcca3722053954f7eaa | [
"BSD-3-Clause"
] | null | null | null | 2021/Day5_argparse_makefile_docker/Makefile/4_paper2_bib/Fig1gen.py | afarnudi/ScientificSoftwareDevelopment | c70f8b1c80d24dbcca12dbcca3722053954f7eaa | [
"BSD-3-Clause"
] | null | null | null | 2021/Day5_argparse_makefile_docker/Makefile/4_paper2_bib/Fig1gen.py | afarnudi/ScientificSoftwareDevelopment | c70f8b1c80d24dbcca12dbcca3722053954f7eaa | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 8 10:19:43 2021
@author: ali
"""
import numpy as np
import matplotlib.pyplot as plt
from funcs import calcHelfrichUq
from funcs import calc_Helfrich_curve
from funcs import u_m2_calculator
from funcs import calcNelsonUl
from funcs import calc_nelson_amplitude
from funcs import calc_SoftMatter_amplitude
from funcs import get_um2
from funcs import process_ulm2
if __name__ == "__main__":
    # Script entry point.
    main()
| 30.309677 | 233 | 0.631332 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 8 10:19:43 2021
@author: ali
"""
import numpy as np
import matplotlib.pyplot as plt
from funcs import calcHelfrichUq
from funcs import calc_Helfrich_curve
from funcs import u_m2_calculator
from funcs import calcNelsonUl
from funcs import calc_nelson_amplitude
from funcs import calc_SoftMatter_amplitude
from funcs import get_um2
from funcs import process_ulm2
def main():
    """Generate the two-panel Fig1 plot (amplitude spectra and their ratio)
    and, when savefig is True, write it to 'Fig1.pdf'."""
    savefig=True
    dpi = 1200
    figFormat='pdf'
    # plt.rcParams["font.family"] = "Times New Roman"
    plt.rcParams.update({'font.size': 18})
    # Layout / styling knobs shared by both subplots.
    xlabelpad = 0
    ylabelpad = -2
    legendfont = 18
    titlefont = 16
    linesize = 4
    linealpha=0.4
    fig, axes = plt.subplots(2, 1,figsize=(10,8))
    # Physical parameters; units are encoded in the variable names.
    kbtInKJperMol = 2.494
    rInNm = 100
    YoungInKJperMolNm2 = 0.005*2/np.sqrt(3)*kbtInKJperMol
    kappaInKJperMol = 100*np.sqrt(3)/2*kbtInKJperMol
    #Nelson Model
    NelsonP = 0
    # NOTE(review): NelsonP, q, axi, axj and linealpha appear unused below;
    # the Nelson-model imports at the top of the file are also unused here.
    from matplotlib.pyplot import cm
    colors = cm.rainbow(np.linspace(0,1,5))
    alpha= 1
    for c in colors:
        c[3]=alpha  # set the alpha channel of every RGBA colour
    # Mode-number range plotted (modes beg..end-1).
    qmax=81
    q=np.arange(qmax)
    beg=2
    end=qmax
    axi=0
    axj=0
    # NOTE(review): the keyword is named rInMum but receives rInNm — the unit
    # in the parameter name looks inconsistent; confirm against funcs.calcHelfrichUq.
    xplot, yplotHelfrich = calcHelfrichUq(qmax, rInMum = rInNm, kappaInKJperMol = kappaInKJperMol, youngInKJperMolNm2 = YoungInKJperMolNm2, kbtInKJperMol = kbtInKJperMol)
    yplotHelfrich=yplotHelfrich/rInNm**2  # normalise the Helfrich amplitudes by r^2
    ell_max=qmax
    model = 'Safran-Milner'
    u_m2 = get_um2(ell_max, rInNm, kappaInKJperMol, YoungInKJperMolNm2, kbtInKJperMol, model)
    # Top panel (a): the two amplitude spectra on log-log axes.
    axes[0].plot(xplot[beg:end], yplotHelfrich[beg:end] ,label= r"$\frac{\overline{\langle|U_{m}|^2\rangle}_{Helfrich}}{r^2}$" ,lw =linesize, c=colors[0])
    axes[0].plot(xplot[beg:end], u_m2[beg:end] ,label=r"$\langle|U_{m}(\frac{\pi}{2})|^2\rangle_{Safran-Milner}$" ,lw=linesize,c=colors[1])
    axes[0].legend(fontsize=legendfont)
    axes[0].set_title(r"$\sigma={:.4f}$".format(YoungInKJperMolNm2)
                      +", "
                      +r"$\mathcal{{\kappa}}={:.4f}$".format(kappaInKJperMol)
                      +", "
                      +r"$r={:.4f}$".format(rInNm)
                      , fontsize=titlefont)
    axes[0].set_ylabel('Amplitudes', labelpad=ylabelpad)
    # axes[0].set_xlabel('Mode number, $m$ ')
    axes[0].set_yscale("log")
    axes[0].set_xscale("log")
    axes[0].grid(axis='x', which='both', color='k',alpha=0.5, linestyle=':')
    # Bottom panel (b): ratio of the two spectra, with a reference line at 1.
    axes[1].plot(xplot[beg:end], yplotHelfrich[beg:end]/u_m2[beg:end] ,label= r"$\frac{\overline{\langle|U_{m}|^2\rangle}_{Helfrich}}{r^2} .\frac{1}{\langle|U_{m}(\frac{\pi}{2})|^2\rangle_{Safran-Milner}}$" , c=colors[4],lw=linesize)
    axes[1].hlines(y=1,color='k',ls=':', xmin = xplot[beg], xmax=xplot[end-1],lw=linesize)
    axes[1].legend(fontsize=legendfont+4, loc ='upper center')
    # axes[1].set_title("2D", fontsize=titlefont)
    axes[1].set_ylabel('Amplitude ratio', labelpad=ylabelpad+32)
    axes[1].set_xlabel('Mode number, $m$ ',labelpad=xlabelpad)
    axes[1].set_yscale("log")
    axes[1].set_xscale("log")
    # axes[1].ticklabel_format(axis='y', style='plain')
    # Panel letters.
    axes[0].text(1.7, 0.5*10**-4, '$a)$', fontsize=15)
    axes[1].text(1.7, 3.8, '$b)$', fontsize=15)
    #set x ticks
    import matplotlib.transforms
    # Create offset transform by 5 points in x direction
    dx = 5/72.; dy = 0/72.
    offset = matplotlib.transforms.ScaledTranslation(dx, dy, fig.dpi_scale_trans)
    # apply offset transform to all x ticklabels.
    for label in axes[1].xaxis.get_majorticklabels():
        label.set_transform(label.get_transform() + offset)
    from matplotlib.ticker import StrMethodFormatter
    axes[1].xaxis.set_major_formatter(StrMethodFormatter('{x:.0f}'))
    from matplotlib.ticker import FixedLocator
    # Show plain integer labels at selected minor ticks on the log axes.
    axes[0].xaxis.set_minor_locator(FixedLocator([2,4,6,8,20,40,60,80]))
    axes[1].xaxis.set_minor_locator(FixedLocator([2,4,6,8,20,40,60,80]))
    axes[1].xaxis.set_minor_formatter(StrMethodFormatter('{x:.0f}'))
    #set y ticks
    axes[1].yaxis.set_major_formatter(StrMethodFormatter('{x:.0f}'))
    yticks = [1,2,3,4]
    axes[1].set_yticks(yticks)
    axes[1].set_yticklabels(yticks)
    axes[1].grid(axis='x', which='both', color='k',alpha=0.5, linestyle=':')
    fig.subplots_adjust(hspace=0.02, wspace=0.4)
    if savefig==True:
        # fig.savefig('3DAnalysisSamples_{}_transparent'.format(tempStat), dpi=800, transparent=True)
        fig.savefig('Fig1.{}'.format(figFormat), dpi=dpi, transparent=False, format=figFormat)
if __name__ == "__main__":
main()
| 4,180 | 0 | 23 |
13f21ee9ec6be5b734c4474d2ae83b3205c0edf6 | 9,427 | py | Python | multi_obj_pose_estimation/region_loss_multi.py | HannahHaensen/singleshotpose | 0cf15d500bc07bd113d1330bbf439b06c7b3eb54 | [
"MIT"
] | null | null | null | multi_obj_pose_estimation/region_loss_multi.py | HannahHaensen/singleshotpose | 0cf15d500bc07bd113d1330bbf439b06c7b3eb54 | [
"MIT"
] | null | null | null | multi_obj_pose_estimation/region_loss_multi.py | HannahHaensen/singleshotpose | 0cf15d500bc07bd113d1330bbf439b06c7b3eb54 | [
"MIT"
] | null | null | null | import time
import torch
import math
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from utils_multi import *
| 45.322115 | 208 | 0.55341 | import time
import torch
import math
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from utils_multi import *
def build_targets(pred_corners, target, num_keypoints, anchors, num_anchors, num_classes, nH, nW, noobject_scale,
                  object_scale, sil_thresh, seen):
    """Build regression/confidence/class training targets for one batch.

    Args:
        pred_corners: (nB*nA*nH*nW, 2*num_keypoints) predicted corner
            coordinates, normalized to [0, 1].
        target: (nB, 50*num_labels) ground-truth buffer: up to 50 objects per
            image, each encoded as [class, 2*K corner coords, width, height].
        num_keypoints: number of 2D keypoints per object (K).
        anchors: flat anchor list; num_anchors: anchor count.
        num_classes: number of classes (unused here, kept for interface parity).
        nH, nW: output grid height and width.
        noobject_scale, object_scale: confidence-loss weights.
        sil_thresh: confidence above which a cell is excused from the
            no-object penalty.
        seen: samples processed so far (unused here, kept for interface parity).

    Returns:
        (nGT, nCorrect, coord_mask, conf_mask, cls_mask, txs, tys, tconf, tcls)
    """
    nB = target.size(0)
    nA = num_anchors
    anchor_step = len(anchors) // num_anchors
    conf_mask = torch.ones(nB, nA, nH, nW) * noobject_scale
    coord_mask = torch.zeros(nB, nA, nH, nW)
    cls_mask = torch.zeros(nB, nA, nH, nW)
    txs = [torch.zeros(nB, nA, nH, nW) for _ in range(num_keypoints)]
    tys = [torch.zeros(nB, nA, nH, nW) for _ in range(num_keypoints)]
    tconf = torch.zeros(nB, nA, nH, nW)
    tcls = torch.zeros(nB, nA, nH, nW)
    num_labels = 2 * num_keypoints + 3  # +2 for width, height and +1 for class
    nAnchors = nA * nH * nW
    nPixels = nH * nW

    # Pass 1: drop the no-object penalty wherever some prediction already
    # matches a ground-truth object better than sil_thresh.
    for b in range(nB):
        cur_pred_corners = pred_corners[b * nAnchors:(b + 1) * nAnchors].t()
        cur_confs = torch.zeros(nAnchors)
        for t in range(50):
            if target[b][t * num_labels + 1] == 0:
                break  # remaining slots are padding
            g = []
            for i in range(num_keypoints):
                g.append(target[b][t * num_labels + 2 * i + 1])
                g.append(target[b][t * num_labels + 2 * i + 2])
            cur_gt_corners = torch.FloatTensor(g).repeat(nAnchors, 1).t()  # (2K, nAnchors)
            cur_confs = torch.max(cur_confs.view_as(conf_mask[b]),
                                  corner_confidences(cur_pred_corners, cur_gt_corners).view_as(conf_mask[b]))
        conf_mask[b][cur_confs > sil_thresh] = 0

    # Pass 2: assign each ground-truth object to its best anchor and fill in
    # the regression / confidence / class targets.
    nGT = 0
    nCorrect = 0
    for b in range(nB):
        for t in range(50):
            if target[b][t * num_labels + 1] == 0:
                break
            nGT = nGT + 1
            gx = []
            gy = []
            gt_box = []
            for i in range(num_keypoints):
                gt_box.extend([target[b][t * num_labels + 2 * i + 1], target[b][t * num_labels + 2 * i + 2]])
                gx.append(target[b][t * num_labels + 2 * i + 1] * nW)
                gy.append(target[b][t * num_labels + 2 * i + 2] * nH)
                if i == 0:
                    # The grid cell containing the first (centroid) keypoint
                    # owns this object.
                    gi0 = int(gx[i])
                    gj0 = int(gy[i])
            # Choose the responsible anchor by 2D-box IOU *before* reading the
            # matching prediction. (Bug fix: the original looked up
            # pred_corners while best_n was still -1, i.e. it always read the
            # wrong anchor slot when computing the confidence target.)
            best_iou = 0.0
            best_n = -1
            gw = target[b][t * num_labels + num_labels - 2] * nW
            gh = target[b][t * num_labels + num_labels - 1] * nH
            gt_2d_box = [0, 0, gw, gh]
            for n in range(nA):
                aw = anchors[anchor_step * n]
                ah = anchors[anchor_step * n + 1]
                anchor_box = [0, 0, aw, ah]
                iou = bbox_iou(anchor_box, gt_2d_box, x1y1x2y2=False)
                if iou > best_iou:
                    best_iou = iou
                    best_n = n
            pred_box = pred_corners[b * nAnchors + best_n * nPixels + gj0 * nW + gi0]
            conf = corner_confidence(gt_box, pred_box)
            coord_mask[b][best_n][gj0][gi0] = 1
            cls_mask[b][best_n][gj0][gi0] = 1
            conf_mask[b][best_n][gj0][gi0] = object_scale
            # Update targets: offsets within the owning cell, confidence, class.
            for i in range(num_keypoints):
                txs[i][b][best_n][gj0][gi0] = gx[i] - gi0
                tys[i][b][best_n][gj0][gi0] = gy[i] - gj0
            tconf[b][best_n][gj0][gi0] = conf
            tcls[b][best_n][gj0][gi0] = target[b][t * num_labels]
            if conf > 0.5:
                nCorrect = nCorrect + 1
    return nGT, nCorrect, coord_mask, conf_mask, cls_mask, txs, tys, tconf, tcls
class RegionLoss(nn.Module):
    """YOLO-style region loss for multi-object 6D pose estimation.

    Combines MSE losses on the 2D keypoint offsets, an MSE confidence loss,
    and a cross-entropy classification loss. During the first
    ``pretrain_num_epochs`` epochs the confidence term is left out so the
    coordinate regressors can stabilize first.
    """

    def __init__(self, num_keypoints=9, num_classes=13, anchors=None, num_anchors=5, pretrain_num_epochs=15):
        super(RegionLoss, self).__init__()
        self.num_classes = num_classes
        # None default avoids the shared-mutable-default pitfall; behaves as
        # the old `anchors=[]` default did.
        self.anchors = [] if anchors is None else anchors
        self.num_anchors = num_anchors
        # Integer division: anchor_step is a stride into the flat anchor list
        # (consistent with the `//` used in build_targets).
        self.anchor_step = len(self.anchors) // num_anchors
        self.num_keypoints = num_keypoints
        self.coord_scale = 1
        self.noobject_scale = 1
        self.object_scale = 5
        self.class_scale = 1
        self.thresh = 0.6
        self.seen = 0
        self.pretrain_num_epochs = pretrain_num_epochs

    def forward(self, output, target, epoch):
        """Compute the region loss.

        Args:
            output: raw network output, (nB, nA*(2*K+1+nC), nH, nW), on GPU.
            target: ground-truth buffer (see build_targets).
            epoch: current epoch; gates the confidence term.

        Returns:
            Scalar loss tensor.
        """
        # Parameters
        t0 = time.time()
        nB = output.data.size(0)
        nA = self.num_anchors
        nC = self.num_classes
        nH = output.data.size(2)
        nW = output.data.size(3)

        # Activation: sigmoid on the centroid offsets and confidence; the
        # remaining keypoint offsets stay linear.
        output = output.view(nB, nA, (2 * self.num_keypoints + 1 + nC), nH, nW)
        x = list()
        y = list()
        x.append(torch.sigmoid(output.index_select(2, Variable(torch.cuda.LongTensor([0]))).view(nB, nA, nH, nW)))
        y.append(torch.sigmoid(output.index_select(2, Variable(torch.cuda.LongTensor([1]))).view(nB, nA, nH, nW)))
        for i in range(1, self.num_keypoints):
            x.append(output.index_select(2, Variable(torch.cuda.LongTensor([2 * i + 0]))).view(nB, nA, nH, nW))
            y.append(output.index_select(2, Variable(torch.cuda.LongTensor([2 * i + 1]))).view(nB, nA, nH, nW))
        conf = torch.sigmoid(
            output.index_select(2, Variable(torch.cuda.LongTensor([2 * self.num_keypoints]))).view(nB, nA, nH, nW))
        cls = output.index_select(2, Variable(
            torch.linspace(2 * self.num_keypoints + 1, 2 * self.num_keypoints + 1 + nC - 1, nC).long().cuda()))
        cls = cls.view(nB * nA, nC, nH * nW).transpose(1, 2).contiguous().view(nB * nA * nH * nW, nC)
        t1 = time.time()

        # Create pred boxes: absolute, grid-normalized corner coordinates.
        pred_corners = torch.cuda.FloatTensor(2 * self.num_keypoints, nB * nA * nH * nW)
        grid_x = torch.linspace(0, nW - 1, nW).repeat(nH, 1).repeat(nB * nA, 1, 1).view(nB * nA * nH * nW).cuda()
        grid_y = torch.linspace(0, nH - 1, nH).repeat(nW, 1).t().repeat(nB * nA, 1, 1).view(nB * nA * nH * nW).cuda()
        for i in range(self.num_keypoints):
            pred_corners[2 * i + 0] = (x[i].data.view_as(grid_x) + grid_x) / nW
            pred_corners[2 * i + 1] = (y[i].data.view_as(grid_y) + grid_y) / nH
        gpu_matrix = pred_corners.transpose(0, 1).contiguous().view(-1, 2 * self.num_keypoints)
        pred_corners = convert2cpu(gpu_matrix)
        t2 = time.time()

        # Build targets (runs on CPU).
        nGT, nCorrect, coord_mask, conf_mask, cls_mask, txs, tys, tconf, tcls = \
            build_targets(pred_corners, target.data, self.num_keypoints, self.anchors, nA, nC, nH, nW,
                          self.noobject_scale, self.object_scale, self.thresh, self.seen)
        cls_mask = (cls_mask == 1)
        nProposals = int((conf > 0.25).sum().item())
        for i in range(self.num_keypoints):
            txs[i] = Variable(txs[i].cuda())
            tys[i] = Variable(tys[i].cuda())
        tconf = Variable(tconf.cuda())
        tcls = Variable(tcls[cls_mask].long().cuda())
        coord_mask = Variable(coord_mask.cuda())
        conf_mask = Variable(conf_mask.cuda().sqrt())
        cls_mask = Variable(cls_mask.view(-1, 1).repeat(1, nC).cuda())
        cls = cls[cls_mask].view(-1, nC)
        t3 = time.time()

        # Create loss. reduction='sum' replaces the deprecated
        # size_average=False (identical semantics, no deprecation warning).
        loss_xs = list()
        loss_ys = list()
        for i in range(self.num_keypoints):
            loss_xs.append(
                self.coord_scale * nn.MSELoss(reduction='sum')(x[i] * coord_mask, txs[i] * coord_mask) / 2.0)
            loss_ys.append(
                self.coord_scale * nn.MSELoss(reduction='sum')(y[i] * coord_mask, tys[i] * coord_mask) / 2.0)
        loss_conf = nn.MSELoss(reduction='sum')(conf * conf_mask, tconf * conf_mask) / 2.0
        # torch.stack + torch.sum keeps autograd intact on PyTorch >= 1.7
        # (np.sum over a list of tensors does not).
        loss_x = torch.sum(torch.stack(loss_xs))
        loss_y = torch.sum(torch.stack(loss_ys))
        loss_cls = self.class_scale * nn.CrossEntropyLoss(reduction='sum')(cls, tcls)
        if epoch > self.pretrain_num_epochs:
            loss = loss_x + loss_y + loss_cls + loss_conf
        else:
            # Pretrain initially without confidence loss; once the coordinate
            # predictions get better, start training for confidence as well.
            loss = loss_x + loss_y + loss_cls
        print('%d: nGT %d, recall %d, proposals %d, loss: x %f, y %f, conf %f, cls %f, total %f' % (
            self.seen, nGT, nCorrect, nProposals, loss_x.item(), loss_y.item(), loss_conf.item(), loss_cls.item(),
            loss.item()))
        t4 = time.time()
        if False:  # flip on for per-stage timing output
            print('-----------------------------------')
            print('        activation : %f' % (t1 - t0))
            print(' create pred_corners : %f' % (t2 - t1))
            print('     build targets : %f' % (t3 - t2))
            print('       create loss : %f' % (t4 - t3))
            print('             total : %f' % (t4 - t0))
        return loss
| 9,166 | 7 | 99 |
d98034ed1a5e72aceede024697574ded18bae3a8 | 317 | py | Python | stock/forms.py | pmaigutyak/mp-stock | 42051acebeda32803398de925497eece4b273d47 | [
"0BSD"
] | 1 | 2021-09-25T14:31:47.000Z | 2021-09-25T14:31:47.000Z | stock/forms.py | pmaigutyak/mp-stock | 42051acebeda32803398de925497eece4b273d47 | [
"0BSD"
] | null | null | null | stock/forms.py | pmaigutyak/mp-stock | 42051acebeda32803398de925497eece4b273d47 | [
"0BSD"
] | null | null | null |
from django import forms
from django.utils.translation import ugettext_lazy as _
from categories.models import Category
| 21.133333 | 55 | 0.757098 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from categories.models import Category
class StockReportForm(forms.Form):
    """Filter form for building a stock report."""

    # Optional subset of categories to restrict the report to.
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.all(),
        required=False)

    # Optional free-text id list.
    ids = forms.CharField(required=False)
| 0 | 171 | 23 |
711028c668c7cf809b88df63c59f52dd682db445 | 1,783 | py | Python | src/passmanager.py | Nova-Striker/Open-Palm | c7a4b0ae11fc12706bb93deca489ca622295e66d | [
"MIT"
] | 4 | 2020-10-02T05:15:29.000Z | 2020-10-09T16:42:31.000Z | src/passmanager.py | JDeepD/Open-Palm | 894e4e53f5766d98e6f16e54aafd1f5f8f3f7815 | [
"MIT"
] | 5 | 2020-11-20T18:53:05.000Z | 2021-04-08T10:42:54.000Z | src/passmanager.py | Nova-Striker/Open-Palm | c7a4b0ae11fc12706bb93deca489ca622295e66d | [
"MIT"
] | 1 | 2020-10-30T05:42:40.000Z | 2020-10-30T05:42:40.000Z | """This module will deal with password management"""
import csv
def storepass(user, passwd, target="admins.csv"):
    """Append a user/password pair as one CSV row.

    By default rows go to ``creds/admins.csv``. The CSV is always saved in
    the ``creds`` directory, but the file name can be changed via the
    optional `target` parameter.
    """
    # newline='' is required by the csv module: without it every row is
    # followed by a spurious blank line on Windows. encoding matches the
    # utf8 used by get_pass() when reading the file back.
    with open(f"creds/{target}", 'a+', encoding="utf8", newline='') as fil:
        writer = csv.writer(fil)
        writer.writerow([user, passwd])
def cipherpass(passwd):
    """Return *passwd* "ciphered" by doubling each character's code point.

    Every character ``c`` is replaced by ``chr(2 * ord(c))``; the result can
    be reversed by halving the code points again (see ``decipherpass``).
    """
    return "".join(chr(2 * ord(ch)) for ch in passwd)
def decipherpass(encr):
    """Invert ``cipherpass``: halve every character's code point."""
    return "".join(chr(ord(ch) // 2) for ch in encr)
def get_pass(target="admins.csv"):  # gets the user info from the Csv file
    """Read ``creds/<target>`` and return its rows as a {user: password} dict.

    Bug fix: the original did ``print(list(reader))`` before iterating, which
    exhausted the csv reader and made the function always return ``{}``.
    """
    # 'r' instead of 'r+': the file is only read here. newline='' is the
    # documented way to open files for the csv module.
    with open(f"creds/{target}", 'r', encoding="utf8", newline='') as fil:
        reader = csv.reader(fil)
        # Skip blank rows so a trailing newline does not raise IndexError.
        return {row[0]: row[1] for row in reader if row}
| 27.859375 | 74 | 0.630959 | """This module will deal with password management"""
import csv
def storepass(user, passwd, target="admins.csv"):
    """Append a user/password pair as one CSV row.

    By default rows go to ``creds/admins.csv``. The CSV is always saved in
    the ``creds`` directory, but the file name can be changed via the
    optional `target` parameter.
    """
    # newline='' is required by the csv module: without it every row is
    # followed by a spurious blank line on Windows. encoding matches the
    # utf8 used by get_pass() when reading the file back.
    with open(f"creds/{target}", 'a+', encoding="utf8", newline='') as fil:
        writer = csv.writer(fil)
        writer.writerow([user, passwd])
def cipherpass(passwd):
    """Return *passwd* "ciphered" by doubling each character's code point.

    Every character ``c`` is replaced by ``chr(2 * ord(c))``; the result can
    be reversed by halving the code points again (see ``decipherpass``).
    """
    return "".join(chr(2 * ord(ch)) for ch in passwd)
def decipherpass(encr):
    """Invert ``cipherpass``: halve every character's code point."""
    return "".join(chr(ord(ch) // 2) for ch in encr)
def get_pass(target="admins.csv"):  # gets the user info from the Csv file
    """Read ``creds/<target>`` and return its rows as a {user: password} dict.

    Bug fix: the original did ``print(list(reader))`` before iterating, which
    exhausted the csv reader and made the function always return ``{}``.
    """
    # 'r' instead of 'r+': the file is only read here. newline='' is the
    # documented way to open files for the csv module.
    with open(f"creds/{target}", 'r', encoding="utf8", newline='') as fil:
        reader = csv.reader(fil)
        # Skip blank rows so a trailing newline does not raise IndexError.
        return {row[0]: row[1] for row in reader if row}
| 0 | 0 | 0 |
17b06b8704d83d3bd45aff590923ec8ff1b8db9b | 1,568 | py | Python | app/error_handler.py | drone/ff-python-flask-sample | 7ec1d81b3a61610a6c57ac5745d561c75830f96a | [
"Apache-2.0"
] | null | null | null | app/error_handler.py | drone/ff-python-flask-sample | 7ec1d81b3a61610a6c57ac5745d561c75830f96a | [
"Apache-2.0"
] | null | null | null | app/error_handler.py | drone/ff-python-flask-sample | 7ec1d81b3a61610a6c57ac5745d561c75830f96a | [
"Apache-2.0"
] | 1 | 2021-11-01T09:53:37.000Z | 2021-11-01T09:53:37.000Z | from flask import make_response, jsonify
# Root of this module's exception hierarchy; subclasses are expected to
# carry a human-readable `description` attribute.
class Error(Exception):
    """Base class for exceptions in this module."""
    pass
| 25.704918 | 72 | 0.66773 | from flask import make_response, jsonify
def init_errorhandler(app):
    """Register JSON-body error handlers (400/401/404/500) on *app*.

    Every handler answers with ``{"error": <description>}`` and the matching
    HTTP status code.
    """
    def _json_handler(status):
        # One closure per status so each registered handler carries its own
        # response code.
        def handler(error):
            return make_response(jsonify({'error': error.description}), status)
        return handler

    for status in (400, 401, 404, 500):
        app.errorhandler(status)(_json_handler(status))
class Error(Exception):
    """Base class for exceptions in this module."""
class ResourceNotFound(Error):
    """Raised when a requested resource cannot be located."""

    def __init__(self):
        message = 'Resource not found.'
        self.description = message
class TenantNotFound(Error):
    """Raised when a lookup names an unknown tenant."""

    def __init__(self, tenant_id=None):
        message = 'Tenant {0} not found.'.format(tenant_id)
        self.description = message
class UserNotFound(Error):
    """Raised when a lookup names an unknown user."""

    def __init__(self, user_id=None):
        message = 'User {0} not found.'.format(user_id)
        self.description = message
class Unauthorized(Error):
    """Raised when the caller lacks permission for the operation."""

    def __init__(self):
        message = 'Unauthorized'
        self.description = message
class BadRequest(Error):
    """Raised for malformed client requests (maps to HTTP 400)."""

    def __init__(self, description=None):
        # Fall back to a generic message when no detail is supplied.
        self.description = description if description is not None else 'Malformed request'
class ServerError(Error):
    """Raised for internal failures (maps to HTTP 500)."""

    def __init__(self, description=None):
        # Fall back to a generic message when no detail is supplied.
        self.description = description if description is not None else 'Server Error'
| 1,083 | 33 | 317 |
9555d4d2d0c4a3b8b918f4cc4516e07e520a98bc | 3,163 | py | Python | webkit/build/rule_binding.py | zachlatta/chromium | c4625eefca763df86471d798ee5a4a054b4716ae | [
"BSD-3-Clause"
] | 1 | 2021-09-24T22:49:10.000Z | 2021-09-24T22:49:10.000Z | webkit/build/rule_binding.py | changbai1980/chromium | c4625eefca763df86471d798ee5a4a054b4716ae | [
"BSD-3-Clause"
] | null | null | null | webkit/build/rule_binding.py | changbai1980/chromium | c4625eefca763df86471d798ee5a4a054b4716ae | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# usage: rule_binding.py INPUT CPPDIR HDIR -- INPUTS -- OPTIONS
#
# INPUT is an IDL file, such as Whatever.idl.
#
# CPPDIR is the directory into which V8Whatever.cpp will be placed. HDIR is
# the directory into which V8Whatever.h will be placed.
#
# The first item in INPUTS is the path to generate-bindings.pl. Remaining
# items in INPUTS are used to build the Perl module include path.
#
# OPTIONS are passed as-is to generate-bindings.pl as additional arguments.
import errno
import os
import shlex
import shutil
import subprocess
import sys
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| 28.754545 | 80 | 0.68258 | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# usage: rule_binding.py INPUT CPPDIR HDIR -- INPUTS -- OPTIONS
#
# INPUT is an IDL file, such as Whatever.idl.
#
# CPPDIR is the directory into which V8Whatever.cpp will be placed. HDIR is
# the directory into which V8Whatever.h will be placed.
#
# The first item in INPUTS is the path to generate-bindings.pl. Remaining
# items in INPUTS are used to build the Perl module include path.
#
# OPTIONS are passed as-is to generate-bindings.pl as additional arguments.
import errno
import os
import shlex
import shutil
import subprocess
import sys
def SplitArgsIntoSections(args):
    """Split *args* into sections separated by literal '--' markers.

    Every '--' starts a new section, so consecutive markers or a trailing
    marker produce empty sections. An empty argument list yields no
    sections at all.
    """
    if not args:
        return []
    sections = [[]]
    for arg in args:
        if arg == '--':
            # Start collecting the next section.
            sections.append([])
        else:
            sections[-1].append(arg)
    return sections
def main(args):
  """Run generate-bindings.pl for one IDL file and place the outputs.

  Expects argv of the form: INPUT CPPDIR HDIR -- INPUTS -- OPTIONS
  (see the module header). Returns the Perl process's exit code.
  """
  sections = SplitArgsIntoSections(args[1:])
  # NOTE(review): these asserts validate command-line shape but are stripped
  # under `python -O`; they rely on the build system always passing -O-less.
  assert len(sections) == 3
  (base, inputs, options) = sections

  # BASE: the IDL input plus the two output directories.
  assert len(base) == 3
  input = base[0]
  cppdir = base[1]
  hdir = base[2]

  # INPUTS: generate-bindings.pl followed by its Perl module dependencies;
  # each module's directory becomes a -I include path (deduplicated).
  assert len(inputs) > 1
  generate_bindings = inputs[0]
  perl_modules = inputs[1:]

  include_dirs = []
  for perl_module in perl_modules:
    include_dir = os.path.dirname(perl_module)
    if not include_dir in include_dirs:
      include_dirs.append(include_dir)

  # The defines come in as one flat string. Split it up into distinct arguments.
  if '--defines' in options:
    defines_index = options.index('--defines')
    if defines_index + 1 < len(options):
      split_options = shlex.split(options[defines_index + 1])
      if split_options:
        options[defines_index + 1] = ' '.join(split_options)

  # Build up the command.
  command = ['perl', '-w']
  for include_dir in include_dirs:
    command.extend(['-I', include_dir])
  command.append(generate_bindings)
  command.extend(options)
  command.extend(['--outputDir', cppdir, input])

  # Do it. check_call is new in 2.5, so simulate its behavior with call and
  # assert.
  return_code = subprocess.call(command)
  assert return_code == 0

  # Both the .cpp and .h were generated in cppdir, but if hdir is different,
  # the .h needs to move. Copy it instead of using os.rename for maximum
  # portability in all cases.
  if cppdir != hdir:
    input_basename = os.path.basename(input)
    (root, ext) = os.path.splitext(input_basename)
    hname = 'V8%s.h' % root
    hsrc = os.path.join(cppdir, hname)
    hdst = os.path.join(hdir, hname)
    shutil.copyfile(hsrc, hdst)
    os.unlink(hsrc)

  return return_code
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| 2,324 | 0 | 46 |
82cadcf5c827cd7c4238ba6a9191950713dbe2fe | 4,162 | py | Python | src/test/python/apache/aurora/client/commands/util.py | wickman/incubator-aurora | 9906d217093568ed4c9cfe620862818f15ce4150 | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/client/commands/util.py | wickman/incubator-aurora | 9906d217093568ed4c9cfe620862818f15ce4150 | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/client/commands/util.py | wickman/incubator-aurora | 9906d217093568ed4c9cfe620862818f15ce4150 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2013 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from apache.aurora.client.hooks.hooked_api import HookedAuroraClientAPI
from apache.aurora.common.cluster import Cluster
from apache.aurora.common.clusters import Clusters
from gen.apache.aurora.ttypes import (
Response,
ResponseCode,
Result,
)
from mock import Mock
| 30.82963 | 91 | 0.717924 | #
# Copyright 2013 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from apache.aurora.client.hooks.hooked_api import HookedAuroraClientAPI
from apache.aurora.common.cluster import Cluster
from apache.aurora.common.clusters import Clusters
from gen.apache.aurora.ttypes import (
Response,
ResponseCode,
Result,
)
from mock import Mock
class AuroraClientCommandTest(unittest.TestCase):
  """Shared fixtures for Aurora client command tests.

  Provides canned scheduler responses, mocked client APIs, a fake clock,
  and Aurora DSL config templates so individual test modules do not have
  to re-implement them.
  """

  @classmethod
  def create_blank_response(cls, code, msg):
    # Skeleton scheduler response; callers fill in `result` as needed.
    response = Mock(spec=Response)
    response.responseCode = code
    response.message = msg
    response.result = Mock(spec=Result)
    return response

  @classmethod
  def create_simple_success_response(cls):
    """Return an OK response with no interesting payload."""
    return cls.create_blank_response(ResponseCode.OK, 'OK')

  @classmethod
  def create_error_response(cls):
    """Return a generic ERROR response."""
    return cls.create_blank_response(ResponseCode.ERROR, 'Damn')

  @classmethod
  def create_mock_api(cls):
    """Builds up a mock API object, with a mock SchedulerProxy"""
    # This looks strange, but we set up the same object to use as both
    # the SchedulerProxy and the SchedulerClient. These tests want to observe
    # what API calls get made against the scheduler, and both of these objects
    # delegate calls to the scheduler. It doesn't matter which one is used:
    # what we care about is that the right API calls get made.
    mock_scheduler_proxy = Mock()
    mock_scheduler_proxy.url = "http://something_or_other"
    mock_scheduler_proxy.scheduler_client.return_value = mock_scheduler_proxy
    mock_api = Mock(spec=HookedAuroraClientAPI)
    mock_api.scheduler_proxy = mock_scheduler_proxy
    return (mock_api, mock_scheduler_proxy)

  @classmethod
  def create_mock_api_factory(cls):
    """Create a collection of mocks for a test that wants to mock out the client API
    by patching the api factory."""
    mock_api, mock_scheduler_proxy = cls.create_mock_api()
    mock_api_factory = Mock()
    mock_api_factory.return_value = mock_api
    return mock_api_factory, mock_scheduler_proxy

  # Arbitrary base timestamp advanced by fake_time().
  FAKE_TIME = 42131

  @classmethod
  def fake_time(cls, ignored):
    """Utility function used for faking time to speed up tests."""
    # Advances 2 "seconds" per call; `ignored` matches time.time's call shape
    # when patched in.
    cls.FAKE_TIME += 2
    return cls.FAKE_TIME

  # Aurora DSL config template; %(inner)s is a hook for injecting extra
  # (possibly invalid) clauses via get_test_config().
  CONFIG_BASE = """
HELLO_WORLD = Job(
name = '%(job)s',
role = '%(role)s',
cluster = '%(cluster)s',
environment = '%(env)s',
instances = 20,
%(inner)s
update_config = UpdateConfig(
batch_size = 5,
restart_threshold = 30,
watch_secs = 10,
max_per_shard_failures = 2,
),
task = Task(
name = 'test',
processes = [Process(name = 'hello_world', cmdline = 'echo {{thermos.ports[http]}}')],
resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
)
)
jobs = [HELLO_WORLD]
"""

  # Canonical identifiers used across command tests.
  TEST_ROLE = 'mchucarroll'
  TEST_ENV = 'test'
  TEST_JOB = 'hello'
  TEST_CLUSTER = 'west'
  TEST_JOBSPEC = 'west/mchucarroll/test/hello'

  # Single-cluster registry matching TEST_CLUSTER.
  TEST_CLUSTERS = Clusters([Cluster(
      name='west',
      packer_copy_command='copying {{package}}',
      zk='zookeeper.example.com',
      scheduler_zk_path='/foo/bar',
      auth_mechanism='UNAUTHENTICATED')])

  @classmethod
  def get_test_config(cls, cluster, role, env, job, filler=''):
    """Create a config from the template"""
    return cls.CONFIG_BASE % {'job': job, 'role': role, 'env': env, 'cluster': cluster,
        'inner': filler}

  @classmethod
  def get_valid_config(cls):
    """Return a config string that should parse cleanly."""
    return cls.get_test_config(cls.TEST_CLUSTER, cls.TEST_ROLE, cls.TEST_ENV, cls.TEST_JOB)

  @classmethod
  def get_invalid_config(cls, bad_clause):
    """Return a config string with *bad_clause* injected into the Job."""
    return cls.get_test_config(cls.TEST_CLUSTER, cls.TEST_ROLE, cls.TEST_ENV, cls.TEST_JOB,
        bad_clause)
| 558 | 2,687 | 23 |
071c73c6884f4a13b40eb710f548d4aa790d04e8 | 2,088 | py | Python | experiments/dataset_stats/stats_utils.py | Tobias-Fischer/dreyeve | a65342d9c503ce3ec932e2229b90aaeebfd82944 | [
"MIT"
] | 83 | 2017-05-29T04:16:42.000Z | 2022-03-03T08:09:22.000Z | experiments/dataset_stats/stats_utils.py | ashinmarin/dreyeve | d73979d738e706d90a8aa9d696c6e4dcb19c1134 | [
"MIT"
] | 26 | 2017-11-09T23:35:52.000Z | 2022-03-11T03:22:57.000Z | experiments/dataset_stats/stats_utils.py | ashinmarin/dreyeve | d73979d738e706d90a8aa9d696c6e4dcb19c1134 | [
"MIT"
] | 36 | 2017-09-23T02:48:41.000Z | 2022-03-11T01:34:23.000Z | """
Some statistics utils.
"""
import numpy as np
from os.path import join
def expectation_2d(pdf):
    """
    Computes the statistical expectation of a pdf defined
    over two discrete random variables.

    Parameters
    ----------
    pdf: ndarray
        a numpy 2-dimensional array with probability for each (x, y).

    Returns
    -------
    ndarray
        the expectation (mean row index, mean column index).
    """
    p = np.float32(pdf)
    p = p / p.sum()  # normalize into a probability mass

    n_rows, n_cols = p.shape
    rows, cols = np.indices((n_rows, n_cols))
    coords = np.stack((rows, cols), axis=-1)

    # E[(r, c)] = sum over the grid of p(r, c) * (r, c)
    return np.einsum('ij,ijk->k', p, coords)
def covariance_matrix_2d(pdf):
    """
    Computes the covariance matrix of a 2-dimensional gaussian
    fitted over a joint pdf of two discrete random variables.

    Parameters
    ----------
    pdf: ndarray
        a numpy 2-dimensional array with probability for each (x, y).

    Returns
    -------
    ndarray
        the 2x2 covariance matrix over (row, col) indices.
    """
    p = np.float32(pdf)
    p = p / p.sum()  # normalize into a probability mass

    n_rows, n_cols = p.shape
    rows, cols = np.indices((n_rows, n_cols))
    coords = np.float32(np.stack((rows, cols), axis=-1))

    # First moment (mean row/col index), then center the coordinate grid.
    mu = np.einsum('ij,ijk->k', p, coords)
    centered = np.float32(coords - mu).reshape(-1, 2)

    # Cov = sum_x p(x) (x - mu)(x - mu)^T, written as a weighted Gram matrix.
    weights = p.reshape(-1, 1)
    return centered.T @ (centered * weights)
def read_dreyeve_design(dreyeve_root):
    """
    Reads the whole dr(eye)ve design.

    Returns
    -------
    ndarray
        the dr(eye)ve design in the form (sequences, params).
    """
    design_file = join(dreyeve_root, 'dr(eye)ve_design.txt')
    with open(design_file) as f:
        # One row per sequence; columns are tab-separated parameters.
        rows = [line.rstrip().split('\t') for line in f]
    return np.array(rows)
| 21.090909 | 83 | 0.604885 | """
Some statistics utils.
"""
import numpy as np
from os.path import join
def expectation_2d(pdf):
    """
    Computes the statistical expectation of a pdf defined
    over two discrete random variables.

    Parameters
    ----------
    pdf: ndarray
        a numpy 2-dimensional array with probability for each (x, y).

    Returns
    -------
    ndarray
        the expectation (mean row index, mean column index).
    """
    p = np.float32(pdf)
    p = p / p.sum()  # normalize into a probability mass

    n_rows, n_cols = p.shape
    rows, cols = np.indices((n_rows, n_cols))
    coords = np.stack((rows, cols), axis=-1)

    # E[(r, c)] = sum over the grid of p(r, c) * (r, c)
    return np.einsum('ij,ijk->k', p, coords)
def covariance_matrix_2d(pdf):
    """
    Computes the covariance matrix of a 2-dimensional gaussian
    fitted over a joint pdf of two discrete random variables.

    Parameters
    ----------
    pdf: ndarray
        a numpy 2-dimensional array with probability for each (x, y).

    Returns
    -------
    ndarray
        the 2x2 covariance matrix over (row, col) indices.
    """
    p = np.float32(pdf)
    p = p / p.sum()  # normalize into a probability mass

    n_rows, n_cols = p.shape
    rows, cols = np.indices((n_rows, n_cols))
    coords = np.float32(np.stack((rows, cols), axis=-1))

    # First moment (mean row/col index), then center the coordinate grid.
    mu = np.einsum('ij,ijk->k', p, coords)
    centered = np.float32(coords - mu).reshape(-1, 2)

    # Cov = sum_x p(x) (x - mu)(x - mu)^T, written as a weighted Gram matrix.
    weights = p.reshape(-1, 1)
    return centered.T @ (centered * weights)
def read_dreyeve_design(dreyeve_root):
    """
    Reads the whole dr(eye)ve design.

    Returns
    -------
    ndarray
        the dr(eye)ve design in the form (sequences, params).
    """
    design_file = join(dreyeve_root, 'dr(eye)ve_design.txt')
    with open(design_file) as f:
        # One row per sequence; columns are tab-separated parameters.
        rows = [line.rstrip().split('\t') for line in f]
    return np.array(rows)
| 0 | 0 | 0 |
d5835217c99d1b1decaa544b038ac29991c0172b | 824 | py | Python | telemetry/main.py | MovoLovo/Robotics | 5efcf546ad6074595617875db04f402848a796c3 | [
"MIT"
] | null | null | null | telemetry/main.py | MovoLovo/Robotics | 5efcf546ad6074595617875db04f402848a796c3 | [
"MIT"
] | null | null | null | telemetry/main.py | MovoLovo/Robotics | 5efcf546ad6074595617875db04f402848a796c3 | [
"MIT"
] | 1 | 2020-11-16T21:47:21.000Z | 2020-11-16T21:47:21.000Z | #!/usr/bin/env pybricks-micropython
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor,
InfraredSensor, UltrasonicSensor, GyroSensor)
from pybricks.parameters import Port, Stop, Direction, Button, Color
from pybricks.tools import wait, StopWatch, DataLog
from pybricks.robotics import DriveBase
from pybricks.media.ev3dev import SoundFile, ImageFile
# Config
# Hardware wiring: drive motors on ports B (left) and C (right), ultrasonic
# sensors on S3 (front) and S4 (right), gyro on S2.
ev3 = EV3Brick()
left_motor = Motor(Port.B)
right_motor = Motor(Port.C)
right_ultra = UltrasonicSensor(Port.S4)
front_ultra = UltrasonicSensor(Port.S3)
gyro = GyroSensor(Port.S2)
data = []  # logged front-ultrasonic readings, one per loop iteration
gyro.reset_angle(0)  # measure the sweep from 0 degrees
count = 0  # NOTE(review): never used — dead variable; confirm before removing
# Spin in place (left forward, right backward) until the robot has rotated
# ~320 degrees, sampling the front ultrasonic distance each iteration.
# NOTE(review): the motors are not stopped after the loop — presumably they
# keep running; confirm the intended behavior on hardware.
while gyro.angle() < 320:
    left_motor.run(200)
    right_motor.run(-200)
    print(front_ultra.distance())
    data.append(front_ultra.distance())
print(data) | 24.969697 | 78 | 0.747573 | #!/usr/bin/env pybricks-micropython
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor,
InfraredSensor, UltrasonicSensor, GyroSensor)
from pybricks.parameters import Port, Stop, Direction, Button, Color
from pybricks.tools import wait, StopWatch, DataLog
from pybricks.robotics import DriveBase
from pybricks.media.ev3dev import SoundFile, ImageFile
# Config
# Hardware wiring: drive motors on ports B (left) and C (right), ultrasonic
# sensors on S3 (front) and S4 (right), gyro on S2.
ev3 = EV3Brick()
left_motor = Motor(Port.B)
right_motor = Motor(Port.C)
right_ultra = UltrasonicSensor(Port.S4)
front_ultra = UltrasonicSensor(Port.S3)
gyro = GyroSensor(Port.S2)
data = []  # logged front-ultrasonic readings, one per loop iteration
gyro.reset_angle(0)  # measure the sweep from 0 degrees
count = 0  # NOTE(review): never used — dead variable; confirm before removing
# Spin in place (left forward, right backward) until the robot has rotated
# ~320 degrees, sampling the front ultrasonic distance each iteration.
# NOTE(review): the motors are not stopped after the loop — presumably they
# keep running; confirm the intended behavior on hardware.
while gyro.angle() < 320:
    left_motor.run(200)
    right_motor.run(-200)
    print(front_ultra.distance())
    data.append(front_ultra.distance())
print(data) | 0 | 0 | 0 |
fb424fb1d756cf4cd0f7c5620c5fe0dcc99bab7d | 581 | py | Python | cybot/plug/newmember.py | francis-taylor/Timotty-Bot | 2cf7c9897ed31d26d331594e2578b253e3b970d8 | [
"MIT"
] | 6 | 2017-10-18T14:22:48.000Z | 2017-10-26T15:14:52.000Z | cybot/plug/newmember.py | Fr4ncisTaylor/Timotty | 2cf7c9897ed31d26d331594e2578b253e3b970d8 | [
"MIT"
] | 1 | 2017-10-20T19:16:03.000Z | 2017-10-20T19:16:03.000Z | cybot/plug/newmember.py | Fr4ncisTaylor/Timotty | 2cf7c9897ed31d26d331594e2578b253e3b970d8 | [
"MIT"
] | 7 | 2017-10-18T14:19:52.000Z | 2017-10-22T15:23:33.000Z | #coding:utf-8 -*-
import config, metodos
from mensagens import bemvindo
from pprint import *
from metodos import sendMessage
| 27.666667 | 142 | 0.698795 | #coding:utf-8 -*-
import config, metodos
from mensagens import bemvindo
from pprint import *
from metodos import sendMessage
def welcome(msg):
    """Reply to a join event with the configured welcome message."""
    if 'new_chat_member' not in msg:
        return
    new_member = msg['new_chat_member']
    greeting = bemvindo['welcome'].format(new_member['first_name'])
    sendMessage(msg['chat']['id'], greeting, reply_to_message_id=msg['message_id'])
def byebye(msg):
    """Reply to a leave event with the configured goodbye message."""
    if 'left_chat_member' not in msg:
        return
    gone_member = msg['left_chat_member']
    farewell = bemvindo['byebye'].format(gone_member[u'first_name'])
    sendMessage(msg['chat']['id'], farewell, reply_to_message_id=msg['message_id'])
def shell(msg):
    # Plugin entry point: route the incoming update to the handlers.
    # NOTE(review): only welcome() is dispatched here; byebye() is defined
    # above but never called from shell() -- confirm the framework invokes
    # it elsewhere, otherwise this looks like an omission.
    welcome(msg)
| 387 | 0 | 69 |
eb2db6022afb454387e5f42136c03a4a0dd14767 | 139 | py | Python | example/test/core/light/dark/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 2 | 2020-09-04T12:27:15.000Z | 2022-01-17T14:49:40.000Z | example/test/core/light/dark/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | null | null | null | example/test/core/light/dark/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 1 | 2020-09-04T12:27:52.000Z | 2020-09-04T12:27:52.000Z | import IceRayCpp
| 12.636364 | 34 | 0.553957 | import IceRayCpp
def name():
    """Return the registry key identifying this light type."""
    return "dark"
def make():
    """Instantiate the native dark light and wrap it for the test harness."""
    return {'this': IceRayCpp.LightDark()}
| 71 | 0 | 50 |
c78551398e7c80311e36e1af2905f117981784e3 | 533 | py | Python | TimeWrapper_JE/venv/Lib/site-packages/pkginfo/index.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | TimeWrapper_JE/venv/Lib/site-packages/pkginfo/index.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | TimeWrapper_JE/venv/Lib/site-packages/pkginfo/index.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | from .distribution import Distribution
| 33.3125 | 66 | 0.606004 | from .distribution import Distribution
class Index(dict):
    """A mapping of Distribution objects keyed by '<name>-<version>'."""

    def __setitem__(self, key, value):
        """Store *value*, enforcing the type and key-format invariants."""
        if not isinstance(value, Distribution):
            raise ValueError('Not a distribution: %r.' % value)
        expected = '%s-%s' % (value.name, value.version)
        if key != expected:
            raise ValueError('Key must match <name>-<version>.')
        super(Index, self).__setitem__(key, value)

    def add(self, distribution):
        """Insert *distribution* under its canonical '<name>-<version>' key."""
        canonical_key = '%s-%s' % (distribution.name, distribution.version)
        self[canonical_key] = distribution
| 411 | -3 | 83 |
06a385fb58c0fd830b984e212ec507d4558954b3 | 1,037 | py | Python | python/tests/test_ll_zip.py | M7madMomani2/data-structures-and-algorithms | 35ba48973d45f6972d097e4aaac7cfb7147a83a2 | [
"MIT"
] | null | null | null | python/tests/test_ll_zip.py | M7madMomani2/data-structures-and-algorithms | 35ba48973d45f6972d097e4aaac7cfb7147a83a2 | [
"MIT"
] | null | null | null | python/tests/test_ll_zip.py | M7madMomani2/data-structures-and-algorithms | 35ba48973d45f6972d097e4aaac7cfb7147a83a2 | [
"MIT"
] | 1 | 2021-08-29T20:16:19.000Z | 2021-08-29T20:16:19.000Z | import pytest
from data_structures.linked_list.ll_zip import *
from data_structures.linked_list.linked_list import *
@pytest.fixture
@pytest.fixture
| 29.628571 | 91 | 0.702025 | import pytest
from data_structures.linked_list.ll_zip import *
from data_structures.linked_list.linked_list import *
def test_zipLists(linked_list_ob, linked_list_ob2):
    """zipLists should interleave the two lists; re-check after growing list two."""
    zipped = linked_list_ob.zipLists(linked_list_ob2).to_string()
    assert zipped == " {0} -> {1} -> {3} -> {2} -> {2} -> {3} -> {1} -> {0} -> NULL"
    # A fifth node in the second list changes the interleaving pattern.
    linked_list_ob2.insert(5)
    rezipped = linked_list_ob.zipLists(linked_list_ob2).to_string()
    assert rezipped == " {0} -> {5} -> {3} -> {1} -> {2} -> {2} -> {1} -> {3} -> {0} -> NULL"
@pytest.fixture
def linked_list_ob():
    """Linked list built by inserting 1, 2, 3, 0 in that order."""
    first_list = Linkedlist()
    for value in (1, 2, 3, 0):
        first_list.insert(value)
    return first_list
@pytest.fixture
def linked_list_ob2():
    """Linked list built by inserting 0, 3, 2, 1 in that order."""
    second_list = Linkedlist()
    for value in (0, 3, 2, 1):
        second_list.insert(value)
    return second_list
| 816 | 0 | 67 |
9c08c75fac5d3fba256ebb61b6ce5501cf322d19 | 737 | py | Python | networks/encoders/__init__.py | Yaoyi-Li/HOP-Matting | 4ac22d92b5432734ffe416cf2c0a99fb730d0c04 | [
"MIT"
] | 56 | 2020-04-26T16:19:50.000Z | 2021-12-30T07:20:40.000Z | networks/encoders/__init__.py | Yaoyi-Li/HOP-Matting | 4ac22d92b5432734ffe416cf2c0a99fb730d0c04 | [
"MIT"
] | 5 | 2020-04-27T19:17:17.000Z | 2021-07-17T13:55:35.000Z | networks/encoders/__init__.py | Yaoyi-Li/HOP-Matting | 4ac22d92b5432734ffe416cf2c0a99fb730d0c04 | [
"MIT"
] | 11 | 2020-04-29T10:01:35.000Z | 2022-03-31T03:34:50.000Z | import logging
from .resnet_enc import ResNet_D, BasicBlock, Bottleneck
from .res_localHOP_posEmb_enc import ResLocalHOP_PosEmb
__all__ = ['resnet_localHOP_posEmb_encoder_29']
if __name__ == "__main__":
import torch
logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s] %(levelname)s: %(message)s',
datefmt='%m-%d %H:%M:%S')
resnet_encoder = resnet_encoder_29()
x = torch.randn(4,6,512,512)
z = resnet_encoder(x)
print(z[0].shape)
| 28.346154 | 95 | 0.701493 | import logging
from .resnet_enc import ResNet_D, BasicBlock, Bottleneck
from .res_localHOP_posEmb_enc import ResLocalHOP_PosEmb
__all__ = ['resnet_localHOP_posEmb_encoder_29']
def _res_localHOP_posEmb(block, layers, **kwargs):
    """Construct a ResLocalHOP_PosEmb network from a block type and layer spec."""
    return ResLocalHOP_PosEmb(block, layers, **kwargs)
def resnet_localHOP_posEmb_encoder_29(**kwargs):
    """29-layer local-HOP encoder with positional embedding (3-4-4-2 BasicBlocks)."""
    return _res_localHOP_posEmb(BasicBlock, [3, 4, 4, 2], **kwargs)
if __name__ == "__main__":
import torch
logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s] %(levelname)s: %(message)s',
datefmt='%m-%d %H:%M:%S')
resnet_encoder = resnet_encoder_29()
x = torch.randn(4,6,512,512)
z = resnet_encoder(x)
print(z[0].shape)
| 197 | 0 | 46 |
597ea9e787724f7ec681dab7b426f06cda2a4e67 | 1,057 | py | Python | par_checker_general.py | Yasir323/Data-Structures-and-Algorithms-in-Python | b721d0ca0218b9665d4f6ca0bbfd4417244bcdf0 | [
"MIT"
] | null | null | null | par_checker_general.py | Yasir323/Data-Structures-and-Algorithms-in-Python | b721d0ca0218b9665d4f6ca0bbfd4417244bcdf0 | [
"MIT"
] | null | null | null | par_checker_general.py | Yasir323/Data-Structures-and-Algorithms-in-Python | b721d0ca0218b9665d4f6ca0bbfd4417244bcdf0 | [
"MIT"
] | null | null | null | # Program to check closing of all brackets
print(par_checker('{({([][])}())}')) # True
print(par_checker('[{()]')) # False
| 19.574074 | 52 | 0.522233 | # Program to check closing of all brackets
class Stack:
    """A simple LIFO stack backed by a Python list.

    The top of the stack is the end of the underlying list, so push,
    pop and peek are all O(1).
    """

    def __init__(self, items=None):
        # The original used a mutable default argument (items=[]), so every
        # Stack() created with no argument shared ONE list. Copy the input
        # instead, which also avoids aliasing a caller-owned list.
        self.items = list(items) if items is not None else []

    def is_empty(self):
        """Return True if the stack holds no items."""
        return not self.items

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item (IndexError if empty)."""
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it (IndexError if empty)."""
        return self.items[-1]

    def __len__(self):
        return len(self.items)
def matches(open, close):
    """Return True if *close* is the closing bracket for *open*.

    Unlike the original index-based lookup, a character that is not a
    recognised bracket yields False instead of raising ValueError from
    str.index. (Parameter names kept for interface compatibility even
    though they shadow builtins.)
    """
    pairs = {'[': ']', '{': '}', '(': ')'}
    return pairs.get(open) == close
def par_checker(str):
    """Return True if every bracket in *str* is properly opened and closed.

    The original pushed onto a hand-rolled Stack and sent ANY non-opening
    character to matches(), which raised ValueError for non-bracket input
    such as '(x'. A plain list is used as the stack (idiomatic and O(1)),
    and a non-bracket character now simply makes the string unbalanced --
    the same result the original produced whenever it did not crash.
    """
    pairs = {'(': ')', '[': ']', '{': '}'}
    pending = []
    for char in str:
        if char in pairs:
            pending.append(char)
        elif pending and pairs[pending.pop()] == char:
            continue
        else:
            # Either a closer with nothing open, a mismatched closer,
            # or a non-bracket character: the string is unbalanced.
            return False
    return not pending
# Demo: exercise par_checker on a balanced and an unbalanced string.
print(par_checker('{({([][])}())}')) # True
print(par_checker('[{()]')) # False
| 705 | -9 | 231 |
2284af4298e7f6432c522550cff209b741fb1b8e | 65 | py | Python | lib/__init__.py | gideontong/Humingbird | 07f8ef2e52e2212742e80b2cece06566b0ebea86 | [
"MIT"
] | 1 | 2020-07-19T22:16:14.000Z | 2020-07-19T22:16:14.000Z | lib/__init__.py | gideontong/Humingbird | 07f8ef2e52e2212742e80b2cece06566b0ebea86 | [
"MIT"
] | 1 | 2020-07-31T04:43:25.000Z | 2020-07-31T09:16:50.000Z | lib/__init__.py | gideontong/Humingbird | 07f8ef2e52e2212742e80b2cece06566b0ebea86 | [
"MIT"
] | 1 | 2021-03-07T20:09:44.000Z | 2021-03-07T20:09:44.000Z | from . import csvTasks
from . import Gender
# from . import Learn | 21.666667 | 22 | 0.753846 | from . import csvTasks
from . import Gender
# from . import Learn | 0 | 0 | 0 |
6146fe70d4b5666a37f29b0b26d9dd1fe27dabca | 17 | py | Python | env-sample.py | peeomid/fbmsgbot-sample-py | 489018ab269f9beb4d38a6157af7f42205cffbfb | [
"MIT"
] | null | null | null | env-sample.py | peeomid/fbmsgbot-sample-py | 489018ab269f9beb4d38a6157af7f42205cffbfb | [
"MIT"
] | null | null | null | env-sample.py | peeomid/fbmsgbot-sample-py | 489018ab269f9beb4d38a6157af7f42205cffbfb | [
"MIT"
] | null | null | null | VERIFY_TOKEN = '' | 17 | 17 | 0.705882 | VERIFY_TOKEN = '' | 0 | 0 | 0 |
408085c17ca0fc1260b46e80780ea878b34a31c5 | 2,716 | py | Python | generate-html.py | tebeka/py2go-cheatsheet | 14c83850876ef80c36af326ab4fc6f56344781c7 | [
"BSD-3-Clause"
] | 13 | 2017-09-09T08:32:34.000Z | 2022-02-28T04:32:43.000Z | generate-html.py | tebeka/py2go-cheatsheet | 14c83850876ef80c36af326ab4fc6f56344781c7 | [
"BSD-3-Clause"
] | 3 | 2017-11-25T18:48:11.000Z | 2017-12-30T13:00:04.000Z | generate-html.py | tebeka/py2go-cheatsheet | 14c83850876ef80c36af326ab4fc6f56344781c7 | [
"BSD-3-Clause"
] | 2 | 2019-11-03T19:58:17.000Z | 2020-04-28T01:14:17.000Z | #!/usr/bin/env python
import html
import re
from subprocess import check_output
import yaml
find_code = re.compile(r'code: (\w+)').search
table_html = '''
<table class="code table table-sm">
<tbody>
<tr>
<td>{py}</td>
<td>{go}</td>
</tr>
</tbody>
</table>
'''
module_html = '''
<tr>
<td>{task}</td>
<td><a href="https://docs.python.org/3/library/{python}.html">
{python}</a>
</td>
<td><a href="https://golang.org/pkg/{go}/">{go}</a></td>
</tr>
'''
is_start = re.compile(r'(//|#) START').search
is_end = re.compile(r'(//|#) END').search
find_spaces = re.compile('^[ \t]+').match
if __name__ == '__main__':
from argparse import ArgumentParser, FileType
from sys import stdin
parser = ArgumentParser()
parser.add_argument('--file', type=FileType(), default=stdin)
args = parser.parse_args()
for line in args.file:
line = line[:-1] # trim newline
match = find_code(line)
if match:
name = match.group(1)
py = htmlize(code_for(name, 'python'), 'python')
go = htmlize(code_for(name, 'go'), 'go')
print(table_html.format(py=py, go=go))
elif line.strip() == ':modules:':
modules()
else:
print(line)
continue
| 24.468468 | 73 | 0.555596 | #!/usr/bin/env python
import html
import re
from subprocess import check_output
import yaml
find_code = re.compile(r'code: (\w+)').search
table_html = '''
<table class="code table table-sm">
<tbody>
<tr>
<td>{py}</td>
<td>{go}</td>
</tr>
</tbody>
</table>
'''
module_html = '''
<tr>
<td>{task}</td>
<td><a href="https://docs.python.org/3/library/{python}.html">
{python}</a>
</td>
<td><a href="https://golang.org/pkg/{go}/">{go}</a></td>
</tr>
'''
is_start = re.compile(r'(//|#) START').search
is_end = re.compile(r'(//|#) END').search
find_spaces = re.compile('^[ \t]+').match
def indent_size(line):
    """Return the number of leading space/tab characters in *line*."""
    # str.lstrip does the same job as the module-level find_spaces regex
    # without the match-object dance, and naturally yields 0 when the
    # line has no leading whitespace.
    return len(line) - len(line.lstrip(' \t'))
def get_code(fname, sep):
    """Extract all START/END-delimited snippets from *fname*.

    Each region between a START marker and the next END marker becomes one
    snippet; tabs are normalized, common indentation is stripped, and the
    snippets are joined with *sep*.
    """
    blocks = []
    block = []
    in_block = False
    with open(fname) as fp:
        for line in fp:
            if is_start(line):
                # Nested/duplicate START markers are authoring errors.
                assert not in_block, 'start inside block'
                in_block = True
                continue
            elif is_end(line):
                assert in_block, 'end without block'
                blocks.append(block)
                block = []
                in_block = False
            elif in_block:
                block.append(line.replace('\t', '    '))
    # Dedent each snippet by its minimum indentation (blank lines ignored).
    # NOTE: the loop variable deliberately reuses (shadows) the name `block`.
    for block in blocks:
        indent = min(indent_size(line) for line in block if line.strip())
        for i, line in enumerate(block):
            block[i] = line[indent:]
    codes = [''.join(block).strip() for block in blocks]
    return sep.join(codes)
def code_for(name, typ):
    """Return the concatenated marker-delimited snippets for *name* in *typ*."""
    comment_marker = '#' if typ == 'python' else '//'
    separator = '\n\n{} ...\n\n'.format(comment_marker)
    return get_code(f'{name}.{typ[:2]}', separator)
def htmlize(code, typ):
    """Run *code* through pygmentize and return the highlighted HTML."""
    pygmentize_cmd = ['pygmentize', '-l', typ, '-f', 'html']
    return check_output(pygmentize_cmd, input=code.encode()).decode()
def modules():
    """Print the HTML table rows for the module mapping in modules.yaml."""
    with open('modules.yaml') as fp:
        # safe_load: plain yaml.load without an explicit Loader is deprecated
        # and can execute arbitrary Python tags; safe_load parses plain data
        # files identically and safely.
        modules = yaml.safe_load(fp)
    for module in modules:
        module['task'] = html.escape(module['task'])
        print(module_html.format(**module))
if __name__ == '__main__':
    from argparse import ArgumentParser, FileType
    from sys import stdin
    parser = ArgumentParser()
    # Template file to process; defaults to stdin when --file is omitted.
    parser.add_argument('--file', type=FileType(), default=stdin)
    args = parser.parse_args()
    # Pass the template through line by line, expanding the two directives:
    # "code: <name>" becomes a side-by-side Python/Go snippet table, and a
    # lone ":modules:" becomes the module-mapping table rows.
    for line in args.file:
        line = line[:-1]  # trim newline
        match = find_code(line)
        if match:
            name = match.group(1)
            py = htmlize(code_for(name, 'python'), 'python')
            go = htmlize(code_for(name, 'go'), 'go')
            print(table_html.format(py=py, go=go))
        elif line.strip() == ':modules:':
            modules()
        else:
            print(line)
            continue  # no-op: last statement of the loop body
| 1,309 | 0 | 115 |
ad1bb7663f7a9d7fe0f5b7b2838066a4f8f734ea | 9,247 | py | Python | transiter/http/endpoints/systemendpoints.py | jamespfennell/realtimerail | 352dd7d185d3501d28276476e1390d3288735690 | [
"MIT"
] | null | null | null | transiter/http/endpoints/systemendpoints.py | jamespfennell/realtimerail | 352dd7d185d3501d28276476e1390d3288735690 | [
"MIT"
] | null | null | null | transiter/http/endpoints/systemendpoints.py | jamespfennell/realtimerail | 352dd7d185d3501d28276476e1390d3288735690 | [
"MIT"
] | null | null | null | """
Systems
Endpoints for installing, reading, configuring and deleting transit systems.
"""
import flask
import requests
from transiter import exceptions
from transiter.http import httpmanager, httpviews
from transiter.http.httpmanager import (
http_endpoint,
link_target,
HttpMethod,
HttpStatus,
)
from transiter.http.permissions import requires_permissions, PermissionsLevel
from transiter.services import stopservice, systemservice, views
system_endpoints = flask.Blueprint(__name__, __name__)
@http_endpoint(system_endpoints, "")
@link_target(httpviews.SystemsInstalled)
def list_all():
"""
List all systems
List all transit systems that are installed in this Transiter instance.
"""
return systemservice.list_all()
@http_endpoint(system_endpoints, "/<system_id>")
@link_target(views.System, ["id"])
def get_by_id(system_id):
"""
Get a specific system
Get a system by its ID.
Return code | Description
------------|-------------
`200 OK` | A system with this ID exists.
`404 NOT FOUND` | No system with the provided ID is installed.
"""
return systemservice.get_by_id(system_id)
@http_endpoint(system_endpoints, "/<system_id>/transfers")
@link_target(views.TransfersInSystem, ["_system_id"])
def list_all_transfers_in_system(system_id):
"""
List all transfers in a system
List all transfers in a system.
Return code | Description
------------|-------------
`200 OK` | A system with this ID exists.
`404 NOT FOUND` | No system with the provided ID is installed.
"""
from_stop_ids = httpmanager.get_list_url_parameter("from_stop_id")
to_stop_ids = httpmanager.get_list_url_parameter("to_stop_id")
return stopservice.list_all_transfers_in_system(
system_id, from_stop_ids=from_stop_ids, to_stop_ids=to_stop_ids
)
@http_endpoint(
    system_endpoints, "/<system_id>", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def install(system_id):
    """
    Install a system
    This endpoint is used to install or update transit systems.
    Installs/updates can be performed asynchronously (recommended)
    or synchronously (using the optional URL parameter `sync=true`; not recommended);
    see below for more information.
    The endpoint accepts `multipart/form-data` requests.
    There is a single required parameter, `config_file`, which
    specifies the YAML configuration file for the Transit system.
    (There is a [dedicated documentation page](systems.md) concerned with creating transit system configuration files.)
    The parameter can either be:
    - A file upload of the configuration file, or
    - A text string, which will be interpreted as a URL pointing to the configuration file.
    In addition, depending on the configuration file, the endpoint will also accept extra text form data parameters.
    These additional parameters are used for things like API keys, which are different
    for each user installing the transit system.
    The configuration file will customize certain information using the parameters -
    for example, it might include an API key as a GET parameter in a feed URL.
    If you are installing a system using a YAML configuration provided by someone else, you
    should be advised of which additional parameters are needed.
    If you attempt to install a system without the required parameters, the install will fail and
    the response will detail which parameters you're missing.
    #### Async versus sync
    Often the install/update process is long because it often involves performing
    large feed updates
    of static feeds - for example, in the case of the New York City Subway,
    an install takes close to two minutes.
    If you perform a synchronous install, the install request is liable
    to timeout - for example, Gunicorn by default terminates HTTP
    requests that take over 60 seconds.
    For this reason you should generally install asynchronously.
    After triggering the install asynchronously, you can track its
    progress by hitting the `GET` system endpoint repeatedly.
    Synchronous installs are supported and useful when writing new
    transit system configs, in which case getting feedback from a single request
    is quicker.
    Return code | Description
    --------------------|-------------
    `201 CREATED` | For synchronous installs, returned if the transit system was successfully installed.
    `202 ACCEPTED` | For asynchronous installs, returned if the install is successfully triggered. This does not necessarily mean the system will be succesfully installed.
    `400 BAD REQUEST` | Returned if the YAML configuration file cannot be retrieved. For synchronous installs, this code is also returned if there is any kind of install error.
    """
    form_key_to_value = flask.request.form.to_dict()
    # "config_file" is consumed here; every remaining form field is treated
    # as a user-supplied template setting (API keys etc.).
    config_file_url = form_key_to_value.pop("config_file", None)
    sync = httpmanager.is_sync_request()
    system_update_pk = systemservice.install(
        system_id=system_id,
        config_str=_get_config_file(
            config_file_url, flask.request.files.get("config_file")
        ),
        extra_settings=form_key_to_value,
        config_source_url=config_file_url,
        sync=sync,
    )
    # Fetch the resulting update once; the original fetched it twice and
    # leaked a debug print() of the response tuple.
    update = systemservice.get_update_by_id(system_update_pk)
    if sync:
        if update.status == views.SystemUpdateStatus.SUCCESS:
            status = HttpStatus.CREATED
        else:
            status = HttpStatus.BAD_REQUEST
    else:
        status = HttpStatus.ACCEPTED
    return update, status
@http_endpoint(
system_endpoints,
"/<system_id>",
method=HttpMethod.DELETE,
returns_json_response=False,
)
@requires_permissions(PermissionsLevel.ALL)
def delete_by_id(system_id):
"""
Uninstall (delete) a system
The uninstall can be performed asynchronously or synchronously (using the
optional URL parameter `sync=true`).
You should almost always use the asynchronous version of this endpoint.
It works by changing the system ID to be a new "random" ID, and then performs
the delete asynchronously.
This means that at soon as the HTTP request ends (within a few milliseconds)
the system is invisible to users, and available for installing a new system.
The actual delete takes up to a few minutes for large transit systems like
the NYC Subway.
Return code | Description
--------------------|-------------
`202 ACCEPTED` | For asynchronous deletes, returned if the delete is successfully triggered.
`204 NO CONTENT` | For synchronous deletes, returned if the system was successfully deleted.
`404 NOT FOUND` | Returned if the system does not exist.
"""
systemservice.delete_by_id(
system_id, error_if_not_exists=True, sync=httpmanager.is_sync_request()
)
if httpmanager.is_sync_request():
status = HttpStatus.NO_CONTENT
else:
status = HttpStatus.ACCEPTED
return flask.Response(response="", status=status, content_type="")
@http_endpoint(
system_endpoints, "/<system_id>/auto-update", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def set_auto_update_enabled(system_id):
"""
Configure system auto-update
Configure whether auto-update is enabled for
auto-updatable feeds in a system.
The endpoint takes a single form parameter `enabled`
which can either be `true` or `false` (case insensitive).
Return code | Description
--------------------|-------------
`204 NO CONTENT` | The configuration was applied successfully.
`400 BAD REQUEST` | Returned if the form parameter is not provided or is invalid.
`404 NOT FOUND` | Returned if the system does not exist.
"""
# TODO: this should just accept a URL parameter
form_key_to_value = flask.request.form.to_dict()
enabled = form_key_to_value.get("enabled")
if enabled is None:
raise exceptions.InvalidInput("The form variable 'enabled' is required")
enabled = enabled.lower()
if enabled not in {"false", "true"}:
raise exceptions.InvalidInput(
"The form variable 'enabled' has to be 'true' or 'false', not '{}'".format(
enabled
)
)
systemservice.set_auto_update_enabled(
system_id, form_key_to_value["enabled"].lower() == "true"
)
return "", HttpStatus.NO_CONTENT
| 37.136546 | 178 | 0.701525 | """
Systems
Endpoints for installing, reading, configuring and deleting transit systems.
"""
import flask
import requests
from transiter import exceptions
from transiter.http import httpmanager, httpviews
from transiter.http.httpmanager import (
http_endpoint,
link_target,
HttpMethod,
HttpStatus,
)
from transiter.http.permissions import requires_permissions, PermissionsLevel
from transiter.services import stopservice, systemservice, views
system_endpoints = flask.Blueprint(__name__, __name__)
@http_endpoint(system_endpoints, "")
@link_target(httpviews.SystemsInstalled)
def list_all():
    """
    List all systems
    List all transit systems that are installed in this Transiter instance.
    """
    # Thin HTTP wrapper: all the work happens in the service layer.
    return systemservice.list_all()
@http_endpoint(system_endpoints, "/<system_id>")
@link_target(views.System, ["id"])
def get_by_id(system_id):
    """
    Get a specific system
    Get a system by its ID.
    Return code | Description
    ------------|-------------
    `200 OK` | A system with this ID exists.
    `404 NOT FOUND` | No system with the provided ID is installed.
    """
    # Thin HTTP wrapper: the service raises the not-found error that the
    # HTTP layer maps to 404.
    return systemservice.get_by_id(system_id)
@http_endpoint(system_endpoints, "/<system_id>/transfers")
@link_target(views.TransfersInSystem, ["_system_id"])
def list_all_transfers_in_system(system_id):
    """
    List all transfers in a system
    List all transfers in a system.
    Return code | Description
    ------------|-------------
    `200 OK` | A system with this ID exists.
    `404 NOT FOUND` | No system with the provided ID is installed.
    """
    # Optional repeated URL parameters narrow the result to transfers
    # starting and/or ending at the given stops.
    return stopservice.list_all_transfers_in_system(
        system_id,
        from_stop_ids=httpmanager.get_list_url_parameter("from_stop_id"),
        to_stop_ids=httpmanager.get_list_url_parameter("to_stop_id"),
    )
@http_endpoint(
    system_endpoints, "/<system_id>", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def install(system_id):
    """
    Install a system
    This endpoint is used to install or update transit systems.
    Installs/updates can be performed asynchronously (recommended)
    or synchronously (using the optional URL parameter `sync=true`; not recommended);
    see below for more information.
    The endpoint accepts `multipart/form-data` requests.
    There is a single required parameter, `config_file`, which
    specifies the YAML configuration file for the Transit system.
    (There is a [dedicated documentation page](systems.md) concerned with creating transit system configuration files.)
    The parameter can either be:
    - A file upload of the configuration file, or
    - A text string, which will be interpreted as a URL pointing to the configuration file.
    In addition, depending on the configuration file, the endpoint will also accept extra text form data parameters.
    These additional parameters are used for things like API keys, which are different
    for each user installing the transit system.
    The configuration file will customize certain information using the parameters -
    for example, it might include an API key as a GET parameter in a feed URL.
    If you are installing a system using a YAML configuration provided by someone else, you
    should be advised of which additional parameters are needed.
    If you attempt to install a system without the required parameters, the install will fail and
    the response will detail which parameters you're missing.
    #### Async versus sync
    Often the install/update process is long because it often involves performing
    large feed updates
    of static feeds - for example, in the case of the New York City Subway,
    an install takes close to two minutes.
    If you perform a synchronous install, the install request is liable
    to timeout - for example, Gunicorn by default terminates HTTP
    requests that take over 60 seconds.
    For this reason you should generally install asynchronously.
    After triggering the install asynchronously, you can track its
    progress by hitting the `GET` system endpoint repeatedly.
    Synchronous installs are supported and useful when writing new
    transit system configs, in which case getting feedback from a single request
    is quicker.
    Return code | Description
    --------------------|-------------
    `201 CREATED` | For synchronous installs, returned if the transit system was successfully installed.
    `202 ACCEPTED` | For asynchronous installs, returned if the install is successfully triggered. This does not necessarily mean the system will be succesfully installed.
    `400 BAD REQUEST` | Returned if the YAML configuration file cannot be retrieved. For synchronous installs, this code is also returned if there is any kind of install error.
    """
    form_key_to_value = flask.request.form.to_dict()
    # "config_file" is consumed here; every remaining form field is treated
    # as a user-supplied template setting (API keys etc.).
    config_file_url = form_key_to_value.pop("config_file", None)
    sync = httpmanager.is_sync_request()
    system_update_pk = systemservice.install(
        system_id=system_id,
        config_str=_get_config_file(
            config_file_url, flask.request.files.get("config_file")
        ),
        extra_settings=form_key_to_value,
        config_source_url=config_file_url,
        sync=sync,
    )
    # Fetch the resulting update once; the original fetched it twice and
    # leaked a debug print() of the response tuple.
    update = systemservice.get_update_by_id(system_update_pk)
    if sync:
        if update.status == views.SystemUpdateStatus.SUCCESS:
            status = HttpStatus.CREATED
        else:
            status = HttpStatus.BAD_REQUEST
    else:
        status = HttpStatus.ACCEPTED
    return update, status
def _get_config_file(config_source_url, uploaded_config_file):
    """Return the YAML config text, preferring the URL over the file upload."""
    if config_source_url is None and uploaded_config_file is None:
        raise exceptions.InvalidInput("YAML config file not provided!")
    if config_source_url is None:
        return uploaded_config_file.read().decode("utf-8")
    try:
        response = requests.get(config_source_url)
        response.raise_for_status()
    except requests.exceptions.RequestException:
        raise exceptions.InvalidInput(
            "Could not download YAML config file from '{}'".format(
                config_source_url
            )
        )
    return response.text
@http_endpoint(
    system_endpoints,
    "/<system_id>",
    method=HttpMethod.DELETE,
    returns_json_response=False,
)
@requires_permissions(PermissionsLevel.ALL)
def delete_by_id(system_id):
    """
    Uninstall (delete) a system
    The uninstall can be performed asynchronously or synchronously (using the
    optional URL parameter `sync=true`).
    You should almost always use the asynchronous version of this endpoint.
    It works by changing the system ID to be a new "random" ID, and then performs
    the delete asynchronously.
    This means that at soon as the HTTP request ends (within a few milliseconds)
    the system is invisible to users, and available for installing a new system.
    The actual delete takes up to a few minutes for large transit systems like
    the NYC Subway.
    Return code | Description
    --------------------|-------------
    `202 ACCEPTED` | For asynchronous deletes, returned if the delete is successfully triggered.
    `204 NO CONTENT` | For synchronous deletes, returned if the system was successfully deleted.
    `404 NOT FOUND` | Returned if the system does not exist.
    """
    sync = httpmanager.is_sync_request()
    systemservice.delete_by_id(system_id, error_if_not_exists=True, sync=sync)
    status = HttpStatus.NO_CONTENT if sync else HttpStatus.ACCEPTED
    return flask.Response(response="", status=status, content_type="")
@http_endpoint(
    system_endpoints, "/<system_id>/auto-update", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def set_auto_update_enabled(system_id):
    """
    Configure system auto-update
    Configure whether auto-update is enabled for
    auto-updatable feeds in a system.
    The endpoint takes a single form parameter `enabled`
    which can either be `true` or `false` (case insensitive).
    Return code | Description
    --------------------|-------------
    `204 NO CONTENT` | The configuration was applied successfully.
    `400 BAD REQUEST` | Returned if the form parameter is not provided or is invalid.
    `404 NOT FOUND` | Returned if the system does not exist.
    """
    # TODO: this should just accept a URL parameter
    enabled = flask.request.form.to_dict().get("enabled")
    if enabled is None:
        raise exceptions.InvalidInput("The form variable 'enabled' is required")
    enabled = enabled.lower()
    if enabled not in {"false", "true"}:
        raise exceptions.InvalidInput(
            "The form variable 'enabled' has to be 'true' or 'false', not '{}'".format(
                enabled
            )
        )
    systemservice.set_auto_update_enabled(system_id, enabled == "true")
    return "", HttpStatus.NO_CONTENT
| 638 | 0 | 23 |
33ce6abae939c65243a3a41bb7afe2b0e8f4f745 | 234 | py | Python | XCTF/2019 Finals/babytaint/solver.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | 1 | 2021-08-24T22:16:41.000Z | 2021-08-24T22:16:41.000Z | XCTF/2019 Finals/babytaint/solver.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | XCTF/2019 Finals/babytaint/solver.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | # xctf{35eedc512678301f582de3176d1fc81c}
payload = ''
for i in range(16):
payload += 'x%d=["Source"()[%d]];' % (i, i)
payload += 'a=['
payload += ','.join('x%d[0]' % i for i in range(16))
payload += '];"Sink"(a)'
print payload
| 19.5 | 52 | 0.57265 | # xctf{35eedc512678301f582de3176d1fc81c}
# Build the CTF payload (Python 2 script -- note the print statement below):
# x0..x15 each capture one character of "Source"() inside a one-element list,
# then a = [x0[0], ..., x15[0]] reassembles them and is passed to "Sink"(a).
payload = ''
for i in range(16):
    payload += 'x%d=["Source"()[%d]];' % (i, i)
payload += 'a=['
payload += ','.join('x%d[0]' % i for i in range(16))
payload += '];"Sink"(a)'
print payload
| 0 | 0 | 0 |
2c88e07b2599ec87bce1d6dde1049ee4ec409b53 | 512 | py | Python | fbapp/urls.py | lund5000/chirpradio | e084d53e92d897766ac19fe85eeabad68a1d7e39 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2016-01-26T08:09:59.000Z | 2019-03-23T23:27:07.000Z | fbapp/urls.py | lund5000/chirpradio | e084d53e92d897766ac19fe85eeabad68a1d7e39 | [
"ECL-2.0",
"Apache-2.0"
] | 7 | 2016-02-16T03:36:14.000Z | 2020-05-04T16:19:00.000Z | fbapp/urls.py | lund5000/chirpradio | e084d53e92d897766ac19fe85eeabad68a1d7e39 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2016-04-16T02:51:35.000Z | 2021-12-06T19:25:15.000Z | from django.conf import settings
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('',
url(r'^$', 'fbapp.views.canvas', name="fbapp.canvas"),
url(r'^page_tab$', 'fbapp.views.page_tab', name="fbapp.page_tab"),
url(r'^owa$', 'fbapp.views.open_web_app', name="fbapp.open_web_app"),
url(r'^owa/chirpradio\.webapp$', 'fbapp.views.open_web_app_manifest',
name="fbapp.open_web_app_manifest"),
url(r'^channel\.html$', 'fbapp.views.channel', name="fbapp.channel"),
)
| 42.666667 | 73 | 0.691406 | from django.conf import settings
from django.conf.urls.defaults import patterns, url
# URL routes for the Facebook app (legacy Django string-view syntax):
# canvas page, page tab, Open Web App pages/manifest, and the FB channel file.
urlpatterns = patterns('',
    url(r'^$', 'fbapp.views.canvas', name="fbapp.canvas"),
    url(r'^page_tab$', 'fbapp.views.page_tab', name="fbapp.page_tab"),
    url(r'^owa$', 'fbapp.views.open_web_app', name="fbapp.open_web_app"),
    url(r'^owa/chirpradio\.webapp$', 'fbapp.views.open_web_app_manifest',
        name="fbapp.open_web_app_manifest"),
    url(r'^channel\.html$', 'fbapp.views.channel', name="fbapp.channel"),
)
| 0 | 0 | 0 |
bff42427c2600dfea0e27fbe38b4ffeff88124fb | 3,116 | py | Python | MQ/Models/BoxCoder.py | emulhall/episodic-memory | 27bafec6e09c108f0efe5ac899eabde9d1ac40cc | [
"MIT"
] | 27 | 2021-10-16T02:39:17.000Z | 2022-03-31T11:16:11.000Z | Models/BoxCoder.py | coolbay/VSGN | 51a3fd59343820aed82879266df3be0147231cab | [
"MIT"
] | 5 | 2022-03-23T04:53:36.000Z | 2022-03-29T23:39:07.000Z | Models/BoxCoder.py | coolbay/VSGN | 51a3fd59343820aed82879266df3be0147231cab | [
"MIT"
] | 13 | 2021-11-25T19:17:29.000Z | 2022-03-25T14:01:47.000Z |
import torch
import math | 42.684932 | 91 | 0.523748 |
import torch
import math
class BoxCoder(object):
    """Convert between absolute 1-D segments and regression deltas.

    Segments (anchors, ground truths, decoded predictions) are rows of an
    (N, 2) tensor holding inclusive [start, end] coordinates.  Targets are
    the standard center/width deltas (dx scaled by 10, dw scaled by 5)
    applied to the temporal axis.

    The original implementation carried dead ``if False:`` branches for a
    4-coordinate point-regression variant; only the box-delta branches were
    ever executed, so the dead code has been removed without changing
    behaviour.
    """

    def __init__(self, opt):
        """Store the configuration object (kept for API compatibility;
        the removed point-regression branch read anchor sizes from it)."""
        self.cfg = opt

    def encode(self, gt_boxes, anchors):
        """Return (N, 2) regression targets (dx, dw) for gt/anchor pairs.

        Args:
            gt_boxes: (N, 2) tensor of ground-truth [start, end] segments.
            anchors: (N, 2) tensor of anchor [start, end] segments.
        """
        TO_REMOVE = 1  # segments are inclusive, so length = end - start + 1
        ex_length = anchors[:, 1] - anchors[:, 0] + TO_REMOVE
        ex_center = (anchors[:, 1] + anchors[:, 0]) / 2
        gt_length = gt_boxes[:, 1] - gt_boxes[:, 0] + TO_REMOVE
        gt_center = (gt_boxes[:, 1] + gt_boxes[:, 0]) / 2
        wx, ww = (10., 5.)  # scaling weights for the center / width deltas
        targets_dx = wx * (gt_center - ex_center) / ex_length
        targets_dw = ww * torch.log(gt_length / ex_length)
        return torch.stack((targets_dx, targets_dw), dim=1)

    def decode(self, preds, anchors):
        """Invert :meth:`encode`: turn (N, 2) deltas into [start, end] segments.

        Args:
            preds: (N, 2) tensor of predicted (dx, dw) deltas.
            anchors: (N, 2) tensor of anchor [start, end] segments.
        """
        anchors = anchors.to(preds.dtype)
        TO_REMOVE = 1  # same inclusive-length convention as in encode()
        ex_length = anchors[:, 1] - anchors[:, 0] + TO_REMOVE
        ex_center = (anchors[:, 1] + anchors[:, 0]) / 2
        wx, ww = (10., 5.)
        dx = preds[:, 0] / wx
        dw = preds[:, 1] / ww
        # Prevent sending too large values into torch.exp()
        dw = torch.clamp(dw, max=math.log(1000. / 16))
        pred_ctr_x = dx * ex_length + ex_center
        pred_w = torch.exp(dw) * ex_length
        pred_boxes = torch.zeros_like(preds)
        pred_boxes[:, 0] = pred_ctr_x - 0.5 * (pred_w - 1)
        pred_boxes[:, 1] = pred_ctr_x + 0.5 * (pred_w - 1)
        return pred_boxes
9e99b9353b020386db6b01e64d0da4bf91fb2d1d | 1,107 | py | Python | handle_image/handle.py | hyjalxl/spidier2 | 3ab1dec50bf03bd673665297147606a418757969 | [
"Apache-2.0"
] | null | null | null | handle_image/handle.py | hyjalxl/spidier2 | 3ab1dec50bf03bd673665297147606a418757969 | [
"Apache-2.0"
] | null | null | null | handle_image/handle.py | hyjalxl/spidier2 | 3ab1dec50bf03bd673665297147606a418757969 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# name=hu_yang_jie
#coding=utf-8
import cv2
import numpy as np
img = cv2.imread("bili.jpg") # load the input image
h, w = img.shape[:2] # image height and width
cv2.imshow("Origin", img) # show the original image
blured = cv2.blur(img,(5,5)) # mean filter to suppress noise
cv2.imshow("Blur", blured) # show the low-pass filtered image
mask = np.zeros((h+2, w+2), np.uint8) # mask is 2 px larger than the image in each dimension so the flood fill cannot spill past its non-zero border
# flood fill from the bottom-right corner to whiten the background
cv2.floodFill(blured, mask, (w-1,h-1), (255,255,255), (2,2,2),(3,3,3),8)
cv2.imshow("floodfill", blured)
# convert to grayscale
gray = cv2.cvtColor(blured,cv2.COLOR_BGR2GRAY)
cv2.imshow("gray", gray)
# define the structuring element for the morphology operations
kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(50, 50))
# opening then closing: opening removes background noise, closing fills holes inside the target
opened = cv2.morphologyEx(gray, cv2.MORPH_OPEN, kernel)
closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, kernel)
cv2.imshow("closed", closed)
# threshold to a binary image
ret, binary = cv2.threshold(closed,250,255,cv2.THRESH_BINARY)
cv2.imshow("binary", binary)
# find contours
# NOTE(review): the 3-value unpacking is the OpenCV 3.x API; OpenCV 4
# returns (contours, hierarchy) -- confirm the pinned cv2 version.
_,contours, hierarchy = cv2.findContours(binary,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# draw the contours on the original image
cv2.drawContours(img,contours,-1,(0,0,255),3)
# show the result
cv2.imshow("result", img)
cv2.waitKey(0)
cv2.destroyAllWindows() | 25.159091 | 86 | 0.722674 | # coding=utf-8
# name=hu_yang_jie
#coding=utf-8
import cv2
import numpy as np
img = cv2.imread("bili.jpg") # load the input image
h, w = img.shape[:2] # image height and width
cv2.imshow("Origin", img) # show the original image
blured = cv2.blur(img,(5,5)) # mean filter to suppress noise
cv2.imshow("Blur", blured) # show the low-pass filtered image
mask = np.zeros((h+2, w+2), np.uint8) # mask is 2 px larger than the image in each dimension so the flood fill cannot spill past its non-zero border
# flood fill from the bottom-right corner to whiten the background
cv2.floodFill(blured, mask, (w-1,h-1), (255,255,255), (2,2,2),(3,3,3),8)
cv2.imshow("floodfill", blured)
# convert to grayscale
gray = cv2.cvtColor(blured,cv2.COLOR_BGR2GRAY)
cv2.imshow("gray", gray)
# define the structuring element for the morphology operations
kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(50, 50))
# opening then closing: opening removes background noise, closing fills holes inside the target
opened = cv2.morphologyEx(gray, cv2.MORPH_OPEN, kernel)
closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, kernel)
cv2.imshow("closed", closed)
# threshold to a binary image
ret, binary = cv2.threshold(closed,250,255,cv2.THRESH_BINARY)
cv2.imshow("binary", binary)
# find contours
# NOTE(review): the 3-value unpacking is the OpenCV 3.x API; OpenCV 4
# returns (contours, hierarchy) -- confirm the pinned cv2 version.
_,contours, hierarchy = cv2.findContours(binary,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# draw the contours on the original image
cv2.drawContours(img,contours,-1,(0,0,255),3)
# show the result
cv2.imshow("result", img)
cv2.waitKey(0)
cv2.destroyAllWindows() | 0 | 0 | 0 |
57808233141949087e2b74753b28ec52afb50789 | 184 | py | Python | nxturtle/version.py | mar10/nxturtle | 12762ab7e5d95514f189aa715b09750e801293fb | [
"MIT"
] | 1 | 2020-09-02T17:43:58.000Z | 2020-09-02T17:43:58.000Z | nxturtle/version.py | TheVinhLuong102/nxturtle | 12762ab7e5d95514f189aa715b09750e801293fb | [
"MIT"
] | null | null | null | nxturtle/version.py | TheVinhLuong102/nxturtle | 12762ab7e5d95514f189aa715b09750e801293fb | [
"MIT"
] | 3 | 2016-05-16T19:09:27.000Z | 2020-09-02T17:44:01.000Z | """
Package version number.
"""
# We have to put it here, because setup.py (and easy_install) cannot import
# nxturtle before requirements are satisfied.
__version__ = "1.0.4"
| 26.285714 | 77 | 0.706522 | """
Package version number.
"""
# We have to put it here, because setup.py (and easy_install) cannot import
# nxturtle before requirements are satisfied.
__version__ = "1.0.4"
| 0 | 0 | 0 |
1f33a171260f3a0ed852b9c166dbad3fffa969b3 | 1,415 | py | Python | tests/schemas/test_schema_with_complex_types.py | slawak/dataclasses-avroschema | 04e69a176b3e72bfa0acd3edbd044ecd161b1a68 | [
"MIT"
] | null | null | null | tests/schemas/test_schema_with_complex_types.py | slawak/dataclasses-avroschema | 04e69a176b3e72bfa0acd3edbd044ecd161b1a68 | [
"MIT"
] | null | null | null | tests/schemas/test_schema_with_complex_types.py | slawak/dataclasses-avroschema | 04e69a176b3e72bfa0acd3edbd044ecd161b1a68 | [
"MIT"
] | null | null | null | import dataclasses
import datetime
import json
import typing
import uuid
from dataclasses_avroschema.schema_generator import SchemaGenerator
| 28.877551 | 83 | 0.738516 | import dataclasses
import datetime
import json
import typing
import uuid
from dataclasses_avroschema.schema_generator import SchemaGenerator
def test_schema_with_complex_types(user_advance_dataclass, user_advance_avro_json):
    """Schema generated for the advanced dataclass matches the expected JSON."""
    generator = SchemaGenerator(user_advance_dataclass, include_schema_doc=False)
    expected = json.dumps(user_advance_avro_json)
    assert generator.avro_schema() == expected
def test_schema_with_complex_types_and_defaults(
    user_advance_with_defaults_dataclass, user_advance_with_defaults_avro_json
):
    """Schema for the dataclass carrying default values matches the expected JSON."""
    generator = SchemaGenerator(
        user_advance_with_defaults_dataclass, include_schema_doc=False
    )
    expected = json.dumps(user_advance_with_defaults_avro_json)
    assert generator.avro_schema() == expected
def test_schema_with_unions_type(union_type_schema):
    # Exercises avro union generation: primitive unions, logical-type unions,
    # unions of records, and union defaults (None and a dict factory).
    class Bus:
        "A Bus"
        engine_name: str
    class Car:
        "A Car"
        engine_name: str
    class UnionSchema:
        "Some Unions"
        first_union: typing.Union[str, int]
        # NOTE(review): uuid.uuid4 is a callable, not a type; the usual type
        # annotation would be uuid.UUID -- confirm the library expects this.
        logical_union: typing.Union[datetime.datetime, datetime.date, uuid.uuid4]
        lake_trip: typing.Union[Bus, Car]
        river_trip: typing.Union[Bus, Car] = None
        mountain_trip: typing.Union[Bus, Car] = dataclasses.field(
            default_factory=lambda: {"engine_name": "honda"}
        )
    schema = SchemaGenerator(UnionSchema).avro_schema()
    assert schema == json.dumps(union_type_schema)
| 1,201 | 0 | 69 |
4fc8985147ea244f52071c25c82698f009c5ee2d | 5,398 | py | Python | spira/lne/geometry.py | cloudcalvin/spira | 2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c | [
"MIT"
] | null | null | null | spira/lne/geometry.py | cloudcalvin/spira | 2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c | [
"MIT"
] | 1 | 2021-10-17T10:18:04.000Z | 2021-10-17T10:18:04.000Z | spira/lne/geometry.py | cloudcalvin/spira | 2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c | [
"MIT"
] | null | null | null | import os
import spira
import pygmsh
import meshio
import inspect
from spira.core.lists import ElementList
# from spira.gdsii.utils import numpy_to_list
from spira import param
from spira.lne.mesh import Mesh
from spira.core.initializer import ElementalInitializer
| 29.336957 | 79 | 0.559466 | import os
import spira
import pygmsh
import meshio
import inspect
from spira.core.lists import ElementList
# from spira.gdsii.utils import numpy_to_list
from spira import param
from spira.lne.mesh import Mesh
from spira.core.initializer import ElementalInitializer
class __Geometry__(ElementalInitializer):
    """Base wrapper that owns a pygmsh OpenCASCADE geometry instance.

    Keeps bookkeeping lists for extruded entities and physical volumes and
    configures the GMSH meshing algorithm (``self.algorithm`` is supplied
    by subclasses).
    """

    def __init__(self, lcar, **kwargs):
        ElementalInitializer.__init__(self, **kwargs)
        self.extrude = []
        self.volume = []
        # A single characteristic length is used for both bounds.
        self.geom = pygmsh.opencascade.Geometry(
            characteristic_length_min=lcar,
            characteristic_length_max=lcar
        )
        self.geom.add_raw_code('Mesh.Algorithm = {};'.format(self.algorithm))
        self.geom.add_raw_code('Coherence Mesh;')
        self.mesh = None

    def __surfaces__(self):
        """Return every plane surface among this geometry's elements."""
        return [
            elem for elem in self.elements
            if isinstance(elem, pygmsh.built_in.plane_surface.PlaneSurface)
        ]
class GeometryAbstract(__Geometry__):
    """Builds pygmsh surfaces from layout polygons and meshes them via GMSH.

    ``elements`` lazily creates one labelled gmsh plane surface per polygon
    and ``create_mesh`` runs the mesher, writing ``.geo``/``.msh``/``.vtu``
    artefacts into ``<cwd>/debug/gmsh/``.
    """

    _ID = 0  # monotonically increasing id used to keep surface labels unique

    name = param.StringField()
    layer = param.IntegerField()
    dimension = param.IntegerField(default=2)
    algorithm = param.IntegerField(default=6)
    polygons = param.ElementListField()
    create_mesh = param.DataField(fdef_name='create_meshio')
    elements = param.DataField(fdef_name='create_pygmsh_elements')

    def __init__(self, lcar=0.01, **kwargs):
        super().__init__(lcar=lcar, **kwargs)

    def create_meshio(self):
        """Generate a GMSH mesh and save it in the `debug` folder.

        Returns:
            The raw pygmsh mesh tuple (points, cells, point_data, cell_data,
            field_data).
        """
        # Merge all plane surfaces into one region before meshing.
        if len(self.__surfaces__()) > 1:
            self.geom.boolean_union(self.__surfaces__())
        directory = os.getcwd() + '/debug/gmsh/'
        mesh_file = '{}{}.msh'.format(directory, self.name)
        geo_file = '{}{}.geo'.format(directory, self.name)
        vtk_file = '{}{}.vtu'.format(directory, self.name)
        if not os.path.exists(directory):
            os.makedirs(directory)
        mesh_data = pygmsh.generate_mesh(self.geom,
            verbose=False,
            dim=self.dimension,
            prune_vertices=False,
            remove_faces=False,
            geo_filename=geo_file)
        mm = meshio.Mesh(*mesh_data)
        meshio.write(mesh_file, mm)
        meshio.write(vtk_file, mm)
        return mesh_data

    def create_pygmsh_elements(self):
        """Create one labelled gmsh plane surface per layout polygon."""
        # NOTE(review): numpy_to_list is referenced here but its import is
        # commented out at the top of this file -- calling this method will
        # raise NameError until the import is restored.
        print('number of polygons {}'.format(len(self.polygons)))
        height = 0.0
        holes = None
        elems = ElementList()
        for ply in self.polygons:
            for i, points in enumerate(ply.polygons):
                pp = numpy_to_list(points, height, unit=10e-9)
                surface_label = '{}_{}_{}_{}'.format(ply.gdslayer.number,
                    ply.gdslayer.datatype,
                    GeometryAbstract._ID, i)
                gp = self.geom.add_polygon(pp, lcar=1.0,
                    make_surface=True,
                    holes=holes)
                self.geom.add_physical_surface(gp.surface, label=surface_label)
                elems += [gp.surface, gp.line_loop]
        GeometryAbstract._ID += 1
        return elems

    def extrude_surfaces(self, geom, surfaces):
        """Extrude each surface into a 3D volume element.

        NOTE(review): ``scale`` and ``polygons._id`` are undefined names and
        ``self.width`` is not a declared field -- this method cannot run as
        written; confirm the intended attributes before use.
        """
        for i, surface in enumerate(surfaces):
            width = float(self.width) * scale
            ex = self.geom.extrude(surface, [0, 0, width])
            unique_id = '{}_{}'.format(polygons._id, i)
            volume = self.geom.add_physical_volume(ex[1], unique_id)
            self.extrude.append(ex[1])
            self.volume.append(volume)

    def geom_holes(self):
        """Create gmsh surfaces for the mask polygons produced by gdsii.

        NOTE(review): ``geom``, ``line_loops`` and ``self.e`` are undefined
        here; the method looks like a partially ported snippet and needs its
        inputs wired up before it can run.
        """
        print('number of polygons {}'.format(len(self.e.polygons)))
        dim = 2
        height = 0.0
        material_stack = None
        for i, points in enumerate(self.e.polygons):
            if dim == 3:
                height = self.vertical_position(material_stack)
            pp = numpy_to_list(points, height, unit=self.e.unit)
            # BUG FIX: lowercase ``true`` was a NameError; Python's boolean
            # literal is ``True``.
            gp = geom.add_polygon(pp, lcar=1.0, make_surface=True)
            line_loops.append(gp.line_loop)

    def flat_copy(self, level=-1, commit_to_gdspy=False):
        """Geometries are leaves: a flat copy is the geometry itself."""
        return self

    def flatten(self):
        """Return this geometry as a single-element list."""
        return [self]

    def commit_to_gdspy(self, cell):
        """Geometries contribute nothing directly to the gdspy cell."""
        pass

    def transform(self, transform):
        """Transforms are a no-op for geometries; return self unchanged."""
        return self
class Geometry(GeometryAbstract):
    """Concrete geometry element; all behaviour is inherited from GeometryAbstract."""
    pass
| 1,704 | 3,302 | 123 |
183d1196e1f6f1aa96619ac36af307eec5de268e | 3,805 | py | Python | astroquery/utils/tap/conn/tests/DummyConnHandler.py | jmilou/astroquery | 06d6e6740865d0461570390726e1831ea139b558 | [
"BSD-3-Clause"
] | null | null | null | astroquery/utils/tap/conn/tests/DummyConnHandler.py | jmilou/astroquery | 06d6e6740865d0461570390726e1831ea139b558 | [
"BSD-3-Clause"
] | 2 | 2020-10-29T19:55:25.000Z | 2021-05-14T19:17:44.000Z | astroquery/utils/tap/conn/tests/DummyConnHandler.py | mevtorres/localAstroquery | 06d6e6740865d0461570390726e1831ea139b558 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
=============
TAP plus
=============
@author: Juan Carlos Segovia
@contact: juan.carlos.segovia@sciops.esa.int
European Space Astronomy Centre (ESAC)
European Space Agency (ESA)
Created on 30 jun. 2016
"""
from astroquery.utils.tap import taputils
from six.moves.urllib.parse import urlencode
| 29.726563 | 78 | 0.600526 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
=============
TAP plus
=============
@author: Juan Carlos Segovia
@contact: juan.carlos.segovia@sciops.esa.int
European Space Astronomy Centre (ESAC)
European Space Agency (ESA)
Created on 30 jun. 2016
"""
from astroquery.utils.tap import taputils
from six.moves.urllib.parse import urlencode
class DummyConnHandler(object):
    """In-memory stand-in for a TAP connection handler, used by the tests.

    Responses are registered per request string with :meth:`set_response`;
    unknown requests fall back to the default response when one is set,
    otherwise the original KeyError is re-raised after listing the known
    keys.
    """

    def __init__(self):
        self.request = None
        self.data = None
        self.fileExt = ".ext"
        self.defaultResponse = None
        self.responses = {}
        self.errorFileOutput = None
        self.errorReceivedResponse = None

    def set_default_response(self, defaultResponse):
        """Response returned when no specific response matches a request."""
        self.defaultResponse = defaultResponse

    def get_default_response(self):
        return self.defaultResponse

    def get_last_request(self):
        return self.request

    def get_last_data(self):
        return self.data

    def get_error_file_output(self):
        return self.errorFileOutput

    def get_error_received_response(self):
        return self.errorReceivedResponse

    def set_response(self, request, response):
        """Register the response to return for an exact request string."""
        self.responses[str(request)] = response

    def execute_get(self, request):
        """Simulate a GET: record the request and look up its response."""
        self.request = request
        return self.__get_response(request)

    def execute_post(self, subcontext, data):
        """Simulate a POST: the request key is subcontext + sorted(data)."""
        self.data = data
        sortedKey = self.__create_sorted_dict_key(data)
        if subcontext.find('?') == -1:
            self.request = subcontext + "?" + sortedKey
        else:
            if subcontext.endswith('?'):
                self.request = subcontext + sortedKey
            else:
                self.request = subcontext + "&" + sortedKey
        return self.__get_response(self.request)

    def dump_to_file(self, fileOutput, response):
        """Record an error-dump request instead of writing a real file."""
        self.errorFileOutput = fileOutput
        self.errorReceivedResponse = response
        # BUG FIX: the message and a tuple were passed to print() as two
        # separate arguments, so the %s placeholders never expanded; use
        # actual %-formatting.
        print("DummyConnHandler - dump to file: file: '%s', "
              "response status: %s, response msg: %s" % (
                  str(fileOutput),
                  str(response.status),
                  str(response.reason)))

    def __get_response(self, responseid):
        """Return the registered response, the default, or re-raise KeyError."""
        try:
            return self.responses[str(responseid)]
        except KeyError:
            if self.defaultResponse is not None:
                return self.defaultResponse
            print("\nNot found response for key\n\t'" + str(responseid) + "'")
            print("Available keys: ")
            if self.responses is None:
                print("\tNone available")
            else:
                for k in self.responses.keys():
                    print("\t'" + str(k) + "'")
            # Bare raise keeps the original KeyError traceback intact.
            raise

    def __create_sorted_dict_key(self, data):
        """Turn 'a=1&b=2' into the library's canonical sorted key string."""
        dictTmp = {}
        items = data.split('&')
        for i in (items):
            subItems = i.split('=')
            dictTmp[subItems[0]] = subItems[1]
        # sort dict
        return taputils.taputil_create_sorted_dict_key(dictTmp)

    def check_launch_response_status(self, response, debug,
                                     expected_response_status):
        """Return True when the response status differs from the expected one."""
        isError = False
        if response.status != expected_response_status:
            if debug:
                print("ERROR: " + str(response.status) + ": "
                      + str(response.reason))
            isError = True
        return isError

    def url_encode(self, data):
        return urlencode(data)

    def get_suitable_extension(self, headers):
        # Headers are ignored by the dummy; a fixed extension is returned.
        return self.fileExt

    def set_suitable_extension(self, ext):
        self.fileExt = ext

    def get_suitable_extension_by_format(self, output_format):
        # Output format is ignored by the dummy as well.
        return self.fileExt

    def find_header(self, headers, key):
        return taputils.taputil_find_header(headers, key)
| 2,891 | 10 | 536 |
681fd52b15338d6c2c9dd4b4bdc669825c9b4c27 | 4,658 | py | Python | WeOptPy/algorithms/sa.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | 1 | 2021-05-12T10:02:21.000Z | 2021-05-12T10:02:21.000Z | WeOptPy/algorithms/sa.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | null | null | null | WeOptPy/algorithms/sa.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | null | null | null | # encoding=utf8
"""Simulated annealing algorithm module."""
import numpy as np
from WeOptPy.algorithms.interfaces import Algorithm
__all__ = [
'SimulatedAnnealing',
'coolDelta',
'coolLinear'
]
def coolDelta(currentT, T, deltaT, nFES, **kwargs):
	r"""Constant-decrement cooling schedule.

	Args:
		currentT (float): Current temperature.
		T (float): Starting temperature (unused by this schedule).
		deltaT (float): Fixed amount subtracted from the temperature.
		nFES (int): Number of function evaluations (unused by this schedule).
		kwargs (Dict[str, Any]): Additional arguments.

	Returns:
		float: New temperature, ``currentT - deltaT``.
	"""
	newTemperature = currentT - deltaT
	return newTemperature
def coolLinear(currentT, T, deltaT, nFES, **kwargs):
	r"""Linear cooling schedule based on the evaluation budget.

	Args:
		currentT (float): Current temperature.
		T (float): Starting temperature.
		deltaT (float): Change in temperature (unused by this schedule).
		nFES (int): Number of function evaluations.
		kwargs (Dict[str, Any]): Additional arguments.

	Returns:
		float: New temperature, ``currentT - T / nFES``.
	"""
	step = T / nFES
	return currentT - step
class SimulatedAnnealing(Algorithm):
	r"""Implementation of Simulated Annealing Algorithm.

	Algorithm:
		Simulated Annealing Algorithm

	Date:
		2018

	Authors:
		Jan Popič and Klemen Berkovič

	License:
		MIT

	Reference paper:
		S. Kirkpatrick, C. D. Gelatt, M. P. Vecchi, "Optimization by Simulated Annealing", Science, 1983.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.
		delta (float): Movement for neighbour search.
		T (float): Starting temperature.
		deltaT (float): Change in temperature.
		coolingMethod (Callable): Cooling schedule used to lower the temperature.
		epsilon (float): Error value (kept for API compatibility; not read by the core loop).

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['SimulatedAnnealing', 'SA']

	@staticmethod
	def algorithm_info():
		r"""Get basic information of algorithm.

		Returns:
			str: Basic information of algorithm.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""None"""

	@staticmethod
	def type_parameters():
		r"""Get dictionary with functions for checking values of parameters.

		Returns:
			Dict[str, Callable]: Validators for delta, T, deltaT and epsilon.
		"""
		return {
			'delta': lambda x: isinstance(x, (int, float)) and x > 0,
			'T': lambda x: isinstance(x, (int, float)) and x > 0,
			'deltaT': lambda x: isinstance(x, (int, float)) and x > 0,
			'epsilon': lambda x: isinstance(x, float) and 0 < x < 1
		}

	def set_parameters(self, delta=0.5, T=2000, deltaT=0.8, coolingMethod=coolDelta, epsilon=1e-23, **ukwargs):
		r"""Set the algorithm parameters/arguments.

		Arguments:
			delta (Optional[float]): Movement for neighbour search.
			T (Optional[float]): Starting temperature.
			deltaT (Optional[float]): Change in temperature.
			coolingMethod (Optional[Callable]): Cooling schedule.
			epsilon (Optional[float]): Error value.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		ukwargs.pop('n', None)
		# Simulated annealing evolves a single solution, hence n=1.
		Algorithm.set_parameters(self, n=1, **ukwargs)
		self.delta, self.T, self.deltaT, self.cool, self.epsilon = delta, T, deltaT, coolingMethod, epsilon

	def get_parameters(self):
		r"""Get algorithm parameter values.

		Returns:
			Dict[str, Any]: Parameter values.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.getParameters`
		"""
		d = Algorithm.get_parameters(self)
		d.update({
			'delta': self.delta,
			'deltaT': self.deltaT,
			'T': self.T,
			'epsilon': self.epsilon
		})
		return d

	def init_population(self, task):
		r"""Initialize the starting solution.

		Args:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, list, dict]:
				1. Initial solution.
				2. Initial solution's fitness/objective value.
				3. Additional arguments.
				4. Additional keyword arguments ({'curT': starting temperature}).
		"""
		x = task.Lower + task.bRange * self.rand(task.D)
		curT, xfit = self.T, task.eval(x)
		return x, xfit, [], {'curT': curT}

	def run_iteration(self, task, x, xfit, xb, fxb, curT, *args, **dparams):
		r"""Core function of the algorithm.

		Args:
			task (Task): Optimization task.
			x (numpy.ndarray): Current solution.
			xfit (float): Current solution's fitness/objective value.
			xb (numpy.ndarray): Global best solution.
			fxb (float): Global best solution's fitness/objective value.
			curT (float): Current temperature.
			args (list): Additional arguments.
			dparams (dict): Additional keyword arguments.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, list, dict]:
				1. New solution.
				2. New solution's fitness/objective value.
				3. New global best solution.
				4. New global best solution's fitness/objective value.
				5. Additional arguments.
				6. Additional keyword arguments.
		"""
		# Sample a uniform neighbour of x within +/- delta/2 per dimension.
		c = task.repair(x - self.delta / 2 + self.rand(task.D) * self.delta, rnd=self.Rand)
		cfit = task.eval(c)
		deltaFit, r = cfit - xfit, self.rand()
		# Metropolis criterion: always accept an improvement; accept a worse
		# candidate with probability exp(-deltaFit / curT).
		# BUG FIX: the exponent's sign was positive, making
		# exp(deltaFit / curT) >= 1 for every worse candidate, so worsening
		# moves were always accepted (this was also fixed upstream in niapy).
		if deltaFit < 0 or r < np.exp(-deltaFit / curT): x, xfit = c, cfit
		curT = self.cool(curT, self.T, deltaT=self.deltaT, nFES=task.nFES)
		xb, fxb = self.get_best(x, xfit, xb, fxb)
		return x, xfit, xb, fxb, args, {'curT': curT}
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| 24.515789 | 108 | 0.683985 | # encoding=utf8
"""Simulated annealing algorithm module."""
import numpy as np
from WeOptPy.algorithms.interfaces import Algorithm
__all__ = [
'SimulatedAnnealing',
'coolDelta',
'coolLinear'
]
def coolDelta(currentT, T, deltaT, nFES, **kwargs):
	r"""Calculate new temperature by differences.
	Args:
		currentT (float): Current temperature.
		T (float): Starting temperature (unused by this schedule).
		deltaT (float): Fixed decrement applied to the temperature.
		nFES (int): Number of function evaluations (unused by this schedule).
		kwargs (Dict[str, Any]): Additional arguments.
	Returns:
		float: New temperature.
	"""
	return currentT - deltaT
def coolLinear(currentT, T, deltaT, nFES, **kwargs):
	r"""Calculate temperature with linear function.
	Args:
		currentT (float): Current temperature.
		T (float): Starting temperature.
		deltaT (float): Change in temperature (unused by this schedule).
		nFES (int): Number of function evaluations.
		kwargs (Dict[str, Any]): Additional arguments.
	Returns:
		float: New temperature (``currentT - T / nFES``).
	"""
	return currentT - T / nFES
class SimulatedAnnealing(Algorithm):
	r"""Implementation of Simulated Annealing Algorithm.

	Algorithm:
		Simulated Annealing Algorithm

	Date:
		2018

	Authors:
		Jan Popič and Klemen Berkovič

	License:
		MIT

	Reference paper:
		S. Kirkpatrick, C. D. Gelatt, M. P. Vecchi, "Optimization by Simulated Annealing", Science, 1983.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.
		delta (float): Movement for neighbour search.
		T (float): Starting temperature.
		deltaT (float): Change in temperature.
		coolingMethod (Callable): Cooling schedule used to lower the temperature.
		epsilon (float): Error value (kept for API compatibility; not read by the core loop).

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['SimulatedAnnealing', 'SA']

	@staticmethod
	def algorithm_info():
		r"""Get basic information of algorithm.

		Returns:
			str: Basic information of algorithm.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""None"""

	@staticmethod
	def type_parameters():
		r"""Get dictionary with functions for checking values of parameters.

		Returns:
			Dict[str, Callable]: Validators for delta, T, deltaT and epsilon.
		"""
		return {
			'delta': lambda x: isinstance(x, (int, float)) and x > 0,
			'T': lambda x: isinstance(x, (int, float)) and x > 0,
			'deltaT': lambda x: isinstance(x, (int, float)) and x > 0,
			'epsilon': lambda x: isinstance(x, float) and 0 < x < 1
		}

	def set_parameters(self, delta=0.5, T=2000, deltaT=0.8, coolingMethod=coolDelta, epsilon=1e-23, **ukwargs):
		r"""Set the algorithm parameters/arguments.

		Arguments:
			delta (Optional[float]): Movement for neighbour search.
			T (Optional[float]): Starting temperature.
			deltaT (Optional[float]): Change in temperature.
			coolingMethod (Optional[Callable]): Cooling schedule.
			epsilon (Optional[float]): Error value.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		ukwargs.pop('n', None)
		# Simulated annealing evolves a single solution, hence n=1.
		Algorithm.set_parameters(self, n=1, **ukwargs)
		self.delta, self.T, self.deltaT, self.cool, self.epsilon = delta, T, deltaT, coolingMethod, epsilon

	def get_parameters(self):
		r"""Get algorithm parameter values.

		Returns:
			Dict[str, Any]: Parameter values.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.getParameters`
		"""
		d = Algorithm.get_parameters(self)
		d.update({
			'delta': self.delta,
			'deltaT': self.deltaT,
			'T': self.T,
			'epsilon': self.epsilon
		})
		return d

	def init_population(self, task):
		r"""Initialize the starting solution.

		Args:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, list, dict]:
				1. Initial solution.
				2. Initial solution's fitness/objective value.
				3. Additional arguments.
				4. Additional keyword arguments ({'curT': starting temperature}).
		"""
		x = task.Lower + task.bRange * self.rand(task.D)
		curT, xfit = self.T, task.eval(x)
		return x, xfit, [], {'curT': curT}

	def run_iteration(self, task, x, xfit, xb, fxb, curT, *args, **dparams):
		r"""Core function of the algorithm.

		Args:
			task (Task): Optimization task.
			x (numpy.ndarray): Current solution.
			xfit (float): Current solution's fitness/objective value.
			xb (numpy.ndarray): Global best solution.
			fxb (float): Global best solution's fitness/objective value.
			curT (float): Current temperature.
			args (list): Additional arguments.
			dparams (dict): Additional keyword arguments.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, list, dict]:
				1. New solution.
				2. New solution's fitness/objective value.
				3. New global best solution.
				4. New global best solution's fitness/objective value.
				5. Additional arguments.
				6. Additional keyword arguments.
		"""
		# Sample a uniform neighbour of x within +/- delta/2 per dimension.
		c = task.repair(x - self.delta / 2 + self.rand(task.D) * self.delta, rnd=self.Rand)
		cfit = task.eval(c)
		deltaFit, r = cfit - xfit, self.rand()
		# Metropolis criterion: always accept an improvement; accept a worse
		# candidate with probability exp(-deltaFit / curT).
		# BUG FIX: the exponent's sign was positive, making
		# exp(deltaFit / curT) >= 1 for every worse candidate, so worsening
		# moves were always accepted (this was also fixed upstream in niapy).
		if deltaFit < 0 or r < np.exp(-deltaFit / curT): x, xfit = c, cfit
		curT = self.cool(curT, self.T, deltaT=self.deltaT, nFES=task.nFES)
		xb, fxb = self.get_best(x, xfit, xb, fxb)
		return x, xfit, xb, fxb, args, {'curT': curT}
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| 0 | 0 | 0 |
ba98afdb26c78c2e50fad1d16a3cc2b5bb4d30d1 | 2,761 | py | Python | bot.py | esjimenezro/trading-bot | bbffd35285da2bafd616b0fcc4e15ae9d250ac45 | [
"MIT"
] | null | null | null | bot.py | esjimenezro/trading-bot | bbffd35285da2bafd616b0fcc4e15ae9d250ac45 | [
"MIT"
] | null | null | null | bot.py | esjimenezro/trading-bot | bbffd35285da2bafd616b0fcc4e15ae9d250ac45 | [
"MIT"
] | null | null | null | import sys
sys.path.append("../config_files")
import mysql.connector
from settings_project import CONFIG_MYSQL
import pandas as pd
import numpy as np
import time
# DEFINE FUNCTIONS
# NOTE(review): load_bitso_data_top500() and sma() are referenced below but
# are not defined in this chunk -- confirm their definitions are in scope.
# COMPUTE MOVING AVERAGES
# INFINITE SIGNAL LOOP, PAUSING 10 SECONDS PER ITERATION
while True:
    # Load data
    datos = load_bitso_data_top500(CONFIG_MYSQL)
    # Sort oldest-first and renumber the index
    datos = datos.reindex(index=datos.index[::-1])
    datos.reset_index(inplace=True, drop=True)
    # Short and long simple moving averages of the trade price
    datos['mv20'] = sma(datos.price, 10)
    datos['mv160'] = sma(datos.price, 60)
    # Spread between the two moving averages
    datos['alpha'] = datos['mv20'] - datos['mv160']
    # Sign of the spread (+1 above, -1 below)
    datos['alpha_bin'] = datos['alpha'].apply(np.sign)
    # NOTE(review): this crossover pattern (1, -1, 1, 1 newest-to-oldest)
    # differs from the one used in the forward test below -- confirm which
    # is intended.
    if ((datos['alpha_bin'].iloc[-1] == 1) & (datos['alpha_bin'].shift(1).iloc[-1] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posición Larga--')
        time.sleep(10)
    elif ((datos['alpha_bin'].iloc[-1] == -1) & (datos['alpha_bin'].shift(1).iloc[-1] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posicion Corta--')
        time.sleep(10)
    else:
        print('Sin Posición')
        time.sleep(10)
# Forward Test
# NOTE(review): the loop above never terminates, so everything below is
# unreachable in this script.
datos = load_bitso_data_top500(CONFIG_MYSQL)
# Sort oldest-first and renumber the index
datos = datos.reindex(index=datos.index[::-1])
datos.reset_index(inplace=True, drop=True)
# Short and long simple moving averages of the trade price
datos['mv20'] = sma(datos.price, 10)
datos['mv160'] = sma(datos.price, 60)
# Spread between the two moving averages
datos['alpha'] = datos['mv20'] - datos['mv160']
# Sign of the spread
datos['alpha_bin'] = datos['alpha'].apply(np.sign)
longitud_arreglo = len(datos['alpha_bin'])
# Walk the series from the oldest bar (i == length) to the newest (i == 1)
for j in range(len(datos['alpha_bin'])):
    i = longitud_arreglo - j
    if ((datos['alpha_bin'].iloc[-i] == 1) & (datos['alpha_bin'].shift(1).iloc[-i] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == -1) & (datos['alpha_bin'].shift(3).iloc[-i] == -1)):
        print('--Posición Larga--', i)
        time.sleep(1)
    # NOTE(review): this second `if` is probably meant to be `elif`; as
    # written, the `else` below prints 'Sin Posición' even right after the
    # long-entry branch above fires.
    if ((datos['alpha_bin'].iloc[-i] == -1) & (datos['alpha_bin'].shift(1).iloc[-i] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == 1) & (datos['alpha_bin'].shift(3).iloc[-i] == 1)):
        print('--Posicion Corta--', i)
        time.sleep(1)
    else:
        print('Sin Posición', i)
time.sleep(1) | 30.01087 | 105 | 0.6092 | import sys
sys.path.append("../config_files")
import mysql.connector
from settings_project import CONFIG_MYSQL
import pandas as pd
import numpy as np
import time
#DEFINIMOS FUNCIONES
def load_bitso_data_top500(CONFIG_MYSQL):
    """Fetch the most recent trades from the bitso_api.bitso_mxn table.

    Args:
        CONFIG_MYSQL: dict of keyword arguments for mysql.connector.connect.

    Returns:
        pandas.DataFrame with the newest rows first (ordered by tid DESC).
        Note: despite the name, the query's LIMIT is 5000 rows.
    """
    # BUG FIX: mysql.connector is a module, not a callable; the connection
    # factory is mysql.connector.connect(...).
    conn = mysql.connector.connect(**CONFIG_MYSQL)
    try:
        query = """SELECT
        *
    FROM
        bitso_api.bitso_mxn
    ORDER BY
        tid DESC
    LIMIT 5000"""
        extraction = pd.read_sql(query, conn)
    finally:
        # The original leaked the connection (and an unused cursor) on
        # every call; always release it.
        conn.close()
    return extraction
# CALCULAR MEDIAS MOVILES
def sma(df, d):
    """Simple moving average of *df* over a *d*-sample rolling window.

    Returns the rolling mean with the leading NaN rows dropped, so the
    result keeps the original index labels but is d-1 rows shorter.
    """
    return df.rolling(d).mean().dropna()
# INFINITE SIGNAL LOOP, PAUSING 10 SECONDS PER ITERATION
while True:
    # Load data
    datos = load_bitso_data_top500(CONFIG_MYSQL)
    # Sort oldest-first and renumber the index
    datos = datos.reindex(index=datos.index[::-1])
    datos.reset_index(inplace=True, drop=True)
    # Short and long simple moving averages of the trade price
    datos['mv20'] = sma(datos.price, 10)
    datos['mv160'] = sma(datos.price, 60)
    # Spread between the two moving averages
    datos['alpha'] = datos['mv20'] - datos['mv160']
    # Sign of the spread (+1 above, -1 below)
    datos['alpha_bin'] = datos['alpha'].apply(np.sign)
    # NOTE(review): this crossover pattern (1, -1, 1, 1 newest-to-oldest)
    # differs from the one used in the forward test below -- confirm which
    # is intended.
    if ((datos['alpha_bin'].iloc[-1] == 1) & (datos['alpha_bin'].shift(1).iloc[-1] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posición Larga--')
        time.sleep(10)
    elif ((datos['alpha_bin'].iloc[-1] == -1) & (datos['alpha_bin'].shift(1).iloc[-1] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posicion Corta--')
        time.sleep(10)
    else:
        print('Sin Posición')
        time.sleep(10)
# Forward Test: replay the signal over the historical window
# NOTE(review): the infinite loop above never terminates, so this section
# is unreachable in this script.
datos = load_bitso_data_top500(CONFIG_MYSQL)
# Sort oldest-first and renumber the index
datos = datos.reindex(index=datos.index[::-1])
datos.reset_index(inplace=True, drop=True)
# Short and long simple moving averages of the trade price
datos['mv20'] = sma(datos.price, 10)
datos['mv160'] = sma(datos.price, 60)
# Spread between the two moving averages
datos['alpha'] = datos['mv20'] - datos['mv160']
# Sign of the spread
datos['alpha_bin'] = datos['alpha'].apply(np.sign)
longitud_arreglo = len(datos['alpha_bin'])
# Walk the series from the oldest bar (i == length) to the newest (i == 1)
for j in range(len(datos['alpha_bin'])):
    i = longitud_arreglo - j
    if ((datos['alpha_bin'].iloc[-i] == 1) & (datos['alpha_bin'].shift(1).iloc[-i] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == -1) & (datos['alpha_bin'].shift(3).iloc[-i] == -1)):
        print('--Posición Larga--', i)
        time.sleep(1)
    # NOTE(review): this second `if` is probably meant to be `elif`; as
    # written, the `else` below prints 'Sin Posición' even right after the
    # long-entry branch above fires.
    if ((datos['alpha_bin'].iloc[-i] == -1) & (datos['alpha_bin'].shift(1).iloc[-i] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == 1) & (datos['alpha_bin'].shift(3).iloc[-i] == 1)):
        print('--Posicion Corta--', i)
        time.sleep(1)
    else:
        print('Sin Posición', i)
time.sleep(1) | 313 | 0 | 45 |
711587f58366a89e232d7627e3317ebe481064d0 | 1,812 | py | Python | blog/tests/test_models.py | broadsinatlanta/higashi-hiroshima-tours- | 5ae44056d4cb577eab2e5a0525a330b3ac19e122 | [
"MIT"
] | null | null | null | blog/tests/test_models.py | broadsinatlanta/higashi-hiroshima-tours- | 5ae44056d4cb577eab2e5a0525a330b3ac19e122 | [
"MIT"
] | 14 | 2020-02-12T00:00:09.000Z | 2022-03-11T23:44:27.000Z | blog/tests/test_models.py | broadsinatlanta/higashi-hiroshima-tours- | 5ae44056d4cb577eab2e5a0525a330b3ac19e122 | [
"MIT"
] | null | null | null | from django.test import TestCase
# from django.contrib.auth.models import User
from blog.models import Post, Comment
import blog.tests.helper as h
| 29.704918 | 101 | 0.662252 | from django.test import TestCase
# from django.contrib.auth.models import User
from blog.models import Post, Comment
import blog.tests.helper as h
class CommentTestCase(TestCase):
    # Tests for the Comment model: creation, __str__, and admin-flag toggling.
    def setUp(self):
        self.user = h.create_user()
        self.post = h.create_post(self.user)
        self.comment = h.create_comment(self.post)
        # NOTE(review): self.data is never used by the tests in this class.
        self.data = self.post.__dict__
    def test_can_make_post(self):
        # NOTE(review): despite the name, this asserts Comment creation.
        self.assertIsInstance(self.comment, Comment)
        self.assertEqual(self.comment.__str__(), f"{self.comment.commenter}: {self.comment.comment}")
    def test_comment_admin_and_unadmin(self):
        # admin()/unadmin() should toggle the by_admin flag.
        self.assertFalse(self.comment.by_admin)
        self.comment.admin()
        self.assertTrue(self.comment.by_admin)
        self.comment.unadmin()
        self.assertFalse(self.comment.by_admin)
class PostTestCase(TestCase):
def setUp(self):
self.user = h.create_user()
self.post = h.create_post(self.user)
def test_can_make_post(self):
self.assertIsInstance(self.post, Post)
self.assertEqual(self.post.__str__(), self.post.title)
self.assertEqual(self.post.__repr__(), f'{self.post.title} by {self.post.author}')
def test_post_get_absolute_url(self):
self.assertEqual(
self.post.get_absolute_url(),
'/blog/post/1/',
)
def test_post_pin_and_unpin(self):
self.assertFalse(self.post.pinned)
self.post.pin()
self.assertTrue(self.post.pinned)
self.post.unpin()
self.assertFalse(self.post.pinned)
def test_post_admin_and_unadmin(self):
self.assertFalse(self.post.admin_post)
self.post.admin()
self.assertTrue(self.post.admin_post)
self.post.unadmin()
self.assertFalse(self.post.admin_post)
| 1,372 | 19 | 270 |
19fb02b9eddeb85e1565a9aaff760be0fe27b29c | 9,391 | py | Python | mud/models/player.py | erwanaubry/alamud_IUT_Escape | cc9e77203245a9b933300edc2efb9bd5fcd8abc3 | [
"Unlicense"
] | null | null | null | mud/models/player.py | erwanaubry/alamud_IUT_Escape | cc9e77203245a9b933300edc2efb9bd5fcd8abc3 | [
"Unlicense"
] | null | null | null | mud/models/player.py | erwanaubry/alamud_IUT_Escape | cc9e77203245a9b933300edc2efb9bd5fcd8abc3 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Denys Duchier, IUT d'Orléans
#==============================================================================
import mud.game
from .thing import Thing
from .mixins.containing import Containing
from .location import Location
import queue
from tornado.ioloop import IOLoop
| 35.304511 | 88 | 0.485571 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Denys Duchier, IUT d'Orléans
#==============================================================================
import mud.game
from .thing import Thing
from .mixins.containing import Containing
from .location import Location
import queue
from tornado.ioloop import IOLoop
class Player(Containing, Thing):
    """A connected player: a Thing that can contain items (its inventory).

    Players are interned by name: constructing a Player whose name is already
    registered in mud.game.GAME.players returns the existing instance (see
    __new__/__init__), so a reconnecting user keeps the same object.
    """

    def __new__(cls, name=None, **kargs):
        # Per-name singleton: reuse the registered instance when one exists.
        player = mud.game.GAME.players.get(name, None)
        if player is not None:
            return player
        return super(Player, cls).__new__(cls)

    #--------------------------------------------------------------------------
    # initialization
    #--------------------------------------------------------------------------
    def __init__(self, name=None, **kargs):
        if hasattr(self, "name"):  # check if the player has already been initialized
            return                 # in which case, nothing more needs to be done
        kargs["id"] = pid = "player__" + name
        super().__init__(**kargs)  # otherwise, initialize base classes
        GAME = mud.game.GAME
        GAME.players[name] = self                        # save player in game dict
        self.transcript = GAME.transcripts.lookup(name)  # and add appropriate attributes
        self.name = name
        self.yaml = {"id": pid, "name": name}
        user = mud.game.GAME.users[name]
        self.gender = user["gender"]
        self.description = user["description"]

    #--------------------------------------------------------------------------
    # initialization from YAML data
    #--------------------------------------------------------------------------
    def init_from_yaml(self, data, world):
        super().init_from_yaml(data, world)
        self.add_name(self.name)

    def update_from_yaml(self, data, world):
        super().update_from_yaml(data, world)

    #--------------------------------------------------------------------------
    # API for saving the dynamic part of objects to YAML (via JSON)
    #--------------------------------------------------------------------------
    def archive_into(self, obj):
        super().archive_into(obj)

    #--------------------------------------------------------------------------
    # model API
    #--------------------------------------------------------------------------
    def is_player(self):
        return True

    def __str__(self):
        return self.name

    def can_see(self):
        """True when the player can see in the current location:
        the room is lit, the player has night vision, or a lit light
        source is reachable."""
        if not self.container().has_prop("dark"):
            return True
        if self.has_prop("power-to-see-in-the-dark"):
            return True
        if self.find_for_light(prop="light-on"):
            return True
        return False

    def _has_prop_can_see(self):
        return self.can_see()

    def all(self):
        """Iterate over everything attached to the player: contents then parts."""
        yield from self.contents()
        yield from self.parts()

    def is_alive(self):
        # A player with no container has been removed from the world.
        return bool(self.container())

    def noun_the(self):
        return self.name

    def noun_a(self):
        return self.name

    #--------------------------------------------------------------------------
    # API for sending messages back to the user through his websocket
    #--------------------------------------------------------------------------
    def _send(self, msg):
        # Deliver asynchronously on the IOLoop; also archive to the transcript
        # (except "death" messages, which are transient).
        ws = getattr(self, "websocket", None)
        if ws:
            IOLoop.current().add_callback(ws.write_message, msg)
        if msg["type"] != "death":
            self.transcript.append(msg)

    def send_echo(self, html):
        """sends back the commands as received."""
        self._send({"type": "echo", "html": html})

    def send_error(self, html):
        """sends an error message for a command that was not understood
        or could not be executed."""
        self._send({"type": "error", "html": html})

    def send_result(self, html):
        """sends a description that is a consequence from the user's last
        action."""
        self._send({"type": "result", "html": html})

    def send_info(self, html):
        """sends a description for an event not initiated by the user.
        for example, for actions of players in the same location."""
        self._send({"type": "info", "html": html})

    def reset(self):
        from mud.events import ResetEvent
        super().reset()
        ResetEvent(self).execute()

    #--------------------------------------------------------------------------
    # find API
    # when the player issues an order, this order will refer to objects by name
    # or make assumptions about the existence of objects with a certain
    # property, etc... The MUD engine needs to find such objects that are
    # implicitly refered to. However, different actions will use different
    # strategies (look in different places) to find such objects. Below are
    # functions for performing the search in different use cases.
    #--------------------------------------------------------------------------
    def _make_find_pred(self, kargs):
        """create a function to test whether an object matches the given
        criteria."""
        test = kargs.get("test")   # a function (optional)
        name = kargs.get("name")   # a name (optional)
        prop = kargs.get("prop")   # a property (optional)
        props= kargs.get("props")  # a list of properties (optional)
        def pred(x):               # the new testing predicate
            # BUG FIX: the original evaluated the undefined name `text(x)`
            # here, raising NameError whenever a `test` callable was supplied.
            return (((not test) or test(x)) and
                    ((not name) or x.has_name(name)) and
                    ((not prop) or x.has_prop(prop)) and
                    ((not props) or x.has_props(props)))
        return pred

    def find_for_use(self, **kargs):
        """find an object that you can use/drop:
        - in your inventory"""
        pred = self._make_find_pred(kargs)
        for x in self.all():
            if pred(x):
                return x
        return None

    def find_for_operate(self, **kargs):
        """find an object that you can operate on:
        - in your inventory
        - in your immediate surroundings"""
        pred = self._make_find_pred(kargs)
        for x in self.all():
            if pred(x):
                return x
        c = self.container()
        parts = []
        if c is not None:
            for x in c.all():
                if pred(x):
                    return x
                parts.append(x)
        # breadth-first descent into the parts of surrounding objects
        while parts:
            l = parts
            parts = []
            for x in l:
                for y in x.parts():
                    if pred(y):
                        return y
                    parts.append(y)
        return None

    def find_for_take(self, **kargs):
        """find an object that you can take:
        - in your surroundings
        - recursively inside open containers of your surroundings"""
        pred = self._make_find_pred(kargs)
        cont = self.container()
        if cont is None:
            return None
        q = queue.Queue()
        q.put(cont)
        while not q.empty():
            cont = q.get()
            for x in cont.all():
                if pred(x):
                    return x
                elif x is self:
                    # also search the player's own inventory
                    q.put(x)
                elif isinstance(x, Containing) and \
                     not isinstance(x, Player) and \
                     not x.has_prop("closed"):
                    q.put(x)
        return None

    def find_for_light(self, **kargs):
        """find an object that can light your surroundings:
        - in your inventory
        - in your surroundings
        - or recursively in outer containers (unless you find that's
          closed and you can't look outside any further
        - or carried by people in your surroundings
        - or recursively by people in outer containers (that you
          can reach)"""
        pred = self._make_find_pred(kargs)
        for x in self.contents():
            if pred(x):
                return x
        q = queue.Queue()
        c = self.container()
        while c:
            q.put(c)
            c = c.is_container() and c.container()
        while not q.empty():
            c = q.get()
            for x in c.all():
                if pred(x):
                    return x
                if isinstance(x, Player):
                    for y in x.all():
                        if pred(y):
                            return y
        return None

    def find_for_go(self, **kargs):
        """find an exit in your surroundings."""
        c = self.container()
        if not c or not isinstance(c, Location):
            return None
        pred = self._make_find_pred(kargs)
        for x in c.exits():
            if pred(x):
                return x

    # resolve_* wrap the find_* strategies with a visibility check.
    def resolve_for_use(self, **kargs):
        return self.find_for_use(**kargs)

    def resolve_for_operate(self, **kargs):
        if self.can_see():
            return self.find_for_operate(**kargs)
        else:
            return self.find_for_use(**kargs)

    def resolve_for_take(self, **kargs):
        if self.can_see():
            return self.find_for_take(**kargs)
        else:
            return None

    def resolve_for_go(self, **kargs):
        if self.can_see():
            return self.find_for_go(**kargs)
        else:
            return None
ba1d80b3a90811062c59fdbc4b11f2783bbedce3 | 3,057 | py | Python | pipeline.py | alfiopuglisi/pipeline | bb0774cff8845c683f7e38475885a6e74e2d86ee | [
"MIT"
] | 4 | 2021-02-07T23:07:38.000Z | 2021-09-29T09:35:23.000Z | pipeline.py | alfiopuglisi/pipeline | bb0774cff8845c683f7e38475885a6e74e2d86ee | [
"MIT"
] | null | null | null | pipeline.py | alfiopuglisi/pipeline | bb0774cff8845c683f7e38475885a6e74e2d86ee | [
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
Module for pipelining composition.
value >> func1 >> func2 >> func3 ...
Pipelines are expressions whose resulting value can be assigned:
result = value >> func1 >> func2
Rules:
* First value can be any python value
* Functions must be chained with the '>>' operator.
* All functions must be built as attributes of a Pipeline object and must accept
one argument, that will be set using the pipelined value.
Any additional arguments must be specified in the pipeline and the value will be added
as the last argument.
* Generators are allowed. Using a generator will turn the value being pipelined
into a generator object, meaning that subsequent pipeline steps must be able
to consume the values (for example with p.list). Multiple generators will be
automatically chained, and if the last step is a generator, the whole expression
becomes a single generator ready for action!
Examples:
from pipeline import p
# This pipeline has a result of 3
'foo' >> p.len
# This pipeline chains filters and maps objects, and calls list() on them
# at the end to execute them. The result will be [1, 9, 25, 49, 81]
range(10) >> p.filter(lambda i : i%2) >> p.map(lambda i : i*i) >> p.list
# If you already have a function object (or want to define one with lambda),
# pass it as a parameter to p():
'foo' >> p(lambda x: x.upper())
'foo' >> p('The word was {}'.format)
# if imported symbols are used, they must be passed
# to the Pipeline constructor. This example counts
# the links in the python.org page, but since 'findall'
#'is imported, we must build a Pipeline object using
# the globals() array:
from pipeline import Pipeline
from urllib.request import urlopen
from re import findall
p = Pipeline(globals())
url = 'http://python.org'
urlopen(url).read() >> p.findall(b'href="') >> p.len >> p('{} hrefs'.format)
Generator support using the special "p.value" keyword:
range(10) >> p(x*2 for x in p.value if x%2==0) >> p(x*3 for x in p.value)
The result will be a generator, that is, nothing is executed until
the final generator will be asked to produce the values!
'''
import hack
import types
# Module-level convenience instance: `from pipeline import p`.
# NOTE(review): Pipeline is not defined in this fragment as shown —
# presumably defined elsewhere in the full module; TODO confirm.
p = Pipeline()
p.value = '' # Dummy iterable
# __oOo__
| 28.839623 | 90 | 0.66634 | #!/usr/bin/env python
'''
Module for pipelining composition.
value >> func1 >> func2 >> func3 ...
Pipelines are expressions whose resulting value can be assigned:
result = value >> func1 >> func2
Rules:
* First value can be any python value
* Functions must be chained with the '>>' operator.
* All functions must be built as attributes of a Pipeline object and must accept
one argument, that will be set using the pipelined value.
Any additional arguments must be specified in the pipeline and the value will be added
as the last argument.
* Generators are allowed. Using a generator will turn the value being pipelined
into a generator object, meaning that subsequent pipeline steps must be able
to consume the values (for example with p.list). Multiple generators will be
automatically chained, and if the last step is a generator, the whole expression
becomes a single generator ready for action!
Examples:
from pipeline import p
# This pipeline has a result of 3
'foo' >> p.len
# This pipeline chains filters and maps objects, and calls list() on them
# at the end to execute them. The result will be [1, 9, 25, 49, 81]
range(10) >> p.filter(lambda i : i%2) >> p.map(lambda i : i*i) >> p.list
# If you already have a function object (or want to define one with lambda),
# pass it as a parameter to p():
'foo' >> p(lambda x: x.upper())
'foo' >> p('The word was {}'.format)
# if imported symbols are used, they must be passed
# to the Pipeline constructor. This example counts
# the links in the python.org page, but since 'findall'
#'is imported, we must build a Pipeline object using
# the globals() array:
from pipeline import Pipeline
from urllib.request import urlopen
from re import findall
p = Pipeline(globals())
url = 'http://python.org'
urlopen(url).read() >> p.findall(b'href="') >> p.len >> p('{} hrefs'.format)
Generator support using the special "p.value" keyword:
range(10) >> p(x*2 for x in p.value if x%2==0) >> p(x*3 for x in p.value)
The result will be a generator, that is, nothing is executed until
the final generator will be asked to produce the values!
'''
import hack
import types
class Pipeline():
    """Factory for pipeline steps.

    Attribute access (p.len) resolves the name in the configured namespace
    and wraps it; calling (p(fn)) wraps an explicit callable or generator.
    """

    def __init__(self, globals_=None):
        # Namespace used by __getattr__ to resolve names (e.g. globals()
        # of the caller); None falls back to eval's default lookup.
        self._globals = globals_

    def __getattr__(self, name):
        # Resolve the attribute name as a Python expression in the
        # configured namespace, then wrap the result as a pipeline step.
        # NOTE: relies on eval, so `name` must come from trusted code.
        resolved = eval(name, self._globals)
        return _Dispatch(resolved)

    def __call__(self, f):
        # Wrap an already-resolved callable (or generator) as a step.
        return _Dispatch(f)
class _Dispatch():
def __init__(self, f, *args, **kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __rrshift__(self, incoming_value):
if type(self.f) == types.GeneratorType:
hack.replace_generator_sequence(self.f, incoming_value)
return self.f
else:
return self.f(*self.args, incoming_value, **self.kwargs)
def __call__(self, *args, **kwargs):
return _Dispatch(self.f, *args, **kwargs)
# Shared module-level pipeline object: `from pipeline import p`.
p = Pipeline()
# Placeholder so generator expressions like `(x for x in p.value)` can be
# written before _Dispatch splices in the real incoming sequence at `>>` time.
p.value = '' # Dummy iterable
# __oOo__
| 532 | -7 | 207 |
0778d7f2ea48b9691400a4b10a602762013b70cf | 2,995 | py | Python | scripts/real/cat_img.py | ZJU-Robotics-Lab/CICT | ff873a03ab03d9113b8db96d26246939bb5da0d4 | [
"MIT"
] | 12 | 2021-02-09T05:08:36.000Z | 2022-02-24T07:51:30.000Z | scripts/real/cat_img.py | ZJU-Robotics-Lab/CICT | ff873a03ab03d9113b8db96d26246939bb5da0d4 | [
"MIT"
] | null | null | null | scripts/real/cat_img.py | ZJU-Robotics-Lab/CICT | ff873a03ab03d9113b8db96d26246939bb5da0d4 | [
"MIT"
] | 6 | 2021-03-30T06:30:13.000Z | 2022-03-01T14:15:00.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import glob
from os.path import join, dirname
sys.path.insert(0, join(dirname(__file__), '..'))
sys.path.insert(0, join(dirname(__file__), '../..'))
import cv2
import numpy as np
if __name__ == '__main__':
    # Scale factor for the navigation thumbnail pasted into the top-right corner.
    rate = 1.25
    # Scale factor for the costmap pasted into the top-left corner.
    rate2 = 1.0
    dataset = {}
    fps = 30
    video_size = (1280, 720)
    # Output video assembled from the annotated first-person frames.
    videoWriter = cv2.VideoWriter("/media/wang/Data/video/first-person/2.mp4", cv2.VideoWriter_fourcc(*'MJPG'), fps, video_size)
    # Index every recording's timestamp lists up front.
    # NOTE(review): recordings 4 and 5 are loaded here but never rendered below.
    for index in [2,4,5]:
        img_list, pcd_list, nav_list, cost_list, out_list = read_files(index)
        dataset[index] = {'img_list':img_list, 'pcd_list':pcd_list, 'nav_list':nav_list, 'cost_list':cost_list, 'out_list':out_list}
    for index in [2]:
        choose_dataset = dataset[index]
        for ts in choose_dataset['img_list']:
            img = cv2.imread('/media/wang/Data/video/data'+str(index)+'/output/'+ts+'.png')
            #print(img.shape) #(720, 1280, 3)
            if img is None: continue
            #img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
            # Pair the frame with the nearest-in-time nav and cost images.
            nav_ts = find_nn(ts, choose_dataset['nav_list'])
            cost_ts = find_nn(ts, choose_dataset['cost_list'])
            nav = cv2.imread('/media/wang/Data/video/data'+str(index)+'/nav/'+nav_ts+'.png')
            costmap = cv2.imread('/media/wang/Data/video/data'+str(index)+'/cost/'+cost_ts+'.png')
            nav = cv2.cvtColor(nav, cv2.COLOR_BGR2RGB) #(160, 200, 3)
            #input_img = get_img(img, nav)
            # Overlay the resized nav map into the top-right corner of the frame...
            nav = cv2.resize(nav, (int(200*rate), int(rate*160)))
            img[0:int(rate*160), -int(200*rate):] = nav
            # ...and the costmap into the top-left (assumes the costmap image
            # is 200x400 pixels — TODO confirm against the cost/ folder).
            img[0:int(rate2*200), 0:int(400*rate2)] = costmap
            cv2.imshow('img', img)
            videoWriter.write(img)
            #cv2.imshow('costmap', costmap)
            cv2.waitKey(1)
    cv2.destroyAllWindows()
    videoWriter.release()
| 34.034091 | 132 | 0.6 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import glob
from os.path import join, dirname
sys.path.insert(0, join(dirname(__file__), '..'))
sys.path.insert(0, join(dirname(__file__), '../..'))
import cv2
import numpy as np
def read_files(index):
    """Collect the timestamp stems for one recording.

    Looks under /media/wang/Data/video/data<index> and returns five sorted
    lists of file-name stems (name without directory and without the
    4-character extension, i.e. the timestamp) for the img, lidar, nav,
    cost and output sub-folders. Missing folders yield empty lists.

    :param index: suffix of the dataset directory (int in practice)
    :return: (img_list, pcd_list, nav_list, cost_list, out_list)
    """
    file_path = '/media/wang/Data/video/data' + str(index)

    def _stems(subdir, pattern):
        # One glob per folder; strip directory and the '.png'/'.npy' suffix.
        return sorted(name.split('/')[-1][:-4]
                      for name in glob.glob(file_path + '/' + subdir + '/' + pattern))

    img_list = _stems('img', '*.png')
    out_list = _stems('output', '*.png')
    pcd_list = _stems('lidar', '*.npy')
    nav_list = _stems('nav', '*.png')
    cost_list = _stems('cost', '*.png')
    return img_list, pcd_list, nav_list, cost_list, out_list
def find_nn(ts, ts_list, back=0):
    """Return the entry of ts_list whose timestamp is nearest to ts.

    Entries and ts may be anything float() accepts. `back` steps the
    selection earlier by that many entries (clamped at the first entry);
    ties resolve to the earliest candidate.
    """
    target = float(ts)
    deltas = [abs(float(t) - target) for t in ts_list]
    nearest = deltas.index(min(deltas))
    return ts_list[max(0, nearest - back)]
if __name__ == '__main__':
    # Scale factor for the navigation thumbnail pasted into the top-right corner.
    rate = 1.25
    # Scale factor for the costmap pasted into the top-left corner.
    rate2 = 1.0
    dataset = {}
    fps = 30
    video_size = (1280, 720)
    # Output video assembled from the annotated first-person frames.
    videoWriter = cv2.VideoWriter("/media/wang/Data/video/first-person/2.mp4", cv2.VideoWriter_fourcc(*'MJPG'), fps, video_size)
    # Index every recording's timestamp lists up front.
    # NOTE(review): recordings 4 and 5 are loaded here but never rendered below.
    for index in [2,4,5]:
        img_list, pcd_list, nav_list, cost_list, out_list = read_files(index)
        dataset[index] = {'img_list':img_list, 'pcd_list':pcd_list, 'nav_list':nav_list, 'cost_list':cost_list, 'out_list':out_list}
    for index in [2]:
        choose_dataset = dataset[index]
        for ts in choose_dataset['img_list']:
            img = cv2.imread('/media/wang/Data/video/data'+str(index)+'/output/'+ts+'.png')
            #print(img.shape) #(720, 1280, 3)
            if img is None: continue
            #img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
            # Pair the frame with the nearest-in-time nav and cost images.
            nav_ts = find_nn(ts, choose_dataset['nav_list'])
            cost_ts = find_nn(ts, choose_dataset['cost_list'])
            nav = cv2.imread('/media/wang/Data/video/data'+str(index)+'/nav/'+nav_ts+'.png')
            costmap = cv2.imread('/media/wang/Data/video/data'+str(index)+'/cost/'+cost_ts+'.png')
            nav = cv2.cvtColor(nav, cv2.COLOR_BGR2RGB) #(160, 200, 3)
            #input_img = get_img(img, nav)
            # Overlay the resized nav map into the top-right corner of the frame...
            nav = cv2.resize(nav, (int(200*rate), int(rate*160)))
            img[0:int(rate*160), -int(200*rate):] = nav
            # ...and the costmap into the top-left (assumes the costmap image
            # is 200x400 pixels — TODO confirm against the cost/ folder).
            img[0:int(rate2*200), 0:int(400*rate2)] = costmap
            cv2.imshow('img', img)
            videoWriter.write(img)
            #cv2.imshow('costmap', costmap)
            cv2.waitKey(1)
    cv2.destroyAllWindows()
    videoWriter.release()
| 1,037 | 0 | 46 |
4f1b4c79fe935ceee776c57b22a97040f332d3db | 512 | py | Python | examples/Gephi_streaming.py | chumpblocckami/Tweet2Graph | eaf986ea83e5a420b26c6417ddca887506814fd4 | [
"MIT"
] | null | null | null | examples/Gephi_streaming.py | chumpblocckami/Tweet2Graph | eaf986ea83e5a420b26c6417ddca887506814fd4 | [
"MIT"
] | null | null | null | examples/Gephi_streaming.py | chumpblocckami/Tweet2Graph | eaf986ea83e5a420b26c6417ddca887506814fd4 | [
"MIT"
] | null | null | null | from gephistreamer import graph
from gephistreamer import streamer
#https://github.com/totetmatt/GephiStreamer
#DA FARE
stream = streamer.Streamer(streamer.GephiWS(hostname="localhost", port=8080, workspace="workspace0"),)
node_a = graph.Node("A",custom_property=1)
node_b = graph.Node("B",custom_property=2)
node_c = graph.Node("C",custom_property=3)
node_d = graph.Node("D",custom_property=4)
stream.add_node(node_a,node_b)
edge_ab = graph.Edge(node_a,node_b,custom_property="hello")
stream.add_edge(edge_ab)
| 36.571429 | 102 | 0.791016 | from gephistreamer import graph
from gephistreamer import streamer
#https://github.com/totetmatt/GephiStreamer
#DA FARE
stream = streamer.Streamer(streamer.GephiWS(hostname="localhost", port=8080, workspace="workspace0"),)
node_a = graph.Node("A",custom_property=1)
node_b = graph.Node("B",custom_property=2)
node_c = graph.Node("C",custom_property=3)
node_d = graph.Node("D",custom_property=4)
stream.add_node(node_a,node_b)
edge_ab = graph.Edge(node_a,node_b,custom_property="hello")
stream.add_edge(edge_ab)
| 0 | 0 | 0 |
ec505f1cf69b1833c3a8ad53a5c963b1d29f8429 | 14,640 | py | Python | tts/src/tts/synthesizer.py | mjsobrep/tts-ros1 | b3829ee93f76a70ff1460f505fd8ba70cf8f8d24 | [
"Apache-2.0"
] | null | null | null | tts/src/tts/synthesizer.py | mjsobrep/tts-ros1 | b3829ee93f76a70ff1460f505fd8ba70cf8f8d24 | [
"Apache-2.0"
] | 11 | 2021-05-11T16:13:36.000Z | 2022-03-25T16:10:26.000Z | tts/src/tts/synthesizer.py | mjsobrep/tts-ros1 | b3829ee93f76a70ff1460f505fd8ba70cf8f8d24 | [
"Apache-2.0"
] | 1 | 2019-11-13T22:03:28.000Z | 2019-11-13T22:03:28.000Z | #!/usr/bin/env python
# Copyright (c) 2018, Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import time
import json
import rospy
import hashlib
import sqlite3
import time
from optparse import OptionParser
from tts.srv import Synthesizer, SynthesizerResponse
from tts.srv import PollyResponse
from tts.db import DB
class SpeechSynthesizer:
    """This class serves as a ROS service node that should be an entry point of a TTS task.
    Although the current implementation uses Amazon Polly as the synthesis engine, it is not hard to let it support
    more heterogeneous engines while keeping the API the same.
    In order to support a variety of engines, the SynthesizerRequest was designed with flexibility in mind. It
    has two fields: text and metadata. Both are strings. In most cases, a user can ignore the metadata and call
    the service with some plain text. If the use case needs any control or engine-specific feature, the extra
    information can be put into the JSON-form metadata. This class will use the information when calling the engine.
    The decoupling of the synthesizer and the actual synthesis engine will benefit the users in many ways.
    First, a user will be able to use a unified interface to do the TTS job and have the freedom to use different
    engines available with no or very little change from the client side.
    Second, by applying some design patterns, the synthesizer can choose an engine dynamically. For example, a user
    may prefer to use Amazon Polly but is also OK with an offline solution when network is not reliable.
    Third, engines can be complicated, thus difficult to use. As an example, Amazon Polly supports dozens of parameters
    and is able to accomplish nontrivial synthesis jobs, but majority of the users never need those features. This
    class provides a clean interface with two parameters only, so that it is much easier and pleasant to use. If by
    any chance the advanced features are required, the user can always leverage the metadata field or even go to the
    backend engine directly.
    Also, from an engineering perspective, simple and decoupled modules are easier to maintain.
    This class supports two modes of using polly. It can either call a service node or use AmazonPolly as a library.
    Start the service node::
        $ rosrun tts synthesizer_node.py  # use default configuration
        $ rosrun tts synthesizer_node.py -e POLLY_LIBRARY  # will not call polly service node
    Call the service::
        $ rosservice call /synthesizer 'hello' ''
        $ rosservice call /synthesizer '<speak>hello</speak>' '"{\"text_type\":\"ssml\"}"'
    """

    class DummyEngine:
        """A dummy engine which exists to facilitate testing. Can either
        be set to act as if it is connected or disconnected. Will create files where
        they are expected, but they will not be actual audio files."""

        # NOTE(review): `connected` and `file_size` must be set via
        # set_connection()/set_file_sizes() before calling the engine — there
        # is no __init__ visible in this fragment; confirm in the full file.

        def __call__(self, **kwargs):
            """Put a file at the specified location and return reasonable dummy
            values. If not connected, fills in the Exception fields.
            Args:
                **kwargs: dictionary with fields: output_format, voice_id, sample_rate,
                    text_type, text, output_path
            Returns: A json version of a string with fields: Audio File, Audio Type,
                Exception (if there is an exception), Traceback (if there is an exception),
                and if succesful Amazon Polly Response Metadata
            """
            if self.connected:
                # Write `file_size` random bytes where a real engine would
                # put the synthesized audio.
                with open(kwargs['output_path'], 'wb') as f:
                    f.write(os.urandom(self.file_size))
                output_format = kwargs['OutputFormat'] if 'OutputFormat' in kwargs else 'ogg_vorbis'
                resp = json.dumps({
                    'Audio File': kwargs['output_path'],
                    'Audio Type': output_format,
                    'Amazon Polly Response Metadata': {'some header': 'some data'}
                })
                return SynthesizerResponse(resp)
            else:
                # Simulate a connection failure: respond with the canned
                # error audio clip and an Exception payload.
                current_dir = os.path.dirname(os.path.abspath(__file__))
                error_ogg_filename = 'connerror.ogg'
                error_details = {
                    'Audio File': os.path.join(current_dir, '../src/tts/data', error_ogg_filename),
                    'Audio Type': 'ogg',
                    'Exception': {
                        'dummy head': 'dummy val'
                        # 'Type': str(exc_type),
                        # 'Module': exc_type.__module__,
                        # 'Name': exc_type.__name__,
                        # 'Value': str(e),
                    },
                    'Traceback': 'some traceback'
                }
                return SynthesizerResponse(json.dumps(error_details))

        def set_connection(self, connected):
            """set the connection state
            Args:
                connected: boolean, whether to act connected or not
            """
            self.connected = connected

        def set_file_sizes(self, size):
            """Set the target file size for future files in bytes
            Args:
                size: the number of bytes to make the next files
            """
            self.file_size = size

    # Registry of available synthesis backends, keyed by the engine name
    # accepted on the command line.
    # NOTE(review): PollyViaNode and PollyDirect are not defined in this
    # fragment as shown — presumably defined/imported elsewhere in the full
    # file; TODO confirm.
    ENGINES = {
        'POLLY_SERVICE': PollyViaNode,
        'POLLY_LIBRARY': PollyDirect,
        'DUMMY': DummyEngine,
    }

    #TODO: expose this max_cache_bytes value to the roslaunch system (why is rosparam not used in this file?)

    def _call_engine(self, **kw):
        """Call engine to do the job.
        If no output path is found from input, the audio
        file will be put into /tmp and the file name will have
        a prefix of the md5 hash of the text. If a filename is
        not given, the utterance is added to the cache. If a
        filename is specified, then we will assume that the
        file is being managed by the user and it will not
        be added to the cache.
        :param kw: what AmazonPolly needs to synthesize
        :return: response from AmazonPolly
        """
        if 'output_path' not in kw:
            # Cache key: md5 over the full, order-normalized request, so any
            # change in text/voice/format yields a different cache entry.
            tmp_filename = hashlib.md5(
                json.dumps(kw, sort_keys=True)).hexdigest()
            tmp_filepath = os.path.join(
                os.sep, 'tmp', 'voice_{}'.format(tmp_filename))
            kw['output_path'] = os.path.abspath(tmp_filepath)
            rospy.loginfo('managing file with name: {}'.format(tmp_filename))
            # because the hash will include information about any file ending choices, we only
            # need to look at the hash itself.
            db = DB()
            db_search_result = db.ex(
                'SELECT file, audio_type FROM cache WHERE hash=?', tmp_filename).fetchone()
            current_time = time.time()
            file_found = False
            if db_search_result:  # then there is data
                # check if the file exists, if not, remove from db
                # TODO: add a test that deletes a file without telling the db and tries to synthesize it
                if os.path.exists(db_search_result['file']):
                    file_found = True
                    # Cache hit: refresh the LRU timestamp and answer from disk.
                    db.ex('update cache set last_accessed=? where hash=?',
                          current_time, tmp_filename)
                    synth_result = PollyResponse(json.dumps({
                        'Audio File': db_search_result['file'],
                        'Audio Type': db_search_result['audio_type'],
                        'Amazon Polly Response Metadata': ''
                    }))
                    rospy.loginfo('audio file was already cached at: %s',
                                  db_search_result['file'])
                else:
                    rospy.logwarn(
                        'A file in the database did not exist on the disk, removing from db')
                    db.remove_file(db_search_result['file'])
            if not file_found:  # havent cached this yet
                rospy.loginfo('Caching file')
                synth_result = self.engine(**kw)
                res_dict = json.loads(synth_result.result)
                if 'Exception' not in res_dict:
                    file_name = res_dict['Audio File']
                    if file_name:
                        file_size = os.path.getsize(file_name)
                        db.ex('''insert into cache(
                            hash, file, audio_type, last_accessed,size)
                            values (?,?,?,?,?)''', tmp_filename, file_name,
                              res_dict['Audio Type'], current_time, file_size)
                        rospy.loginfo(
                            'generated new file, saved to %s and cached', file_name)
                        # make sure the cache hasn't grown too big
                        # (evict least-recently-accessed entries, always
                        # keeping at least one file)
                        while db.get_size() > self.max_cache_bytes and db.get_num_files() > 1:
                            remove_res = db.ex(
                                'select file, min(last_accessed), size from cache'
                            ).fetchone()
                            db.remove_file(remove_res['file'])
                            rospy.loginfo('removing %s to maintain cache size, new size: %i',
                                          remove_res['file'], db.get_size())
        else:
            # Caller manages the output file: synthesize directly, no caching.
            synth_result = self.engine(**kw)
        return synth_result

    def _parse_request_or_raise(self, request):
        """It will raise if request is malformed.
        :param request: an instance of SynthesizerRequest
        :return: a dict
        """
        # Defaults come from attributes set during construction
        # (self.default_*), which is not visible in this fragment.
        md = json.loads(request.metadata) if request.metadata else {}
        md['output_format'] = md.get('output_format', self.default_output_format)
        md['voice_id'] = md.get('voice_id', self.default_voice_id)
        # Polly PCM output only supports lower sample rates, hence 16000.
        md['sample_rate'] = md.get('sample_rate', '16000' if md['output_format'].lower() == 'pcm' else '22050')
        md['text_type'] = md.get('text_type', self.default_text_type)
        md['text'] = request.text
        return md

    def _node_request_handler(self, request):
        """The callback function for processing service request.
        It never raises. If anything unexpected happens, it will return a SynthesizerResponse with the exception.
        :param request: an instance of SynthesizerRequest
        :return: a SynthesizerResponse
        """
        rospy.loginfo(request)
        try:
            kws = self._parse_request_or_raise(request)
            res = self._call_engine(**kws).result
            return SynthesizerResponse(res)
        except Exception as e:
            return SynthesizerResponse('Exception: {}'.format(e))

    def start(self, node_name='synthesizer_node', service_name='synthesizer'):
        """The entry point of a ROS service node.
        :param node_name: name of ROS node
        :param service_name: name of ROS service
        :return: it doesn't return
        """
        rospy.init_node(node_name)
        service = rospy.Service(service_name, Synthesizer, self._node_request_handler)
        rospy.loginfo('{} running: {}'.format(node_name, service.uri))
        rospy.spin()
if __name__ == "__main__":
    # NOTE(review): main() is not defined in this fragment as shown —
    # presumably defined elsewhere in the full file; running this module
    # directly would otherwise raise NameError. TODO confirm.
    main()
| 43.442136 | 119 | 0.60888 | #!/usr/bin/env python
# Copyright (c) 2018, Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import time
import json
import rospy
import hashlib
import sqlite3
import time
from optparse import OptionParser
from tts.srv import Synthesizer, SynthesizerResponse
from tts.srv import PollyResponse
from tts.db import DB
class SpeechSynthesizer:
    """This class serves as a ROS service node that should be an entry point of a TTS task.

    Although the current implementation uses Amazon Polly as the synthesis engine, it is not hard to let it support
    more heterogeneous engines while keeping the API the same.

    In order to support a variety of engines, the SynthesizerRequest was designed with flexibility in mind. It
    has two fields: text and metadata. Both are strings. In most cases, a user can ignore the metadata and call
    the service with some plain text. If the use case needs any control or engine-specific feature, the extra
    information can be put into the JSON-form metadata. This class will use the information when calling the engine.

    The decoupling of the synthesizer and the actual synthesis engine will benefit the users in many ways.

    First, a user will be able to use a unified interface to do the TTS job and have the freedom to use different
    engines available with no or very little change from the client side.

    Second, by applying some design patterns, the synthesizer can choose an engine dynamically. For example, a user
    may prefer to use Amazon Polly but is also OK with an offline solution when network is not reliable.

    Third, engines can be complicated, thus difficult to use. As an example, Amazon Polly supports dozens of parameters
    and is able to accomplish nontrivial synthesis jobs, but majority of the users never need those features. This
    class provides a clean interface with two parameters only, so that it is much easier and pleasant to use. If by
    any chance the advanced features are required, the user can always leverage the metadata field or even go to the
    backend engine directly.

    Also, from an engineering perspective, simple and decoupled modules are easier to maintain.

    This class supports two modes of using polly. It can either call a service node or use AmazonPolly as a library.

    Start the service node::

        $ rosrun tts synthesizer_node.py  # use default configuration
        $ rosrun tts synthesizer_node.py -e POLLY_LIBRARY  # will not call polly service node

    Call the service::

        $ rosservice call /synthesizer 'hello' ''
        $ rosservice call /synthesizer '<speak>hello</speak>' '"{\"text_type\":\"ssml\"}"'
    """

    class PollyViaNode:
        # Engine adapter that forwards synthesis to a separate `polly` ROS service node.
        def __init__(self, polly_service_name='polly'):
            self.service_name = polly_service_name

        def __call__(self, **kwargs):
            rospy.loginfo('will call service {}'.format(self.service_name))
            from tts.srv import Polly
            rospy.wait_for_service(self.service_name)
            polly = rospy.ServiceProxy(self.service_name, Polly)
            return polly(polly_action='SynthesizeSpeech', **kwargs)

    class PollyDirect:
        # Engine adapter that runs AmazonPolly in-process (no service node involved).
        def __init__(self):
            pass

        def __call__(self, **kwargs):
            rospy.loginfo('will import amazonpolly.AmazonPolly')
            from tts.amazonpolly import AmazonPolly
            node = AmazonPolly()
            return node.synthesize(**kwargs)

    class DummyEngine:
        """A dummy engine which exists to facilitate testing. Can either
        be set to act as if it is connected or disconnected. Will create files where
        they are expected, but they will not be actual audio files."""

        def __init__(self):
            self.connected = True
            self.file_size = 50000

        def __call__(self, **kwargs):
            """put a file at the specified location and return resonable dummy
            values. If not connected, fills in the Exception fields.

            Args:
                **kwargs: dictionary with fields: output_format, voice_id, sample_rate,
                    text_type, text, output_path

            Returns: A json version of a string with fields: Audio File, Audio Type,
                Exception (if there is an exception), Traceback (if there is an exception),
                and if succesful Amazon Polly Response Metadata
            """
            if self.connected:
                # Write random bytes so a file of the configured size exists
                # where the caller expects real audio.
                with open(kwargs['output_path'], 'wb') as f:
                    f.write(os.urandom(self.file_size))
                # NOTE(review): this checks the CamelCase key 'OutputFormat'
                # while the rest of the file uses snake_case 'output_format'
                # -- confirm which key callers actually send.
                output_format = kwargs['OutputFormat'] if 'OutputFormat' in kwargs else 'ogg_vorbis'
                resp = json.dumps({
                    'Audio File': kwargs['output_path'],
                    'Audio Type': output_format,
                    'Amazon Polly Response Metadata': {'some header': 'some data'}
                })
                return SynthesizerResponse(resp)
            else:
                # Simulated connection failure: point at a bundled error sound
                # and fill in the Exception/Traceback fields.
                current_dir = os.path.dirname(os.path.abspath(__file__))
                error_ogg_filename = 'connerror.ogg'
                error_details = {
                    'Audio File': os.path.join(current_dir, '../src/tts/data', error_ogg_filename),
                    'Audio Type': 'ogg',
                    'Exception': {
                        'dummy head': 'dummy val'
                        # 'Type': str(exc_type),
                        # 'Module': exc_type.__module__,
                        # 'Name': exc_type.__name__,
                        # 'Value': str(e),
                    },
                    'Traceback': 'some traceback'
                }
                return SynthesizerResponse(json.dumps(error_details))

        def set_connection(self, connected):
            """set the connection state

            Args:
                connected: boolean, whether to act connected or not
            """
            self.connected = connected

        def set_file_sizes(self, size):
            """Set the target file size for future files in bytes

            Args:
                size: the number of bytes to make the next files
            """
            self.file_size = size

    # Registry of engine names accepted by __init__, mapping to adapter classes.
    ENGINES = {
        'POLLY_SERVICE': PollyViaNode,
        'POLLY_LIBRARY': PollyDirect,
        'DUMMY': DummyEngine,
    }

    class BadEngineError(NameError):
        # Raised by __init__ when the requested engine is not in ENGINES.
        pass

    #TODO: expose this max_cache_bytes value to the roslaunch system (why is rosparam not used in this file?)
    def __init__(self, engine='POLLY_SERVICE', polly_service_name='polly', max_cache_bytes=100000000):
        # engine: one of the ENGINES keys; polly_service_name is only used by
        # the POLLY_SERVICE engine; max_cache_bytes bounds the on-disk audio
        # cache maintained in _call_engine.
        if engine not in self.ENGINES:
            msg = 'bad engine {} which is not one of {}'.format(engine, ', '.join(SpeechSynthesizer.ENGINES.keys()))
            raise SpeechSynthesizer.BadEngineError(msg)
        engine_kwargs = {'polly_service_name': polly_service_name} if engine == 'POLLY_SERVICE' else {}
        self.engine = self.ENGINES[engine](**engine_kwargs)
        self.default_text_type = 'text'
        self.default_voice_id = 'Joanna'
        self.default_output_format = 'ogg_vorbis'
        self.max_cache_bytes = max_cache_bytes

    def _call_engine(self, **kw):
        """Call engine to do the job.

        If no output path is found from input, the audio
        file will be put into /tmp and the file name will have
        a prefix of the md5 hash of the text. If a filename is
        not given, the utterance is added to the cache. If a
        filename is specified, then we will assume that the
        file is being managed by the user and it will not
        be added to the cache.

        :param kw: what AmazonPolly needs to synthesize
        :return: response from AmazonPolly
        """
        if 'output_path' not in kw:
            # Cache key: md5 of the full, key-sorted request, so any change to
            # voice/format/etc. yields a different cache entry.
            # NOTE(review): on Python 3 hashlib.md5 requires bytes but
            # json.dumps returns str -- this likely needs .encode('utf-8');
            # confirm which interpreter this node runs under.
            tmp_filename = hashlib.md5(
                json.dumps(kw, sort_keys=True)).hexdigest()
            tmp_filepath = os.path.join(
                os.sep, 'tmp', 'voice_{}'.format(tmp_filename))
            kw['output_path'] = os.path.abspath(tmp_filepath)
            rospy.loginfo('managing file with name: {}'.format(tmp_filename))
            # because the hash will include information about any file ending choices, we only
            # need to look at the hash itself.
            db = DB()
            db_search_result = db.ex(
                'SELECT file, audio_type FROM cache WHERE hash=?', tmp_filename).fetchone()
            current_time = time.time()
            file_found = False
            if db_search_result:  # then there is data
                # check if the file exists, if not, remove from db
                # TODO: add a test that deletes a file without telling the db and tries to synthesize it
                if os.path.exists(db_search_result['file']):
                    file_found = True
                    # Refresh the LRU timestamp for the cache hit.
                    db.ex('update cache set last_accessed=? where hash=?',
                          current_time, tmp_filename)
                    # NOTE(review): cache hits return a PollyResponse while
                    # misses return the engine's own response type; both are
                    # only read via `.result` downstream -- confirm intended.
                    synth_result = PollyResponse(json.dumps({
                        'Audio File': db_search_result['file'],
                        'Audio Type': db_search_result['audio_type'],
                        'Amazon Polly Response Metadata': ''
                    }))
                    rospy.loginfo('audio file was already cached at: %s',
                                  db_search_result['file'])
                else:
                    rospy.logwarn(
                        'A file in the database did not exist on the disk, removing from db')
                    db.remove_file(db_search_result['file'])
            if not file_found:  # havent cached this yet
                rospy.loginfo('Caching file')
                synth_result = self.engine(**kw)
                res_dict = json.loads(synth_result.result)
                if 'Exception' not in res_dict:
                    file_name = res_dict['Audio File']
                    if file_name:
                        file_size = os.path.getsize(file_name)
                        db.ex('''insert into cache(
                            hash, file, audio_type, last_accessed, size)
                            values (?,?,?,?,?)''', tmp_filename, file_name,
                              res_dict['Audio Type'], current_time, file_size)
                        rospy.loginfo(
                            'generated new file, saved to %s and cached', file_name)
                        # make sure the cache hasn't grown too big
                        while db.get_size() > self.max_cache_bytes and db.get_num_files() > 1:
                            # Evict least-recently-accessed entries until the
                            # byte budget is met (always keeps at least one file).
                            remove_res = db.ex(
                                'select file, min(last_accessed), size from cache'
                            ).fetchone()
                            db.remove_file(remove_res['file'])
                            rospy.loginfo('removing %s to maintain cache size, new size: %i',
                                          remove_res['file'], db.get_size())
        else:
            # Caller manages the output file; bypass the cache entirely.
            synth_result = self.engine(**kw)
        return synth_result

    def _parse_request_or_raise(self, request):
        """It will raise if request is malformed.

        :param request: an instance of SynthesizerRequest
        :return: a dict
        """
        md = json.loads(request.metadata) if request.metadata else {}
        md['output_format'] = md.get('output_format', self.default_output_format)
        md['voice_id'] = md.get('voice_id', self.default_voice_id)
        # PCM output defaults to 16000 Hz; every other format to 22050.
        md['sample_rate'] = md.get('sample_rate', '16000' if md['output_format'].lower() == 'pcm' else '22050')
        md['text_type'] = md.get('text_type', self.default_text_type)
        md['text'] = request.text
        return md

    def _node_request_handler(self, request):
        """The callback function for processing service request.

        It never raises. If anything unexpected happens, it will return a SynthesizerResponse with the exception.

        :param request: an instance of SynthesizerRequest
        :return: a SynthesizerResponse
        """
        rospy.loginfo(request)
        try:
            kws = self._parse_request_or_raise(request)
            res = self._call_engine(**kws).result
            return SynthesizerResponse(res)
        except Exception as e:
            # Deliberately broad: errors travel back to the caller as text.
            return SynthesizerResponse('Exception: {}'.format(e))

    def start(self, node_name='synthesizer_node', service_name='synthesizer'):
        """The entry point of a ROS service node.

        :param node_name: name of ROS node
        :param service_name: name of ROS service
        :return: it doesn't return
        """
        rospy.init_node(node_name)
        service = rospy.Service(service_name, Synthesizer, self._node_request_handler)
        rospy.loginfo('{} running: {}'.format(node_name, service.uri))
        rospy.spin()  # blocks until the node is shut down
def main():
    """Parse command-line options and run the synthesizer service node."""
    cli = OptionParser('''usage: %prog [options]
    ''')
    cli.add_option("-n", "--node-name", dest="node_name", default='synthesizer_node',
                   help="name of the ROS node",
                   metavar="NODE_NAME")
    cli.add_option("-s", "--service-name", dest="service_name", default='synthesizer',
                   help="name of the ROS service",
                   metavar="SERVICE_NAME")
    cli.add_option("-e", "--engine", dest="engine", default='POLLY_SERVICE',
                   help="name of the synthesis engine",
                   metavar="ENGINE")
    cli.add_option("-p", "--polly-service-name", dest="polly_service_name", default='polly',
                   help="name of the polly service",
                   metavar="POLLY_SERVICE_NAME")
    options, _ = cli.parse_args()

    # Only the service-backed engine takes a polly service name.
    ctor_kwargs = {'engine': options.engine}
    if options.engine == 'POLLY_SERVICE':
        ctor_kwargs['polly_service_name'] = options.polly_service_name
    synthesizer = SpeechSynthesizer(**ctor_kwargs)
    synthesizer.start(node_name=options.node_name, service_name=options.service_name)
# Script entry point (e.g. `rosrun tts synthesizer_node.py`).
if __name__ == "__main__":
    main()
| 2,594 | 19 | 283 |
d25a7bd75d7c54850950a409c269be37acd67a36 | 6,722 | py | Python | wso.py | daakru/BLReLM | ad1001c101821356abff711c1ed4d3178a77baa7 | [
"MIT"
] | null | null | null | wso.py | daakru/BLReLM | ad1001c101821356abff711c1ed4d3178a77baa7 | [
"MIT"
] | null | null | null | wso.py | daakru/BLReLM | ad1001c101821356abff711c1ed4d3178a77baa7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Writable String Object | Stores data to simplify writing outputs.
Created on Sat Nov 28 19:06:09 2020
Version 1.3.0 (debug support) A-05-2021 @ 14:34 UTC -5
Requires: NONE
@author: Kinetos#6935
"""
import argparse
class wso(object):
"""
A class to to simplify writing outputs when printing.
Attributes
----------
template : str
Template used to format the printing string.
outfile : str
Path to the text file used for writing.
writeable : bool
Whether the write methods should write to a file when called.
string : str
Current formatted string to write or print.
Methods
-------
fm(*args):
Format the template string using *args. Stores result in self.string.
wp():
Write to file and print the currently stored string.
p():
Print the currently stored string.
w():
Write the currently stored string to the outfile.
clean():
Open the outfile with 'w' setting to clear any existing contents.
"""
def set_template(self, template):
"""
Setter for template.
Parameters
----------
template : str
Template used to format the printing string.
Returns
-------
None.
"""
self.template = template
def set_outfile(self, filepath):
"""
Setter for outfile.
Parameters
----------
filepath : str
New path to text file to use for writing.
Returns
-------
None.
"""
self.outfile = filepath
def set_writeable(self, writeable):
"""
Setter for writeable.
Parameters
----------
writeable : bool
Whether the write methods should write to a file when called.
Returns
-------
None.
"""
self.writeable = bool(writeable)
def set_string(self, string):
"""
Setter for string.
Parameters
----------
string : str
New formatted string to write or print.
Returns
-------
None.
"""
self.string = string
def get_template(self):
"""
Getter for template.
Returns
-------
template : str
Template used to format the printing string.
"""
return self.template
def get_outfile(self):
"""
Getter for outfile.
Returns
-------
outfile : str
Path to the text file used for writing.
"""
return self.outfile
def get_string(self):
"""
Getter for string.
Returns
-------
string : str
Current formatted string to write or print.
"""
return self.string
def fm(self, *args):
"""
Format the template string using *args. Stores result in self.string.
Parameters
----------
*args : object
Values given as inputs for str.format().
Returns
-------
None.
"""
self.string = self.template.format(*args)
def wp(self):
"""
Write to file and print the currently stored string.
Returns
-------
None.
"""
if self.writeable:
with open(self.outfile, 'a') as f:
f.write(self.string + '\n')
print(self.string)
def p(self):
"""
Print the currently stored string.
Returns
-------
None.
"""
print(self.string)
def w(self):
"""
Write the currently stored string to the outfile.
Returns
-------
None.
"""
if self.writeable:
with open(self.outfile, 'a') as f:
f.write(self.string)
def clean(self):
"""
Open the outfile with 'w' setting to clear any existing contents.
Returns
-------
None.
"""
if self.writeable:
open(self.outfile, 'w').close()
def fmwp(self, *args):
"""
Perform fm() followed by wp().
Parameters
----------
*args : object
Values given as inputs for str.format().
Returns
-------
None.
"""
self.fm(*args)
self.wp()
def dbwp(self, *args):
"""
If debug mode is enabled, perform fm() followed by wp().
Parameters
----------
*args : object
Values given as inputs for str.format().
Returns
-------
None.
"""
if self.debug:
self.fm(*args)
self.wp()
def generate_outfile_parser(description):
    """
    Reusable -o and -O arguments.

    Parameters
    ----------
    description : str
        Description for the argument parser, usually __doc__ or some variant.

    Returns
    -------
    p : argparse.ArgumentParser
        Created ArgumentParser object with -o, -O, and the given description.
    """
    # Bug fix: ArgumentParser's first positional parameter is `prog`, not
    # `description`, so the description text must be passed by keyword.
    p = argparse.ArgumentParser(description=description)
    p.add_argument("-o", "--outfile", dest="o", action="store_true",
                   help="output printed results to default file: out.txt")
    p.add_argument("-O", dest="oname", metavar="NAME",
                   help="output printed results to text file w/ custom path")
    return p
def implement_outfile_parser(args):
"""
Implement -o and -O arguments added by generate_outfile_parser.
Parameters
----------
args : argparse args object
Results of parse_args() when called on an Argument Parser object.
Returns
-------
outfile : str, None
None if neither args.oname nor args.o was set, do not output to file.
Notes
-----
Use for docstring in methods that accept outfile as an argument:
outfile : str, None
Path to output text file. Disables writing to file if set to None.
"""
outfile = None
if args.oname is not None:
outfile = args.oname.strip()
if not outfile.endswith(".txt"):
outfile += ".txt"
elif args.o:
outfile = "out.txt"
return outfile
| 21.476038 | 77 | 0.520678 | # -*- coding: utf-8 -*-
"""
Writable String Object | Stores data to simplify writing outputs.
Created on Sat Nov 28 19:06:09 2020
Version 1.3.0 (debug support) A-05-2021 @ 14:34 UTC -5
Requires: NONE
@author: Kinetos#6935
"""
import argparse
class wso(object):
    """Writable String Object: formats strings from a template and mirrors
    them to stdout and, optionally, an output text file.

    Attributes
    ----------
    template : str
        Template used with str.format() to build the printable string.
    outfile : str
        Path of the text file the write methods append to.
    writeable : bool
        When False, the write methods skip the file entirely.
    debug : bool
        Enables the dbwp() debug helper.
    string : str
        Most recently formatted string.
    """

    def __init__(self, template="", outfile="out.txt", debug=False):
        self.template = template
        self.outfile = outfile
        # File output is enabled exactly when an output path was supplied.
        self.writeable = self.outfile is not None
        self.debug = debug
        self.string = ""

    def set_template(self, template):
        """Replace the format template."""
        self.template = template

    def set_outfile(self, filepath):
        """Point the writer at a different output file."""
        self.outfile = filepath

    def set_writeable(self, writeable):
        """Enable or disable file output (value is coerced to bool)."""
        self.writeable = bool(writeable)

    def set_string(self, string):
        """Overwrite the currently stored string."""
        self.string = string

    def get_template(self):
        """Return the format template."""
        return self.template

    def get_outfile(self):
        """Return the output file path."""
        return self.outfile

    def get_string(self):
        """Return the currently stored string."""
        return self.string

    def fm(self, *args):
        """Format the template with *args and store the result."""
        self.string = self.template.format(*args)

    def wp(self):
        """Append the stored string (plus newline) to the outfile, then print it."""
        if self.writeable:
            with open(self.outfile, 'a') as sink:
                sink.write(self.string + '\n')
        print(self.string)

    def p(self):
        """Print the stored string without touching the file."""
        print(self.string)

    def w(self):
        """Append the stored string to the outfile (no newline, no print)."""
        if self.writeable:
            with open(self.outfile, 'a') as sink:
                sink.write(self.string)

    def clean(self):
        """Truncate the outfile by opening it in 'w' mode."""
        if self.writeable:
            open(self.outfile, 'w').close()

    def fmwp(self, *args):
        """Shorthand for fm(*args) followed by wp()."""
        self.fm(*args)
        self.wp()

    def dbwp(self, *args):
        """Like fmwp(), but only acts when debug mode is on."""
        if self.debug:
            self.fm(*args)
            self.wp()
def generate_outfile_parser(description):
    """
    Reusable -o and -O arguments.

    Parameters
    ----------
    description : str
        Description for the argument parser, usually __doc__ or some variant.

    Returns
    -------
    p : argparse.ArgumentParser
        Created ArgumentParser object with -o, -O, and the given description.
    """
    # Bug fix: ArgumentParser's first positional parameter is `prog`, not
    # `description`, so the description text must be passed by keyword.
    p = argparse.ArgumentParser(description=description)
    p.add_argument("-o", "--outfile", dest="o", action="store_true",
                   help="output printed results to default file: out.txt")
    p.add_argument("-O", dest="oname", metavar="NAME",
                   help="output printed results to text file w/ custom path")
    return p
def implement_outfile_parser(args):
    """Resolve the output path from the -o / -O arguments added by
    generate_outfile_parser.

    Parameters
    ----------
    args : argparse args object
        Results of parse_args() when called on an Argument Parser object.

    Returns
    -------
    outfile : str, None
        Chosen output path; None means neither flag was set, so writing to
        a file is disabled.

    Notes
    -----
    Use for docstring in methods that accept outfile as an argument:

    outfile : str, None
        Path to output text file. Disables writing to file if set to None.
    """
    custom = args.oname
    if custom is not None:
        # A custom name wins over -o; guarantee a .txt extension.
        path = custom.strip()
        return path if path.endswith(".txt") else path + ".txt"
    return "out.txt" if args.o else None
def init(template="", o=None, d=False):
    """Factory for a wso instance.

    Bug fix: the original if/else had byte-identical branches, making the
    conditional dead code; a single constructor call is equivalent.

    Parameters
    ----------
    template : str
        Format template for the new object.
    o : str, None
        Output file path; None disables file output (wso sets writeable=False).
    d : bool
        Debug flag forwarded to wso.

    Returns
    -------
    wso
        Newly constructed writable string object.
    """
    return wso(template, o, d)
| 329 | 0 | 50 |
bfca6e0fa0eb15b62fed5d38aacf62564fa094de | 900 | py | Python | app/schema/answers/month_year_date_answer.py | qateam123/eq | 704757952323647d659c49a71975c56406ff4047 | [
"MIT"
] | null | null | null | app/schema/answers/month_year_date_answer.py | qateam123/eq | 704757952323647d659c49a71975c56406ff4047 | [
"MIT"
] | 8 | 2020-03-24T15:24:18.000Z | 2022-03-02T04:32:56.000Z | app/schema/answers/month_year_date_answer.py | qateam123/eq | 704757952323647d659c49a71975c56406ff4047 | [
"MIT"
] | null | null | null | from app.schema.answer import Answer
from app.schema.exceptions import TypeCheckingException
from app.schema.widgets.month_year_date_widget import MonthYearDateWidget
from app.validation.month_year_date_type_check import MonthYearDateTypeCheck
| 36 | 76 | 0.744444 | from app.schema.answer import Answer
from app.schema.exceptions import TypeCheckingException
from app.schema.widgets.month_year_date_widget import MonthYearDateWidget
from app.validation.month_year_date_type_check import MonthYearDateTypeCheck
class MonthYearDateAnswer(Answer):
def __init__(self, answer_id=None):
super().__init__(answer_id)
self.type_checkers.append(MonthYearDateTypeCheck())
self.widget = MonthYearDateWidget(self.id)
def get_typed_value(self, post_data):
user_input = self.get_user_input(post_data)
for checker in self.type_checkers:
result = checker.validate(user_input)
if not result.is_valid:
raise TypeCheckingException(result.errors[0])
return self._cast_user_input(user_input)
def get_user_input(self, post_vars):
return self.widget.get_user_input(post_vars)
| 539 | 13 | 103 |
2078827fc55525f8112757f3bcc7571c050f6d7c | 793 | py | Python | __init__.py | samg11/SpaceX-Launch-Data | ae4d70fc991e4b6a6b0e8b294e8d25e2a1a91f52 | [
"MIT"
] | null | null | null | __init__.py | samg11/SpaceX-Launch-Data | ae4d70fc991e4b6a6b0e8b294e8d25e2a1a91f52 | [
"MIT"
] | null | null | null | __init__.py | samg11/SpaceX-Launch-Data | ae4d70fc991e4b6a6b0e8b294e8d25e2a1a91f52 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, url_for, flash, redirect
app = Flask(__name__)
@app.route('/')
@app.route('/next')
@app.route('/upcoming')
@app.route('/rockets')
@app.route('/rockets/<rocket>')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
| 29.37037 | 94 | 0.672131 | from flask import Flask, render_template, url_for, flash, redirect
# Flask application instance; the route handlers below register on it.
app = Flask(__name__)
@app.route('/')
def index():
    """Render the home page."""
    return render_template('index.html', script="index.js", name="Home Page")
@app.route('/next')
def next():
    """Render the next-launch page.

    NOTE(review): `next` shadows the builtin; renaming it would change the
    Flask endpoint name derived from the function, so it is left as-is.
    """
    return render_template('next.html', script="next.js", name="Next Launch")
@app.route('/upcoming')
def upcoming():
    """Render the upcoming-launches page."""
    return render_template("upcoming.html", script="upcoming.js", name="Upcoming")
@app.route('/rockets')
def rockets():
    """Render the rockets overview page."""
    return render_template("rockets.html", script="rockets.js", name="Rockets")
@app.route('/rockets/<rocket>')
def rocket(rocket):
    """Render the detail page for one rocket (identifier taken from the URL)."""
    return render_template("sRocket.html", script="sRocket.js", name="Rocket", rocket=rocket)
if __name__ == "__main__":
    # Development server only: debug=True enables the interactive debugger
    # and, combined with host='0.0.0.0', exposes it on all interfaces --
    # do not run this configuration in production.
    app.run(debug=True, host='0.0.0.0')
| 384 | 0 | 115 |
cd6b1eaf10be5e0429bcdbb1b2ff218d05d45adb | 6,304 | py | Python | System_of_Beams/testing_collection/test_runner.py | ChairOfStructuralMechanicsTUM/Mechanics_Apps | b064a42d4df3fa9bde62a5cff9cb27ca61b0127c | [
"MIT"
] | 11 | 2017-05-06T17:05:29.000Z | 2020-11-12T09:26:47.000Z | System_of_Beams/testing_collection/test_runner.py | ChairOfStructuralMechanicsTUM/Mechanics_Apps | b064a42d4df3fa9bde62a5cff9cb27ca61b0127c | [
"MIT"
] | 49 | 2017-04-20T11:26:11.000Z | 2020-05-29T13:18:06.000Z | System_of_Beams/testing_collection/test_runner.py | ChairOfStructuralMechanicsTUM/Mechanics_Apps | b064a42d4df3fa9bde62a5cff9cb27ca61b0127c | [
"MIT"
] | 4 | 2017-02-14T12:55:34.000Z | 2022-01-12T15:07:07.000Z | from testing_collection import visualisation_tests as visu_tests
from Classes.CurrentDocument import CurrentDoc
def run_tests(curr_doc: CurrentDoc):
"""
1.) write test case and add it to file test_cases.py
2.) Call it in this function (run_tests() will be called in 'System_of_Beams\main.py'
3.) make sure, the variable 'run_tests' in the file main.py is set to true
4.) Only the latest run test can be plotted (no opportunities up to now to run one after another)
5.) Results will be visualized at the bokeh server
"""
"""
VISUALISATION TESTS
"""
# print("Single beam lineload test")
# visu_tests.single_beam_lineload_visu(curr_doc)
# print("Final Software lab structure")
# visu_tests.final_structure_software_lab(curr_doc)
print('Test example Quirin') #19.11
visu_tests.example_unterlagen_visu(curr_doc)
# print("Visualise all possible nodedep elements")
# visu_tests.vis_all_possible_nodedep_ele(curr_doc)
"""
CALCULATION TESTS
"""
# print("Single beam lineload test") #24.11
# test_cases.single_beam_lineload_test(curr_doc)
# print('normal line load') #24.11
# test_cases.single_beam_normal_lineload_test(curr_doc)
# print("Single beam clamping test") #24.11
# test_cases.single_clamping_left_side(curr_doc)
# print("Two beam lineload test") #17.11
# test_cases.two_beam_lineload_test(curr_doc)
# print("Two beam lineload overdefined test") #17.11
# test_cases.single_beam_lineload_test_overdefined(curr_doc)
# print("Single beam lineload test underdefined") #24.11
# test_cases.single_beam_lineload_test_underdefined(curr_doc)
# print('Big beam out of free elements') #17.11
# test_cases.two_beam_combined_to_one_complete_lineload_test(curr_doc)
# print('Big beam out of free elements 2 l') #17.11
# test_cases.two_beam_combined_to_one_complete_lineload_test_2l(curr_doc)
# print('Single load in the middle') #17.11
# test_cases.two_beam_combined_to_one_single_load_middle(curr_doc)
# print('Seperated elements') #17.11
# test_cases.single_beam_lineload_test_seperated_elements(curr_doc)
# print('Joint test) #18.11
# test_cases.two_beam_combined_to_one_single_load_middle_joint(curr_doc)
#
# print('Clamping with single load test') #17.11
# test_cases.single_clamping_left_side_single_load(curr_doc)
# print('TM example') #17.11
# test_cases.example_from_sheet_2_4(curr_doc)
# print('Trapezlast') #17.11
# test_cases.single_beam_trapezload_test(curr_doc)
# print('Temperature test') #17.11
# test_cases.single_beam_temperature_test(curr_doc)
# print('Triangle test') #17.11
# test_cases.two_beam_triangle_load_middle(curr_doc)
# print('Temperature clamping') #18.11
# test_cases.single_clamping_left_side_temperature(curr_doc)
# print('ss13') #17.11
# test_cases.example_ss13(curr_doc)
# print('ss12') #17.11
# test_cases.example_ss12(curr_doc)
#
# print('ss12_vereinfacht') #17.11
# test_cases.example_ss12_vereinfacht(curr_doc)
# print('ss11') #17.11
# test_cases.example_ss11(curr_doc)
# print('ss14') #19.11
# test_cases.example_ss14(curr_doc)
# print('schraeg') #17.11
# test_cases.single_beam_schraeg(curr_doc)
# print('vertical') #17.11
# test_cases.single_beam_lineload_vertical_test(curr_doc)
# print('vertical single load') #17.11
# test_cases.single_beam_single_load_vertical_test(curr_doc)
# print('Test Ecke') #17.11
# test_cases.two_beam_corner_line_load(curr_doc)
# print('triangle_not_symmetric') #17.11
# test_cases.two_beam_triangle_load_middle_not_symmetrical(curr_doc)
# print('Test example Quirin') #19.11
# test_cases.example_unterlagen_test(curr_doc)
# print('Test Quirin vereinfacht') #19.11
# test_cases.example_unterlagen_test_vereinfacht(curr_doc)
# print('test cos') #18.11
# test_cases.single_beam_cos_test(curr_doc)
# print('test multiple elements') #19.11
# test_cases.multiple_elements(curr_doc)
# print('test case spring') #24.11
# test_cases.example_2_3_neu(curr_doc)
# print('Test case ss 15') #24.11
# test_cases.example_ss15(curr_doc)
# print('Test case ss 16') #24.11
# test_cases.example_SS_16(curr_doc)
# test_cases.single_beam_lineload_test_infinity(curr_doc)
# test_cases.final_structure_software_lab(curr_doc)
# test_cases.final_structure_software_lab(curr_doc)
| 44.70922 | 101 | 0.521415 | from testing_collection import visualisation_tests as visu_tests
from Classes.CurrentDocument import CurrentDoc
def run_tests(curr_doc: CurrentDoc):
"""
1.) write test case and add it to file test_cases.py
2.) Call it in this function (run_tests() will be called in 'System_of_Beams\main.py'
3.) make sure, the variable 'run_tests' in the file main.py is set to true
4.) Only the latest run test can be plotted (no opportunities up to now to run one after another)
5.) Results will be visualized at the bokeh server
"""
"""
VISUALISATION TESTS
"""
# print("Single beam lineload test")
# visu_tests.single_beam_lineload_visu(curr_doc)
# print("Final Software lab structure")
# visu_tests.final_structure_software_lab(curr_doc)
print('Test example Quirin') #19.11
visu_tests.example_unterlagen_visu(curr_doc)
# print("Visualise all possible nodedep elements")
# visu_tests.vis_all_possible_nodedep_ele(curr_doc)
"""
CALCULATION TESTS
"""
# print("Single beam lineload test") #24.11
# test_cases.single_beam_lineload_test(curr_doc)
# print('normal line load') #24.11
# test_cases.single_beam_normal_lineload_test(curr_doc)
# print("Single beam clamping test") #24.11
# test_cases.single_clamping_left_side(curr_doc)
# print("Two beam lineload test") #17.11
# test_cases.two_beam_lineload_test(curr_doc)
# print("Two beam lineload overdefined test") #17.11
# test_cases.single_beam_lineload_test_overdefined(curr_doc)
# print("Single beam lineload test underdefined") #24.11
# test_cases.single_beam_lineload_test_underdefined(curr_doc)
# print('Big beam out of free elements') #17.11
# test_cases.two_beam_combined_to_one_complete_lineload_test(curr_doc)
# print('Big beam out of free elements 2 l') #17.11
# test_cases.two_beam_combined_to_one_complete_lineload_test_2l(curr_doc)
# print('Single load in the middle') #17.11
# test_cases.two_beam_combined_to_one_single_load_middle(curr_doc)
# print('Seperated elements') #17.11
# test_cases.single_beam_lineload_test_seperated_elements(curr_doc)
# print('Joint test) #18.11
# test_cases.two_beam_combined_to_one_single_load_middle_joint(curr_doc)
#
# print('Clamping with single load test') #17.11
# test_cases.single_clamping_left_side_single_load(curr_doc)
# print('TM example') #17.11
# test_cases.example_from_sheet_2_4(curr_doc)
# print('Trapezlast') #17.11
# test_cases.single_beam_trapezload_test(curr_doc)
# print('Temperature test') #17.11
# test_cases.single_beam_temperature_test(curr_doc)
# print('Triangle test') #17.11
# test_cases.two_beam_triangle_load_middle(curr_doc)
# print('Temperature clamping') #18.11
# test_cases.single_clamping_left_side_temperature(curr_doc)
# print('ss13') #17.11
# test_cases.example_ss13(curr_doc)
# print('ss12') #17.11
# test_cases.example_ss12(curr_doc)
#
# print('ss12_vereinfacht') #17.11
# test_cases.example_ss12_vereinfacht(curr_doc)
# print('ss11') #17.11
# test_cases.example_ss11(curr_doc)
# print('ss14') #19.11
# test_cases.example_ss14(curr_doc)
# print('schraeg') #17.11
# test_cases.single_beam_schraeg(curr_doc)
# print('vertical') #17.11
# test_cases.single_beam_lineload_vertical_test(curr_doc)
# print('vertical single load') #17.11
# test_cases.single_beam_single_load_vertical_test(curr_doc)
# print('Test Ecke') #17.11
# test_cases.two_beam_corner_line_load(curr_doc)
# print('triangle_not_symmetric') #17.11
# test_cases.two_beam_triangle_load_middle_not_symmetrical(curr_doc)
# print('Test example Quirin') #19.11
# test_cases.example_unterlagen_test(curr_doc)
# print('Test Quirin vereinfacht') #19.11
# test_cases.example_unterlagen_test_vereinfacht(curr_doc)
# print('test cos') #18.11
# test_cases.single_beam_cos_test(curr_doc)
# print('test multiple elements') #19.11
# test_cases.multiple_elements(curr_doc)
# print('test case spring') #24.11
# test_cases.example_2_3_neu(curr_doc)
# print('Test case ss 15') #24.11
# test_cases.example_ss15(curr_doc)
# print('Test case ss 16') #24.11
# test_cases.example_SS_16(curr_doc)
# test_cases.single_beam_lineload_test_infinity(curr_doc)
# test_cases.final_structure_software_lab(curr_doc)
# test_cases.final_structure_software_lab(curr_doc)
| 0 | 0 | 0 |
2ee6ff7af3295482ca8a08e6514f2134c750464d | 5,280 | py | Python | retrievals/data/ecmwf/grib.py | leric2/pyretrievals | 3cae0afc9951ce079a44aa093689867b17a11060 | [
"MIT"
] | null | null | null | retrievals/data/ecmwf/grib.py | leric2/pyretrievals | 3cae0afc9951ce079a44aa093689867b17a11060 | [
"MIT"
] | null | null | null | retrievals/data/ecmwf/grib.py | leric2/pyretrievals | 3cae0afc9951ce079a44aa093689867b17a11060 | [
"MIT"
] | 2 | 2020-05-13T10:35:21.000Z | 2022-03-31T07:47:27.000Z | import pygrib
import numpy as np
from collections import defaultdict
from collections import namedtuple
from collections import Iterable
import xarray as xr
def grb_msg_to_xr(message, has_levels=True):
    """
    Convert a single grib message to xarray.

    Builds a DataArray over (lat, lon), then prepends a 'time' (and
    optionally 'level') dimension, and attaches CF-style metadata taken
    from the message.

    :param message: source message providing grid bounds, values and metadata
    :type message: pygrib message
    :param has_levels: If True, add a level coordinate.
    :rtype: xarray.DataArray
    """
    # Axes are reconstructed from the grid bounds and point counts
    # (Ni = number of longitudes, Nj = number of latitudes).
    # NOTE(review): the first longitude is negated but the last is not --
    # looks asymmetric; confirm against the area convention used when the
    # data was requested from ECMWF.
    lons = np.linspace(-float(message['longitudeOfFirstGridPointInDegrees']),
                       float(message['longitudeOfLastGridPointInDegrees']),
                       int(message['Ni']))
    lats = np.linspace(float(message['latitudeOfFirstGridPointInDegrees']),
                       float(message['latitudeOfLastGridPointInDegrees']),
                       int(message['Nj']))
    coords = {
        'time': message.analDate,
        'lat': lats,
        'lon': lons,
    }
    if has_levels:
        coords['level'] = message.level
    # set up data variables
    values = message.values # values in lat, lon
    attrs = dict()
    attrs['units'] = message.units
    attrs['standard_name'] = message.cfName
    attrs['long_name'] = message.name
    attrs['parameter_id'] = message.paramId
    da = xr.DataArray(data=values,
                      dims=('lat', 'lon'),
                      coords=coords,
                      name=message.name.lower().replace(' ', '_'),
                      attrs=attrs)
    # Expand dimensions: 'level' (if any) at axis 2, then 'time' in front.
    if 'level' in coords:
        da = da.expand_dims('level', 2)
    da = da.expand_dims('time', len(coords) - 1)
    # Attributes (CF conventions for the coordinate variables)
    da['lat'].attrs['standard_name'] = 'latitude'
    da['lat'].attrs['long_name'] = 'Latitude'
    da['lat'].attrs['units'] = 'degrees_north'
    da['lat'].attrs['axis'] = 'Y'
    da['lon'].attrs['standard_name'] = 'longitude'
    da['lon'].attrs['long_name'] = 'Longitude'
    da['lon'].attrs['units'] = 'degrees_east'
    da['lon'].attrs['axis'] = 'X'
    da['time'].attrs['standard_name'] = 'time'
    da['time'].attrs['long_name'] = 'Time'
    if 'level' in coords:
        da['level'].attrs['long_name'] = 'ECMWF model level'
    return da
| 30.520231 | 93 | 0.58447 | import pygrib
import numpy as np
from collections import defaultdict
from collections import namedtuple
from collections import Iterable
import xarray as xr
def grb_msg_to_xr(message, has_levels=True):
    """
    Convert a single grib message to xarray.
    :param message:
    :type message: pygrib message
    :param has_levels: If True, add a level coordinate.
    :rtype: xarray.DataArray
    """
    # Reconstruct the coordinate axes from the grid bounds and counts.
    lon_axis = np.linspace(-float(message['longitudeOfFirstGridPointInDegrees']),
                           float(message['longitudeOfLastGridPointInDegrees']),
                           int(message['Ni']))
    lat_axis = np.linspace(float(message['latitudeOfFirstGridPointInDegrees']),
                           float(message['latitudeOfLastGridPointInDegrees']),
                           int(message['Nj']))
    coords = {
        'time': message.analDate,
        'lat': lat_axis,
        'lon': lon_axis,
    }
    if has_levels:
        coords['level'] = message.level
    # Variable metadata taken straight from the message.
    attrs = {
        'units': message.units,
        'standard_name': message.cfName,
        'long_name': message.name,
        'parameter_id': message.paramId,
    }
    da = xr.DataArray(data=message.values,
                      dims=('lat', 'lon'),
                      coords=coords,
                      name=message.name.lower().replace(' ', '_'),
                      attrs=attrs)
    # Prepend the extra dimensions: 'level' (if present) at axis 2,
    # then 'time' in front.
    if 'level' in coords:
        da = da.expand_dims('level', 2)
    da = da.expand_dims('time', len(coords) - 1)
    # CF-style attributes for the coordinate variables.
    axis_metadata = {
        'lat': {'standard_name': 'latitude', 'long_name': 'Latitude',
                'units': 'degrees_north', 'axis': 'Y'},
        'lon': {'standard_name': 'longitude', 'long_name': 'Longitude',
                'units': 'degrees_east', 'axis': 'X'},
        'time': {'standard_name': 'time', 'long_name': 'Time'},
    }
    for dim, meta in axis_metadata.items():
        da[dim].attrs.update(meta)
    if 'level' in coords:
        da['level'].attrs['long_name'] = 'ECMWF model level'
    return da
class GribECMWF:
    """Reader for an ECMWF grib file, indexed by parameter name, level and
    analysis date so single messages can be looked up without rescanning."""
    def __init__(self, filename):
        self.filename = filename
        self.grbs = pygrib.open(filename)
        # Index built once over the open file; reused by get_dataset().
        self.index = GribIndex(self.grbs, ['parameterName', 'level', 'analDate'])
    def get_dataset(self, parameter, level, time):
        """Return the single message matching (parameter, level, time) as an
        xarray DataArray.

        :raises ValueError: more than one message matches the selection.
        :raises KeyError: no message matches the selection.
        """
        message_numbers = self.index.sel(parameterName=parameter, level=level, analDate=time)
        if len(message_numbers) > 1:
            raise ValueError('Got multiple grib messages, but expected one.')
        elif len(message_numbers) == 0:
            raise KeyError('No messages found.')
        message_number = message_numbers[0]
        # read message
        message = self.grbs[message_number]
        # Only add a 'level' coordinate when this parameter/time actually
        # has more than one level in the file.
        has_levels = len(self.index.sel(parameterName=parameter, analDate=time)) > 1
        da = grb_msg_to_xr(message, has_levels)
        return da
    @staticmethod
    def _slugify(name):
        # Lower-case and replace spaces so the name is identifier-friendly.
        return name.lower().replace(' ', '_')
class GribIndex:
    """In-memory index mapping combinations of grib message keys to the
    message numbers carrying them."""
    def __init__(self, grbs, keys):
        self.grbs = grbs
        self.keys = set(keys)
        self.Index = namedtuple('Index', keys)
        # Populated by create_index()
        self.index = None
        self.values = None
        self.create_index()
    def sel(self, **kwargs):
        """
        Get the message numbers by index.
        """
        unknown = [key for key in kwargs if key not in self.keys]
        if unknown:
            raise KeyError(unknown[0] + ' is not a valid indexer.')
        # Normalise each selector to the collection of values it admits.
        admissible = {key: self.sel_values(key, selector)
                      for key, selector in kwargs.items()}
        return [
            number
            for entry, number in self.index.items()
            if all(getattr(entry, key) in allowed
                   for key, allowed in admissible.items())
        ]
    def sel_values(self, key, selector=None):
        """Return the indexed values for *key* admitted by *selector*."""
        known = self.values[key]
        if selector is None:
            # No filter: every known value for this key.
            return known
        # Wrap scalars (and strings, which are iterable) into a list.
        if isinstance(selector, str) or not isinstance(selector, Iterable):
            selector = [selector]
        return [value for value in known if value in selector]
    def __getitem__(self, item):
        return self.values[item]
    def create_index(self):
        """
        Create an index of all messages by keys.
        """
        table = dict()
        value_sets = defaultdict(set)
        self.grbs.seek(0)
        for msg in self.grbs:
            # Skip messages that lack any of the index keys.
            if not all(msg.has_key(key) for key in self.keys):
                continue
            extracted = {key: getattr(msg, key) for key in self.keys}
            for key, value in extracted.items():
                value_sets[key].add(value)
            table[self.Index(**extracted)] = msg.messagenumber
        self.index = table
        self.values = value_sets
| 1,414 | 1,678 | 46 |
465c3233b80f7af04fe584f39fcb8e47b7171fe4 | 11,615 | py | Python | tests/system/python/api/test_service.py | doug-dianomic/fledge | cab620d1f31e6dca8e31ca8e483adaad7ce94834 | [
"Apache-2.0"
] | null | null | null | tests/system/python/api/test_service.py | doug-dianomic/fledge | cab620d1f31e6dca8e31ca8e483adaad7ce94834 | [
"Apache-2.0"
] | null | null | null | tests/system/python/api/test_service.py | doug-dianomic/fledge | cab620d1f31e6dca8e31ca8e483adaad7ce94834 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge.readthedocs.io/
# FLEDGE_END
""" Test add service using poll and async plugins for both python & C version REST API """
import os
import http.client
import json
import time
from uuid import UUID
from collections import Counter
from urllib.parse import quote
import pytest
import plugin_and_service
__author__ = "Ashish Jabble"
__copyright__ = "Copyright (c) 2019 Dianomic Systems"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
SVC_NAME_1 = 'Random Walk #1'
SVC_NAME_2 = 'HTTP-SOUTH'
SVC_NAME_3 = '1 Bench'
SVC_NAME_4 = 'Rand 1 #3'
SVC_NAME_5 = SVC_NAME_C_ASYNC = "Async 1"
SVC_NAME_6 = 'randomwalk'
PLUGIN_FILTER = 'metadata'
FILTER_NAME = 'meta'
@pytest.fixture
| 39.372881 | 118 | 0.624709 | # -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge.readthedocs.io/
# FLEDGE_END
""" Test add service using poll and async plugins for both python & C version REST API """
import os
import http.client
import json
import time
from uuid import UUID
from collections import Counter
from urllib.parse import quote
import pytest
import plugin_and_service
__author__ = "Ashish Jabble"
__copyright__ = "Copyright (c) 2019 Dianomic Systems"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
# Names of the south services created/deleted during the tests below.
# Several contain spaces or '#' on purpose, to exercise URL quoting.
SVC_NAME_1 = 'Random Walk #1'
SVC_NAME_2 = 'HTTP-SOUTH'
SVC_NAME_3 = '1 Bench'
SVC_NAME_4 = 'Rand 1 #3'
SVC_NAME_5 = SVC_NAME_C_ASYNC = "Async 1"
SVC_NAME_6 = 'randomwalk'
# Filter plugin installed into pipelines and the name given to its instance.
PLUGIN_FILTER = 'metadata'
FILTER_NAME = 'meta'
@pytest.fixture
def install_plugins():
    """Install every south plugin (python and C) exercised by this module."""
    plugin_and_service.install('south', plugin='randomwalk')
    plugin_and_service.install('south', plugin='http')
    plugin_and_service.install('south', plugin='benchmark', plugin_lang='C')
    plugin_and_service.install('south', plugin='random', plugin_lang='C')
    plugin_and_service.install('south', plugin='csv-async', plugin_lang='C')
def get_service(fledge_url, path):
    """GET *path* from the Fledge REST API at *fledge_url*, assert the
    request succeeded, and return the decoded JSON payload."""
    connection = http.client.HTTPConnection(fledge_url)
    connection.request("GET", path)
    response = connection.getresponse()
    body = response.read().decode()
    assert 200 == response.status
    return json.loads(body)
class TestService:
    """System tests for the Fledge /fledge/service REST endpoints: default
    services, adding south services (poll and async, python and C), deleting
    them (with schedules and filter pipelines attached), and restart
    behaviour. All tests drive a live Fledge instance over HTTP."""
    def test_cleanup_and_setup(self, reset_and_start_fledge, install_plugins):
        # TODO: FOGL-2669 Better setup & teardown fixtures
        pass
    def test_default_service(self, fledge_url):
        """A fresh instance exposes exactly the Storage and Core services."""
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        # Only storage and core service is expected by default
        assert 2 == len(jdoc['services'])
        keys = {'address', 'service_port', 'type', 'status', 'name', 'management_port', 'protocol'}
        assert Counter(keys) == Counter(jdoc['services'][0].keys())
        storage_svc = jdoc['services'][0]
        assert isinstance(storage_svc['service_port'], int)
        assert isinstance(storage_svc['management_port'], int)
        assert 'running' == storage_svc['status']
        assert 'Storage' == storage_svc['type']
        assert 'localhost' == storage_svc['address']
        assert 'Fledge Storage' == storage_svc['name']
        assert 'http' == storage_svc['protocol']
        core_svc = jdoc['services'][1]
        assert isinstance(core_svc['management_port'], int)
        assert 8081 == core_svc['service_port']
        assert 'running' == core_svc['status']
        assert 'Core' == core_svc['type']
        assert '0.0.0.0' == core_svc['address']
        assert 'Fledge Core' == core_svc['name']
        assert 'http' == core_svc['protocol']
    # Config for the C async plugin: point it at the bundled sample CSV.
    C_ASYNC_CONFIG = {"file": {"value": os.getenv("FLEDGE_ROOT", "") + '/tests/system/python/data/vibration.csv'}}
    @pytest.mark.parametrize("plugin, svc_name, display_svc_name, config, enabled, svc_count", [
        ("randomwalk", SVC_NAME_1, SVC_NAME_1, None, True, 3),
        ("http_south", SVC_NAME_2, SVC_NAME_1, None, False, 3),
        ("Benchmark", SVC_NAME_3, SVC_NAME_3, None, True, 4),
        ("Random", SVC_NAME_4, SVC_NAME_3, None, False, 4),
        ("CSV-Async", SVC_NAME_C_ASYNC, SVC_NAME_C_ASYNC, C_ASYNC_CONFIG, True, 5)
    ])
    def test_add_service(self, fledge_url, wait_time, plugin, svc_name, display_svc_name, config, enabled, svc_count):
        """Add a south service; when enabled it must appear in the service
        list as a running Southbound service."""
        jdoc = plugin_and_service.add_south_service(plugin, fledge_url, svc_name, config, enabled)
        assert svc_name == jdoc['name']
        assert UUID(jdoc['id'], version=4)
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert svc_count == len(jdoc['services'])
        southbound_svc = jdoc['services'][svc_count - 1]
        assert isinstance(southbound_svc['management_port'], int)
        assert southbound_svc['service_port'] is None
        assert display_svc_name == southbound_svc['name']
        assert 'running' == southbound_svc['status']
        assert 'Southbound' == southbound_svc['type']
        assert 'localhost' == southbound_svc['address']
        assert 'http' == southbound_svc['protocol']
    def test_add_service_with_config(self, fledge_url, wait_time):
        """POST a service together with plugin config and verify the config
        values land in the service's category."""
        # add service with config param
        data = {"name": SVC_NAME_6,
                "type": "South",
                "plugin": 'randomwalk',
                "config": {"maxValue": {"value": "20"}, "assetName": {"value": "Random"}},
                "enabled": True
                }
        conn = http.client.HTTPConnection(fledge_url)
        conn.request("POST", '/fledge/service', json.dumps(data))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert SVC_NAME_6 == jdoc['name']
        assert UUID(jdoc['id'], version=4)
        # verify config is correctly saved
        conn.request("GET", '/fledge/category/{}'.format(SVC_NAME_6))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), "No data found"
        assert data['config']['assetName']['value'] == jdoc['assetName']['value']
        assert data['config']['maxValue']['value'] == jdoc['maxValue']['value']
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 6 == len(jdoc['services'])
        assert SVC_NAME_6 == jdoc['services'][5]['name']
    @pytest.mark.parametrize("svc_name, status, svc_count", [
        ("Fledge Storage", 404, 2),
        ("Fledge Core", 404, 2),
        (SVC_NAME_1, 200, 5),
        (SVC_NAME_2, 200, 5),
        (SVC_NAME_3, 200, 4)
    ])
    def test_delete_service(self, svc_name, status, svc_count, fledge_url, wait_time):
        """Delete a service; core/storage must be refused, and a deleted
        south service must disappear from services, categories and schedules."""
        conn = http.client.HTTPConnection(fledge_url)
        conn.request("DELETE", '/fledge/service/{}'.format(quote(svc_name)))
        res = conn.getresponse()
        assert status == res.status
        if status == 404:
            # FIXME: FOGL-2668 expected 403 for Core and Storage
            assert '{} service does not exist.'.format(svc_name) == res.reason
        else:
            r = res.read().decode()
            jdoc = json.loads(r)
            assert 'Service {} deleted successfully.'.format(svc_name) == jdoc['result']
            time.sleep(wait_time)
            jdoc = get_service(fledge_url, '/fledge/service')
            assert len(jdoc), "No data found"
            assert svc_count == len(jdoc['services'])
            services = [s['name'] for s in jdoc['services']]
            assert svc_name not in services
            # no category (including its children) exists anymore for serviceName
            conn = http.client.HTTPConnection(fledge_url)
            conn.request("GET", '/fledge/category/{}'.format(quote(svc_name)))
            res = conn.getresponse()
            r = res.read().decode()
            assert 404 == res.status
            conn.request("GET", '/fledge/category/{}/children'.format(quote(svc_name)))
            res = conn.getresponse()
            r = res.read().decode()
            assert 404 == res.status
            # no schedule exists anymore for serviceName
            conn.request("GET", '/fledge/schedule')
            res = conn.getresponse()
            r = res.read().decode()
            jdoc = json.loads(r)
            assert svc_name not in [s['name'] for s in jdoc["schedules"]]
            # TODO: verify FOGL-2718 no category interest exists anymore for serviceId in InterestRegistry
    def test_service_with_enable_schedule(self, fledge_url, wait_time, enable_schedule):
        """Enabling a disabled service's schedule brings it into the list."""
        enable_schedule(fledge_url, SVC_NAME_4)
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 5 == len(jdoc['services'])
        assert SVC_NAME_4 in [s['name'] for s in jdoc['services']]
    def test_service_with_disable_schedule(self, fledge_url, wait_time, disable_schedule):
        """Disabling a schedule leaves the service listed but shut down."""
        disable_schedule(fledge_url, SVC_NAME_4)
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 5 == len(jdoc['services'])
        assert (SVC_NAME_4, 'shutdown') in [(s['name'], s['status']) for s in jdoc['services']]
    def test_service_on_restart(self, fledge_url, wait_time):
        """After a restart, services with disabled schedules are not started."""
        conn = http.client.HTTPConnection(fledge_url)
        conn.request("PUT", '/fledge/restart')
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), "No data found"
        assert 'Fledge restart has been scheduled.' == jdoc['message']
        # Allow extra time for the full restart cycle.
        time.sleep(wait_time * 4)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 4 == len(jdoc['services'])
        services = [name['name'] for name in jdoc['services']]
        assert SVC_NAME_4 not in services
    def test_delete_service_with_filters(self, fledge_url, wait_time, add_filter, filter_branch, enable_schedule):
        """Deleting a service with an attached filter must succeed, and the
        filter must remain reusable in another service's pipeline."""
        # add filter
        add_filter(PLUGIN_FILTER, filter_branch, FILTER_NAME, {"enable": "true"}, fledge_url, SVC_NAME_6)
        # delete service
        conn = http.client.HTTPConnection(fledge_url)
        conn.request("DELETE", '/fledge/service/{}'.format(SVC_NAME_6))
        r = conn.getresponse()
        r = r.read().decode()
        jdoc = json.loads(r)
        assert 'Service {} deleted successfully.'.format(SVC_NAME_6) == jdoc['result']
        # verify service does not exist
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 3 == len(jdoc['services'])
        services = [name['name'] for name in jdoc['services']]
        assert SVC_NAME_6 not in services
        # filter linked with SVC_NAME_4
        data = {"pipeline": [FILTER_NAME]}
        conn.request("PUT", '/fledge/filter/{}/pipeline?allow_duplicates=true&append_filter=true'
                     .format(quote(SVC_NAME_4)), json.dumps(data))
        r = conn.getresponse()
        r = r.read().decode()
        jdoc = json.loads(r)
        assert "Filter pipeline {{'pipeline': ['{}']}} updated successfully".format(FILTER_NAME) == jdoc['result']
        # enable SVC_NAME_4 schedule
        enable_schedule(fledge_url, SVC_NAME_4)
        # verify SVC_NAME_4 exist
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 4 == len(jdoc['services'])
        services = [s['name'] for s in jdoc['services']]
        assert SVC_NAME_4 in services
        # delete SVC_NAME_4
        conn.request("DELETE", '/fledge/service/{}'.format(quote(SVC_NAME_4)))
        r = conn.getresponse()
        r = r.read().decode()
        jdoc = json.loads(r)
        assert 'Service {} deleted successfully.'.format(SVC_NAME_4) == jdoc['result']
        # verify SVC_NAME_4 does not exist anymore
        time.sleep(wait_time)
        jdoc = get_service(fledge_url, '/fledge/service')
        assert len(jdoc), "No data found"
        assert 3 == len(jdoc['services'])
        services = [s['name'] for s in jdoc['services']]
        assert SVC_NAME_4 not in services
    def test_notification_service(self):
        assert 1, "Already verified in test_e2e_notification_service_with_plugins.py"
| 9,756 | 1,046 | 68 |
424d8efd24e564601ab5d62dad56f9fc790093c6 | 201 | py | Python | apps/aggregate/stores/serializers.py | KimSoungRyoul/DjangoBackendProgramming | b346e04ce85388beba788bd3e6a405635ed913dc | [
"MIT"
] | null | null | null | apps/aggregate/stores/serializers.py | KimSoungRyoul/DjangoBackendProgramming | b346e04ce85388beba788bd3e6a405635ed913dc | [
"MIT"
] | null | null | null | apps/aggregate/stores/serializers.py | KimSoungRyoul/DjangoBackendProgramming | b346e04ce85388beba788bd3e6a405635ed913dc | [
"MIT"
] | null | null | null | from rest_framework import serializers
from aggregate.stores.models import Store
| 20.1 | 51 | 0.741294 | from rest_framework import serializers
from aggregate.stores.models import Store
class StoreSerializer(serializers.ModelSerializer):
    """DRF model serializer exposing every field of the Store model."""
    class Meta:
        model = Store
        fields = "__all__"
| 0 | 95 | 23 |
71c5edd92cbebdc52ea8a994e7bdfb165cfbd92a | 727 | py | Python | metricbeat/module/couchbase/test_couchbase.py | phillip2019/beats | c825258c3dc62f05e5feefa9956befa7fb47fa68 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-06-24T21:20:14.000Z | 2021-06-24T21:20:14.000Z | metricbeat/module/couchbase/test_couchbase.py | michaelolo24/beats | cf46a2a12fbe879562ab56ee1f9c33d6bb4ac40d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | metricbeat/module/couchbase/test_couchbase.py | michaelolo24/beats | cf46a2a12fbe879562ab56ee1f9c33d6bb4ac40d | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-10-26T15:34:06.000Z | 2021-12-10T08:51:58.000Z | import os
import sys
import unittest
from parameterized import parameterized
sys.path.append(os.path.join(os.path.dirname(__file__), '../../tests/system'))
import metricbeat
| 25.068966 | 83 | 0.657497 | import os
import sys
import unittest
from parameterized import parameterized
sys.path.append(os.path.join(os.path.dirname(__file__), '../../tests/system'))
import metricbeat
class Test(metricbeat.BaseTest):
    """Integration tests for the Metricbeat couchbase module: each listed
    metricset is checked against a live couchbase instance."""
    COMPOSE_SERVICES = ['couchbase']
    FIELDS = ['couchbase']
    # NOTE(review): the parenthesised values below are plain strings, not
    # one-element tuples (no trailing comma); parameterized accepts both,
    # so behaviour is unchanged.
    @parameterized.expand([
        ("bucket"),
        ("cluster"),
        ("node"),
    ])
    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    def test_couchbase(self, metricset):
        """
        couchbase metricsets tests
        """
        self.check_metricset("couchbase", metricset, self.get_hosts(), self.FIELDS)
    def get_hosts(self):
        """Return the couchbase URL using the default test credentials."""
        return ["http://Administrator:password@" + self.compose_host()]
d039b9142821aa0cafb9f36af8ed2e45442dee5f | 1,834 | py | Python | odScrape.py | willkim28/openDota | 90f8543be2aa6392d3016003827d825fca6dc92b | [
"MIT"
] | null | null | null | odScrape.py | willkim28/openDota | 90f8543be2aa6392d3016003827d825fca6dc92b | [
"MIT"
] | null | null | null | odScrape.py | willkim28/openDota | 90f8543be2aa6392d3016003827d825fca6dc92b | [
"MIT"
] | null | null | null | import requests
import time
import json
# Base URL of the public OpenDota REST API.
url = 'https://api.opendota.com/api/'
# Players to scrape: display name plus OpenDota account id ('pid').
gang = {
    'will': {
        'name': 'Will',
        'pid': '67798385'
    },
    'pat': {
        'name': 'Pat',
        'pid': '52147853'
    },
    'james': {
        'name': 'James',
        'pid': '84941438'
    },
    'tibi': {
        'name': 'Tibi',
        'pid': '72600614'
    }
}
# The triple-quoted blocks below are earlier pipeline stages, kept disabled;
# each stage reads the previous stage's output files under data/.
'''
# Player matches
for pl in gang:
    pm = requests.get(url + 'players/' + gang[pl].get('pid') + '/matches')
    with open('data/%s.json' % pl, 'w') as f:
        f.write(pm.text)
    time.sleep(1)
'''
'''
# Player match info
for pl in gang:
    input_file = open('data/%s.json' % pl, 'r')
    json_array = json.load(input_file)
    match_list = []
    for item in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = item['match_id']
        match_list.append(matchId)
    with open('data/%sMatches.json' % pl, 'w') as f:
        json.dump(match_list, f)
'''
'''
# Filtered All Matches
for pl in gang:
    input_file = open('data/%sMatches.json' % pl, 'r')
    json_array = json.load(input_file)
    all_matches = []
    for match in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = match['match_id']
        all_matches.append(matchId)
    with open('data/allMatches.json', 'w') as f:
        json.dump(all_matches, f)
'''
# Match GET
# Active stage: download full details for every collected match id and
# write one JSON file per match under data/matches/.  The sleep between
# requests is presumably for API rate limiting -- confirm the limit.
with open('data/allMatches.json', 'r') as f:
    json_array = json.load(f)
    for match in json_array:
        matchId = {"mid":None}
        matchId['mid'] = str(match['match_id'])
        am = requests.get(url + 'matches/' + matchId['mid'])
        with open('data/matches/%s.json' % matchId['mid'], 'w', encoding="utf8") as f:
            f.write(am.text)
time.sleep(1) | 22.641975 | 87 | 0.52181 | import requests
import time
import json
# Base URL of the public OpenDota REST API.
url = 'https://api.opendota.com/api/'
# Players to scrape: display name plus OpenDota account id ('pid').
gang = {
    'will': {
        'name': 'Will',
        'pid': '67798385'
    },
    'pat': {
        'name': 'Pat',
        'pid': '52147853'
    },
    'james': {
        'name': 'James',
        'pid': '84941438'
    },
    'tibi': {
        'name': 'Tibi',
        'pid': '72600614'
    }
}
# The triple-quoted blocks below are earlier pipeline stages, kept disabled;
# each stage reads the previous stage's output files under data/.
'''
# Player matches
for pl in gang:
    pm = requests.get(url + 'players/' + gang[pl].get('pid') + '/matches')
    with open('data/%s.json' % pl, 'w') as f:
        f.write(pm.text)
    time.sleep(1)
'''
'''
# Player match info
for pl in gang:
    input_file = open('data/%s.json' % pl, 'r')
    json_array = json.load(input_file)
    match_list = []
    for item in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = item['match_id']
        match_list.append(matchId)
    with open('data/%sMatches.json' % pl, 'w') as f:
        json.dump(match_list, f)
'''
'''
# Filtered All Matches
for pl in gang:
    input_file = open('data/%sMatches.json' % pl, 'r')
    json_array = json.load(input_file)
    all_matches = []
    for match in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = match['match_id']
        all_matches.append(matchId)
    with open('data/allMatches.json', 'w') as f:
        json.dump(all_matches, f)
'''
# Match GET
# Active stage: download full details for every collected match id and
# write one JSON file per match under data/matches/.  The sleep between
# requests is presumably for API rate limiting -- confirm the limit.
with open('data/allMatches.json', 'r') as f:
    json_array = json.load(f)
    for match in json_array:
        matchId = {"mid":None}
        matchId['mid'] = str(match['match_id'])
        am = requests.get(url + 'matches/' + matchId['mid'])
        with open('data/matches/%s.json' % matchId['mid'], 'w', encoding="utf8") as f:
            f.write(am.text)
        time.sleep(1)
22b7392e8aa851501042abde6b644a775d931b00 | 3,364 | py | Python | robotarium_node/robotarium_node/utilities/coordinates.py | zmk5/ros2_robotarium | 6fc31bb6d3cdd89cf0b0b063d674897287ec1f8d | [
"MIT"
] | 22 | 2020-08-16T21:40:17.000Z | 2022-03-10T15:54:55.000Z | robotarium_node/robotarium_node/utilities/coordinates.py | zmk5/ros2_robotarium | 6fc31bb6d3cdd89cf0b0b063d674897287ec1f8d | [
"MIT"
] | null | null | null | robotarium_node/robotarium_node/utilities/coordinates.py | zmk5/ros2_robotarium | 6fc31bb6d3cdd89cf0b0b063d674897287ec1f8d | [
"MIT"
] | 1 | 2020-08-26T09:06:12.000Z | 2020-08-26T09:06:12.000Z | """A series of conversion tools for Quaternions and Euler Angles.
These functions are modified versions of the algorithms found here:
https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
Written by: Zahi Kakish (zmk5)
"""
from typing import Tuple
import numpy as np
def quaternion_to_roll(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Roll Euler angle (rotation about x, radians)."""
    # atan2 of (sin(roll)*cos(pitch), cos(roll)*cos(pitch)) recovers roll
    # over the full (-pi, pi] range.
    return np.arctan2(2.0 * (w * x + y * z), 1.0 - 2.0 * (x * x + y * y))
def quaternion_to_pitch(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Pitch Euler angle (rotation about y, radians)."""
    sine_of_pitch = 2 * (w * y - z * x)
    if np.abs(sine_of_pitch) < 1.0:
        return np.arcsin(sine_of_pitch)
    # Outside arcsin's domain (gimbal lock): clamp to +/- 90 degrees.
    return np.copysign(np.pi / 2, sine_of_pitch)
def quaternion_to_yaw(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Yaw Euler angle (rotation about z, radians)."""
    # atan2 of (sin(yaw)*cos(pitch), cos(yaw)*cos(pitch)) recovers yaw
    # over the full (-pi, pi] range.
    return np.arctan2(2 * (w * z + x * y), 1 - 2 * (y * y + z * z))
def quaternion_to_euler(
        x: float,
        y: float,
        z: float,
        w: float) -> Tuple[float, float, float]:
    """Convert Quaternion to (roll, pitch, yaw) Euler angles in radians."""
    roll = quaternion_to_roll(x, y, z, w)
    pitch = quaternion_to_pitch(x, y, z, w)
    yaw = quaternion_to_yaw(x, y, z, w)
    return (roll, pitch, yaw)
def roll_to_quaternion(roll: float) -> Tuple[float, float, float, float]:
    """Convert only a Roll Euler angle to its respective Quaternion values.

    With pitch = yaw = 0 the general Euler-to-quaternion formula collapses
    to a pure rotation about the x axis, so the quaternion is simply
    (sin(roll/2), 0, 0, cos(roll/2)), returned in (x, y, z, w) order.
    """
    half_roll = roll * 0.5
    return (np.sin(half_roll), 0.0, 0.0, np.cos(half_roll))
def pitch_to_quaternion(pitch: float) -> Tuple[float, float, float, float]:
    """Convert only a Pitch Euler angle to its respective Quaternion values.

    With roll = yaw = 0 the general Euler-to-quaternion formula collapses
    to a pure rotation about the y axis, so the quaternion is simply
    (0, sin(pitch/2), 0, cos(pitch/2)), returned in (x, y, z, w) order.
    """
    half_pitch = pitch * 0.5
    return (0.0, np.sin(half_pitch), 0.0, np.cos(half_pitch))
def yaw_to_quaternion(yaw: float) -> Tuple[float, float, float, float]:
    """Convert only a Yaw Euler angle to its respective Quaternion values.

    With roll = pitch = 0 the general Euler-to-quaternion formula collapses
    to a pure rotation about the z axis, so the quaternion is simply
    (0, 0, sin(yaw/2), cos(yaw/2)), returned in (x, y, z, w) order.
    """
    half_yaw = yaw * 0.5
    return (0.0, 0.0, np.sin(half_yaw), np.cos(half_yaw))
def euler_to_quaternion(
        roll: float,
        pitch: float,
        yaw: float) -> Tuple[float, float, float, float]:
    """Convert Euler angles (radians) to a Quaternion in (x, y, z, w) order."""
    # Half-angle sines/cosines of each axis rotation.
    cr, sr = np.cos(roll * 0.5), np.sin(roll * 0.5)
    cp, sp = np.cos(pitch * 0.5), np.sin(pitch * 0.5)
    cy, sy = np.cos(yaw * 0.5), np.sin(yaw * 0.5)
    qx = sr * cp * cy - cr * sp * sy
    qy = cr * sp * cy + sr * cp * sy
    qz = cr * cp * sy - sr * sp * cy
    qw = cr * cp * cy + sr * sp * sy
    return (qx, qy, qz, qw)
| 28.268908 | 79 | 0.54786 | """A series of conversion tools for Quaternions and Euler Angles.
These functions are modified versions of the algorithms found here:
https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
Written by: Zahi Kakish (zmk5)
"""
from typing import Tuple
import numpy as np
def quaternion_to_roll(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Roll Euler angle (rotation about x, radians)."""
    # atan2 keeps the result in the full (-pi, pi] range.
    sinr_cosp = 2.0 * (w * x + y * z)
    cosr_cosp = 1.0 - 2.0 * (x * x + y * y)
    return np.arctan2(sinr_cosp, cosr_cosp)
def quaternion_to_pitch(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Pitch Euler angle (rotation about y, radians)."""
    sinp = 2 * (w * y - z * x)
    # arcsin is undefined for |sinp| > 1; clamp to +/- 90 deg (gimbal lock).
    if np.abs(sinp) >= 1.0:
        return np.copysign(np.pi / 2, sinp)
    return np.arcsin(sinp)
def quaternion_to_yaw(x: float, y: float, z: float, w: float) -> float:
    """Convert Quaternion to Yaw Euler angle (rotation about z, radians)."""
    # atan2 keeps the result in the full (-pi, pi] range.
    siny_cosp = 2 * (w * z + x * y)
    cosy_cosp = 1 - 2 * (y * y + z * z)
    return np.arctan2(siny_cosp, cosy_cosp)
def quaternion_to_euler(
        x: float,
        y: float,
        z: float,
        w: float) -> Tuple[float, float, float]:
    """Convert Quaternion to Euler angles, returned as (roll, pitch, yaw)."""
    return (
        quaternion_to_roll(x, y, z, w),
        quaternion_to_pitch(x, y, z, w),
        quaternion_to_yaw(x, y, z, w)
    )
def roll_to_quaternion(roll: float) -> Tuple[float, float, float, float]:
    """Convert only a Roll Euler angle to its respective Quaternion values,
    returned in (x, y, z, w) order."""
    c_r = np.cos(roll * 0.5)
    s_r = np.sin(roll * 0.5)
    # Pitch and yaw are fixed at identity (cos=1, sin=0), so only the roll
    # terms of the general formula below survive.
    c_p = 1.0
    s_p = 0.0
    c_y = 1.0
    s_y = 0.0
    return (
        s_r * c_p * c_y - c_r * s_p * s_y, # x
        c_r * s_p * c_y + s_r * c_p * s_y, # y
        c_r * c_p * s_y - s_r * s_p * c_y, # z
        c_r * c_p * c_y + s_r * s_p * s_y, # w
    )
def pitch_to_quaternion(pitch: float) -> Tuple[float, float, float, float]:
    """Convert only a Pitch Euler angle to its respective Quaternion values,
    returned in (x, y, z, w) order."""
    # Roll and yaw are fixed at identity (cos=1, sin=0), so only the pitch
    # terms of the general formula below survive.
    c_r = 1.0
    s_r = 0.0
    c_p = np.cos(pitch * 0.5)
    s_p = np.sin(pitch * 0.5)
    c_y = 1.0
    s_y = 0.0
    return (
        s_r * c_p * c_y - c_r * s_p * s_y, # x
        c_r * s_p * c_y + s_r * c_p * s_y, # y
        c_r * c_p * s_y - s_r * s_p * c_y, # z
        c_r * c_p * c_y + s_r * s_p * s_y, # w
    )
def yaw_to_quaternion(yaw: float) -> Tuple[float, float, float, float]:
    """Convert only a Yaw Euler angle to its respective Quaternion values,
    returned in (x, y, z, w) order."""
    # Roll and pitch are fixed at identity (cos=1, sin=0), so only the yaw
    # terms of the general formula below survive.
    c_r = 1.0
    s_r = 0.0
    c_p = 1.0
    s_p = 0.0
    c_y = np.cos(yaw * 0.5)
    s_y = np.sin(yaw * 0.5)
    return (
        s_r * c_p * c_y - c_r * s_p * s_y, # x
        c_r * s_p * c_y + s_r * c_p * s_y, # y
        c_r * c_p * s_y - s_r * s_p * c_y, # z
        c_r * c_p * c_y + s_r * s_p * s_y, # w
    )
def euler_to_quaternion(
        roll: float,
        pitch: float,
        yaw: float) -> Tuple[float, float, float, float]:
    """Convert Euler angles (radians) to a Quaternion in (x, y, z, w) order."""
    # Half-angle sines/cosines of each axis rotation.
    c_r = np.cos(roll * 0.5)
    s_r = np.sin(roll * 0.5)
    c_p = np.cos(pitch * 0.5)
    s_p = np.sin(pitch * 0.5)
    c_y = np.cos(yaw * 0.5)
    s_y = np.sin(yaw * 0.5)
    return (
        s_r * c_p * c_y - c_r * s_p * s_y, # x
        c_r * s_p * c_y + s_r * c_p * s_y, # y
        c_r * c_p * s_y - s_r * s_p * c_y, # z
        c_r * c_p * c_y + s_r * s_p * s_y, # w
    )
| 0 | 0 | 0 |
60162f2a3bc07b7804ae4d7b5cfdf462e1b196d6 | 12,649 | py | Python | tests/UnitTestBase.py | raj-TIP/wlan-testing | 470e85eb52a8ff6b7e9cacab8fcdf73ccc4b1733 | [
"BSD-3-Clause"
] | null | null | null | tests/UnitTestBase.py | raj-TIP/wlan-testing | 470e85eb52a8ff6b7e9cacab8fcdf73ccc4b1733 | [
"BSD-3-Clause"
] | null | null | null | tests/UnitTestBase.py | raj-TIP/wlan-testing | 470e85eb52a8ff6b7e9cacab8fcdf73ccc4b1733 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python3
import sys
# Refuse to run under python 2.
if sys.version_info[0] != 3:
    print("This script requires Python 3")
    exit(1)
# NOTE(review): 'sys' is re-imported here although it is already imported
# above -- harmless but redundant.
import sys
# Make the lanforge helper scripts and the sibling libs importable.
for folder in 'py-json', 'py-scripts':
    if folder not in sys.path:
        sys.path.append(f'../lanforge/lanforge-scripts/{folder}')
sys.path.append(f'../libs/lanforge')
sys.path.append(f'../libs/testrails')
sys.path.append(f'../libs/apnos')
sys.path.append(f'../libs/cloudsdk')
sys.path.append(f'../libs')
sys.path.append(f'../tests/test_utility/')
import base64
import urllib.request
from bs4 import BeautifulSoup
import ssl
import subprocess, os
from artifactory import ArtifactoryPath
import tarfile
import paramiko
from paramiko import SSHClient
from scp import SCPClient
import os
import pexpect
from pexpect import pxssh
import paramiko
from scp import SCPClient
import pprint
from pprint import pprint
from os import listdir
import re
import requests
import json
import logging
import datetime
import time
from datetime import date
from shutil import copyfile
import argparse
from unittest.mock import Mock
from lf_tests import *
from ap_plus_sdk import *
from lab_ap_info import *
from JfrogHelper import *
from reporting import Reporting
# For finding files
# https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
import glob
# external_results_dir=/var/tmp/lanforge
# To run this from your home system to NOLA-01 testbed, use this command. This assumes you have set up an ssh tunnel
# logged to the cicd jumphost that can reach the lab. In separate console to set up the ssh tunnel: ssh -C -L
# 7220:lab-ctlr:22 ubuntu@3.130.51.163 On local machine:
# ./query_ssids.py --testrail-user-id NONE --model ecw5410
# --ap-jumphost-address localhost --ap-jumphost-port 7220 --ap-jumphost-password secret --ap-jumphost-tty /dev/ttyAP1
import testrail_api
from LANforge.LFUtils import *
# if you lack __init__.py in this directory you will not find sta_connect module#
import sta_connect2
from sta_connect2 import StaConnect2
import testrail_api
from testrail_api import TestRail_Client
import eap_connect
from eap_connect import EAPConnect
import cloudsdk
from cloudsdk import CloudSDK
from cloudsdk import CreateAPProfiles
import ap_ssh
from ap_ssh import *
# Import info for lab setup and APs under test
import lab_ap_info
from lab_ap_info import cloud_sdk_models
from lab_ap_info import ap_models
from lab_ap_info import customer_id
from lab_ap_info import cloud_type
from lab_ap_info import test_cases
from lab_ap_info import radius_info
| 49.027132 | 120 | 0.627085 | #!/usr/bin/python3
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
import sys
for folder in 'py-json', 'py-scripts':
if folder not in sys.path:
sys.path.append(f'../lanforge/lanforge-scripts/{folder}')
sys.path.append(f'../libs/lanforge')
sys.path.append(f'../libs/testrails')
sys.path.append(f'../libs/apnos')
sys.path.append(f'../libs/cloudsdk')
sys.path.append(f'../libs')
sys.path.append(f'../tests/test_utility/')
import base64
import urllib.request
from bs4 import BeautifulSoup
import ssl
import subprocess, os
from artifactory import ArtifactoryPath
import tarfile
import paramiko
from paramiko import SSHClient
from scp import SCPClient
import os
import pexpect
from pexpect import pxssh
import paramiko
from scp import SCPClient
import pprint
from pprint import pprint
from os import listdir
import re
import requests
import json
import logging
import datetime
import time
from datetime import date
from shutil import copyfile
import argparse
from unittest.mock import Mock
from lf_tests import *
from ap_plus_sdk import *
from lab_ap_info import *
from JfrogHelper import *
from reporting import Reporting
# For finding files
# https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
import glob
# external_results_dir=/var/tmp/lanforge
# To run this from your home system to NOLA-01 testbed, use this command. This assumes you have set up an ssh tunnel
# logged to the cicd jumphost that can reach the lab. In separate console to set up the ssh tunnel: ssh -C -L
# 7220:lab-ctlr:22 ubuntu@3.130.51.163 On local machine:
# ./query_ssids.py --testrail-user-id NONE --model ecw5410
# --ap-jumphost-address localhost --ap-jumphost-port 7220 --ap-jumphost-password secret --ap-jumphost-tty /dev/ttyAP1
import testrail_api
from LANforge.LFUtils import *
# if you lack __init__.py in this directory you will not find sta_connect module#
import sta_connect2
from sta_connect2 import StaConnect2
import testrail_api
from testrail_api import TestRail_Client
import eap_connect
from eap_connect import EAPConnect
import cloudsdk
from cloudsdk import CloudSDK
from cloudsdk import CreateAPProfiles
import ap_ssh
from ap_ssh import *
# Import info for lab setup and APs under test
import lab_ap_info
from lab_ap_info import cloud_sdk_models
from lab_ap_info import ap_models
from lab_ap_info import customer_id
from lab_ap_info import cloud_type
from lab_ap_info import test_cases
from lab_ap_info import radius_info
class UnitTestBase:
    """Shared wiring for the sanity-test suites.

    Builds the command-line parser (firmware build, cloud-sdk, TestRail,
    JFrog, LANforge, AP/jumphost options), configures per-run file logging,
    and opens an authenticated CloudSDK session.
    """

    def __init__(self, log_name, args, reporting):
        """
        :param log_name: name for this test run's logger
        :param args: parent argparse.ArgumentParser whose options are inherited
        :param reporting: Reporting object; test_run.log is written under its report_path
        """
        self.parser = argparse.ArgumentParser(description="Sanity Testing on Firmware Build", parents=[args])
        # ---- firmware / device-under-test options ----
        self.parser.add_argument("-b", "--build-id", type=str,
                                 help="FW commit ID (latest pending build on dev is default)",
                                 default="pending")
        self.parser.add_argument("--skip-upgrade", type=bool, help="Skip upgrading firmware",
                                 default=False)
        self.parser.add_argument("--force-upgrade", type=bool,
                                 help="Force upgrading firmware even if it is already current version",
                                 default=False)
        self.parser.add_argument("-m", "--model", type=str,
                                 choices=['ea8300', 'ecw5410', 'ecw5211', 'ec420', 'wf188n', 'eap102', 'None'],
                                 help="AP model to be run", required=True)
        self.parser.add_argument("--equipment_id", type=str,
                                 help="AP model ID, as exists in the cloud-sdk. -1 to auto-detect.",
                                 default="-1")
        self.parser.add_argument("--object_id", type=str,
                                 help="Used when querying and deleting individual objects.",
                                 default=None)
        self.parser.add_argument("--customer-id", type=str,
                                 help="Specify cloud customer-id, default is 2",
                                 default="2")
        self.parser.add_argument("--testbed", type=str,
                                 help="Testbed name, will be prefixed to profile names and similar",
                                 default=None)
        # ---- cloud-sdk options ----
        # NOTE(review): help text says "support@example.conf" while the default
        # is "support@example.com" -- likely a typo in the help string.
        self.parser.add_argument("--sdk-base-url", type=str,
                                 help="cloudsdk base url, default: https://wlan-portal-svc.cicd.lab.wlan.tip.build",
                                 default="https://wlan-portal-svc.cicd.lab.wlan.tip.build")
        self.parser.add_argument("--sdk-user-id", type=str, help="cloudsdk user id, default: support@example.conf",
                                 default="support@example.com")
        self.parser.add_argument("--sdk-user-password", type=str, help="cloudsdk user password, default: support",
                                 default="support")
        # ---- JFrog (firmware artifact repository) options ----
        self.parser.add_argument("--jfrog-base-url", type=str, help="jfrog base url",
                                 default="tip.jFrog.io/artifactory/tip-wlan-ap-firmware")
        self.parser.add_argument("--jfrog-user-id", type=str, help="jfrog user id",
                                 default="tip-read")
        self.parser.add_argument("--jfrog-user-password", type=str, help="jfrog user password",
                                 default="tip-read")
        # ---- TestRail reporting options ----
        self.parser.add_argument("--testrail-base-url", type=str, help="testrail base url",
                                 # was os.getenv('TESTRAIL_URL')
                                 default="https://telecominfraproject.testrail.com")
        self.parser.add_argument("--testrail-project", type=str, help="testrail project name",
                                 default="opsfleet-wlan")
        self.parser.add_argument("--testrail-user-id", type=str,
                                 help="testrail user id. Use 'NONE' to disable use of testrails.",
                                 default="gleb@opsfleet.com")
        self.parser.add_argument("--testrail-user-password", type=str, help="testrail user password",
                                 default="password")
        self.parser.add_argument("--testrail-run-prefix", type=str, help="testrail run prefix",
                                 default="prefix-1")
        self.parser.add_argument("--milestone", type=str, help="testrail milestone ID",
                                 default="milestone-1")
        # ---- LANforge traffic-generator options ----
        self.parser.add_argument("--lanforge-ip-address", type=str, help="ip address of the lanforge gui",
                                 default="127.0.0.1")
        self.parser.add_argument("--lanforge-port-number", type=str, help="port of the lanforge gui",
                                 default="8080")
        self.parser.add_argument("--lanforge-prefix", type=str, help="LANforge api prefix string",
                                 default="sdk")
        self.parser.add_argument("--lanforge-2g-radio", type=str, help="LANforge 2Ghz radio to use for testing",
                                 default="1.1.wiphy0")
        self.parser.add_argument("--lanforge-5g-radio", type=str, help="LANforge 5Ghz radio to use for testing",
                                 default="1.1.wiphy1")
        # ---- local logging / report output options ----
        self.parser.add_argument("--local_dir", type=str, help="Sanity logging directory",
                                 default="logs")
        self.parser.add_argument("--report-path", type=str, help="Sanity report directory",
                                 default="reports")
        self.parser.add_argument("--report-template", type=str, help="Sanity report template",
                                 default="reports/report_template.php")
        # ---- EAP / 802.1x credentials ----
        self.parser.add_argument("--eap-id", type=str, help="EAP indentity",
                                 default="lanforge")
        self.parser.add_argument("--ttls-password", type=str, help="TTLS password",
                                 default="lanforge")
        # ---- AP access (direct ssh or via serial-console jumphost) ----
        self.parser.add_argument("--ap-ip", type=str, help="AP IP Address, for direct ssh access if not using jumphost",
                                 default="127.0.0.1")
        self.parser.add_argument("--ap-username", type=str, help="AP username",
                                 default="root")
        self.parser.add_argument("--ap-password", type=str, help="AP password",
                                 default="root")
        self.parser.add_argument("--ap-jumphost-address", type=str,
                                 help="IP of system that we can ssh in to get serial console access to AP",
                                 default=None)
        self.parser.add_argument("--ap-jumphost-port", type=str,
                                 help="SSH port to use in case we are using ssh tunneling or other non-standard ports",
                                 default="22")
        self.parser.add_argument("--ap-jumphost-username", type=str,
                                 help="User-ID for system that we can ssh in to get serial console access to AP",
                                 default="lanforge")
        self.parser.add_argument("--ap-jumphost-password", type=str,
                                 help="Passwort for system that we can ssh in to get serial console access to AP",
                                 default="lanforge")
        self.parser.add_argument("--ap-jumphost-wlan-testing", type=str, help="wlan-testing repo dir on the jumphost",
                                 default="git/wlan-testing")
        self.parser.add_argument("--ap-jumphost-tty", type=str, help="Serial port for the AP we wish to talk to",
                                 default="UNCONFIGURED-JUMPHOST-TTY")
        # Boolean flags with explicit defaults set via set_defaults().
        self.parser.add_argument('--skip-update-firmware', dest='update_firmware', action='store_false')
        self.parser.set_defaults(update_firmware=True)
        self.parser.add_argument('--verbose', dest='verbose', action='store_true')
        self.parser.set_defaults(verbose=False)
        self.command_line_args = self.parser.parse_args()
        # cmd line takes precedence over env-vars.
        self.cloudSDK_url = self.command_line_args.sdk_base_url  # was os.getenv('CLOUD_SDK_URL')
        self.local_dir = self.command_line_args.local_dir  # was os.getenv('SANITY_LOG_DIR')
        self.report_path = self.command_line_args.report_path  # was os.getenv('SANITY_REPORT_DIR')
        self.report_template = self.command_line_args.report_template  # was os.getenv('REPORT_TEMPLATE')
        ## TestRail Information
        self.tr_user = self.command_line_args.testrail_user_id  # was os.getenv('TR_USER')
        self.tr_pw = self.command_line_args.testrail_user_password  # was os.getenv('TR_PWD')
        self.milestoneId = self.command_line_args.milestone  # was os.getenv('MILESTONE')
        self.projectId = self.command_line_args.testrail_project  # was os.getenv('PROJECT_ID')
        self.testRunPrefix = self.command_line_args.testrail_run_prefix  # os.getenv('TEST_RUN_PREFIX')
        ## Jfrog credentials
        self.jfrog_user = self.command_line_args.jfrog_user_id  # was os.getenv('JFROG_USER')
        self.jfrog_pwd = self.command_line_args.jfrog_user_password  # was os.getenv('JFROG_PWD')
        ## EAP Credentials
        self.identity = self.command_line_args.eap_id  # was os.getenv('EAP_IDENTITY')
        self.ttls_password = self.command_line_args.ttls_password  # was os.getenv('EAP_PWD')
        ## AP Credentials
        self.ap_username = self.command_line_args.ap_username  # was os.getenv('AP_USER')
        ## LANForge Information
        self.lanforge_ip = self.command_line_args.lanforge_ip_address
        self.lanforge_port = self.command_line_args.lanforge_port_number
        self.lanforge_prefix = self.command_line_args.lanforge_prefix
        self.build = self.command_line_args.build_id
        # Per-run file logger: one log file under the reporting directory.
        self.logger = logging.getLogger(log_name)
        self.hdlr = logging.FileHandler(reporting.report_path + "/test_run.log")
        self.formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        self.hdlr.setFormatter(self.formatter)
        self.logger.addHandler(self.hdlr)
        self.logger.setLevel(logging.INFO)
        #### Use variables other than defaults for running tests on custom FW etc
        self.model_id = self.command_line_args.model
        self.equipment_id = self.command_line_args.equipment_id
        ### Get Cloud Bearer Token
        self.cloud: CloudSDK = CloudSDK(self.command_line_args)
        self.bearer = self.cloud.get_bearer(self.cloudSDK_url, cloud_type)
        self.customer_id = self.command_line_args.customer_id
| 10,082 | -2 | 50 |
9677e8b1e3d2314707fc9d6edfb084e573607f21 | 1,064 | py | Python | exerciciosEntrega/exercicioEntrega07.py | igorprati/python_modulo01_entrega | ba35181159c8f7c0916eaea431c591666977f16a | [
"MIT"
] | null | null | null | exerciciosEntrega/exercicioEntrega07.py | igorprati/python_modulo01_entrega | ba35181159c8f7c0916eaea431c591666977f16a | [
"MIT"
] | null | null | null | exerciciosEntrega/exercicioEntrega07.py | igorprati/python_modulo01_entrega | ba35181159c8f7c0916eaea431c591666977f16a | [
"MIT"
] | null | null | null | #07 - Crie um programa onde o usuário possa digitar sete valores numéricos e cadastre-os em uma lista única que mantenha separados os valores pares e ímpares. No final, mostre os valores pares e ímpares em ordem crescente.
numeros = list()  # master list that will hold both groups
listaPar = list()  # even values only
listaImpar = list()  # odd values only
for i in range(0,7):  # read exactly seven values
    num = int(input('Digite um número: '))  # read one integer from the user
    if num % 2 == 0:  # even (divisible by 2) -> even list
        listaPar.append(num)
    else:  # otherwise the number is odd -> odd list
        listaImpar.append(num)
# After the loop, store the even and odd lists inside the master list.
numeros.append(listaPar)
numeros.append(listaImpar)
print(f'\nO valores pares são: {sorted(listaPar)}')
print(f'O valores impares são: {sorted(listaImpar)}\n') | 53.2 | 222 | 0.734023 | #07 - Crie um programa onde o usuário possa digitar sete valores numéricos e cadastre-os em uma lista única que mantenha separados os valores pares e ímpares. No final, mostre os valores pares e ímpares em ordem crescente.
# Exercise 07: read seven numeric values into a single list that keeps even
# and odd values separated, then print both groups in ascending order.
# Refactored: the partition logic is a pure, testable function and the
# interactive part runs under a __main__ guard; running the script behaves
# exactly as before.


def particionar_pares_impares(valores):
    """Split *valores* into ``(pares, impares)`` lists, preserving input order.

    :param valores: iterable of integers
    :return: tuple (even values, odd values)
    """
    pares, impares = [], []
    for valor in valores:
        # valor % 2 == 0 means even; negative ints also work with %.
        (pares if valor % 2 == 0 else impares).append(valor)
    return pares, impares


def main():
    """Prompt for seven integers and print the even/odd groups sorted."""
    entradas = [int(input('Digite um número: ')) for _ in range(7)]
    listaPar, listaImpar = particionar_pares_impares(entradas)
    # Master list keeping the two groups separated, as the exercise asks.
    numeros = [listaPar, listaImpar]
    print(f'\nO valores pares são: {sorted(listaPar)}')
    print(f'O valores impares são: {sorted(listaImpar)}\n')


if __name__ == '__main__':
    main()
6ddeac54e95c696aac7b3d3256112b68253bdc0e | 2,084 | py | Python | tests/unit/test_04_array_perf.py | PureStorage-OpenConnect/pure-fb-prometheus-exporter | 53fd72a2a858a60d17d4ca4ade1d82540596f9f0 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_04_array_perf.py | PureStorage-OpenConnect/pure-fb-prometheus-exporter | 53fd72a2a858a60d17d4ca4ade1d82540596f9f0 | [
"Apache-2.0"
] | 2 | 2022-02-15T21:30:25.000Z | 2022-02-16T15:29:48.000Z | tests/unit/test_04_array_perf.py | PureStorage-OpenConnect/pure-fb-prometheus-exporter | 53fd72a2a858a60d17d4ca4ade1d82540596f9f0 | [
"Apache-2.0"
] | null | null | null | from pure_fb_openmetrics_exporter.flashblade_collector.flashblade_metrics.array_performance_metrics import ArrayPerformanceMetrics
| 52.1 | 130 | 0.559021 | from pure_fb_openmetrics_exporter.flashblade_collector.flashblade_metrics.array_performance_metrics import ArrayPerformanceMetrics
def test_array_perf_name(fb_client):
    """Every array-performance sample must use one of the documented metric names."""
    expected_names = {
        'purefb_array_performance_latency_usec',
        'purefb_array_performance_throughput_iops',
        'purefb_array_performance_bandwidth_bytes',
        'purefb_array_performance_average_bytes',
    }
    metrics = ArrayPerformanceMetrics(fb_client).get_metrics()
    for metric in metrics:
        for sample in metric.samples:
            assert sample.name in expected_names
def test_array_perf_labels(fb_client):
    """Check sample labels: a valid protocol, plus the dimension set allowed
    for each metric name.

    Bug fix: the latency and average-size branches previously compared
    against metric names that are never produced ('..._latency_msec' and
    '..._avg_size_bytes'), so their dimension assertions were dead code.
    The names now match those asserted in test_array_perf_name.
    """
    array_perf = ArrayPerformanceMetrics(fb_client)
    for m in array_perf.get_metrics():
        for s in m.samples:
            assert s.labels['protocol'] in ['all', 'http', 'nfs', 's3', 'smb']
            if s.name == 'purefb_array_performance_latency_usec':
                assert s.labels['dimension'] in ['usec_per_read_op',
                                                 'usec_per_write_op',
                                                 'usec_per_other_op']
            elif s.name == 'purefb_array_performance_throughput_iops':
                assert s.labels['dimension'] in ['others_per_sec',
                                                 'reads_per_sec',
                                                 'writes_per_sec']
            elif s.name == 'purefb_array_performance_bandwidth_bytes':
                assert s.labels['dimension'] in ['read_bytes_per_sec',
                                                 'write_bytes_per_sec']
            elif s.name == 'purefb_array_performance_average_bytes':
                assert s.labels['dimension'] in ['bytes_per_op',
                                                 'bytes_per_read',
                                                 'bytes_per_write']
def test_array_perf_val(fb_client):
    """All array-performance samples must carry a non-None, non-negative value."""
    all_samples = (
        sample
        for metric in ArrayPerformanceMetrics(fb_client).get_metrics()
        for sample in metric.samples
    )
    for sample in all_samples:
        assert sample.value is not None
        assert sample.value >= 0
| 1,874 | 0 | 78 |
536a2bffd176dcd62a3d6b9ea6c56b220e727387 | 1,168 | py | Python | requests_cache/backends/dynamodb.py | Ax6/requests-cache | 6717a336430eb7e6fe17e75c5517f3076ca85b42 | [
"BSD-2-Clause"
] | 6 | 2018-09-12T07:03:55.000Z | 2021-08-11T18:26:32.000Z | requests_cache/backends/dynamodb.py | Ax6/requests-cache | 6717a336430eb7e6fe17e75c5517f3076ca85b42 | [
"BSD-2-Clause"
] | 1 | 2019-05-29T01:43:33.000Z | 2019-05-29T01:43:33.000Z | requests_cache/backends/dynamodb.py | Ax6/requests-cache | 6717a336430eb7e6fe17e75c5517f3076ca85b42 | [
"BSD-2-Clause"
] | 1 | 2020-04-05T14:10:56.000Z | 2020-04-05T14:10:56.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.dynamodb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``dynamodb`` cache backend
"""
from .base import BaseCache
from .storage.dynamodbdict import DynamoDbDict
class DynamoDbCache(BaseCache):
    """``dynamodb`` cache backend.

    Stores cached responses and the url->key map in one DynamoDB table, under
    the two logical namespaces ``'responses'`` and ``'urls'``.
    """

    def __init__(self, table_name='requests-cache', **options):
        """
        :param table_name: dynamodb table name (default: ``'requests-cache'``)
        :param connection: (optional) ``boto3.resource('dynamodb')``
        :param endpoint_url: (optional) alternate DynamoDB endpoint; the
            historical misspelling ``endpont_url`` is still honored for
            backward compatibility.
        :param region_name: (optional) AWS region name
        :param read_capacity_units: (optional) provisioned read capacity
        :param write_capacity_units: (optional) provisioned write capacity
        """
        super(DynamoDbCache, self).__init__(**options)
        # Fix: accept the correctly spelled 'endpoint_url' while keeping the
        # legacy 'endpont_url' typo working so existing callers don't break.
        endpoint_url = options.get('endpoint_url', options.get('endpont_url'))
        self.responses = DynamoDbDict(table_name, 'responses',
                                      options.get('connection'),
                                      endpoint_url,
                                      options.get('region_name'),
                                      options.get('read_capacity_units'),
                                      options.get('write_capacity_units'))
        # Reuse the connection established above for the url->key mapping.
        self.keys_map = DynamoDbDict(table_name,
                                     'urls',
                                     self.responses.connection)
| 37.677419 | 77 | 0.504281 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.dynamodb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``dynamodb`` cache backend
"""
from .base import BaseCache
from .storage.dynamodbdict import DynamoDbDict
class DynamoDbCache(BaseCache):
    """``dynamodb`` cache backend.

    Stores cached responses and the url->key map in one DynamoDB table, under
    the two logical namespaces ``'responses'`` and ``'urls'``.
    """

    def __init__(self, table_name='requests-cache', **options):
        """
        :param table_name: dynamodb table name (default: ``'requests-cache'``)
        :param connection: (optional) ``boto3.resource('dynamodb')``
        :param endpoint_url: (optional) alternate DynamoDB endpoint; the
            historical misspelling ``endpont_url`` is still honored for
            backward compatibility.
        :param region_name: (optional) AWS region name
        :param read_capacity_units: (optional) provisioned read capacity
        :param write_capacity_units: (optional) provisioned write capacity
        """
        super(DynamoDbCache, self).__init__(**options)
        # Fix: accept the correctly spelled 'endpoint_url' while keeping the
        # legacy 'endpont_url' typo working so existing callers don't break.
        endpoint_url = options.get('endpoint_url', options.get('endpont_url'))
        self.responses = DynamoDbDict(table_name, 'responses',
                                      options.get('connection'),
                                      endpoint_url,
                                      options.get('region_name'),
                                      options.get('read_capacity_units'),
                                      options.get('write_capacity_units'))
        # Reuse the connection established above for the url->key mapping.
        self.keys_map = DynamoDbDict(table_name,
                                     'urls',
                                     self.responses.connection)
| 0 | 0 | 0 |
14fb213ab57c57350fada5692e917462a2570e9a | 830 | py | Python | setup.py | kislenko-artem/dataclasses-ujson | 6113d8b3db0f45be0b9ade846e408e3b50979bb2 | [
"Apache-2.0"
] | 3 | 2018-07-02T05:38:13.000Z | 2018-10-06T22:15:48.000Z | setup.py | kislenko-artem/dataclasses-ujson | 6113d8b3db0f45be0b9ade846e408e3b50979bb2 | [
"Apache-2.0"
] | null | null | null | setup.py | kislenko-artem/dataclasses-ujson | 6113d8b3db0f45be0b9ade846e408e3b50979bb2 | [
"Apache-2.0"
] | null | null | null | import pathlib
from setuptools import setup, find_packages
# Resolve paths relative to this setup.py so builds work from any CWD.
HERE = pathlib.Path(__file__).parent
# The README markdown doubles as the PyPI long description.
README = (HERE / "ReadMe.md").read_text()
setup(
    name="dataclasses_ujson",
    version="0.0.14",
    # Ship every package except tests and the benchmark module.
    packages=find_packages(exclude=("tests*","bench_marks.py")),
    author="Kislenko Artem ",
    author_email="artem@webart-tech.ru",
    description="fast converter your json to dataclass",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/kislenko-artem/dataclasses-ujson",
    license="Apache",
    # Runtime dependency: ujson does the fast JSON parsing.
    install_requires=[
        "ujson>=1.35"
    ],
    python_requires=">=3.7",
    # `pip install dataclasses_ujson[dev]` pulls in the test tooling.
    extras_require={
        "dev": ["pytest"]
    },
    include_package_data=True,
    py_modules=['dataclasses_ujson'],
    setup_requires=["pytest-runner"],
    tests_require=["pytest"]
)
| 27.666667 | 64 | 0.678313 | import pathlib
from setuptools import setup, find_packages
# Resolve paths relative to this setup.py so builds work from any CWD.
HERE = pathlib.Path(__file__).parent
# The README markdown doubles as the PyPI long description.
README = (HERE / "ReadMe.md").read_text()
setup(
    name="dataclasses_ujson",
    version="0.0.14",
    # Ship every package except tests and the benchmark module.
    packages=find_packages(exclude=("tests*","bench_marks.py")),
    author="Kislenko Artem ",
    author_email="artem@webart-tech.ru",
    description="fast converter your json to dataclass",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/kislenko-artem/dataclasses-ujson",
    license="Apache",
    # Runtime dependency: ujson does the fast JSON parsing.
    install_requires=[
        "ujson>=1.35"
    ],
    python_requires=">=3.7",
    # `pip install dataclasses_ujson[dev]` pulls in the test tooling.
    extras_require={
        "dev": ["pytest"]
    },
    include_package_data=True,
    py_modules=['dataclasses_ujson'],
    setup_requires=["pytest-runner"],
    tests_require=["pytest"]
)
| 0 | 0 | 0 |
b6e1b5baa73bbe671f372c5fbb39114cf328bee3 | 2,820 | py | Python | django_mail_viewer/backends/cache.py | jmichalicek/django-mail-viewer | ede04bc6a0809b8e62621f5646396893ad966d71 | [
"MIT"
] | 3 | 2017-07-19T23:35:31.000Z | 2022-02-13T18:49:04.000Z | django_mail_viewer/backends/cache.py | jmichalicek/django-mail-viewer | ede04bc6a0809b8e62621f5646396893ad966d71 | [
"MIT"
] | 1 | 2019-10-05T14:51:39.000Z | 2019-10-05T14:51:40.000Z | django_mail_viewer/backends/cache.py | jmichalicek/django-mail-viewer | ede04bc6a0809b8e62621f5646396893ad966d71 | [
"MIT"
] | null | null | null | """
Backend for test environment.
"""
from django.core import cache
from django.core.mail.backends.base import BaseEmailBackend
from .. import settings as mailviewer_settings
class EmailBackend(BaseEmailBackend):
    """
    An email backend to use during testing and local development with Django Mail Viewer.

    Uses Django's cache framework to store sent emails in the cache so that they can
    be easily retrieved in multi-process environments such as using Django Channels or
    when sending an email from a python shell.
    """

    # NOTE(review): this excerpt relies on `self.cache` (a Django cache) and
    # `self.cache_keys_key` (the cache key under which the list of stored
    # message keys lives); both appear to be initialized in __init__, which is
    # not shown here -- confirm against the full class.

    def get_message(self, lookup_id):
        """
        Look up and return a specific message in the outbox.

        Returns None when no message is cached under lookup_id.
        """
        return self.cache.get(lookup_id)

    def get_outbox(self, *args, **kwargs):
        """
        Get the outbox used by this backend. This backend returns a copy of mail.outbox.
        May add pagination args/kwargs.
        """
        # grabs all of the keys in the stored self.cache_keys_key
        # and passes those into get_many() to retrieve the keys
        message_keys = self.cache.get(self.cache_keys_key)
        if message_keys:
            messages = list(self.cache.get_many(message_keys).values())
        else:
            messages = []
        return messages

    def delete_message(self, message_id: str):
        """
        Remove the message with the given id from the mailbox.
        """
        # NOTE(review): list.remove() raises ValueError when message_id is not
        # tracked in the key list -- confirm callers guarantee presence.
        message_keys = self.cache.get(self.cache_keys_key, [])
        message_keys.remove(message_id)
        self.cache.set(self.cache_keys_key, message_keys)
        self.cache.delete(message_id)
| 37.105263 | 89 | 0.65 | """
Backend for test environment.
"""
from django.core import cache
from django.core.mail.backends.base import BaseEmailBackend
from .. import settings as mailviewer_settings
class EmailBackend(BaseEmailBackend):
    """
    An email backend to use during testing and local development with Django Mail Viewer.

    Uses Django's cache framework to store sent emails in the cache so that they can
    be easily retrieved in multi-process environments such as using Django Channels or
    when sending an email from a python shell.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Cache configured for mail storage (MAILVIEWER_CACHE setting).
        self.cache = cache.caches[mailviewer_settings.MAILVIEWER_CACHE]
        # A cache entry holding the list of all the other cache keys.
        # This is for get_outbox() so the system knows which cache keys exist;
        # Django has no built-in way to enumerate keys in a cache.
        self.cache_keys_key = 'message_keys'

    def send_messages(self, messages):
        """
        Store each message in the cache keyed by its Message-ID header and
        track the key in the shared key list.

        :return: number of messages stored
        """
        msg_count = 0
        for message in messages:
            m = message.message()
            message_id = m.get('message-id')
            self.cache.set(message_id, m)
            # NOTE: this read-modify-write of the key list is not atomic; if
            # multiple processes update it simultaneously an update can be
            # lost.  A lock key and spinlock could be introduced to avoid
            # clobbering the stored list of keys.
            current_cache_keys = self.cache.get(self.cache_keys_key)
            if not current_cache_keys:
                current_cache_keys = []
            current_cache_keys.append(message_id)
            self.cache.set(self.cache_keys_key, current_cache_keys)
            msg_count += 1
        return msg_count

    def get_message(self, lookup_id):
        """
        Look up and return a specific message in the outbox
        (None when nothing is cached under lookup_id).
        """
        return self.cache.get(lookup_id)

    def get_outbox(self, *args, **kwargs):
        """
        Get the outbox used by this backend: every cached message tracked by
        the key list.  May add pagination args/kwargs.
        """
        # Fetch the tracked keys, then retrieve all messages in one round trip.
        message_keys = self.cache.get(self.cache_keys_key)
        if message_keys:
            messages = list(self.cache.get_many(message_keys).values())
        else:
            messages = []
        return messages

    def delete_message(self, message_id: str):
        """
        Remove the message with the given id from the mailbox.

        Fix: tolerate ids that are not (or no longer) tracked in the key list
        -- e.g. already deleted, or the key list was evicted from the cache --
        instead of raising ValueError from list.remove().
        """
        message_keys = self.cache.get(self.cache_keys_key, [])
        if message_id in message_keys:
            message_keys.remove(message_id)
            self.cache.set(self.cache_keys_key, message_keys)
        # Delete the message entry itself regardless of tracking state.
        self.cache.delete(message_id)
| 1,177 | 0 | 54 |
230c5789d1fbe742142cfe2e077b786e10a70cf9 | 757 | py | Python | puzzle9/main.py | apaku/aoc2020 | b90b1ec519014b851c6160d5b0e14c2ca8f86b91 | [
"BSD-3-Clause"
] | 1 | 2020-12-03T19:51:19.000Z | 2020-12-03T19:51:19.000Z | puzzle9/main.py | apaku/aoc2020 | b90b1ec519014b851c6160d5b0e14c2ca8f86b91 | [
"BSD-3-Clause"
] | null | null | null | puzzle9/main.py | apaku/aoc2020 | b90b1ec519014b851c6160d5b0e14c2ca8f86b91 | [
"BSD-3-Clause"
] | null | null | null | import sys
from itertools import combinations
# Advent of Code 2020, day 9.  One integer per line on stdin; the preamble
# length is the first command-line argument.
data = [int(line.strip()) for line in sys.stdin.readlines()]
preamble = int(sys.argv[1])
# Part 1: find the first value that is NOT the sum of any two of the
# `preamble` values immediately preceding it.
idx = preamble
while idx < len(data):
    found = False
    for (a,b) in combinations(data[idx-preamble:idx], 2):
        if a + b == data[idx]:
            found = True
            break
    if not found:
        break  # data[idx] is the first invalid number
    idx += 1
print("Part1: {} - {}".format(idx, data[idx]))
# Part 2: find a contiguous run (before idx) summing to the invalid value;
# the answer is min+max of that run.
subdata = data[:idx]
subidx = 0
while subidx < len(subdata)-1:
    endidx = subidx + 1
    while endidx < len(subdata):
        sublist = subdata[subidx:endidx]
        if sum(sublist) == data[idx]:
            print("Part2: {} {}".format(sublist, min(sublist)+max(sublist)))
            sys.exit(0)
        endidx += 1
    subidx += 1
| 22.939394 | 76 | 0.570674 | import sys
from itertools import combinations
data = [int(line.strip()) for line in sys.stdin.readlines()]
preamble = int(sys.argv[1])
idx = preamble
while idx < len(data):
found = False
for (a,b) in combinations(data[idx-preamble:idx], 2):
if a + b == data[idx]:
found = True
break
if not found:
break
idx += 1
print("Part1: {} - {}".format(idx, data[idx]))
subdata = data[:idx]
subidx = 0
while subidx < len(subdata)-1:
endidx = subidx + 1
while endidx < len(subdata):
sublist = subdata[subidx:endidx]
if sum(sublist) == data[idx]:
print("Part2: {} {}".format(sublist, min(sublist)+max(sublist)))
sys.exit(0)
endidx += 1
subidx += 1
| 0 | 0 | 0 |
aeaca77a0c6cb513574a6991c453ae4a01b6dc4d | 1,252 | py | Python | backend/src/gql/mutation/create_patient.py | spiritutumduo/spiritumDuo | 987785906cd504f46ccebe3bbfe0e81cbf02bf7c | [
"MIT"
] | 1 | 2022-03-11T14:07:16.000Z | 2022-03-11T14:07:16.000Z | backend/src/gql/mutation/create_patient.py | spiritutumduo/spiritumDuo | 987785906cd504f46ccebe3bbfe0e81cbf02bf7c | [
"MIT"
] | 3 | 2022-02-25T22:46:46.000Z | 2022-03-30T08:19:41.000Z | backend/src/gql/mutation/create_patient.py | spiritutumduo/spiritumDuo | 987785906cd504f46ccebe3bbfe0e81cbf02bf7c | [
"MIT"
] | 1 | 2022-03-31T14:35:51.000Z | 2022-03-31T14:35:51.000Z | from models import Patient
from .mutation_type import mutation
from authentication.authentication import needsAuthorization
from graphql.type import GraphQLResolveInfo
from datacreators.patient import CreatePatient
from SdTypes import Permissions
@mutation.field("createPatient")
@needsAuthorization([Permissions.PATIENT_CREATE])
| 33.837838 | 77 | 0.704473 | from models import Patient
from .mutation_type import mutation
from authentication.authentication import needsAuthorization
from graphql.type import GraphQLResolveInfo
from datacreators.patient import CreatePatient
from SdTypes import Permissions
@mutation.field("createPatient")
@needsAuthorization([Permissions.PATIENT_CREATE])
async def resolve_create_patient(
    obj=None,
    info: GraphQLResolveInfo = None,
    input: dict = None
) -> Patient:
    """GraphQL resolver for the `createPatient` mutation.

    Maps the mandatory camelCase input fields to the snake_case names the
    CreatePatient datacreator expects, copies the optional fields only when
    present in the mutation input, and delegates creation to CreatePatient.
    """
    patientInfo = {
        "first_name": input["firstName"],
        "last_name": input["lastName"],
        "hospital_number": input["hospitalNumber"],
        "national_number": input["nationalNumber"],
        "date_of_birth": input["dateOfBirth"],
        "pathwayId": input["pathwayId"],
        "context": info.context,
    }
    # Optional GraphQL (camelCase) -> datacreator (snake_case) field names.
    optional_fields = (
        ('communicationMethod', 'communication_method'),
        ('awaitingDecisionType', 'awaiting_decision_type'),
        ('referredAt', 'referred_at'),
        ('milestones', 'milestones'),
    )
    for input_key, target_key in optional_fields:
        if input_key in input:
            patientInfo[target_key] = input[input_key]
    return await CreatePatient(**patientInfo)
| 898 | 0 | 22 |
886b96d6b50ebb192157cd1dec991c1cf191d83a | 1,130 | py | Python | sdk/eventgrid/azure-eventgrid/samples/champion_scenarios/cs6_consume_events_using_cloud_events_1.0_schema.py | sima-zhu/azure-sdk-for-python | a413dc783f0df7dc65e9c2ef9762fabff1708c4e | [
"MIT"
] | null | null | null | sdk/eventgrid/azure-eventgrid/samples/champion_scenarios/cs6_consume_events_using_cloud_events_1.0_schema.py | sima-zhu/azure-sdk-for-python | a413dc783f0df7dc65e9c2ef9762fabff1708c4e | [
"MIT"
] | null | null | null | sdk/eventgrid/azure-eventgrid/samples/champion_scenarios/cs6_consume_events_using_cloud_events_1.0_schema.py | sima-zhu/azure-sdk-for-python | a413dc783f0df7dc65e9c2ef9762fabff1708c4e | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: cs6_consume_events_using_cloud_events_1.0_schema.py
DESCRIPTION:
These samples demonstrate creating a list of CloudEvents and sending then as a list.
USAGE:
python cs6_consume_events_using_cloud_events_1.0_schema.py
"""
import os
from azure.eventgrid import EventGridConsumer
consumer = EventGridConsumer()
# Decode the raw payload into a list of DeserializedEvent objects.
# NOTE(review): `service_bus_received_message` is not defined anywhere in this
# sample -- presumably a message previously received from Service Bus; it must
# be supplied before the snippet can run.
deserialized_events = consumer.decode_eventgrid_event(service_bus_received_message)
# CloudEvent schema
for event in deserialized_events:
    # Raw properties are reachable both as attributes and by key, as strings.
    time_string = event.time
    time_string = event["time"]
    # .model returns the typed CloudEvent object.
    cloud_event = event.model
    # Model properties are strongly typed (the variable names below reflect
    # the expected types: a datetime for `time`, event data for `data`).
    datetime_object = event.model.time
    storage_blobcreated_object = event.model.data
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: cs6_consume_events_using_cloud_events_1.0_schema.py
DESCRIPTION:
These samples demonstrate creating a list of CloudEvents and sending then as a list.
USAGE:
python cs6_consume_events_using_cloud_events_1.0_schema.py
"""
import os
from azure.eventgrid import EventGridConsumer
consumer = EventGridConsumer()
# Decode the raw payload into a list of DeserializedEvent objects.
# NOTE(review): `service_bus_received_message` is not defined anywhere in this
# sample -- presumably a message previously received from Service Bus; it must
# be supplied before the snippet can run.
deserialized_events = consumer.decode_eventgrid_event(service_bus_received_message)
# CloudEvent schema
for event in deserialized_events:
    # Raw properties are reachable both as attributes and by key, as strings.
    time_string = event.time
    time_string = event["time"]
    # .model returns the typed CloudEvent object.
    cloud_event = event.model
    # Model properties are strongly typed (the variable names below reflect
    # the expected types: a datetime for `time`, event data for `data`).
    datetime_object = event.model.time
    storage_blobcreated_object = event.model.data
668ed1659ae67dc4bdcd72703d472161d2741ed0 | 1,817 | py | Python | augment_image.py | JayD1912/image_outpaint | 0b47d94c6cbd10f749ed717d7d5f76bba03c0d9d | [
"MIT"
] | null | null | null | augment_image.py | JayD1912/image_outpaint | 0b47d94c6cbd10f749ed717d7d5f76bba03c0d9d | [
"MIT"
] | null | null | null | augment_image.py | JayD1912/image_outpaint | 0b47d94c6cbd10f749ed717d7d5f76bba03c0d9d | [
"MIT"
] | null | null | null | import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np
import random
import cv2
# Module-level imgaug augmenters shared by the augmentation routine.
brightness = iaa.Add((-7, 7), per_channel=0.5)
contrast = iaa.LinearContrast((0.8, 1.6), per_channel=0.5)
perspective = iaa.PerspectiveTransform(scale=(0.025, 0.090))
gaussian_noise = iaa.AdditiveGaussianNoise(loc=0, scale=(0.03*255, 0.04*255), per_channel=0.5)
crop = iaa.Crop(px=(0, 25))
if __name__ == "__main__":
    # Manual smoke test: load a sample image, augment it, and display the
    # original and augmented versions (press a key to advance each window).
    # NOTE(review): `aug_image` is not defined in this excerpt -- it appears
    # to come from the full module; confirm before running this copy.
    image = cv2.imread('cache/image 10.1.jpg')
    aug_images = aug_image(image)
    aug_images = [aug_images]  # wrap the single result in a list
##    image = cv2.resize(aug_images[0], (300,400))
##    cv2.imshow('Before', image)
    print(len(aug_images))
    image = cv2.resize(image, (600,600))
    image_1 = cv2.resize(aug_images[0], (600,600))
    cv2.imshow('1', image)
    cv2.waitKey(0)
    cv2.imshow('2', image_1)
    cv2.waitKey(0)
##    image2 = cv2.imread('cache/image 13.2.jpg')
##    image2 = cv2.resize(image2, (400,400))
##    cv2.imshow('After', image2)
##    cv2.waitKey(0)
| 8.735577 | 95 | 0.521739 | import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np
import random
import cv2
# Module-level imgaug augmenters, built once and shared by aug_image();
# per_channel=0.5 applies the transform per colour channel half the time.
brightness = iaa.Add((-7, 7), per_channel=0.5)
contrast = iaa.LinearContrast((0.8, 1.6), per_channel=0.5)
perspective = iaa.PerspectiveTransform(scale=(0.025, 0.090))
gaussian_noise = iaa.AdditiveGaussianNoise(loc=0, scale=(0.03*255, 0.04*255), per_channel=0.5)
crop = iaa.Crop(px=(0, 25))  # crop between 0 and 25 pixels from the image sides
def aug_image(my_image):
    """Return a randomly augmented copy of *my_image*.

    Each of the module-level augmenters (perspective, brightness,
    contrast, gaussian noise, crop) is applied independently with
    probability 1/3, in that fixed order.  The input image itself is
    never modified.
    """
    result = my_image.copy()
    # One independent 1-in-3 coin flip per augmenter; the order of the
    # random draws and of the augmenter applications matches the
    # original implementation exactly.
    for augmenter in (perspective, brightness, contrast, gaussian_noise, crop):
        if random.choice([0, 0, 1]):
            result = augmenter.augment_image(result)
    return result
# Demo driver: load one cached image, run it through aug_image(), and
# display the original and the augmented result in two OpenCV windows.
if __name__ == "__main__":
    image = cv2.imread('cache/image 10.1.jpg')
    aug_images = aug_image(image)
    aug_images = [aug_images]  # wrap the single augmented image in a list
##    image = cv2.resize(aug_images[0], (300,400))
##    cv2.imshow('Before', image)
    print(len(aug_images))
    # Resize both images to a fixed 600x600 purely for display purposes.
    image = cv2.resize(image, (600,600))
    image_1 = cv2.resize(aug_images[0], (600,600))
    cv2.imshow('1', image)
    cv2.waitKey(0)  # block until a key is pressed before showing the next window
    cv2.imshow('2', image_1)
    cv2.waitKey(0)
##    image2 = cv2.imread('cache/image 13.2.jpg')
##    image2 = cv2.resize(image2, (400,400))
##    cv2.imshow('After', image2)
##    cv2.waitKey(0)
| 452 | 0 | 25 |
53f20f21d4816c8461ab14d8b28d336b630cfee5 | 54,739 | py | Python | pyinjective/proto/injective/peggy/v1/msgs_pb2.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | 10 | 2021-09-07T08:03:52.000Z | 2022-03-08T08:39:30.000Z | pyinjective/proto/injective/peggy/v1/msgs_pb2.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | 39 | 2021-08-19T20:09:35.000Z | 2022-03-22T19:51:59.000Z | pyinjective/proto/injective/peggy/v1/msgs_pb2.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | 5 | 2021-11-02T16:23:48.000Z | 2022-01-20T22:30:05.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: injective/peggy/v1/msgs.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from cosmos.base.v1beta1 import coin_pb2 as cosmos_dot_base_dot_v1beta1_dot_coin__pb2
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from injective.peggy.v1 import types_pb2 as injective_dot_peggy_dot_v1_dot_types__pb2
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='injective/peggy/v1/msgs.proto',
package='injective.peggy.v1',
syntax='proto3',
serialized_options=b'ZKgithub.com/InjectiveLabs/injective-core/injective-chain/modules/peggy/types',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1dinjective/peggy/v1/msgs.proto\x12\x12injective.peggy.v1\x1a\x1e\x63osmos/base/v1beta1/coin.proto\x1a\x14gogoproto/gogo.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1einjective/peggy/v1/types.proto\x1a\x19google/protobuf/any.proto\"X\n\x1bMsgSetOrchestratorAddresses\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\"%\n#MsgSetOrchestratorAddressesResponse\"_\n\x10MsgValsetConfirm\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\x12\x11\n\tsignature\x18\x04 \x01(\t\"\x1a\n\x18MsgValsetConfirmResponse\"\x96\x01\n\x0cMsgSendToEth\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x10\n\x08\x65th_dest\x18\x02 \x01(\t\x12/\n\x06\x61mount\x18\x03 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\x12\x33\n\nbridge_fee\x18\x04 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\"\x16\n\x14MsgSendToEthResponse\"6\n\x0fMsgRequestBatch\x12\x14\n\x0corchestrator\x18\x01 \x01(\t\x12\r\n\x05\x64\x65nom\x18\x02 \x01(\t\"\x19\n\x17MsgRequestBatchResponse\"u\n\x0fMsgConfirmBatch\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x02 \x01(\t\x12\x12\n\neth_signer\x18\x03 \x01(\t\x12\x14\n\x0corchestrator\x18\x04 \x01(\t\x12\x11\n\tsignature\x18\x05 \x01(\t\"\x19\n\x17MsgConfirmBatchResponse\"\xdc\x01\n\x0fMsgDepositClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x03 \x01(\t\x12>\n\x06\x61mount\x18\x04 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x17\n\x0f\x65thereum_sender\x18\x05 \x01(\t\x12\x17\n\x0f\x63osmos_receiver\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x19\n\x17MsgDepositClaimResponse\"\x80\x01\n\x10MsgWithdrawClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x13\n\x0b\x62\x61tch_nonce\x18\x03 
\x01(\x04\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x14\n\x0corchestrator\x18\x05 \x01(\t\"\x1a\n\x18MsgWithdrawClaimResponse\"\xb6\x01\n\x15MsgERC20DeployedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x14\n\x0c\x63osmos_denom\x18\x03 \x01(\t\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06symbol\x18\x06 \x01(\t\x12\x10\n\x08\x64\x65\x63imals\x18\x07 \x01(\x04\x12\x14\n\x0corchestrator\x18\x08 \x01(\t\"\x1f\n\x1dMsgERC20DeployedClaimResponse\"<\n\x12MsgCancelSendToEth\x12\x16\n\x0etransaction_id\x18\x01 \x01(\x04\x12\x0e\n\x06sender\x18\x02 \x01(\t\"\x1c\n\x1aMsgCancelSendToEthResponse\"i\n\x1dMsgSubmitBadSignatureEvidence\x12%\n\x07subject\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tsignature\x18\x02 \x01(\t\x12\x0e\n\x06sender\x18\x03 \x01(\t\"\'\n%MsgSubmitBadSignatureEvidenceResponse\"\x81\x02\n\x15MsgValsetUpdatedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0cvalset_nonce\x18\x02 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x03 \x01(\x04\x12\x34\n\x07members\x18\x04 \x03(\x0b\x32#.injective.peggy.v1.BridgeValidator\x12\x45\n\rreward_amount\x18\x05 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x14\n\x0creward_token\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x1f\n\x1dMsgValsetUpdatedClaimResponse2\xc4\r\n\x03Msg\x12\x8f\x01\n\rValsetConfirm\x12$.injective.peggy.v1.MsgValsetConfirm\x1a,.injective.peggy.v1.MsgValsetConfirmResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/valset_confirm\x12\x80\x01\n\tSendToEth\x12 
.injective.peggy.v1.MsgSendToEth\x1a(.injective.peggy.v1.MsgSendToEthResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1f/injective/peggy/v1/send_to_eth\x12\x8b\x01\n\x0cRequestBatch\x12#.injective.peggy.v1.MsgRequestBatch\x1a+.injective.peggy.v1.MsgRequestBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/request_batch\x12\x8b\x01\n\x0c\x43onfirmBatch\x12#.injective.peggy.v1.MsgConfirmBatch\x1a+.injective.peggy.v1.MsgConfirmBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/confirm_batch\x12\x8b\x01\n\x0c\x44\x65positClaim\x12#.injective.peggy.v1.MsgDepositClaim\x1a+.injective.peggy.v1.MsgDepositClaimResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/deposit_claim\x12\x8f\x01\n\rWithdrawClaim\x12$.injective.peggy.v1.MsgWithdrawClaim\x1a,.injective.peggy.v1.MsgWithdrawClaimResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/withdraw_claim\x12\xa3\x01\n\x11ValsetUpdateClaim\x12).injective.peggy.v1.MsgValsetUpdatedClaim\x1a\x31.injective.peggy.v1.MsgValsetUpdatedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/valset_updated_claim\x12\xa4\x01\n\x12\x45RC20DeployedClaim\x12).injective.peggy.v1.MsgERC20DeployedClaim\x1a\x31.injective.peggy.v1.MsgERC20DeployedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/erc20_deployed_claim\x12\xba\x01\n\x18SetOrchestratorAddresses\x12/.injective.peggy.v1.MsgSetOrchestratorAddresses\x1a\x37.injective.peggy.v1.MsgSetOrchestratorAddressesResponse\"4\x82\xd3\xe4\x93\x02.\",/injective/peggy/v1/set_orchestrator_address\x12\x99\x01\n\x0f\x43\x61ncelSendToEth\x12&.injective.peggy.v1.MsgCancelSendToEth\x1a..injective.peggy.v1.MsgCancelSendToEthResponse\".\x82\xd3\xe4\x93\x02(\"&/injective/peggy/v1/cancel_send_to_eth\x12\xc5\x01\n\x1aSubmitBadSignatureEvidence\x12\x31.injective.peggy.v1.MsgSubmitBadSignatureEvidence\x1a\x39.injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse\"9\x82\xd3\xe4\x93\x02\x33\"1/injective/peggy/v1/submit_bad_signature_evidenceBMZKgithub.com/InjectiveLabs/injective-core/injectiv
e-chain/modules/peggy/typesb\x06proto3'
,
dependencies=[cosmos_dot_base_dot_v1beta1_dot_coin__pb2.DESCRIPTOR,gogoproto_dot_gogo__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,injective_dot_peggy_dot_v1_dot_types__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_MSGSETORCHESTRATORADDRESSES = _descriptor.Descriptor(
name='MsgSetOrchestratorAddresses',
full_name='injective.peggy.v1.MsgSetOrchestratorAddresses',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=196,
serialized_end=284,
)
_MSGSETORCHESTRATORADDRESSESRESPONSE = _descriptor.Descriptor(
name='MsgSetOrchestratorAddressesResponse',
full_name='injective.peggy.v1.MsgSetOrchestratorAddressesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=286,
serialized_end=323,
)
_MSGVALSETCONFIRM = _descriptor.Descriptor(
name='MsgValsetConfirm',
full_name='injective.peggy.v1.MsgValsetConfirm',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgValsetConfirm.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgValsetConfirm.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgValsetConfirm.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgValsetConfirm.signature', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=325,
serialized_end=420,
)
_MSGVALSETCONFIRMRESPONSE = _descriptor.Descriptor(
name='MsgValsetConfirmResponse',
full_name='injective.peggy.v1.MsgValsetConfirmResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=422,
serialized_end=448,
)
_MSGSENDTOETH = _descriptor.Descriptor(
name='MsgSendToEth',
full_name='injective.peggy.v1.MsgSendToEth',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSendToEth.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_dest', full_name='injective.peggy.v1.MsgSendToEth.eth_dest', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgSendToEth.amount', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bridge_fee', full_name='injective.peggy.v1.MsgSendToEth.bridge_fee', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=451,
serialized_end=601,
)
_MSGSENDTOETHRESPONSE = _descriptor.Descriptor(
name='MsgSendToEthResponse',
full_name='injective.peggy.v1.MsgSendToEthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=603,
serialized_end=625,
)
_MSGREQUESTBATCH = _descriptor.Descriptor(
name='MsgRequestBatch',
full_name='injective.peggy.v1.MsgRequestBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgRequestBatch.orchestrator', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='denom', full_name='injective.peggy.v1.MsgRequestBatch.denom', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=627,
serialized_end=681,
)
_MSGREQUESTBATCHRESPONSE = _descriptor.Descriptor(
name='MsgRequestBatchResponse',
full_name='injective.peggy.v1.MsgRequestBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=683,
serialized_end=708,
)
_MSGCONFIRMBATCH = _descriptor.Descriptor(
name='MsgConfirmBatch',
full_name='injective.peggy.v1.MsgConfirmBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgConfirmBatch.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgConfirmBatch.token_contract', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_signer', full_name='injective.peggy.v1.MsgConfirmBatch.eth_signer', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgConfirmBatch.orchestrator', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgConfirmBatch.signature', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=710,
serialized_end=827,
)
_MSGCONFIRMBATCHRESPONSE = _descriptor.Descriptor(
name='MsgConfirmBatchResponse',
full_name='injective.peggy.v1.MsgConfirmBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=829,
serialized_end=854,
)
_MSGDEPOSITCLAIM = _descriptor.Descriptor(
name='MsgDepositClaim',
full_name='injective.peggy.v1.MsgDepositClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgDepositClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgDepositClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgDepositClaim.token_contract', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgDepositClaim.amount', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ethereum_sender', full_name='injective.peggy.v1.MsgDepositClaim.ethereum_sender', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_receiver', full_name='injective.peggy.v1.MsgDepositClaim.cosmos_receiver', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgDepositClaim.orchestrator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=857,
serialized_end=1077,
)
_MSGDEPOSITCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgDepositClaimResponse',
full_name='injective.peggy.v1.MsgDepositClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1079,
serialized_end=1104,
)
_MSGWITHDRAWCLAIM = _descriptor.Descriptor(
name='MsgWithdrawClaim',
full_name='injective.peggy.v1.MsgWithdrawClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgWithdrawClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='batch_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.batch_nonce', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgWithdrawClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgWithdrawClaim.orchestrator', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1107,
serialized_end=1235,
)
_MSGWITHDRAWCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgWithdrawClaimResponse',
full_name='injective.peggy.v1.MsgWithdrawClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1237,
serialized_end=1263,
)
_MSGERC20DEPLOYEDCLAIM = _descriptor.Descriptor(
name='MsgERC20DeployedClaim',
full_name='injective.peggy.v1.MsgERC20DeployedClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgERC20DeployedClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgERC20DeployedClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_denom', full_name='injective.peggy.v1.MsgERC20DeployedClaim.cosmos_denom', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgERC20DeployedClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='injective.peggy.v1.MsgERC20DeployedClaim.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='symbol', full_name='injective.peggy.v1.MsgERC20DeployedClaim.symbol', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='decimals', full_name='injective.peggy.v1.MsgERC20DeployedClaim.decimals', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgERC20DeployedClaim.orchestrator', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1266,
serialized_end=1448,
)
# --- Message descriptors (protoc-generated) -------------------------------
# Each _descriptor.Descriptor below carries the wire-format metadata for one
# injective.peggy.v1 message; serialized_start/serialized_end index into the
# file's serialized_pb blob. Generated code — do not hand-edit values.

# Empty acknowledgement message for the ERC20DeployedClaim RPC (no fields).
_MSGERC20DEPLOYEDCLAIMRESPONSE = _descriptor.Descriptor(
  name='MsgERC20DeployedClaimResponse',
  full_name='injective.peggy.v1.MsgERC20DeployedClaimResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1450,
  serialized_end=1481,
)

# MsgCancelSendToEth: transaction_id (uint64, field 1) + sender (string, field 2).
_MSGCANCELSENDTOETH = _descriptor.Descriptor(
  name='MsgCancelSendToEth',
  full_name='injective.peggy.v1.MsgCancelSendToEth',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='transaction_id', full_name='injective.peggy.v1.MsgCancelSendToEth.transaction_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sender', full_name='injective.peggy.v1.MsgCancelSendToEth.sender', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1483,
  serialized_end=1543,
)

# Empty acknowledgement message for the CancelSendToEth RPC (no fields).
_MSGCANCELSENDTOETHRESPONSE = _descriptor.Descriptor(
  name='MsgCancelSendToEthResponse',
  full_name='injective.peggy.v1.MsgCancelSendToEthResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1545,
  serialized_end=1573,
)

# MsgSubmitBadSignatureEvidence: subject (google.protobuf.Any, field 1),
# signature (string, field 2), sender (string, field 3).
_MSGSUBMITBADSIGNATUREEVIDENCE = _descriptor.Descriptor(
  name='MsgSubmitBadSignatureEvidence',
  full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='subject', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.subject', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='signature', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.signature', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sender', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.sender', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1575,
  serialized_end=1680,
)

# Empty acknowledgement message for the SubmitBadSignatureEvidence RPC.
_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE = _descriptor.Descriptor(
  name='MsgSubmitBadSignatureEvidenceResponse',
  full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1682,
  serialized_end=1721,
)

# MsgValsetUpdatedClaim: nonces/heights (uint64, fields 1-3), repeated
# members (message, field 4; message_type linked below), reward_amount
# (string, field 5, carries gogoproto customtype options for cosmos-sdk Int),
# reward_token and orchestrator (strings, fields 6-7).
_MSGVALSETUPDATEDCLAIM = _descriptor.Descriptor(
  name='MsgValsetUpdatedClaim',
  full_name='injective.peggy.v1.MsgValsetUpdatedClaim',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='event_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.event_nonce', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='valset_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.valset_nonce', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.block_height', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='members', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.members', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='reward_amount', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_amount', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='reward_token', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_token', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='orchestrator', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.orchestrator', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1724,
  serialized_end=1981,
)

# Empty acknowledgement message for the ValsetUpdateClaim RPC (no fields).
_MSGVALSETUPDATEDCLAIMRESPONSE = _descriptor.Descriptor(
  name='MsgValsetUpdatedClaimResponse',
  full_name='injective.peggy.v1.MsgValsetUpdatedClaimResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1983,
  serialized_end=2014,
)
# Resolve cross-file message-type references that could not be set at
# Descriptor-construction time (fields whose type lives in an imported proto).
_MSGSENDTOETH.fields_by_name['amount'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSENDTOETH.fields_by_name['bridge_fee'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSUBMITBADSIGNATUREEVIDENCE.fields_by_name['subject'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_MSGVALSETUPDATEDCLAIM.fields_by_name['members'].message_type = injective_dot_peggy_dot_v1_dot_types__pb2._BRIDGEVALIDATOR
# Register every message descriptor on the file descriptor, then register the
# file with the default symbol database so reflection can find these types.
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddresses'] = _MSGSETORCHESTRATORADDRESSES
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddressesResponse'] = _MSGSETORCHESTRATORADDRESSESRESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetConfirm'] = _MSGVALSETCONFIRM
DESCRIPTOR.message_types_by_name['MsgValsetConfirmResponse'] = _MSGVALSETCONFIRMRESPONSE
DESCRIPTOR.message_types_by_name['MsgSendToEth'] = _MSGSENDTOETH
DESCRIPTOR.message_types_by_name['MsgSendToEthResponse'] = _MSGSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgRequestBatch'] = _MSGREQUESTBATCH
DESCRIPTOR.message_types_by_name['MsgRequestBatchResponse'] = _MSGREQUESTBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgConfirmBatch'] = _MSGCONFIRMBATCH
DESCRIPTOR.message_types_by_name['MsgConfirmBatchResponse'] = _MSGCONFIRMBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgDepositClaim'] = _MSGDEPOSITCLAIM
DESCRIPTOR.message_types_by_name['MsgDepositClaimResponse'] = _MSGDEPOSITCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgWithdrawClaim'] = _MSGWITHDRAWCLAIM
DESCRIPTOR.message_types_by_name['MsgWithdrawClaimResponse'] = _MSGWITHDRAWCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaim'] = _MSGERC20DEPLOYEDCLAIM
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaimResponse'] = _MSGERC20DEPLOYEDCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgCancelSendToEth'] = _MSGCANCELSENDTOETH
DESCRIPTOR.message_types_by_name['MsgCancelSendToEthResponse'] = _MSGCANCELSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidence'] = _MSGSUBMITBADSIGNATUREEVIDENCE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidenceResponse'] = _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaim'] = _MSGVALSETUPDATEDCLAIM
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaimResponse'] = _MSGVALSETUPDATEDCLAIMRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Concrete message classes ---------------------------------------------
# For each descriptor above, build the public Python message class via the
# GeneratedProtocolMessageType metaclass and register it with the symbol
# database. These class names are this module's public API.
MsgSetOrchestratorAddresses = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddresses', (_message.Message,), {
  'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSES,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddresses)
  })
_sym_db.RegisterMessage(MsgSetOrchestratorAddresses)
MsgSetOrchestratorAddressesResponse = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddressesResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSESRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddressesResponse)
  })
_sym_db.RegisterMessage(MsgSetOrchestratorAddressesResponse)
MsgValsetConfirm = _reflection.GeneratedProtocolMessageType('MsgValsetConfirm', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETCONFIRM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirm)
  })
_sym_db.RegisterMessage(MsgValsetConfirm)
MsgValsetConfirmResponse = _reflection.GeneratedProtocolMessageType('MsgValsetConfirmResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETCONFIRMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirmResponse)
  })
_sym_db.RegisterMessage(MsgValsetConfirmResponse)
MsgSendToEth = _reflection.GeneratedProtocolMessageType('MsgSendToEth', (_message.Message,), {
  'DESCRIPTOR' : _MSGSENDTOETH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEth)
  })
_sym_db.RegisterMessage(MsgSendToEth)
MsgSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgSendToEthResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSENDTOETHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEthResponse)
  })
_sym_db.RegisterMessage(MsgSendToEthResponse)
MsgRequestBatch = _reflection.GeneratedProtocolMessageType('MsgRequestBatch', (_message.Message,), {
  'DESCRIPTOR' : _MSGREQUESTBATCH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatch)
  })
_sym_db.RegisterMessage(MsgRequestBatch)
MsgRequestBatchResponse = _reflection.GeneratedProtocolMessageType('MsgRequestBatchResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGREQUESTBATCHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatchResponse)
  })
_sym_db.RegisterMessage(MsgRequestBatchResponse)
MsgConfirmBatch = _reflection.GeneratedProtocolMessageType('MsgConfirmBatch', (_message.Message,), {
  'DESCRIPTOR' : _MSGCONFIRMBATCH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatch)
  })
_sym_db.RegisterMessage(MsgConfirmBatch)
MsgConfirmBatchResponse = _reflection.GeneratedProtocolMessageType('MsgConfirmBatchResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGCONFIRMBATCHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatchResponse)
  })
_sym_db.RegisterMessage(MsgConfirmBatchResponse)
MsgDepositClaim = _reflection.GeneratedProtocolMessageType('MsgDepositClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGDEPOSITCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaim)
  })
_sym_db.RegisterMessage(MsgDepositClaim)
MsgDepositClaimResponse = _reflection.GeneratedProtocolMessageType('MsgDepositClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGDEPOSITCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaimResponse)
  })
_sym_db.RegisterMessage(MsgDepositClaimResponse)
MsgWithdrawClaim = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGWITHDRAWCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaim)
  })
_sym_db.RegisterMessage(MsgWithdrawClaim)
MsgWithdrawClaimResponse = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGWITHDRAWCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaimResponse)
  })
_sym_db.RegisterMessage(MsgWithdrawClaimResponse)
MsgERC20DeployedClaim = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaim)
  })
_sym_db.RegisterMessage(MsgERC20DeployedClaim)
MsgERC20DeployedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaimResponse)
  })
_sym_db.RegisterMessage(MsgERC20DeployedClaimResponse)
MsgCancelSendToEth = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEth', (_message.Message,), {
  'DESCRIPTOR' : _MSGCANCELSENDTOETH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEth)
  })
_sym_db.RegisterMessage(MsgCancelSendToEth)
MsgCancelSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEthResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGCANCELSENDTOETHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEthResponse)
  })
_sym_db.RegisterMessage(MsgCancelSendToEthResponse)
MsgSubmitBadSignatureEvidence = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidence', (_message.Message,), {
  'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidence)
  })
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidence)
MsgSubmitBadSignatureEvidenceResponse = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidenceResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse)
  })
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidenceResponse)
MsgValsetUpdatedClaim = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaim)
  })
_sym_db.RegisterMessage(MsgValsetUpdatedClaim)
MsgValsetUpdatedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaimResponse)
  })
_sym_db.RegisterMessage(MsgValsetUpdatedClaimResponse)
# Clear cached _options on descriptors whose serialized_options were set
# above, so option values are re-parsed lazily from serialized_options.
DESCRIPTOR._options = None
_MSGSENDTOETH.fields_by_name['amount']._options = None
_MSGSENDTOETH.fields_by_name['bridge_fee']._options = None
_MSGDEPOSITCLAIM.fields_by_name['amount']._options = None
_MSGVALSETUPDATEDCLAIM.fields_by_name['reward_amount']._options = None
# --- Service descriptor ----------------------------------------------------
# injective.peggy.v1.Msg service: 11 unary RPC methods. Each method's
# serialized_options carries the google.api.http annotation bytes (POST
# routes such as /injective/peggy/v1/valset_confirm).
_MSG = _descriptor.ServiceDescriptor(
  name='Msg',
  full_name='injective.peggy.v1.Msg',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=2017,
  serialized_end=3749,
  methods=[
  _descriptor.MethodDescriptor(
    name='ValsetConfirm',
    full_name='injective.peggy.v1.Msg.ValsetConfirm',
    index=0,
    containing_service=None,
    input_type=_MSGVALSETCONFIRM,
    output_type=_MSGVALSETCONFIRMRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/valset_confirm',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SendToEth',
    full_name='injective.peggy.v1.Msg.SendToEth',
    index=1,
    containing_service=None,
    input_type=_MSGSENDTOETH,
    output_type=_MSGSENDTOETHRESPONSE,
    serialized_options=b'\202\323\344\223\002!\"\037/injective/peggy/v1/send_to_eth',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='RequestBatch',
    full_name='injective.peggy.v1.Msg.RequestBatch',
    index=2,
    containing_service=None,
    input_type=_MSGREQUESTBATCH,
    output_type=_MSGREQUESTBATCHRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/request_batch',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ConfirmBatch',
    full_name='injective.peggy.v1.Msg.ConfirmBatch',
    index=3,
    containing_service=None,
    input_type=_MSGCONFIRMBATCH,
    output_type=_MSGCONFIRMBATCHRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/confirm_batch',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='DepositClaim',
    full_name='injective.peggy.v1.Msg.DepositClaim',
    index=4,
    containing_service=None,
    input_type=_MSGDEPOSITCLAIM,
    output_type=_MSGDEPOSITCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/deposit_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='WithdrawClaim',
    full_name='injective.peggy.v1.Msg.WithdrawClaim',
    index=5,
    containing_service=None,
    input_type=_MSGWITHDRAWCLAIM,
    output_type=_MSGWITHDRAWCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/withdraw_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ValsetUpdateClaim',
    full_name='injective.peggy.v1.Msg.ValsetUpdateClaim',
    index=6,
    containing_service=None,
    input_type=_MSGVALSETUPDATEDCLAIM,
    output_type=_MSGVALSETUPDATEDCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/valset_updated_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ERC20DeployedClaim',
    full_name='injective.peggy.v1.Msg.ERC20DeployedClaim',
    index=7,
    containing_service=None,
    input_type=_MSGERC20DEPLOYEDCLAIM,
    output_type=_MSGERC20DEPLOYEDCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/erc20_deployed_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SetOrchestratorAddresses',
    full_name='injective.peggy.v1.Msg.SetOrchestratorAddresses',
    index=8,
    containing_service=None,
    input_type=_MSGSETORCHESTRATORADDRESSES,
    output_type=_MSGSETORCHESTRATORADDRESSESRESPONSE,
    serialized_options=b'\202\323\344\223\002.\",/injective/peggy/v1/set_orchestrator_address',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CancelSendToEth',
    full_name='injective.peggy.v1.Msg.CancelSendToEth',
    index=9,
    containing_service=None,
    input_type=_MSGCANCELSENDTOETH,
    output_type=_MSGCANCELSENDTOETHRESPONSE,
    serialized_options=b'\202\323\344\223\002(\"&/injective/peggy/v1/cancel_send_to_eth',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SubmitBadSignatureEvidence',
    full_name='injective.peggy.v1.Msg.SubmitBadSignatureEvidence',
    index=10,
    containing_service=None,
    input_type=_MSGSUBMITBADSIGNATUREEVIDENCE,
    output_type=_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
    serialized_options=b'\202\323\344\223\0023\"1/injective/peggy/v1/submit_bad_signature_evidence',
    create_key=_descriptor._internal_create_key,
  ),
])
# Register the service with the symbol database and expose it on the file
# descriptor for gRPC stub/servicer generation (msgs_pb2_grpc).
_sym_db.RegisterServiceDescriptor(_MSG)
DESCRIPTOR.services_by_name['Msg'] = _MSG
# @@protoc_insertion_point(module_scope)
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: injective/peggy/v1/msgs.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database; message and service descriptors defined below are
# registered here so they are discoverable via protobuf reflection.
_sym_db = _symbol_database.Default()
from cosmos.base.v1beta1 import coin_pb2 as cosmos_dot_base_dot_v1beta1_dot_coin__pb2
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from injective.peggy.v1 import types_pb2 as injective_dot_peggy_dot_v1_dot_types__pb2
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='injective/peggy/v1/msgs.proto',
package='injective.peggy.v1',
syntax='proto3',
serialized_options=b'ZKgithub.com/InjectiveLabs/injective-core/injective-chain/modules/peggy/types',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1dinjective/peggy/v1/msgs.proto\x12\x12injective.peggy.v1\x1a\x1e\x63osmos/base/v1beta1/coin.proto\x1a\x14gogoproto/gogo.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1einjective/peggy/v1/types.proto\x1a\x19google/protobuf/any.proto\"X\n\x1bMsgSetOrchestratorAddresses\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\"%\n#MsgSetOrchestratorAddressesResponse\"_\n\x10MsgValsetConfirm\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\x12\x11\n\tsignature\x18\x04 \x01(\t\"\x1a\n\x18MsgValsetConfirmResponse\"\x96\x01\n\x0cMsgSendToEth\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x10\n\x08\x65th_dest\x18\x02 \x01(\t\x12/\n\x06\x61mount\x18\x03 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\x12\x33\n\nbridge_fee\x18\x04 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\"\x16\n\x14MsgSendToEthResponse\"6\n\x0fMsgRequestBatch\x12\x14\n\x0corchestrator\x18\x01 \x01(\t\x12\r\n\x05\x64\x65nom\x18\x02 \x01(\t\"\x19\n\x17MsgRequestBatchResponse\"u\n\x0fMsgConfirmBatch\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x02 \x01(\t\x12\x12\n\neth_signer\x18\x03 \x01(\t\x12\x14\n\x0corchestrator\x18\x04 \x01(\t\x12\x11\n\tsignature\x18\x05 \x01(\t\"\x19\n\x17MsgConfirmBatchResponse\"\xdc\x01\n\x0fMsgDepositClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x03 \x01(\t\x12>\n\x06\x61mount\x18\x04 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x17\n\x0f\x65thereum_sender\x18\x05 \x01(\t\x12\x17\n\x0f\x63osmos_receiver\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x19\n\x17MsgDepositClaimResponse\"\x80\x01\n\x10MsgWithdrawClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x13\n\x0b\x62\x61tch_nonce\x18\x03 
\x01(\x04\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x14\n\x0corchestrator\x18\x05 \x01(\t\"\x1a\n\x18MsgWithdrawClaimResponse\"\xb6\x01\n\x15MsgERC20DeployedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x14\n\x0c\x63osmos_denom\x18\x03 \x01(\t\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06symbol\x18\x06 \x01(\t\x12\x10\n\x08\x64\x65\x63imals\x18\x07 \x01(\x04\x12\x14\n\x0corchestrator\x18\x08 \x01(\t\"\x1f\n\x1dMsgERC20DeployedClaimResponse\"<\n\x12MsgCancelSendToEth\x12\x16\n\x0etransaction_id\x18\x01 \x01(\x04\x12\x0e\n\x06sender\x18\x02 \x01(\t\"\x1c\n\x1aMsgCancelSendToEthResponse\"i\n\x1dMsgSubmitBadSignatureEvidence\x12%\n\x07subject\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tsignature\x18\x02 \x01(\t\x12\x0e\n\x06sender\x18\x03 \x01(\t\"\'\n%MsgSubmitBadSignatureEvidenceResponse\"\x81\x02\n\x15MsgValsetUpdatedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0cvalset_nonce\x18\x02 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x03 \x01(\x04\x12\x34\n\x07members\x18\x04 \x03(\x0b\x32#.injective.peggy.v1.BridgeValidator\x12\x45\n\rreward_amount\x18\x05 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x14\n\x0creward_token\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x1f\n\x1dMsgValsetUpdatedClaimResponse2\xc4\r\n\x03Msg\x12\x8f\x01\n\rValsetConfirm\x12$.injective.peggy.v1.MsgValsetConfirm\x1a,.injective.peggy.v1.MsgValsetConfirmResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/valset_confirm\x12\x80\x01\n\tSendToEth\x12 
.injective.peggy.v1.MsgSendToEth\x1a(.injective.peggy.v1.MsgSendToEthResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1f/injective/peggy/v1/send_to_eth\x12\x8b\x01\n\x0cRequestBatch\x12#.injective.peggy.v1.MsgRequestBatch\x1a+.injective.peggy.v1.MsgRequestBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/request_batch\x12\x8b\x01\n\x0c\x43onfirmBatch\x12#.injective.peggy.v1.MsgConfirmBatch\x1a+.injective.peggy.v1.MsgConfirmBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/confirm_batch\x12\x8b\x01\n\x0c\x44\x65positClaim\x12#.injective.peggy.v1.MsgDepositClaim\x1a+.injective.peggy.v1.MsgDepositClaimResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/deposit_claim\x12\x8f\x01\n\rWithdrawClaim\x12$.injective.peggy.v1.MsgWithdrawClaim\x1a,.injective.peggy.v1.MsgWithdrawClaimResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/withdraw_claim\x12\xa3\x01\n\x11ValsetUpdateClaim\x12).injective.peggy.v1.MsgValsetUpdatedClaim\x1a\x31.injective.peggy.v1.MsgValsetUpdatedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/valset_updated_claim\x12\xa4\x01\n\x12\x45RC20DeployedClaim\x12).injective.peggy.v1.MsgERC20DeployedClaim\x1a\x31.injective.peggy.v1.MsgERC20DeployedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/erc20_deployed_claim\x12\xba\x01\n\x18SetOrchestratorAddresses\x12/.injective.peggy.v1.MsgSetOrchestratorAddresses\x1a\x37.injective.peggy.v1.MsgSetOrchestratorAddressesResponse\"4\x82\xd3\xe4\x93\x02.\",/injective/peggy/v1/set_orchestrator_address\x12\x99\x01\n\x0f\x43\x61ncelSendToEth\x12&.injective.peggy.v1.MsgCancelSendToEth\x1a..injective.peggy.v1.MsgCancelSendToEthResponse\".\x82\xd3\xe4\x93\x02(\"&/injective/peggy/v1/cancel_send_to_eth\x12\xc5\x01\n\x1aSubmitBadSignatureEvidence\x12\x31.injective.peggy.v1.MsgSubmitBadSignatureEvidence\x1a\x39.injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse\"9\x82\xd3\xe4\x93\x02\x33\"1/injective/peggy/v1/submit_bad_signature_evidenceBMZKgithub.com/InjectiveLabs/injective-core/injectiv
e-chain/modules/peggy/typesb\x06proto3'
,
dependencies=[cosmos_dot_base_dot_v1beta1_dot_coin__pb2.DESCRIPTOR,gogoproto_dot_gogo__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,injective_dot_peggy_dot_v1_dot_types__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_MSGSETORCHESTRATORADDRESSES = _descriptor.Descriptor(
name='MsgSetOrchestratorAddresses',
full_name='injective.peggy.v1.MsgSetOrchestratorAddresses',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=196,
serialized_end=284,
)
_MSGSETORCHESTRATORADDRESSESRESPONSE = _descriptor.Descriptor(
name='MsgSetOrchestratorAddressesResponse',
full_name='injective.peggy.v1.MsgSetOrchestratorAddressesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=286,
serialized_end=323,
)
_MSGVALSETCONFIRM = _descriptor.Descriptor(
name='MsgValsetConfirm',
full_name='injective.peggy.v1.MsgValsetConfirm',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgValsetConfirm.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgValsetConfirm.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgValsetConfirm.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgValsetConfirm.signature', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=325,
serialized_end=420,
)
_MSGVALSETCONFIRMRESPONSE = _descriptor.Descriptor(
name='MsgValsetConfirmResponse',
full_name='injective.peggy.v1.MsgValsetConfirmResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=422,
serialized_end=448,
)
_MSGSENDTOETH = _descriptor.Descriptor(
name='MsgSendToEth',
full_name='injective.peggy.v1.MsgSendToEth',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSendToEth.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_dest', full_name='injective.peggy.v1.MsgSendToEth.eth_dest', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgSendToEth.amount', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bridge_fee', full_name='injective.peggy.v1.MsgSendToEth.bridge_fee', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=451,
serialized_end=601,
)
_MSGSENDTOETHRESPONSE = _descriptor.Descriptor(
name='MsgSendToEthResponse',
full_name='injective.peggy.v1.MsgSendToEthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=603,
serialized_end=625,
)
_MSGREQUESTBATCH = _descriptor.Descriptor(
name='MsgRequestBatch',
full_name='injective.peggy.v1.MsgRequestBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgRequestBatch.orchestrator', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='denom', full_name='injective.peggy.v1.MsgRequestBatch.denom', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=627,
serialized_end=681,
)
_MSGREQUESTBATCHRESPONSE = _descriptor.Descriptor(
name='MsgRequestBatchResponse',
full_name='injective.peggy.v1.MsgRequestBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=683,
serialized_end=708,
)
_MSGCONFIRMBATCH = _descriptor.Descriptor(
name='MsgConfirmBatch',
full_name='injective.peggy.v1.MsgConfirmBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgConfirmBatch.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgConfirmBatch.token_contract', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_signer', full_name='injective.peggy.v1.MsgConfirmBatch.eth_signer', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgConfirmBatch.orchestrator', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgConfirmBatch.signature', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=710,
serialized_end=827,
)
_MSGCONFIRMBATCHRESPONSE = _descriptor.Descriptor(
name='MsgConfirmBatchResponse',
full_name='injective.peggy.v1.MsgConfirmBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=829,
serialized_end=854,
)
_MSGDEPOSITCLAIM = _descriptor.Descriptor(
name='MsgDepositClaim',
full_name='injective.peggy.v1.MsgDepositClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgDepositClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgDepositClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgDepositClaim.token_contract', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgDepositClaim.amount', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ethereum_sender', full_name='injective.peggy.v1.MsgDepositClaim.ethereum_sender', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_receiver', full_name='injective.peggy.v1.MsgDepositClaim.cosmos_receiver', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgDepositClaim.orchestrator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=857,
serialized_end=1077,
)
_MSGDEPOSITCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgDepositClaimResponse',
full_name='injective.peggy.v1.MsgDepositClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1079,
serialized_end=1104,
)
_MSGWITHDRAWCLAIM = _descriptor.Descriptor(
name='MsgWithdrawClaim',
full_name='injective.peggy.v1.MsgWithdrawClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgWithdrawClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='batch_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.batch_nonce', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgWithdrawClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgWithdrawClaim.orchestrator', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1107,
serialized_end=1235,
)
_MSGWITHDRAWCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgWithdrawClaimResponse',
full_name='injective.peggy.v1.MsgWithdrawClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1237,
serialized_end=1263,
)
_MSGERC20DEPLOYEDCLAIM = _descriptor.Descriptor(
name='MsgERC20DeployedClaim',
full_name='injective.peggy.v1.MsgERC20DeployedClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgERC20DeployedClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgERC20DeployedClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_denom', full_name='injective.peggy.v1.MsgERC20DeployedClaim.cosmos_denom', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgERC20DeployedClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='injective.peggy.v1.MsgERC20DeployedClaim.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='symbol', full_name='injective.peggy.v1.MsgERC20DeployedClaim.symbol', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='decimals', full_name='injective.peggy.v1.MsgERC20DeployedClaim.decimals', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgERC20DeployedClaim.orchestrator', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1266,
serialized_end=1448,
)
_MSGERC20DEPLOYEDCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgERC20DeployedClaimResponse',
full_name='injective.peggy.v1.MsgERC20DeployedClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1450,
serialized_end=1481,
)
_MSGCANCELSENDTOETH = _descriptor.Descriptor(
name='MsgCancelSendToEth',
full_name='injective.peggy.v1.MsgCancelSendToEth',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='injective.peggy.v1.MsgCancelSendToEth.transaction_id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgCancelSendToEth.sender', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1483,
serialized_end=1543,
)
_MSGCANCELSENDTOETHRESPONSE = _descriptor.Descriptor(
name='MsgCancelSendToEthResponse',
full_name='injective.peggy.v1.MsgCancelSendToEthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1545,
serialized_end=1573,
)
# ---------------------------------------------------------------------------
# Machine-generated protobuf message descriptors (protoc output for
# injective/peggy/v1/msgs.proto).  The numeric type/cpp_type codes and the
# serialized_start/serialized_end byte offsets are derived from the
# serialized FileDescriptorProto, so these literals must not be edited by
# hand -- regenerate from the .proto file instead.
# ---------------------------------------------------------------------------
_MSGSUBMITBADSIGNATUREEVIDENCE = _descriptor.Descriptor(
  name='MsgSubmitBadSignatureEvidence',
  full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='subject', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.subject', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='signature', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.signature', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sender', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.sender', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1575,
  serialized_end=1680,
)
# Empty (fieldless) response message for SubmitBadSignatureEvidence.
_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE = _descriptor.Descriptor(
  name='MsgSubmitBadSignatureEvidenceResponse',
  full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1682,
  serialized_end=1721,
)
# Descriptor for MsgValsetUpdatedClaim: event/valset nonces, block height,
# a repeated 'members' message field, reward amount/token and orchestrator.
_MSGVALSETUPDATEDCLAIM = _descriptor.Descriptor(
  name='MsgValsetUpdatedClaim',
  full_name='injective.peggy.v1.MsgValsetUpdatedClaim',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='event_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.event_nonce', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='valset_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.valset_nonce', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.block_height', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='members', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.members', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='reward_amount', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_amount', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='reward_token', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_token', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='orchestrator', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.orchestrator', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1724,
  serialized_end=1981,
)
# Empty (fieldless) response message for ValsetUpdateClaim.
_MSGVALSETUPDATEDCLAIMRESPONSE = _descriptor.Descriptor(
  name='MsgValsetUpdatedClaimResponse',
  full_name='injective.peggy.v1.MsgValsetUpdatedClaimResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1983,
  serialized_end=2014,
)
# Late-bound field wiring: attach message-typed fields to descriptors that
# live in imported generated modules (coin, any, peggy types).
_MSGSENDTOETH.fields_by_name['amount'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSENDTOETH.fields_by_name['bridge_fee'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSUBMITBADSIGNATUREEVIDENCE.fields_by_name['subject'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_MSGVALSETUPDATEDCLAIM.fields_by_name['members'].message_type = injective_dot_peggy_dot_v1_dot_types__pb2._BRIDGEVALIDATOR
# Register every message descriptor on the file descriptor by simple name,
# then register the file itself with the default symbol database.
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddresses'] = _MSGSETORCHESTRATORADDRESSES
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddressesResponse'] = _MSGSETORCHESTRATORADDRESSESRESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetConfirm'] = _MSGVALSETCONFIRM
DESCRIPTOR.message_types_by_name['MsgValsetConfirmResponse'] = _MSGVALSETCONFIRMRESPONSE
DESCRIPTOR.message_types_by_name['MsgSendToEth'] = _MSGSENDTOETH
DESCRIPTOR.message_types_by_name['MsgSendToEthResponse'] = _MSGSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgRequestBatch'] = _MSGREQUESTBATCH
DESCRIPTOR.message_types_by_name['MsgRequestBatchResponse'] = _MSGREQUESTBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgConfirmBatch'] = _MSGCONFIRMBATCH
DESCRIPTOR.message_types_by_name['MsgConfirmBatchResponse'] = _MSGCONFIRMBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgDepositClaim'] = _MSGDEPOSITCLAIM
DESCRIPTOR.message_types_by_name['MsgDepositClaimResponse'] = _MSGDEPOSITCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgWithdrawClaim'] = _MSGWITHDRAWCLAIM
DESCRIPTOR.message_types_by_name['MsgWithdrawClaimResponse'] = _MSGWITHDRAWCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaim'] = _MSGERC20DEPLOYEDCLAIM
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaimResponse'] = _MSGERC20DEPLOYEDCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgCancelSendToEth'] = _MSGCANCELSENDTOETH
DESCRIPTOR.message_types_by_name['MsgCancelSendToEthResponse'] = _MSGCANCELSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidence'] = _MSGSUBMITBADSIGNATUREEVIDENCE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidenceResponse'] = _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaim'] = _MSGVALSETUPDATEDCLAIM
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaimResponse'] = _MSGVALSETUPDATEDCLAIMRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message classes: one GeneratedProtocolMessageType per descriptor,
# each registered with the default symbol database (standard protoc epilogue).
MsgSetOrchestratorAddresses = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddresses', (_message.Message,), {
  'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSES,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddresses)
  })
_sym_db.RegisterMessage(MsgSetOrchestratorAddresses)
MsgSetOrchestratorAddressesResponse = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddressesResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSESRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddressesResponse)
  })
_sym_db.RegisterMessage(MsgSetOrchestratorAddressesResponse)
MsgValsetConfirm = _reflection.GeneratedProtocolMessageType('MsgValsetConfirm', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETCONFIRM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirm)
  })
_sym_db.RegisterMessage(MsgValsetConfirm)
MsgValsetConfirmResponse = _reflection.GeneratedProtocolMessageType('MsgValsetConfirmResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETCONFIRMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirmResponse)
  })
_sym_db.RegisterMessage(MsgValsetConfirmResponse)
MsgSendToEth = _reflection.GeneratedProtocolMessageType('MsgSendToEth', (_message.Message,), {
  'DESCRIPTOR' : _MSGSENDTOETH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEth)
  })
_sym_db.RegisterMessage(MsgSendToEth)
MsgSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgSendToEthResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSENDTOETHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEthResponse)
  })
_sym_db.RegisterMessage(MsgSendToEthResponse)
MsgRequestBatch = _reflection.GeneratedProtocolMessageType('MsgRequestBatch', (_message.Message,), {
  'DESCRIPTOR' : _MSGREQUESTBATCH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatch)
  })
_sym_db.RegisterMessage(MsgRequestBatch)
MsgRequestBatchResponse = _reflection.GeneratedProtocolMessageType('MsgRequestBatchResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGREQUESTBATCHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatchResponse)
  })
_sym_db.RegisterMessage(MsgRequestBatchResponse)
MsgConfirmBatch = _reflection.GeneratedProtocolMessageType('MsgConfirmBatch', (_message.Message,), {
  'DESCRIPTOR' : _MSGCONFIRMBATCH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatch)
  })
_sym_db.RegisterMessage(MsgConfirmBatch)
MsgConfirmBatchResponse = _reflection.GeneratedProtocolMessageType('MsgConfirmBatchResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGCONFIRMBATCHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatchResponse)
  })
_sym_db.RegisterMessage(MsgConfirmBatchResponse)
MsgDepositClaim = _reflection.GeneratedProtocolMessageType('MsgDepositClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGDEPOSITCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaim)
  })
_sym_db.RegisterMessage(MsgDepositClaim)
MsgDepositClaimResponse = _reflection.GeneratedProtocolMessageType('MsgDepositClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGDEPOSITCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaimResponse)
  })
_sym_db.RegisterMessage(MsgDepositClaimResponse)
MsgWithdrawClaim = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGWITHDRAWCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaim)
  })
_sym_db.RegisterMessage(MsgWithdrawClaim)
MsgWithdrawClaimResponse = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGWITHDRAWCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaimResponse)
  })
_sym_db.RegisterMessage(MsgWithdrawClaimResponse)
MsgERC20DeployedClaim = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaim)
  })
_sym_db.RegisterMessage(MsgERC20DeployedClaim)
MsgERC20DeployedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaimResponse)
  })
_sym_db.RegisterMessage(MsgERC20DeployedClaimResponse)
MsgCancelSendToEth = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEth', (_message.Message,), {
  'DESCRIPTOR' : _MSGCANCELSENDTOETH,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEth)
  })
_sym_db.RegisterMessage(MsgCancelSendToEth)
MsgCancelSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEthResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGCANCELSENDTOETHRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEthResponse)
  })
_sym_db.RegisterMessage(MsgCancelSendToEthResponse)
MsgSubmitBadSignatureEvidence = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidence', (_message.Message,), {
  'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidence)
  })
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidence)
MsgSubmitBadSignatureEvidenceResponse = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidenceResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse)
  })
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidenceResponse)
MsgValsetUpdatedClaim = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaim', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIM,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaim)
  })
_sym_db.RegisterMessage(MsgValsetUpdatedClaim)
MsgValsetUpdatedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaimResponse', (_message.Message,), {
  'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIMRESPONSE,
  '__module__' : 'injective.peggy.v1.msgs_pb2'
  # @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaimResponse)
  })
_sym_db.RegisterMessage(MsgValsetUpdatedClaimResponse)
# Reset the cached Python-level options so option values are lazily
# re-parsed from the serialized descriptors (standard protoc epilogue).
DESCRIPTOR._options = None
_MSGSENDTOETH.fields_by_name['amount']._options = None
_MSGSENDTOETH.fields_by_name['bridge_fee']._options = None
_MSGDEPOSITCLAIM.fields_by_name['amount']._options = None
_MSGVALSETUPDATEDCLAIM.fields_by_name['reward_amount']._options = None
# Service descriptor for injective.peggy.v1.Msg.  Each method's
# serialized_options byte string embeds an HTTP-style route path
# (e.g. "/injective/peggy/v1/valset_confirm" is visible in the bytes).
_MSG = _descriptor.ServiceDescriptor(
  name='Msg',
  full_name='injective.peggy.v1.Msg',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=2017,
  serialized_end=3749,
  methods=[
  _descriptor.MethodDescriptor(
    name='ValsetConfirm',
    full_name='injective.peggy.v1.Msg.ValsetConfirm',
    index=0,
    containing_service=None,
    input_type=_MSGVALSETCONFIRM,
    output_type=_MSGVALSETCONFIRMRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/valset_confirm',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SendToEth',
    full_name='injective.peggy.v1.Msg.SendToEth',
    index=1,
    containing_service=None,
    input_type=_MSGSENDTOETH,
    output_type=_MSGSENDTOETHRESPONSE,
    serialized_options=b'\202\323\344\223\002!\"\037/injective/peggy/v1/send_to_eth',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='RequestBatch',
    full_name='injective.peggy.v1.Msg.RequestBatch',
    index=2,
    containing_service=None,
    input_type=_MSGREQUESTBATCH,
    output_type=_MSGREQUESTBATCHRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/request_batch',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ConfirmBatch',
    full_name='injective.peggy.v1.Msg.ConfirmBatch',
    index=3,
    containing_service=None,
    input_type=_MSGCONFIRMBATCH,
    output_type=_MSGCONFIRMBATCHRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/confirm_batch',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='DepositClaim',
    full_name='injective.peggy.v1.Msg.DepositClaim',
    index=4,
    containing_service=None,
    input_type=_MSGDEPOSITCLAIM,
    output_type=_MSGDEPOSITCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/deposit_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='WithdrawClaim',
    full_name='injective.peggy.v1.Msg.WithdrawClaim',
    index=5,
    containing_service=None,
    input_type=_MSGWITHDRAWCLAIM,
    output_type=_MSGWITHDRAWCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/withdraw_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ValsetUpdateClaim',
    full_name='injective.peggy.v1.Msg.ValsetUpdateClaim',
    index=6,
    containing_service=None,
    input_type=_MSGVALSETUPDATEDCLAIM,
    output_type=_MSGVALSETUPDATEDCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/valset_updated_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ERC20DeployedClaim',
    full_name='injective.peggy.v1.Msg.ERC20DeployedClaim',
    index=7,
    containing_service=None,
    input_type=_MSGERC20DEPLOYEDCLAIM,
    output_type=_MSGERC20DEPLOYEDCLAIMRESPONSE,
    serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/erc20_deployed_claim',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SetOrchestratorAddresses',
    full_name='injective.peggy.v1.Msg.SetOrchestratorAddresses',
    index=8,
    containing_service=None,
    input_type=_MSGSETORCHESTRATORADDRESSES,
    output_type=_MSGSETORCHESTRATORADDRESSESRESPONSE,
    serialized_options=b'\202\323\344\223\002.\",/injective/peggy/v1/set_orchestrator_address',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CancelSendToEth',
    full_name='injective.peggy.v1.Msg.CancelSendToEth',
    index=9,
    containing_service=None,
    input_type=_MSGCANCELSENDTOETH,
    output_type=_MSGCANCELSENDTOETHRESPONSE,
    serialized_options=b'\202\323\344\223\002(\"&/injective/peggy/v1/cancel_send_to_eth',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='SubmitBadSignatureEvidence',
    full_name='injective.peggy.v1.Msg.SubmitBadSignatureEvidence',
    index=10,
    containing_service=None,
    input_type=_MSGSUBMITBADSIGNATUREEVIDENCE,
    output_type=_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
    serialized_options=b'\202\323\344\223\0023\"1/injective/peggy/v1/submit_bad_signature_evidence',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_MSG)
DESCRIPTOR.services_by_name['Msg'] = _MSG
# @@protoc_insertion_point(module_scope)
| 0 | 0 | 0 |
1d8534395fdd5c7f55a83b8b80a845e28e72c98e | 272 | py | Python | Mundo 2_Estruturas de Controle/Desafio_64.py | VictorOliveira02/Desafios-Python3-Curso-em-Video | 53ee8bd814b816f3a21936677ef3f155b582843f | [
"MIT"
] | null | null | null | Mundo 2_Estruturas de Controle/Desafio_64.py | VictorOliveira02/Desafios-Python3-Curso-em-Video | 53ee8bd814b816f3a21936677ef3f155b582843f | [
"MIT"
] | null | null | null | Mundo 2_Estruturas de Controle/Desafio_64.py | VictorOliveira02/Desafios-Python3-Curso-em-Video | 53ee8bd814b816f3a21936677ef3f155b582843f | [
"MIT"
] | null | null | null | n = conta = soma = 0
# Keep reading integers until the sentinel value 999 is entered,
# accumulating their running sum (soma) and count (conta).
n = int(input('Digite um valor para soma (999 para SAIR): '))
while n != 999:
    soma = soma + n
    conta = conta + 1
    # Next value; 999 stops the loop and is not included in the sum.
    n = int(input('Digite um valor para soma (999 para SAIR): '))
print(f'A SOMA DOS {conta} VALORES É IGUAL Á {soma}.')
| 27.2 | 65 | 0.584559 | n = conta = soma = 0
# Keep reading integers until the sentinel value 999 is entered,
# accumulating their running sum (soma) and count (conta).
n = int(input('Digite um valor para soma (999 para SAIR): '))
while n != 999:
    soma = soma + n
    conta = conta + 1
    # Next value; 999 stops the loop and is not included in the sum.
    n = int(input('Digite um valor para soma (999 para SAIR): '))
print(f'A SOMA DOS {conta} VALORES É IGUAL Á {soma}.')
| 0 | 0 | 0 |
4d41a74f377233e4cdcc594f3e6d19359490d7d0 | 1,429 | py | Python | Section 3/imdb.py | PacktPublishing/-Natural-Language-Processing-in-Practice | c9d999bdb7ac499d3df4f25e3aa5f949d180ec22 | [
"MIT"
] | 15 | 2019-01-24T20:54:35.000Z | 2022-03-31T07:28:28.000Z | Section 3/imdb.py | PacktPublishing/-Natural-Language-Processing-in-Practice | c9d999bdb7ac499d3df4f25e3aa5f949d180ec22 | [
"MIT"
] | null | null | null | Section 3/imdb.py | PacktPublishing/-Natural-Language-Processing-in-Practice | c9d999bdb7ac499d3df4f25e3aa5f949d180ec22 | [
"MIT"
] | 13 | 2019-02-08T00:35:02.000Z | 2022-03-11T13:31:39.000Z | from keras.datasets import imdb
# Binary sentiment classifier for the IMDB review dataset:
# Embedding -> Flatten -> Dense(sigmoid), restricted to the 5000 most
# frequent words, then probed with four hand-written sentences.
import keras
from keras.models import Sequential
from keras.layers.embeddings import Embedding
from keras.layers import Flatten, Dense
from keras.preprocessing import sequence
from numpy import array
# Reviews arrive pre-encoded as sequences of word indices.
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=5000)
word_to_id = keras.datasets.imdb.get_word_index()
# Shift every index by 3 to make room for the special tokens below
# (presumably matching load_data's default index_from=3 -- verify).
word_to_id = {k:(v+3) for k,v in word_to_id.items()}
word_to_id["<PAD>"] = 0
word_to_id["<START>"] = 1
word_to_id["<UNK>"] = 2
# Pad/truncate every review to a fixed length of 300 tokens.
x_train = sequence.pad_sequences(x_train, maxlen=300)
x_test = sequence.pad_sequences(x_test, maxlen=300)
network = Sequential()
network.add(Embedding(5000, 32, input_length=300))
network.add(Flatten())
network.add(Dense(1, activation='sigmoid'))
network.compile(loss="binary_crossentropy", optimizer='Adam', metrics=['accuracy'])
network.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=3, batch_size=64)
result = network.evaluate(x_test, y_test, verbose=0)
# Probe sentences; every word must exist in word_to_id or the loop
# below raises KeyError.
negative = "this movie was bad"
positive = "i had fun"
negative2 = "this movie was terrible"
positive2 = "i really liked the movie"
for review in [positive, positive2, negative, negative2]:
    temp = []
    for word in review.split(" "):
        temp.append(word_to_id[word])
    temp_padded = sequence.pad_sequences([temp], maxlen=300)
    # Sigmoid output near 1.0 reads as positive, near 0.0 as negative.
    print(review + " -- Sent -- " + str(network.predict(array([temp_padded][0]))[0][0]))
| 34.02381 | 89 | 0.716585 | from keras.datasets import imdb
# Binary sentiment classifier for the IMDB review dataset:
# Embedding -> Flatten -> Dense(sigmoid), restricted to the 5000 most
# frequent words, then probed with four hand-written sentences.
import keras
from keras.models import Sequential
from keras.layers.embeddings import Embedding
from keras.layers import Flatten, Dense
from keras.preprocessing import sequence
from numpy import array
# Reviews arrive pre-encoded as sequences of word indices.
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=5000)
word_to_id = keras.datasets.imdb.get_word_index()
# Shift every index by 3 to make room for the special tokens below
# (presumably matching load_data's default index_from=3 -- verify).
word_to_id = {k:(v+3) for k,v in word_to_id.items()}
word_to_id["<PAD>"] = 0
word_to_id["<START>"] = 1
word_to_id["<UNK>"] = 2
# Pad/truncate every review to a fixed length of 300 tokens.
x_train = sequence.pad_sequences(x_train, maxlen=300)
x_test = sequence.pad_sequences(x_test, maxlen=300)
network = Sequential()
network.add(Embedding(5000, 32, input_length=300))
network.add(Flatten())
network.add(Dense(1, activation='sigmoid'))
network.compile(loss="binary_crossentropy", optimizer='Adam', metrics=['accuracy'])
network.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=3, batch_size=64)
result = network.evaluate(x_test, y_test, verbose=0)
# Probe sentences; every word must exist in word_to_id or the loop
# below raises KeyError.
negative = "this movie was bad"
positive = "i had fun"
negative2 = "this movie was terrible"
positive2 = "i really liked the movie"
for review in [positive, positive2, negative, negative2]:
    temp = []
    for word in review.split(" "):
        temp.append(word_to_id[word])
    temp_padded = sequence.pad_sequences([temp], maxlen=300)
    # Sigmoid output near 1.0 reads as positive, near 0.0 as negative.
    print(review + " -- Sent -- " + str(network.predict(array([temp_padded][0]))[0][0]))
| 0 | 0 | 0 |
4af8e420262bb1d6912fb82b24d8afd62bd757c1 | 657 | py | Python | packages/scikit-image/examples/plot_threshold.py | zmoon/scipy-lecture-notes | 75a89ddedeb48930dbdb6fe25a76e9ef0587ae21 | [
"CC-BY-4.0"
] | 2,538 | 2015-01-01T04:58:41.000Z | 2022-03-31T21:06:05.000Z | packages/scikit-image/examples/plot_threshold.py | zmoon/scipy-lecture-notes | 75a89ddedeb48930dbdb6fe25a76e9ef0587ae21 | [
"CC-BY-4.0"
] | 362 | 2015-01-18T14:16:23.000Z | 2021-11-18T16:24:34.000Z | packages/scikit-image/examples/plot_threshold.py | zmoon/scipy-lecture-notes | 75a89ddedeb48930dbdb6fe25a76e9ef0587ae21 | [
"CC-BY-4.0"
] | 1,127 | 2015-01-05T14:39:29.000Z | 2022-03-25T08:38:39.000Z | """
Otsu thresholding
==================
This example illustrates automatic Otsu thresholding.
"""
# Compute Otsu's automatic threshold for the sample 'camera' image and
# display: the original, the thresholded mask, and the gray-level
# histogram with the chosen threshold marked.
import matplotlib.pyplot as plt
from skimage import data
from skimage import filters
from skimage import exposure
camera = data.camera()
val = filters.threshold_otsu(camera)
hist, bins_center = exposure.histogram(camera)
plt.figure(figsize=(9, 4))
plt.subplot(131)
plt.imshow(camera, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(132)
# Pixels strictly below the Otsu threshold form the binary mask.
plt.imshow(camera < val, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(133)
plt.plot(bins_center, hist, lw=2)
# Dashed vertical line marks the threshold on the histogram.
plt.axvline(val, color='k', ls='--')
plt.tight_layout()
plt.show()
| 21.193548 | 62 | 0.727549 | """
Otsu thresholding
==================
This example illustrates automatic Otsu thresholding.
"""
# Compute Otsu's automatic threshold for the sample 'camera' image and
# display: the original, the thresholded mask, and the gray-level
# histogram with the chosen threshold marked.
import matplotlib.pyplot as plt
from skimage import data
from skimage import filters
from skimage import exposure
camera = data.camera()
val = filters.threshold_otsu(camera)
hist, bins_center = exposure.histogram(camera)
plt.figure(figsize=(9, 4))
plt.subplot(131)
plt.imshow(camera, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(132)
# Pixels strictly below the Otsu threshold form the binary mask.
plt.imshow(camera < val, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(133)
plt.plot(bins_center, hist, lw=2)
# Dashed vertical line marks the threshold on the histogram.
plt.axvline(val, color='k', ls='--')
plt.tight_layout()
plt.show()
| 0 | 0 | 0 |
4a399209469f54d6a0788f38c4cdc3ee7b55bff7 | 1,471 | py | Python | BS/BS_082_170521/main.py | Aleksey-Voko/Word_forms_bases | f14173cef830e7a514dfaefba3bbbf0c02a3ac0f | [
"MIT"
] | null | null | null | BS/BS_082_170521/main.py | Aleksey-Voko/Word_forms_bases | f14173cef830e7a514dfaefba3bbbf0c02a3ac0f | [
"MIT"
] | null | null | null | BS/BS_082_170521/main.py | Aleksey-Voko/Word_forms_bases | f14173cef830e7a514dfaefba3bbbf0c02a3ac0f | [
"MIT"
] | null | null | null | from BS.utils import read_src_bs, save_bs_dicts_to_txt, read_src_socket_bs, save_socket_bs_dicts_to_txt
# Script entry point: run only the socket-base (БГ) template update.
# NOTE(review): change_template_bg is not defined in this chunk; it is
# expected to exist elsewhere in the file.
if __name__ == '__main__':
    change_template_bg()
| 36.775 | 103 | 0.588715 | from BS.utils import read_src_bs, save_bs_dicts_to_txt, read_src_socket_bs, save_socket_bs_dicts_to_txt
def change_template_bs():
    """Rewrite the 'мнI2' marker to 'мнI2**' for matching title forms in the word-forms base (БС) and save the result."""
    word_forms_bases = list(read_src_bs('src_dict/БС 15.05.21.txt'))
    for group in word_forms_bases:
        title_form = group.title_word_form
        # Only forms tagged '.СеИ' whose name ends in 'ий', excluding
        # three explicit exception words.
        if all([
            title_form.idf == '.СеИ',
            title_form.name.endswith('ий'),
            title_form.name not in ('вий', 'змий', 'кий'),
        ]):
            # Replace the marker in place wherever it occurs in the info list.
            for count, identifier in enumerate(title_form.info):
                if identifier == 'мнI2':
                    title_form.info[count] = 'мнI2**'
    save_bs_dicts_to_txt(sorted(word_forms_bases), 'out/БС 17.05.21.txt')
def change_template_bg():
    """Rewrite the 'мнI2' marker to 'мнI2**' for matching word forms in the socket base (БГ) and save the result."""
    socket_group_list = list(read_src_socket_bs('src_dict/БГ 15.05.21.txt'))
    for socket_group in socket_group_list:
        for sub_group in socket_group.sub_groups:
            for word_form in sub_group.socket_word_forms:
                # Same selection rule as change_template_bs: idf '.СеИ',
                # name ending in 'ий', minus three exception words.
                if all([
                    word_form.idf == '.СеИ',
                    word_form.name.endswith('ий'),
                    word_form.name not in ('вий', 'змий', 'кий'),
                ]):
                    # Replace the marker in place in the form's info list.
                    for count, identifier in enumerate(word_form.info):
                        if identifier == 'мнI2':
                            word_form.info[count] = 'мнI2**'
    save_socket_bs_dicts_to_txt(socket_group_list, 'out/БГ 17.05.21.txt')
# Script entry point: only the socket-base (БГ) update runs here;
# change_template_bs must be invoked separately when needed.
if __name__ == '__main__':
    change_template_bg()
| 1,311 | 0 | 46 |
8f59783e7c2993679127dfa529b5ae4433b0fd0d | 456 | py | Python | config/conv.py | aarzilli/yacco | 8378d59579ed3644a8da6b3dd5ee85496e32f0c9 | [
"BSD-3-Clause"
] | 24 | 2018-05-09T12:43:15.000Z | 2022-03-06T01:58:49.000Z | config/conv.py | aarzilli/yacco | 8378d59579ed3644a8da6b3dd5ee85496e32f0c9 | [
"BSD-3-Clause"
] | 5 | 2019-04-21T20:06:04.000Z | 2020-05-11T07:41:28.000Z | config/conv.py | aarzilli/yacco | 8378d59579ed3644a8da6b3dd5ee85496e32f0c9 | [
"BSD-3-Clause"
] | 3 | 2019-04-23T09:06:54.000Z | 2020-05-09T14:48:30.000Z | #!/usr/bin/env python
# Embed each bundled TrueType font as a Go []byte source file.
# NOTE(review): convert() is not defined in this chunk; it is expected
# to exist elsewhere in the file.
convert("config/luxisr.ttf", "luxibytes")
convert("config/luximr.ttf", "luximonobytes")
convert("config/DejaVuSans.ttf", "dejabytes")
convert("config/DejaVuSansMono.ttf", "dejamonobytes")
| 25.333333 | 53 | 0.640351 | #!/usr/bin/env python
def convert(path, name):
    """Embed the binary file at *path* as a Go []byte literal.

    Writes ``config/<name>.go`` declaring ``var <name> = []byte{...}`` in
    package ``config``, one decimal byte value per element.
    """
    # Read as raw bytes: fonts are binary, so the original text-mode
    # ``open(path).read()`` + ``ord`` approach fails under Python 3.
    # bytearray iterates as ints on both Python 2 and 3.
    with open(path, "rb") as src:
        data = bytearray(src.read())
    with open("config/" + name + ".go", "w") as out:
        out.write("package config\n")
        out.write("var " + name + " = []byte{\n\t")
        for byte_value in data:
            out.write(str(byte_value) + ", ")
        out.write("}\n")
# Embed each bundled TrueType font as a Go []byte source file.
convert("config/luxisr.ttf", "luxibytes")
convert("config/luximr.ttf", "luximonobytes")
convert("config/DejaVuSans.ttf", "dejabytes")
convert("config/DejaVuSansMono.ttf", "dejamonobytes")
| 222 | 0 | 23 |
8b3383cc48592356ea045efba36bd7d2d0455bb9 | 3,744 | py | Python | Algos/base.py | benzomo/PyPackages | 880823315411f6bf812827f59dd3a4408052e9a6 | [
"MIT"
] | null | null | null | Algos/base.py | benzomo/PyPackages | 880823315411f6bf812827f59dd3a4408052e9a6 | [
"MIT"
] | null | null | null | Algos/base.py | benzomo/PyPackages | 880823315411f6bf812827f59dd3a4408052e9a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 25 15:32:42 2018
@author: benmo
"""
import pandas as pd, numpy as np
from .functions import *
| 35.320755 | 154 | 0.523504 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 25 15:32:42 2018
@author: benmo
"""
import pandas as pd, numpy as np
from .functions import *
def scale(df, columns=None, how='minmax'):
    """Build forward/inverse scaling transforms fitted on *columns* of *df*.

    Parameters
    ----------
    df : pandas.DataFrame
        Reference frame used to fit the scaling statistics.
    columns : str, sequence of labels, or None
        Column(s) to scale; defaults to every column of ``df``.
    how : {'std', 'minmax', 'boxcox'}
        Scaling scheme.

    Returns
    -------
    Transform
        Object exposing ``tfm`` (apply the scaling) and ``utfm``
        (undo it) callables.

    Raises
    ------
    ValueError
        If *how* is not a supported scheme (previously this fell through
        to a confusing NameError).
    """
    if columns is None:
        columns = df.columns

    class Transform():
        """Pair of forward (``tfm``) and inverse (``utfm``) transforms."""
        def __init__(self, tfm, utfm):
            self.tfm = tfm
            self.utfm = utfm

    if how == 'std':
        # Population std (ddof=0) matches what StandardScaler fits.
        std = df[columns].std(ddof=0)
        mu = df[columns].mean()
        tfm = lambda df: pd.DataFrame(StandardScaler().fit_transform(df[columns]),
                         index=df.index, columns=columns)
        utfm = lambda x: x[columns]*std + mu
    elif how == 'minmax':
        maxmin = df[columns].max() - df[columns].min()
        mindf = df[columns].min()
        tfm = lambda df: pd.DataFrame(MinMaxScaler().fit_transform(df[columns]),
                         index=df.index, columns=columns)
        utfm = lambda x: x[columns]*maxmin + mindf
    elif how == 'boxcox':
        # One optimal Box-Cox lambda per column, fitted on the reference frame.
        bxcx_lmd = df.apply(boxcox_normmax)
        tfm = lambda df: boxcox(df[columns], bxcx_lmd[columns]) if isinstance(columns, str) else pd.concat(map(lambda x: boxcox(df[x], bxcx_lmd[x]),
                                       df[columns]), axis=1)
        # Bug fix: the single-column inverse used to re-apply boxcox;
        # it must use inv_boxcox, matching the multi-column branch.
        utfm = lambda df: inv_boxcox(df[columns], bxcx_lmd[columns]) if isinstance(columns, str) else pd.concat(map(lambda x: inv_boxcox(df[x], bxcx_lmd[x]),
                                       df[columns]), axis=1)
    else:
        raise ValueError("unsupported scaling scheme: %r" % (how,))
    return Transform(tfm, utfm)
class MLDataFrame(pd.DataFrame):
    """DataFrame subclass carrying ML preprocessing state (raw copy, one-hot features, fitted scalers) across pandas operations."""
    # Names of the extra attributes propagated to derived frames.
    # NOTE(review): splitting on "," leaves a leading space on every name
    # after the first (' oh_features', ...), so _copy_attrs works with
    # space-prefixed keys -- confirm this is intended.
    _attributes_ = "raw, oh_features, add_oh, add_scaled"
    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # Constructing from another MLDataFrame carries its attributes over.
        if len(args) == 1 and isinstance(args[0], MLDataFrame):
            args[0]._copy_attrs(self)
        # NOTE(review): relies on the private pandas block manager
        # attribute ``_data``; fragile across pandas versions.
        self.raw = pd.DataFrame(self._data)
    def _copy_attrs(self, df):
        # Copy each named attribute onto the target frame (None if unset).
        for attr in self._attributes_.split(","):
            df.__dict__[attr] = getattr(self, attr, None)
    @property
    def _constructor(self):
        # pandas calls this to build result frames; wrapping construction
        # lets the custom attributes survive slicing/arithmetic.
        def init(*args, **kw):
            mldf = MLDataFrame(*args, **kw)
            self._copy_attrs(mldf)
            return mldf
        return init
    def add_oh(self, oh_features):
        # One-hot encode the given feature columns and remember them.
        # NOTE(review): ``columns_to_onehot`` must come from the module's
        # star import; it is not defined here.
        self.oh_features = oh_features
        self.oh = columns_to_onehot(self, oh_features)
        return self
    def add_scaled(self, df, scale_cols=None, scale_types=None):
        # Fit scaler(s) on *df* and store the scaled frame on self.
        if scale_cols == None:
            self.scaler = scale(df, how='minmax' if scale_types == None else scale_types)
            self.scaled = self.scaler.tfm((df))
        else:
            # Pair each column group (values) with its scheme (index).
            scalers = pd.Series(scale_cols, index=scale_types)
            scale_types = scalers.index.unique()
            self.scaler = {}
            tempcols = scalers[scale_types[0]].tolist()
            self.scaler[scale_types[0]] = scale(df, columns=tempcols, how=scale_types[0])
            # NOTE(review): ``scaler`` is undefined here (NameError at
            # runtime); this looks like it should read
            # ``self.scaler[scale_types[0]].tfm(df)``.
            self.scaled = scaler.tfm(df)
            if len(scale_types) > 1:
                # NOTE(review): this loop re-fits the first scale type too,
                # duplicating its columns in ``self.scaled``.
                for i in scalers.index.unique():
                    tempcols = scalers[i] if isinstance(scalers[i], str) else scalers[i].tolist()
                    self.scaler[i] = scale(df, columns=tempcols, how=i)
                    temp = self.scaler[i].tfm(df)
                    self.scaled = pd.concat((self.scaled, temp), axis=1)
            # Append the never-scaled columns unchanged.
            self.scaled = pd.concat((self.scaled, df[list(filter(lambda x: x not in scale_cols,
                                                   df.columns))]), axis=1)
| 3,264 | 237 | 46 |
3a0357b70fc8ab6473954d253bcffcd7a4900065 | 2,110 | py | Python | Draft_Phuong/ternary_ver2/sample.py | phuong27102000/NTRU_HRSS_KEM_SV | fe4fd095134a41f4131a3aa953197e3933b303ad | [
"MIT"
] | null | null | null | Draft_Phuong/ternary_ver2/sample.py | phuong27102000/NTRU_HRSS_KEM_SV | fe4fd095134a41f4131a3aa953197e3933b303ad | [
"MIT"
] | null | null | null | Draft_Phuong/ternary_ver2/sample.py | phuong27102000/NTRU_HRSS_KEM_SV | fe4fd095134a41f4131a3aa953197e3933b303ad | [
"MIT"
] | null | null | null | import poly
| 25.119048 | 218 | 0.493839 | import poly
def fg_HPS(b,sib,sftb,n,q):
    """Sample the (f, g) polynomial pair for the HPS parameter set.

    b must hold sib + sftb seed entries: the first sib drive the ternary
    sample f, the remaining sftb drive the fixed-type sample g.
    """
    #len(b) must = sib + sftb
    f = ter(b[0:sib],sib,n)
    g = fixed_type(b[sib:sftb+sib],sftb,n,q)
    # Append a trailing zero coefficient to g.
    g+=[0]
    return f,g
def fg_HRSS(b,sib,n):
    """Sample the (f, g) polynomial pair for the HRSS parameter set.

    Both halves of the 2*sib-entry seed are sampled with ter_plus; g is
    then built as the coefficient-wise product (x - 1) * g0.
    """
    #len(b) must = 2*sib
    f = ter_plus(b[0:sib],sib,n)
    g0 = ter_plus(b[sib:2*sib],sib,n)
    # Multiply g0 by (x - 1): coefficients are -g0[0], g0[i-1]-g0[i], g0[n-2].
    g = []
    g += [-g0[0]]
    for i in range(1,n-1):
        g += [g0[i-1]-g0[i]]
    g += [g0[n-2]]
    return f,g
def rm_HPS(b, sib, sftb, n, q):
    """Sample the HPS pair (r, m) from *b* of ``sib + sftb`` entries:
    a ternary polynomial r and a fixed-weight polynomial m."""
    r = ter(b[:sib], sib, n)
    m = fixed_type(b[sib:sib + sftb], sftb, n, q)
    return r, m
def rm_HRSS(b, sib, n):
    """Sample the HRSS pair (r, m) from *b* of ``2 * sib`` entries:
    two independent ternary polynomials."""
    return ter(b[:sib], sib, n), ter(b[sib:2 * sib], sib, n)
def ter(b, sib, n):
    """Map ``sib`` input entries of *b* onto a ternary polynomial.

    Each group of 8 consecutive entries is combined little-endian
    (entry j weighted by 2**j) into one coefficient; the resulting
    vector of n-1 coefficients is then reduced via ``poly.s3``.
    """
    v = poly.zeros_gen(n - 1)
    for i in range(n - 1):
        acc = 0
        for j in range(8):
            acc += b[8 * i + j] * (1 << j)
        v[i] += acc
    return poly.s3(v, n)
def ter_plus(b, sib, n):
    """Ternary sample with the HRSS sign correction.

    Draws v = ter(b, sib, n); if the adjacent-coefficient correlation
    sum(v[i] * v[i+1]) is negative, every even-indexed coefficient is
    negated before the final ``poly.s3`` reduction.
    """
    v = ter(b, sib, n)
    corr = sum(v[i] * v[i + 1] for i in range(n - 2))
    if corr < 0:
        for i in range(0, n - 1, 2):
            v[i] = -v[i]
    return poly.s3(v, n)
def fixed_type(b, sftb, n, q):
    """Sample a fixed-weight ternary polynomial from ``sftb`` input bits.

    Each of the n-1 coefficients receives a 30-bit random value shifted
    into the high bit positions. The low two bits then tag the first
    ``(q >> 4) - 1`` slots with value 1 and the following slots up to
    ``(q >> 3) - 2`` with value 2 (presumably interpreted as -1 mod 3
    by ``poly.s3`` — confirm against poly). Sorting on the random high
    bits permutes these tags; the low two bits of the sorted values
    become the coefficients.
    """
    a = poly.zeros_gen(n - 1)
    for i in range(n - 1):
        for j in range(30):
            # Random bits occupy bit positions 2..31; bits 0-1 stay free
            # for the coefficient tags below.
            a[i] += b[30 * i + j] << (2 + j)
    # Tag the fixed numbers of 1- and 2-valued coefficients in the low bits.
    i = 0
    while i < (q >> 4) - 1:
        a[i] |= 1
        i += 1
    while i < (q >> 3) - 2:
        a[i] |= 2
        i += 1
    # Sorting on the (random) high bits shuffles the tags uniformly.
    # (The original contained a large commented-out debug dump here; removed.)
    poly.sort_int32(a, n - 1)
    v = [a[i] & 3 for i in range(n - 1)]
    return poly.s3(v, n)
84756ffec0a18ca4293137fdbd786719cd66ced4 | 3,562 | py | Python | Classification/training.py | Natural-Goldfish/CatDogClassification | 5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263 | [
"CECILL-B"
] | null | null | null | Classification/training.py | Natural-Goldfish/CatDogClassification | 5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263 | [
"CECILL-B"
] | null | null | null | Classification/training.py | Natural-Goldfish/CatDogClassification | 5ae0fe8bc37c6ee8a1cd3bd75cc862c650822263 | [
"CECILL-B"
] | null | null | null | from src.dataset import CatDogDataset
from src.utils import *
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
import os
_CUDA_FLAG = torch.cuda.is_available()
| 45.666667 | 121 | 0.667602 | from src.dataset import CatDogDataset
from src.utils import *
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
import os
_CUDA_FLAG = torch.cuda.is_available()
def train(args):
    """Run the full train/eval loop for the cat-vs-dog classifier.

    Expects on *args*: img_path, annotation_path, batch_size, models
    (index into MODELS), model_load_flag, model_path,
    pre_trained_model_name, learning_rate, momentum, epoch.

    Side effects: logs per-epoch loss/accuracy scalars to TensorBoard
    and saves a model checkpoint after every epoch.
    """
    train_dataset = CatDogDataset(mode = "train", img_path = args.img_path, annotation_path = args.annotation_path)
    test_dataset = CatDogDataset(mode = "test", img_path = args.img_path, annotation_path = args.annotation_path)
    train_dataloader = DataLoader(train_dataset, batch_size= args.batch_size, shuffle= True)
    test_dataloader = DataLoader(test_dataset, batch_size= args.batch_size, shuffle= False)
    # TensorBoard writer; the run directory is named after the model.
    writer = SummaryWriter("{}".format(MODELS[args.models]))
    # Build the model, optionally resuming from a pre-trained checkpoint.
    model = load_model_class(args.models)
    if args.model_load_flag :
        model.load_state_dict(torch.load(os.path.join(args.model_path, args.pre_trained_model_name)))
    if _CUDA_FLAG : model.cuda()
    criterion = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(), lr = args.learning_rate, momentum = args.momentum)
    # Sigmoid is applied to the logits only for the accuracy metric,
    # never for the loss (CrossEntropyLoss expects raw logits).
    sigmoid = torch.nn.Sigmoid()
    for cur_epoch in range(args.epoch):
        # ---- Training phase ----
        model.train()
        train_total_loss = 0.0
        train_accuracy = 0.0
        for cur_batch, train_data in enumerate(train_dataloader):
            optimizer.zero_grad()
            train_images, train_labels = train_data
            if _CUDA_FLAG :
                train_images = train_images.cuda()
                # Labels flattened to 1-D as CrossEntropyLoss expects.
                train_labels = train_labels.view(-1).cuda()
            train_outputs = model(train_images)
            train_loss = criterion(train_outputs, train_labels)
            train_loss.backward()
            optimizer.step()
            # detach() so the running total does not keep the graph alive.
            train_total_loss += train_loss.detach()
            train_accuracy += accuracy(sigmoid(train_outputs.cpu().detach()), train_labels.cpu().detach())
        # Average loss and accuracy over the epoch (per-batch mean).
        train_total_loss = train_total_loss/len(train_dataloader)
        train_accuracy = train_accuracy/len(train_dataloader)
        print("TRAIN:: EPOCH {}/{} Loss {} Accuracy {}".format(cur_epoch, args.epoch, train_total_loss, train_accuracy))
        # ---- Evaluation phase ----
        model.eval()
        with torch.no_grad():
            test_total_loss = 0.0
            test_accuracy = 0.0
            for cur_batch, test_data in enumerate(test_dataloader):
                test_images, test_labels = test_data
                if _CUDA_FLAG :
                    test_images = test_images.cuda()
                    test_labels = test_labels.view(-1).cuda()
                test_outputs = model(test_images)
                test_total_loss += criterion(test_outputs, test_labels)
                test_accuracy += accuracy(sigmoid(test_outputs.cpu().detach()), test_labels.cpu().detach())
            # Average loss and accuracy over the epoch (per-batch mean).
            test_total_loss = test_total_loss/len(test_dataloader)
            test_accuracy = test_accuracy/len(test_dataloader)
            print("TEST:: EPOCH {}/{} Loss {} Accuracy {}".format(cur_epoch, args.epoch, test_total_loss, test_accuracy))
        # Checkpoint every epoch and record both phases' metrics.
        model_name = "{}_{}_checkpoint.pth".format(MODELS[args.models], cur_epoch)
        torch.save(model.state_dict(), os.path.join(args.model_path, model_name))
        writer.add_scalars("Loss", {"Train" : train_total_loss, "Test" : test_total_loss}, cur_epoch)
        writer.add_scalars("Accuracy", {"Train" : train_accuracy, "Test" : test_accuracy}, cur_epoch)
| 3,337 | 0 | 23 |
a4623bf3c0b9253d21821dea3146c26d3d23d37a | 596 | py | Python | apiv2-python/api/vars.py | databeast/apicore | 2ac982e399d98510fc2bafed0aeb55eb0d2cf9c1 | [
"BSD-3-Clause"
] | null | null | null | apiv2-python/api/vars.py | databeast/apicore | 2ac982e399d98510fc2bafed0aeb55eb0d2cf9c1 | [
"BSD-3-Clause"
] | null | null | null | apiv2-python/api/vars.py | databeast/apicore | 2ac982e399d98510fc2bafed0aeb55eb0d2cf9c1 | [
"BSD-3-Clause"
] | null | null | null | from contextvars import ContextVar
current_user_role = ContextVar('role', default=None)
list_of_roles = ContextVar('roles', default=None)
current_user = ContextVar('users_ID', default=None)
headers = ContextVar('headers', default={})
nrpayload = ContextVar('nrpayload', default=None)
tid = ContextVar('tid', default=None)
auth_token = ContextVar('auth', default=None)
current_request = ContextVar('request', default=None)
broker_instance = ContextVar('broker_instance')
background_tasks = ContextVar('background_tasks', default=[])
register_actors = ContextVar("register_actors", default=None)
| 39.733333 | 61 | 0.785235 | from contextvars import ContextVar
current_user_role = ContextVar('role', default=None)
list_of_roles = ContextVar('roles', default=None)
current_user = ContextVar('users_ID', default=None)
headers = ContextVar('headers', default={})
nrpayload = ContextVar('nrpayload', default=None)
tid = ContextVar('tid', default=None)
auth_token = ContextVar('auth', default=None)
current_request = ContextVar('request', default=None)
broker_instance = ContextVar('broker_instance')
background_tasks = ContextVar('background_tasks', default=[])
register_actors = ContextVar("register_actors", default=None)
| 0 | 0 | 0 |