content stringlengths 5 1.05M |
|---|
"""
ExactTarget OAuth support.
Support Authentication from IMH using JWT token and pre-shared key.
Requires package pyjwt
"""
from datetime import timedelta, datetime
import jwt
from social.exceptions import AuthFailed, AuthCanceled
from social.backends.oauth import BaseOAuth2
class ExactTargetOAuth2(BaseOAuth2):
    """ExactTarget OAuth2 backend.

    Authenticates requests coming from the ExactTarget Interactive
    Marketing Hub (IMH): the hub POSTs a JWT signed with the
    application's pre-shared secret, which is decoded here instead of
    running the usual OAuth2 redirect flow. Requires package pyjwt.
    """
    name = 'exacttarget'

    def get_user_details(self, response):
        """Return user details from the decoded JWT payload.

        The payload nests the user under token -> request -> user; the
        email address doubles as the username when present.
        """
        user = response.get('token', {})\
                       .get('request', {})\
                       .get('user', {})
        if 'email' in user:
            user['username'] = user['email']
        return user

    def uses_redirect(self):
        # No browser redirect: IMH POSTs the JWT directly to us.
        return False

    def auth_url(self):
        # There is no authorization URL in this flow.
        return None

    def process_error(self, data):
        """Raise AuthFailed if the payload carries an error.

        Raises:
            AuthFailed: with the error description (or error code) from
                the payload.
        """
        if data.get('error'):
            # BUGFIX: previously read self.data here while the condition
            # above tested the `data` argument; use the argument
            # consistently.
            error = data.get('error_description') or data['error']
            raise AuthFailed(self, error)

    def do_auth(self, token, *args, **kwargs):
        """Decode the JWT and hand the payload to the auth pipeline.

        Raises:
            AuthCanceled: if the token signature does not verify.
        """
        dummy, secret = self.get_key_and_secret()
        try:
            # Decode the token using the Application Signature from settings.
            # NOTE(review): pyjwt >= 2.0 requires an explicit algorithms=
            # argument here -- confirm against the pinned pyjwt version.
            decoded = jwt.decode(token, secret)
        except jwt.DecodeError:
            # Wrong signature: fail authentication.
            raise AuthCanceled(self)
        kwargs.update({'response': {'token': decoded}, 'backend': self})
        return self.strategy.authenticate(*args, **kwargs)

    def auth_complete(self, *args, **kwargs):
        """Completes login process, must return user instance"""
        token = self.data.get('jwt', {})
        if not token:
            raise AuthFailed(self, 'Authentication Failed')
        return self.do_auth(token, *args, **kwargs)

    def extra_data(self, user, uid, response, details):
        """Load extra details from the JWT token"""
        data = {
            'id': details.get('id'),
            'email': details.get('email'),
            # OAuth token, for use with legacy SOAP API calls:
            # http://bit.ly/13pRHfo
            'internalOauthToken': details.get('internalOauthToken'),
            # Token for use with the Application ClientID for the FUEL API
            'oauthToken': details.get('oauthToken'),
            # If the token has expired, use the FUEL API to get a new token see
            # http://bit.ly/10v1K5l and http://bit.ly/11IbI6F - set legacy=1
            'refreshToken': details.get('refreshToken'),
        }
        # The expiresIn value determines how long the tokens are valid for.
        # Take a bit off, then convert to an int timestamp.
        expires_seconds = details.get('expiresIn', 0) - 30
        expires = datetime.utcnow() + timedelta(seconds=expires_seconds)
        data['expires'] = (expires - datetime(1970, 1, 1)).total_seconds()
        if response.get('token'):
            token = response['token']
            org = token.get('request', {}).get('organization')
            if org:
                data['stack'] = org.get('stackKey')
                data['enterpriseId'] = org.get('enterpriseId')
        return data
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 59,
"id": "fatty-container",
"metadata": {},
"outputs": [],
"source": [
"def cuad_pmedio(a, b, f):\n",
" \"\"\"Implementación de la regla del punto medio\n",
" \n",
" Parameters\n",
" ----------\n",
" f: La función a integrar\n",
" a: Límite inferior del intervalo\n",
" b: Límite superior del intervalo\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del punto medio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" \"\"\"\n",
" if a > b:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
" return None\n",
" try:\n",
" x0 = (a+b)/2\n",
" h = f(x0)\n",
" aprox = h*(b-a)\n",
" except:\n",
" print('Error: no fue posible calcular la función')\n",
" return aprox\n",
"\n",
"def cuad_trapecio(a, b, f):\n",
" \"\"\"Implementación de la regla del trapecio\n",
" \n",
" Parameters\n",
" ----------\n",
" f: La función a integrar\n",
" a: Límite inferior del intervalo\n",
" b: Límite superior del intervalo\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del trapecio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" \"\"\"\n",
" if a > b:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
" return None\n",
" try:\n",
" h = f(a) + f(b)\n",
" aprox = (b-a)/2*h\n",
" except:\n",
" print('Error: no fue posible calcular la función')\n",
" return aprox\n",
"\n",
"def cuad_simpson(a, b, f):\n",
" \"\"\"Implementación de la regla de Simpson\n",
" \n",
" Parameters\n",
" ----------\n",
" f: La función a integrar\n",
" a: Límite inferior del intervalo\n",
" b: Límite superior del intervalo\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla de Simpson\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" \"\"\"\n",
" if a > b:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
" return None\n",
" try:\n",
" x0 = (a+b)/2\n",
" h = f(a) + f(b) + 4*f(x0)\n",
" aprox = (b-a)/6*h\n",
" except:\n",
" print('Error: no fue posible calcular la función')\n",
" return aprox\n",
"\n",
"def f(x):\n",
" return x**2\n",
"\n",
"I = cuad_pmedio(1, 2, f)\n",
"print(f'La regla del punto medio da como resultado: {I}')\n",
"\n",
"I = cuad_trapecio(1, 2, f)\n",
"print(f'La regla del trapecio da como resultado: {I}')\n",
"\n",
"I = cuad_simpson(1, 2, f)\n",
"print(f'La regla de simpson da como resultado: {I}')\n",
"\n",
"\n",
"\n",
"def cuad_pmedio(a, b, y0):\n",
" \"\"\"Implementación de la regla del punto medio\n",
" \n",
" Parameters\n",
" ----------\n",
" f: La función a integrar\n",
" a: Límite inferior del intervalo\n",
" b: Límite superior del intervalo\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del punto medio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" \"\"\"\n",
" try:\n",
" x0 = (a+b)/2\n",
" aprox = x0*y0\n",
" except:\n",
" print('Error: no fue posible calcular la función')\n",
" return aprox\n",
"\n",
"cuad_pmedio(0, 1, 0.5)\n",
"\n",
"def cuad_pmedio(a, b, f=None, y0=None):\n",
" \"\"\"Implementación de la regla del punto medio\n",
" \n",
" Parameters\n",
" ----------\n",
" a: float\n",
" Límite inferior del intervalo\n",
" b: float\n",
" Límite superior del intervalo\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
" y0: float\n",
" El valor de y en el punto medio.\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del punto medio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" \"\"\"\n",
" if a > b:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
"\n",
" x0 = (a+b)/2\n",
" if (f is None) and (y0 is not None):\n",
" aprox = x0*y0\n",
" elif (f is not None) and (y0 is None): \n",
" try:\n",
" h = f(x0)\n",
" except:\n",
" print(('Error: no fue posible calcular la función'\n",
" ' Si desea ingresar un dato use y0='))\n",
" aprox = h*(b-a)\n",
"\n",
" else:\n",
" raise ValueError(\"Debe ingresar la función o los datos!\") \n",
" \n",
" return aprox\n",
"\n",
"cuad_pmedio(0, 1, y0=0.5)\n",
"\n",
"def cuad_trapecio(x0, x1, f=None, y0=None, y1=None):\n",
" \"\"\"Implementación de la regla del trapecio\n",
" \n",
" Parameters\n",
" ----------\n",
" x0: float\n",
" Límite inferior del intervalo\n",
" x1: float\n",
" Límite superior del intervalo\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
    "    y0: float\n",
    "        El valor de y en x0.\n",
    "    y1: float\n",
    "        El valor de y en x1.\n",
"\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del punto medio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" Uso: \n",
" cuad_trapecio(x0, x1, f=f)\n",
" cuad_trapecio(x0, x1, y0=f(x0), y1=f(x1))\n",
" \"\"\"\n",
" if x0 > x1:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
"\n",
" if (f is None) and (y0 is not None) and (y1 is not None):\n",
" aprox = (x1-x0)*(y0+y1)/2\n",
" elif (f is not None) and (y0 is None): \n",
" try:\n",
" y0 = f(x0)\n",
" y1 = f(x1)\n",
" except:\n",
" print(('Error: no fue posible calcular la función'\n",
" ' Si desea ingresar un dato use y0='))\n",
" aprox = (x1-x0)*(y0+y1)/2\n",
"\n",
" else:\n",
" raise ValueError(\"Debe ingresar la función o los datos!\") \n",
" \n",
" return aprox\n",
"\n",
"cuad_trapecio(0, 1, f)\n",
"\n",
"cuad_trapecio(0, 1, y0=f(0), y1=f(1))\n",
"\n",
"def contar_argumentos(func):\n",
" def inner(*args, **kwargs):\n",
" nargs_in = len(args) + len(kwargs)\n",
" return func(*args, **kwargs, nargs_in=nargs_in)\n",
" return inner\n",
"\n",
"\n",
"\n",
"@contar_argumentos\n",
"def cuad_trapecio(x0, x1, f=None, y0=None, y1=None, nargs_in=None):\n",
" \"\"\"Implementación de la regla del trapecio\n",
" \n",
" Parameters\n",
" ----------\n",
" x0: float\n",
" Límite inferior del intervalo\n",
" x1: float\n",
" Límite superior del intervalo\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
    "    y0: float\n",
    "        El valor de y en x0.\n",
    "    y1: float\n",
    "        El valor de y en x1.\n",
"\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla del punto medio\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" Uso: \n",
" cuad_trapecio(x0, x1, f=f)\n",
" cuad_trapecio(x0, x1, y0=f(x0), y1=f(x1))\n",
" \"\"\"\n",
" if nargs_in==4:\n",
" y1=y0 \n",
" y0=f\n",
" f = None\n",
" elif nargs_in==3:\n",
" if type(f) is float: \n",
" raise ValueError(\"Verificar los argumentos\")\n",
" else:\n",
" raise ValueError(\"Verificar el número de argumentos\")\n",
" \n",
" if x0 > x1:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
"\n",
" if (f is None) and (y0 is not None) and (y1 is not None):\n",
" aprox = (x1-x0)*(y0+y1)/2\n",
" elif (f is not None) and (y0 is None): \n",
" try:\n",
" y0 = f(x0)\n",
" y1 = f(x1)\n",
" except:\n",
" print(('Error: no fue posible calcular la función'\n",
" ' Si desea ingresar un dato use y0='))\n",
" aprox = (x1-x0)*(y0+y1)/2\n",
"\n",
" else:\n",
" raise ValueError(\"Debe ingresar la función o los datos!\") \n",
" \n",
" return aprox\n",
"\n",
"cuad_trapecio(0, 1, f)\n",
"\n",
"cuad_trapecio(0, 1, f(0), f(1))\n",
"\n",
"@contar_argumentos\n",
"def cuad_simpson(x0, x2, f=None, y0=None, y1=None, y2=None, nargs_in=None):\n",
" \"\"\"Implementación de la regla de simpson\n",
" \n",
" Parameters\n",
" ----------\n",
" x0: float\n",
" Límite inferior del intervalo\n",
" x2: float\n",
" Límite superior del intervalo\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
    "    y0: float\n",
    "        El valor de y en x0.\n",
    "    y2: float\n",
    "        El valor de y en x2.\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla de Simpson\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" Uso: \n",
" cuad_simpson(x0, x2, f=f)\n",
" cuad_simpson(x0, x2, y0=f(x0), y2=f(x2))\n",
" cuad_simpson(x0, x2, f)\n",
" cuad_simpson(x0, x2, y0, y2)\n",
" \"\"\"\n",
" \n",
" if nargs_in==5:\n",
" y2=y1\n",
" y1=y0\n",
" y0=f\n",
" f = None\n",
" elif nargs_in==3:\n",
" if type(f) is float: \n",
" raise ValueError(\"Verificar los argumentos\")\n",
" else:\n",
" raise ValueError(\"Verificar el número de argumentos\")\n",
" \n",
" if x0 > x2:\n",
" raise ValueError(\"Oops! Debe ser a<b\")\n",
" \n",
" x1 = (x0+x2)/2\n",
"\n",
" if (f is None) and (y0 is not None) and (y1 is not None):\n",
" aprox = (x2-x0)/6 * (y0 + 4*y1 + y2)\n",
" elif (f is not None) and (y0 is None): \n",
" try:\n",
" y0 = f(x0)\n",
" y1 = f(x1)\n",
" y2 = f(x2)\n",
" except:\n",
" print(('Error: no fue posible calcular la función'\n",
" ' Si desea ingresar un dato use y0='))\n",
" aprox = (x2-x0)/6 * (y0 + 4*y1 + y2)\n",
"\n",
" else:\n",
" raise ValueError(\"Debe ingresar la función o los datos!\") \n",
" \n",
" return aprox\n",
"\n",
"cuad_simpson(0, 1, f)\n",
"\n",
"cuad_simpson(0, 1, f(0), f(0.5), f(1))\n",
"\n",
"\n",
"\n",
"import numpy as np\n",
"x = np.linspace(0, 10, 11)\n",
"\n",
"x\n",
"\n",
"np.diff(x)\n",
"\n",
"def cuad_simpson_compuesta(x, f=None, y=None):\n",
" \"\"\"Implementación de la regla de simpson\n",
" \n",
" Parameters\n",
" ----------\n",
" x: list or array\n",
" Lista de valores de x\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
" y: list or array\n",
" La lista de valores de y\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla de Simpson\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" Uso: \n",
" cuad_simpson(x, y=y)\n",
" cuad_simpson(x, f=f)\n",
" \"\"\"\n",
" import numpy as np\n",
"\n",
" # Primero verificar si la particion es regular \n",
" x = np.array(x)\n",
" x.sort() \n",
" H = (max(x) - min(x))/len(x-1)\n",
" equiesp = np.std(np.diff(x)) < H*1.e-6\n",
" \n",
" # Calcular los valores de y (si se pasó una función)\n",
" if (y is None) and (f is not None):\n",
" y = f(x)\n",
" \n",
" n = len(x)\n",
" \n",
" if equiesp: \n",
" impares = y[1:-1:2].sum()\n",
" pares = y[2:-1:2].sum() \n",
" H = y[0] + 2*pares + 4*impares + y[-1] \n",
" H = H / (3*n)\n",
" aprox = (x[-1]-x[0])*H\n",
" else:\n",
" aprox = 0\n",
" for i in range(0, len(x)-2, 2):\n",
" aprox += cuad_simpson(x[i], x[i+2], y[i], y[i+1], y[i+2])\n",
" \n",
" return aprox\n",
"\n",
"def f(x):\n",
" return x**2\n",
"\n",
"x = np.linspace(0, 1, 999)\n",
"xr = np.random.uniform(0, 1, 1000)\n",
"y = f(x)\n",
"yr = f(xr)\n",
"\n",
"cuad_simpson_compuesta(x, y=y)\n",
"\n",
"cuad_simpson_compuesta(xr, y=yr)\n",
"\n",
"cuad_simpson_compuesta(x, f=f)\n",
"\n",
"from scipy import integrate\n",
"\n",
"integrate.quad(f, 0, 1)\n",
"\n",
"##### Otra opción sería dar el intervalo y la función, e ir achicando la norma de la partición hasta que el error sea menor que algún valor dado.\n",
"\n",
"def cuad_simpson_compuesta_II(f, I, eps):\n",
" \"\"\"Implementación de la regla de Simpson\n",
" \n",
" Parameters\n",
" ----------\n",
" I: list\n",
" Intervalo de integración, ingresado como lista de dos elementos\n",
" f: function (1 parameter)\n",
" La función a integrar\n",
" \n",
" Returns\n",
" -------\n",
" aprox: Aproximación de la integral por la regla de Simpson\n",
" \n",
" Notes\n",
" -----\n",
" Este código es parte del curso \"Computación\", Famaf\n",
" Uso: \n",
" cuad_simpson_compuesta_II(f, I)\n",
" cuad_simpson_compuesta_II(f, I)\n",
" \"\"\"\n",
" import numpy as np\n",
"\n",
" delta = 2*eps\n",
" n = 2\n",
" aprox_old = (I[1]-I[0])*f((I[1]+I[0])/2)\n",
"\n",
" while delta > eps:\n",
" x = np.linspace(I[0], I[1], n)\n",
" aprox = cuad_simpson_compuesta(x, f=f)\n",
" delta = abs(aprox - aprox_old)\n",
" aprox_old = aprox\n",
" n += 10\n",
" if n>5000:\n",
" break\n",
"\n",
" return aprox\n",
"\n",
"I = cuad_simpson_compuesta_II(f, [0, 1], 1.e-6)\n",
"\n",
"I"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
|
#!/usr/bin/env python
import cv2
import rospy
import roslib
import numpy as np
roslib.load_manifest('object_detection')
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
'''
This script uses hsv (hue, saturation and value) thresholds to attempt
and identify colors. It the proceeds to find the contour of the identified
shape, the finds the centroid of said contour and draws a rectangle around it.
'''
class color_identifier():
def __init__(self):
#create opencv instance
self.bridge = CvBridge()
#create a message subscriber to the topic "/camera/rgb/image_raw" which receives a message of type Image from sensor_msgs
self.image_sub = rospy.Subscriber("/camera/rgb/image_color", Image, self.identify_colors)
#create a message publisher to the topic "/camera/test_opencv" that publishes sensor_msgs of type Image
#self.image_pub = rospy.Publisher("/camera/test_opencv",Image)
def identify_colors(self, ros_image):
try:
cv_image = self.bridge.imgmsg_to_cv2(ros_image, "bgr8")
#turn image to hsv image
hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
#setting up lower and upper hsv bounds
#these bounds were found using the hue_adjust script
lower_green = np.array([22,88,0])
upper_green = np.array([65,255,255])
lower_blue = np.array([66,47,0])
upper_blue = np.array([149,255,255])
lower_yellow = np.array([15,159,164])
upper_yellow = np.array([107,255,255])
#erode rectangle elementof size 3x3 pixels
erode_element = cv2.getStructuringElement(cv2.MORPH_RECT,(3,3))
#dilate rectangle element of size 8x8 pixels
dilate_element = cv2.getStructuringElement(cv2.MORPH_RECT,(8,8))
mask_green = cv2.inRange(hsv, lower_green, upper_green)
mask_blue = cv2.inRange(hsv, lower_blue, upper_blue)
mask_yellow = cv2.inRange(hsv,lower_yellow, upper_yellow)
#erode hsv image to get rid of outliers
mask_green = cv2.erode(mask_green, erode_element)
#dilate hsv to recover lost points of interest
mask_green = cv2.dilate(mask_green, dilate_element)
#erode hsv image to get rid of outliers
mask_blue = cv2.erode(mask_blue, erode_element)
#dilate hsv to recover lost points of interest
mask_blue = cv2.dilate(mask_blue, dilate_element)
#erode hsv image to get rid of outliers
mask_yellow = cv2.erode(mask_yellow, erode_element)
#dilate hsv to recover lost points of interest
mask_yellow = cv2.dilate(mask_yellow, dilate_element)
###################### FIND YELLOW ###################################
(cnts, nah) = cv2.findContours(mask_yellow.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = sorted(cnts, key = cv2.contourArea, reverse = True)[:1]
for c in cnts:
#get the perimeter of the countour
peri = cv2.arcLength(c, True)
#approximate perimeter to generate smooth shape
approx = cv2.approxPolyDP(c, 0.01 * peri, True)
screenCnt = approx
#get the area bounded by the contour
area = cv2.contourArea(c)
#if the area is bigger than 30x30 pixels
if area >= 30 * 30:
moment = cv2.moments(c)
#getting centroid coordinates
cx = int(moment['m10']/moment['m00'])
cy = int(moment['m01']/moment['m00'])
h = int(peri / 5)
w = int(peri / 20)
#Draw yellow rectangle around yellow block
cv2.rectangle(cv_image, (cx-h,cy-w), (cx+h, cy+w), (10,240,240), 2)
# Write on image
#write 'yellow block' and the coordinates of the crentroid
yellow_caption = "YELLOW BLOCK (%s,%s)" %(cx,cy)
cv2.putText(cv_image, yellow_caption, (cx,cy), cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 0, 0), 1, 1)
#cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 2)
######################################################################################################
############################### FIND BLUE #################################
(blue_cnts, nah2) = cv2.findContours(mask_blue.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = sorted(blue_cnts, key = cv2.contourArea, reverse = True)[:1]
for c in blue_cnts:
#get the perimeter of the countour
peri = cv2.arcLength(c, True)
#approximate perimeter to generate smooth shape
approx = cv2.approxPolyDP(c, 0.01 * peri, True)
screenCnt = approx
#get the area bounded by the contour
area = cv2.contourArea(c)
#if the area is bigger than 30x30 pixels
if area >= 30 * 30:
moment = cv2.moments(c)
#getting centroid coordinates
cx = int(moment['m10']/moment['m00'])
cy = int(moment['m01']/moment['m00'])
h = int(peri / 5)
w = int(peri / 20)
#Draw yellow rectangle around yellow block
cv2.rectangle(cv_image, (cx-h,cy-w), (cx+h, cy+w), (255,0,0), 2)
# Write on image
#write 'yellow block' and the coordinates of the crentroid
blue_caption = "BLUE BLOCK (%s,%s)" %(cx,cy)
cv2.putText(cv_image, blue_caption, (cx,cy), cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 0, 0), 1, 1)
############################################################################################################
######################### FIND GREEN ###############################################
(green_cnts, nah3) = cv2.findContours(mask_green.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
green_cnts = sorted(green_cnts, key = cv2.contourArea, reverse = True)[:1]
for c in green_cnts:
#get the perimeter of the countour
peri = cv2.arcLength(c, True)
#approximate perimeter to generate smooth shape
approx = cv2.approxPolyDP(c, 0.01 * peri, True)
screenCnt = approx
#get the area bounded by the contour
area = cv2.contourArea(c)
#if the area is bigger than 30x30 pixels
if area >= 30 * 30:
moment = cv2.moments(c)
#getting centroid coordinates
cx = int(moment['m10']/moment['m00'])
cy = int(moment['m01']/moment['m00'])
h = int(peri / 5)
w = int(peri / 20)
#Draw yellow rectangle around yellow block
cv2.rectangle(cv_image, (cx-h,cy-w), (cx+h, cy+w), (0,255,0), 2)
# Write on image
#write 'yellow block' and the coordinates of the crentroid
green_caption = "GREEN BLOCK (%s,%s)" %(cx,cy)
cv2.putText(cv_image, green_caption, (cx,cy), cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 0, 0), 1, 1)
####################################################################################################################
# Why is it not publishing??
#self.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, "bgr8"))
cv2.imshow("COLORS", cv_image)
key = cv2.waitKey(20)
if key == 27:
exit()
except CvBridgeError, e:
print e
def main():
    '''Spin up the color_finder node and keep it alive until shutdown.'''
    #register this process as a ROS node named 'color_finder'
    rospy.init_node('color_finder', anonymous = True)
    #instantiate the detector, which subscribes to the camera topic
    identifier = color_identifier()
    #block until the node is shut down, then tear down the GUI windows
    rospy.spin()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    main()
|
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import numpy as np
from .models import Collection, Experiment, Channel
from .lookup import LookUpKey
from .error import BossHTTPError, BossError, ErrorCodes
from .permissions import BossPermissionManager
META_CONNECTOR = "&"
class BossRequest:
"""
Validator for all requests that are made to the endpoint.
"""
def __init__(self, request, bossrequest):
    """
    Parse the request and initialize an instance of BossRequest

    Args:
        request (stream): Django Uwsgi request
    Raises:
        BossError: If the request is invalid
    """
    self.bossrequest = bossrequest

    # Datamodel objects
    self.collection = None
    self.experiment = None
    self.channel = None
    self.default_time = None
    self.coord_frame = None

    # Endpoint service and version number from the request
    self.service = None
    self.version = None

    # Boss key representing the datamodel for a valid request
    self.base_boss_key = None

    # Meta data key and value
    self.key = None
    self.value = None

    # Cutout args from the request
    self.resolution = None
    self.x_start = 0
    self.y_start = 0
    self.z_start = 0
    self.x_stop = 0
    self.y_stop = 0
    self.z_stop = 0

    # Timesample argument
    self.time_start = 0
    self.time_stop = 0
    # Flag indicating if the REQUEST contained a time range (True) or if auto-pop (False)
    self.time_request = False

    # Request variables
    self.user = request.user
    self.method = request.method
    self.version = request.version

    # object service
    self.object_id = 0
    self.filter_ids = None

    # Dispatch to the validator matching the requested service; any
    # unrecognized service is validated as a cutout request.
    self.service = self.bossrequest['service']
    if self.service == 'view':
        raise BossError("Views not implemented. Specify the full request", ErrorCodes.FUTURE)
    validators = {
        'meta': self.validate_meta_service,
        'image': self.validate_image_service,
        'tile': self.validate_tile_service,
        # Currently the validation is the same as the cutout service
        'ids': self.validate_ids_service,
        'reserve': self.validate_reserve_service,
        'boundingbox': self.validate_bounding_box,
        'downsample': self.validate_downsample_service,
    }
    validators.get(self.service, self.validate_cutout_service)()
def validate_meta_service(self):
    """
    Validate all meta data requests.

    Resolves the collection/experiment/channel named in the request,
    then records the optional metadata key and value arguments.
    """
    self.initialize_request(self.bossrequest['collection_name'],
                            self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])
    # Key and value are each optional and independently present.
    for field, setter in (('key', self.set_key), ('value', self.set_value)):
        if field in self.bossrequest:
            setter(self.bossrequest[field])
def validate_downsample_service(self):
    """
    Validate all downsample data requests.

    Only the resource names need resolving; the downsample service takes
    no cutout or time arguments.
    """
    names = (self.bossrequest['collection_name'],
             self.bossrequest['experiment_name'],
             self.bossrequest['channel_name'])
    self.initialize_request(*names)
def validate_cutout_service(self):
    """
    Validate a cutout request: resource names, the optional id filter,
    the time range, and the cutout extents.

    Raises:
        BossError: if the id filter is used on a non-annotation channel,
            or the filter ids cannot be parsed as uint64
    """
    self.initialize_request(self.bossrequest['collection_name'], self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])

    # Validate filter arguments if any
    # (idiom fix: compare to None with `is not`, not `!=`)
    if 'ids' in self.bossrequest and self.bossrequest['ids'] is not None:
        if self.channel.type != 'annotation':
            raise BossError("The channel in request has type {}. Filter is only valid for annotation channels"
                            .format(self.channel.type), ErrorCodes.DATATYPE_NOT_SUPPORTED)
        else:
            # convert the comma-separated id list to uint64s
            try:
                self.filter_ids = np.fromstring(self.bossrequest['ids'], sep=',', dtype=np.uint64)
            except (TypeError, ValueError) as e:
                raise BossError("Invalid id in list of filter ids {}. {}".format(self.bossrequest['ids'], str(e)),
                                ErrorCodes.INVALID_CUTOUT_ARGS)

    time = self.bossrequest['time_args']
    if not time:
        # No explicit time range: auto-populate from the channel default
        self.time_start = self.channel.default_time_sample
        self.time_stop = self.channel.default_time_sample + 1
        self.time_request = False
    else:
        self.set_time(time)
        self.time_request = True

    self.set_cutoutargs(int(self.bossrequest['resolution']), self.bossrequest['x_args'],
                        self.bossrequest['y_args'], self.bossrequest['z_args'])
def validate_ids_service(self):
    """
    Validate an ids request: same resource/time/cutout validation as the
    cutout service, restricted to annotation channels.

    Raises:
        BossError: if the channel is not an annotation channel
    """
    self.initialize_request(self.bossrequest['collection_name'],
                            self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])

    # id lookups are only defined for annotation channels
    if self.channel.type != 'annotation':
        raise BossError("The channel in request has type {}. Can only reserve IDs for annotation channels"
                        .format(self.channel.type), ErrorCodes.DATATYPE_NOT_SUPPORTED)

    time = self.bossrequest['time_args']
    if time:
        self.set_time(time)
        self.time_request = True
    else:
        # auto-populate the time range from the channel default
        self.time_start = self.channel.default_time_sample
        self.time_stop = self.channel.default_time_sample + 1
        self.time_request = False

    self.set_cutoutargs(int(self.bossrequest['resolution']),
                        self.bossrequest['x_args'],
                        self.bossrequest['y_args'],
                        self.bossrequest['z_args'])
def validate_image_service(self):
    """
    Validate an image request: resource names, time sample, then the
    orientation-specific image extents.
    """
    self.initialize_request(self.bossrequest['collection_name'],
                            self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])

    time = self.bossrequest['time_args']
    if time:
        self.set_time(time)
    else:
        # default to the channel's single default time sample
        self.time_start = self.channel.default_time_sample
        self.time_stop = self.channel.default_time_sample + 1

    self.set_imageargs(self.bossrequest['orientation'],
                       self.bossrequest['resolution'],
                       self.bossrequest['x_args'],
                       self.bossrequest['y_args'],
                       self.bossrequest['z_args'])
def validate_tile_service(self):
    """
    Validate a tile request: resource names, time sample, then the
    tile-specific arguments (tile size, orientation, extents).
    """
    self.initialize_request(self.bossrequest['collection_name'],
                            self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])

    time = self.bossrequest['time_args']
    if time:
        self.set_time(time)
    else:
        # default to the channel's single default time sample
        self.time_start = self.channel.default_time_sample
        self.time_stop = self.channel.default_time_sample + 1

    self.set_tileargs(self.bossrequest['tile_size'],
                      self.bossrequest['orientation'],
                      self.bossrequest['resolution'],
                      self.bossrequest['x_args'],
                      self.bossrequest['y_args'],
                      self.bossrequest['z_args'])
def validate_reserve_service(self):
    """
    Validate a reserve request: resolve resource names and ensure the
    target channel is an annotation channel.

    Raises:
        BossError: if the channel is not an annotation channel
    """
    self.initialize_request(self.bossrequest['collection_name'], self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])
    if self.channel.type != 'annotation':
        # BUGFIX: was ErrorCodes.ErrorCodes.DATATYPE_NOT_SUPPORTED, which
        # raises AttributeError instead of the intended BossError.
        raise BossError("The channel in request has type {}. Can only reserve IDs for annotation channels"
                        .format(self.channel.type), ErrorCodes.DATATYPE_NOT_SUPPORTED)
def validate_bounding_box(self):
    """
    Validate a bounding box request: resource names, channel type,
    object id and resolution.

    Raises:
        BossError: for a non-annotation channel or a non-integer id
    """
    self.initialize_request(self.bossrequest['collection_name'],
                            self.bossrequest['experiment_name'],
                            self.bossrequest['channel_name'])

    # Bounding box is only valid for annotation channels
    if self.channel.type != 'annotation':
        raise BossError("The channel in request has type {}. Can only perform bounding box operations on annotation channels"
                        .format(self.channel.type), ErrorCodes.DATATYPE_NOT_SUPPORTED)

    # TODO : validate the object id
    try:
        self.object_id = int(self.bossrequest['id'])
    except (TypeError, ValueError):
        raise BossError("The id of the object {} is not a valid int".format(self.bossrequest['id']),
                        ErrorCodes.TYPE_ERROR)

    self.validate_resolution()
def validate_resolution(self):
    """
    Ensure requested resolution is between channel's base resolution and the base
    resolution + the experiment's number of hierarchy levels

    Raises:
        (BossError): if resolution invalid
    """
    try:
        base_res = self.channel.base_resolution
        upper = base_res + self.experiment.num_hierarchy_levels
        requested = int(self.bossrequest['resolution'])
        # valid resolutions are the half-open interval [base_res, upper)
        if not base_res <= requested < upper:
            raise BossError("Invalid resolution {}. The resolution has to be within {} and {}".
                            format(self.bossrequest['resolution'], base_res, upper),
                            ErrorCodes.TYPE_ERROR)
        self.resolution = requested
    except (TypeError, ValueError):
        raise BossError("Type error in resolution {}".format(self.bossrequest['resolution']), ErrorCodes.TYPE_ERROR)
def initialize_request(self, collection_name, experiment_name, channel_name):
    """
    Initialize the request

    Parse and validate all the resource names in the request

    Args:
        collection_name: Collection name from the request
        experiment_name: Experiment name from the request
        channel_name: Channel name from the request
    """
    # Resolve each resource in turn, bailing out quietly as soon as a
    # name is missing or a lookup reports failure (guard-clause form of
    # the original nested ifs; the set_* call order is unchanged).
    if not collection_name:
        return
    colstatus = self.set_collection(collection_name)
    if not (experiment_name and colstatus):
        return
    expstatus = self.set_experiment(experiment_name)
    if not (channel_name and expstatus):
        return
    self.set_channel(channel_name)
    self.check_permissions()
    self.set_boss_key()
def set_cutoutargs(self, resolution, x_range, y_range, z_range):
    """
    Validate and initialize cutout arguments in the request

    Args:
        resolution: Integer indicating the level in the resolution hierarchy (0 = native)
        x_range: Python style range indicating the X coordinates (eg. 100:200)
        y_range: Python style range indicating the Y coordinates (eg. 100:200)
        z_range: Python style range indicating the Z coordinates (eg. 100:200)

    Raises:
        BossError: For invalid requests
    """
    try:
        self.validate_resolution()

        # TODO --- Get offset for that resolution. Reading from coordinate frame right now, This is WRONG
        # Each range is a "start:stop" string; split into its endpoints.
        x_coords = x_range.split(":")
        y_coords = y_range.split(":")
        z_coords = z_range.split(":")

        self.x_start = int(x_coords[0])
        self.x_stop = int(x_coords[1])
        self.y_start = int(y_coords[0])
        self.y_stop = int(y_coords[1])
        self.z_start = int(z_coords[0])
        self.z_stop = int(z_coords[1])

        # Check for valid arguments: every axis must be a non-empty
        # range (start < stop) lying inside the coordinate frame extents.
        if (self.x_start >= self.x_stop) or (self.y_start >= self.y_stop) or (self.z_start >= self.z_stop) or \
                (self.x_start < self.coord_frame.x_start) or (self.x_stop > self.coord_frame.x_stop) or \
                (self.y_start < self.coord_frame.y_start) or (self.y_stop > self.coord_frame.y_stop) or\
                (self.z_start < self.coord_frame.z_start) or (self.z_stop > self.coord_frame.z_stop):
            raise BossError("Incorrect cutout arguments {}/{}/{}/{}".format(resolution, x_range, y_range, z_range),
                            ErrorCodes.INVALID_CUTOUT_ARGS)
    except (TypeError, ValueError):
        # A non-integer coordinate lands here.
        # NOTE(review): the BossError raised above presumably does not
        # subclass TypeError/ValueError and so propagates unchanged --
        # confirm against the BossError definition.
        raise BossError("Type error in cutout argument{}/{}/{}/{}".format(resolution, x_range, y_range, z_range),
                        ErrorCodes.TYPE_ERROR)
    def set_imageargs(self, orientation, resolution, x_args, y_args, z_args):
        """
        Validate and initialize image service arguments in the request

        The two in-plane axes take "start:stop" ranges; the axis normal to the
        requested plane takes a single index, expanded to a one-slice range.

        Args:
            orientation: Plane of the image ('xy', 'xz' or 'yz')
            resolution: Integer indicating the level in the resolution hierarchy (0 = native)
            x_args: X range (eg. 100:200) or single X index, depending on orientation
            y_args: Y range (eg. 100:200) or single Y index, depending on orientation
            z_args: Z range (eg. 100:200) or single Z index, depending on orientation

        Raises:
            BossError: For invalid requests
        """
        try:
            self.validate_resolution()

            # TODO --- Get offset for that resolution. Reading from coordinate frame right now, This is WRONG

            if orientation == 'xy':
                x_coords = x_args.split(":")
                y_coords = y_args.split(":")
                # Single z index -> one-slice range [z, z + 1)
                z_coords = [int(z_args), int(z_args)+1]
                if len(x_coords) < 2 or len(y_coords) < 2:
                    raise BossError("Incorrect cutout arguments {}/{}/{}".format(x_args, y_args, z_args),
                                    ErrorCodes.INVALID_CUTOUT_ARGS)
            elif orientation == 'xz':
                x_coords = x_args.split(":")
                y_coords = [int(y_args), int(y_args) + 1]
                z_coords = z_args.split(":")
                if len(x_coords) < 2 or len(z_coords) < 2:
                    raise BossError("Incorrect cutout arguments {}/{}/{}".format(x_args, y_args, z_args),
                                    ErrorCodes.INVALID_CUTOUT_ARGS)
            elif orientation == 'yz':
                x_coords = [int(x_args), int(x_args) + 1]
                y_coords = y_args.split(":")
                z_coords = z_args.split(":")
                if len(y_coords) < 2 or len(z_coords) < 2:
                    raise BossError("Incorrect cutout arguments {}/{}/{}".format(x_args, y_args, z_args),
                                    ErrorCodes.INVALID_CUTOUT_ARGS)
            else:
                raise BossError("Incorrect orientation {}".format(orientation), ErrorCodes.INVALID_URL)

            self.x_start = int(x_coords[0])
            self.x_stop = int(x_coords[1])
            self.y_start = int(y_coords[0])
            self.y_stop = int(y_coords[1])
            self.z_start = int(z_coords[0])
            self.z_stop = int(z_coords[1])

            # Ranges must be non-empty and lie inside the coordinate frame.
            if (self.x_start >= self.x_stop) or (self.y_start >= self.y_stop) or (self.z_start >= self.z_stop) or \
                    (self.x_start < self.coord_frame.x_start) or (self.x_stop > self.coord_frame.x_stop) or \
                    (self.y_start < self.coord_frame.y_start) or (self.y_stop > self.coord_frame.y_stop) or \
                    (self.z_start < self.coord_frame.z_start) or (self.z_stop > self.coord_frame.z_stop):
                raise BossError("Incorrect cutout arguments {}/{}/{}/{}".format(resolution, x_args, y_args, z_args),
                                ErrorCodes.INVALID_CUTOUT_ARGS)

        except (TypeError, ValueError):
            raise BossError("Type error in cutout argument{}/{}/{}/{}".format(resolution, x_args, y_args, z_args),
                            ErrorCodes.TYPE_ERROR)
    def set_tileargs(self, tile_size, orientation, resolution, x_idx, y_idx, z_idx):
        """
        Validate and initialize tile service arguments in the request

        Tile indices are converted to a cutout: the two in-plane axes span
        tile_size voxels starting at index * tile_size, while the axis normal
        to the plane is a single slice at its raw index.

        Args:
            tile_size: Edge length of the (square) tile in pixels
            orientation: Plane of the tile ('xy', 'xz' or 'yz')
            resolution: Integer indicating the level in the resolution hierarchy (0 = native)
            x_idx: X tile index
            y_idx: Y tile index
            z_idx: Z tile index

        Raises:
            BossError: For invalid requests
        """
        try:
            tile_size = int(tile_size)
            x_idx = int(x_idx)
            y_idx = int(y_idx)
            z_idx = int(z_idx)
            self.validate_resolution()

            # TODO --- Get offset for that resolution. Reading from coordinate frame right now, This is WRONG

            # Get the params to pull data out of the cache
            if orientation == 'xy':
                corner = (tile_size * x_idx, tile_size * y_idx, z_idx)
                extent = (tile_size, tile_size, 1)
            elif orientation == 'yz':
                corner = (x_idx, tile_size * y_idx, tile_size * z_idx)
                extent = (1, tile_size, tile_size)
            elif orientation == 'xz':
                corner = (tile_size * x_idx, y_idx, tile_size * z_idx)
                extent = (tile_size, 1, tile_size)
            else:
                # NOTE(review): raises BossHTTPError while every other failure
                # in this method raises BossError -- confirm this is intended.
                raise BossHTTPError("Invalid orientation: {}".format(orientation), ErrorCodes.INVALID_CUTOUT_ARGS)

            self.x_start = int(corner[0])
            self.x_stop = int(corner[0] + extent[0])
            self.y_start = int(corner[1])
            self.y_stop = int(corner[1] + extent[1])
            self.z_start = int(corner[2])
            self.z_stop = int(corner[2] + extent[2])

            # Ranges must be non-empty and lie inside the coordinate frame.
            if (self.x_start >= self.x_stop) or (self.y_start >= self.y_stop) or (self.z_start >= self.z_stop) or \
                    (self.x_start < self.coord_frame.x_start) or (self.x_stop > self.coord_frame.x_stop) or \
                    (self.y_start < self.coord_frame.y_start) or (self.y_stop > self.coord_frame.y_stop) or \
                    (self.z_start < self.coord_frame.z_start) or (self.z_stop > self.coord_frame.z_stop):
                raise BossError("Incorrect cutout arguments {}/{}/{}/{}".format(resolution, x_idx, y_idx, z_idx),
                                ErrorCodes.INVALID_CUTOUT_ARGS)

        except (TypeError, ValueError):
            raise BossError("Type error in cutout argument{}/{}/{}/{}".format(resolution, x_idx, y_idx, z_idx),
                            ErrorCodes.TYPE_ERROR)
    def initialize_view_request(self, webargs):
        """
        Validate and initialize views

        NOTE(review): currently a stub that only echoes the raw web arguments;
        no validation is performed yet.

        Args:
            webargs: Raw view arguments from the request
        """
        print(webargs)
def set_service(self, service):
"""
Set the service variable. The service can be 'meta', 'view' or 'cutout'
Args:
service: Service requested in the request
Returns: None
"""
self.service = service
def set_collection(self, collection_name):
"""
Validate the collection and set collection object for a valid collection.
Args:
collection_name: Collection name from the request
Returns:
Bool : True
Raises : BossError is the collection is not found.
"""
if Collection.objects.filter(name=str(collection_name)).exists():
self.collection = Collection.objects.get(name=collection_name)
if self.collection.to_be_deleted is not None:
raise BossError("Invalid Request. This resource {} has been marked for deletion"
.format(collection_name),ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
return True
else:
raise BossError("Collection {} not found".format(collection_name), ErrorCodes.RESOURCE_NOT_FOUND)
def get_collection(self):
"""
Get the collection name for the current collection
Returns:
collection_name : Name of the collection
"""
if self.collection:
return self.collection.name
    def set_experiment(self, experiment_name):
        """
        Validate and set the experiment

        Must be called after set_collection(): the lookup is scoped to
        self.collection. Also caches the experiment's coordinate frame on
        self.coord_frame for later cutout-bounds checks.

        Args:
            experiment_name: Experiment name from the request

        Returns:
            bool: True on success

        Raises:
            BossError: If the experiment is not found or is marked for deletion
        """
        if Experiment.objects.filter(name=experiment_name, collection=self.collection).exists():
            self.experiment = Experiment.objects.get(name=experiment_name, collection=self.collection)
            if self.experiment.to_be_deleted is not None:
                raise BossError("Invalid Request. This resource {} has been marked for deletion"
                                .format(experiment_name),ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
            self.coord_frame = self.experiment.coord_frame
        else:
            raise BossError("Experiment {} not found".format(experiment_name), ErrorCodes.RESOURCE_NOT_FOUND)
        return True
def get_experiment(self):
"""
Return the experiment name for the current experiment
Returns:
self.experiment.name (str): Experiment name for the data model representing the current experiment
"""
if self.experiment:
return self.experiment.name
    def set_channel(self, channel_name):
        """
        Validate and set the channel

        Must be called after set_experiment(): the lookup is scoped to
        self.experiment.

        Args:
            channel_name: Channel name specified in the request

        Returns:
            bool: True on success

        Raises:
            BossError: If the channel is not found or is marked for deletion
        """
        if Channel.objects.filter(name=channel_name, experiment=self.experiment).exists():
            self.channel = Channel.objects.get(name=channel_name, experiment=self.experiment)
            if self.channel.to_be_deleted is not None:
                raise BossError("Invalid Request. This resource {} has been marked for deletion"
                                .format(channel_name),ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
            return True
        else:
            raise BossError("Channel {} not found".format(channel_name), ErrorCodes.RESOURCE_NOT_FOUND)
def get_channel(self):
"""
Return the channel name for the channel
Returns:
self.channel.name (str) : Name of channel
"""
if self.channel:
return self.channel.name
    def set_key(self, key):
        """
        Set the meta data key. This is an optional argument used by the metadata service

        Args:
            key: Meta data key specified in the request
        """
        self.key = key

    def get_key(self):
        """
        Return the meta data key

        Returns:
            self.key (str) : Metadata key
        """
        return self.key

    def set_value(self, value):
        """
        Set the meta data value. This is an optional argument used by the metadata service

        Args:
            value: String representing the meta data value
        """
        self.value = value

    def get_value(self):
        """
        Return the value associated with the metadata

        Returns:
            self.value (str) : Meta data value
        """
        return self.value
    def get_default_time(self):
        """
        Return the default timesample for the channel

        Returns:
            self.default_time (int) : Default timestep for the channel
        """
        return self.default_time

    def get_coordinate_frame(self):
        """
        Return the name of the coordinate frame for the experiment

        Returns:
            self.coord_frame.name (str) : Name of coordinate frame
        """
        return self.coord_frame.name

    def get_resolution(self):
        """
        Return the resolution specified in the cutout arguments of the request

        Returns:
            self.resolution (int) : Resolution
        """
        return self.resolution
    def get_x_start(self):
        """
        Return the lower X bound for the request

        Returns:
            self.x_start (int) : Lower bound for X range
        """
        return self.x_start

    def get_x_stop(self):
        """
        Return the upper X bound specified in the cutout arguments

        Returns:
            self.x_stop (int) : Upper bound for X range
        """
        return self.x_stop

    def get_y_start(self):
        """
        Return the lower Y bound specified in the cutout arguments of the request

        Returns:
            self.y_start (int) : Lower bound for Y range
        """
        return self.y_start

    def get_y_stop(self):
        """
        Return the upper Y bound specified in the cutout arguments of the request

        Returns:
            self.y_stop (int) : Upper bound for Y range
        """
        return self.y_stop

    def get_z_start(self):
        """
        Return the lower Z bound specified in the cutout arguments of the request

        Returns:
            self.z_start (int) : Lower bound for Z range
        """
        return self.z_start

    def get_z_stop(self):
        """
        Return the upper Z bound specified in the cutout arguments of the request

        Returns:
            self.z_stop (int) : Upper bound for Z range
        """
        return self.z_stop
def get_x_span(self):
"""
Get the x span for the request
Returns:
x_span (int) : X span
"""
return self.x_stop - self.x_start
def get_y_span(self):
"""
Get the Y span for the request
Returns:
y_span (int) : Y span
"""
return self.y_stop - self.y_start
def get_z_span(self):
"""
Get the z span for the request
Returns:
z_span (int): Z span
"""
return self.z_stop - self.z_start
    def get_filter_ids(self):
        """
        Return the list of ids to filter the cutout on

        Returns:
            list(int): Object ids to filter by, or None when no filter was requested
        """
        return self.filter_ids
    def set_boss_key(self):
        """ Set the base boss key for the request

        The boss key concatenates the names of the datamodel stack
        (collection/experiment/channel, joined with META_CONNECTOR) to create
        a string representing the request. Partial keys (collection only, or
        collection + experiment) are only valid for the 'meta' service.

        Raises:
            BossError: If the resources present don't form a valid key
        """
        if self.collection and self.experiment and self.channel:
            self.base_boss_key = self.collection.name + META_CONNECTOR + self.experiment.name + META_CONNECTOR \
                + self.channel.name
        elif self.collection and self.experiment and self.service == 'meta':
            self.base_boss_key = self.collection.name + META_CONNECTOR + self.experiment.name
        elif self.collection and self.service == 'meta':
            self.base_boss_key = self.collection.name
        else:
            raise BossError("Error creating the boss key", ErrorCodes.UNABLE_TO_VALIDATE)
def check_permissions(self):
""" Check if user has permissions for the service hit in the request
Raises:
(BossError): if user doesn't have permission or if there is some other error
"""
if self.service == 'cutout' or self.service == 'image' or self.service == 'tile'\
or self.service == 'boundingbox' or self.service == 'downsample':
if self.channel.public and self.method == 'GET':
return
perm = BossPermissionManager.check_data_permissions(self.user, self.channel, self.method)
elif self.service == 'ids':
perm = BossPermissionManager.check_data_permissions(self.user, self.channel, self.method)
elif self.service == 'meta':
if self.collection and self.experiment and self.channel:
obj = self.channel
elif self.collection and self.experiment:
obj = self.experiment
elif self.collection:
obj = self.collection
else:
raise BossError("Error encountered while checking permissions for this request",
ErrorCodes.UNABLE_TO_VALIDATE)
perm = BossPermissionManager.check_resource_permissions(self.user, obj, self.method)
elif self.service == 'reserve':
perm = BossPermissionManager.check_object_permissions(self.user, self.channel, self.method)
if not perm:
raise BossError("This user does not have the required permissions", ErrorCodes.MISSING_PERMISSION)
def get_boss_key(self):
"""
Get the boss key for the current object
The boss key is the compound identifier using the "name" attribute of the data model resources used
in the request
Returns:
self.boss_key (str) : The base boss key for the request
"""
return self.base_boss_key
    def get_lookup_key(self):
        """
        Returns the base lookup key for the request, excluding the resolution and time sample

        The lookup key is the compound identifier using the "id" attribute of
        the data model resources used in the request; it is resolved from the
        boss key via the LookUpKey table.

        Returns:
            lookup (str) : The base lookup key that corresponds to the request
        """
        return LookUpKey.get_lookup_key(self.base_boss_key).lookup_key
def set_time(self, time):
"""
Set the time range for a request.
Args:
time: String representing the Time range
Raises : Boss Error if the range is out or bounds or invalid
"""
m = re.match("/?(?P<time_start>\d+)\:?(?P<time_stop>\d+)?/?", time)
if m:
[tstart, tstop] = [arg for arg in m.groups()]
if tstart:
self.time_start = int(tstart)
if self.time_start > self.experiment.num_time_samples:
raise BossError("Invalid time range {}. Start time is greater than the maximum time sample {}"
.format(time, str(self.experiment.num_time_samples)), ErrorCodes.INVALID_URL)
else:
raise BossError("Unable to parse time sample argument {}".format(time), ErrorCodes.INVALID_URL)
if tstop:
self.time_stop = int(tstop)
if self.time_start > self.time_stop or self.time_stop > self.experiment.num_time_samples + 1:
raise BossError("Invalid time range {}. End time is greater than the start time or out of "
"bounds with maximum time sample {}".format
(time, str(self.experiment.num_time_samples)), ErrorCodes.INVALID_URL)
else:
self.time_stop = self.time_start + 1
def get_time(self):
"""
Return the time step range
Returns:
Time sample range
"""
return range(self.time_start, self.time_stop)
|
import re
from typing import List
# Matches a markdown link: [text](target).
LINK_PATTERN = re.compile(r"\[(.+?)\]\((.+?)\)")
# Matches an mkapi embed directive on its own line, with an optional heading
# prefix, e.g. "## ![mkapi](module.object)".
MKAPI_PATTERN = re.compile(r"^(#*) *?!\[mkapi\]\((.+?)\)$", re.MULTILINE)
# Matches a rendered node block as emitted by node_markdown():
# groups are (index, filters, body).
NODE_PATTERN = re.compile(
    r"<!-- mkapi:begin:(\d+):\[(.*?)\] -->(.*?)<!-- mkapi:end -->",
    re.MULTILINE | re.DOTALL,
)
def node_markdown(index: int, markdown: str, filters: List[str] = None) -> str:
    """Wrap *markdown* in mkapi begin/end comment markers.

    Args:
        index: Node index embedded in the begin marker.
        markdown: Markdown content to wrap.
        filters: Optional filter names, joined with '|' inside the marker.
    """
    fs = "|".join(filters) if filters else ""
    return f"<!-- mkapi:begin:{index}:[{fs}] -->\n\n{markdown}\n\n<!-- mkapi:end -->"
|
from typing import List, Union
from src.buildchain import checker
from src.valtypes import Value, string, boolean, number
from src.errors import RTError
class Array(Value):
    """Runtime array value.

    Wraps a Python list of ``Value`` instances and implements the language's
    operator protocol (concatenation, in-place shift append/pop, repetition,
    comparison, indexing, membership and casts).
    """

    def __init__(self, value: List[Value]):
        super().__init__()
        self.value = value
        # Cached length, kept in sync by l_sl / l_sr.
        self.length = len(value)

    def add(self, other: Value):
        """'+' operator: concatenate two arrays into a new Array."""
        if type(other) == Array:
            return Array(self.value + other.value).set_ctx(self.context), None
        return self.illegalOp()

    def l_sl(self, other: Value):
        """'<<' operator: append an element in place."""
        self.value.append(other)
        self.length += 1
        return self, None

    def l_sr(self, other: Value):
        """'>>' operator: remove the element at index ``other.value`` in place."""
        self.value.pop(other.value)
        self.length -= 1
        return self, None

    def mul(self, num):
        """'*' operator: repeat the array ``num`` times.

        Nested arrays are deep-copied per repetition so the repetitions do
        not alias each other.
        """
        if type(num) == number.Number:
            # Guard against empty arrays: the old code unconditionally read
            # self.value[0] and raised IndexError on [].
            if self.value and isinstance(self.value[0], Array):
                arr = []
                for _ in range(num.value):
                    arr += [el.copy() for el in self.value]
                # Fixed: previously passed self.comp_neq (a bound method!) as
                # the context instead of self.context.
                return Array(arr).set_ctx(self.context), None
            return Array(self.value * num.value).set_ctx(self.context), None
        return self.illegalOp()

    def comp_eq(self, other: Value):
        """'==' operator: element-wise list equality; False for non-arrays."""
        if type(other) == Array:
            return boolean.Boolean(int(self.value == other.value)).set_ctx(self.context), None
        return boolean.Boolean(0), None

    def comp_neq(self, other: Value):
        """'!=' operator: element-wise list inequality; False for non-arrays."""
        if type(other) == Array:
            return boolean.Boolean(int(self.value != other.value)).set_ctx(self.context), None
        return boolean.Boolean(0), None

    def getElement(self, index: Union[int, float]):
        """Return the element at ``index``, or an RTError when out of range."""
        index = int(index)
        if index < 0 or index >= self.length:
            return None, RTError(self.range, f'Index out of range, index {index} on array of length {self.length}')
        return self.value[index], None

    def setElement(self, index, value: Union[int, float]):
        """Replace the element at ``index``, or return an RTError when out of range."""
        index = int(index)
        if index < 0 or index >= self.length:
            return None, RTError(self.range, f'Index out of range, index {index} on array of length {self.length}')
        self.value[index] = value
        return self, None

    def is_in(self, value: Value):
        """'in' operator: membership test via the elements' equality."""
        return boolean.Boolean(value in self.value), None

    def copy(self):
        """Deep copy: elements are copied recursively; context/range preserved."""
        cp = Array([el.copy() for el in self.value])
        cp.set_ctx(self.context)
        cp.set_range(self.range)
        return cp

    def cast_to_type(self, type):
        # NOTE: the parameter name 'type' shadows the builtin but is kept for
        # interface compatibility with the other Value subclasses.
        """Cast to an array of another element type, or join into a string."""
        if isinstance(type, checker.arrayType):
            rt = []
            for el in self.value:
                nel, error = el.cast_to_type(type.elementType)
                if error:
                    return None, error
                rt.append(nel)
            return Array(rt), None
        elif type == checker.Types.STRING:
            res = ""
            for el in self.value:
                nel, error = el.cast_to_type(type)
                if error:
                    return None, error
                res += nel.value
            return string.String(res), None
        return self.illegalOp()

    def __repr__(self):
        return f'{self.value}'
|
class SECRET:
    """Environment-variable names for application secrets."""
    # GitHub OAuth application credentials.
    GITHUB_CLIENT_ID = "GITHUB_CLIENT_ID"
    GITHUB_CLIENT_SECRET = "GITHUB_CLIENT_SECRET"
    # Flask session secret key.
    SUPER_SECRET = 'FLASK_SUPER_SECRET'
class DB:
    """Environment-variable names for database connection settings."""
    HOST = 'DATABASE_HOST'
    DB = 'DATABASE_NAME'
    USER = 'DATABASE_USER'
    PW = 'DATABASE_SECRET'
    PORT = 'DATABASE_PORT'
|
from __future__ import division, unicode_literals
import functools
import re
import threading
import typing
import http.server
if typing.TYPE_CHECKING:
from .common import FileDownloader
from ..postprocessor.metadataparser import MetadataParserPP
from ..utils import (
sanitized_Request,
)
class Augment():
    """Base class for download "augments": services that run alongside a
    download and are started/stopped around it.

    Usable as a context manager: __enter__ calls start(), __exit__ calls
    end(). Subclasses must implement start() and end().
    """
    # Key under which the subclass is registered in AUGMENT_MAP.
    _AUGMENT_KEY = None

    def __init__(self, dl: 'FileDownloader', info_dict, params: dict) -> None:
        self.dl = dl
        self.ydl = dl.ydl
        # An optional 'init_callback' may rewrite both info_dict and params
        # before they are stored.
        if 'init_callback' in params:
            info_dict, params = params['init_callback'](info_dict, params)
        self.params = params
        self.info_dict = info_dict
        # children classes may:
        # - implement some more initialization tasks
        # - modify info_dict directly to make things pass through Augment
        # at their __init__

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.end()

    def start(self):
        """
        Starts augmented service.
        Calling start() 2 or more times without end()ing is not permitted.
        """
        # NotImplementedError instead of a bare Exception: it is the idiomatic
        # signal for an abstract method and still caught by 'except Exception'.
        raise NotImplementedError('Implement in inheriting class')

    def end(self):
        """ Stops augmented service, as well as cleanups """
        raise NotImplementedError('Implement in inheriting class')
class HeartbeatAugment(Augment):
    """
    Augment for heartbeating.

    Keys:
        interval: Interval to wait, in seconds.
        callback: Callable to run periodically. Arguments are: (HeartbeatAugment)
                  "url" and "data" are ignored once this key is used.
        url: (easy mode) URL to request to. Cannot be used with "callback" key
        data: (optional) POST payload to pass. Use if needed.
        before_dl: Callable to run before download starts. Arguments are: (HeartbeatAugment)
                   Can be used even if any of "callback", "url" and "data" are used.
        after_dl: Callable to run after download ends. Arguments are: (HeartbeatAugment)
    """
    _AUGMENT_KEY = 'heartbeat'

    def __init__(self, dl: 'FileDownloader', info_dict, params: dict) -> None:
        super().__init__(dl, info_dict, params)
        params, info_dict = self.params, self.info_dict
        # Default: beat every 30 seconds.
        self.interval = params.get('interval', 30)
        self.lock = threading.Lock()
        # Single-element list so the nested heartbeat() closure can swap the
        # active Timer without needing nonlocal.
        self.timer = [None]
        if 'callback' in params:
            self.callback = params['callback']
        elif 'url' in params:
            heartbeat_url = params['url']
            heartbeat_data = params.get('data')
            if isinstance(heartbeat_data, str):
                heartbeat_data = heartbeat_data.encode()
            request = sanitized_Request(heartbeat_url, heartbeat_data)

            def callback(a):
                self.ydl.urlopen(request).read()
            self.callback = callback
        else:
            raise Exception('Callback is not provided')

    def start(self):
        self.complete = False

        def heartbeat():
            try:
                self.callback(self)
            except Exception:
                # NOTE(review): self.to_screen is not defined on this class or
                # the Augment base in this file -- this line would raise
                # AttributeError if a beat ever fails. Confirm intended target
                # (self.dl.to_screen?).
                self.to_screen('[download] Heartbeat failed')

            with self.lock:
                if self.complete:
                    # end() was called between beats; stop the timer chain.
                    self.timer[0] = None
                    self.complete = False
                else:
                    # Re-arm the timer for the next beat.
                    self.timer[0] = threading.Timer(self.interval, heartbeat)
                    # NOTE(review): pokes the private _daemonic attribute;
                    # Timer accepts no daemon kwarg but .daemon is the public
                    # property.
                    self.timer[0]._daemonic = True
                    self.timer[0].start()

        if 'before_dl' in self.params:
            self.params['before_dl'](self)
        heartbeat()

    def end(self):
        with self.lock:
            self.timer[0].cancel()
            self.complete = True
        if 'after_dl' in self.params:
            self.params['after_dl'](self)
class HttpServerAugment(Augment):
    """
    Augment for intermediate HTTP server.

    Keys:
        before_dl: Callable to run before download starts. Arguments are: (HttpServerAugment)
        after_dl: Callable to run after download ends. Arguments are: (HttpServerAugment)
        tag: Key for retrieving port number in subsequent Augments.
             Assigned to "_httpserverport_<tag>" key. (optional)
        routes: Route table for the request handler; see create_handler_class.
    """
    _AUGMENT_KEY = 'http_server'

    def __init__(self, dl: 'FileDownloader', info_dict, params: dict) -> None:
        super().__init__(dl, info_dict, params)

    def start(self):
        if 'before_dl' in self.params:
            self.params['before_dl'](self)
        # Bind to an ephemeral port on loopback only.
        self.httpd = http.server.HTTPServer(
            ('127.0.0.1', 0), self.create_handler_class(self.params))
        # Expose the assigned port to later augments via info_dict.
        self.port = self.info_dict[f'_httpserverport_{self.params["tag"]}' if self.params.get('tag') else '_httpserverport'] = \
            self.httpd.socket.getsockname()[1]
        self.server_thread = threading.Thread(
            target=self.httpd.serve_forever, daemon=True)
        self.server_thread.start()

    def end(self):
        if 'after_dl' in self.params:
            self.params['after_dl'](self)
        self.httpd.shutdown()
        self.httpd = None
        self.server_thread = None

    def create_handler_class(self, struct):
        """Build a BaseHTTPRequestHandler subclass from a route table.

        struct is either a callable(handler) -> bool, or a dict like:
            {
                'routes': [{
                    'method': 'GET',
                    'route': '/hello/(?P<world>[^/]+)',   # 're:' prefix for raw regex
                    'callback': lambda handler: True
                }, {
                    'method': 'GET',
                    'route': '/static/test',
                    'data': b'hello world',
                }]
            }
        """
        def respond_constant(value, status_code, headers, handler):
            # Serve a fixed payload for 'data' routes.
            handler.send_response(status_code)
            for k, v in headers.items():
                handler.send_header(k, v)
            handler.end_headers()
            handler.wfile.write(value)
            return True

        def compile_route(route):
            # 're:'-prefixed routes are raw regexes; others match literally.
            if route.startswith('re:'):
                route = route[3:]
            else:
                route = re.escape(route)
            return re.compile(route)

        def process_route(regex, callback, method, handler):
            mobj = re.fullmatch(regex, handler.path)
            if not mobj:
                return False
            if method and method != handler.command:
                return False
            try:
                # Expose named groups to the callback for the duration of the call.
                setattr(handler, 'route_params', mobj.groupdict())
                return callback(handler)
            finally:
                delattr(handler, 'route_params')

        def chain(callbacks, handler):
            # First callback that handles the request wins.
            for cb in callbacks:
                if cb(handler):
                    return True
            return False

        def wrapper(handler):
            # Fixed two bugs in the original wrapper:
            # 1) it called 'assert struct(handler)' BEFORE converting a dict
            #    struct into a callable, so any dict-configured server crashed
            #    on its first request;
            # 2) it rebound 'struct' without declaring it nonlocal, which made
            #    'struct' a local and raised UnboundLocalError on the
            #    'callable(struct)' check.
            nonlocal struct
            if not callable(struct):
                route_callbacks = []
                for r in struct['routes']:
                    if callable(r):
                        route_callbacks.append(r)
                        continue
                    if 'data' in r:
                        dd = r['data']
                        if isinstance(dd, str):
                            dd = dd.encode()
                        r['callback'] = functools.partial(
                            respond_constant, dd, r.get('status_code', 200), r.get('headers') or {})
                    route_callbacks.append(functools.partial(
                        process_route, compile_route(r['route']), r['callback'], r.get('method')))
                # Cache the compiled dispatcher for subsequent requests.
                struct = functools.partial(chain, route_callbacks)
            # An unhandled request is a programming error; fail loudly.
            assert struct(handler)

        return type('Lol this is allowed here', (ReqHandlerBase, ), {
            'do_GET': wrapper,
            'do_POST': wrapper,
        })
class ReqHandlerBase(http.server.BaseHTTPRequestHandler):
    """Request handler base that suppresses per-request stderr logging."""
    def log_message(self, format, *args):
        # Intentionally silent: the default implementation writes to stderr.
        pass
class MetadataEditorAugment(Augment, MetadataParserPP):
    """
    Augment for temporarily rewriting info_dict.
    Values are reverted when end() is called.

    Keys:
        actions: Same as what you pass to MetadataParserPP.
    """
    _AUGMENT_KEY = 'metadata_editor'

    def __init__(self, dl: 'FileDownloader', info_dict, params: dict) -> None:
        super().__init__(dl, info_dict, params)
        MetadataParserPP.__init__(self, dl.ydl, [])
        # List of (key, previous_value) pairs recorded by start(); replayed in
        # reverse by end() to restore the original info_dict.
        self.backlog = None

    def start(self):
        # create backlog of modification to revert things back in end()
        # Each action tuple is (method-enum, *args); the named MetadataParserPP
        # method returns a function applied to info_dict, yielding backlog entries.
        self.backlog = [
            x for z in self.params['actions']
            for x in getattr(self, z[0].value)(*z[1:])(self.info_dict)]

    def end(self):
        infodict = self.info_dict
        # rollback (reverse order so overlapping edits unwind correctly)
        for k, v in reversed(self.backlog):
            if v is MetadataParserPP.BACKLOG_UNSET:
                # Key did not exist before; remove it again.
                infodict.pop(k, None)
            else:
                infodict[k] = v
        # clear backlog
        self.backlog = None

    def to_screen(self, text, prefix=True, *args, **kwargs):
        # super() resolves past Augment to MetadataParserPP via the MRO.
        if not self.get_param('verbose', False):
            return  # don't print anything without -v
        super().to_screen(text, prefix, *args, **kwargs)

    def report_warning(self, text, *args, **kwargs):
        if not self.get_param('verbose', False):
            return  # don't print anything without -v
        super().report_warning(text, *args, **kwargs)
AUGMENT_MAP = {v._AUGMENT_KEY: v for v in (HeartbeatAugment, HttpServerAugment, MetadataEditorAugment)}
|
'''
Utility that performs the dependency searches.
'''
import re
import subprocess
def find_deps(query_package, current_package=None, package_list=None):
    '''
    Recursively finds all dependencies of a package.

    Parameters
    ----------
    query_package: string
        Name of the query package.
    current_package: string
        Current package to recursively search.
    package_list: list of strings
        List that will be populated with dependencies of
        the query package.

    Returns
    ----------
    package_list: list of strings
        Complete list of unique dependencies.
    '''
    # Fixed mutable default argument: the old 'package_list=[]' default was
    # shared across calls, so a second top-level call started with the first
    # call's results already inside it.
    if package_list is None:
        package_list = []
    if current_package is None:
        current_package = query_package

    # 'pip show' prints a "Requires: a, b, c" line listing direct dependencies.
    reqs = None
    pip_text = subprocess.run(['pip', 'show', current_package],
                              stdout=subprocess.PIPE, text=True).stdout.split('\n')
    for line in pip_text:
        if 'Requires' in line:
            reqs = line

    if reqs is not None:
        reqs = re.sub('Requires:| ', '', reqs).split(',')
        if reqs != ['']:
            for dep in reqs:
                if dep not in package_list:
                    package_list = find_deps(query_package, dep, package_list)

    # Record every package except the query root itself.
    if (current_package not in package_list
            and current_package != query_package):
        package_list.append(current_package)

    return package_list
def generate_requirements(dependencies):
    '''
    Generates the dependencies in a format suitable
    for a "requirements.txt" file.

    Parameters
    ----------
    dependencies: list of strings
        list of dependencies

    Returns
    ---------
    results: list of strings
        Dependencies with their version numbers
    '''
    results = []
    # Run 'pip freeze' ONCE (the old code re-ran the pip|grep pipeline for
    # every package) and filter in Python, which is also portable to systems
    # without grep.
    freeze_lines = subprocess.run(['pip', 'freeze'],
                                  stdout=subprocess.PIPE,
                                  text=True).stdout.splitlines()
    # sorted() instead of list.sort(): don't mutate the caller's list.
    for package in sorted(dependencies):
        # Case-insensitive substring match of "package==", mirroring grep -i.
        needle = (package + '==').lower()
        for line in freeze_lines:
            if needle in line.lower():
                # One entry per matching line; the old newline-stripping code
                # fused multiple grep matches into a single garbled entry.
                results.append(line)
    return results
|
# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OpenAI Gym environment factory."""
from typing import Callable, Mapping, Sequence
from absl import flags
from acme import specs
from acme import types
from acme import wrappers
from acme.datasets import tfds
from acme.jax import utils
from acme.tf import networks
from acme.tf import utils as tf2_utils
from acme.utils import loggers
from acme.utils.loggers import tf_summary
import dm_env
import gym
import jax
import numpy as np
import sonnet as snt
FLAGS = flags.FLAGS
# We want all examples to have this same flag defined.
flags.DEFINE_string('tfsummary_logdir', '',
                    'Root directory for logging tf.summary.')

# Named suites of Gym task ids used by the examples.
TASKS = {
    'debug': ['MountainCarContinuous-v0'],
    'default': [
        'HalfCheetah-v2', 'Hopper-v2', 'InvertedDoublePendulum-v2',
        'InvertedPendulum-v2', 'Reacher-v2', 'Swimmer-v2', 'Walker2d-v2'
    ],
}
def make_environment(
    evaluation: bool = False,
    task: str = 'MountainCarContinuous-v0') -> dm_env.Environment:
  """Creates an OpenAI Gym environment wrapped for dm_env consumers.

  Args:
    evaluation: Unused; the same environment serves train and eval.
    task: Gym task id to load.

  Returns:
    The wrapped dm_env.Environment.
  """
  del evaluation  # Same environment for training and evaluation.
  env = gym.make(task)
  # Adapt the gym API to dm_env, clip agent actions to the spec, and force
  # single-precision observations/rewards.
  env = wrappers.GymWrapper(env)
  env = wrappers.CanonicalSpecWrapper(env, clip=True)
  return wrappers.SinglePrecisionWrapper(env)
def make_networks(
    action_spec: specs.BoundedArray,
    policy_layer_sizes: Sequence[int] = (256, 256, 256),
    critic_layer_sizes: Sequence[int] = (512, 512, 256),
    vmin: float = -150.,
    vmax: float = 150.,
    num_atoms: int = 51,
) -> Mapping[str, types.TensorTransformation]:
  """Creates the policy, critic and observation networks used by the agent.

  Args:
    action_spec: Bounded spec describing the action space.
    policy_layer_sizes: Hidden layer sizes of the policy MLP.
    critic_layer_sizes: Hidden layer sizes of the critic MLP.
    vmin: Lower bound of the critic's discrete value distribution.
    vmax: Upper bound of the critic's discrete value distribution.
    num_atoms: Number of atoms in the critic's value distribution.

  Returns:
    Mapping with 'policy', 'critic' and 'observation' entries.
  """
  # Total number of action dimensions, from the action spec's shape.
  action_dims = np.prod(action_spec.shape, dtype=int)

  policy = snt.Sequential([
      networks.LayerNormMLP(policy_layer_sizes, activate_final=True),
      networks.NearZeroInitializedLinear(action_dims),
      networks.TanhToSpec(action_spec),
  ])

  critic = snt.Sequential([
      # The multiplexer concatenates the observations/actions.
      networks.CriticMultiplexer(),
      networks.LayerNormMLP(critic_layer_sizes, activate_final=True),
      networks.DiscreteValuedHead(vmin, vmax, num_atoms),
  ])

  return {
      'policy': policy,
      'critic': critic,
      # Shared observation network; here simply a state-less concat.
      'observation': tf2_utils.batch_concat,
  }
def make_demonstration_iterator(batch_size: int,
                                dataset_name: str,
                                seed: int = 0):
  """Returns an iterator of demonstration batches sampled from a TFDS dataset.

  Args:
    batch_size: Number of transitions per sampled batch.
    dataset_name: Name of the TFDS dataset to load into memory.
    seed: Seed for the sampling PRNG key.
  """
  key = jax.random.PRNGKey(seed)
  dataset = tfds.get_tfds_dataset(dataset_name)
  return tfds.JaxInMemoryRandomSampleIterator(dataset, key, batch_size)
# TODO(sinopalnikov): make it shareable across all examples, not only Gym ones.
def create_logger_fn() -> Callable[[], loggers.Logger]:
"""Returns a function that creates logger instances."""
if not FLAGS.tfsummary_logdir:
# Use default logger.
return lambda: None
def create_logger() -> loggers.Logger:
label = 'learner'
default_learner_logger = loggers.make_default_logger(
label=label,
save_data=False,
time_delta=10.0,
asynchronous=True,
steps_key='learner_steps')
tf_summary_logger = tf_summary.TFSummaryLogger(
logdir=FLAGS.tfsummary_logdir, label=label)
# Sending logs to each of these targets.
destinations = [default_learner_logger, tf_summary_logger]
logger = loggers.aggregators.Dispatcher(
destinations, serialize_fn=utils.fetch_devicearray)
return logger
return create_logger
|
import unittest
from os.path import abspath, basename, join, splitext as split_ext
from tempfile import mkstemp, mkdtemp
from os import remove, close, environ
from time import sleep
from itertools import combinations
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
from shutil import rmtree
class FormManagerTestCase(unittest.TestCase):
    """Tests for the FormManager singleton and its embedded HTTP server.

    NOTE(review): _tmpfiles is a mutable class attribute shared by all
    instances (appended to via the class, so this appears deliberate), but the
    accumulated temp files are never cleaned up in tearDown -- confirm.
    """
    _tmpfiles = []
    # Set by tests that create a manager, so tearDown can always kill it.
    _fm_instance = None

    def setUp(self):
        environ['KIVY_FORM_DEBUG'] = '1'

    # basic class tests
    def test_singleton(self):
        # Every construction must yield the same (singleton) instance.
        from kivy.garden.formmanager import FormManager as FM
        comb = combinations(
            [FM() for i in range(3)], r=2
        )
        for a, b in comb:
            self.assertEqual(a, b)

    def test_kill(self):
        # kill() discards the singleton, so each construction is distinct.
        from kivy.garden.formmanager import FormManager as FM
        inst = []
        for i in range(3):
            fm = FM()
            fm.kill()
            inst.append(fm)
        for a, b in combinations(inst, r=2):
            self.assertNotEqual(a, b)

    # server tests
    def test_not_running(self):
        from kivy.garden.formmanager import FormManager
        fm = FormManager()
        self.assertFalse(fm.running)
        self.assertNotEqual(
            FormManager.get_manager(),
            None
        )

    def test_run(self):
        from kivy.garden.formmanager import FormManager
        fm = FormManager()
        self._fm_instance = fm
        # default class value until changed in run()
        self.assertEqual(fm.port, 0)
        fm.run()
        port = fm.port
        self.assertTrue(fm.running)
        self.assertTrue(port)
        fm.stop()
        # The assigned port survives stop() for potential reuse.
        self.assertFalse(fm.running)
        self.assertTrue(fm.port)
        self.assertEqual(port, fm.port)
        # remove instance
        fm.kill()

    def test_rerun(self):
        # Invalid File Descriptor -1 for socket
        # doesn't work, closed socket can't be reopen apparently
        # https://bugs.python.org/msg278691
        from kivy.garden.formmanager import FormManager
        fm = FormManager()
        self._fm_instance = fm
        # default class value until changed in run()
        self.assertEqual(fm.port, 0)
        fm.run()
        port = fm.port
        self.assertTrue(fm.running)
        self.assertTrue(port)
        fm.stop()
        self.assertFalse(fm.running)
        self.assertTrue(fm.port)
        self.assertEqual(port, fm.port)
        # assert the ValueError, because IFD -1
        # raises STDLIB selectors.py's _fileobj_to_fd
        with self.assertRaises(ValueError):
            fm.server.serve_forever()
        fm.stop()
        self.assertFalse(fm.running)
        self.assertTrue(fm.port)
        self.assertEqual(port, fm.port)
        # remove instance
        fm.kill()

    def test_dummy_post(self):
        # remove later when API is strict
        from kivy.garden.formmanager import FormManager
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        self._send_json(
            host='http://127.0.0.1',
            port=fm.port,
            data={"test": "value"}
        )
        fm.kill()

    def test_add_nonform(self):
        # Adding anything that isn't a Form must be rejected.
        from kivy.garden.formmanager import FormManager, FormManagerException
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        tmpfd, tmpfn = mkstemp('.py')
        FormManagerTestCase._tmpfiles.append([tmpfd, tmpfn])
        form = object()
        with self.assertRaises(FormManagerException):
            fm.add_form(form)
        fm.kill()

    def test_add_remove_form(self):
        from kivy.garden.formmanager import FormManager, Form
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        tmpfd, tmpfn = mkstemp('.py')
        FormManagerTestCase._tmpfiles.append([tmpfd, tmpfn])
        form = Form(tmpfn)
        fm.add_form(form)
        self.assertIn(form.name, fm.forms)
        fm.remove_form(form)
        self.assertNotIn(form.name, fm.forms)
        fm.kill()

    # helper methods
    def _send_json(self, host, port, data):
        # NOTE(review): str(dict) produces single-quoted pseudo-JSON; a strict
        # server would reject it. Fine while the API is loose (see
        # test_dummy_post).
        json = str(data)
        request = Request(
            host + ':' + str(port),
            bytearray(json, 'utf-8'),
            {'Content-Type': 'application/json'}
        )
        json = urlopen(request).read().decode()
        print('result:', json)

    def tearDown(self):
        # in case of assertion error, always kill the server
        if self._fm_instance:
            self._fm_instance.kill()
        environ.pop('KIVY_FORM_DEBUG')
        # Give the server thread a moment to release the socket.
        sleep(0.1)
class FormTestCase(unittest.TestCase):
    """Tests for Form subprocesses controlled by FormManager: naming,
    running one or more forms, and queued action/call requests.
    """
    # temp files created by the tests; removed in tearDownModule
    _tmpfiles = []
    # last FormManager instance; killed in tearDown even if a test fails
    _fm_instance = None
    # minimal Kivy app source; {0} is the capitalized form/app class name
    form_template = (
        "from random import randint\n"
        "from kivy.config import Config\n"
        "Config.set('graphics', 'position', 'custom')\n"
        "Config.set('graphics', 'left', randint(0, 600))\n"
        "Config.set('graphics', 'top', randint(0, 600))\n"
        "from kivy.garden.formmanager import FormApp\n"
        "from kivy.lang import Builder\n"
        "from kivy.uix.boxlayout import BoxLayout\n"
        "Builder.load_string('''\n"
        "<Test>:\n"
        "    Button:\n"
        "        text: app.name\n"
        "''')\n"
        "class Test(BoxLayout):\n"
        "    pass\n"
        "class {0}(FormApp):\n"
        "    def build(self):\n"
        "        return Test()\n"
        "{0}().run()\n"
    )
    def test_name(self):
        """Form name derives from the file's base name without extension."""
        from kivy.garden.formmanager import Form
        tmpfd, tmpfn = mkstemp('.py')
        FormTestCase._tmpfiles.append([tmpfd, tmpfn])
        form = Form(tmpfn)
        self.assertEqual(
            form.name,
            split_ext(basename(abspath(tmpfn)))[0]
        )
    def test_run_form(self):
        """A registered form starts as a subprocess and reports active."""
        # needs more details
        from kivy.garden.formmanager import FormManager, Form
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        tmpdir = mkdtemp()
        tmp_form = join(tmpdir, 'form0.py')
        form_name = split_ext(basename(abspath(tmp_form)))[0]
        with open(tmp_form, 'w') as f:
            f.write(
                self.form_template.format(form_name.capitalize())
            )
        form = Form(tmp_form)
        fm.add_form(form)
        fm.run_form(form)
        # Form application is basically another Kivy app run in
        # a separate process, therefore we have to wait for it to load
        sleep(2)
        self.assertTrue(fm.forms[form.name]['active'])
        # remove form test?
        fm.kill()
        rmtree(tmpdir)
    def test_run_multiple_forms(self):
        """Several forms can run concurrently, each in its own process."""
        # needs more details
        from kivy.garden.formmanager import FormManager, Form
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        tmpdir = mkdtemp()
        for i in range(3):
            tmp_form = join(tmpdir, 'form{}.py'.format(i + 1))
            form_name = split_ext(basename(abspath(tmp_form)))[0]
            with open(tmp_form, 'w') as f:
                f.write(
                    self.form_template.format(form_name.capitalize())
                )
            form = Form(tmp_form)
            fm.add_form(form)
            fm.run_form(form)
            # Form application is basically another Kivy app run in
            # a separate process, therefore we have to wait for it to load
            sleep(3)
            self.assertTrue(fm.forms[form.name]['active'])
        # remove form test?
        fm.kill()
        rmtree(tmpdir)
    def test_run_form_request_action(self):
        """Actions queue per form, are consumed by the running form, and
        the queue key disappears when the form is removed."""
        from kivy.garden.formmanager import FormManager, Form, FormManagerException
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        # request action on non-existing Form
        with self.assertRaises(FormManagerException):
            fm.request_action('form4', 'print', 'nope')
        self.assertEqual(fm.queue, {})
        tmpdir = mkdtemp()
        tmp_form = join(tmpdir, 'form4.py')
        form_name = split_ext(basename(abspath(tmp_form)))[0]
        with open(tmp_form, 'w') as f:
            f.write(
                self.form_template.format(form_name.capitalize())
            )
        form = Form(tmp_form)
        fm.add_form(form)
        fm.run_form(form)
        # Form application is basically another Kivy app run in
        # a separate process, therefore we have to wait for it to load
        sleep(2)
        self.assertTrue(fm.forms[form.name]['active'])
        # request action for Form1
        fm.request_action('form4', 'print', 'test')
        self.assertEqual(
            fm.queue,
            {'form4': [['print', 'test']]}
        )
        sleep(1)
        # after request the action is popped,
        # but Form remains in the queue as a key
        self.assertEqual(fm.queue, {"form4": []})
        # after the Form is removed, the key should too
        fm.remove_form(form)
        self.assertNotIn(form.name, fm.forms)
        self.assertEqual(fm.queue, {})
        fm.kill()
        rmtree(tmpdir)
    def test_run_form_request_call(self):
        """'call' actions (method invocations) follow the same queue flow."""
        from kivy.garden.formmanager import FormManager, Form, FormManagerException
        fm = FormManager()
        self._fm_instance = fm
        fm.run()
        self.assertEqual(fm.queue, {})
        tmpdir = mkdtemp()
        tmp_form = join(tmpdir, 'form5.py')
        form_name = split_ext(basename(abspath(tmp_form)))[0]
        with open(tmp_form, 'w') as f:
            f.write(
                self.form_template.format(form_name.capitalize())
            )
        form = Form(tmp_form)
        fm.add_form(form)
        fm.run_form(form)
        # Form application is basically another Kivy app run in
        # a separate process, therefore we have to wait for it to load
        sleep(2)
        self.assertTrue(fm.forms[form.name]['active'])
        # request action for Form1
        fm.request_action('form5', 'call', ['self', 'open_settings'])
        self.assertEqual(
            fm.queue,
            {'form5': [['call', ['self', 'open_settings']]]}
        )
        sleep(1)
        # after request the action is popped,
        # but Form remains in the queue as a key
        self.assertEqual(fm.queue, {"form5": []})
        # after the Form is removed, the key should too
        fm.remove_form(form)
        self.assertNotIn(form.name, fm.forms)
        self.assertEqual(fm.queue, {})
        fm.kill()
        rmtree(tmpdir)
    def tearDown(self):
        # in case of assertion error, always kill the server
        if self._fm_instance:
            self._fm_instance.kill()
        sleep(1)
def tearDownModule():
    """Remove all temporary files created during the tests.

    Nothing should use the files at this point. Each close/remove is
    wrapped in contextlib.suppress(OSError) so one failed cleanup (e.g.
    an already-closed descriptor) no longer aborts the remaining removals,
    and the two previously-duplicated loops are merged via chain().
    """
    from contextlib import suppress
    from itertools import chain
    for desc, tmp in chain(FormManagerTestCase._tmpfiles, FormTestCase._tmpfiles):
        with suppress(OSError):
            close(desc)
        with suppress(OSError):
            remove(tmp)
if __name__ == '__main__':
    # run the whole suite when executed directly
    unittest.main()
|
# Copyright (c) 2021 Qualcomm Technologies, Inc.
# All Rights Reserved.
import logging
from math import ceil
import numpy as np
import torch
import torch.nn.functional as F
from quantization.adaround.quantizer import ADAROUND_QUANTIZER_MAP
from quantization.adaround.utils import (
MODE_TO_LOSS_TYPE,
AdaRoundInitMode,
CombinedLoss,
GetLayerInpOut,
LayerOutputMSE,
)
from utils.utils import DotDict
# setup logger
logger = logging.getLogger('AdaRound')
logger.setLevel(logging.INFO)
def apply_adaround_to_layer(model, layer, data_tensor, batch_size, act_quant, adaround_config,
                            keep_gpu=True):
    """Apply AdaRound to a `layer` in the `model`.

    Swaps the layer's weight quantizer for its AdaRound counterpart,
    optimizes the rounding variable `alpha` against the layer's own
    output on `data_tensor`, then freezes the hard up/down decision.

    Args:
        model: full model, used to produce the layer's inputs/outputs.
        layer: quantized layer to optimize (must expose `weight_quantizer`,
            `activation_function` and `caching`).
        data_tensor: calibration data, consumed in `batch_size` chunks.
        batch_size: samples per optimization step.
        act_quant: whether activation quantization is active while
            collecting layer inputs/outputs.
        adaround_config: AdaRound hyper-parameters (init mode, round mode,
            annealing schedule, iters, lr, ...).
        keep_gpu: keep cached calibration data on the GPU when possible.

    Returns:
        DotDict with soft/hard local losses before and after optimization.
    """
    # disable caching of quantized params
    layer.caching = False
    # grid initialization
    if adaround_config.init == AdaRoundInitMode.range_estimator:
        pass  # already initialized
    elif adaround_config.init == AdaRoundInitMode.mse:
        apply_mse_init(layer)
    elif adaround_config.init == AdaRoundInitMode.mse_out:
        apply_mse_out_init(model, layer, data_tensor, batch_size)
    elif adaround_config.init == AdaRoundInitMode.mse_out_asym:
        apply_mse_out_init(model, layer, data_tensor, batch_size, asym=True)
    else:
        raise ValueError(f'Unknown initialization for AdaRound: {adaround_config.init}')
    # optionally optimize the pre-activation output: detach the activation
    # function for the duration of the optimization
    if not adaround_config.include_act_func:
        org_act_func = layer.activation_function
        layer.activation_function = None
    # replace quantizer with AdaRound quantizer
    org_w_quantizer = layer.weight_quantizer.quantizer
    org_w_quant_cls = org_w_quantizer.__class__
    if org_w_quant_cls not in ADAROUND_QUANTIZER_MAP:  # idiomatic `not in`
        raise NotImplementedError(f'AdaRound is not supported for "{org_w_quant_cls}"')
    new_w_quant_cls = ADAROUND_QUANTIZER_MAP[org_w_quant_cls]
    w_quantizer = new_w_quant_cls(
        n_bits=org_w_quantizer.n_bits,
        scale_domain=org_w_quantizer.scale_domain,
        per_channel=org_w_quantizer.per_channel,
        eps=org_w_quantizer.eps,
    )
    # carry over the already-estimated quantization grid
    w_quantizer.register_buffer('_delta', org_w_quantizer._delta)
    w_quantizer.register_buffer('_zero_float', org_w_quantizer._zero_float)
    if hasattr(org_w_quantizer, '_signed'):
        w_quantizer.register_buffer('_signed', org_w_quantizer._signed)
    layer.weight_quantizer.quantizer = w_quantizer
    # set AdaRound attributes
    w_quantizer.round_mode = adaround_config.round_mode
    w_quantizer.temperature = adaround_config.annealing[0]
    # single test (and init alpha)
    get_inp_out = GetLayerInpOut(model, layer, asym=adaround_config.asym, act_quant=act_quant)
    inp, out = get_inp_out(data_tensor[:batch_size])
    loss_soft_before, loss_hard_before = _compute_and_display_local_losses(
        w_quantizer, layer, inp, out, infix='before optimization'
    )
    w_quantizer.soft_targets = True
    # define loss
    loss_type = MODE_TO_LOSS_TYPE[w_quantizer.round_mode]
    loss_fn = CombinedLoss(
        quantizer=w_quantizer,
        loss_type=loss_type,
        weight=adaround_config.weight,
        max_count=adaround_config.iters,
        b_range=adaround_config.annealing,
        warmup=adaround_config.warmup,
        decay_type=adaround_config.decay_type,
        decay_shape=adaround_config.decay_shape,
        decay_start=adaround_config.decay_start,
    )
    # define optimizer (only the rounding variable alpha is trained)
    opt_params = [w_quantizer.alpha]
    optimizer = torch.optim.Adam(opt_params, lr=adaround_config.lr)
    # main loop
    optimize_local_loss(
        layer,
        get_inp_out,
        data_tensor,
        optimizer,
        loss_fn,
        batch_size,
        adaround_config.iters,
        keep_gpu=keep_gpu,
    )
    # check afterwards
    logger.info(f'Local loss before optimization (hard quant): {loss_hard_before:.7f}')
    loss_soft_after, loss_hard_after = _compute_and_display_local_losses(
        w_quantizer, layer, inp, out, infix='after optimization'
    )
    # set to hard decision up/down
    w_quantizer.soft_targets = False
    # restore original activation function
    if not adaround_config.include_act_func:
        layer.activation_function = org_act_func
    # restore caching of quantized params
    # NOTE(review): assumes caching was enabled before this call — confirm.
    layer.caching = True
    # prepare output
    out = DotDict(
        loss_soft_before=loss_soft_before,
        loss_hard_before=loss_hard_before,
        loss_soft_after=loss_soft_after,
        loss_hard_after=loss_hard_after,
    )
    return out
def _compute_and_display_local_losses(quantizer, layer, inp, out, infix=''):
org_soft_targets = quantizer.soft_targets
quantizer.soft_targets = True
out_soft_quant = layer(inp)
quantizer.soft_targets = False
out_hard_quant = layer(inp)
soft_quant_loss = F.mse_loss(out_soft_quant, out)
hard_quant_loss = F.mse_loss(out_hard_quant, out)
if infix:
infix = infix.strip() + ' '
logger.info(f'Local loss {infix}(soft quant): {soft_quant_loss:.7f}')
logger.info(f'Local loss {infix}(hard quant): {hard_quant_loss:.7f}')
quantizer.soft_targets = org_soft_targets
return float(soft_quant_loss), float(hard_quant_loss)
def apply_mse_init(layer):
    """Grid-search a symmetric quantization range minimizing weight MSE.

    Tries 80 candidate maxima, shrinking from the weight's absolute
    maximum in 1% steps, and keeps the symmetric range with the lowest
    reconstruction error on the weights themselves.
    """
    weight = layer.weight
    quantizer = layer.weight_quantizer.quantizer
    with torch.no_grad():
        absmax = torch.max(weight.max(), torch.abs(weight.min()))
        best_mse = np.inf
        best_range = absmax
        for step in range(80):
            candidate = absmax * (1.0 - 0.01 * step)
            quantizer.set_quant_range(-candidate, candidate)
            mse = F.mse_loss(weight, quantizer(weight)).item()
            if mse < best_mse:
                best_mse = mse
                best_range = candidate
        logger.info(f'Finished: set max={best_range:.3f} (mse={best_mse:.7f})')
        quantizer.set_quant_range(-best_range, best_range)
def apply_mse_out_init(model, layer, data_tensor, batch_size, asym=False):
    """Grid-search a symmetric weight quantization range minimizing the
    layer's *output* MSE (rather than the weight MSE of apply_mse_init).

    Args:
        model: full model, used by GetLayerInpOut to collect layer I/O.
        layer: quantized layer whose weight range is initialized.
        data_tensor: calibration data.
        batch_size: batch size used when evaluating the output loss.
        asym: collect outputs asymmetrically (FP input, quantized layer).
    """
    w = layer.weight
    q = layer.weight_quantizer.quantizer
    get_inp_out = GetLayerInpOut(model, layer, asym=asym)
    # LayerOutputMSE evaluates the layer-output reconstruction error for
    # the currently configured quantization range
    loss_fn = LayerOutputMSE(layer, get_inp_out, data_tensor, batch_size)
    with torch.no_grad():
        w_absmax = torch.max(w.max(), torch.abs(w.min()))
        best_score = np.inf
        best_max = w_absmax
        # shrink the candidate max in 1% steps and keep the best range
        for i in range(80):
            s = w_absmax * (1.0 - 0.01 * i)
            q.set_quant_range(-s, s)
            score = loss_fn()
            if score < best_score:
                best_score = score
                best_max = s
        logger.info(f'Finished: set max={best_max:.3f} (mse={best_score:.7f})')
        q.set_quant_range(-best_max, best_max)
def optimize_local_loss(layer, get_inp_out, data_tensor, optimizer, loss_fn, batch_size, iters,
                        use_cached_data=True, keep_gpu=True):
    """AdaRound optimization loop.

    Args:
        layer: layer whose quantized output is optimized.
        get_inp_out: callable returning (input, target_output) for a batch.
        data_tensor: full calibration data, indexed along dim 0.
        optimizer: optimizer over the AdaRound parameters.
        loss_fn: callable ``loss_fn(out_quant, target)`` returning a scalar.
        batch_size: samples per step.
        iters: number of optimization steps.
        use_cached_data: pre-compute layer inputs/outputs once and sample
            batches from the cache; otherwise recompute per step.
        keep_gpu: try to keep the cached data on the GPU (falls back to
            CPU on out-of-memory).
    """
    if use_cached_data:
        logger.info('Caching data for local loss optimization')
        cached_batches = []
        if keep_gpu:
            torch.cuda.empty_cache()
        with torch.no_grad():
            for i in range(ceil(data_tensor.size(0) / batch_size)):
                cur_inp, cur_out = get_inp_out(data_tensor[i * batch_size:(i + 1) * batch_size])
                cached_batches.append((cur_inp.cpu(), cur_out.cpu()))
        cached_inps = torch.cat([x[0] for x in cached_batches])
        cached_outs = torch.cat([x[1] for x in cached_batches])
        device = cur_inp.device
        del cached_batches
        if keep_gpu:  # put all cached data on GPU for faster optimization
            torch.cuda.empty_cache()
            try:
                cached_inps = cached_inps.to(device)
                cached_outs = cached_outs.to(device)
            except RuntimeError as e:
                logger.warning(
                    f"WARNING: could not cache training data on GPU, keep on CPU ({e})"
                )
                cached_inps = cached_inps.cpu()
                cached_outs = cached_outs.cpu()
        num_samples = cached_inps.size(0)
    else:
        # BUGFIX: the sampling below previously always read
        # `cached_inps.size(0)`, which raised NameError whenever
        # use_cached_data=False; sample from the raw data tensor instead.
        num_samples = data_tensor.size(0)
    for i in range(iters):
        idx = torch.randperm(num_samples)[:batch_size]
        if use_cached_data:
            cur_inp = cached_inps[idx].to(device)
            cur_out = cached_outs[idx].to(device)
        else:
            cur_inp, cur_out = get_inp_out(data_tensor[idx])
        optimizer.zero_grad()
        try:
            out_quant = layer(cur_inp)
            loss = loss_fn(out_quant, cur_out)
            loss.backward()
        except RuntimeError as e:
            # CUDA OOM recovery: demote the cache to CPU and carry on;
            # per-batch tensors are moved back to `device` on each step.
            if use_cached_data and 'cuda' in str(cached_inps.device):
                logger.warning(
                    f"WARNING: not enough CUDA memory for forward pass, "
                    f"move cached data to CPU ({e})"
                )
                cached_inps = cached_inps.cpu()
                cached_outs = cached_outs.cpu()
            else:
                raise e
        optimizer.step()
|
# Copyright (C) 2019 Cancer Care Associates
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
import pytest
import pydicom
from pymedphys._dicom.collection import DicomBase
from pymedphys._dicom.rtplan import (
get_surface_entry_point,
get_surface_entry_point_with_fallback,
)
from pymedphys._dicom.rtplan.core import DICOMEntryMissing
HERE = pathlib.Path(__file__).parent
DATA_DIR = HERE.joinpath("data", "rtplan")
DICOM_PLAN_FILEPATH = DATA_DIR.joinpath("06MV_plan.dcm")
def test_surface_entry_with_fallback():
    """Fallback extraction: raises for an unsupported gantry angle, and
    falls back to a gantry-derived entry point when every
    SurfaceEntryPoint tag has been stripped from the plan."""
    should_fail_with_unsupported_gantry = DicomBase.from_dict(
        {"BeamSequence": [{"ControlPointSequence": [{"GantryAngle": "5.0"}]}]}
    )
    with pytest.raises(ValueError):
        get_surface_entry_point_with_fallback(
            should_fail_with_unsupported_gantry.dataset
        )
    # FIX: pydicom.read_file is deprecated and removed in pydicom 3.0;
    # dcmread is the supported API.
    plan_dataset = pydicom.dcmread(str(DICOM_PLAN_FILEPATH), force=True)
    # strip every SurfaceEntryPoint so only the fallback path remains
    for beam in plan_dataset.BeamSequence:
        for control_point in beam.ControlPointSequence:
            try:
                del control_point.SurfaceEntryPoint
            except AttributeError:
                pass
    with pytest.raises(DICOMEntryMissing):
        get_surface_entry_point(plan_dataset)
    assert get_surface_entry_point_with_fallback(plan_dataset) == (0.0, -300.0, 0.0)
def test_surface_entry():
    """Direct extraction: reads SurfaceEntryPoint, rejects plans with no
    control points or with inconsistent entry points across beams."""
    # FIX: pydicom.read_file is deprecated and removed in pydicom 3.0;
    # dcmread is the supported API.
    plan = pydicom.dcmread(str(DICOM_PLAN_FILEPATH), force=True)
    assert get_surface_entry_point(plan) == (0.0, -300.0, 0.0)
    should_pass = DicomBase.from_dict(
        {
            "BeamSequence": [
                {
                    "ControlPointSequence": [
                        {"SurfaceEntryPoint": ["10.0", "20.0", "30.0"]}
                    ]
                }
            ]
        }
    )
    assert get_surface_entry_point(should_pass.dataset) == (10.0, 20.0, 30.0)
    should_fail_with_no_points = DicomBase.from_dict(
        {"BeamSequence": [{"ControlPointSequence": []}]}
    )
    with pytest.raises(DICOMEntryMissing):
        get_surface_entry_point(should_fail_with_no_points.dataset)
    # two beams disagreeing on the entry point must be rejected
    should_fail_with_differing_points = DicomBase.from_dict(
        {
            "BeamSequence": [
                {
                    "ControlPointSequence": [
                        {"SurfaceEntryPoint": ["10.0", "20.0", "30.0"]}
                    ]
                },
                {
                    "ControlPointSequence": [
                        {"SurfaceEntryPoint": ["20.0", "20.0", "30.0"]}
                    ]
                },
            ]
        }
    )
    with pytest.raises(ValueError):
        get_surface_entry_point(should_fail_with_differing_points.dataset)
|
from flask import Flask, abort
from flask_restx import Resource, Api, reqparse
from flask_restx import inputs, fields
# Flask application and its flask-restx API wrapper
app = Flask(__name__)
api = Api(app)
@api.route('/hello')
class HelloWorld(Resource):
    """Trivial liveness resource returning a fixed greeting."""

    def get(self):
        """Return the canonical two-word greeting as a JSON list."""
        greeting = ['hello', 'world']
        return greeting
@api.route('/lects/<int:lect_id>', endpoint='goober')
class Lect(Resource):
    """Lection resource: returns a (demo) list of lections for `lect_id`."""
    # marshalling schema for one lection
    model = api.model('Lection', {
        'id': fields.String,
        'season': fields.String,
        'short_name': fields.String,
        'long_name': fields.String,
        'note': fields.String,
        'first_reading': fields.String,
        # BUGFIX: field name was misspelled 'psaslm'
        'psalm': fields.String,
        'second_reading': fields.String,
        'gospel': fields.String,
    })
    listModel = api.model('LectionList', {
        'lections': fields.List(fields.Nested(model)),
    })
    @api.marshal_with(listModel)
    def get(self, lect_id):
        """Return lections for `lect_id`; aborts with 400 for id 13.

        Query parameters: rate (int), start_date (date, default
        1970-01-01), end_date (date).
        """
        parser = reqparse.RequestParser()
        parser.add_argument(
            'rate', type=int, help='Flow rate for the whozinator')
        parser.add_argument(
            'start_date',
            type=inputs.date,
            help='Beginning of time',
            default='1970-01-01')
        parser.add_argument(
            'end_date', type=inputs.date, help='End of time')
        args = parser.parse_args()
        if lect_id == 13:
            abort(400, description="Don't give me a thirteen")
        # NOTE(review): only 'id' and 'note' survive marshalling with
        # `model`; 'rate'/'start_date'/'end_date' are silently dropped —
        # confirm whether they belong in the schema.
        return {
            'lections':
            [
                {
                    'id': lect_id,
                    'rate': args.rate,
                    'start_date': str(args.start_date),
                    'end_date': str(args.end_date),
                    'note': 'This is a note!',
                }
            ]
        }
if __name__ == '__main__':
    # development server only; use a production WSGI server for deployment
    app.run(debug=True)
|
################################################################################
# Project : AuShadha
# Description : Immunisation Models.
# Author : Dr.Easwar T.R , All Rights reserved with Dr.Easwar T.R.
# Date : 16-09-2013
################################################################################
import importlib
from django.db import models
from django.contrib.auth.models import User
from AuShadha.apps.aushadha_base_models.models import (
AuShadhaBaseModel,
AuShadhaBaseModelForm )
#from AuShadha.apps.aushadha_users.models import AuShadhaUser
from AuShadha.apps.ui.ui import ui as UI
from AuShadha.apps.clinic.models import Staff
#from patient.models import PatientDetail
from registry.vaccine_registry.models import (
VaccineRegistry,
VaccineDetail,
VaccineData )
PatientDetail = UI.get_module("PatientRegistration")
from dijit_fields_constants import IMMUNISATION_FORM_CONSTANTS
# Anatomical sites available when recording a vaccine administration.
INJECTION_SITE_CHOICES = (("lue", "Left Upper Arm"),
                          ("rue", "Right Upper Arm"),
                          ("lb", "Left Buttock"),
                          ("rb", "Right Buttock"),
                          ("abd", "Abdomen"),
                          ("oral", "Oral")
                          )
# Supported routes of administration.
INJECTION_ROUTE_CHOICES = (("im", "IM"),
                           ("deep_im", "Deep IM"),
                           ("iv", "Intravenous"),
                           ("sc", "Sub Cutaneous"),
                           ("oral", "Oral")
                           )
# Fields excluded from the auto-generated Immunisation form.
DEFAULT_IMMUNISATION_FORM_EXCLUDES = ('patient_detail','administrator',)
class Immunisation(AuShadhaBaseModel):
    """
    This defines the Immunisation that the patient has had.
    """
    def __init__(self, *args, **kwargs):
        super(Immunisation,self).__init__(*args, **kwargs)
        # model label / parent linkage used by the AuShadha UI framework
        self.__model_label__ = "immunisation"
        self._parent_model = 'patient_detail'
    # NOTE(review): positional ForeignKey without on_delete implies a
    # Django < 2.0 codebase; on_delete is mandatory from Django 2.0.
    vaccine_detail = models.ForeignKey(VaccineDetail)
    # NOTE(review): the defaults "IM" / "Right Upper Arm" are the display
    # values, not the stored choice keys ("im"/"rue") — confirm intent.
    route = models.CharField(max_length=30,
                             choices= INJECTION_ROUTE_CHOICES,
                             default="IM"
                             )
    injection_site = models.CharField(max_length=100,
                                      choices=INJECTION_SITE_CHOICES,
                                      default="Right Upper Arm"
                                      )
    # dose number in the vaccination series, or a booster
    dose = models.CharField(max_length=100,choices=( ('1','1'),
                                                     ('2','2'),
                                                     ('3','3'),
                                                     ('4','4'),
                                                     ('5','5'),
                                                     ('booster','Booster')
                                                     ) )
    vaccination_date = models.DateField(auto_now_add=False)
    next_due = models.DateField(auto_now_add=False)
    adverse_reaction = models.TextField(max_length=100, default="None")
    patient_detail = models.ForeignKey(PatientDetail, null=True, blank=True)
    # administrator = models.ForeignKey(AuShadhaUser,null=True,blank=True)
    administrator = models.ForeignKey(Staff,null=True,blank=True)
    def __unicode__(self):
        # Python 2 style string representation (legacy codebase)
        return "%s" % (self.vaccine_detail)
class ImmunisationForm(AuShadhaBaseModelForm):
    """
    This defines the Immunisation Form
    """
    # display name used by the AuShadha UI
    __form_name__ = "Immunisation Form"
    # dijit widget configuration for the form fields
    dijit_fields = IMMUNISATION_FORM_CONSTANTS
    class Meta:
        model = Immunisation
        exclude = DEFAULT_IMMUNISATION_FORM_EXCLUDES
|
"""
@Time : 2021/10/9 10:59
@File : moco.py
@Software: PyCharm
@Desc :
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from physiossl.dist.utils import is_distributed_enabled
class Moco(nn.Module):
    """Momentum Contrast (MoCo): a gradient-trained query encoder, an
    EMA-updated key encoder, and a fixed-size queue of negative keys.
    """
    def __init__(self, base_encoder: nn.Module, feature_dim: int, m: float = 0.999, K: int = 65536):
        """
        Args:
            base_encoder: encoder factory; invoked as
                ``base_encoder(classes=feature_dim)``.
            feature_dim: dimensionality of the output embedding.
            m: EMA momentum for the key-encoder update.
            K: queue length (number of stored negative keys).
        """
        super(Moco, self).__init__()
        self.feature_dim = feature_dim
        self.m = m
        self.K = K
        # two encoders with identical architecture
        self.encoder_q = base_encoder(classes=feature_dim)
        self.encoder_k = base_encoder(classes=feature_dim)
        for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
            param_k.data.copy_(param_q.data)  # initialize
            param_k.requires_grad = False  # not update by gradient
        # create the queue (feature_dim x K), L2-normalized per column
        self.register_buffer("queue", torch.randn(feature_dim, K))
        self.queue = F.normalize(self.queue, dim=0)
        self.register_buffer("queue_ptr", torch.zeros(1, dtype=torch.long))
    def forward(self, x_q: torch.Tensor, x_k: torch.Tensor):
        """Encode a query/key batch pair.

        Returns ``(z_q, z_k, queue)`` where ``queue`` is a detached
        snapshot taken *before* the new keys are enqueued.
        NOTE(review): ``z_q`` and the returned ``z_k`` are not
        L2-normalized here; only the enqueued copy of ``z_k`` is —
        presumably the caller normalizes before the contrastive loss.
        """
        z_q = self.encoder_q(x_q)
        with torch.no_grad():
            self._momentum_update_key_encoder()
            if is_distributed_enabled():
                # shuffle across GPUs so BatchNorm statistics cannot leak
                # which keys belong to which queries
                x_k, idx_unshuffle = self._batch_shuffle_ddp(x_k)
            z_k = self.encoder_k(x_k)
            if is_distributed_enabled():
                z_k = self._batch_unshuffle_ddp(z_k, idx_unshuffle)
        queue = self.queue.clone().detach()
        self._dequeue_and_enqueue(F.normalize(z_k, p=2, dim=-1))
        return z_q, z_k, queue
    @staticmethod
    @torch.no_grad()
    def concat_all_gather(tensor):
        """
        Performs all_gather operation on the provided tensors.
        *** Warning ***: torch.distributed.all_gather has no gradient.
        """
        if is_distributed_enabled():
            tensors_gather = [torch.ones_like(tensor)
                              for _ in range(torch.distributed.get_world_size())]
            torch.distributed.all_gather(tensors_gather, tensor, async_op=False)
            output = torch.cat(tensors_gather, dim=0)
        else:
            # single-process fallback: nothing to gather
            output = tensor
        return output
    @torch.no_grad()
    def _momentum_update_key_encoder(self):
        """
        Momentum update of the key encoder
        """
        for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
            param_k.data = param_k.data * self.m + param_q.data * (1. - self.m)
    @torch.no_grad()
    def _dequeue_and_enqueue(self, keys):
        """Replace the oldest `batch_size` queue columns with `keys`."""
        # gather keys before updating queue
        keys = self.concat_all_gather(keys)
        batch_size = keys.shape[0]
        ptr = int(self.queue_ptr)
        assert self.K % batch_size == 0  # for simplicity
        # replace the keys at ptr (dequeue and enqueue)
        self.queue[:, ptr:ptr + batch_size] = keys.T
        ptr = (ptr + batch_size) % self.K  # move pointer
        self.queue_ptr[0] = ptr
    @torch.no_grad()
    def _batch_shuffle_ddp(self, x):
        """
        Batch shuffle, for making use of BatchNorm.
        *** Only support DistributedDataParallel (DDP) model. ***
        """
        # gather from all gpus
        batch_size_this = x.shape[0]
        x_gather = self.concat_all_gather(x)
        batch_size_all = x_gather.shape[0]
        num_gpus = batch_size_all // batch_size_this
        # random shuffle index
        idx_shuffle = torch.randperm(batch_size_all).cuda()
        # broadcast to all gpus
        torch.distributed.broadcast(idx_shuffle, src=0)
        # index for restoring
        idx_unshuffle = torch.argsort(idx_shuffle)
        # shuffled index for this gpu
        gpu_idx = torch.distributed.get_rank()
        idx_this = idx_shuffle.view(num_gpus, -1)[gpu_idx]
        return x_gather[idx_this], idx_unshuffle
    @torch.no_grad()
    def _batch_unshuffle_ddp(self, x, idx_unshuffle):
        """
        Undo batch shuffle.
        *** Only support DistributedDataParallel (DDP) model. ***
        """
        # gather from all gpus
        batch_size_this = x.shape[0]
        x_gather = self.concat_all_gather(x)
        batch_size_all = x_gather.shape[0]
        num_gpus = batch_size_all // batch_size_this
        # restored index for this gpu
        gpu_idx = torch.distributed.get_rank()
        idx_this = idx_unshuffle.view(num_gpus, -1)[gpu_idx]
        return x_gather[idx_this]
|
# -*- coding: utf-8 -*-
# Copyright 2015 www.suishouguan.com
#
# Licensed under the Private License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/samuelbaizg/ssguan/blob/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ssguan.ignitor.base.error import Error
class KeyLengthError(Error):
    """Raised when a cache key exceeds the backend's maximum key length."""
    def __init__(self, key, max_length):
        super(KeyLengthError, self).__init__("Cache key {{key}} is over the {{max_length}}.", key=key, max_length=max_length)
    @property
    def code(self):
        # unique numeric code identifying this error type
        return 1150
class KeyCharError(Error):
    """Raised when a cache key contains disallowed (invisible) characters."""
    def __init__(self, key, chars):
        super(KeyCharError, self).__init__("Cache key {{key}} contains invisible characters {{chars}}.", key=key, chars=chars)
    @property
    def code(self):
        # unique numeric code identifying this error type
        return 1151
from django.apps import AppConfig
class GraphConfig(AppConfig):
    """Django application configuration for the Graph app."""
    # app label Django uses to register the application
    name = 'Graph'
#!/usr/bin/env/python3
# This Python file uses the following encoding:utf-8
# Author: Jolanda de Koff Bulls Eye
# GitHub: https://github.com/BullsEye0
# Website: https://hackingpassion.com
# linkedin: https://www.linkedin.com/in/jolandadekoff
# Facebook: facebook.com/jolandadekoff
# Facebook Page: https://www.facebook.com/ethical.hack.group
# Facebook Group: https://www.facebook.com/groups/hack.passion/
# YouTube: https://www.youtube.com/BullsEyeJolandadeKoff
# Blue Eye Created June- August 2019
# Blue Eye v2 January 2020
# Copyright (c) 2019 - 2020 Jolanda de Koff.
########################################################################
# A notice to all nerds and n00bs...
# If you will copy the developer's work it will not make you a hacker..!
# Respect all developers, we doing this because it's fun...
########################################################################
import dns.resolver
import json
import nmap
import os
import re
import requests
import socket
import time
import urllib.request
from time import gmtime, strftime
# ASCII-art startup banner (ANSI blue)
banner = ("""
\033[1;34m
▄▄▄▄ ██▓ █ ██ ▓█████ ▓█████▓██ ██▓▓█████
▓█████▄ ▓██▒ ██ ▓██▒▓█ ▀ ▓█ ▀ ▒██ ██▒▓█ ▀
▒██▒ ▄██▒██░ ▓██ ▒██░▒███ ▒███ ▒██ ██░▒███
▒██░█▀ ▒██░ ▓▓█ ░██░▒▓█ ▄ ▒▓█ ▄ ░ ▐██▓░▒▓█ ▄
░▓█ ▀█▓░██████▒▒▒█████▓ ░▒████▒ ░▒████▒ ░ ██▒▓░░▒████▒
░▒▓███▀▒░ ▒░▓ ░░▒▓▒ ▒ ▒ ░░ ▒░ ░ ░░ ▒░ ░ ██▒▒▒ ░░ ▒░ ░
▒░▒ ░ ░ ░ ▒ ░░░▒░ ░ ░ ░ ░ ░ ░ ░ ░▓██ ░▒░ ░ ░ ░
░ ░ ░ ░ ░░░ ░ ░ ░ ░ ▒ ▒ ░░ ░
░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░
░ ░ ░ v2
\033[1;m
\033[34mBlue Eye\033[0m Recon Toolkit
Author: Jolanda de Koff Bulls Eye
Github: https://github.com/BullsEye0
Website: https://HackingPassion.com
\033[1;31mHi there, Shall we play a game..?\033[0m 😃
""")
print (banner)
time.sleep(0.4)
# target domain and company name drive every lookup below
target = input("[+] \033[34mWhat domain do you want to search: \033[0m").strip()
compname = input("[+] \033[34mEnter the company name: \033[0m").strip()
# bare host name (text left of the first dot), used to guess related hosts
company = target.partition(".")
comp = company[0]
time.sleep(1)
def jan():
    """Print IP/geo information for `target` using ip-api.com."""
    try:
        url = ("http://ip-api.com/json/")
        # FIX: close the HTTP response deterministically (it was leaked)
        with urllib.request.urlopen(url + target) as response:
            data = response.read()
        jso = json.loads(data)
        time.sleep(1.5)
        print("\n [+] \033[34mUrl: " + target + "\033[0m")
        print(" [+] " + "\033[34m" + "IP: " + jso["query"] + "\033[0m")
        print(" [+] " + "\033[34m" + "Status: " + jso["status"] + "\033[0m")
        print(" [+] " + "\033[34m" + "Region: " + jso["regionName"] + "\033[0m")
        print(" [+] " + "\033[34m" + "Country: " + jso["country"] + "\033[0m")
        print(" [+] " + "\033[34m" + "City: " + jso["city"] + "\033[0m")
        print(" [+] " + "\033[34m" + "ISP: " + jso["isp"] + "\033[0m")
        print(" [+] " + "\033[34m" + "Lat & Lon: " + str(jso['lat']) + " & " + str(jso['lon']) + "\033[0m")
        print(" [+] " + "\033[34m" + "Zipcode: " + jso["zip"] + "\033[0m")
        print(" [+] " + "\033[34m" + "TimeZone: " + jso["timezone"] + "\033[0m")
        print(" [+] " + "\033[34m" + "AS: " + jso["as"] + "\033[0m" + "\n")
        print ("»"*60 + "\n")
        time.sleep(1)
    except Exception:
        # best effort: any lookup/JSON failure is deliberately skipped
        pass
    except KeyboardInterrupt:
        print("\n")
        print("[-] User Interruption Detected..!")
        time.sleep(1)
def header():
    """Fetch and display the HTTP response headers of `target` (httpie)."""
    try:
        import shlex  # local import: only needed to quote the target
        print("\033[34mScanning.... HTTP Header \033[0m" + target)
        time.sleep(1.5)
        # SECURITY FIX: `target` comes straight from input(); quote it so
        # a crafted value cannot inject additional shell commands.
        command = ("http -v " + shlex.quote(target))
        proces = os.popen(command)
        results = str(proces.read())
        print("\033[1;34m" + results + command + "\033[1;m")
        print ("»"*60 + "\n")
    except Exception:
        # best effort: missing `http` binary etc. is deliberately skipped
        pass
    except KeyboardInterrupt:
        print("\n")
        print("[-] User Interruption Detected..!")
        time.sleep(1)
def nmaps():
    """Run an `nmap -Pn` port scan of `target` and print the results."""
    try:
        import shlex  # local import: only needed to quote the target
        print("\033[34mScanning.... Nmap Port Scan: \033[0m" + target)
        print ("[+]\033[34m - --> \033[0mThis may take a moment \033[34mBlue Eye\033[0m gathers the data.....\n")
        time.sleep(1)
        scanner = nmap.PortScanner()
        # SECURITY FIX: quote the user-supplied target before building the
        # shell command to prevent command injection via os.popen.
        command = ("nmap -Pn " + shlex.quote(target))
        process = os.popen(command)
        results = str(process.read())
        logPath = "logs/nmap-" + strftime("%Y-%m-%d_%H:%M:%S", gmtime())
        print("\033[34m" + results + command + logPath + "\033[0m")
        print("\033[34mNmap Version: \033[0m", scanner.nmap_version())
        print ("»"*60 + "\n")
    except Exception:
        # best effort: missing nmap binary etc. is deliberately skipped
        pass
    except KeyboardInterrupt:
        print("\n")
        print("[-] User Interruption Detected..!")
        time.sleep(1)
def aug():
    """Print MX/TXT/NS DNS records for `target` and probe common auxiliary
    hosts (okta/webmail/email/slack) derived from the company name."""
    print ("[+] \033[34mMail Servers:\033[0m " + target + "\n")
    time.sleep(0.5)
    # NOTE(review): dns.resolver.query() is deprecated in dnspython 2.x
    # (renamed resolve()); kept for dnspython 1.x compatibility — confirm
    # the pinned dependency version before migrating.
    for sun in dns.resolver.query(target, "MX"):
        print ("\t\033[34m" + (sun.to_text()) + "\033[0m")
    print ("\n" + "»" * 60)
    print ("[+] \033[34mDNS Text Records:\033[0m " + target + "\n")
    time.sleep(0.2)
    for sun in dns.resolver.query(target, "TXT"):
        print ("\t\033[34m" + (sun.to_text()) + "\033[0m")
    print ("\n" + "»" * 60)
    print ("[+] \033[34mNameserver Records:\033[0m " + target + "\n")
    time.sleep(0.2)
    for sun in dns.resolver.query(target, "NS"):
        print ("\t\033[34m" + (sun.to_text()) + "\033[0m")
    print ("\n" + "»" * 60)
    # candidate auxiliary hosts built from the bare company name
    okta = comp + ".okta.com"
    webmail = "webmail." + comp + ".com"
    email = "email." + comp + ".com"
    slack = "%s.slack.com" % comp
    try:
        # a resolvable name (gethostbyname returns a dotted quad, max 15
        # chars) is reported as a host of interest
        if len(socket.gethostbyname(okta)) <= 15:
            print ("\n\t\033[34mHost of interest:\033[0m " + okta)
            time.sleep(0.3)
        if len(socket.gethostbyname(webmail)) <= 15:
            print ("\t\033[34mHost of interest:\033[0m " + webmail)
            time.sleep(0.3)
        if len(socket.gethostbyname(email)) <= 15:
            print ("\t\033[34mHost of interest:\033[0m " + email)
            time.sleep(0.3)
        if len(socket.gethostbyname(slack)) <= 15:
            print ("\t\033[34mHost of interest:\033[0m " + slack + "\n")
            time.sleep(0.3)
    except Exception:
        # unresolvable hosts are simply skipped
        pass
def june():
    """Enumerate subdomains of `target` via the Entrust Certificate
    Transparency search and resolve each discovered host to an IP."""
    print ("»" * 60 + "\n")
    print ("[+] \033[34mSearch Results for: \033[0m%s " % target)
    url = ("https://ctsearch.entrust.com/api/v1/certificates?fields=subjectDN&domain=%s&includeExpired=false&exactMatch=false&limit=5000" % target)
    useragent = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36")
    headers = {"User-Agent": useragent}
    try:
        print ("[+]\033[34m - --> \033[0mThis may take a moment \033[34mBlue Eye\033[0m gathers the data.....\n")
        time.sleep(1)
        response = requests.get(url, headers=headers)
        # entries look like: subjectDN": "cn\u003dhost.example.com
        rain = re.findall(r'subjectDN": "cn\\u003d[a-zA-Z0-9.\-]{1,}', response.text)
        babs = []
        for item in rain:
            xtra = re.sub("subjectDN\": \"cn\\\\u003d", '', item)
            babs.append(xtra)
        dmset = set(babs)
        counter = 0
        print ("")
        for itemm in dmset:
            counter = counter + 1
            print ("%s. %s" % (str(counter), str(itemm)))
            try:
                ns = dns.resolver.query(str(itemm), "A")
                if "dns.resolver" in str(ns):
                    for joo in ns.response.answer:
                        for aug in joo.items:
                            ip = str(aug)
                            print("\t[+]\033[34m %s resolves to: %s\033[0m" % (str(itemm), str(ip)))
            except Exception:
                # unresolvable subdomains are simply listed without an IP
                pass
        print ("")
        print ("\033[34m[+] Total Domains: %s\033[0m" % str(counter))
        print ("»"*60 + "\n")
        time.sleep(1)
    except KeyboardInterrupt:
        # BUGFIX: '\033[0' was missing the trailing 'm' (broken ANSI reset)
        print ("\n\033[1;91m[!]\033[0m User Interruption Detected..!")
        time.sleep(1)
    except Exception:
        pass
# NOTE(review): module-level network calls — these run at import time,
# before main(); consider moving them into list_users().
mainhub = ("https://github.com/%s" % comp)
gitpeople = ("https://github.com/orgs/%s/people" % comp)
response = requests.get(mainhub)
response_text = response.text
resp = requests.get(gitpeople)
respon_text = resp.text
# anchors of member profile links on the org "people" page
listusers = re.findall(r'self\" href=\"[a-zA-Z0-9\-/]{3,}', respon_text)
listuser = []
def list_users():
    """Print the GitHub profile URL of every harvested org member
    (populates the module-level `listuser` consumed by mails())."""
    try:
        for item in listusers:
            x = re.sub("self\" href=\"/", "", item)
            listuser.append(x)
        usersset = set(listuser)
        counter = 0
        if listusers != []:
            print ("\033[34m[+] List of %s github user pages:\033[0m" % target)
            print ("»"*60 + "\n")
            for user in usersset:
                try:
                    counter = counter + 1
                    userpage = ("https://github.com/%s" % user)
                    print (str(counter) + " \t[+] " + "\033[34m " + userpage + "\033[0m")
                except Exception as e:
                    print("Error: %s" % e)
            print ("")
            print ("\033[34m[+] Total Users Found: %s\033[0m" % str(counter))
            print ("»"*60 + "\n")
            time.sleep(1)
    except KeyboardInterrupt:
        print ("\n")
        # BUGFIX: '\033[0' was missing the trailing 'm' (broken ANSI reset)
        print ("\033[1;91m[!]\033[0m User Interruption Detected..!")
        time.sleep(1)
    except Exception:
        pass
def mails():
    """Harvest candidate company e-mail addresses for ``target`` from the
    public GitHub event feeds of the users in ``listuser`` and from a
    DuckDuckGo search, printing every address containing the target domain."""
    listofusers2 = set(listuser)
    try:
        print ("[+] \033[34mList of possible company email addresses harvested")
        print ("from %s github user pages & duckduckgo searches:\033[0m" % target)
        time.sleep(0.5)
        print ("»"*60 + "\n")
        for user in listofusers2:
            userpage = ("https://api.github.com/users/%s/events/public" % user)
            respon = requests.get(userpage)
            respons_text = respon.text
            findemail = re.findall(r'[a-zA-Z0-9-_.]+@[a-zA-Z0-9-_.]+', respons_text)
            if findemail:
                emailset = set(findemail)
                for each in emailset:
                    if target in each:
                        print ("\t[+] \033[34m" + (each) + "\033[0m")
                        time.sleep(0.1)
        searchurl = ("https://duckduckgo.com/html/?q=site%3Alinkedin.com+email+%40%22" + target + '%22')
        webresponse = requests.get(searchurl)
        webresp = webresponse.text
        findem = re.findall(r'[a-zA-Z0-9-_.]+@[a-zA-Z0-9-_.]+', webresp)
        # Fix: the original tested `findem != [1]`, which is always true;
        # a plain truthiness test is the obvious intent (skip when empty).
        if findem:
            setmail = set(findem)
            for each in setmail:
                if target in each:
                    print ("\t[+] \033[34m" + (each) + "\033[0m\n\n")
        time.sleep(1)
        print ("\n\n\t\033[34m[!] I like to See Ya, Hacking\033[0m 😃\n\n")
    except KeyboardInterrupt:
        # Fix: "\033[0" was missing the trailing 'm' (broken ANSI reset).
        print ("\n\033[1;91m[!] User Interruption Detected..!\033[0m")
        time.sleep(1)
        print ("\n\n\t\033[34m[!] I like to See Ya, Hacking\033[0m\n\n")
    except Exception:
        # Best-effort: any other failure still prints the sign-off banner.
        print ("\n\t\033[34m I like to See Ya, Hacking\033[0m 😃\n")
# =====# Main #===== #
if __name__ == "__main__":
    # Run the full reconnaissance pipeline in order; each stage prints its
    # own report (banner, scans, cert scrape, GitHub users, e-mail harvest).
    jan()
    header()
    nmaps()
    aug()
    june()
    list_users()
    mails()
|
import os
import numpy as np
import numpy.random as npr
import PIL
import PIL.ImageOps
import PIL.ImageEnhance
import PIL.Image
import matplotlib
class TransfOps(object):
    '''
    Class to handle the decoding of the strings used with the genetic
    algorithm and all the data transformations.
    Transformation strings are comma-separated pairs of a transformation
    code ('tN') and a level code ('lN_M'), e.g. 't1,l1_3,t4,l4_0'; the
    mappings are built in define_code_correspondances().
    '''
    def __init__(self):
        # Pool used by the 'random_N' mode; 'identity' is intentionally
        # excluded so random sampling always perturbs the image.
        self.transformation_list = ['autocontrast', 'brightness', 'color', 'contrast', 'sharpness', 'solarize', 'grayscale', 'Renhancer', 'Genhancer', 'Benhancer']
        self.define_code_correspondances()
    def decode_string(self, transf_string):
        '''
        Code to decode the string used by the genetic algorithm
        String example: 't1,l1_3,t4,l4_0,t0,l0_1'. First transformation is the one
        associated with index '1', with level set to '3', and so on.
        'random_N' with N integer gives N rnd transformations with rnd levels.
        Returns (transformations, levels) as two parallel lists.
        '''
        if 'random' in transf_string:
            transformations = npr.choice(self.transformation_list, int(transf_string.split('_')[-1]))  # the string is 'random_N'
            levels = [npr.choice(list(self.code_to_level_dict[t].values()), 1)[0] for t in transformations]  # list() to make it compatible with Python3
        else:
            # Even slots are transformation codes, odd slots are level codes.
            transformation_codes = transf_string.split(',')[0::2]
            level_codes = transf_string.split(',')[1::2]
            transformations = [self.code_to_transf(code) for code in transformation_codes]
            levels = [self.code_to_level(transf, level) for transf, level in zip(transformations, level_codes)]
        return transformations, levels
    def transform_dataset(self, dataset, transf_string = 't0,l0_0', transformations=None, levels=None):
        '''
        dataset: set of images, shape should be N x width x height x #channels
        transf_string: transformations and levels encoded in a string
        Returns (transformed_dataset, transformations, levels).
        '''
        if len(dataset.shape) == 3:  # if 'dataset' is a single image
            dataset = np.expand_dims(dataset, 0)
        if dataset.shape[-1] != 3:
            print('Input shape:', str(dataset.shape))
            raise Exception('The images must be in RGB format')
        tr_dataset = np.zeros((dataset.shape))
        if transformations is None:
            # decoding transformation string
            transformations, levels = self.decode_string(transf_string)
        for n, img in enumerate(dataset):
            pil_img = PIL.Image.fromarray(img.astype('uint8'), 'RGB')
            for transf, level in zip(transformations, levels):
                pil_img = self.apply_transformation(pil_img, transf, level)
            tr_dataset[n] = np.array(pil_img)
        return tr_dataset, transformations, levels
    def apply_transformation(self, image, transformation, level):
        '''
        image: PIL image to be transformed
        transformation: type of transformation to be applied
        level: level of the perturbation to be applied
        '''
        if transformation == 'identity':
            return image
        elif transformation == 'autocontrast':
            return PIL.ImageOps.autocontrast(image, cutoff=level)
        elif transformation == 'brightness':
            return PIL.ImageEnhance.Brightness(image).enhance(level)
        elif transformation == 'color':
            return PIL.ImageEnhance.Color(image).enhance(level)
        elif transformation == 'contrast':
            return PIL.ImageEnhance.Contrast(image).enhance(level)
        elif transformation == 'sharpness':
            return PIL.ImageEnhance.Sharpness(image).enhance(level)
        elif transformation == 'solarize':
            return PIL.ImageOps.solarize(image, threshold=level)
        elif transformation == 'grayscale':
            image = PIL.ImageOps.grayscale(image).convert('RGB')
            return image
        elif transformation == 'Renhancer':
            # Shift the red channel by `level`, clamping to [0, 255].
            image = np.array(image).astype(int)
            image[:,:,0] += level
            image[image>255] = 255
            image[image<0] = 0
            image = PIL.Image.fromarray(image.astype('uint8'), 'RGB')
            return image
        elif transformation == 'Genhancer':
            # Shift the green channel by `level`, clamping to [0, 255].
            image = np.array(image).astype(int)
            image[:,:,1] += level
            image[image>255] = 255
            image[image<0] = 0
            image = PIL.Image.fromarray(image.astype('uint8'), 'RGB')
            return image
        elif transformation == 'Benhancer':
            # Shift the blue channel by `level`, clamping to [0, 255].
            image = np.array(image).astype(int)
            image[:,:,2] += level
            image[image>255] = 255
            image[image<0] = 0
            image = PIL.Image.fromarray(image.astype('uint8'), 'RGB')
            return image
    def code_to_transf(self, code):
        '''
        Takes in input a code (e.g., 't0', 't1', ...) and gives in output
        the related transformation.
        '''
        return self.code_to_transf_dict[code]
    def code_to_level(self, transformation, code):
        '''
        Takes in input a transfotmation (e.g., 'invert', 'colorize', ...) and
        a level code (e.g., 'l0_1', 'l1_3', ...) and gives in output the related level.
        '''
        return self.code_to_level_dict[transformation][code]
    def define_code_correspondances(self):
        '''
        Define the correpondances between transformation/level codes
        and the actual types and values.
        '''
        self.code_to_transf_dict = dict()
        # Fix: 't0' -> 'identity' was missing, which made the documented
        # default string 't0,l0_0' raise a KeyError in code_to_transf().
        self.code_to_transf_dict['t0'] = 'identity'
        self.code_to_transf_dict['t1'] = 'autocontrast'
        self.code_to_transf_dict['t2'] = 'brightness'
        self.code_to_transf_dict['t3'] = 'color'
        self.code_to_transf_dict['t4'] = 'contrast'
        self.code_to_transf_dict['t5'] = 'sharpness'
        self.code_to_transf_dict['t6'] = 'solarize'
        self.code_to_transf_dict['t7'] = 'grayscale'
        self.code_to_transf_dict['t8'] = 'Renhancer'
        self.code_to_transf_dict['t9'] = 'Genhancer'
        self.code_to_transf_dict['t10'] = 'Benhancer'
        self.code_to_level_dict = dict()
        for k in self.transformation_list:
            self.code_to_level_dict[k] = dict()
        # identity takes no level; a single dummy code keeps decoding uniform
        self.code_to_level_dict['identity'] = {'l0_0': None}
        # percentages
        self.code_to_level_dict['autocontrast'] = dict()
        for n,l in enumerate(np.linspace(0.0,0.3,20)):
            self.code_to_level_dict['autocontrast']['l1_'+str(n)] = l
        # factors
        self.code_to_level_dict['brightness'] = dict()
        for n,l in enumerate(np.linspace(0.6,1.4,20)):
            self.code_to_level_dict['brightness']['l2_'+str(n)] = l
        # factors
        self.code_to_level_dict['color'] = dict()
        for n,l in enumerate(np.linspace(0.6,1.4,20)):
            self.code_to_level_dict['color']['l3_'+str(n)] = l
        # factors
        self.code_to_level_dict['contrast'] = dict()
        for n,l in enumerate(np.linspace(0.6,1.4,20)):
            self.code_to_level_dict['contrast']['l4_'+str(n)] = l
        # factors
        self.code_to_level_dict['sharpness'] = dict()
        for n,l in enumerate(np.linspace(0.6,1.4,20)):
            self.code_to_level_dict['sharpness']['l5_'+str(n)] = l
        self.code_to_level_dict['solarize'] = dict()
        for n,l in enumerate(np.linspace(0,20,20).astype(int)):
            self.code_to_level_dict['solarize']['l6_'+str(n)] = l
        # grayscale has no level; reuses the dict created in the loop above
        self.code_to_level_dict['grayscale']['l7_0'] = None
        # channel shifts (added to raw 0-255 pixel values)
        self.code_to_level_dict['Renhancer'] = dict()
        for n,l in enumerate(np.linspace(-120,120,30).astype(int)):
            self.code_to_level_dict['Renhancer']['l8_'+str(n)] = l
        self.code_to_level_dict['Genhancer'] = dict()
        for n,l in enumerate(np.linspace(-120,120,30).astype(int)):
            self.code_to_level_dict['Genhancer']['l9_'+str(n)] = l
        self.code_to_level_dict['Benhancer'] = dict()
        for n,l in enumerate(np.linspace(-120,120,30).astype(int)):
            self.code_to_level_dict['Benhancer']['l10_'+str(n)] = l
if __name__=='__main__':
    # Module is intended to be imported; nothing to run standalone.
    pass
|
from flask import g, request
from flask_jsonschema import validate
from app.core import ApiResponse
from app import db
from app.models import Country
from app.api.decorators import json_response
from . import cp
@cp.route('/countries', methods=['GET'])
def get_cp_countries():
    """Return the list of all membership countries.

    **Example request**:
    .. sourcecode:: http
        GET /api/1.0/countries HTTP/1.1
        Host: cp.cert.europa.eu
        Accept: application/json
    **Example response**:
    .. sourcecode:: http
        HTTP/1.0 200 OK
        Content-Type: application/json
        {
          "countries": [
            {"id": 23, "cc": "AT", "name": "Austria"},
            {"id": 24, "cc": "JP", "name": "Japan"}
          ]
        }
    **Example error response**:
    .. sourcecode:: http
        HTTP/1.0 404 NOT FOUND
        Content-Type: application/json
        {
          "message": "Resource not found",
          "status": "not found"
        }
    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: This depends on `Accept` header or request
    :>json array organizations: List of available membership country objects
        For details: :http:get:`/api/1.0/countries/(int:country_id)`
    :status 200: Country endpoint found, response may be empty
    :status 404: Not found
    :status 401: Authorization failure. The client MAY repeat the request with
        a suitable API-Authorization header field. If the request already
        included Authorization credentials, then the 401 response indicates
        that authorization has been refused for those credentials.
    :status 403: Access denied. Authorization will not help and the request
        SHOULD NOT be repeated.
    """
    serialized = [country.serialize() for country in Country.query.all()]
    return ApiResponse({'countries': serialized})
@cp.route('/countries/<int:country_id>', methods=['GET'])
def get_cp_country(country_id):
    """Return the country identified by ``country_id``, or 404.

    **Example request**:
    .. sourcecode:: http
        GET /api/1.0/countries/23 HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json
    **Example response**:
    .. sourcecode:: http
        HTTP/1.0 200 OK
        Content-Type: application/json
        {"id": 23, "cc": "AT", "name": "Austria"}
    :param country_id: Membership role unique ID
    :reqheader Accept: Content type(s) accepted by the client
    :reqheader API-Authorization: API key. If present authentication and
        authorization will be attempted.
    :resheader Content-Type: This depends on `Accept` header or request
    :>json integer id: Membership role unique ID
    :>json string cc: Country code
    :>json string name: Country name
    :status 200: Returns membership role details object
    :status 404: Resource not found
    :status 401: Authorization failure. The client MAY repeat the request with
        a suitable API-Authorization header field. If the request already
        included Authorization credentials, then the 401 response indicates
        that authorization has been refused for those credentials.
    :status 403: Access denied. Authorization will not help and the request
        SHOULD NOT be repeated.
    """
    country = Country.query.get_or_404(country_id)
    return ApiResponse(country.serialize())
|
import csv
import json
import abc
class BaseFormatter(abc.ABC):
    """Common interface for output formatters.

    Subclasses override :meth:`export` and :meth:`print`; the base
    implementations are intentional no-ops that return ``None``.
    """
    def __init__(self, headers, data, export_to):
        # headers: column names (not every formatter uses them)
        # data: the payload to format
        # export_to: destination path for file-based formatters
        self.headers = headers
        self.data = data
        self.export_to = export_to
    def export(self):
        """Write ``self.data`` to a destination; no-op in the base class."""
    def print(self):
        """Render ``self.data`` to stdout; no-op in the base class."""
class CSVFormatter(BaseFormatter):
    """Formatter that writes ``self.data`` (a list of dicts) to a
    pipe-delimited CSV file at ``self.export_to``."""
    def export(self):
        """Write all rows to ``self.export_to`` and return an empty string.

        Field names come from the first row's keys; falls back to
        ``self.headers`` when there is no data (the original raised
        IndexError on an empty list).
        """
        headers = self.data[0].keys() if self.data else self.headers
        # Fix: newline="" is required by the csv module when writing,
        # otherwise blank rows appear on platforms with \r\n line endings.
        with open(f"{self.export_to}", "w", newline="") as file:
            writer = csv.DictWriter(file, delimiter="|", fieldnames=headers)
            writer.writeheader()
            for data in self.data:
                writer.writerow(data)
        print(f"{self.export_to} has been created successfully.")
        return ""
    def print(self):
        """Export first, then echo every parsed line of the generated file."""
        self.export()
        # newline="" is likewise recommended for files read by csv.reader.
        with open(f"{self.export_to}", "r", newline="") as file:
            reader = csv.reader(file, delimiter="|")
            for line in reader:
                print(line)
        return ""
class DictFormatter(BaseFormatter, abc.ABC):
    """Formatter that simply hands back the raw data structure."""
    def export(self):
        """Return ``self.data`` unchanged."""
        return self.data
class JsonFormatter(BaseFormatter):
    """Formatter that serialises ``self.data`` as JSON."""
    def export(self):
        """Write pretty-printed JSON to ``self.export_to``; return the
        compact JSON string (via :meth:`use`)."""
        rendered = json.dumps(
            self.data,
            indent=2,
            ensure_ascii=False,
            sort_keys=False,
            default=str,
        )
        with open(f"{self.export_to}", "w") as handle:
            handle.write(rendered)
        print(f"{self.export_to} has been created successfully.")
        return self.use()
    def use(self):
        """Return the data as a compact JSON string."""
        return json.dumps(self.data, ensure_ascii=False, default=str)
    def print(self):
        """Pretty-print the data as JSON to stdout."""
        pretty = json.dumps(self.data, indent=2, ensure_ascii=False, default=str)
        return print(pretty)
class ConsoleFormatter(BaseFormatter):
    """Formatter that writes straight to stdout instead of a file."""
    def export(self):
        """Console output cannot be exported to a file."""
        raise ValueError("Unusable method.")
    def print(self):
        """Print each list element on its own line, or a dict as a whole."""
        if isinstance(self.data, list):
            for entry in self.data:
                print(entry)
        elif isinstance(self.data, dict):
            print(self.data)
|
# Servirtium: Service Virtualized HTTP
#
# Copyright (c) 2019, Paul Hammant and committers
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the Servirtium project.
import itertools
import json
import os
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
from json import JSONDecodeError
from pyexpat import ExpatError
from xml.dom import minidom
import requests
from servirtium.interaction_recording import InteractionRecording
from servirtium.interactions import Interaction
def _prune_headers(headers, removables):
def _to_be_removed(item):
(key, value) = item
line = f'{key}: {value}'
any([line.startswith(removable) for removable in removables])
return dict(itertools.filterfalse(_to_be_removed, headers.items()))
class Interception:
    """Per-recording configuration: the real service host, the request-header
    overrides to apply, and the response headers to strip."""
    def __init__(self,
                 host: str = "default_host",
                 request_header_overrides: dict = None,
                 response_headers_to_remove: list = None) -> None:
        self.host = host
        self.request_header_overrides = request_header_overrides or {}
        self.response_headers_to_remove = response_headers_to_remove or []
        self.current_recording = InteractionRecording()
    def modified_request_headers(self, new_req_headers):
        """Return a copy of the headers with the configured overrides applied."""
        merged = new_req_headers.copy()
        merged.update(self.request_header_overrides)
        return merged
    def modified_response_headers(self, response):
        """Return the response headers minus the configured removals."""
        return _prune_headers(response.headers, self.response_headers_to_remove)
    def real_service_host(self):
        """Return the configured host with any http/https scheme stripped."""
        without_http = self.host.replace('http://', '')
        return without_http.replace('https://', '')
# noinspection PyPep8Naming
class RecorderHttpHandler(BaseHTTPRequestHandler):
    """HTTP handler that proxies each request to the real service, replays
    the (URL-rewritten) response to the client, and appends the interaction
    to a Servirtium markdown recording.

    Configuration lives in class attributes because http.server instantiates
    the handler per request without constructor arguments.
    """
    interception = Interception()          # shared recording configuration/state
    markdown_filename = 'default_method'   # basename of the .md recording file
    mocks_dir = os.getcwd()                # directory the recording is written to
    pretty = False                         # pretty-print JSON/XML bodies when recording
    @staticmethod
    def pretty_print_json_or_xml():
        """Enable pretty-printing of recorded request/response bodies."""
        RecorderHttpHandler.pretty = True
    @staticmethod
    def set_mocks_dir(mocks_dir):
        """Set the directory recordings are written to."""
        RecorderHttpHandler.mocks_dir = mocks_dir
    @staticmethod
    def set_markdown_filename(markdown_filename):
        """Select the markdown file to record into, resetting recording
        state and removing a stale file of the same name."""
        RecorderHttpHandler.markdown_filename = markdown_filename
        # NOTE(review): this sets `current_recording` on the handler class,
        # but recording below goes through `interception.current_recording` —
        # confirm whether this attribute is read anywhere.
        RecorderHttpHandler.current_recording = InteractionRecording()
        md_path = RecorderHttpHandler.mocks_dir + os.sep + RecorderHttpHandler.markdown_filename + ".md"
        if os.path.exists(md_path):
            os.remove(md_path)
    # TODO - should override handle() of http.server
    # instead of do_GET() of BaseHTTPRequestHandler
    def do_GET(self):
        self.process_request("\n")
    def do_DELETE(self):
        self.process_request("\n")
    def do_OPTIONS(self):
        self.process_request("\n")
    def do_POST(self):
        self.process_request_with_body()
    def do_PATCH(self):
        self.process_request_with_body()
    def do_PUT(self):
        self.process_request_with_body()
    def process_request_with_body(self):
        """Read the request body (per Content-Length) and process it."""
        length = int(self.headers['Content-Length'])
        raw_body = self.rfile.read(length)
        print("r typ " + str(type(raw_body)))
        self.process_request(raw_body.decode("utf-8"))
    def prettifyXML(self, xml_string):
        """Return the XML re-indented; raises ExpatError for non-XML input."""
        return minidom.parseString(xml_string).toprettyxml(indent="   ")
    def process_request(self, request_body):
        """Forward the request to the real service, echo the response back to
        the client (rewriting the service host to localhost:61417), and append
        the interaction to the markdown recording."""
        if RecorderHttpHandler.pretty and len(request_body) > 1:
            print("request_body>" + request_body + "<")
            try:
                request_body = json.dumps(json.loads(request_body), indent=2)
            except JSONDecodeError:
                pass
            try:
                request_body = self.prettifyXML(request_body)
            except ExpatError:
                pass
        new_req_headers = dict(self.headers.items())
        new_req_headers.update({'Host': self.interception.real_service_host()})
        response = self.perform_request_on_real_service(new_req_headers, request_body)
        self.send_response(response.status_code)
        # NOTE(review): the rewrite source host is hard-coded to the sinatra
        # todo-backend demo — it should probably derive from interception.host.
        ctt = str(response.content, 'utf-8').replace("https://todo-backend-sinatra.herokuapp.com", "http://localhost:61417") \
            .replace("todo-backend-sinatra.herokuapp.com", "localhost:61417").encode("utf-8")
        # Always send Content-Length header and skip Transfer-Encoding header as the response is not chunked
        self.send_header("Content-Length", str(len(ctt)))
        for name, value in sorted(response.headers.items()):
            value = value.replace("https://todo-backend-sinatra.herokuapp.com", "http://localhost:61417")\
                .replace("todo-backend-sinatra.herokuapp.com", "localhost:61417")
            if name != 'Transfer-Encoding':  # skip Transfer-Encoding as we are setting Content-Length
                # TODO: add support for Transfer-Encoding = chunked
                self.send_header(name, value)
        self.end_headers()
        # Fix: write the rewritten body `ctt`, whose length was just advertised
        # in Content-Length. The original wrote response.content, so the two
        # disagreed whenever URL rewriting changed the body length.
        self.wfile.write(ctt)
        rsp_body = str(response.content, encoding='utf-8')
        if RecorderHttpHandler.pretty and len(rsp_body) > 1:
            try:
                rsp_body = json.dumps(json.loads(rsp_body), indent=2)
            except JSONDecodeError:
                pass
        RecorderHttpHandler.interception.current_recording.add_interaction(
            Interaction(http_verb=self.command,
                        request_headers=self.interception.modified_request_headers(new_req_headers),
                        request_body=request_body,
                        request_path=self.path,
                        response_headers=self.interception.modified_response_headers(response),
                        response_body=rsp_body,
                        response_code=response.status_code))
        try:
            os.mkdir(RecorderHttpHandler.mocks_dir)
        except FileExistsError:
            pass
        md_path = RecorderHttpHandler.mocks_dir + os.sep + RecorderHttpHandler.markdown_filename + ".md"
        # Context manager guarantees the file is closed even if write() raises.
        with open(md_path, "a") as f:
            f.write(RecorderHttpHandler.interception.current_recording.last_interaction_to_markdown_string())
    def perform_request_on_real_service(self, new_req_headers, request_body):
        """Replay the incoming request against the real service; GET requests
        are sent without a body."""
        url = RecorderHttpHandler.interception.host + self.path
        if self.command == "GET":
            return requests.request(self.command, url, headers=new_req_headers)
        return requests.request(self.command, url, headers=new_req_headers, data=request_body)
def set_mocks_dir(mocks_dir):
    """Module-level convenience: set the directory recordings are written to."""
    RecorderHttpHandler.set_mocks_dir(mocks_dir)
def set_markdown_filename(filename):
    """Select the markdown recording file (also resets recording state)."""
    RecorderHttpHandler.set_markdown_filename(filename)
def set_real_service(host):
    """Point the recorder at the real service URL to proxy."""
    RecorderHttpHandler.interception.host = host
def pretty_print_json_or_xml():
    """Pretty-print JSON/XML bodies in recordings."""
    RecorderHttpHandler.pretty_print_json_or_xml()
def set_request_header_replacements(replacements):
    """Set header overrides applied to recorded requests."""
    RecorderHttpHandler.interception.request_header_overrides = replacements
def set_response_header_removals(removals):
    """Set header prefixes stripped from recorded responses."""
    RecorderHttpHandler.interception.response_headers_to_remove = removals
def start():
    """Start the recording proxy on localhost:61417 and serve forever.

    Raises:
        RuntimeError: if the port is already bound by another process.
        OSError: for any other socket-level failure.
    """
    server_address = ('localhost', 61417)
    try:
        httpd = HTTPServer(server_address, RecorderHttpHandler)
    except OSError as e:
        # Fix: the original used `assert False, ...`, which is silently
        # stripped when Python runs with -O; raise a real exception instead.
        if "Address already in use" in str(e):
            raise RuntimeError(
                "Address 'localhost:61417' is in use already - can't start recorder"
            ) from e
        raise
    httpd.serve_forever()
if __name__ == "__main__":
    # Launch the recorder when run as a script.
    start()
|
from gama import GamaCluster
if __name__ == "__main__":
    # Template for the train/test ARFF files; .format("train"/"test")
    # fills in the split name.
    file_path = "../tests/data/breast_cancer_{}.arff"
    automl = GamaCluster(max_total_time=180, store="nothing", n_jobs=1)
    print("Starting `fit` which will take roughly 3 minutes.")
    automl.fit_from_file(file_path.format("train"))
    label_predictions = automl.predict_from_file(file_path.format("test"))
|
#!/usr/bin/python
import argparse
import arguments
import logconfig
import session
from scaffold.iam.cf_builder import IAMBuilder
def create_stack(args):
    """Build (or dry-run) the IAM CloudFormation stack described by ``args``."""
    aws_session = session.new(args.profile, args.region, args.role)
    stack_builder = IAMBuilder(args, aws_session, False)
    return stack_builder.build(args.dry_run)
# Defaults for the optional stack-definition arguments parsed in get_args().
default_desc = 'IAM Stack'
default_bucket = 'thousandleaves-iam'
def get_args():
    """Parse command-line arguments for the IAM stack creation script."""
    def str2bool(value):
        # Fix: argparse's `type=bool` treats ANY non-empty string (including
        # "False") as True; parse common truthy spellings explicitly while
        # keeping the `--enable VALUE` command-line shape.
        return str(value).lower() in ('true', 't', 'yes', 'y', '1')
    ap = argparse.ArgumentParser(description='Create a CloudFormation stack for logging IAM activity and API calls to an S3 bucket',
                                 add_help=False)
    req = ap.add_argument_group('Required arguments')
    req.add_argument("stack_name",
                     help='Name of the iam stack to create')
    st = ap.add_argument_group('Stack definitions')
    st.add_argument('--desc', default=default_desc,
                    help=arguments.generate_help('Stack description.', default_desc))
    st.add_argument('--bucket', default=default_bucket,
                    help=arguments.generate_help('Bucket name', default_bucket))
    st.add_argument('--enable', type=str2bool, default=False,
                    help='Enable API logging. Defaults to False')
    arguments.add_deployment_group(ap)
    arguments.add_security_control_group(ap)
    return ap.parse_args()
if __name__ == "__main__":
    # NOTE: this script uses Python 2 print statements throughout.
    logconfig.config()
    args = get_args()
    results = create_stack(args)
    # TODO: move these to logging messages
    if results.dry_run:
        # Dry runs only render the template; nothing is deployed.
        print results.template
    else:
        print 'ID: ', results.stack.stack_id
        print 'STATUS: ', results.stack.stack_status
        if results.stack.stack_status_reason is not None:
            print 'REASON: ', results.stack.stack_status_reason
|
import os
import shutil
import sys
from typing import (
Any,
cast,
ClassVar,
Generic,
List,
Optional,
Type,
TYPE_CHECKING,
TypeVar,
Union,
)
import wandb
from wandb import util
from wandb.sdk.interface.artifacts import b64_string_to_hex, md5_files_b64
from ._private import MEDIA_TMP
from .base_types.wb_value import WBValue
if TYPE_CHECKING: # pragma: no cover
from wandb.apis.public import Artifact as PublicArtifact
from ..wandb_artifacts import Artifact as LocalArtifact
from ..wandb_run import Run as LocalRun
import cloudpickle # type: ignore
import torch # type: ignore
import sklearn # type: ignore
import tensorflow # type: ignore
# When True, _SavedModel._maybe_init prints why each subclass constructor failed.
DEBUG_MODE = False
def _add_deterministic_dir_to_artifact(
    artifact: "LocalArtifact", dir_name: str, target_dir_root: str
) -> str:
    """Add ``dir_name`` to ``artifact`` under a content-addressed directory
    name (first 20 hex chars of the md5 over all contained files) and
    return the target path used."""
    all_files = [
        os.path.join(parent, name)
        for parent, _, names in os.walk(dir_name, topdown=True)
        for name in names
    ]
    digest = b64_string_to_hex(md5_files_b64(all_files))[:20]
    target_path = util.to_forward_slash_path(os.path.join(target_dir_root, digest))
    artifact.add_dir(dir_name, target_path)
    return target_path
def _load_dir_from_artifact(source_artifact: "PublicArtifact", path: str) -> str:
    """Download every manifest entry under ``path`` and return the local
    directory the files were placed in."""
    dl_path = None
    # Walk the full manifest: download each entry under `path`, inferring the
    # local root directory from the first downloaded file's location.
    for entry_path in source_artifact.manifest.entries:
        if not entry_path.startswith(path):
            continue
        example_path = source_artifact.get_path(entry_path).download()
        if dl_path is None:
            root = example_path[: -len(entry_path)]
            dl_path = os.path.join(root, path)
    assert dl_path is not None, f"Could not find directory {path} in artifact"
    return dl_path
# Type of the in-memory model object wrapped by a _SavedModel subclass.
SavedModelObjType = TypeVar("SavedModelObjType")
class _SavedModel(WBValue, Generic[SavedModelObjType]):
    """SavedModel is a private data type that can be used to store a model object
    inside of a W&B Artifact.

    Class attributes (set by concrete subclasses):
        _log_type: (str) JSON "type" tag used when (de)serializing to an artifact.
        _path_extension: (str) file extension for serialized models; an empty
            string means the model serializes to a directory instead of a file.
    Instance attributes:
        _model_obj: the in-memory model object, or None when not loaded.
        _path: local path holding the serialized copy of the model.
        _input_obj_or_path: the raw constructor argument (object or path).
    """
    _log_type: ClassVar[str]
    _path_extension: ClassVar[str]
    _model_obj: Optional["SavedModelObjType"]
    _path: Optional[str]
    _input_obj_or_path: Union[SavedModelObjType, str]
    # Public Methods
    def __init__(
        self, obj_or_path: Union[SavedModelObjType, str], **kwargs: Any
    ) -> None:
        """Wrap a model object, or a path to a serialized model, as a media type.

        Raises TypeError when instantiated directly (subclass-only); the
        subclass _deserialize/_validate_obj hooks may raise on invalid input.
        """
        super().__init__()
        if self.__class__ == _SavedModel:
            raise TypeError(
                "Cannot instantiate abstract SavedModel class - please use SavedModel.init(...) instead."
            )
        self._model_obj = None
        self._path = None
        self._input_obj_or_path = obj_or_path
        # A string argument is only treated as a path if it exists on disk.
        input_is_path = isinstance(obj_or_path, str) and os.path.exists(obj_or_path)
        if input_is_path:
            assert isinstance(obj_or_path, str)  # mypy
            self._set_obj(self._deserialize(obj_or_path))
        else:
            self._set_obj(obj_or_path)
        self._copy_to_disk()
        # At this point, the model will be saved to a temp path,
        # and self._path will be set to such temp path. If the model
        # provided was a path, then both self._path and self._model_obj
        # are copies of the user-provided data. However, if the input
        # was a model object, then we want to clear the model object. The first
        # accessing of the model object (via .model_obj()) will load the model
        # from the temp path.
        if not input_is_path:
            self._unset_obj()
    @staticmethod
    def init(obj_or_path: Any, **kwargs: Any) -> "_SavedModel":
        """Construct the appropriate _SavedModel subclass for the input,
        raising ValueError when no subclass accepts it."""
        maybe_instance = _SavedModel._maybe_init(obj_or_path, **kwargs)
        if maybe_instance is None:
            raise ValueError(
                f"No suitable SavedModel subclass constructor found for obj_or_path: {obj_or_path}"
            )
        return maybe_instance
    @classmethod
    def from_json(
        cls: Type["_SavedModel"], json_obj: dict, source_artifact: "PublicArtifact"
    ) -> "_SavedModel":
        """Rebuild a _SavedModel from its artifact JSON by downloading the
        serialized file (or directory) and deserializing it."""
        path = json_obj["path"]
        # First, if the entry is a file, the download it.
        entry = source_artifact.manifest.entries.get(path)
        if entry is not None:
            dl_path = source_artifact.get_path(path).download()
        else:
            # If not, assume it is directory.
            # FUTURE: Add this functionality to the artifact loader
            # (would be nice to parallelize)
            dl_path = _load_dir_from_artifact(source_artifact, path)
        # Return the SavedModel object instantiated with the downloaded path
        # and specified adapter.
        return cls(dl_path)
    def to_json(self, run_or_artifact: Union["LocalRun", "LocalArtifact"]) -> dict:
        """Add the serialized model to the artifact and return its JSON stub."""
        # Unlike other data types, we do not allow adding to a Run directly. There is a
        # bit of tech debt in the other data types which requires the input to `to_json`
        # to accept a Run or Artifact. However, Run additions should be deprecated in the future.
        # This check helps ensure we do not add to the debt.
        if isinstance(run_or_artifact, wandb.wandb_sdk.wandb_run.Run):
            raise ValueError("SavedModel cannot be added to run - must use artifact")
        artifact = run_or_artifact
        json_obj = {
            "type": self._log_type,
        }
        assert self._path is not None, "Cannot add SavedModel to Artifact without path"
        if os.path.isfile(self._path):
            # If the path is a file, then we can just add it to the artifact,
            # First checking to see if the artifact already has the file (use the cache)
            # Else, add it directly, allowing the artifact adder to rename the file deterministically.
            already_added_path = artifact.get_added_local_path_name(self._path)
            if already_added_path is not None:
                json_obj["path"] = already_added_path
            else:
                target_path = os.path.join(
                    ".wb_data", "saved_models", os.path.basename(self._path)
                )
                json_obj["path"] = artifact.add_file(self._path, target_path, True).path
        elif os.path.isdir(self._path):
            # If the path is a directory, then we need to add all of the files
            # The directory must be named deterministically based on the contents of the directory,
            # but the files themselves need to have their name preserved.
            # FUTURE: Add this functionality to the artifact adder itself
            json_obj["path"] = _add_deterministic_dir_to_artifact(
                artifact, self._path, os.path.join(".wb_data", "saved_models")
            )
        else:
            raise ValueError(
                f"Expected a path to a file or directory, got {self._path}"
            )
        return json_obj
    def model_obj(self) -> SavedModelObjType:
        """Returns the model object, lazily deserializing from self._path."""
        if self._model_obj is None:
            assert self._path is not None, "Cannot load model object without path"
            self._set_obj(self._deserialize(self._path))
        assert self._model_obj is not None, "Model object is None"
        return self._model_obj
    # Methods to be implemented by subclasses
    @staticmethod
    def _deserialize(path: str) -> SavedModelObjType:
        """Returns the model object from a path. Allowed to throw errors"""
        raise NotImplementedError()
    @staticmethod
    def _validate_obj(obj: Any) -> bool:
        """Validates the model object. Allowed to throw errors"""
        raise NotImplementedError()
    @staticmethod
    def _serialize(obj: SavedModelObjType, dir_or_file_path: str) -> None:
        """Save the model to disk. The method will receive a directory path which all
        files needed for deserialization should be saved. A directory will always be passed if
        _path_extension is an empty string, else a single file will be passed. Allowed to throw errors
        """
        raise NotImplementedError()
    # Private Class Methods
    @classmethod
    def _maybe_init(
        cls: Type["_SavedModel"], obj_or_path: Any, **kwargs: Any
    ) -> Optional["_SavedModel"]:
        # _maybe_init is an exception-safe method that will return an instance of this class
        # (or any subclass of this class - recursively) OR None if no subclass constructor is found.
        # We first try the current class, then recursively call this method on children classes. This pattern
        # conforms to the new "Weave-type" pattern developed by Shawn. This way, we can for example have a
        # pytorch subclass that can itself have two subclasses: one for a TorchScript model, and one for a PyTorch model.
        # The children subclasses will know how to serialize/deserialize their respective payloads, but the pytorch
        # parent class can know how to execute inference on the model - regardless of serialization strategy.
        try:
            return cls(obj_or_path, **kwargs)
        except Exception as e:
            # A constructor failure just means "this subclass doesn't apply";
            # the reason is surfaced only in DEBUG_MODE.
            if DEBUG_MODE:
                print(f"{cls}._maybe_init({obj_or_path}) failed: {e}")
            pass
        for child_cls in cls.__subclasses__():
            maybe_instance = child_cls._maybe_init(obj_or_path, **kwargs)
            if maybe_instance is not None:
                return maybe_instance
        return None
    @classmethod
    def _tmp_path(cls: Type["_SavedModel"]) -> str:
        # Generates a tmp path under our MEDIA_TMP directory which confirms to the file
        # or folder preferences of the class.
        assert isinstance(cls._path_extension, str), "_path_extension must be a string"
        tmp_path = os.path.abspath(
            os.path.join(MEDIA_TMP.name, str(util.generate_id()))
        )
        if cls._path_extension != "":
            tmp_path += "." + cls._path_extension
        return tmp_path
    # Private Instance Methods
    def _copy_to_disk(self) -> None:
        # Creates a temporary path and writes a fresh copy of the
        # model to disk - updating the _path appropriately.
        tmp_path = self._tmp_path()
        self._dump(tmp_path)
        self._path = tmp_path
    def _unset_obj(self) -> None:
        # Drop the in-memory object; model_obj() will reload it from _path.
        assert self._path is not None, "Cannot unset object if path is None"
        self._model_obj = None
    def _set_obj(self, model_obj: Any) -> None:
        # Validate before storing; subclasses define what "valid" means.
        assert model_obj is not None and self._validate_obj(
            model_obj
        ), f"Invalid model object {model_obj}"
        self._model_obj = model_obj
    def _dump(self, target_path: str) -> None:
        # Serialize the in-memory object to target_path via the subclass hook.
        assert self._model_obj is not None, "Cannot dump if model object is None"
        self._serialize(self._model_obj, target_path)
def _get_cloudpickle() -> "cloudpickle":
    # Lazily import cloudpickle via wandb.util so the dependency is only
    # required when a pickling-based SavedModel is actually used.
    return cast(
        "cloudpickle",
        util.get_module("cloudpickle", "ModelAdapter requires `cloudpickle`"),
    )
# TODO: Add pip deps
# TODO: potentially move this up to the saved model class
# NOTE(review): this TypeVar appears unused here — _PicklingSavedModel below
# reuses SavedModelObjType; confirm before removing.
PicklingSavedModelObjType = TypeVar("PicklingSavedModelObjType")
class _PicklingSavedModel(_SavedModel[SavedModelObjType]):
    """Abstract SavedModel flavor whose serialization is pickle-based and
    which may carry extra .py dependency files needed to unpickle the model.

    _dep_py_files: user-supplied python files/dirs the pickle depends on.
    _dep_py_files_path: temp dir where those dependencies are staged.
    """
    _dep_py_files: Optional[List[str]] = None
    _dep_py_files_path: Optional[str] = None
    def __init__(
        self,
        obj_or_path: Union[SavedModelObjType, str],
        dep_py_files: Optional[List[str]] = None,
    ):
        super().__init__(obj_or_path)
        if self.__class__ == _PicklingSavedModel:
            raise TypeError(
                "Cannot instantiate abstract _PicklingSavedModel class - please use SavedModel.init(...) instead."
            )
        if dep_py_files is not None and len(dep_py_files) > 0:
            self._dep_py_files = dep_py_files
            # Stage all dependency files in one fresh temp dir so they can be
            # added to the artifact (and later put on sys.path) as a unit.
            self._dep_py_files_path = os.path.abspath(
                os.path.join(MEDIA_TMP.name, str(util.generate_id()))
            )
            os.makedirs(self._dep_py_files_path, exist_ok=True)
            for extra_file in self._dep_py_files:
                if os.path.isfile(extra_file):
                    shutil.copy(extra_file, self._dep_py_files_path)
                elif os.path.isdir(extra_file):
                    shutil.copytree(
                        extra_file,
                        os.path.join(
                            self._dep_py_files_path, os.path.basename(extra_file)
                        ),
                    )
                else:
                    raise ValueError(f"Invalid dependency file: {extra_file}")
    @classmethod
    def from_json(
        cls: Type["_SavedModel"], json_obj: dict, source_artifact: "PublicArtifact"
    ) -> "_PicklingSavedModel":
        """Reconstruct the model, temporarily extending sys.path with the
        downloaded dependency files so unpickling can resolve them."""
        backup_path = [p for p in sys.path]
        try:
            if (
                "dep_py_files_path" in json_obj
                and json_obj["dep_py_files_path"] is not None
            ):
                dl_path = _load_dir_from_artifact(
                    source_artifact, json_obj["dep_py_files_path"]
                )
                assert dl_path is not None
                sys.path.append(dl_path)
            inst = super().from_json(json_obj, source_artifact)  # type: ignore
        finally:
            # Fix: restore sys.path even when download/deserialization raises;
            # previously a failure here permanently leaked the path change.
            sys.path = backup_path
        return inst  # type: ignore
    def to_json(self, run_or_artifact: Union["LocalRun", "LocalArtifact"]) -> dict:
        """Serialize via the parent, then attach the staged dependency dir."""
        json_obj = super().to_json(run_or_artifact)
        assert isinstance(run_or_artifact, wandb.wandb_sdk.wandb_artifacts.Artifact)
        if self._dep_py_files_path is not None:
            json_obj["dep_py_files_path"] = _add_deterministic_dir_to_artifact(
                run_or_artifact,
                self._dep_py_files_path,
                os.path.join(".wb_data", "extra_files"),
            )
        return json_obj
def _get_torch() -> "torch":
    """Lazily import and return torch, with a helpful error if missing."""
    module = util.get_module("torch", "ModelAdapter requires `torch`")
    return cast("torch", module)
class _PytorchSavedModel(_PicklingSavedModel["torch.nn.Module"]):
    # Log-type identifier and on-disk file extension for PyTorch models.
    _log_type = "pytorch-model-file"
    _path_extension = "pt"
    @staticmethod
    def _deserialize(dir_or_file_path: str) -> "torch.nn.Module":
        # torch.load reconstructs the full module object (not a state_dict).
        return _get_torch().load(dir_or_file_path)
    @staticmethod
    def _validate_obj(obj: Any) -> bool:
        # Accept any torch.nn.Module instance.
        return isinstance(obj, _get_torch().nn.Module)
    @staticmethod
    def _serialize(model_obj: "torch.nn.Module", dir_or_file_path: str) -> None:
        # cloudpickle can serialize locally-defined classes that plain pickle
        # cannot, hence the explicit pickle_module.
        _get_torch().save(
            model_obj,
            dir_or_file_path,
            pickle_module=_get_cloudpickle(),
        )
def _get_sklearn() -> "sklearn":
    # Lazily import sklearn, raising a helpful error if it is missing.
    return cast(
        "sklearn",
        util.get_module("sklearn", "ModelAdapter requires `sklearn`"),
    )
class _SklearnSavedModel(_PicklingSavedModel["sklearn.base.BaseEstimator"]):
    """Pickling-based SavedModel for scikit-learn estimators (.pkl files)."""
    _log_type = "sklearn-model-file"
    _path_extension = "pkl"
    @staticmethod
    def _deserialize(
        dir_or_file_path: str,
    ) -> "sklearn.base.BaseEstimator":
        # Estimators are cloudpickled to a single .pkl file.
        with open(dir_or_file_path, "rb") as pickled_file:
            return _get_cloudpickle().load(pickled_file)
    @staticmethod
    def _validate_obj(obj: Any) -> bool:
        # An object qualifies if sklearn recognises it as any estimator kind.
        sk_base = _get_sklearn().base
        is_estimator = (
            sk_base.is_classifier(obj)
            or sk_base.is_outlier_detector(obj)
            or sk_base.is_regressor(obj)
        )
        return cast(bool, is_estimator)
    @staticmethod
    def _serialize(
        model_obj: "sklearn.base.BaseEstimator", dir_or_file_path: str
    ) -> None:
        with open(dir_or_file_path, "wb") as pickled_file:
            _get_cloudpickle().dump(model_obj, pickled_file)
def _get_tf_keras() -> "tensorflow.keras":
    # Lazily import tensorflow and return its keras submodule.
    return cast(
        "tensorflow",
        util.get_module("tensorflow", "ModelAdapter requires `tensorflow`"),
    ).keras
class _TensorflowKerasSavedModel(_SavedModel["tensorflow.keras.Model"]):
    # Uses TF's native SavedModel format, which is directory-based -
    # hence the empty path extension.
    _log_type = "tfkeras-model-file"
    _path_extension = ""
    @staticmethod
    def _deserialize(
        dir_or_file_path: str,
    ) -> "tensorflow.keras.Model":
        return _get_tf_keras().models.load_model(dir_or_file_path)
    @staticmethod
    def _validate_obj(obj: Any) -> bool:
        return isinstance(obj, _get_tf_keras().models.Model)
    @staticmethod
    def _serialize(model_obj: "tensorflow.keras.Model", dir_or_file_path: str) -> None:
        # include_optimizer preserves optimizer/training state with the weights.
        _get_tf_keras().models.save_model(
            model_obj, dir_or_file_path, include_optimizer=True
        )
|
# Copyright (C) 2019 Matthew James Harrison
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import os
import shutil
import sys
from collections import namedtuple
from hashlib import md5
from zipfile import ZipFile
# Path of the JSON file describing all mods to process.
MOD_DEFINITION_PATH = 'mods.json'
# Output directory used by the 'deploy' command.
DEPLOY_TARGET = 'target'
# Immutable record describing one mod and how it should be packaged/installed.
Mod = namedtuple('Mod', [
    'game_title',
    'mod_title',
    'is_pk4',
    'sources',
    'base_path',
    'game',
    'zip_name',
    'pk4_name',
    'should_deploy',
    'should_install'
])
def get_source_paths(source):
    """Return the paths of all files under *source*, walked recursively."""
    paths = []
    for root, _dirs, files in os.walk(source):
        for file_name in files:
            paths.append(os.path.join(root, file_name))
    return paths
def mod_definition_decoder(obj):
    """json object_hook turning each decoded dict into a Mod record.

    Raises KeyError when a required field is missing, exactly like the
    per-field lookups it replaces.
    """
    return Mod(**{field: obj[field] for field in Mod._fields})
def zip_sources(source_data, zip_dir_path, zip_name, zip_type, container_dir=None):
    """Zip every (source_root, file_paths) pair into zip_dir_path/zip_name.zip_type.

    Archive names are each file's path relative to its source root, optionally
    nested under container_dir. Any pre-existing archive is replaced.
    """
    if not os.path.exists(zip_dir_path):
        os.makedirs(zip_dir_path)
    zip_path = os.path.join(zip_dir_path, zip_name + '.' + zip_type)
    if os.path.exists(zip_path):
        os.remove(zip_path)
    # Fix: use a context manager so the archive is closed (and flushed) even
    # if a write raises; previously an exception leaked the open ZipFile.
    with ZipFile(zip_path, 'w') as z:
        for source, source_paths in source_data:
            zip_paths = [
                (x, os.path.relpath(x, source))
                for x in source_paths
            ] if not container_dir else [
                (x, os.path.join(container_dir, os.path.relpath(x, source)))
                for x in source_paths
            ]
            for src, arc in zip_paths:
                try:
                    z.write(src, arcname=arc)
                except UserWarning:
                    # Deliberate best-effort: skip entries zipfile objects to
                    # rather than aborting the whole archive.
                    pass
def copy_sources(source_data, copy_dir_path):
    """Copy every (source_root, file_paths) pair into copy_dir_path.

    Relative layout under each source root is preserved; a destination file
    whose MD5 already matches the source is left untouched.
    """
    for source, source_paths in source_data:
        for src in source_paths:
            dst = os.path.join(copy_dir_path, os.path.relpath(src, source))
            parent = os.path.dirname(dst)
            if not os.path.exists(parent):
                os.makedirs(parent)
            elif os.path.isfile(dst):
                with open(src, 'rb') as f_src, open(dst, 'rb') as f_dst:
                    identical = (md5(f_src.read()).hexdigest()
                                 == md5(f_dst.read()).hexdigest())
                if identical:
                    # Content unchanged - skip the copy entirely.
                    continue
                os.remove(dst)
            shutil.copy(src, dst)
def display_help():
    # Print CLI usage; each line pairs a command with its description.
    print('usage: python autopak.py [install] [deploy] [help]\n')
    print('install ', 'copies mods into their respective install locations, zipping them into pk4 files as needed')
    print('deploy  ', 'packages mods into zip and pk4 files located in the targets/ directory of this project')
    print('help    ', 'displays this screen')
def install(mod):
    # Install *mod* into its game directory: as a single .pk4 archive when
    # mod.is_pk4 is set, otherwise as a plain recursive file copy.
    # `destination` is only used for the log line below; joining with ''
    # leaves a trailing separator for non-pk4 mods.
    destination = os.path.join(mod.base_path, mod.game,
        mod.pk4_name + '.pk4' if mod.is_pk4 and mod.pk4_name else '')
    print('Installing', mod.mod_title, 'for', mod.game_title, 'to', destination)
    # Pair each source root with the list of files beneath it.
    source_data = [
        (source, get_source_paths(source))
        for source in mod.sources
    ]
    if mod.is_pk4:
        zip_sources(source_data, os.path.join(mod.base_path, mod.game),
            mod.pk4_name, 'pk4')
    else:
        copy_sources(source_data, os.path.join(mod.base_path, mod.game))
def deploy(mod):
    # Package *mod* into the project's DEPLOY_TARGET directory: a .pk4 for
    # pk4 mods, otherwise a .zip with files nested under the game directory.
    print('Deploying', mod.mod_title, 'for', mod.game_title)
    # Pair each source root with the list of files beneath it.
    source_data = [
        (source, get_source_paths(source))
        for source in mod.sources
    ]
    if mod.is_pk4:
        zip_sources(source_data, DEPLOY_TARGET, mod.pk4_name, 'pk4')
    else:
        zip_sources(source_data, DEPLOY_TARGET, mod.zip_name, 'zip',
            container_dir=mod.game)
def main():
    """Entry point: dispatch install/deploy/help based on the single CLI arg."""
    args = sys.argv
    if len(args) != 2:
        display_help()
        sys.exit(1)
    goal = args[1]
    if goal == 'help':
        # Fix: handle 'help' (and bad commands) before reading mods.json, so
        # help works even without a definitions file; previously both were
        # evaluated once per mod inside the loop.
        display_help()
        sys.exit(0)
    if goal not in ('install', 'deploy'):
        print('autopak:\'' + goal + '\'', 'is not a valid command. Run \'autopak help\' using Python 3')
        sys.exit(1)
    # Fix: context manager closes the definitions file (previously leaked).
    with open(MOD_DEFINITION_PATH, 'r') as mod_file:
        mod_defs = json.loads(mod_file.read(),
                              object_hook=mod_definition_decoder)
    for mod in mod_defs:
        if goal == 'install':
            if mod.should_install:
                install(mod)
        elif goal == 'deploy':
            if mod.should_deploy:
                deploy(mod)
if __name__ == '__main__':
    main()
from __future__ import division #Make integer 3/2 give 1.5 in python 2.x
from CoolProp.CoolProp import PropsSI
from Correlations import Tsat
class PumpClass():
    """
    Pump Model based on correlations obtained from experimental results

    Inputs are supplied as keyword arguments to __init__/Update and stored
    directly on the instance (W, Ref, pin_r, pout_r, N, N_rated, slope_min/max,
    intercept_min/max, p_min/max, Tin). Pressures are multiplied by 1000
    before being passed to PropsSI, i.e. they are expected in kPa; Tin is
    expected in degrees Celsius.
    """
    def __init__(self,**kwargs):
        #Load up the parameters passed in
        # using the dictionary
        self.__dict__.update(kwargs)
    def Update(self,**kwargs):
        #Update the parameters passed in
        # using the dictionary
        self.__dict__.update(kwargs)
    def OutputList(self):
        """
        Return a list of parameters for this component for further output
        It is a list of tuples, and each tuple is formed of items with indices:
        [0] Description of value
        [1] Units of value
        [2] The value itself
        """
        return []
    def Calculate(self):
        #Local copies of coefficients
        W=self.W
        #Power
        #Compute the pressure difference between the outlet and the inlet of the pump
        self.DELTAP=self.pout_r-self.pin_r
        #Get the rated power for all pressure differences
        W_dot_rated=W[0]*self.DELTAP+W[1]
        #Speed ratio
        N_ratio=self.N/self.N_rated
        #Get the estimation of the power
        self.W_dot=(W[2]*N_ratio+W[3])*W_dot_rated
        #Mass flow rate
        #Define the slope of the line corresponding to the exhaust temperature as a linear interpolation of the minimum and maximum slope
        slope=(self.slope_max-self.slope_min)/(self.p_max-self.p_min)*(self.pout_r-self.p_min)+self.slope_min
        #Define the intercept of the line corresponding to the exhaust temperature
        intercept=(self.intercept_max-self.intercept_min)/(self.p_max-self.p_min)*(self.pout_r-self.p_min)+self.intercept_min
        self.m_dot=slope*self.N+intercept
        #Outlet state
        #Tin [C] -> K; pin_r [kPa] -> Pa for CoolProp
        hin=PropsSI('H','T',self.Tin+273.15,'P',self.pin_r*1000,self.Ref)#*1000
        self.s_in = PropsSI('S','T',self.Tin+273.15,'P',self.pin_r*1000,self.Ref)/1000
        #Energy balance: outlet enthalpy from inlet enthalpy plus specific work
        hout=hin+self.W_dot/self.m_dot
        self.Tout=PropsSI('T','H',hout,'P',self.pout_r*1000,self.Ref) #in K
        # NOTE(review): the +100 Pa offset below looks like a workaround to
        # keep the state off the saturation boundary - confirm intent.
        self.s_out=PropsSI('S','T',self.Tout,'P',self.pout_r*1000 + 100,self.Ref)/1000
        #Isentropic outlet temperature (same entropy as inlet, outlet pressure)
        self.Tout_s = PropsSI('T','S',self.s_in*1000,'P',self.pout_r*1000,self.Ref)
if __name__=='__main__':
    """
    Example Diaphragm pump WANNER ENGINEERING
    """
    # Measured operating points: inlet/outlet pressures [kPa], pump speed,
    # inlet temperature [C] and measured shaft power [W].
    pin_r_list=[794.7276887,780.158035,784.3067128,808.239602,822.8122092,826.29617,887.1980418]
    pout_r_list=[1645.186859,1684.81582,1712.113611,1715.081928,1618.593683,1616.02753,1728.196266]
    N_list=[1099.97098,1099.809986,1099.72049,1099.818785,1099.743137,1099.450796,1099.270196]
    Tin_list=[15.4903837535014,15.3066340782123,15.5798263305322,15.7492877094972,15.5736862745098,15.7364804469274,15.0563305322129]
    W_list_meas = [235.4954587,236.254973,245.3089328,241.3617462,233.9065263,228.6898989,239.6439083]
    # Fix: removed a bare `zip(...)` statement whose result was discarded.
    for pin_r,pout_r,N,Tin,Wmeas in zip(pin_r_list,pout_r_list,N_list,Tin_list,W_list_meas):
        kwds={
            'W':[0.1096,114.34,1.0993,-0.0981],
            'Ref':'R134a',
            'pin_r':pin_r,
            'pout_r':pout_r,
            'N':N,
            'N_rated':995,
            'slope_min':0.000133504, #corresponding to the min outlet pressure
            'slope_max':0.000114377, #corresponding to the max outlet pressure
            'intercept_min':0.004, #corresponding to the min outlet pressure
            'intercept_max':0.025, #corresponding to the max outlet pressure
            'p_min':700.4260866,
            'p_max':2659.623637,
            'Tin':Tin
            }
        Pump=PumpClass(**kwds)
        Pump.Calculate()
        # Fix: replaced the Python-2-only `print` statement with a single
        # pre-formatted string, which behaves identically on Python 2 and 3.
        print('Calculated: ' + str(Pump.W_dot) + ' W ' + 'Measured: ' + str(Wmeas) + ' W')
from cloud_inquisitor.app import initialize
from cloud_inquisitor.plugins.commands import BaseCommand
class Setup(BaseCommand):
    """Sets up the initial state of the configuration stored in the database"""
    name = 'Setup'
    # No extra command-line options beyond the base command's.
    option_list = ()
    def run(self, **kwargs):
        # All work is delegated to the application's initialize() routine.
        initialize()
|
import argparse
import json
import logging
import sys
import requests
# Command-line contract used when this script is launched by the Opsgenie
# integration runner. Optional values (url/login/password) may alternatively
# arrive via the queue payload - see parse_field below.
parser = argparse.ArgumentParser()
parser.add_argument('-payload', '--queuePayload', help='Payload from queue', required=True)
parser.add_argument('-apiKey', '--apiKey', help='The apiKey of the integration', required=True)
parser.add_argument('-opsgenieUrl', '--opsgenieUrl', help='The url', required=True)
parser.add_argument('-logLevel', '--logLevel', help='Log level', required=True)
parser.add_argument('-url', '--url', help='The url', required=False)
parser.add_argument('-login', '--login', help='Login', required=False)
parser.add_argument('-password', '--password', help='Password', required=False)
args = vars(parser.parse_args())
# Log everything to stdout at the requested level.
logging.basicConfig(stream=sys.stdout, level=args['logLevel'])
def parse_field(key, mandatory):
    """Resolve *key* from the queue payload, falling back to CLI args.

    Raises ValueError (after logging) when a mandatory key resolves to a
    falsy value. Relies on module globals `queue_message` and `args`.
    """
    variable = queue_message.get(key)
    if not variable:
        variable = args.get(key)
    if mandatory and not variable:
        # Build the message once instead of duplicating the concatenation
        # in both the log call and the exception (previous DRY violation).
        message = (LOG_PREFIX + " Skipping action, Mandatory conf item '" + key +
                   "' is missing. Check your configuration file.")
        logging.error(message)
        raise ValueError(message)
    return variable
def parse_timeout():
    # Read an optional 'http.timeout' entry from the parsed CLI args. No such
    # argument is registered above, so this currently always falls back.
    parsed_timeout = args.get('http.timeout')
    if not parsed_timeout:
        # NOTE(review): 30000 looks like milliseconds, but the value is used
        # as a `requests` timeout, which is in seconds - confirm the unit.
        return 30000
    return int(parsed_timeout)
def login_to_trackit(url):
    """Authenticate against TrackIt and return its API key, or None on failure.

    Uses the module-global `timeout` set by main().
    """
    final_url = (url + "/TrackitWeb/api/login?username=" + parse_field("login", True)
                 + "&pwd=" + parse_field("password", True))
    # NOTE(review): this logs credentials embedded in the URL at debug level.
    logging.debug("Url: " + final_url)
    # Fix: `timeout` must be a keyword argument. The second positional
    # parameter of requests.get is `params`, so the previous call
    # `requests.get(final_url, timeout)` sent the timeout value as query
    # parameters and applied no timeout at all.
    response = requests.get(final_url, timeout=timeout)
    if response:
        response_map = response.json()
        if response_map:
            return response_map['data']['apiKey']
    return None
def main():
    """Handle one queued Opsgenie action: create a TrackIt work order for a
    'Create' action and write the resulting work-order id back to the alert."""
    global LOG_PREFIX
    global queue_message
    global timeout
    # Parse the queued payload describing the alert and the requested action.
    queue_message_string = args['queuePayload']
    queue_message = json.loads(queue_message_string)
    alert = queue_message["alert"]
    alert_id = alert["alertId"]
    action = queue_message["action"]
    LOG_PREFIX = "[" + action + "]"
    logging.info("Will execute " + action + " for alertId " + alert_id)
    timeout = parse_timeout()
    url = parse_field("url", True)
    # Authenticate first; track_key is required for every TrackIt call.
    track_key = login_to_trackit(url)
    if action == "Create":
        headers = {
            "Content-Type": "text/json",
            "Accept": "text/json",
            "TrackitAPIKey": track_key
        }
        content_params = {
            "StatusName": "Open",
            "Summary": alert['message'],
            "RequestorName": parse_field("login", True)
        }
        create_url = str(url) + "/TrackitWeb/api/workorder/Create"
        logging.debug(
            "Before Post -> Url: " + create_url + ", " + "Request Headers: " + str(headers) + " Content: " + str(content_params))
        response = requests.post(create_url, json.dumps(content_params), headers=headers, timeout=timeout)
        if response.status_code < 299:
            logging.info(LOG_PREFIX + " Successfully executed at TrackIt.")
            try:
                response_map = response.json()
                if response_map:
                    # Extract the new work-order id and attach it to the
                    # Opsgenie alert as a detail field.
                    flow_id = response_map['data']['data']['Id']
                    if flow_id:
                        alert_api_url = args['opsgenieUrl'] + "/v2/alerts/" + alert_id + "/details"
                        content = {
                            "details":
                                {
                                    "workflow_id": flow_id
                                }
                        }
                        # NOTE(review): "Accept-Language" here looks like it
                        # was meant to be "Accept" - confirm against the API.
                        headers = {
                            "Content-Type": "application/json",
                            "Accept-Language": "application/json",
                            "Authorization": "GenieKey " + args['apiKey']
                        }
                        alert_response = requests.post(alert_api_url,
                                                       data=json.dumps(content), headers=headers, timeout=timeout)
                        if alert_response.status_code < 299:
                            logging.info(LOG_PREFIX + " Successfully sent to Opsgenie")
                            logging.debug(
                                LOG_PREFIX + " TrackIt response: " + str(alert_response.content) + " " + str(alert_response.status_code))
                        else:
                            logging.warning(
                                LOG_PREFIX + " Could not execute at Opsgenie; response: " + str(alert_response.content) + " status code: " + str(alert_response.status_code))
                    else:
                        logging.warning(
                            LOG_PREFIX + " Flow Id does not exist.")
            except ValueError:
                # response.json() raises ValueError when the body is not JSON.
                logging.error(
                    LOG_PREFIX + " Response does not have flow Id variable, " + str(response.content) + " " + str(response.status_code))
        else:
            logging.warning(
                LOG_PREFIX + " Could not execute at TrackIt; response: " + str(response.content) + " " + str(response.status_code))
if __name__ == '__main__':
    main()
|
from distutils import log
from distutils.command.check import check
from distutils.command.clean import clean
from setuptools.command.install import install
from setuptools.command.build_ext import build_ext
try:
from wheel.bdist_wheel import bdist_wheel
except ImportError:
bdist_wheel = None
def _merge_entry_point_scripts(distribution):
    # Collect console-script entry points declared by all rust extensions and
    # merge them, without duplicates, into distribution.entry_points.
    scripts = []
    for ext in distribution.rust_extensions:
        scripts.extend(ext.entry_points())
    if not scripts:
        return
    if not distribution.entry_points:
        distribution.entry_points = {"console_scripts": scripts}
        return
    ep_scripts = distribution.entry_points.get("console_scripts")
    if ep_scripts:
        for script in scripts:
            if script not in ep_scripts:
                # Bug fix: the original appended the whole `scripts` list
                # here (`append(scripts)`) instead of the single missing
                # script, corrupting the console_scripts entry.
                ep_scripts.append(script)
    else:
        ep_scripts = scripts
    distribution.entry_points["console_scripts"] = ep_scripts
def add_rust_extension(dist):
    """Wrap the distribution's build_ext/clean/check/install/bdist_wheel
    commands so rust extensions are built, cleaned, checked and their
    console-script entry points registered."""
    build_ext_base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_rust_extension(build_ext_base_class):
        def run(self):
            if self.distribution.rust_extensions:
                log.info("running build_rust")
                build_rust = self.get_finalized_command("build_rust")
                build_rust.inplace = self.inplace
                build_rust.plat_name = self.plat_name
                build_rust.run()
            build_ext_base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_rust_extension
    clean_base_class = dist.cmdclass.get('clean', clean)
    class clean_rust_extension(clean_base_class):
        def run(self):
            clean_base_class.run(self)
            if not self.dry_run:
                self.run_command("clean_rust")
    dist.cmdclass['clean'] = clean_rust_extension
    check_base_class = dist.cmdclass.get('check', check)
    class check_rust_extension(check_base_class):
        def run(self):
            check_base_class.run(self)
            self.run_command("check_rust")
    dist.cmdclass["check"] = check_rust_extension
    install_base_class = dist.cmdclass.get('install', install)
    # this is required because, install directly access distribution's
    # ext_modules attr to check if dist has ext modules
    class install_rust_extension(install_base_class):
        def finalize_options(self):
            ext_modules = self.distribution.ext_modules
            # all ext modules (regular + rust), presented to install only for
            # the duration of finalize_options
            mods = []
            if self.distribution.ext_modules:
                mods.extend(self.distribution.ext_modules)
            if self.distribution.rust_extensions:
                mods.extend(self.distribution.rust_extensions)
            _merge_entry_point_scripts(self.distribution)
            self.distribution.ext_modules = mods
            install_base_class.finalize_options(self)
            # restore ext_modules
            self.distribution.ext_modules = ext_modules
    dist.cmdclass["install"] = install_rust_extension
    if bdist_wheel is not None:
        bdist_wheel_base_class = dist.cmdclass.get("bdist_wheel", bdist_wheel)
        # this is for console entries
        class bdist_wheel_rust_extension(bdist_wheel_base_class):
            def finalize_options(self):
                _merge_entry_point_scripts(self.distribution)
                bdist_wheel_base_class.finalize_options(self)
        dist.cmdclass["bdist_wheel"] = bdist_wheel_rust_extension
def rust_extensions(dist, attr, value):
    """setup() keyword hook for `rust_extensions`.

    Makes has_ext_modules() also report True when rust extensions are present
    and installs the rust-aware command wrappers.
    """
    assert attr == "rust_extensions"
    original_check = dist.has_ext_modules
    def has_any_ext_modules():
        # Late-bound: reflects rust_extensions added after this hook runs.
        return original_check() or bool(dist.rust_extensions)
    dist.has_ext_modules = has_any_ext_modules
    if dist.rust_extensions:
        add_rust_extension(dist)
|
# vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Fail functions because called from wrong mode test for vimiv's test suite."""
from unittest import main
from vimiv_testcase import VimivTestCase
class FailingModeTest(VimivTestCase):
    """Failing Mode Tests: commands must fail cleanly outside manipulate mode."""
    @classmethod
    def setUpClass(cls):
        # init_test boots a shared vimiv instance for the whole test class.
        cls.init_test(cls)
        cls.cmdline = cls.vimiv["commandline"]
    def test_fail_focus_slider(self):
        """Fail focus slider because not in manipulate."""
        self.vimiv["manipulate"].focus_slider("bri")
        # The error must be reported on the statusbar, not raised.
        self.check_statusbar(
            "ERROR: Focusing a slider only makes sense in manipulate")
    def test_fail_button_clicked(self):
        """Fail exiting manipulate via button_clicked."""
        self.vimiv["manipulate"].finish(False)
        self.check_statusbar(
            "ERROR: Finishing manipulate only makes sense in manipulate")
# Run the test suite when executed directly (unittest.main).
if __name__ == "__main__":
    main()
|
import os
import numpy as np
import rouge
def score_sentence(sent_rouge, highlights, scorer):
    """Return the mean of the ROUGE-1, ROUGE-2 and ROUGE-L F-scores for
    *sent_rouge* against *highlights*, as computed by *scorer*."""
    scores = scorer.get_scores(sent_rouge, highlights)
    total = scores['rouge-1']['f'] + scores['rouge-2']['f']
    total = total + scores['rouge-l']['f']
    return total / 3.0
def save_rouge(filename, content_rouge_file, rouge_scores, sentences_rouge):
    """Rewrite section 1 of *content_rouge_file* as 'score - sentence' lines
    and save the whole document (sections joined by blank lines) under
    ./rouge2/<filename>. Mutates content_rouge_file in place."""
    new_scores_sents = ["%f - %s" % (score, sent) for score, sent in
                        zip(rouge_scores, sentences_rouge)]
    content_rouge_file[1] = "\n".join(new_scores_sents)
    # Fix: removed the redundant f.close() that sat inside the with-block;
    # the context manager already closes the file.
    with open('./rouge2/%s' % filename, 'w') as f:
        f.write('\n\n'.join(content_rouge_file))
    print(filename + " salvo com sucesso.")
def generate_multioracle(all_files):
    """For each filename, recompute per-sentence mean ROUGE scores against the
    paper highlights and persist them via save_rouge."""
    scorer = rouge.Rouge(['rouge-n', 'rouge-l'], max_n=2, stemming=True)
    for filename in all_files:
        # Fix: use context managers so input file handles are closed
        # (previously open(...).read() leaked them).
        with open("./rouge/%s" % filename) as rouge_file:
            content_rouge_file = rouge_file.read().split('\n\n')
        with open("./papers_highlights_rouge/%s" % filename) as highlights_file:
            highlights = highlights_file.read().split('\n')
        # Section 1 holds 'score - sentence' lines; keep only the sentences.
        sentences_rouge = content_rouge_file[1].split('\n')
        sentences_rouge = [sent.split(' - ')[1] for sent in sentences_rouge]
        rouge_scores = np.array([score_sentence(sent_rouge, highlights, scorer)
                                 for sent_rouge in sentences_rouge])
        save_rouge(filename, content_rouge_file, rouge_scores, sentences_rouge)
if __name__ == "__main__":
    # Process every document that has a labels file.
    all_files = os.listdir('labels')
    generate_multioracle(all_files)
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import pystache
class JsonRenderer(pystache.Renderer):
    """pystache Renderer that emits interpolated values as JSON.

    HTML escaping is disabled by default (identity escape) because the output
    is JSON, not HTML; str_coerce serializes each value with json.dumps.
    """
    def __init__(self,
                 file_encoding=None,
                 string_encoding=None,
                 decode_errors=None,
                 search_dirs=None,
                 file_extension=None,
                 escape=None,
                 partials=None,
                 missing_tags=None):
        # json would be html escaped otherwise
        if escape is None:
            escape = lambda u: u
        # Fix: dropped the misleading `return` in front of super().__init__;
        # __init__ must not return a value (it worked only because the call
        # returns None).
        super(JsonRenderer, self).__init__(file_encoding,
                                           string_encoding,
                                           decode_errors, search_dirs,
                                           file_extension, escape,
                                           partials, missing_tags)
    def str_coerce(self, val):
        # Render every interpolated value as its JSON representation.
        return json.dumps(val)
|
from functools import wraps
import weakref
from asyncio import coroutine, gather
class Named:
    # Cooperative mixin: consumes the keyword-only `name` argument, stores it
    # on the instance, and forwards everything else up the MRO.
    def __init__(self, *args, name, **kws):
        super().__init__(*args, **kws)
        self.name = name
class Annotate(Named):
    """ annotation that is transformed to a class """
    def __new__(cls, definition):
        # wraps() copies the definition's metadata (__name__, __doc__, ...)
        # onto the new instance so it "looks like" the wrapped function.
        return wraps(definition)(super().__new__(cls))
    def __init__(self, definition):
        super().__init__(name=definition.__name__)
        # Keep the original callable for later evaluation (see Defaults).
        self.definition = definition
class Conotate(Annotate):
    """ annotation that is defined as a coroutine """
    def __init__(self, definition, *args, **kws):
        # NOTE(review): asyncio.coroutine was removed in Python 3.11; this
        # wrapper only works on older interpreters.
        definition = coroutine(definition)
        super().__init__(definition, *args, **kws)
class Descr(Named):
    """ base for building descriptors """
    def lookup(self, obj):
        """ abstract method that returns the dict and key to access/store the value """
        raise NotImplementedError
    def has_entry(self, obj):
        """ check if descriptor is set on an object """
        dct, key = self.lookup(obj)
        return key in dct
class ObjDescr(Descr):
    """ decriptor mixin putting values in the object's __dict__ under an
    underscore-prefixed key, so values are pickled with the object """
    def __init__(self, name):
        super().__init__(name=name)
        self.entry = '_' + name
    def lookup(self, obj):
        return obj.__dict__, self.entry
class RefDescr(Descr):
    """ descriptor mixin based on weak reference from objects """
    def __init__(self, name):
        super().__init__(name=name)
        # WeakKeyDictionary: entries vanish when the owning object is garbage
        # collected, and values are NOT pickled with the object.
        self.refs = weakref.WeakKeyDictionary()
    def lookup(self, obj):
        return self.refs, obj
class Get(Descr):
    """ get descriptor calling using provided lookup and falling back to __default__ """
    def __get__(self, obj, objtype=None):
        # Class-level access returns the descriptor object itself.
        if obj is None:
            return self
        dct, key = self.lookup(obj)
        try:
            return dct[key]
        except KeyError:
            return self.__default__(obj)
    def __default__(self, obj):
        """ provider for default value of descriptor, raising NameError by default """
        raise NameError("Descriptor %s of %s object has no value set" %
                        (self.name, type(obj).__name__))
    @classmethod
    def iter(desc, obj, bind=False):
        """
        iterate over all fields of the object of this descriptor's class

        Note: `desc` is the class here (unconventional classmethod naming).
        With bind=True, yields each descriptor's value for *obj* instead of
        the descriptor itself.
        """
        cls = type(obj)
        for name in dir(cls):
            attr = getattr(cls, name)
            if isinstance(attr, desc):
                if bind:
                    yield attr.__get__(obj)
                else:
                    yield attr
class Defaults(Annotate, Descr):
    """ descriptor evaluating its wrapped definition to provide the default value """
    def __default__(self, obj):
        return self.definition(obj)
class Set(Descr):
    """Descriptor supporting assignment and deletion, with an optional
    post-set hook registered via post()."""
    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        self._post = None
    def post(self, f):
        """Register callable *f* to run (with the owner object) after every set."""
        assert callable(f)
        self._post = f
        return self
    def __set__(self, obj, value):
        dct, key = self.lookup(obj)
        dct[key] = value
        hook = self._post
        if hook:
            hook(obj)
    def __delete__(self, obj):
        # Deleting an unset value is a silent no-op.
        dct, key = self.lookup(obj)
        try:
            del dct[key]
        except KeyError:
            pass
class Cache(Set, Get):
    """
    get descriptor remembering the default value for further calls to get
    """
    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        dct, key = self.lookup(obj)
        try:
            return dct[key]
        except KeyError:
            # First access: compute the default, store it (which also runs
            # any registered post-hook), then return it.
            val = self.__default__(obj)
            self.__set__(obj, val)
            return val
class attr(Defaults, ObjDescr, Get, Set):
    """ attribute descriptor with additional features """
    pass
class delayed(Defaults, ObjDescr, Cache):
    """ evaluate once and stored in obj dict, so values get pickled """
    pass
class refers(Defaults, RefDescr, Cache):
    """ keep values around, but reevaluate after pickling """
    pass
# Alias kept for backwards compatibility.
cached = refers
class once(Defaults, RefDescr, Cache):
    """Cache descriptor whose value may be assigned at most once per object."""
    def __set__(self, obj, value):
        # Fix: the original guarded with `if obj:` (truthiness), so a falsy
        # instance (e.g. one defining __len__() == 0) silently skipped the
        # assignment and the set-once guard; identity with None is intended.
        if obj is None:
            return self
        dct, key = self.lookup(obj)
        if key in dct:
            raise AttributeError(
                "Attribute {} of {} can only be set once"
                .format(self.name, type(obj)))
        dct[key] = value
        return value
class initialized(Conotate, RefDescr, Cache):
    """
    descriptor whose value is produced by running its coroutine once via
    `initialize` with the supplied kwargs
    """
    pass
@coroutine
def initialize(obj, **opts):
    """ call all `@initialized` descriptors to initialize values """
    calls = []
    for desc in initialized.iter(obj):
        # Fix: the original never appended to `calls`, so this function always
        # gathered nothing and returned {}. It also tested `has_entry`
        # un-negated; initializing makes sense for descriptors WITHOUT a
        # value yet (NOTE(review): confirm intended condition).
        if not desc.has_entry(obj):
            @coroutine
            def init(desc=desc):
                # `desc=desc` binds the current descriptor as a default,
                # avoiding the late-binding closure bug.
                val = yield from desc.definition(obj, **opts)
                desc.__set__(obj, val)
                return desc.name, val
            calls.append(init())
    return dict((yield from gather(*calls)))
|
from django.apps import AppConfig
class VerificationsConfig(AppConfig):
    # Django application configuration for the `verifications` app.
    name = 'verifications'
    verbose_name = 'Verifications'
|
try:
    from .version import __version__
except ImportError:
    # Fix: narrowed the previously-bare `except:` to ImportError so real
    # errors (KeyboardInterrupt, SyntaxError in version.py, ...) are no
    # longer silently swallowed. The version module is generated at build
    # time and may legitimately be absent in a source checkout.
    pass
from .get_data import get_data
from .get_data import map_segmentation_to_dataframe
from .get_data import random_basis_projection
from .get_data import hierarchical
from .get_data import deep_brain_parcellation
from .get_data import deep_tissue_segmentation
from .get_data import deep_mtl
from .get_data import label_hemispheres
from .get_data import brain_extraction
from .get_data import deep_hippo
from .get_data import hemi_reg
from .get_data import region_reg
from .get_data import t1_hypointensity
from .get_data import zoom_syn
from .get_data import map_intensity_to_dataframe
from .get_data import trim_segmentation_by_distance
from .get_data import deep_nbm
from .get_data import deep_cit168
from .get_data import write_hierarchical
from .get_data import preprocess_intensity
from .get_data import merge_hierarchical_csvs_to_wide_format
from .get_data import subdivide_hemi_label
from .get_data import special_crop
from .get_data import loop_outlierness
from .get_data import mahalanobis_distance
from .get_data import patch_eigenvalue_ratio
from .get_data import subdivide_labels
from .get_data import inspect_raw_t1
|
# Author: Fei Gao
# Date: 7/6/14
# Definition for a binary tree node
class TreeNode(object):
    """Binary tree node, buildable from a level-order list (None = absent slot)."""
    def __init__(self, x=None):
        self.left = None
        self.right = None
        # Heap-index -> value mapping, filled lazily by _build_dic.
        self.dic = dict()
        if isinstance(x, (list, tuple)):
            # Build this node and its descendants from a level-order sequence.
            # (The original rebound `self` to the return value, which is a
            # no-op; build_from_list mutates self in place.)
            self.build_from_list(list(x))
        else:
            self.val = x
    def print_mlr(self):
        """Print the tree root-first (mid -> left -> right), indented."""
        print("Print tree in (mid -> left -> right):")
        def _print_mlr(root, indent=0):
            if root is None:
                # Fix: was `pass`, which fell through and crashed on root.val.
                return
            print((' ' * indent + '{}').format(root.val))
            if root.left is not None:
                indent += 2
                print(' ' * indent + 'L:')
                _print_mlr(root.left, indent)
                print(' ' * indent + ':L')
                indent -= 2
            if root.right is not None:
                indent += 2
                print(' ' * indent + 'R:')
                _print_mlr(root.right, indent)
                print(' ' * indent + ':R')
                indent -= 2
        _print_mlr(self, 0)
        print("\nEND")
    def _build_dic(self):
        # Populate self.dic with heap-style indices: children of index i live
        # at 2i+1 (left) and 2i+2 (right).
        def func(node, index):
            self.dic[index] = node.val
            if node.left is not None:
                func(node.left, index * 2 + 1)
            if node.right is not None:
                func(node.right, index * 2 + 2)
        func(self, 0)
    def to_dict(self):
        """Return {heap_index: value} for all present nodes."""
        self._build_dic()
        return self.dic
    def to_list(self):
        """Return the level-order list representation (None for absent slots)."""
        self._build_dic()
        n = 1 + max(self.dic.keys())
        lst = [None] * n
        for key, val in self.dic.items():
            lst[key] = val
        return lst
    def min_depth(self):
        """
        find minimum depth of self (depth of the shallowest leaf, via BFS)
        """
        depth = 1
        cur_depth = [self]
        while True:
            next_depth = []
            found_leaf = False
            for node in cur_depth:
                is_leaf = True
                if node.left is not None:
                    next_depth.append(node.left)
                    is_leaf = False
                if node.right is not None:
                    next_depth.append(node.right)
                    is_leaf = False
                if is_leaf:
                    found_leaf = True
                    break
            if found_leaf:
                return depth
            depth += 1
            cur_depth = next_depth
    def max_depth(self):
        """
        find maximum depth of root (number of BFS levels)
        """
        depth = 0
        cur_depth = [self]
        while len(cur_depth) != 0:
            next_depth = []
            for node in cur_depth:
                if node.left is not None:
                    next_depth.append(node.left)
                if node.right is not None:
                    next_depth.append(node.right)
            depth += 1
            cur_depth = next_depth
        return depth
    def build_from_list(self, lst):
        """(Re)build this node from a level-order list and return self.

        Fix: the original tested `lst is []`, which is always False (identity
        against a fresh list object), so an empty input crashed with
        IndexError on lst[0]. An empty list now yields a value-less node.
        """
        if not lst:
            self.val = None
            return self
        n = len(lst)
        def build_children(node, idx):
            l_idx = idx * 2 + 1
            r_idx = idx * 2 + 2
            if l_idx < n and lst[l_idx] is not None:
                node.left = TreeNode(lst[l_idx])
                build_children(node.left, l_idx)
            if r_idx < n and lst[r_idx] is not None:
                node.right = TreeNode(lst[r_idx])
                build_children(node.right, r_idx)
        self.val = lst[0]
        build_children(self, 0)
        return self
    def preorderTraversal(self):
        """Return node values in preorder (root, left, right), iteratively."""
        preorder = []
        # Fix: use a proper LIFO stack (append/pop) instead of the original
        # insert(0,...)/pop(0) pattern, which is O(n) per operation.
        stack = [self]
        while stack:
            node = stack.pop()
            preorder.append(node.val)
            # Push right first so the left subtree is visited first.
            if node.right is not None:
                stack.append(node.right)
            if node.left is not None:
                stack.append(node.left)
        return preorder
    def __bool__(self):
        # NOTE(review): `self is not None` is always True for an instance;
        # kept for backward compatibility (a TreeNode is always truthy).
        return self is not None
|
#urllib module in python.
# NOTE(review): this script performs real network requests at import time and
# depends on external sites being reachable.
"""Urllib module is the url handling module for python. It is used fetch URL's(uniform resource locators). It uses the urlopen() and is able to fetch url's using a
variety of different protocols"""
#Urllib is a package that collects several modules for working with URL's.
"""urllib.request for opening and reading.
urllib.parse for parsing the URL's.
urllib.error for raising the exception.
urllib.robotparser for parsing the robot.txt files."""
#1.urllib.request - This module helps to define functions and classes to open URLs.
import urllib.request
a = urllib.request.urlopen('https://www.hackerrank.com/domains/python?filters%5Bstatus%5D%5B%5D=unsolved&badge_type=python')
print(a.read())
"""2.urllib.parse - This module helps to define functions to manipulate URLs and their component parts, to make or brake them. It usually focus on breaking the
components into smaller parts or joining different URL parts to form a string."""
from urllib.parse import *
# urlparse splits the URL into scheme/netloc/path/params/query/fragment.
a = urlparse('https://www.hackerrank.com/ domains')
print(a)
# urlunparse reassembles the parsed components back into a URL string.
b = urlunparse(a)
print(b)
"""3.urllib.error - This module defines the classes for exception raised by urllib.request. Whenever there is an error when fetching an URL This module helps in
raising exceptions.
URLError – It is raised for the errors in URLs, or errors while fetching the URL due to connectivity, and has a ‘reason’ property that tells a user the reason of
error.
HTTPError – It is raised for the exotic HTTP errors, such as the authentication request errors. It is a subclass or URLError. Typical errors include ‘404’ (page not found), ‘403’ (request forbidden),
and ‘401’ (authentication required)."""
import urllib.request
import urllib.parse
try:
    a = urllib.request.urlopen('https://www.google.com')
    print(a.read())
except Exception as e:
    print(e)
#example2.
# The space in the URL below makes it invalid, demonstrating the error path.
import urllib.request
import urllib.parse
try:
    a = urllib.request.urlopen('https://www.google.com/ search?q = test')
except Exception as e:
    print(e)
"""urllib.robotparse- This module contains a single class Robotfileparser. This class answers question about whether or not, user can fetch a URL that publish
robot.txt files. Robots.txt is a text file webmasters create to instruct web robots how to crawl pages on their website. The robot.txt file tells the web
scraper about what parts of the server should not be accessed."""
# importing robot parser class
import urllib.robotparser as rb
bot = rb.RobotFileParser()
# checks where the website's robot.txt file reside
# (set_url and read both return None, so the prints below show None)
x = bot.set_url('https://www.geeksforgeeks.org / robot.txt')
print(x)
# reads the files
y = bot.read()
print(y)
# we can crawl the main site
z = bot.can_fetch('*', 'https://www.geeksforgeeks.org/')
print(z)
# but can not crawl the disallowed url
w = bot.can_fetch('*', 'https://www.geeksforgeeks.org / wp-admin/')
print(w)
|
# This file is part of faro.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import lsst.pipe.base as pipeBase
import lsst.pex.config as pexConfig
from lsst.faro.base.CatalogMeasurementBase import (
CatalogMeasurementBaseConnections,
CatalogMeasurementBaseConfig,
CatalogMeasurementBaseTask,
)
__all__ = ("VisitTableMeasurementConfig", "VisitTableMeasurementTask")
class VisitTableMeasurementConnections(
    CatalogMeasurementBaseConnections, dimensions=("instrument", "visit", "band")
):
    """Butler connections for metrics measured on a per-visit source table."""
    catalog = pipeBase.connectionTypes.Input(
        doc="Source table in parquet format, per visit",
        dimensions=("instrument", "visit", "band"),
        storageClass="DataFrame",
        name="sourceTable_visit",
        deferLoad=True,  # deferred so runQuantum can load only selected columns
    )
    measurement = pipeBase.connectionTypes.Output(
        doc="Per-visit measurement",
        dimensions=("instrument", "visit", "band"),
        storageClass="MetricValue",
        name="metricvalue_{package}_{metric}",
    )
class VisitTableMeasurementConfig(
    CatalogMeasurementBaseConfig, pipelineConnections=VisitTableMeasurementConnections
):
    """Configuration for VisitTableMeasurementTask."""
    # Restricting this list keeps the deferred parquet read small; the default
    # covers only sky coordinates.
    columns = pexConfig.ListField(
        doc="Columns from sourceTable_visit to load.",
        dtype=str,
        default=["coord_ra", "coord_dec"],
    )
class VisitTableMeasurementTask(CatalogMeasurementBaseTask):
    """Base class for science performance metrics measured on single-visit source catalogs."""
    ConfigClass = VisitTableMeasurementConfig
    _DefaultName = "visitTableMeasurementTask"
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Load the configured columns, optionally attach reference catalogs,
        run the measurement, and write the result if one was produced."""
        inputs = butlerQC.get(inputRefs)
        # Deferred load: read only the columns named in the config.
        catalog = inputs["catalog"].get(parameters={"columns": self.config.columns})
        kwargs = {}
        kwargs['catalog'] = catalog
        if self.config.connections.refDataset != "":
            # A reference dataset is configured: assemble the reference catalog
            # restricted to this visit's filter and epoch.
            refCats = inputs.pop("refCat")
            filterList = [butlerQC.quantum.dataId.records["physical_filter"].name]
            # Time at the start of the visit
            epoch = butlerQC.quantum.dataId.records["visit"].timespan.begin
            refCat, refCatCorrected = self._getReferenceCatalog(
                butlerQC,
                [ref.datasetRef.dataId for ref in inputRefs.refCat],
                refCats,
                filterList,
                epoch,
            )
            kwargs["refCat"] = refCat
            kwargs["refCatCorrected"] = refCatCorrected
        outputs = self.run(**kwargs)
        if outputs.measurement is not None:
            butlerQC.put(outputs, outputRefs)
        else:
            # NOTE(review): `debugf` is the lsst.log {}-format logging method;
            # confirm self.log provides it (plain logging.Logger objects do not).
            self.log.debugf(
                "Skipping measurement of {!r} on {} " "as not applicable.",
                self,
                inputRefs,
            )
|
# Snakemake injects a module-level `snakemake` object when it runs this
# script; its absence means we are being run by hand for debugging.
debug = "snakemake" not in locals()
if not debug:
    import sys
    # Redirect stderr into the Snakemake-managed log file for this rule.
    # The handle is intentionally left open for the lifetime of the script.
    sys.stderr = open(snakemake.log[0], "w")
import pandas as pd
def merge_deep_arg_calls(mapping, meta_data, deep_arg_calls, output):
    """Join the OSD sample mapping, environmental metadata and deepARG calls
    into one tab-separated table written to `output`.

    Args:
        mapping: path to the Excel sheet mapping samples to OSD ids.
        meta_data: path to the pipe-separated environmental metadata CSV.
        deep_arg_calls: path to the tab-separated deepARG calls table.
        output: path the merged TSV is written to.
    """
    mapping_df = pd.read_excel(mapping)
    meta_df = pd.read_csv(meta_data, sep="|")
    combined = pd.merge(
        mapping_df,
        meta_df,
        how="left",
        left_on="label",
        right_on="label",
        validate="1:1",
    )
    # Both inputs carry an osd_id column; keep the mapping's copy.
    combined = combined.drop(columns=["osd_id_y"]).rename(
        columns={"osd_id_x": "osd_id"}
    )
    calls = pd.read_csv(deep_arg_calls, sep="\t")
    # Sample names embed the OSD id, e.g. "OSD123..." -> 123.
    calls["osd_id"] = calls["sample"].str.extract(r"OSD(\d{1,3})")
    calls["osd_id"] = calls["osd_id"].astype(int)
    combined = pd.merge(
        combined,
        calls,
        how="left",
        left_on="file_name",
        right_on="sample",
        validate="1:m",
    )
    combined.to_csv(output, sep="\t", index=False)
if __name__ == "__main__":
    if debug:
        # Hard-coded paths for interactive debugging outside of Snakemake.
        mapping = "/local/thomas/OSD/resources/ME_osd_id_alloction.xlsx"
        meta_data = "/local/thomas/OSD/resources/OSD2014-env_data_2017-11-23.csv"
        deep_arg_calls = "/local/thomas/OSD/results/tables/deep_arg_all_calls.tsv"
        output = "results/tables/deep_arg_calls_with_meta_data.tsv"
        merge_deep_arg_calls(mapping, meta_data, deep_arg_calls, output)
    else:
        # Paths supplied by the Snakemake rule that invokes this script.
        merge_deep_arg_calls(
            snakemake.input.mapping,
            snakemake.input.meta_data,
            snakemake.input.deep_arg_calls,
            snakemake.output[0],
        )
|
from math import *
import pandas
# Molecular Weights (presumably g/mol — values match standard oxide weights)
MW_SiO2 = 60.0855
MW_TiO2 = 79.88
MW_Al2O3 = 101.96
MW_Fe2O3 = 159.69
MW_FeO = 71.85
MW_MgO = 40.3
MW_CaO = 56.08
MW_Na2O = 61.98
MW_K2O = 94.2
MW_H2O = 18.02
#Partial Molar Volumes
#Volumes for SiO2, Al2O3, MgO, CaO, Na2O, K2O at Tref=1773 K (Lange, 1997; CMP)
#Volume for H2O at Tref=1273 K (Ochs and Lange, 1999)
#Volume for FeO at Tref=1723 K (Guo et al., 2014)
#Volume for Fe2O3 at Tref=1723 K (Liu and Lange, 2006)
#Volume for TiO2 at Tref=1773 K (Lange and Carmichael, 1987)
MV_SiO2 = 26.86
MV_TiO2 = 28.32
MV_Al2O3 = 37.42
MV_Fe2O3 = 41.50
MV_FeO = 12.68
MV_MgO = 12.02
MV_CaO = 16.90
MV_Na2O = 29.65
MV_K2O = 47.28
MV_H2O = 22.9
#Partial Molar Volume uncertainties
#value = 0 if not reported
unc_MV_SiO2 = 0.03
unc_MV_TiO2 = 0
unc_MV_Al2O3 = 0.09
unc_MV_Fe2O3 = 0
unc_MV_FeO = 0
unc_MV_MgO = 0.07
unc_MV_CaO = 0.06
unc_MV_Na2O = 0.07
unc_MV_K2O = 0.10
unc_MV_H2O = 0.60
#dV/dT values
#MgO, CaO, Na2O, K2O Table 4 (Lange, 1997)
#SiO2, TiO2, Al2O3 Table 9 (Lange and Carmichael, 1987)
#H2O from Ochs & Lange (1999)
#Fe2O3 from Liu & Lange (2006)
#FeO from Guo et al (2014)
# NOTE: dVdT_SiO2 and dVdT_Fe2O3 are exactly 0; the uncertainty tables below
# special-case them to avoid division by zero.
dVdT_SiO2 = 0.0
dVdT_TiO2 = 0.00724
dVdT_Al2O3 = 0.00262
dVdT_Fe2O3 = 0.0
dVdT_FeO = 0.00369
dVdT_MgO = 0.00327
dVdT_CaO = 0.00374
dVdT_Na2O = 0.00768
dVdT_K2O = 0.01208
dVdT_H2O = 0.0095
#dV/dT uncertainties
#value = 0 if not reported
unc_dVdT_SiO2 = 0
unc_dVdT_TiO2 = 0
unc_dVdT_Al2O3 = 0
unc_dVdT_Fe2O3 = 0
unc_dVdT_FeO = 0
unc_dVdT_MgO = 0
unc_dVdT_CaO = 0
unc_dVdT_Na2O = 0
unc_dVdT_K2O = 0
unc_dVdT_H2O = 0.00080
#dV/dP values
#Anhydrous component data from Kess and Carmichael (1991)
#H2O data from Ochs & Lange (1999)
dVdP_SiO2 = -0.000189
dVdP_TiO2 = -0.000231
dVdP_Al2O3 = -0.000226
dVdP_Fe2O3 = -0.000253
dVdP_FeO = -0.000045
dVdP_MgO = 0.000027
dVdP_CaO = 0.000034
dVdP_Na2O = -0.00024
dVdP_K2O = -0.000675
dVdP_H2O = -0.00032
#dV/dP uncertainties
unc_dVdP_SiO2 = 0.000002
unc_dVdP_TiO2 = 0.000006
unc_dVdP_Al2O3 = 0.000009
unc_dVdP_Fe2O3 = 0.000009
unc_dVdP_FeO = 0.000003
unc_dVdP_MgO = 0.000007
unc_dVdP_CaO = 0.000005
unc_dVdP_Na2O = 0.000005
unc_dVdP_K2O = 0.000014
unc_dVdP_H2O = 0.000060
# Reference temperatures (K) at which each partial molar volume was calibrated
Tref_SiO2 = 1773
Tref_TiO2 = 1773
Tref_Al2O3 = 1773
Tref_Fe2O3 = 1723
Tref_FeO = 1723
Tref_MgO = 1773
Tref_CaO = 1773
Tref_Na2O = 1773
Tref_K2O = 1773
Tref_H2O = 1273
def Density(dataframe):
    """Compute melt density (g/cm3 and g/L) plus uncertainty for each row.

    Expects a pandas DataFrame with oxide wt% columns (SiO2, TiO2, Al2O3,
    Fe2O3, FeO, MgO, CaO, Na2O, K2O, H2O) and columns "T" (temperature —
    converted with +273, so presumably deg C) and "P" (pressure; units
    assumed to match the dV/dP calibration — TODO confirm bars).

    NOTE(review): the DataFrame is mutated in place (oxide columns are
    repeatedly re-scaled and many intermediate columns are added); the same
    object is also returned.
    """
    data = dataframe #takes in a Pandas dataframe with compositional information, P, and T
    data = data.fillna(value=0) #Replace any empty cells (which read in as NaN) with 0, otherwise Pandas will break
    def NormalizeWtPercentVals(dataframe):
        # Normalizes the oxide wt% columns so they sum to 100, in place.
        data = dataframe
        #Save original wt% values
        orig_WP_SiO2 = data["SiO2"]
        orig_WP_TiO2 = data["TiO2"]
        orig_WP_Al2O3 = data["Al2O3"]
        orig_WP_Fe2O3 = data["Fe2O3"]
        orig_WP_FeO = data["FeO"]
        orig_WP_MgO = data["MgO"]
        orig_WP_CaO = data["CaO"]
        orig_WP_Na2O = data["Na2O"]
        orig_WP_K2O = data["K2O"]
        orig_WP_H2O = data["H2O"]
        #also save SiO2 in duplicate to avoid corruption
        data["SiO2 (User Input)"] = orig_WP_SiO2
        #sum original wt% values
        data["OriginalSum"] = data["SiO2"] + data["TiO2"] + data["Al2O3"] + data["Fe2O3"] + data["FeO"] + data["MgO"] + data["CaO"] + data["Na2O"] + data["K2O"] + data["H2O"]
        #Normalize original wt% values
        data.loc[:,'SiO2'] /= data['OriginalSum']
        data.loc[:,'TiO2'] /= data['OriginalSum']
        data.loc[:,'Al2O3'] /= data['OriginalSum']
        data.loc[:,'Fe2O3'] /= data['OriginalSum']
        data.loc[:,'FeO'] /= data['OriginalSum']
        data.loc[:,'MgO'] /= data['OriginalSum']
        data.loc[:,'CaO'] /= data['OriginalSum']
        data.loc[:,'Na2O'] /= data['OriginalSum']
        data.loc[:,'K2O'] /= data['OriginalSum']
        data.loc[:,'H2O'] /= data['OriginalSum']
        data.loc[:,'SiO2'] *= 100
        data.loc[:,'TiO2'] *= 100
        data.loc[:,'Al2O3'] *= 100
        data.loc[:,'Fe2O3'] *= 100
        data.loc[:,'FeO'] *= 100
        data.loc[:,'MgO'] *= 100
        data.loc[:,'CaO'] *= 100
        data.loc[:,'Na2O'] *= 100
        data.loc[:,'K2O'] *= 100
        data.loc[:,'H2O'] *= 100
        data["NormedSum"] = data["SiO2"] + data["TiO2"] + data["Al2O3"] + data["Fe2O3"] + data["FeO"] + data["MgO"] + data["CaO"] + data["Na2O"] + data["K2O"] + data["H2O"]
        #From this point, oxide column values are in normalized wt%
        return data
    def MoleFraction(dataframe):
        # Converts normalized wt% columns to mole fractions, in place.
        data = NormalizeWtPercentVals(dataframe)
        #divide normalized wt% values by molecular weights
        data.loc[:,'SiO2'] /= MW_SiO2
        data.loc[:,'TiO2'] /= MW_TiO2
        data.loc[:,'Al2O3'] /= MW_Al2O3
        data.loc[:,'Fe2O3'] /= MW_Fe2O3
        data.loc[:,'FeO'] /= MW_FeO
        data.loc[:,'MgO'] /= MW_MgO
        data.loc[:,'CaO'] /= MW_CaO
        data.loc[:,'Na2O'] /= MW_Na2O
        data.loc[:,'K2O'] /= MW_K2O
        data.loc[:,'H2O'] /= MW_H2O
        data["MolPropOxSum"] = data["SiO2"] + data["TiO2"] + data["Al2O3"] + data["Fe2O3"] + data["FeO"] + data["MgO"] + data["CaO"] + data["Na2O"] + data["K2O"] + data["H2O"]
        #convert to mol fraction
        data.loc[:,'SiO2'] /= data['MolPropOxSum']
        data.loc[:,'TiO2'] /= data['MolPropOxSum']
        data.loc[:,'Al2O3'] /= data['MolPropOxSum']
        data.loc[:,'Fe2O3'] /= data['MolPropOxSum']
        data.loc[:,'FeO'] /= data['MolPropOxSum']
        data.loc[:,'MgO'] /= data['MolPropOxSum']
        data.loc[:,'CaO'] /= data['MolPropOxSum']
        data.loc[:,'Na2O'] /= data['MolPropOxSum']
        data.loc[:,'K2O'] /= data['MolPropOxSum']
        data.loc[:,'H2O'] /= data['MolPropOxSum']
        #From this point, oxide column values are in mole fraction
        return data
    # MoleFraction mutates `data` in place, so data_moleFraction aliases data.
    data_moleFraction = MoleFraction(data)
    #calculating the component density in two equations: one for the denominator, one for the numerator.
    #A new numerator is calculated for each oxide.
    data["numerSiO2"] = data["SiO2"] * MW_SiO2
    data["numerTiO2"] = data["TiO2"] * MW_TiO2
    data["numerAl2O3"] = data["Al2O3"] * MW_Al2O3
    data["numerFe2O3"] = data["Fe2O3"] * MW_Fe2O3
    data["numerFeO"] = data["FeO"] * MW_FeO
    data["numerMgO"] = data["MgO"] * MW_MgO
    data["numerCaO"] = data["CaO"] * MW_CaO
    data["numerNa2O"] = data["Na2O"] * MW_Na2O
    data["numerK2O"] = data["K2O"] * MW_K2O
    data["numerH2O"] = data["H2O"] * MW_H2O
    #Calculate temperature in Kelvin
    data["T_K"] = data["T"] + 273
    #A new denominator is calculated for each oxide
    data["denomSiO2"] = MV_SiO2 + (dVdT_SiO2 * (data["T_K"] - Tref_SiO2)) + (dVdP_SiO2 * (data["P"] - 1))
    data["denomTiO2"] = MV_TiO2 + (dVdT_TiO2 * (data["T_K"] - Tref_TiO2)) + (dVdP_TiO2 * (data["P"] - 1))
    data["denomAl2O3"] = MV_Al2O3 + (dVdT_Al2O3 * (data["T_K"] - Tref_Al2O3)) + (dVdP_Al2O3 * (data["P"] - 1))
    data["denomFe2O3"] = MV_Fe2O3 + (dVdT_Fe2O3 * (data["T_K"] - Tref_Fe2O3)) + (dVdP_Fe2O3 * (data["P"] - 1))
    data["denomFeO"] = MV_FeO + (dVdT_FeO * (data["T_K"] - Tref_FeO)) + (dVdP_FeO * (data["P"] - 1))
    data["denomMgO"] = MV_MgO + (dVdT_MgO * (data["T_K"] - Tref_MgO)) + (dVdP_MgO * (data["P"] - 1))
    data["denomCaO"] = MV_CaO + (dVdT_CaO * (data["T_K"] - Tref_CaO)) + (dVdP_CaO * (data["P"] - 1))
    data["denomNa2O"] = MV_Na2O + (dVdT_Na2O * (data["T_K"] - Tref_Na2O)) + (dVdP_Na2O * (data["P"] - 1))
    data["denomK2O"] = MV_K2O + (dVdT_K2O * (data["T_K"] - Tref_K2O)) + (dVdP_K2O * (data["P"] - 1))
    data["denomH2O"] = MV_H2O + (dVdT_H2O * (data["T_K"] - Tref_H2O)) + (dVdP_H2O * (data["P"] - 1))
    #Calculate component density by dividing numerator by denominator
    data["ComponentDensity_SiO2"] = data["numerSiO2"] / data["denomSiO2"]
    data["ComponentDensity_TiO2"] = data["numerTiO2"] / data["denomTiO2"]
    data["ComponentDensity_Al2O3"] = data["numerAl2O3"] / data["denomAl2O3"]
    data["ComponentDensity_Fe2O3"] = data["numerFe2O3"] / data["denomFe2O3"]
    data["ComponentDensity_FeO"] = data["numerFeO"] / data["denomFeO"]
    data["ComponentDensity_MgO"] = data["numerMgO"] / data["denomMgO"]
    data["ComponentDensity_CaO"] = data["numerCaO"] / data["denomCaO"]
    data["ComponentDensity_Na2O"] = data["numerNa2O"] / data["denomNa2O"]
    data["ComponentDensity_K2O"] = data["numerK2O"] / data["denomK2O"]
    data["ComponentDensity_H2O"] = data["numerH2O"] / data["denomH2O"]
    #Calculate the individual Vliq for each oxide
    data["IndivVliq_SiO2"] = (MV_SiO2 + (dVdT_SiO2 * (data["T_K"] - Tref_SiO2)) + (dVdP_SiO2 * (data["P"]-1))) * data["SiO2"]
    data["IndivVliq_TiO2"] = (MV_TiO2 + (dVdT_TiO2 * (data["T_K"] - Tref_TiO2)) + (dVdP_TiO2 * (data["P"]-1))) * data["TiO2"]
    data["IndivVliq_Al2O3"] = (MV_Al2O3 + (dVdT_Al2O3 * (data["T_K"] - Tref_Al2O3)) + (dVdP_Al2O3 * (data["P"]-1))) * data["Al2O3"]
    data["IndivVliq_Fe2O3"] = (MV_Fe2O3 + (dVdT_Fe2O3 * (data["T_K"] - Tref_Fe2O3)) + (dVdP_Fe2O3 * (data["P"]-1))) * data["Fe2O3"]
    data["IndivVliq_FeO"] = (MV_FeO + (dVdT_FeO * (data["T_K"] - Tref_FeO)) + (dVdP_FeO * (data["P"]-1))) * data["FeO"]
    data["IndivVliq_MgO"] = (MV_MgO + (dVdT_MgO * (data["T_K"] - Tref_MgO)) + (dVdP_MgO * (data["P"]-1))) * data["MgO"]
    data["IndivVliq_CaO"] = (MV_CaO + (dVdT_CaO * (data["T_K"] - Tref_CaO)) + (dVdP_CaO * (data["P"]-1))) * data["CaO"]
    data["IndivVliq_Na2O"] = (MV_Na2O + (dVdT_Na2O * (data["T_K"] - Tref_Na2O)) + (dVdP_Na2O * (data["P"]-1))) * data["Na2O"]
    data["IndivVliq_K2O"] = (MV_K2O + (dVdT_K2O * (data["T_K"] - Tref_K2O)) + (dVdP_K2O * (data["P"]-1))) * data["K2O"]
    data["IndivVliq_H2O"] = (MV_H2O + (dVdT_H2O * (data["T_K"] - Tref_H2O)) + (dVdP_H2O * (data["P"]-1))) * data["H2O"]
    #Calculate the sum of all Vliq oxides for each sample
    data["VliqSum"] = (data["IndivVliq_SiO2"] + data["IndivVliq_TiO2"] + data["IndivVliq_Al2O3"] + data["IndivVliq_Fe2O3"] + data["IndivVliq_FeO"] +
                        data["IndivVliq_MgO"] + data["IndivVliq_CaO"] + data["IndivVliq_Na2O"] + data["IndivVliq_K2O"] + data["IndivVliq_H2O"])
    #Calculate Indiv X*MW
    data.loc[:,'SiO2'] *= MW_SiO2
    data.loc[:,'TiO2'] *= MW_TiO2
    data.loc[:,'Al2O3'] *= MW_Al2O3
    data.loc[:,'Fe2O3'] *= MW_Fe2O3
    data.loc[:,'FeO'] *= MW_FeO
    data.loc[:,'MgO'] *= MW_MgO
    data.loc[:,'CaO'] *= MW_CaO
    data.loc[:,'Na2O'] *= MW_Na2O
    data.loc[:,'K2O'] *= MW_K2O
    data.loc[:,'H2O'] *= MW_H2O
    #From this point, oxide column values are in X*MW
    #Calculate the sum of X*MW oxides
    data["XMW_Sum"] = (data["SiO2"] + data["TiO2"] + data["Al2O3"] + data["Fe2O3"] + data["FeO"] +
                        data["MgO"] + data["CaO"] + data["Na2O"] + data["K2O"] + data["H2O"])
    #Calculate the density of the melt in g/cm3 and in g/L
    data["Density_g_per_cm3"] = data["XMW_Sum"] / data["VliqSum"]
    data["Density_g_per_L"] = data["Density_g_per_cm3"] * 1000
    #Uncertainty Calculations
    #Partial Molar Volume: relative error per oxide
    error_MV = {'SiO2' : (unc_MV_SiO2 / MV_SiO2),
                'TiO2' : (unc_MV_TiO2 / MV_TiO2),
                'Al2O3' : (unc_MV_Al2O3 / MV_Al2O3),
                'Fe2O3' : (unc_MV_Fe2O3 / MV_Fe2O3),
                'FeO' : (unc_MV_FeO / MV_FeO),
                'MgO' : (unc_MV_MgO / MV_MgO),
                'CaO' : (unc_MV_CaO / MV_CaO),
                'Na2O' : (unc_MV_Na2O / MV_Na2O),
                'K2O' : (unc_MV_K2O / MV_K2O),
                'H2O' : (unc_MV_H2O / MV_H2O)}
    #dVdT values
    # SiO2 and Fe2O3 have dVdT == 0, so dividing by the value would be a
    # ZeroDivisionError; both are special-cased (their uncertainties are 0).
    error_dVdT = {
                'SiO2' : (unc_dVdT_SiO2 / 1),
                'TiO2' : (unc_dVdT_TiO2 / dVdT_TiO2),
                'Al2O3' : (unc_dVdT_Al2O3 / dVdT_Al2O3),
                'Fe2O3' : 0,
                'FeO' : (unc_dVdT_FeO / dVdT_FeO),
                'MgO' : (unc_dVdT_MgO / dVdT_MgO),
                'CaO' : (unc_dVdT_CaO / dVdT_CaO),
                'Na2O' : (unc_dVdT_Na2O / dVdT_Na2O),
                'K2O' : (unc_dVdT_K2O / dVdT_K2O),
                'H2O' : (unc_dVdT_H2O / dVdT_H2O)}
    #dVdP values
    error_dVdP = {
                'SiO2' : (unc_dVdP_SiO2 / dVdP_SiO2),
                'TiO2' : (unc_dVdP_TiO2 / dVdP_TiO2),
                'Al2O3' : (unc_dVdP_Al2O3 / dVdP_Al2O3),
                'Fe2O3' : (unc_dVdP_Fe2O3 / dVdP_Fe2O3),
                'FeO' : (unc_dVdP_FeO / dVdP_FeO),
                'MgO' : (unc_dVdP_MgO / dVdP_MgO),
                'CaO' : (unc_dVdP_CaO / dVdP_CaO),
                'Na2O' : (unc_dVdP_Na2O / dVdP_Na2O),
                'K2O' : (unc_dVdP_K2O / dVdP_K2O),
                'H2O' : (unc_dVdP_H2O / dVdP_H2O)}
    #combine the three relative errors in quadrature, per oxide
    percent_error_Vliq = {}
    for key in error_MV:
        percent_error_Vliq[key] = sqrt(error_MV[key]**2 + error_dVdT[key]**2 + error_dVdP[key]**2)
    data["Unc_Vliq_SiO2"] = data["IndivVliq_SiO2"] * percent_error_Vliq['SiO2']
    data["Unc_Vliq_TiO2"] = data["IndivVliq_TiO2"] * percent_error_Vliq['TiO2']
    data["Unc_Vliq_Al2O3"] = data["IndivVliq_Al2O3"] * percent_error_Vliq['Al2O3']
    data["Unc_Vliq_Fe2O3"] = data["IndivVliq_Fe2O3"] * percent_error_Vliq['Fe2O3']
    data["Unc_Vliq_FeO"] = data["IndivVliq_FeO"] * percent_error_Vliq['FeO']
    data["Unc_Vliq_MgO"] = data["IndivVliq_MgO"] * percent_error_Vliq['MgO']
    data["Unc_Vliq_CaO"] = data["IndivVliq_CaO"] * percent_error_Vliq['CaO']
    data["Unc_Vliq_Na2O"] = data["IndivVliq_Na2O"] * percent_error_Vliq['Na2O']
    data["Unc_Vliq_K2O"] = data["IndivVliq_K2O"] * percent_error_Vliq['K2O']
    data["Unc_Vliq_H2O"] = data["IndivVliq_H2O"] * percent_error_Vliq['H2O']
    data["unc_VliqSum"] = ( data["Unc_Vliq_SiO2"] +
                            data["Unc_Vliq_TiO2"] +
                            data["Unc_Vliq_Al2O3"]+
                            data["Unc_Vliq_Fe2O3"]+
                            data["Unc_Vliq_FeO"]  +
                            data["Unc_Vliq_MgO"]  +
                            data["Unc_Vliq_CaO"]  +
                            data["Unc_Vliq_Na2O"] +
                            data["Unc_Vliq_K2O"]  +
                            data["Unc_Vliq_H2O"]  )
    #calculate error on density value
    data['Uncertainty_g_per_cm3'] = data["unc_VliqSum"] / data["VliqSum"]
    data['Uncertainty_g_per_L'] = data["Uncertainty_g_per_cm3"] * 1000
    return data
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class QNetwork(nn.Module):
    """Actor (Policy) Model mapping states to per-action Q-values."""

    def __init__(self, state_size, action_size, seed):
        """Initialize parameters and build model.
        Params
        ======
            state_size (int): Dimension of each state
            action_size (int): Dimension of each action
            seed (int): Random seed
        """
        super(QNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)
        # Two hidden layers (1024 -> 512) feeding a linear output head.
        self.fc1 = nn.Linear(state_size, 1024)
        self.fc2 = nn.Linear(1024, 512)
        self.fc3 = nn.Linear(512, action_size)

    def forward(self, state):
        """Map a batch of states to Q-values, one per action."""
        hidden = F.relu(self.fc1(state))
        hidden = F.relu(self.fc2(hidden))
        return self.fc3(hidden)
class DuelingQNetwork(nn.Module):
    """Dueling Actor (Policy) Model with separate advantage and value streams."""

    def __init__(self, state_size, action_size, seed):
        """Initialize parameters and build model.
        Params
        ======
            state_size (int): Dimension of each state
            action_size (int): Dimension of each action
            seed (int): Random seed
        """
        super(DuelingQNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)
        # Advantage Stream
        self.afc1 = nn.Linear(state_size, 1024)
        self.afc2 = nn.Linear(1024, 512)
        self.afc3 = nn.Linear(512, action_size)
        # State-Value Stream
        # NOTE(review): a canonical dueling head emits a single scalar V(s);
        # the per-action value output is kept here to preserve the original
        # layer shapes (checkpoint compatibility).
        self.vfc1 = nn.Linear(state_size, 512)
        self.vfc2 = nn.Linear(512, 512)
        self.vfc3 = nn.Linear(512, action_size)

    def forward(self, state):
        """Combine streams as Q(s,a) = V(s) + (A(s,a) - mean_a A(s,a)).

        FIX: the advantage mean is taken over the action dimension per sample
        (dim=-1, keepdim=True). The previous `adv.mean()` averaged over the
        whole batch, making one sample's Q-values depend on its batch mates.
        """
        adv = F.relu(self.afc1(state))
        adv = F.relu(self.afc2(adv))
        adv = self.afc3(adv)
        val = F.relu(self.vfc1(state))
        val = F.relu(self.vfc2(val))
        val = self.vfc3(val)
        return val + (adv - adv.mean(dim=-1, keepdim=True))
|
# Minimal smoke-test script; the condition is trivially true.
if None is None:
    # FIX: print() as a function — the original Python 2 `print "OK"` statement
    # is a SyntaxError under Python 3; the call form works on both versions.
    print("OK")
|
class AStanfordDataProcessor(object):
    """Skeleton data processor for a Stanford image dataset.

    NOTE(review): both processing methods are unimplemented stubs; the exact
    dataset is not evident from this file.
    """
    def __init__(self, path_images, transforms, path_human_readable_labels):
        # Root directory containing the dataset images.
        self.path_images = path_images
        # Transform pipeline to be applied to each image.
        self.transforms = transforms
        # Path to the file mapping class ids to human-readable labels.
        self.path_human_readable_labels = path_human_readable_labels
    def preprocess_data(self, path_to_matdata, validation_percentage, data_subset):
        # TODO: split the dataset (presumably from .mat metadata, given the
        # parameter name) into train/validation subsets.
        pass
    def get_data_generator(self, data_matrix):
        # TODO: yield batches from the preprocessed data matrix.
        pass
|
import sys
import re
import pprint
import itertools
my_name = sys.argv[0]
print("my_name:", my_name)
# Derive the puzzle day from the script file name (e.g. "day09.py" -> "09").
# NOTE(review): re.search returns None when the name has no digits, and
# .group() would then raise AttributeError — confirm scripts are named dayNN.py.
day_nr = re.search(r"\d+", my_name).group()
print("day_nr:", day_nr)
# Accumulators apparently left over from an earlier puzzle; unused below.
processed_colors_a = dict()
processed_colors_b = dict()
def read_input():
    """Read one integer per line from stdin until EOF."""
    return [int(raw_line.strip()) for raw_line in sys.stdin]
def is_conformer(num, nums):
    """Return True if `num` equals the sum of two entries of `nums` taken at
    distinct positions (equal values at different positions count, e.g.
    4 == 2 + 2 when 2 appears twice).

    FIX: replaces the O(n^2) pair scan with an O(n) complement-set lookup;
    the accepted pairs are exactly the same (positions i < j).
    """
    seen = set()
    for value in nums:
        if num - value in seen:
            return True
        seen.add(value)
    return False
def find_sol_a(input_data):
    """Return the first number that is not a sum of two of the 25 before it.

    Returns -1 when every number conforms.
    """
    window = 25
    for pos in range(window, len(input_data)):
        candidate = input_data[pos]
        if not is_conformer(candidate, input_data[pos - window:pos]):
            return candidate
    return -1
def gen_cont_lists(reduced_data_set, nr_addend):
    """Return every contiguous sub-list of length `nr_addend`."""
    last_start = len(reduced_data_set) - nr_addend + 1
    return [reduced_data_set[start:start + nr_addend] for start in range(last_start)]
def find_sol_b(input_data, invalid_nr):
    """Find a contiguous run (length >= 2) of the numbers before `invalid_nr`
    that sums to it; return the run's smallest + largest value, or -1.

    Search order matches the original (run length ascending, left to right),
    so the same run is found when several candidates exist.

    FIX: the original materialized every window up front via gen_cont_lists
    and accumulated all window sums in an unused, unbounded dict; it also
    carried dead debug code after its return. This version checks each window
    as it goes with O(1) extra memory per window.
    """
    reduced = input_data[:input_data.index(invalid_nr)]
    for run_len in range(2, len(reduced) + 1):
        for start in range(len(reduced) - run_len + 1):
            chunk = reduced[start:start + run_len]
            if sum(chunk) == invalid_nr:
                return min(chunk) + max(chunk)
    return -1
def main():
    """Read puzzle input from stdin, then print the part-a and part-b answers."""
    input_data = read_input()
    # print("LEN input_data =", len(input_data))
    # print("input_data =", pprint.pformat(input_data))
    nr_correct = find_sol_a(input_data)
    # print("LEN processed_colors =", len(processed_colors))
    # print("processed_colors =", pprint.pformat(processed_colors))
    print("answer day{day_nr}({task_day}): {result}".format
          (day_nr=day_nr, task_day="a", result=nr_correct))
    # Part b reuses part a's answer as the invalid number to search around.
    nr_correct = find_sol_b(input_data, nr_correct)
    # print("LEN processed_colors_b =", len(processed_colors_b))
    # print("processed_colors_b =", pprint.pformat(processed_colors_b))
    print("answer day{day_nr}({task_day}): {result}".format
          (day_nr=day_nr, task_day="b", result=nr_correct))
if __name__ == "__main__":
    # execute only if run as a script
    main()
|
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5 import sip
from app.tabs import *
class App(QMainWindow):
    """Top-level window for the 'Iata analyses' GUI; hosts the tab widget."""
    def __init__(self):
        super().__init__()
        # main window
        self.setWindowTitle('Iata analyses')
        # x, y, width, height in screen pixels
        self.setGeometry(50, 50, 1000, 1000)
        # All application content lives inside the central tab widget.
        self.tab_widget = MyTabWidget(self)
        self.setCentralWidget(self.tab_widget)
        self.show()
"""Tools for handling noisy output from classification."""
import numpy as np
from scipy import ndimage as nd
from skimage.filters import rank
def fill_nearest_neighbor(a):
    """Fill masked cells with the value of the nearest non-masked cell.

    Args:
        a (MaskedArray): A 2D array.

    Raises:
        TypeError: If `a` is not a MaskedArray.

    Returns:
        ndarray: A 2D array (the input itself when nothing is masked).
    """
    if not isinstance(a, np.ma.MaskedArray):
        raise TypeError("Input must be masked array")
    if not np.ma.is_masked(a):
        return a
    # For every masked cell, get the indices of its nearest unmasked cell.
    nearest = nd.distance_transform_edt(
        a.mask, return_distances=False, return_indices=True
    )
    return a.data[tuple(nearest)]
def sieve(a, min_cluster_size, structure=np.ones((3, 3))):
    """Mask clusters smaller than `min_cluster_size`, in place.

    A cluster is a group of equal-valued cells connected to each other as
    defined by `structure` (default 8-connectedness).

    Note:
        Changes input array.

    Args:
        a (MaskedArray): 2D array.
        min_cluster_size (int): Minimum size (in number of cells) to keep a cluster.
        structure (ndarray, optional): Neighborhood as a 2-D array of 1s and 0s.

    Raises:
        TypeError: If input is not a MaskedArray.
    """
    if not isinstance(a, np.ma.MaskedArray):
        raise TypeError("Input must be masked array")
    # Sieve each class value present in the unmasked data separately.
    for class_value in np.unique(a.compressed()):
        noise = sieve_mask(a.data, class_value, min_cluster_size, structure=structure)
        a[noise] = np.ma.masked
def sieve_mask(a, class_number, min_cluster_size, structure=np.ones((3, 3))):
    """Get a bool mask marking clusters of `class_number` smaller than a threshold.

    Args:
        a (ndarray): 2D array.
        class_number (number): Cell value whose clusters are examined.
        min_cluster_size (int): Minimum size (in number of cells) to keep a cluster.
        structure (ndarray, optional): Neighborhood as a 2-D array of 1s and 0s.
            Defaults to np.ones((3, 3)) which is 8-connectedness.

    Returns:
        ndarray: 2D array of bools with the same shape as the input array.
    """
    class_bin = a == class_number
    # FIX: nd.label replaces the long-deprecated nd.measurements.label.
    labeled_array, _ = nd.label(class_bin, structure)
    counts = np.bincount(labeled_array.ravel())
    # FIX: label 0 is the background (cells NOT of this class); the original
    # included it in the small-cluster test, wrongly masking background cells
    # whenever the background happened to be small.
    counts[0] = min_cluster_size
    noise_labels = np.where(counts < min_cluster_size)[0]
    # np.isin keeps the input's shape, so no reshape is needed.
    return np.isin(labeled_array, noise_labels)
def majority_vote(a, iterations=1, structure=np.ones((3, 3))):
    """Changes cell values to the most frequent value in its neighborhood.

    Args:
        a (ndarray): 2D ndarray. Possibly a MaskedArray.
        iterations (int, optional): Number of times to repeat the process. Defaults to 1.
        structure (ndarray, optional): The neighborhood expressed as a 2-D array of 1's and 0's. Defaults to
            np.ones((3, 3)) which is 8-connectedness.

    Returns:
        ndarray: 2D ndarray of same dimensions as input array. MaskedArray if input is masked.
    """
    nodata = None
    assert a.dtype == "uint8", "Majority vote only works for uint8"
    if np.ma.is_masked(a):
        # windowed_histogram does not work with masked arrays, so masked cells
        # are temporarily replaced with a sentinel value above the data range.
        # NOTE(review): if 255 occurs in the data, uint8 max + 1 wraps to 0 and
        # the sentinel collides with a real class value — confirm inputs < 255.
        nodata = np.max(a) + 1
        a = a.filled(nodata)
    for _ in range(iterations):
        # Per-cell histogram over the neighborhood; argmax picks the mode.
        a = rank.windowed_histogram(a, structure).argmax(axis=-1).astype("uint8")
    # Re-mask the sentinel cells if the input was masked.
    return np.ma.masked_values(a, nodata) if nodata is not None else a
def denoise(a):
    """Apply simple denoising to a classified raster.

    Denoising removes small clusters and fills nodata areas.

    Args:
        a (MaskedArray): 2D MaskedArray with 'uint8' type

    Returns:
        ndarray: Denoised data
    """
    smoothed = fill_nearest_neighbor(majority_vote(a, 2))
    result = majority_vote(smoothed, 1)
    if isinstance(result, np.ma.MaskedArray):
        return result.filled()
    return result
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
import logging
import docopt
import pprint
import psd_tools.reader
import psd_tools.decoder
from psd_tools import PSDImage
from psd_tools.user_api.layers import group_layers
# Module-level logger; a StreamHandler makes messages visible on stderr.
logger = logging.getLogger('psd_tools')
logger.addHandler(logging.StreamHandler())
def main():
    """
    psd-tools.py
    Usage:
        psd-tools.py <filename> [--encoding <encoding>] [--verbose]
        psd-tools.py convert <psd_filename> <out_filename> [--verbose]
        psd-tools.py export_layer <psd_filename> <layer_index> <out_filename> [--verbose]
        psd-tools.py -h | --help
        psd-tools.py --version
    Options:
        -v --verbose                    Be more verbose.
        --encoding <encoding>           Text encoding [default: utf8].
    """
    # CAUTION: the docstring above IS the CLI specification — docopt parses
    # main.__doc__, so editing it changes the accepted command-line arguments.
    args = docopt.docopt(main.__doc__)
    if args['--verbose']:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    if args['convert']:
        # Render the whole PSD composite and save it via PIL.
        psd = PSDImage.load(args['<psd_filename>'])
        im = psd.as_PIL()
        im.save(args['<out_filename>'])
    elif args['export_layer']:
        # Render a single layer selected by numeric index.
        psd = PSDImage.load(args['<psd_filename>'])
        index = int(args['<layer_index>'])
        im = psd.layers[index].as_PIL()
        im.save(args['<out_filename>'])
        print(psd.layers)
        psd.as_PIL()
    else:
        # Default mode: dump the decoded low-level PSD structures for inspection.
        encoding = args['--encoding']
        with open(args['<filename>'], "rb") as f:
            decoded = psd_tools.decoder.parse(
                psd_tools.reader.parse(f, encoding)
            )
        print(decoded.header)
        pprint.pprint(decoded.image_resource_blocks)
        pprint.pprint(decoded.layer_and_mask_data)
        pprint.pprint(decoded.image_data)
        pprint.pprint(group_layers(decoded))
|
# .spam — repeat a message in the current chat (garbled decorative header repaired)
#
@admin_cmd()
async def spam(event):
    """Send a message repeatedly: `.spam <count> | <message>`.

    FIXES over the original: the first error path neither rebooted nor
    stopped (execution fell through with an undefined count); bare `except:`
    clauses hid real errors; execution continued past `reboot()` on error
    paths; `id` shadowed the builtin; and a non-integer count would have made
    `while i != spamCount` spin forever.
    """
    chat_id = event.chat_id
    parts = event.raw_text.split(" ")
    try:
        count_text = parts[1]
    except IndexError:
        await event.edit("**ERROR OCCURRED \n Do :** ```.spam <number> | <spam Message>``` ")
        reboot()
        return
    try:
        spam_count = int(count_text)
    except ValueError:
        await event.edit("**ERROR OCCURRED \n Do :** ```.spam <number> | <spam Message>``` ")
        reboot()
        return
    try:
        spam_message = event.raw_text.split("|")[1]
    except IndexError:
        await event.edit("**ERROR OCCURRED \n Do :** ```.spam <number> | <spam Message>``` ")
        reboot()
        return
    for _ in range(spam_count):
        await user.send_message(chat_id, spam_message)
    print(f"\nCommand Used : spam \nLocation : {chat_id} \n")
|
#
# @lc app=leetcode id=225 lang=python3
#
# [225] Implement Stack using Queues
#
# @lc code=start
class MyStack:
    """LIFO stack backed by a plain Python list (list tail = stack top)."""

    def __init__(self):
        """Create an empty stack."""
        self.stack = []

    def push(self, x: int) -> None:
        """Place x on top of the stack."""
        self.stack.append(x)

    def pop(self) -> int:
        """Remove and return the top element, or None when empty."""
        return self.stack.pop() if self.stack else None

    def top(self) -> int:
        """Return the top element without removing it, or None when empty."""
        return self.stack[-1] if self.stack else None

    def empty(self) -> bool:
        """Report whether the stack holds no elements."""
        return len(self.stack) == 0
# Your MyStack object will be instantiated and called as such:
# obj = MyStack()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.top()
# param_4 = obj.empty()
# @lc code=end
# Accepted
# 16/16 cases passed(28 ms)
# Your runtime beats 93.98 % of python3 submissions
# Your memory usage beats 100 % of python3 submissions(12.7 MB)
|
from datetime import datetime
import unittest
from dateutil import parser
from jinja2 import FunctionLoader
from csv_model import CSVModel, cast_to_date
from csv_view import CSVJinjaView
class TestViewFilters(unittest.TestCase):
def setUp(self):
self.view = CSVJinjaView(env_options={'loader': FunctionLoader(lambda x:x)})
self.data = [
['1', 'hi', 'yes', '2017-07-19', '3.5'],
['2', 'bye', 'no', '2017-07-18', '3.6'],
['3', 'heh', 'y', '2017-07-20', '3.7'],
]
self.model = CSVModel(self.data)
def test_bool(self):
data = ['yes', 'no', 'True', 'y', 'false', 'N', 'TrUE']
expected = ['True', 'False', 'True', 'True', 'False', 'False', 'True']
self.assertEqual(len(data), len(expected),
msg='# of test cases should match # of expected outcomes')
for test_data, val in zip(data, expected):
template = '{{ ' + repr(test_data) + ' | bool }}'
self.assertEqual(val, self.view.render_jinja_template(template, None))
def test_date(self):
today = datetime.now()
data = ['2017-07-02', '02/29/2008', '10:15:45.31 AM', '05/11/96']
expected = [datetime(2017, 7, 2), datetime(2008, 2, 29),
datetime(today.year, today.month, today.day, 10, 15, 45, 310000),
datetime(1996, 5, 11)]
self.assertEqual(len(data), len(expected),
msg='# of test cases should match # of expected outcomes')
for test_data, val in zip(data, expected):
template = '{{ ' + repr(test_data) + ' | date }}'
self.assertEqual(str(val), self.view.render_jinja_template(template, None))
def test_date_from_timestamp(self):
data = ['1000', '0', '1', '1931516412']
for test_data in data:
template = '{{ ' + str(test_data) + ' | int | date }}'
expected = str(datetime.fromtimestamp(int(test_data)))
self.assertEqual(expected, self.view.render_jinja_template(template, None))
def test_dateformat(self):
date = '2017-07-18'
formats = ['%d/%m/%y', '%Y-%d-%m', '%y']
expected = ['18/07/17', '2017-18-07', '17']
self.assertEqual(len(formats), len(expected),
msg='# of test cases should match # of expected outcomes')
for fmt, val in zip(formats, expected):
template = '{{ ' + repr(date) + ' | date | dateformat(' + repr(fmt) + ') }}'
self.assertEqual(val, self.view.render_jinja_template(template, None))
def test_cast(self):
template = '{{ rows | cast(["date", None, "int", "date", "bool"]) }}'
expected = str(self.model.cast([cast_to_date, str, int, cast_to_date, bool]))
self.assertEqual(expected, self.view.render_jinja_template(template, self.model))
def test_castrange(self):
    """`castrange` casts converters over a bounded column range only."""
    template = '{{ rows | castrange(["float", None, "int"], 0, 3) }}'
    converters = [float, str, int]
    expected = str(self.model.cast_range(converters, 0, 3))
    self.assertEqual(expected, self.view.render_jinja_template(template, self.model))
def test_rowrange(self):
    """`rowrange` mirrors CSVModel.row_slice."""
    rendered = self.view.render_jinja_template('{{ rows | rowrange(1) }}', self.model)
    self.assertEqual(str(self.model.row_slice(1)), rendered)
def test_columnrange(self):
    """`columnrange` mirrors CSVModel.col_slice."""
    rendered = self.view.render_jinja_template('{{ rows | columnrange(2, 3) }}', self.model)
    self.assertEqual(str(self.model.col_slice(2, 3)), rendered)
if __name__ == '__main__':
    # allow running this test module directly: python <module>.py
    unittest.main()
|
from typing import NoReturn
from cryspy.A_functions_base.function_1_objects import \
form_items_by_dictionary
from cryspy.B_parent_classes.cl_1_item import ItemN
from cryspy.B_parent_classes.cl_2_loop import LoopN
class Range(ItemN):
    """Contains range information.

    Optional minimum/maximum bounds for ttheta (2theta in CIF), phi,
    time, gamma and nu.  Parsing, CIF I/O and attribute access are
    implemented by the ItemN base class, driven by the ATTR_* class
    constants declared below.
    """
    # no attribute is mandatory: any subset of the bounds may be given
    ATTR_MANDATORY_NAMES = ()
    ATTR_MANDATORY_TYPES = ()
    ATTR_MANDATORY_CIF = ()
    ATTR_OPTIONAL_NAMES = ("ttheta_min", "ttheta_max", "phi_min", "phi_max",
        "time_min", "time_max", "gamma_min", "gamma_max", "nu_min", "nu_max")
    ATTR_OPTIONAL_TYPES = (float, float, float, float, float, float, float, float, float, float)
    # CIF labels; only "2theta" differs from the attribute name "ttheta"
    ATTR_OPTIONAL_CIF = ("2theta_min", "2theta_max", "phi_min", "phi_max",
        "time_min", "time_max", "gamma_min", "gamma_max", "nu_min", "nu_max")
    ATTR_NAMES = ATTR_MANDATORY_NAMES + ATTR_OPTIONAL_NAMES
    ATTR_TYPES = ATTR_MANDATORY_TYPES + ATTR_OPTIONAL_TYPES
    ATTR_CIF = ATTR_MANDATORY_CIF + ATTR_OPTIONAL_CIF
    ATTR_INT_NAMES = ()
    ATTR_INT_PROTECTED_NAMES = ()
    # parameters considered are refined parameters (none for Range, so
    # the derived tuples below are all empty)
    ATTR_REF = ()
    ATTR_SIGMA = tuple([f"{_h:}_sigma" for _h in ATTR_REF])
    ATTR_CONSTR_FLAG = tuple([f"{_h:}_constraint" for _h in ATTR_REF])
    ATTR_REF_FLAG = tuple([f"{_h:}_refinement" for _h in ATTR_REF])
    ATTR_CONSTR_MARK = tuple([f"{_h:}_mark" for _h in ATTR_REF])
    # constraints on the parameters
    D_CONSTRAINTS = {}
    # default values for the parameters
    # NOTE(review): the class-level loop variable `key` remains in the
    # class namespace afterwards -- harmless, but kept as generated
    D_DEFAULT = {}
    for key in ATTR_SIGMA:
        D_DEFAULT[key] = 0.
    for key in (ATTR_CONSTR_FLAG + ATTR_REF_FLAG):
        D_DEFAULT[key] = False
    for key in ATTR_CONSTR_MARK:
        D_DEFAULT[key] = ""
    PREFIX = "range"

    def __init__(self, **kwargs) -> NoReturn:
        super(Range, self).__init__()
        # defined for any integer and float parameters
        D_MIN = {"ttheta_min": 0., "ttheta_max": 0., "phi_min": -90.,
                 "phi_max": -90., "time_min": 0., "time_max": 0.}
        # defined for any integer and float parameters
        # NOTE(review): gamma/nu (and the time upper) bounds have no
        # entries here -- presumably unbounded; confirm against ItemN
        D_MAX = {"ttheta_min": 180., "ttheta_max": 180., "phi_min": 90.,
                 "phi_max": 90.}
        self.__dict__["D_MIN"] = D_MIN
        self.__dict__["D_MAX"] = D_MAX
        # apply defaults first, then caller-supplied keyword overrides
        for key, attr in self.D_DEFAULT.items():
            setattr(self, key, attr)
        for key, attr in kwargs.items():
            setattr(self, key, attr)
class RangeL(LoopN):
    """Loop (table) of Range items, as they appear in a CIF ``loop_`` block."""
    ITEM_CLASS = Range
    ATTR_INDEX = None

    def __init__(self, loop_name: str = None, **kwargs) -> NoReturn:
        super(RangeL, self).__init__()
        # build the per-row Range items from the keyword columns
        range_items = form_items_by_dictionary(self.ITEM_CLASS, kwargs)
        self.__dict__["items"] = range_items
        self.__dict__["loop_name"] = loop_name
# s_cont = """
# loop_
# _range_phi_min
# _range_2theta_min
# _range_2theta_max
# 7 4 80
# 7 -9 780
# """
# obj = RangeL.from_cif(s_cont)
# print(obj, end="\n\n")
# print(obj[0], end="\n\n")
|
# parsetab.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = "leftORleftANDnonassocLELEGEGTEQNEADD ALERT ALL ALTER AND AS ASC CHAR CONNECT CREATE DELETE DESC DROP EQ EXIT FROM GE GRANT GT HELP ID INDEX INSERT INT INTO IS JDBC KEY LE LIMIT LOAD LT NE NOT NULL NUMBER ON OR OVERWRITE PASSWORD PRIMARY PRINT REGISTER REVOKE SAVE SELECT SET SET SHOW SPARKSQL STRING TABLE TABLES TO TRAIN UPDATE USER VALUES VIEW WHERE start : command\n | command ';' command : ddl\n | dml\n | utility\n | nothing ddl : createtable\n | createindex\n | droptable\n | dropindex\n | showtables\n | alerttable\n | createuser\n | grantuser\n | revokeuser dml : query\n | insert\n | delete\n | update\n | train\n | register\n | load\n | save\n | connect\n | set utility : exit\n | print showtables : SHOW TABLES createuser : CREATE USER ID PASSWORD STRING grantuser : GRANT power_list ON non_mrelation_list TO non_mrelation_list revokeuser : REVOKE power_list ON non_mrelation_list FROM non_mrelation_list power_list : power_list ',' power_type\n | power_type power_type : SELECT\n | UPDATE\n | INSERT\n | DELETE\n | PRINT\n | ALL\n alerttable : ALERT TABLE ID ADD attrtype\n | ALERT TABLE ID DROP non_mrelation_list createtable : CREATE TABLE ID '(' non_mattrtype_list ')' createindex : CREATE INDEX ID '(' ID ')' droptable : DROP TABLE ID dropindex : DROP INDEX ID '(' ID ')' print : PRINT ID exit : EXIT query : SELECT non_mselect_clause FROM non_mrelation_list opwhere_clause oplimit_clause opas_clause insert : INSERT INTO ID VALUES inservalue_list inservalue_list : '(' non_mvalue_list ')' ',' inservalue_list\n | '(' non_mvalue_list ')' delete : DELETE FROM ID opwhere_clause update : UPDATE ID SET relattr EQ relattr_or_value opwhere_clause train : TRAIN non_mselect_clause opas_clause register : REGISTER non_mselect_clause opas_clause load : LOAD non_mselect_clause opas_clause save : SAVE OVERWRITE TABLE opas_clause connect : CONNECT JDBC opwhere_clause set : SET non_mselect_clause opas_clause non_mattrtype_list : attrtype ',' non_mattrtype_list\n | attrtype 
attrtype : ID type\n | ID type '(' NUMBER ')'\n | PRIMARY KEY '(' ID ')' type : INT\n | CHAR non_mselect_clause : non_mrelattr_list\n | '*' non_mrelattr_list : relattr ',' non_mrelattr_list\n | relattr relattr : ID '.' ID\n | ID non_mrelation_list : relation ',' non_mrelation_list\n | relation relation : ID opwhere_clause : WHERE non_mcond_list\n | nothing oplimit_clause : LIMIT value\n | nothing opas_clause : AS ID\n | nothing non_mcond_list : non_mcond_list AND non_mcond_list\n | non_mcond_list OR non_mcond_list\n | '(' non_mcond_list ')'\n | condition condition : relattr op relattr_or_value\n | relattr EQ null_value\n | relattr NE null_value relattr_or_value : relattr\n | value non_mvalue_list : value ',' non_mvalue_list\n | value\n | null_value ',' non_mvalue_list\n | null_value value : STRING value : NUMBER null_value : NULL op : LT\n | LE\n | GT\n | GE\n | EQ\n | NE nothing : "
_lr_action_items = {';': (
[0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22,
23, 24, 25, 26, 27, 44, 52, 64, 65, 66, 67, 71, 72, 73, 74, 76, 77, 81, 91,
93,
95, 96, 97, 98, 99, 100, 102, 110, 111, 114, 115, 116, 118, 120, 121, 122,
124,
131, 133, 134, 138, 139, 152, 153, 154, 155, 158, 159, 160, 161, 162, 163,
165,
169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 184, 185, 186,
189,
195, 196, 197, ],
[-104, 46, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16,
-17,
-18, -19, -20, -21, -22, -23, -24, -25, -26, -27, -47, -28, -67, -68, -70,
-72,
-104, -104, -104, -104, -104, -46, -44, -104, -59, -81, -54, -55, -56,
-104,
-58, -77, -74, -75, -104, -69, -71, -52, -80, -57, -76, -85, -29, -40, -41,
-104, -49, -62, -65, -66, -42, -43, -45, -30, -73, -31, -104, -79, -95,
-96,
-97, -89, -104, -90, -82, -83, -84, -86, -87, -88, -48, -78, -51, -53, -63,
-64, -50, ]), '$end': (
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21,
22, 23, 24, 25, 26, 27, 44, 46, 52, 64, 65, 66, 67, 71, 72, 73, 74, 76, 77,
81,
91, 93, 95, 96, 97, 98, 99, 100, 102, 110, 111, 114, 115, 116, 118, 120,
121,
122, 124, 131, 133, 134, 138, 139, 152, 153, 154, 155, 158, 159, 160, 161,
162,
163, 165, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 184,
185,
186, 189, 195, 196, 197, ],
[-104, 0, -1, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16,
-17, -18, -19, -20, -21, -22, -23, -24, -25, -26, -27, -47, -2, -28, -67,
-68,
-70, -72, -104, -104, -104, -104, -104, -46, -44, -104, -59, -81, -54, -55,
-56, -104, -58, -77, -74, -75, -104, -69, -71, -52, -80, -57, -76, -85,
-29,
-40, -41, -104, -49, -62, -65, -66, -42, -43, -45, -30, -73, -31, -104,
-79,
-95, -96, -97, -89, -104, -90, -82, -83, -84, -86, -87, -88, -48, -78, -51,
-53, -63, -64, -50, ]), 'CREATE': ([0, ], [28, ]),
'DROP': ([0, 83, ], [29, 108, ]), 'SHOW': ([0, ], [30, ]),
'ALERT': ([0, ], [31, ]), 'GRANT': ([0, ], [32, ]),
'REVOKE': ([0, ], [33, ]),
'SELECT': ([0, 32, 33, 85, ], [34, 56, 56, 56, ]),
'INSERT': ([0, 32, 33, 85, ], [35, 58, 58, 58, ]),
'DELETE': ([0, 32, 33, 85, ], [36, 59, 59, 59, ]),
'UPDATE': ([0, 32, 33, 85, ], [37, 57, 57, 57, ]),
'TRAIN': ([0, ], [39, ]), 'REGISTER': ([0, ], [40, ]),
'LOAD': ([0, ], [41, ]), 'SAVE': ([0, ], [42, ]),
'CONNECT': ([0, ], [43, ]), 'SET': ([0, 70, ], [38, 92, ]),
'EXIT': ([0, ], [44, ]),
'PRINT': ([0, 32, 33, 85, ], [45, 60, 60, 60, ]),
'TABLE': ([28, 29, 31, 75, ], [47, 50, 53, 99, ]),
'INDEX': ([28, 29, ], [48, 51, ]), 'USER': ([28, ], [49, ]),
'TABLES': ([30, ], [52, ]),
'ALL': ([32, 33, 85, ], [61, 61, 61, ]),
'*': ([34, 38, 39, 40, 41, ], [65, 65, 65, 65, 65, ]),
'ID': (
[34, 37, 38, 39, 40, 41, 45, 47, 48, 49, 50, 51, 53, 68, 69,
84, 86, 87, 88, 89, 92, 94, 101, 103, 104, 106, 107, 108,
123, 135, 136, 137, 141, 142, 143, 145, 146, 147, 148, 149,
150, 151, 156, 183, ],
[67, 70, 67, 67, 67, 67, 77, 78, 79, 80, 81, 82, 83, 90, 91,
111, 111, 111, 67, 116, 67, 120, 67, 126, 130, 132, 126,
111, 67, 111, 111, 111, 67, 67, 67, 67, -102, -103, -98,
-99, -100, -101, 126, 191, ]), 'INTO': ([35, ], [68, ]),
'FROM': (
[36, 63, 64, 65, 66, 67, 110, 111, 113, 115, 116, 161, ],
[69, 87, -67, -68, -70, -72, -74, -75, 137, -69, -71,
-73, ]), 'OVERWRITE': ([42, ], [75, ]),
'JDBC': ([43, ], [76, ]), 'ON': (
[54, 55, 56, 57, 58, 59, 60, 61, 62, 112, ],
[84, -33, -34, -35, -36, -37, -38, -39, 86, -32, ]), ',': (
[54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 67, 110, 111, 112, 116, 128,
152,
153, 154, 167, 168, 169, 170, 171, 186, 195, 196, ],
[85, -33, -34, -35, -36, -37, -38, -39, 85, 88, -72, 136, -75, -32, -71,
156, -62, -65, -66, 187, 188, -95, -96, -97, 192, -63, -64, ]), 'AS': (
[64, 65, 66, 67, 71, 72, 73, 74, 99, 102, 110, 111, 114, 115, 116, 122,
124,
138, 161, 163, 165, 169, 170, 171, 172, 174, 175, 176, 177, 178, 179,
180,
185, ],
[-67, -68, -70, -72, 94, 94, 94, 94, 94, -77, -74, -75, -104, -69, -71,
-76,
-85, -104, -73, 94, -79, -95, -96, -97, -89, -90, -82, -83, -84, -86,
-87,
-88, -78, ]), '.': ([67, ], [89, ]),
'EQ': ([67, 116, 119, 125, ], [-72, -71, 141, 146, ]),
'NE': ([67, 116, 125, ], [-72, -71, 147, ]),
'LT': ([67, 116, 125, ], [-72, -71, 148, ]),
'LE': ([67, 116, 125, ], [-72, -71, 149, ]),
'GT': ([67, 116, 125, ], [-72, -71, 150, ]),
'GE': ([67, 116, 125, ], [-72, -71, 151, ]), 'WHERE': (
[67, 76, 91, 110, 111, 114, 116, 161, 169, 170, 172, 173, 174, ],
[-72, 101, 101, -74, -75, 101, -71, -73, -95, -96, -89, 101, -90, ]),
'AND': (
[67, 116, 122, 124, 144, 169, 170, 171, 172, 174, 175, 176,
177, 178, 179, 180, ],
[-72, -71, 142, -85, 142, -95, -96, -97, -89, -90, -82, 142,
-84, -86, -87, -88, ]), 'OR': (
[67, 116, 122, 124, 144, 169, 170, 171, 172, 174, 175, 176, 177, 178,
179,
180, ],
[-72, -71, 143, -85, 143, -95, -96, -97, -89, -90, -82, -83, -84, -86,
-87,
-88, ]), 'LIMIT': (
[67, 102, 110, 111, 114, 116, 122, 124, 138, 161, 169, 170, 171, 172,
174,
175, 176, 177, 178, 179, 180, ],
[-72, -77, -74, -75, -104, -71, -76, -85, 164, -73, -95, -96, -97, -89,
-90,
-82, -83, -84, -86, -87, -88, ]), ')': (
[67, 116, 124, 127, 128, 130, 132, 144, 152, 153, 154, 166, 167, 168,
169,
170, 171, 172, 174, 175, 176, 177, 178, 179, 180, 182, 190, 191, 193,
194,
195, 196, ],
[-72, -71, -85, 155, -61, 158, 159, 177, -62, -65, -66, 186, -92, -94,
-95,
-96, -97, -89, -90, -82, -83, -84, -86, -87, -88, -60, 195, 196, -91,
-93,
-63, -64, ]), '(': (
[78, 79, 82, 101, 117, 123, 142, 143, 152, 153, 154, 157, 192, ],
[103, 104, 106, 123, 140, 123, 123, 123, 181, -65, -66, 183, 140, ]),
'PASSWORD': ([80, ], [105, ]), 'ADD': ([83, ], [107, ]),
'VALUES': ([90, ], [117, ]),
'PRIMARY': ([103, 107, 156, ], [129, 129, 129, ]),
'STRING': (
[105, 140, 141, 145, 146, 147, 148, 149, 150, 151, 164, 187,
188, ],
[131, 169, 169, 169, -102, -103, -98, -99, -100, -101, 169,
169, 169, ]),
'TO': ([109, 110, 111, 161, ], [135, -74, -75, -73, ]),
'INT': ([126, ], [153, ]), 'CHAR': ([126, ], [154, ]),
'KEY': ([129, ], [157, ]), 'NUMBER': (
[140, 141, 145, 146, 147, 148, 149, 150, 151, 164, 181, 187, 188, ],
[170, 170, 170, -102, -103, -98, -99, -100, -101, 170, 190, 170,
170, ]),
'NULL': (
[140, 146, 147, 187, 188, ], [171, 171, 171, 171, 171, ]), }
# Expand the compact (state-list, symbol-list) pairs emitted by PLY into
# the nested _lr_action[state][token] = action lookup used by the parser.
# This file is machine-generated ("Do not edit"): keep the code as-is.
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if not _x in _lr_action: _lr_action[_x] = {}
        _lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'start': ([0, ], [1, ]), 'command': ([0, ], [2, ]),
'ddl': ([0, ], [3, ]), 'dml': ([0, ], [4, ]),
'utility': ([0, ], [5, ]), 'nothing': (
[0, 71, 72, 73, 74, 76, 91, 99, 114, 138, 163, 173, ],
[6, 95, 95, 95, 95, 102, 102, 95, 102, 165, 95, 102, ]),
'createtable': ([0, ], [7, ]), 'createindex': ([0, ], [8, ]),
'droptable': ([0, ], [9, ]), 'dropindex': ([0, ], [10, ]),
'showtables': ([0, ], [11, ]), 'alerttable': ([0, ], [12, ]),
'createuser': ([0, ], [13, ]), 'grantuser': ([0, ], [14, ]),
'revokeuser': ([0, ], [15, ]), 'query': ([0, ], [16, ]),
'insert': ([0, ], [17, ]), 'delete': ([0, ], [18, ]),
'update': ([0, ], [19, ]), 'train': ([0, ], [20, ]),
'register': ([0, ], [21, ]), 'load': ([0, ], [22, ]),
'save': ([0, ], [23, ]), 'connect': ([0, ], [24, ]),
'set': ([0, ], [25, ]), 'exit': ([0, ], [26, ]),
'print': ([0, ], [27, ]),
'power_list': ([32, 33, ], [54, 62, ]),
'power_type': ([32, 33, 85, ], [55, 55, 112, ]),
'non_mselect_clause': (
[34, 38, 39, 40, 41, ], [63, 71, 72, 73, 74, ]),
'non_mrelattr_list': (
[34, 38, 39, 40, 41, 88, ], [64, 64, 64, 64, 64, 115, ]),
'relattr': (
[34, 38, 39, 40, 41, 88, 92, 101, 123, 141, 142, 143,
145, ],
[66, 66, 66, 66, 66, 66, 119, 125, 125, 172, 125, 125,
172, ]), 'opas_clause': (
[71, 72, 73, 74, 99, 163, ], [93, 96, 97, 98, 121, 184, ]),
'opwhere_clause': (
[76, 91, 114, 173, ], [100, 118, 138, 189, ]),
'non_mrelation_list': ([84, 86, 87, 108, 135, 136, 137, ],
[109, 113, 114, 134, 160, 161, 162, ]),
'relation': ([84, 86, 87, 108, 135, 136, 137, ],
[110, 110, 110, 110, 110, 110, 110, ]),
'non_mcond_list': (
[101, 123, 142, 143, ], [122, 144, 175, 176, ]),
'condition': ([101, 123, 142, 143, ], [124, 124, 124, 124, ]),
'non_mattrtype_list': ([103, 156, ], [127, 182, ]),
'attrtype': ([103, 107, 156, ], [128, 133, 128, ]),
'inservalue_list': ([117, 192, ], [139, 197, ]),
'op': ([125, ], [145, ]), 'type': ([126, ], [152, ]),
'oplimit_clause': ([138, ], [163, ]),
'non_mvalue_list': ([140, 187, 188, ], [166, 193, 194, ]),
'value': ([140, 141, 145, 164, 187, 188, ],
[167, 174, 174, 185, 167, 167, ]), 'null_value': (
[140, 146, 147, 187, 188, ], [168, 179, 180, 168, 168, ]),
'relattr_or_value': ([141, 145, ], [173, 178, ]), }
# Same expansion for the GOTO table: _lr_goto[state][nonterminal] = state.
# Machine-generated by PLY -- regenerate rather than hand-edit.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if not _x in _lr_goto: _lr_goto[_x] = {}
        _lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> start", "S'", 1, None, None, None),
('start -> command', 'start', 1, 'p_start', 'parser.py', 18),
('start -> command ;', 'start', 2, 'p_start', 'parser.py', 19),
('command -> ddl', 'command', 1, 'p_command', 'parser.py', 23),
('command -> dml', 'command', 1, 'p_command', 'parser.py', 24),
('command -> utility', 'command', 1, 'p_command', 'parser.py', 25),
('command -> nothing', 'command', 1, 'p_command', 'parser.py', 26),
('ddl -> createtable', 'ddl', 1, 'p_ddl', 'parser.py', 31),
('ddl -> createindex', 'ddl', 1, 'p_ddl', 'parser.py', 32),
('ddl -> droptable', 'ddl', 1, 'p_ddl', 'parser.py', 33),
('ddl -> dropindex', 'ddl', 1, 'p_ddl', 'parser.py', 34),
('ddl -> showtables', 'ddl', 1, 'p_ddl', 'parser.py', 35),
('ddl -> alerttable', 'ddl', 1, 'p_ddl', 'parser.py', 36),
('ddl -> createuser', 'ddl', 1, 'p_ddl', 'parser.py', 37),
('ddl -> grantuser', 'ddl', 1, 'p_ddl', 'parser.py', 38),
('ddl -> revokeuser', 'ddl', 1, 'p_ddl', 'parser.py', 39),
('dml -> query', 'dml', 1, 'p_dml', 'parser.py', 44),
('dml -> insert', 'dml', 1, 'p_dml', 'parser.py', 45),
('dml -> delete', 'dml', 1, 'p_dml', 'parser.py', 46),
('dml -> update', 'dml', 1, 'p_dml', 'parser.py', 47),
('dml -> train', 'dml', 1, 'p_dml', 'parser.py', 48),
('dml -> register', 'dml', 1, 'p_dml', 'parser.py', 49),
('dml -> load', 'dml', 1, 'p_dml', 'parser.py', 50),
('dml -> save', 'dml', 1, 'p_dml', 'parser.py', 51),
('dml -> connect', 'dml', 1, 'p_dml', 'parser.py', 52),
('dml -> set', 'dml', 1, 'p_dml', 'parser.py', 53),
('utility -> exit', 'utility', 1, 'p_utility', 'parser.py', 58),
('utility -> print', 'utility', 1, 'p_utility', 'parser.py', 59),
('showtables -> SHOW TABLES', 'showtables', 2, 'p_showtables', 'parser.py',
64),
('createuser -> CREATE USER ID PASSWORD STRING', 'createuser', 5,
'p_createuser', 'parser.py', 69),
(
'grantuser -> GRANT power_list ON non_mrelation_list TO non_mrelation_list',
'grantuser', 6, 'p_grantuser', 'parser.py', 74),
(
'revokeuser -> REVOKE power_list ON non_mrelation_list FROM non_mrelation_list',
'revokeuser', 6, 'p_revokeuser', 'parser.py', 79),
('power_list -> power_list , power_type', 'power_list', 3, 'p_power_list',
'parser.py', 84),
('power_list -> power_type', 'power_list', 1, 'p_power_list', 'parser.py',
85),
('power_type -> SELECT', 'power_type', 1, 'p_power_type', 'parser.py', 93),
('power_type -> UPDATE', 'power_type', 1, 'p_power_type', 'parser.py', 94),
('power_type -> INSERT', 'power_type', 1, 'p_power_type', 'parser.py', 95),
('power_type -> DELETE', 'power_type', 1, 'p_power_type', 'parser.py', 96),
('power_type -> PRINT', 'power_type', 1, 'p_power_type', 'parser.py', 97),
('power_type -> ALL', 'power_type', 1, 'p_power_type', 'parser.py', 98),
('alerttable -> ALERT TABLE ID ADD attrtype', 'alerttable', 5,
'p_alerttable', 'parser.py', 104),
('alerttable -> ALERT TABLE ID DROP non_mrelation_list', 'alerttable', 5,
'p_alerttable', 'parser.py', 105),
('createtable -> CREATE TABLE ID ( non_mattrtype_list )', 'createtable', 6,
'p_createtable', 'parser.py', 113),
('createindex -> CREATE INDEX ID ( ID )', 'createindex', 6, 'p_createindex',
'parser.py', 118),
('droptable -> DROP TABLE ID', 'droptable', 3, 'p_droptable', 'parser.py',
123),
('dropindex -> DROP INDEX ID ( ID )', 'dropindex', 6, 'p_dropindex',
'parser.py', 128),
('print -> PRINT ID', 'print', 2, 'p_print', 'parser.py', 133),
('exit -> EXIT', 'exit', 1, 'p_exit', 'parser.py', 138),
(
'query -> SELECT non_mselect_clause FROM non_mrelation_list opwhere_clause oplimit_clause opas_clause',
'query', 7, 'p_query', 'parser.py', 143),
('insert -> INSERT INTO ID VALUES inservalue_list', 'insert', 5, 'p_insert',
'parser.py', 148),
('inservalue_list -> ( non_mvalue_list ) , inservalue_list',
'inservalue_list', 5, 'p_inservalue_list', 'parser.py', 153),
('inservalue_list -> ( non_mvalue_list )', 'inservalue_list', 3,
'p_inservalue_list', 'parser.py', 154),
('delete -> DELETE FROM ID opwhere_clause', 'delete', 4, 'p_delete',
'parser.py', 162),
('update -> UPDATE ID SET relattr EQ relattr_or_value opwhere_clause',
'update', 7, 'p_update', 'parser.py', 167),
('train -> TRAIN non_mselect_clause opas_clause', 'train', 3, 'p_train',
'parser.py', 172),
('register -> REGISTER non_mselect_clause opas_clause', 'register', 3,
'p_register', 'parser.py', 177),
('load -> LOAD non_mselect_clause opas_clause', 'load', 3, 'p_load',
'parser.py', 182),
('save -> SAVE OVERWRITE TABLE opas_clause', 'save', 4, 'p_save',
'parser.py', 187),
('connect -> CONNECT JDBC opwhere_clause', 'connect', 3, 'p_connect',
'parser.py', 192),
(
'set -> SET non_mselect_clause opas_clause', 'set', 3, 'p_set',
'parser.py',
197),
(
'non_mattrtype_list -> attrtype , non_mattrtype_list',
'non_mattrtype_list',
3, 'p_non_mattrtype_list', 'parser.py', 202),
('non_mattrtype_list -> attrtype', 'non_mattrtype_list', 1,
'p_non_mattrtype_list', 'parser.py', 203),
('attrtype -> ID type', 'attrtype', 2, 'p_attrtype', 'parser.py', 211),
('attrtype -> ID type ( NUMBER )', 'attrtype', 5, 'p_attrtype', 'parser.py',
212),
('attrtype -> PRIMARY KEY ( ID )', 'attrtype', 5, 'p_attrtype', 'parser.py',
213),
('type -> INT', 'type', 1, 'p_type', 'parser.py', 223),
('type -> CHAR', 'type', 1, 'p_type', 'parser.py', 224),
('non_mselect_clause -> non_mrelattr_list', 'non_mselect_clause', 1,
'p_non_mselect_clause', 'parser.py', 229),
('non_mselect_clause -> *', 'non_mselect_clause', 1, 'p_non_mselect_clause',
'parser.py', 230),
('non_mrelattr_list -> relattr , non_mrelattr_list', 'non_mrelattr_list', 3,
'p_non_mrelattr_list', 'parser.py', 235),
('non_mrelattr_list -> relattr', 'non_mrelattr_list', 1,
'p_non_mrelattr_list', 'parser.py', 236),
('relattr -> ID . ID', 'relattr', 3, 'p_relattr', 'parser.py', 244),
('relattr -> ID', 'relattr', 1, 'p_relattr', 'parser.py', 245),
(
'non_mrelation_list -> relation , non_mrelation_list',
'non_mrelation_list',
3, 'p_non_mrelation_list', 'parser.py', 253),
('non_mrelation_list -> relation', 'non_mrelation_list', 1,
'p_non_mrelation_list', 'parser.py', 254),
('relation -> ID', 'relation', 1, 'p_relation', 'parser.py', 262),
('opwhere_clause -> WHERE non_mcond_list', 'opwhere_clause', 2,
'p_opwhere_clause', 'parser.py', 267),
('opwhere_clause -> nothing', 'opwhere_clause', 1, 'p_opwhere_clause',
'parser.py', 268),
('oplimit_clause -> LIMIT value', 'oplimit_clause', 2, 'p_oplimit_clause',
'parser.py', 274),
('oplimit_clause -> nothing', 'oplimit_clause', 1, 'p_oplimit_clause',
'parser.py', 275),
('opas_clause -> AS ID', 'opas_clause', 2, 'p_opas_clause', 'parser.py',
281),
('opas_clause -> nothing', 'opas_clause', 1, 'p_opas_clause', 'parser.py',
282),
('non_mcond_list -> non_mcond_list AND non_mcond_list', 'non_mcond_list', 3,
'p_non_mcond_list', 'parser.py', 288),
('non_mcond_list -> non_mcond_list OR non_mcond_list', 'non_mcond_list', 3,
'p_non_mcond_list', 'parser.py', 289),
('non_mcond_list -> ( non_mcond_list )', 'non_mcond_list', 3,
'p_non_mcond_list', 'parser.py', 290),
('non_mcond_list -> condition', 'non_mcond_list', 1, 'p_non_mcond_list',
'parser.py', 291),
('condition -> relattr op relattr_or_value', 'condition', 3, 'p_condition',
'parser.py', 301),
('condition -> relattr EQ null_value', 'condition', 3, 'p_condition',
'parser.py', 302),
('condition -> relattr NE null_value', 'condition', 3, 'p_condition',
'parser.py', 303),
('relattr_or_value -> relattr', 'relattr_or_value', 1, 'p_relattr_or_value',
'parser.py', 308),
('relattr_or_value -> value', 'relattr_or_value', 1, 'p_relattr_or_value',
'parser.py', 309),
('non_mvalue_list -> value , non_mvalue_list', 'non_mvalue_list', 3,
'p_non_mvalue_list', 'parser.py', 314),
('non_mvalue_list -> value', 'non_mvalue_list', 1, 'p_non_mvalue_list',
'parser.py', 315),
('non_mvalue_list -> null_value , non_mvalue_list', 'non_mvalue_list', 3,
'p_non_mvalue_list', 'parser.py', 316),
('non_mvalue_list -> null_value', 'non_mvalue_list', 1, 'p_non_mvalue_list',
'parser.py', 317),
('value -> STRING', 'value', 1, 'p_value_string', 'parser.py', 325),
('value -> NUMBER', 'value', 1, 'p_value_number', 'parser.py', 330),
('null_value -> NULL', 'null_value', 1, 'p_null_value', 'parser.py', 335),
('op -> LT', 'op', 1, 'p_op', 'parser.py', 340),
('op -> LE', 'op', 1, 'p_op', 'parser.py', 341),
('op -> GT', 'op', 1, 'p_op', 'parser.py', 342),
('op -> GE', 'op', 1, 'p_op', 'parser.py', 343),
('op -> EQ', 'op', 1, 'p_op', 'parser.py', 344),
('op -> NE', 'op', 1, 'p_op', 'parser.py', 345),
('nothing -> <empty>', 'nothing', 0, 'p_nothing', 'parser.py', 350),
]
|
import StellarMass
import numpy as n
from scipy.stats import norm
from scipy.integrate import quad
from scipy.interpolate import interp1d
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as p
import glob
import astropy.io.fits as fits
import os
import time
import numpy as n
import sys
import XrayLuminosity
xr = XrayLuminosity.XrayLuminosity()
import matplotlib.pyplot as p
# stellar mass function measured to the Ilbert function
def smf_ilbert13(M, M_star, phi_1s, alpha_1s, phi_2s, alpha_2s):
    """Double-Schechter stellar mass function of Ilbert et al. (2013).

    Replaces the previous lambda assignment (PEP 8 E731) with a proper,
    documented function; the formula is unchanged.

    Parameters
    ----------
    M : float or ndarray
        stellar mass [Msun]
    M_star : float
        characteristic mass at the knee [Msun]
    phi_1s, alpha_1s : float
        normalisation and slope of the first Schechter component
    phi_2s, alpha_2s : float
        normalisation and slope of the second Schechter component

    Returns
    -------
    float or ndarray : number density per dex [Mpc^-3 dex^-1]
    """
    ratio = M / M_star
    return (phi_1s * ratio ** alpha_1s + phi_2s * ratio ** alpha_2s) * n.e ** (-ratio) * ratio
# Ilbert et al. 2013 double-Schechter fit parameters, one row per redshift bin.
ll_dir = os.path.join(os.environ['DATA_DIR'], 'spm', 'literature')
path_ilbert13_SMF = os.path.join(ll_dir, "ilbert_2013_mass_function_params.txt")
# fix: reuse the path built above instead of rebuilding the same string inline
zmin, zmax, N, M_comp, M_star, phi_1s, alpha_1s, phi_2s, alpha_2s, log_rho_s = n.loadtxt(path_ilbert13_SMF, unpack=True)
# SMFs at the 0.2<z<0.5 (row 0) and 0.8<z<1.1 (row 2) bins; tabulated M_star is
# log10(Msun) and the phi normalisations are in units of 1e-3 Mpc^-3
smf01 = lambda mass : smf_ilbert13( mass , 10**M_star[0], phi_1s[0]*10**(-3), alpha_1s[0], phi_2s[0]*10**(-3), alpha_2s[0] )
smf08 = lambda mass : smf_ilbert13( mass , 10**M_star[2], phi_1s[2]*10**(-3), alpha_1s[2], phi_2s[2]*10**(-3), alpha_2s[2] )
# the AGN HGMF model
def plot_duty_cycle(env='MD04', volume=400.**3., file_type="hlist", aexp='0.74230', out_dir = os.path.join("../../data/")):
    """Overlay the tabulated AGN duty cycle for one simulation snapshot.

    Parameters
    ----------
    env : str
        simulation identifier used in the file name (e.g. 'MD04')
    volume : float
        box volume; accepted for interface compatibility, not used here
    file_type : str
        catalogue flavour in the file name ('hlist' or 'out')
    aexp : str
        expansion factor string in the file name
    out_dir : str
        directory containing the "<env>_<file_type>_<aexp>_duty_cycle.txt" table
    """
    # path for the input table
    path_to_duty_cycle = os.path.join(out_dir, env+"_"+file_type+"_"+aexp+"_duty_cycle.txt")
    # bug fix: unpack takes a bool; the previous string "True" only worked by
    # being truthy
    log_stellar_mass, duty_cycle = n.loadtxt(path_to_duty_cycle, unpack=True)
    p.plot(log_stellar_mass, duty_cycle, label=env+file_type+' a='+aexp)
def plot_SMF(env='MD04', volume=400.**3., file_type="hlist", aexp='0.74230', out_dir = os.path.join("../../data/")):
    """Read the tabulated stellar mass function for one snapshot and add it
    to the current matplotlib figure (volume is unused here)."""
    smf_path = os.path.join(out_dir, env + "_" + file_type + "_" + aexp + "_SMF.txt")
    logMs_low, logMs_up, counts, dN_dVdlogM = n.loadtxt(smf_path, unpack=True)
    bin_centres = (logMs_low + logMs_up) / 2.
    p.plot(bin_centres, n.log10(dN_dVdlogM), label=env + ' a=' + aexp)
# ---- Figure 1: stellar mass functions (literature vs. MultiDark) ----
logMs = n.arange(7,12.5,0.25)
p.figure(1, (6,6))
# Ilbert 2013
p.plot(logMs, n.log10(smf01(10**logMs)), label='Il13, 0.2<z<0.5', ls='dashed')
p.plot(logMs, n.log10(smf08(10**logMs)), label='Il13, 0.8<z<1.1', ls='dashed')
# MultiDark
#plot_SMF(env='MD04', volume=400.**3., file_type="hlist", aexp='0.74230', out_dir = os.path.join("../../data/"))
plot_SMF(env='MD04', volume=400.**3., file_type="out" , aexp='0.74230', out_dir = os.path.join("../../data/"))
#plot_SMF(env='MD10', volume=1000.**3., file_type="hlist", aexp='0.74980', out_dir = os.path.join("../../data/"))
plot_SMF(env='MD10', volume=1000.**3., file_type="out" , aexp='0.74980', out_dir = os.path.join("../../data/"))
#plot_SMF(env='MD25', volume=2500.**3., file_type="hlist", aexp='0.75440', out_dir = os.path.join("../../data/"))
plot_SMF(env='MD25', volume=2500.**3., file_type="out" , aexp='0.75440', out_dir = os.path.join("../../data/"))
# Bongiorno 2012
p.plot(logMs, n.array([n.log10(xr.Phi_stellar_mass(logMs_i, 0.3)) for logMs_i in logMs]) , label='BO13 z=0.3', ls='dotted')
p.plot(logMs, n.array([n.log10(xr.Phi_stellar_mass(logMs_i, 0.9)) for logMs_i in logMs]) , label='BO13 z=0.9', ls='dotted')
# NOTE(review): evaluated at z=1.8 but labelled 'BO13 z=2.0' -- confirm the
# intended redshift of this curve
p.plot(logMs, n.array([n.log10(xr.Phi_stellar_mass(logMs_i, 1.8)) for logMs_i in logMs]) , label='BO13 z=2.0', ls='dotted')
p.xlabel(r'$\log(M_*/M_\odot)$')
p.ylabel(r'$\log(\Phi(M_*) / [Mpc^3 dex])$')
p.xlim((7., 12.5))
p.ylim((-7,-1))
p.grid()
p.legend(loc=0, frameon=False)
p.savefig('/home/comparat/data/eRoMok/BO12_MO13_Il13_SMF.png')
p.clf()
# ---- Figure 2: AGN duty cycle for the three MultiDark boxes ----
p.figure(1, (6,6))
#plot_duty_cycle(env='MD04', volume=400.**3., file_type="hlist", aexp='0.74230', out_dir = os.path.join("../../data/"))
plot_duty_cycle(env='MD04', volume=400.**3., file_type="out" , aexp='0.74230', out_dir = os.path.join("../../data/"))
#plot_duty_cycle(env='MD10', volume=1000.**3., file_type="hlist", aexp='0.74980', out_dir = os.path.join("../../data/"))
plot_duty_cycle(env='MD10', volume=1000.**3., file_type="out" , aexp='0.74980', out_dir = os.path.join("../../data/"))
#plot_duty_cycle(env='MD25', volume=2500.**3., file_type="hlist", aexp='0.75440', out_dir = os.path.join("../../data/"))
plot_duty_cycle(env='MD25', volume=2500.**3., file_type="out" , aexp='0.75440', out_dir = os.path.join("../../data/"))
# vertical guides at selected stellar masses (meaning not stated here --
# presumably completeness / regime boundaries; confirm with the author)
p.axvline(7.2, c='k' , ls='dashed')
p.axvline(9.7, c='k' , ls='dashed')
p.axvline(11.3, c='k', ls='dashed')
p.ylabel('active fraction [%]')
p.xlabel(r'$\log(M_*/M_\odot)$')
p.xlim((6.5,12.2))
p.yscale('log')
p.ylim((0.005, .9))
p.grid()
p.legend(loc=0, frameon=False)
p.savefig('/home/comparat/data/eRoMok/BO12_duty_cycle.png')
p.clf()
|
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# Copyright (c) 2016-2021 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/pathos/blob/master/LICENSE
"""
minimal interface to python's multiprocessing module
Notes:
This module has been deprecated in favor of ``pathos.pools``.
"""
from pathos.multiprocessing import ProcessPool, __STATE
from pathos.threading import ThreadPool #XXX: thread __STATE not imported
from pathos.helpers import cpu_count
# module-level default pools shared by every mp_map call; built with the
# default worker count at import time
mp = ProcessPool()
tp = ThreadPool()
__all__ = ['mp_map']
# backward compatibility
#FIXME: deprecated... and buggy! (fails to dill on imap/uimap)
def mp_map(function, sequence, *args, **kwds):
    '''extend python's parallel map function to multiprocessing

    Inputs:
        function  -- target function
        sequence  -- sequence to process in parallel

    Additional Inputs:
        nproc  -- number of 'local' cpus to use  [default = 'autodetect']
        type  -- processing type ['blocking', 'non-blocking', 'unordered']
        threads  -- if True, use threading instead of multiprocessing
    '''
    # NOTE(review): `processes` is computed but never handed to the pools,
    # which are created at module level -- kept as-is since this wrapper is
    # deprecated (see the FIXME above); nproc must still be consumed.
    processes = kwds.pop('nproc', None)
    # provide a default that is not a function call (fix: `is None`, not `== None`)
    if processes is None:
        processes = cpu_count()
    proctype = kwds.pop('type', 'blocking')
    threads = kwds.pop('threads', False)
    # remove all the junk kwds that are added due to poor design!
    for junk in ('nnodes', 'nodes', 'launcher', 'mapper', 'queue',
                 'timelimit', 'scheduler', 'ncpus', 'servers'):
        kwds.pop(junk, None)
    # pick the threading or multiprocessing pool once
    pool = tp if threads else mp
    if proctype in ['blocking']:
        return pool.map(function, sequence, *args, **kwds)
    elif proctype in ['unordered']:
        return pool.uimap(function, sequence, *args, **kwds)
    elif proctype in ['non-blocking', 'ordered']:
        return pool.imap(function, sequence, *args, **kwds)
    # default: unknown proctype falls back to a blocking map
    return pool.map(function, sequence, *args, **kwds)
if __name__ == '__main__':
    pass  # library module: nothing to run directly
|
import os
import unittest
from rdflib import URIRef, Graph
from rdflib.namespace import OWL, RDFS, RDF
from linkml import METAMODEL_CONTEXT_URI
from linkml.generators.jsonldcontextgen import ContextGenerator
from linkml.generators.jsonschemagen import JsonSchemaGenerator
from linkml.generators.owlgen import OwlSchemaGenerator
from linkml.generators.rdfgen import RDFGenerator
from linkml.generators.yamlgen import YAMLGenerator
from tests.utils.compare_rdf import compare_rdf
from tests.utils.test_environment import TestEnvironmentTestCase
from tests.test_issues.environment import env
class IssueInheritMetaslotsTestCase(TestEnvironmentTestCase):
    """
    Tests: https://github.com/linkml/linkml/issues/270
    """
    env = env

    def test_metaslot_inheritance(self):
        """Slot usage C2_s1 must inherit metaslot values from its parent slot s1."""
        name = 'linkml_issue_270'
        generator = YAMLGenerator(env.input_path(f'{name}.yaml'))
        schema = generator.schema
        # dump every slot for debugging context on failure
        for slot_name, slot in schema.slots.items():
            print(f'{slot_name} name={slot.name} alias={slot.alias} {slot}')
        parent = schema.slots['s1']
        child = schema.slots['C2_s1']
        self.assertEqual(child.alias, parent.name)
        self.assertEqual(child.owner, 'C2')
        inherited = ['description', 'comments', 'todos', 'pattern', 'recommended', 'slot_uri']
        for attr in inherited:
            self.assertEqual(getattr(parent, attr), getattr(child, attr))
        print(generator.serialize())
if __name__ == '__main__':
    # allow running this test module directly: python <module>.py
    unittest.main()
|
from __future__ import absolute_import
import datetime
from datetime import timedelta
from django.utils import timezone
from freezegun import freeze_time
from parsimonious.exceptions import IncompleteParseError
from sentry.api.event_search import (
convert_endpoint_params, event_search_grammar, get_snuba_query_args,
parse_search_query, InvalidSearchQuery, SearchBoolean, SearchFilter, SearchKey,
SearchValue, SearchVisitor,
)
from sentry.testutils import TestCase
class ParseSearchQueryTest(TestCase):
    """Tests for `parse_search_query`: raw query string -> ordered list of
    SearchFilter(key, operator, value) terms produced by the event-search
    grammar. Free text not matching `key:value` syntax becomes `message`
    filters."""
    def test_simple(self):
        # test with raw search query at the end
        assert parse_search_query('user.email:foo@example.com release:1.2.1 hello') == [
            SearchFilter(
                key=SearchKey(name='user.email'),
                operator="=",
                value=SearchValue(raw_value='foo@example.com'),
            ),
            SearchFilter(
                key=SearchKey(name='release'),
                operator="=",
                value=SearchValue(raw_value='1.2.1'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello'),
            )
        ]
        assert parse_search_query('hello user.email:foo@example.com release:1.2.1') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello'),
            ),
            SearchFilter(
                key=SearchKey(name='user.email'),
                operator="=",
                value=SearchValue(raw_value='foo@example.com'),
            ),
            SearchFilter(
                key=SearchKey(name='release'),
                operator="=",
                value=SearchValue(raw_value='1.2.1'),
            ),
        ]
    def test_raw_search_anywhere(self):
        # free text interleaved between key:value pairs becomes separate
        # 'message' filters, preserving query order
        assert parse_search_query('hello what user.email:foo@example.com where release:1.2.1 when') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello what'),
            ),
            SearchFilter(
                key=SearchKey(name='user.email'),
                operator="=",
                value=SearchValue(raw_value='foo@example.com'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='where'),
            ),
            SearchFilter(
                key=SearchKey(name='release'),
                operator="=",
                value=SearchValue(raw_value='1.2.1'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='when'),
            ),
        ]
        assert parse_search_query('hello') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello'),
            ),
        ]
        # surrounding whitespace is stripped from unquoted raw text
        assert parse_search_query(' hello ') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello'),
            ),
        ]
        assert parse_search_query(' hello there') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello there'),
            ),
        ]
        assert parse_search_query(' hello there:bye') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello'),
            ),
            SearchFilter(
                key=SearchKey(name='there'),
                operator='=',
                value=SearchValue(raw_value='bye'),
            ),
        ]
    def test_quoted_raw_search_anywhere(self):
        # quoted raw text keeps its inner spacing (and escaped quotes)
        assert parse_search_query('"hello there" user.email:foo@example.com "general kenobi"') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='hello there'),
            ),
            SearchFilter(
                key=SearchKey(name='user.email'),
                operator="=",
                value=SearchValue(raw_value='foo@example.com'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='general kenobi'),
            ),
        ]
        assert parse_search_query(' " hello " ') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value=' hello '),
            ),
        ]
        assert parse_search_query(' " he\\"llo " ') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value=' he"llo '),
            ),
        ]
    def test_timestamp(self):
        # test date format
        assert parse_search_query('timestamp>2015-05-18') == [
            SearchFilter(
                key=SearchKey(name='timestamp'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        0,
                        0,
                        tzinfo=timezone.utc),
                ),
            ),
        ]
        # test date time format
        assert parse_search_query('timestamp>2015-05-18T10:15:01') == [
            SearchFilter(
                key=SearchKey(name='timestamp'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        10,
                        15,
                        1,
                        tzinfo=timezone.utc),
                ),
            ),
        ]
        # test date time format w microseconds
        assert parse_search_query('timestamp>2015-05-18T10:15:01.103') == [
            SearchFilter(
                key=SearchKey(name='timestamp'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        10,
                        15,
                        1,
                        103000,
                        tzinfo=timezone.utc),
                ),
            ),
        ]
        # test date time format w microseconds and utc marker
        assert parse_search_query('timestamp:>2015-05-18T10:15:01.103Z') == [
            SearchFilter(
                key=SearchKey(name='timestamp'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        10,
                        15,
                        1,
                        103000,
                        tzinfo=timezone.utc),
                ),
            ),
        ]
    def test_other_dates(self):
        # test date format with other name
        assert parse_search_query('first_seen>2015-05-18') == [
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        0,
                        0,
                        tzinfo=timezone.utc,
                    ),
                ),
            ),
        ]
        # test colon format
        assert parse_search_query('first_seen:>2015-05-18') == [
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator=">",
                value=SearchValue(
                    raw_value=datetime.datetime(
                        2015,
                        5,
                        18,
                        0,
                        0,
                        tzinfo=timezone.utc,
                    ),
                ),
            ),
        ]
        # non-whitelisted keys keep the raw string, operator included
        assert parse_search_query('random:>2015-05-18') == [
            SearchFilter(
                key=SearchKey(name='random'),
                operator="=",
                value=SearchValue('>2015-05-18'),
            ),
        ]
    def test_rel_time_filter(self):
        # relative times: '+7d' means "older than 7 days" (first_seen <= now - 7d),
        # '-2w' means "within the last 2 weeks" (first_seen >= now - 14d)
        now = timezone.now()
        with freeze_time(now):
            assert parse_search_query('first_seen:+7d') == [
                SearchFilter(
                    key=SearchKey(name='first_seen'),
                    operator="<=",
                    value=SearchValue(
                        raw_value=now - timedelta(days=7),
                    ),
                ),
            ]
            assert parse_search_query('first_seen:-2w') == [
                SearchFilter(
                    key=SearchKey(name='first_seen'),
                    operator=">=",
                    value=SearchValue(
                        raw_value=now - timedelta(days=14),
                    ),
                ),
            ]
            assert parse_search_query('random:-2w') == [
                SearchFilter(
                    key=SearchKey(name='random'),
                    operator="=",
                    value=SearchValue('-2w'),
                ),
            ]
    def test_invalid_date_formats(self):
        invalid_queries = [
            'first_seen:hello',
            'first_seen:123',
            'first_seen:2018-01-01T00:01ZZ'
        ]
        for invalid_query in invalid_queries:
            # NOTE(review): unittest's assertRaises has no 'expected_regex'
            # kwarg (it is silently dropped on py2; assertRaisesRegex(p) is the
            # message-matching variant), so the message is not actually checked
            # here -- TODO confirm and switch.
            with self.assertRaises(
                InvalidSearchQuery,
                expected_regex='Invalid format for numeric search',
            ):
                parse_search_query(invalid_query)
    def test_specific_time_filter(self):
        # a bare date expands into a [day, next day) range
        assert parse_search_query('first_seen:2018-01-01') == [
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator=">=",
                value=SearchValue(
                    raw_value=datetime.datetime(2018, 1, 1, tzinfo=timezone.utc),
                ),
            ),
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator="<",
                value=SearchValue(
                    raw_value=datetime.datetime(2018, 1, 2, tzinfo=timezone.utc),
                ),
            ),
        ]
        # an exact datetime expands into a window around the value
        # (here 05:06:07 -> [05:01:07, 05:12:07), i.e. -5/+6 minutes)
        assert parse_search_query('first_seen:2018-01-01T05:06:07') == [
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator=">=",
                value=SearchValue(
                    raw_value=datetime.datetime(2018, 1, 1, 5, 1, 7, tzinfo=timezone.utc),
                ),
            ),
            SearchFilter(
                key=SearchKey(name='first_seen'),
                operator="<",
                value=SearchValue(
                    raw_value=datetime.datetime(2018, 1, 1, 5, 12, 7, tzinfo=timezone.utc),
                ),
            ),
        ]
        assert parse_search_query('random:2018-01-01T05:06:07') == [
            SearchFilter(
                key=SearchKey(name='random'),
                operator="=",
                value=SearchValue(raw_value='2018-01-01T05:06:07'),
            ),
        ]
    def test_quoted_val(self):
        assert parse_search_query('release:"a release"') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a release'),
            ),
        ]
        # leading '!' negates the filter
        assert parse_search_query('!release:"a release"') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='!=',
                value=SearchValue('a release'),
            ),
        ]
    def test_quoted_key(self):
        assert parse_search_query('"hi:there":value') == [
            SearchFilter(
                key=SearchKey(name='hi:there'),
                operator='=',
                value=SearchValue(raw_value='value'),
            ),
        ]
        assert parse_search_query('!"hi:there":value') == [
            SearchFilter(
                key=SearchKey(name='hi:there'),
                operator='!=',
                value=SearchValue(raw_value='value'),
            ),
        ]
    def test_newline_within_quote(self):
        assert parse_search_query('release:"a\nrelease"') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a\nrelease')
            ),
        ]
    def test_newline_outside_quote(self):
        with self.assertRaises(IncompleteParseError):
            parse_search_query('release:a\nrelease')
    def test_tab_within_quote(self):
        assert parse_search_query('release:"a\trelease"') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a\trelease')
            ),
        ]
    def test_tab_outside_quote(self):
        # tab outside quote
        assert parse_search_query('release:a\trelease') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='\trelease')
            ),
        ]
    def test_escaped_quotes(self):
        assert parse_search_query('release:"a\\"thing\\""') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a"thing"')
            ),
        ]
        assert parse_search_query('release:"a\\"\\"release"') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='a""release')
            ),
        ]
    def test_multiple_quotes(self):
        assert parse_search_query('device.family:"" browser.name:"Chrome"') == [
            SearchFilter(
                key=SearchKey(name='device.family'),
                operator='=',
                value=SearchValue(raw_value=''),
            ),
            SearchFilter(
                key=SearchKey(name='browser.name'),
                operator='=',
                value=SearchValue(raw_value='Chrome'),
            ),
        ]
        assert parse_search_query('device.family:"\\"" browser.name:"Chrome"') == [
            SearchFilter(
                key=SearchKey(name='device.family'),
                operator='=',
                value=SearchValue(raw_value='"'),
            ),
            SearchFilter(
                key=SearchKey(name='browser.name'),
                operator='=',
                value=SearchValue(raw_value='Chrome'),
            ),
        ]
    def test_sooo_many_quotes(self):
        assert parse_search_query('device.family:"\\"\\"\\"\\"\\"\\"\\"\\"\\"\\""') == [
            SearchFilter(
                key=SearchKey(name='device.family'),
                operator='=',
                value=SearchValue(raw_value='""""""""""'),
            ),
        ]
    def test_empty_filter_value(self):
        assert parse_search_query('device.family:""') == [
            SearchFilter(
                key=SearchKey(name='device.family'),
                operator='=',
                value=SearchValue(raw_value=''),
            ),
        ]
    def test_custom_tag(self):
        assert parse_search_query('fruit:apple release:1.2.1') == [
            SearchFilter(
                key=SearchKey(name='fruit'),
                operator='=',
                value=SearchValue(raw_value='apple'),
            ),
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(raw_value='1.2.1'),
            ),
        ]
    def test_has_tag(self):
        # 'has:key' translates to "key is not empty"
        # unquoted key
        assert parse_search_query('has:release') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='!=',
                value=SearchValue(raw_value=''),
            ),
        ]
        # quoted key
        assert parse_search_query('has:"hi:there"') == [
            SearchFilter(
                key=SearchKey(name='hi:there'),
                operator='!=',
                value=SearchValue(raw_value=''),
            ),
        ]
        # malformed key
        with self.assertRaises(InvalidSearchQuery):
            parse_search_query('has:"hi there"')
    def test_not_has_tag(self):
        # unquoted key
        assert parse_search_query('!has:release') == [
            SearchFilter(
                key=SearchKey(name='release'),
                operator='=',
                value=SearchValue(''),
            ),
        ]
        # quoted key
        assert parse_search_query('!has:"hi:there"') == [
            SearchFilter(
                key=SearchKey(name='hi:there'),
                operator='=',
                value=SearchValue(''),
            ),
        ]
    def test_is_query_unsupported(self):
        with self.assertRaises(InvalidSearchQuery):
            parse_search_query('is:unassigned')
    def test_key_remapping(self):
        # keys listed in SearchVisitor.key_mappings are rewritten to the
        # canonical (target) key name
        class RemapVisitor(SearchVisitor):
            key_mappings = {
                'target_value': ['someValue', 'legacy-value'],
            }
        tree = event_search_grammar.parse('someValue:123 legacy-value:456 normal_value:hello')
        assert RemapVisitor().visit(tree) == [
            SearchFilter(
                key=SearchKey(name='target_value'),
                operator='=',
                value=SearchValue('123'),
            ),
            SearchFilter(
                key=SearchKey(name='target_value'),
                operator='=',
                value=SearchValue('456'),
            ),
            SearchFilter(
                key=SearchKey(name='normal_value'),
                operator='=',
                value=SearchValue('hello'),
            ),
        ]
    def test_numeric_filter(self):
        # Numeric format should still return a string if field isn't whitelisted
        assert parse_search_query('random_field:>500') == [
            SearchFilter(
                key=SearchKey(name='random_field'),
                operator="=",
                value=SearchValue(raw_value='>500'),
            ),
        ]
    def test_quotes_filtered_on_raw(self):
        # Enclose the full raw query? Strip it.
        assert parse_search_query('thinger:unknown "what is this?"') == [
            SearchFilter(
                key=SearchKey(name='thinger'),
                operator='=',
                value=SearchValue(raw_value='unknown'),
            ),
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='what is this?'),
            ),
        ]
        # Enclose the full query? Strip it and the whole query is raw.
        assert parse_search_query('"thinger:unknown what is this?"') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='thinger:unknown what is this?'),
            ),
        ]
        # Allow a single quotation at end
        assert parse_search_query('end"') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='end"'),
            ),
        ]
        # Allow a single quotation at beginning
        assert parse_search_query('"beginning') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='"beginning'),
            ),
        ]
        # Allow a single quotation
        assert parse_search_query('"') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='"'),
            ),
        ]
        # Empty quotations become a dropped term
        assert parse_search_query('""') == []
        # Allow a search for space
        assert parse_search_query('" "') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value=' '),
            ),
        ]
        # Strip in a balanced manner
        assert parse_search_query('""woof"') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='woof"'),
            ),
        ]
        # Don't try this at home kids
        assert parse_search_query('"""""""""') == [
            SearchFilter(
                key=SearchKey(name='message'),
                operator='=',
                value=SearchValue(raw_value='"'),
            ),
        ]
    def _build_search_filter(self, key_name, operator, value):
        # helper: build a single SearchFilter for table-driven tests below
        return SearchFilter(
            key=SearchKey(name=key_name),
            operator=operator,
            value=SearchValue(raw_value=value),
        )
    def test_basic_fallthrough(self):
        # These should all fall through to basic equal searches, even though they
        # look like numeric, date, etc.
        queries = [
            ('random:<hello', self._build_search_filter('random', '=', '<hello')),
            ('random:<512.1.0', self._build_search_filter('random', '=', '<512.1.0')),
            ('random:2018-01-01', self._build_search_filter('random', '=', '2018-01-01')),
            ('random:+7d', self._build_search_filter('random', '=', '+7d')),
            ('random:>2018-01-01', self._build_search_filter('random', '=', '>2018-01-01')),
            ('random:2018-01-01', self._build_search_filter('random', '=', '2018-01-01')),
            ('random:hello', self._build_search_filter('random', '=', 'hello')),
            ('random:123', self._build_search_filter('random', '=', '123')),
        ]
        for query, expected in queries:
            assert parse_search_query(query) == [expected]
    def test_empty_string(self):
        # Empty quotations become a dropped term
        assert parse_search_query('') == []
class ParseBooleanSearchQueryTest(TestCase):
    """Tests for boolean AND/OR combinations in `parse_search_query`.
    The assertions below establish that AND binds tighter than OR and that
    chains of the same operator nest to the right."""
    def setUp(self):
        super(ParseBooleanSearchQueryTest, self).setUp()
        # shared single-filter terms reused across the boolean-combination tests
        self.term1 = SearchFilter(
            key=SearchKey(name='user.email'),
            operator="=",
            value=SearchValue(raw_value='foo@example.com'),
        )
        self.term2 = SearchFilter(
            key=SearchKey(name='user.email'),
            operator="=",
            value=SearchValue(raw_value='bar@example.com'),
        )
        self.term3 = SearchFilter(
            key=SearchKey(name='user.email'),
            operator="=",
            value=SearchValue(raw_value='foobar@example.com'),
        )
    def test_simple(self):
        assert parse_search_query(
            'user.email:foo@example.com OR user.email:bar@example.com'
        ) == [SearchBoolean(left_term=self.term1, operator="OR", right_term=self.term2)]
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com'
        ) == [SearchBoolean(left_term=self.term1, operator="AND", right_term=self.term2)]
    def test_single_term(self):
        assert parse_search_query('user.email:foo@example.com') == [self.term1]
    def test_order_of_operations(self):
        # AND has higher precedence than OR, regardless of position
        assert parse_search_query(
            'user.email:foo@example.com OR user.email:bar@example.com AND user.email:foobar@example.com'
        ) == [SearchBoolean(
            left_term=self.term1,
            operator='OR',
            right_term=SearchBoolean(
                left_term=self.term2,
                operator='AND',
                right_term=self.term3
            )
        )]
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com OR user.email:foobar@example.com'
        ) == [SearchBoolean(
            left_term=SearchBoolean(
                left_term=self.term1,
                operator='AND',
                right_term=self.term2,
            ),
            operator='OR',
            right_term=self.term3
        )]
    def test_multiple_statements(self):
        # chains of the same operator nest to the right
        assert parse_search_query(
            'user.email:foo@example.com OR user.email:bar@example.com OR user.email:foobar@example.com'
        ) == [SearchBoolean(
            left_term=self.term1,
            operator='OR',
            right_term=SearchBoolean(
                left_term=self.term2,
                operator='OR',
                right_term=self.term3
            )
        )]
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com AND user.email:foobar@example.com'
        ) == [SearchBoolean(
            left_term=self.term1,
            operator='AND',
            right_term=SearchBoolean(
                left_term=self.term2,
                operator='AND',
                right_term=self.term3
            )
        )]
        term4 = SearchFilter(
            key=SearchKey(name='user.email'),
            operator="=",
            value=SearchValue(raw_value='hello@example.com'),
        )
        # longer even number of terms
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com OR user.email:foobar@example.com AND user.email:hello@example.com'
        ) == [SearchBoolean(
            left_term=SearchBoolean(
                left_term=self.term1,
                operator='AND',
                right_term=self.term2
            ),
            operator='OR',
            right_term=SearchBoolean(
                left_term=self.term3,
                operator='AND',
                right_term=term4
            )
        )]
        term5 = SearchFilter(
            key=SearchKey(name='user.email'),
            operator="=",
            value=SearchValue(raw_value='hi@example.com'),
        )
        # longer odd number of terms
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com OR user.email:foobar@example.com AND user.email:hello@example.com AND user.email:hi@example.com'
        ) == [
            SearchBoolean(
                left_term=SearchBoolean(
                    left_term=self.term1,
                    operator='AND',
                    right_term=self.term2
                ),
                operator='OR',
                right_term=SearchBoolean(
                    left_term=self.term3,
                    operator='AND',
                    right_term=SearchBoolean(
                        left_term=term4,
                        operator='AND',
                        right_term=term5
                    )
                )
            )]
        # absurdly long
        assert parse_search_query(
            'user.email:foo@example.com AND user.email:bar@example.com OR user.email:foobar@example.com AND user.email:hello@example.com AND user.email:hi@example.com OR user.email:foo@example.com AND user.email:bar@example.com OR user.email:foobar@example.com AND user.email:hello@example.com AND user.email:hi@example.com'
        ) == [SearchBoolean(
            left_term=SearchBoolean(
                left_term=self.term1,
                operator='AND',
                right_term=self.term2),
            operator='OR',
            right_term=SearchBoolean(
                left_term=SearchBoolean(
                    left_term=self.term3,
                    operator='AND',
                    right_term=SearchBoolean(
                        left_term=term4,
                        operator='AND',
                        right_term=term5)),
                operator='OR',
                right_term=SearchBoolean(
                    left_term=SearchBoolean(
                        left_term=self.term1,
                        operator='AND',
                        right_term=self.term2),
                    operator='OR',
                    right_term=SearchBoolean(
                        left_term=self.term3,
                        operator='AND',
                        right_term=SearchBoolean(
                            left_term=term4,
                            operator='AND',
                            right_term=term5
                        )
                    )
                )
            )
        )]
class GetSnubaQueryArgsTest(TestCase):
    """Tests for `get_snuba_query_args`: translation of a parsed search query
    (plus endpoint params) into Snuba request kwargs -- `conditions`,
    `filter_keys`, and passthrough `start`/`end`."""
    def test_simple(self):
        assert get_snuba_query_args('user.email:foo@example.com release:1.2.1 fruit:apple hello', {
            'project_id': [1, 2, 3],
            'start': datetime.datetime(2015, 5, 18, 10, 15, 1, tzinfo=timezone.utc),
            'end': datetime.datetime(2015, 5, 19, 10, 15, 1, tzinfo=timezone.utc),
        }) == {
            'conditions': [
                ['email', '=', 'foo@example.com'],
                ['tags[sentry:release]', '=', '1.2.1'],
                [['ifNull', ['tags[fruit]', "''"]], '=', 'apple'],
                [['positionCaseInsensitive', ['message', "'hello'"]], '!=', 0],
            ],
            'filter_keys': {'project_id': [1, 2, 3]},
            'start': datetime.datetime(2015, 5, 18, 10, 15, 1, tzinfo=timezone.utc),
            'end': datetime.datetime(2015, 5, 19, 10, 15, 1, tzinfo=timezone.utc),
        }
    def test_negation(self):
        # negation on a nullable column matches NULL as well as "!= value"
        assert get_snuba_query_args('!user.email:foo@example.com') == {
            'conditions': [
                [
                    [['isNull', ['email']], '=', 1],
                    ['email', '!=', 'foo@example.com']
                ]
            ],
            'filter_keys': {},
        }
    def test_no_search(self):
        assert get_snuba_query_args(params={
            'project_id': [1, 2, 3],
            'start': datetime.datetime(2015, 5, 18, 10, 15, 1, tzinfo=timezone.utc),
            'end': datetime.datetime(2015, 5, 19, 10, 15, 1, tzinfo=timezone.utc),
        }) == {
            'conditions': [],
            'filter_keys': {'project_id': [1, 2, 3]},
            'start': datetime.datetime(2015, 5, 18, 10, 15, 1, tzinfo=timezone.utc),
            'end': datetime.datetime(2015, 5, 19, 10, 15, 1, tzinfo=timezone.utc),
        }
    def test_wildcard(self):
        # '*' wildcards become anchored case-insensitive regex matches
        assert get_snuba_query_args('release:3.1.* user.email:*@example.com') == {
            'conditions': [
                [['match', ['tags[sentry:release]', "'(?i)^3\\.1\\..*$'"]], '=', 1],
                [['match', ['email', "'(?i)^.*\\@example\\.com$'"]], '=', 1],
            ],
            'filter_keys': {},
        }
    def test_negated_wildcard(self):
        assert get_snuba_query_args('!release:3.1.* user.email:*@example.com') == {
            'conditions': [
                [
                    [['isNull', ['tags[sentry:release]']], '=', 1],
                    [['match', ['tags[sentry:release]', "'(?i)^3\\.1\\..*$'"]], '!=', 1]
                ],
                [['match', ['email', "'(?i)^.*\\@example\\.com$'"]], '=', 1]
            ],
            'filter_keys': {},
        }
    def test_escaped_wildcard(self):
        # a backslash-escaped '*' is matched literally, not as a wildcard
        assert get_snuba_query_args('release:3.1.\\* user.email:\\*@example.com') == {
            'conditions': [
                [['match', ['tags[sentry:release]', "'(?i)^3\\.1\\.\\*$'"]], '=', 1],
                [['match', ['email', "'(?i)^\*\\@example\\.com$'"]], '=', 1],
            ],
            'filter_keys': {},
        }
        assert get_snuba_query_args('release:\\\\\\*') == {
            'conditions': [
                [['match', ['tags[sentry:release]', "'(?i)^\\\\\\*$'"]], '=', 1],
            ],
            'filter_keys': {},
        }
        assert get_snuba_query_args('release:\\\\*') == {
            'conditions': [
                [['match', ['tags[sentry:release]', "'(?i)^\\\\.*$'"]], '=', 1],
            ],
            'filter_keys': {},
        }
    def test_has(self):
        assert get_snuba_query_args('has:release') == {
            'filter_keys': {},
            'conditions': [[['isNull', ['tags[sentry:release]']], '!=', 1]]
        }
    def test_not_has(self):
        assert get_snuba_query_args('!has:release') == {
            'filter_keys': {},
            'conditions': [[['isNull', ['tags[sentry:release]']], '=', 1]]
        }
    def test_message_negative(self):
        assert get_snuba_query_args('!message:"post_process.process_error HTTPError 403"') == {
            'filter_keys': {},
            'conditions': [[
                ['positionCaseInsensitive', ['message', "'post_process.process_error HTTPError 403'"]],
                '=',
                0,
            ]]
        }
class ConvertEndpointParamsTests(TestCase):
    """Tests that endpoint query params are converted into SearchFilter terms."""

    def test_simple(self):
        start = datetime.datetime(2015, 5, 18, 10, 15, 1, tzinfo=timezone.utc)
        end = datetime.datetime(2015, 5, 19, 10, 15, 1, tzinfo=timezone.utc)
        params = {
            'project_id': [1, 2, 3],
            'start': start,
            'end': end,
        }
        # Each param becomes an equality SearchFilter; order of the result is
        # start, project_id, end.
        expected = [
            SearchFilter(
                key=SearchKey(name='start'),
                operator='=',
                value=SearchValue(raw_value=start),
            ),
            SearchFilter(
                key=SearchKey(name='project_id'),
                operator='=',
                value=SearchValue(raw_value=[1, 2, 3]),
            ),
            SearchFilter(
                key=SearchKey(name='end'),
                operator='=',
                value=SearchValue(raw_value=end),
            ),
        ]
        assert convert_endpoint_params(params) == expected
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class Dct2d(nn.Module):
    """
    Blockwise 2D DCT (orthonormal DCT-II) over non-overlapping blocks,
    or half-overlapping blocks when interleaving is enabled.
    """
    def __init__(self, blocksize=8, interleaving=False):
        """
        Parameters:
        blocksize: int, size of the blocks for the discrete cosine transform
        interleaving: bool, if True, blocks overlap by half a blocksize
        """
        super().__init__()  # call super constructor
        self.blocksize = blocksize
        self.interleaving = interleaving
        # Overlapping blocks advance by half a block; otherwise blocks tile the image.
        if interleaving:
            self.stride = self.blocksize // 2
        else:
            self.stride = self.blocksize
        # Precompute the orthonormal DCT-II basis matrix A so that the 2D DCT
        # of a block X is A @ X @ A^T (and the inverse is A^T @ C @ A).
        A = np.zeros((blocksize, blocksize))
        for i in range(blocksize):
            c_i = 1 / np.sqrt(2) if i == 0 else 1.
            for n in range(blocksize):
                A[i, n] = np.sqrt(2 / blocksize) * c_i * np.cos((2 * n + 1) / (blocksize * 2) * i * np.pi)
        # Fixed (non-trainable) transform weights; a Parameter with
        # requires_grad=False still follows the module's .to(device/dtype).
        self.A = nn.Parameter(torch.tensor(A, dtype=torch.float32), requires_grad=False)
        self.unfold = torch.nn.Unfold(kernel_size=blocksize, padding=0, stride=self.stride)
        return

    def forward(self, x):
        """
        Performs the blockwise 2D DCT.
        Parameters:
        x: tensor of dimension (N, 1, h, w)
        Return:
        tensor of dimension (N, k, blocksize, blocksize)
        where the 2nd dimension indexes the block. Dimensions 3 and 4 are the block DCT coefficients.
        """
        (N, C, H, W) = x.shape
        assert (C == 1), "DCT is only implemented for a single channel"
        assert (H >= self.blocksize), "Input too small for blocksize"
        assert (W >= self.blocksize), "Input too small for blocksize"
        # Note: the actual requirement is divisibility by the *stride*
        # (== blocksize/2 when interleaving), not by the blocksize.
        assert (H % self.stride == 0) and (W % self.stride == 0), \
            "DCT is only implemented for dimensions divisible by the stride"
        # unfold to flattened blocks, shape (N, blocksize**2, k)
        x = self.unfold(x)
        (N, _, k) = x.shape
        # reshape to (N, k, blocksize, blocksize)
        x = x.view(-1, self.blocksize, self.blocksize, k).permute(0, 3, 1, 2)
        # perform the DCT on every block: A @ X @ A^T
        coeff = self.A.matmul(x).matmul(self.A.transpose(0, 1))
        return coeff

    def inverse(self, coeff, output_shape):
        """
        Performs the blockwise 2D inverse DCT.
        Parameters:
        coeff: tensor of dimension (N, k, blocksize, blocksize)
        where the 2nd dimension indexes the block. Dimensions 3 and 4 are the block DCT coefficients
        output_shape: (h, w) dimensions of the reconstructed image
        Return:
        tensor of dimension (N, 1, h, w)
        """
        if self.interleaving:
            raise Exception('Inverse block DCT is not implemented for interleaving blocks!')
        # inverse-transform each block: A^T @ C @ A
        x = self.A.transpose(0, 1).matmul(coeff).matmul(self.A)
        (N, k, _, _) = x.shape
        # back to the unfold layout (N, blocksize**2, k), then fold into the image
        x = x.permute(0, 2, 3, 1).view(-1, self.blocksize ** 2, k)
        x = F.fold(x, output_size=(output_shape[-2], output_shape[-1]), kernel_size=self.blocksize, padding=0, stride=self.blocksize)
        return x
|
# Generated by Django 3.1.1 on 2021-01-27 15:19
from django.db import migrations, models
from invoice.models import Invoice, Payment
def update_existing_payments_status(app, schema_editor):
    """
    Backfill Payment.status for payments created before the status field existed.

    Payments on CLOSED invoices become CONFIRMED. On open invoices, EFT
    payments become PENDING and payments via any other method become CONFIRMED.

    NOTE(review): this migration imports the *current* models instead of
    `app.get_model('invoice', ...)` -- presumably to reach the Status/Method
    enum classes, which are not available on historical models. It will break
    if the models diverge from this migration later; confirm this is intended.
    """
    for inv in Invoice.objects.all():
        invoice_closed = inv.status == Invoice.Status.CLOSED
        for pmt in inv.payment_set.all():
            if invoice_closed:
                print(f"Invoice closed, updating {pmt} to CONFIRMED.")
                pmt.status = Payment.Status.CONFIRMED
            elif pmt.method == Payment.Method.EFT:
                # EFT transfers still need bank confirmation
                print(f"Updating {pmt} to PENDING")
                pmt.status = Payment.Status.PENDING
            else:
                print(f"Updating {pmt} to CONFIRMED")
                pmt.status = Payment.Status.CONFIRMED
            pmt.save()
class Migration(migrations.Migration):
    """Data migration: set Payment.status on pre-existing payments."""

    dependencies = [
        ('invoice', '0013_payment_status'),
    ]

    operations = [
        # The noop reverse makes this migration reversible: the data change is
        # purely additive, so rolling back simply leaves the statuses in place.
        migrations.RunPython(update_existing_payments_status, migrations.RunPython.noop),
    ]
|
# _*_ coding: utf-8 _*_
from wtforms import StringField, IntegerField, FileField, MultipleFileField
from wtforms.validators import DataRequired, AnyOf, length, Email, Regexp, ValidationError
from app.validators.base import BaseValidator
from app.libs.enums import ScopeEnum
class CDKeyValidator(BaseValidator):
    """Validates that a request carries a non-empty activation code (cdkey)."""
    # message text (Chinese): "activation code must not be empty"
    cdkey = StringField(validators=[DataRequired(message='激活码不能为空')])
class UpdateCDKeyValidator(CDKeyValidator):
    """Validates a CDKey update request: the cdkey (inherited) plus its new state."""
    # Target state of the cdkey; defaults to 0 (presumably the initial/unused
    # state -- TODO confirm the meaning of the state codes).
    state = IntegerField(default=0)

    def validate_state(self, field):
        # wtforms passes the Field object (the old name `value` was misleading).
        # Coerce the submitted data to int so downstream code always sees an
        # integer state.
        self.state.data = int(field.data)
class CreateCDKeyValidator(BaseValidator):
    """Validates a CDKey creation request: requires a company id; project and
    group ids are optional scoping fields."""
    # Authorization-scope field, currently disabled; kept for reference.
    # auth = IntegerField(validators=[AnyOf(values=[ScopeEnum.CO_ADMIN.value,
    #                                               ScopeEnum.CO_PROJECT.value,
    #                                               ScopeEnum.CO_OPERATE.value,
    #                                               ScopeEnum.CO_USER.value],
    #                                       message='不在授权范围内')
    #                                 ],
    #                     default=ScopeEnum.CO_USER.value)
    # message text (Chinese): "company ID must not be empty"
    company_id = StringField(validators=[DataRequired(message='公司ID不能为空')])
    project_id = StringField()  # may be empty
    group_id = StringField()  # may be empty
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger
from .channel import Channel
from .enums import FileMode, NoarchType, PathType
from .index_record import IndexRecord, IndexJsonRecord
from .._vendor.auxlib.entity import (BooleanField, ComposableField, Entity, EnumField,
ImmutableEntity, IntegerField, ListField, StringField)
from ..common.compat import string_types
log = getLogger(__name__)
class NoarchField(EnumField):
    # EnumField that tolerates loose inputs (e.g. bools or raw strings) by
    # coercing them to a NoarchType before the normal EnumField boxing.
    def box(self, instance, val):
        return super(NoarchField, self).box(instance, NoarchType.coerce(val))
class Noarch(Entity):
    # Schema for the "noarch" section of package metadata: the noarch type and,
    # for python noarch, optional console entry points.
    type = NoarchField(NoarchType)
    entry_points = ListField(string_types, required=False, nullable=True)
class PreferredEnv(Entity):
    # Schema for the "preferred_env" section of package metadata: named env
    # plus optional executable/softlink paths.
    name = StringField()
    executable_paths = ListField(string_types, required=False, nullable=True)
    softlink_paths = ListField(string_types, required=False, nullable=True)
class PackageMetadata(Entity):
    # from info/package_metadata.json
    package_metadata_version = IntegerField()
    noarch = ComposableField(Noarch, required=False, nullable=True)
    preferred_env = ComposableField(PreferredEnv, required=False, nullable=True)
class PathData(Entity):
    # One entry of info/paths.json: a file installed by the package and how it
    # should be linked (prefix replacement, hardlink/softlink, etc.).
    _path = StringField()
    prefix_placeholder = StringField(required=False, nullable=True)
    file_mode = EnumField(FileMode, required=False, nullable=True)
    no_link = BooleanField(required=False, nullable=True)
    path_type = EnumField(PathType)

    @property
    def path(self):
        # because I don't have aliases as an option for entity fields yet
        return self._path
class PathDataV1(PathData):
    # Version-1 path entry: adds content hash, size, and hardlink group info.
    # TODO: sha256 and size_in_bytes should be required for all PathType.hardlink, but not for softlink and directory  # NOQA
    sha256 = StringField(required=False, nullable=True)
    size_in_bytes = IntegerField(required=False, nullable=True)
    inode_paths = ListField(string_types, required=False, nullable=True)
class PathsData(Entity):
    # from info/paths.json
    paths_version = IntegerField()
    paths = ListField(PathData)
class PackageInfo(ImmutableEntity):
    # Aggregated view of an extracted package: where it came from plus the
    # metadata read out of the tarball.
    # attributes external to the package tarball
    extracted_package_dir = StringField()
    channel = ComposableField(Channel)
    repodata_record = ComposableField(IndexRecord)
    url = StringField()
    # attributes within the package tarball
    index_json_record = ComposableField(IndexJsonRecord)
    icondata = StringField(required=False, nullable=True)
    package_metadata = ComposableField(PackageMetadata, required=False, nullable=True)
    paths_data = ComposableField(PathsData)
|
"""
Given a binary tree root, find the largest subtree (the one with the most nodes) that is a binary search tree.
Constraints
n ≤ 100,000 where n is the number of nodes in root
https://binarysearch.com/problems/Largest-Binary-Search-Subtree-in-Nodes
"""
# class Tree:
# def __init__(self, val, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def solve(self, root):
        """Return the root node of the largest (most nodes) subtree of `root`
        that is a binary search tree, or None for an empty tree.

        Single-pass post-order DFS: each call reports (is_bst, size, min, max)
        for its subtree, so validity checks are O(1) per node instead of the
        O(height) predecessor/successor rescans of the naive approach
        (important with n up to 100,000, worst case O(n^2) on skewed trees).
        Duplicates equal to the parent are allowed on either side, matching
        the strict-inequality violation checks of the original logic.
        """
        best_node, best_size = None, 0

        def dfs(node):
            # returns (is_bst, size, min_val, max_val); min/max are None for
            # an empty subtree.
            nonlocal best_node, best_size
            if node is None:
                return True, 0, None, None
            left_ok, left_size, left_min, left_max = dfs(node.left)
            right_ok, right_size, right_min, right_max = dfs(node.right)
            if not (left_ok and right_ok):
                return False, 0, None, None
            # BST property: everything left <= node.val <= everything right.
            if ((left_max is not None and left_max > node.val) or
                    (right_min is not None and right_min < node.val)):
                return False, 0, None, None
            size = 1 + left_size + right_size
            if size > best_size:
                best_size, best_node = size, node
            lo = left_min if left_min is not None else node.val
            hi = right_max if right_max is not None else node.val
            return True, size, lo, hi

        dfs(root)
        return best_node
from pathlib import Path
import typing
import logging as log
import shutil
import pickle
from .matcher import Matcher
from .. import cluster
class TypeCosMatcher(Matcher):
    """Match columns on the cosine similarity of their class/type distributions."""

    def __init__(
        self,
        fdir: Path = None,
        name=None,
        create=False,
        exclude_types=("https://www.w3.org/2001/XMLSchema#string",),
        **kwargs,
    ):
        """
        Args:
            fdir: storage directory (None for purely in-memory use)
            name: matcher name; defaults to the class name
            create: accepted for interface compatibility (unused here)
            exclude_types: class URIs ignored when building type vectors
                (tuple default avoids the shared-mutable-default pitfall)
        """
        self.name = name or self.__class__.__name__
        self.indexed = False
        self.set_storage(fdir)
        # {table index -> {column index -> ({class: weight}, L2 norm)}}
        self.coltypes: typing.Dict[int, typing.Any] = {}
        self.exclude_types = exclude_types

    def add(self, table):
        """Record the (filtered) class distribution of every column of `table`."""
        if table:
            ti = table["tableIndex"]
            ci_range = range(
                table["columnIndexOffset"],
                table["columnIndexOffset"] + table["numCols"],
            )
            for ci, c in zip(ci_range, range(table["numCols"])):
                classes = table.get("classes", {}).get(str(c))
                if not classes:
                    continue
                classes = {
                    k: v for k, v in classes.items() if k not in self.exclude_types
                }
                if not classes:
                    # All classes excluded: storing a zero-norm vector would
                    # cause a ZeroDivisionError in match(), so skip the column.
                    continue
                norm = sum(v ** 2 for v in classes.values()) ** 0.5
                self.coltypes.setdefault(ti, {})[ci] = (classes, norm)

    def merge(self, matcher: "TypeCosMatcher"):
        """Absorb another matcher's column-type vectors; returns self."""
        if matcher is not None:
            for ti, ci_classes in matcher.coltypes.items():
                self.coltypes.setdefault(ti, {}).update(ci_classes)
        return self

    def __enter__(self):
        # Reload persisted vectors when entering an indexed, storage-backed matcher.
        if self.indexed and self.storage:
            self.coltypes = self.storage.load_pickle("coltypes")
        return self

    def close(self):
        # Drop the in-memory vectors; they can be reloaded via __enter__.
        if self.indexed and self.storage:
            del self.coltypes

    def index(self):
        """Persist the collected vectors (if storage is set) and mark as indexed."""
        log.debug(f"TypeCos index is len {len(self.coltypes)}")
        if self.storage:
            self.storage.save_pickle(self.coltypes, "coltypes")
        self.indexed = True
        self.close()

    def match(self, tableid_colids_pairs):
        """Yield ((ti1, ti2, ci1, ci2), score) where score is the cosine
        similarity of the two columns' class vectors, clamped to >= 0."""
        def dot(a, b):
            # sparse dot product over the shared classes
            return sum((a[k] * b[k]) for k in set(a) & set(b))

        pairs = cluster.progress(tableid_colids_pairs, f"Looking up {self.name}")
        for (ti1, _), (ti2, _) in pairs:
            ci_classes1 = self.coltypes.get(ti1, {})
            ci_classes2 = self.coltypes.get(ti2, {})
            for ci1, (cls1, n1) in ci_classes1.items():
                for ci2, (cls2, n2) in ci_classes2.items():
                    if not (n1 and n2):
                        # Defensive: zero-norm vectors (e.g. persisted by an
                        # older version) cannot be compared.
                        continue
                    cos = dot(cls1, cls2) / (n1 * n2)
                    yield (ti1, ti2, ci1, ci2), max(cos, 0)
|
"""
This module is responsible for preparing the waiting room for a load test.
1. Reset the waiting room via API
2. Update the inlet handler Lambda function environment variables
with desired rate and duration
AWS credentials from the environment are required.
"""
import json
import os
import time
from urllib.parse import urlparse
import boto3
import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
# Required configuration pulled from the environment (KeyError if missing).
PRIVATE_API_ENDPOINT = os.environ["PRIVATE_API_ENDPOINT"]
PRIVATE_API_REGION = os.environ["PRIVATE_API_REGION"]
INLET_LAMBDA_NAME = os.environ["INLET_LAMBDA_NAME"]
EVENT_ID = os.environ["EVENT_ID"]
# Value written to the inlet Lambda's INCREMENT_BY variable.
INCREMENT = 1000
# Seconds added to "now" to compute START_TIME.
HOLD_OFF = 30
# Intended ingest window in seconds; currently referenced only by the
# commented-out END_TIME computation below.
DURATION = 7200
def reset_waiting_room():
    """Call the waiting room's private reset_initial_state API endpoint."""
    endpoint = urlparse(PRIVATE_API_ENDPOINT)
    # SigV4-sign the request with credentials from the environment.
    sigv4_auth = BotoAWSRequestsAuth(
        aws_host=endpoint.netloc,
        aws_region=PRIVATE_API_REGION,
        aws_service='execute-api',
    )
    response = requests.post(
        f"{PRIVATE_API_ENDPOINT}/reset_initial_state",
        json={"event_id": EVENT_ID},
        auth=sigv4_auth,
        timeout=25,
    )
    print(f"/reset_initial_state {response.status_code}")
def update_inlet_run_window():
    """
    This function is responsible for updating the time and increment on
    the periodic inlet Lambda function.
    """
    client = boto3.client("lambda")
    # Read the current environment so untouched variables are preserved.
    response = client.get_function_configuration(
        FunctionName=INLET_LAMBDA_NAME)
    environment = response["Environment"]["Variables"]
    # Start HOLD_OFF (30) seconds from now.
    start_ingest = int(time.time()) + HOLD_OFF
    # An explicit stop time (start + DURATION) is currently disabled:
    # stop_ingest = start_ingest + DURATION
    # update the Lambda
    environment["START_TIME"] = f"{start_ingest}"
    # environment["END_TIME"] = f"{stop_ingest}"
    # END_TIME "0" — presumably disables the stop check in the inlet
    # handler; confirm against that handler's code.
    environment["END_TIME"] = "0"
    environment["INCREMENT_BY"] = f"{INCREMENT}"
    response = client.update_function_configuration(
        FunctionName=INLET_LAMBDA_NAME, Environment={"Variables": environment})
    print(json.dumps(response, indent=4))
if __name__ == "__main__":
    # Reset the waiting room first so the new inlet window starts clean.
    reset_waiting_room()
    update_inlet_run_window()
|
class Solution:
    def restoreIpAddresses(self, s: str) -> List[str]:
        """Return all valid dotted-quad IPv4 addresses formed by inserting
        three dots into the digit string *s*.

        Fixes: removed a leftover debug print, and each octet is now
        validated once instead of twice per split.
        """
        n = len(s)
        # A valid IPv4 address uses between 4 and 12 digits.
        if n > 12 or n < 4:
            return []

        def check(part):
            # Reject leading zeros ("01", "00"); a lone "0" is allowed.
            if part[0] == '0' and len(part) != 1:
                return False
            return int(part) < 256

        ans = []
        # i, j, k are the cut positions of the three dots.
        for i in range(1, n - 2):
            for j in range(i + 1, n - 1):
                for k in range(j + 1, n):
                    parts = (s[:i], s[i:j], s[j:k], s[k:])
                    if all(check(p) for p in parts):
                        ans.append(".".join(parts))
        return ans
|
# -*- coding: utf-8 -*-
#
# wat-bridge
# https://github.com/rmed/wat-bridge
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Rafael Medina García <rafamedgar@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Code for the Whatsapp side of the bridge."""
from yowsup.layers import YowLayerEvent
from yowsup.layers.interface import YowInterfaceLayer, ProtocolEntityCallback
from yowsup.layers.network import YowNetworkLayer
from yowsup.layers.protocol_messages.protocolentities import TextMessageProtocolEntity
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.layers.protocol_acks.protocolentities import OutgoingAckProtocolEntity
from yowsup.stacks import YowStackBuilder
from wat_bridge.static import SETTINGS, SIGNAL_TG, get_logger
from wat_bridge.helper import is_blacklisted
logger = get_logger('wa')
class WaLayer(YowInterfaceLayer):
    """Defines the yowsup layer for interacting with Whatsapp.

    Incoming text messages are acknowledged and relayed to the Telegram
    side through SIGNAL_TG; outgoing messages are sent via send_msg().
    """

    @ProtocolEntityCallback('message')
    def on_message(self, message):
        """Received a message."""
        # Parse information
        sender = message.getFrom(full=False)
        logger.debug('received message from %s' % sender)
        # Send a 'read' receipt back before any filtering, so the sender
        # sees the message as delivered regardless of what we do with it.
        receipt = OutgoingReceiptProtocolEntity(
            message.getId(),
            message.getFrom(),
            'read',
            message.getParticipant()
        )
        self.toLower(receipt)
        # Ignore non-text messages
        if message.getType() != 'text':
            logger.info('not a text message, ignoring')
            return
        # Drop messages from blacklisted phone numbers.
        if is_blacklisted(sender):
            logger.debug('phone is blacklisted: %s' % sender)
            return
        body = message.getBody()
        # Relay to Telegram via the blinker signal.
        logger.info('relaying message to Telegram')
        SIGNAL_TG.send('wabot', phone=sender, message=body)

    @ProtocolEntityCallback('receipt')
    def on_receipt(self, entity):
        """Received a "receipt" for a message."""
        logger.debug('ACK message')
        # Acknowledge the receipt so the server stops re-sending it.
        ack = OutgoingAckProtocolEntity(
            entity.getId(),
            'receipt',
            entity.getType(),
            entity.getFrom()
        )
        self.toLower(ack)

    def send_msg(self, **kwargs):
        """Send a message.

        Arguments:
            phone (str): Phone to send the message to.
            message (str): Message to send
        """
        phone = kwargs.get('phone')
        if not phone:
            # Nothing to do
            logger.debug('no phone provided')
            return
        message = kwargs.get('message')
        # WhatsApp JIDs are '<phone>@s.whatsapp.net'.
        entity = TextMessageProtocolEntity(
            message,
            to='%s@s.whatsapp.net' % phone
        )
        # self.ackQueue.append(entity.getId())
        self.toLower(entity)
# Prepare stack
# Singleton layer instance shared with the Telegram side of the bridge.
wabot = WaLayer()
# Event used to ask the network layer to (re)connect.
_connect_signal = YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT)
# Full yowsup protocol stack with our interface layer on top.
WA_STACK = (
    YowStackBuilder()
    .pushDefaultLayers()
    .push(wabot)
    .build()
)
WA_STACK.setCredentials((SETTINGS['wa_phone'], SETTINGS['wa_password']))
|
'''
Author: In Ming Loh
Email: inming.loh@countercept.com
Requirements:
1. Python 3
2. pip install pywintrace
3. pip install psutil
4. Windows machine
'''
import time
import etw
import psutil
def getService(name):
    """Look up a Windows service by *name* via psutil.

    Returns the service info as a dict, or None when the lookup fails
    (best-effort: the error is printed, never raised to the caller).
    """
    service = None
    try:
        service = psutil.win_service_get(name)
        service = service.as_dict()
    except Exception as ex:
        print("Something went wrong. Please contact the developer.")
        print(str(ex))
    return service
def get_me_my_parent(x):
    """ETW PROCESSSTART callback: report parent-PID (PPID) spoofing.

    A process is "spoofed" when the parent recorded in the event data
    differs from the PID that actually emitted the event.  UAC elevation
    legitimately reparents through the Appinfo service, so that case is
    filtered out.

    Args:
        x: (event_id, event_data) tuple from pywintrace; event_data holds
           EventHeader/ProcessId (the real creator) plus ParentProcessID
           and ProcessID of the new process.
    """
    etw_data = x[1]
    # PID that generated this event — the true creator of the process.
    real_parent_pid = int(etw_data['EventHeader']['ProcessId'])
    parent_pid = int(etw_data['ParentProcessID'])
    pid = int(etw_data['ProcessID'])
    # Not spoofed: claimed parent matches the emitting process.
    if real_parent_pid == parent_pid:
        return
    # Appinfo (hosted in svchost) spawns consent.exe during UAC, which
    # looks like spoofing but is benign — resolve its PID to exclude it.
    service = getService('Appinfo')
    if service and service['status'] == 'running':
        appinfo_pid = service["pid"]
    else:
        print("Appinfo service not found or is not running.")
        return
    if real_parent_pid == appinfo_pid:
        return
    # Resolve the three PIDs to names for the report (best effort: a PID
    # that already exited simply keeps an empty name).
    process_name = ""
    fake_parent_process_name = ""
    real_parent_process_name = ""
    for proc in psutil.process_iter():
        if proc.pid == pid:
            process_name = proc.name()
        elif proc.pid == parent_pid:
            fake_parent_process_name = proc.name()
        elif proc.pid == real_parent_pid:
            real_parent_process_name = proc.name()
    print("Spoofed parent process detected!!!\n\t{0}({1}) is detected with parent {2}({3}) but originally from parent {4}({5}).".format(process_name, pid, fake_parent_process_name, parent_pid, real_parent_process_name, real_parent_pid))
def main_function():
    """Start an ETW capture of process-start events; block until Ctrl-C."""
    # define capture provider info
    providers = [etw.ProviderInfo('Microsoft-Windows-Kernel-Process', etw.GUID("{22FB2CD6-0E7B-422B-A0C7-2FAD1FD0E716}"))]
    # create instance of ETW class; events are delivered to the callback
    # on ETW's own thread, filtered to PROCESSSTART tasks.
    job = etw.ETW(providers=providers, event_callback=lambda x: get_me_my_parent(x), task_name_filters="PROCESSSTART")
    # start capture
    job.start()
    try:
        # Idle instead of busy-spinning: the original `while True: pass`
        # pinned a CPU core while waiting for Ctrl-C.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        job.stop()
        print("ETW monitoring stopped.")
# Script entry point: monitor until interrupted with Ctrl-C.
if __name__ == '__main__':
    main_function()
|
from ...common import Object
from . import ls
from ...exception import RemoteDirectoryNotFound
class SrcDstParam(Object):
    """Parameter object pairing a source path with an optional destination.

    instance() relies on __init__'s side effect of storing the newly
    built object in the class-level slot.
    """

    __instance = None

    @staticmethod
    def instance(src, dest=None):
        # Constructing the object publishes it via the private slot.
        SrcDstParam(src, dest)
        return SrcDstParam.__instance

    def __init__(self, src, dest=None):
        self._classname = self.__class__.__name__
        self.src = src
        self.dest = dest
        SrcDstParam.__instance = self # pylint: disable=unused-private-member
class ActionResourcesParam(Object):
    """Parameter object collecting resource URLs for a batch action."""

    __instance = None

    @staticmethod
    def instance():
        # Constructing the object publishes it via the private slot.
        ActionResourcesParam()
        return ActionResourcesParam.__instance

    def __init__(self):
        self._classname = self.__class__.__name__
        # Accumulated resource parameters; extend with add().
        self.urls = []
        ActionResourcesParam.__instance = self # pylint: disable=unused-private-member

    def add(self, param):
        """Append one resource parameter to the batch."""
        self.urls.append(param)
class CreateShareParam(Object):
    """Parameter object describing a public-link share to create."""

    __instance = None

    @staticmethod
    def instance(path, access, expire_on):
        # Constructing the object publishes it via the private slot.
        CreateShareParam(path, access, expire_on)
        return CreateShareParam.__instance

    def __init__(self, path, access, expire_on):
        self._classname = self.__class__.__name__
        self.url = path
        # Assemble the nested ShareConfig/Collaborator payload.
        invitee = Object()
        invitee._classname = 'Collaborator'
        invitee.type = 'external'
        share = Object()
        share._classname = 'ShareConfig'
        share.accessMode = access
        share.protectionLevel = 'publicLink'
        share.expiration = expire_on
        share.invitee = invitee
        self.share = share
        CreateShareParam.__instance = self  # pylint: disable=unused-private-member
def get_resource_info(ctera_host, path):
    """Stat *path* on *ctera_host*; raise RemoteDirectoryNotFound if absent."""
    root = ls.ls(ctera_host, path, depth=0).root
    if root is None:
        raise RemoteDirectoryNotFound(path.fullpath())
    return root
|
"""
Quickstart introduction
"""
import pandas as pd
import random
from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score, average_precision_score
from donatello.components.data import Dataset
from donatello.components.estimator import Estimator
from donatello.components.measure import Metric, FeatureWeights, ThresholdRates
from donatello.components.core import Sculpture
def load_sklearn_bc_dataset(group=False):
    """
    Helper to load sklearn dataset into a pandas dataframe

    Args:
        group (bool): when True, add a random 'groups_column' used by the
            grouped-partitioning demos.

    Returns:
        pd.DataFrame: X and y combined
    """
    import numpy as np  # pd.np was deprecated and removed in pandas >= 1.0

    dataset = load_breast_cancer()
    df = pd.DataFrame(data=np.c_[dataset['data'], dataset['target']],
                      columns=(dataset['feature_names'].tolist() + ['is_malignant'])
                      )
    if group:
        df['groups_column'] = df.apply(lambda x: random.choice(['a', 'b', 'c', 'd']), axis=1)
    return df
def load_data(asDf, group):
    """Build a Dataset from a raw dataframe or from a querier spec."""
    # loading dataframe directly vs specifying queries
    if asDf:
        config = {'raw': load_sklearn_bc_dataset(group)}
    else:
        config = {'queries': {None: {'querier': load_sklearn_bc_dataset, 'group': group}}}
    config['target'] = 'is_malignant'
    if group:
        # Partition data over groups rather than treating rows as independent.
        config['clay'] = 'group'
        config['groupDap'] = {'attrPath': ['groups_column'], 'slicers': (pd.DataFrame, dict)}
    return Dataset(**config)
def load_sculpture(asDf, group):
    """
    Helper to load sculpture

    Args:
        asDf (bool): load the dataset as a raw dataframe (vs a querier spec)
        group (bool): partition data over a random groups column

    Returns:
        Sculpture: configured modeling object
    """
    import numpy as np  # pd.np was deprecated and removed in pandas >= 1.0

    # Honor the asDf/group flags — previously they were accepted but
    # ignored and the dataset was always rebuilt from the raw frame.
    dataset = load_data(asDf, group)
    estimator = Estimator(model=LogisticRegression(),
                          paramGrid={'model__C': list(np.logspace(-2, 0, 5))},
                          searchKwargs={'scoring': 'roc_auc', 'cv': 3},
                          method='predict_proba',
                          scorer='score_second'
                          )
    metrics = [Metric(roc_auc_score), Metric(average_precision_score),
               FeatureWeights(sort='coefficients'), ThresholdRates()]
    sculpture = Sculpture(dataset=dataset, estimator=estimator, metrics=metrics)
    return sculpture
def load_metrics(metrics=None, featureName='coefficients'):
    """Return *metrics* unchanged, or the default metric suite when falsy."""
    if metrics:
        return metrics
    return [Metric(roc_auc_score), Metric(average_precision_score),
            FeatureWeights(sort=featureName), ThresholdRates()]
|
from .tasks import Tasks
def setup(fff):
    """Entry point the bot framework calls to register the Tasks cog."""
    cog = Tasks(fff)
    fff.add_cog(cog)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2017, 2018, 2019 Sqreen. All rights reserved.
# Please refer to our terms for more information:
#
# https://www.sqreen.io/terms.html
#
""" Binding for the WAF Input Data Structure
"""
from ctypes import (POINTER, Structure, byref, c_bool, c_char_p, c_int,
c_int64, c_size_t, c_uint64, c_void_p)
from . import get_lib
from ._compat import UNICODE_CLASS
# PWArgs value-type tags (bit flags); values mirror the PowerWAF C API.
PWI_INVALID = 0
PWI_SIGNED_NUMBER = 1 << 0
PWI_UNSIGNED_NUMBER = 1 << 1
PWI_STRING = 1 << 2
PWI_ARRAY = 1 << 3
PWI_MAP = 1 << 4
class C_PWArgs(Structure):
    """ctypes mirror of the PowerWAF input struct.

    Field order and types are the binary contract with the C library —
    do not reorder.
    """
    _fields_ = [
        ("name", c_char_p),        # optional entry name (map keys)
        ("name_length", c_uint64),
        ("value", c_void_p),       # payload; interpretation depends on type
        ("nb_entries", c_uint64),  # element count for array/map values
        ("type", c_int),           # one of the PWI_* tags
    ]
# Cached, fully-prototyped library handle (populated on first use).
_input_lib = None


def get_input_lib():
    """Return the PowerWAF library with PWArgs function prototypes set.

    Prototypes are configured once and the handle cached.  Bug fix: the
    createArray/createInt/createUint/createStringWithLength assignments
    previously wrote to ``argstype`` — a typo ctypes silently ignores —
    so argument conversion was never declared; ``argtypes`` is correct.
    """
    global _input_lib
    if _input_lib is None:
        lib = get_lib()
        lib.powerwaf_createArray.argtypes = []
        lib.powerwaf_createArray.restype = C_PWArgs
        lib.powerwaf_createInt.argtypes = [c_int64]
        lib.powerwaf_createInt.restype = C_PWArgs
        lib.powerwaf_createUint.argtypes = [c_uint64]
        lib.powerwaf_createUint.restype = C_PWArgs
        lib.powerwaf_createMap.argtypes = []
        lib.powerwaf_createMap.restype = C_PWArgs
        lib.powerwaf_createStringWithLength.argtypes = [c_char_p, c_size_t]
        lib.powerwaf_createStringWithLength.restype = C_PWArgs
        lib.powerwaf_addToPWArgsArray.argtypes = [
            POINTER(C_PWArgs), C_PWArgs]
        lib.powerwaf_addToPWArgsArray.restype = c_bool
        lib.powerwaf_addToPWArgsMap.argtypes = [
            POINTER(C_PWArgs), c_char_p, c_size_t, C_PWArgs]
        lib.powerwaf_addToPWArgsMap.restype = c_bool
        lib.powerwaf_freeInput.argtypes = [POINTER(C_PWArgs), c_bool]
        lib.powerwaf_freeInput.restype = None
        _input_lib = lib
    return _input_lib
def create_array():
    """Create an empty PWArgs array value."""
    return get_input_lib().powerwaf_createArray()
def create_int(value):
    """Wrap *value* as a signed (c_int64) PWArgs number."""
    return get_input_lib().powerwaf_createInt(c_int64(value))
def create_uint(value):
    """Wrap *value* as an unsigned (c_uint64) PWArgs number."""
    return get_input_lib().powerwaf_createUint(c_uint64(value))
def create_map():
    """Create an empty PWArgs map value."""
    return get_input_lib().powerwaf_createMap()
def create_string(value):
    """Build a PWArgs string from *value* (str is UTF-8 encoded first)."""
    if isinstance(value, UNICODE_CLASS):
        value = value.encode("utf-8")
    if not isinstance(value, bytes):
        raise ValueError("value must be a string or bytes")
    lib = get_input_lib()
    return lib.powerwaf_createStringWithLength(value, len(value))
def append_to_array(array, value):
    """Append PWArgs *value* to *array*; returns the library's success bool."""
    return get_input_lib().powerwaf_addToPWArgsArray(byref(array), value)
def append_to_map(array, key, value):
    """Insert PWArgs *value* under *key* (str or bytes) into map *array*.

    Returns the library's success bool.
    """
    if isinstance(key, UNICODE_CLASS):
        key = key.encode("utf-8")
    if not isinstance(key, bytes):
        # Fixed message: it is the key being validated here, not the value.
        raise ValueError("key must be a string or bytes")
    # NOTE(review): 0 is passed for the key length where create_string
    # passes len(...) — presumably the C side strlen()s the NUL-terminated
    # key; confirm against the PowerWAF header before changing.
    return get_input_lib().powerwaf_addToPWArgsMap(byref(array), key, 0, value)
def free(value):
    """Release a PWArgs value via the library.

    The False flag is forwarded as-is — presumably it controls whether the
    top-level struct itself is freed; confirm against the C header.
    """
    get_input_lib().powerwaf_freeInput(byref(value), False)
def create(value, max_depth=32, ignore_none=True):
    """ Lower-level function to convert a Python value to input value

    str/bytes map to WAF strings, bool/int to (un)signed numbers,
    list/tuple to arrays, dict to maps; any other type is stringified.

    Args:
        value: Python value to convert.
        max_depth: maximum container nesting before ValueError is raised.
        ignore_none: skip None items (arrays) and None values (maps).
    """
    if isinstance(value, (str, bytes)):
        return create_string(value)
    # bool is checked before int because bool subclasses int.
    if isinstance(value, bool):
        return create_uint(int(value))
    if isinstance(value, int):
        return create_int(value) if value < 0 else create_uint(value)
    if isinstance(value, (list, tuple)):
        if max_depth <= 0:
            raise ValueError("could not convert nested structure")
        obj = create_array()
        for item in value:
            if item is None and ignore_none:
                continue
            item_obj = create(item, max_depth=max_depth - 1)
            if append_to_array(obj, item_obj) is False:
                # Ownership was not transferred; free to avoid a leak.
                free(item_obj)
        return obj
    if isinstance(value, dict):
        if max_depth <= 0:
            raise ValueError("could not convert nested structure")
        obj = create_map()
        for k, v in value.items():
            if v is None and ignore_none:
                continue
            item_obj = create(v, max_depth=max_depth - 1)
            if append_to_map(obj, k, item_obj) is False:
                free(item_obj)
        return obj
    # Fallback: stringify unknown types.
    return create_string(UNICODE_CLASS(value))
class PWArgs:
    """
    Higher-level bridge between Python values and input values.

    Owns the wrapped C_PWArgs and frees it exactly once on finalization.
    """

    def __init__(self, obj):
        # Wrapped C_PWArgs; set to None once freed so __del__ is idempotent.
        self._obj = obj

    def __del__(self):
        # Free-once guard: _obj is cleared after the free.
        if self._obj is None:
            return
        free(self._obj)
        self._obj = None

    @classmethod
    def from_python(cls, value, max_depth=32, ignore_none=True):
        """ Convert a Python value to a PWArgs.
        """
        return cls(create(value, max_depth=max_depth, ignore_none=ignore_none))

    def __repr__(self):
        return "<{} obj={!r}>".format(self.__class__.__name__, self._obj)
|
"""
Copyright (c)
Author: James Bennion-Pedley
Date: 2021 - present
Licence: MIT
"""
# from dashboard import app
from flask import Blueprint, render_template
from flask_login import login_required, current_user
main = Blueprint('main', __name__)
# Home page
@main.route('/')
@login_required
def index():
    """Render the dashboard home page; requires an authenticated session."""
    return render_template('index.html')
|
import datetime
import os
from test import DATA_DIR, TEST_DIR, pushd
import numpy as np
import pytest
from RAiDER.constants import Zenith
from RAiDER.delay import tropo_delay
from RAiDER.utilFcns import gdal_open, makeDelayFileNames, modelName2Module
# Shared test-scenario data directory and the relative tolerance used
# when comparing computed delays against the stored reference rasters.
SCENARIO_DIR = os.path.join(TEST_DIR, "scenario_1")
_RTOL = 1e-4
def test_tropo_delay_ERA5(tmp_path):
    '''
    Scenario:
    1: Small area, ERA5, Zenith delay
    '''
    # Delegates to the shared scenario driver with the ERA5 model.
    core_test_tropo_delay(tmp_path, modelName="ERA5")
def test_tropo_delay_GMAO(tmp_path):
    '''
    Scenario:
    1: Small area, GMAO, Zenith delay
    '''
    # Delegates to the shared scenario driver with the GMAO model.
    core_test_tropo_delay(tmp_path, modelName="GMAO")
############ comment out MERRA-2 test for now: it passes on local machines but not in CircleCI. Need further look into this.
#def test_tropo_delay_MERRA2(tmp_path):
# '''
# Scenario:
# 1: Small area, MERRA2, Zenith delay
# '''
# core_test_tropo_delay(tmp_path, modelName="MERRA2")
def test_tropo_delay_HRES(tmp_path):
    '''
    Scenario:
    1: Small area, HRES, Zenith delay
    '''
    # Delegates to the shared scenario driver with the HRES model.
    core_test_tropo_delay(tmp_path, modelName="HRES")
def test_tropo_delay_ERA5T(tmp_path):
    '''
    Scenario:
    1: Small area, ERA5T, Zenith delay
    '''
    # Delegates to the shared scenario driver with the ERA5T model.
    core_test_tropo_delay(tmp_path, modelName="ERA5T")
def test_tropo_delay_ERAI(tmp_path):
    '''
    Scenario:
    1: Small area, ERAI, Zenith delay
    '''
    # Delegates to the shared scenario driver with the ERA-Interim model.
    core_test_tropo_delay(tmp_path, modelName="ERAI")
#def test_tropo_delay_NCMR(tmp_path):
# '''
# Scenario:
# 1: Small area, NCMR, Zenith delay
# '''
# core_test_tropo_delay(tmp_path, modelName="NCMR")
def core_test_tropo_delay(tmp_path, modelName):
    '''
    Scenario:
        1: Small area, Zenith delay

    Shared driver: runs tropo_delay for *modelName* and compares the wet
    and hydrostatic delays against the stored reference rasters.
    '''
    lats = gdal_open(os.path.join(SCENARIO_DIR, 'geom', 'lat.dat'))
    lons = gdal_open(os.path.join(SCENARIO_DIR, 'geom', 'lon.dat'))

    # Each weather-model archive covers a different date.
    if modelName == 'ERAI':
        time = datetime.datetime(2018, 1, 3, 23, 0)
    elif modelName == 'NCMR':
        time = datetime.datetime(2018, 7, 1, 0, 0)
    else:
        time = datetime.datetime(2020, 1, 3, 23, 0)

    wmLoc = os.path.join(SCENARIO_DIR, 'weather_files')
    if not os.path.exists(wmLoc):
        os.mkdir(wmLoc)

    _, model_obj = modelName2Module(modelName)
    wet_file, hydro_file = makeDelayFileNames(
        time, Zenith, "envi", modelName, tmp_path
    )

    with pushd(tmp_path):
        # Argument dictionary for tropo_delay, assembled in one literal.
        args = {
            'los': Zenith,
            'lats': lats,
            'lons': lons,
            'll_bounds': (15.75, 18.25, -103.24, -99.75),
            'heights': ("dem", os.path.join(TEST_DIR, "test_geom", "warpedDEM.dem")),
            'pnts_file': 'lat_query_points.h5',
            'flag': "files",
            'weather_model': {"type": model_obj(), "files": None, "name": modelName},
            'wmLoc': wmLoc,
            'zref': 20000.,
            'outformat': "envi",
            'times': time,
            'out': tmp_path,
            'download_only': False,
            'wetFilenames': wet_file,
            'hydroFilenames': hydro_file,
        }
        (_, _) = tropo_delay(args)

        # Read back the computed delays and the stored references.
        wet = gdal_open(wet_file)
        hydro = gdal_open(hydro_file)
        true_wet = gdal_open(
            os.path.join(SCENARIO_DIR, modelName + "/wet.envi"),
            userNDV=0.
        )
        true_hydro = gdal_open(
            os.path.join(SCENARIO_DIR, modelName + "/hydro.envi"),
            userNDV=0.
        )

        assert np.allclose(wet, true_wet, equal_nan=True, rtol=_RTOL)
        assert np.allclose(hydro, true_hydro, equal_nan=True, rtol=_RTOL)
|
"""fine-tune training
https://github.com/dredwardhyde/gpt-neo-fine-tuning-example/blob/main/gpt_neo.py
"""
import click
import joblib
import torch
from torch.utils.data import random_split
from transformers import AutoModelForCausalLM
from transformers import AutoTokenizer
from transformers import IntervalStrategy
from transformers import TrainingArguments
from ..datasets import SeqTxtDataset
from ..models import train_seq_model
from ._utils import get_txt
# Sentinel tokens added to the tokenizer for sequence begin/end/padding.
_BOS_TOKEN = '!!!'
_EOS_TOKEN = '###'
_PAD_TOKEN = '???'

# Shared HuggingFace training configuration (no intermediate checkpoints).
_training_args = TrainingArguments(
    output_dir='./results',
    num_train_epochs=5,
    logging_steps=5000,
    save_strategy=IntervalStrategy.NO,
    per_device_train_batch_size=2,
    per_device_eval_batch_size=2,
    warmup_steps=100,
    weight_decay=0.01,
    logging_dir='./logs',
)

# Fixed seed so the random train/validation split is reproducible.
torch.manual_seed(42)
@click.command()
@click.option(
    "--data-path",
    type=click.STRING,
    required=True,
    help="flat file for fine-tune training",
)
@click.option(
    "--valid-ratio",
    type=click.FLOAT,
    required=True,
    help="fraction of the data held out for validation",
)
@click.option(
    "--model-path",
    type=click.STRING,
    required=True,
    help="path to save trained model",
)
@click.option(
    "--tokenizer-path",
    type=click.STRING,
    required=True,
    help="path to save tokenizer",
)
@click.option(
    "--model-name",
    type=click.STRING,
    required=True,
    default='EleutherAI/gpt-neo-1.3B',
)
def fine_tune_train(
    data_path: str,
    valid_ratio: float,
    model_path: str,
    tokenizer_path: str,
    model_name: str,
):
    """fine-tune training"""
    # Tokenizer with the sentinel BOS/EOS/PAD tokens registered.
    tokenizer = AutoTokenizer.from_pretrained(
        model_name,
        bos_token=_BOS_TOKEN,
        eos_token=_EOS_TOKEN,
        pad_token=_PAD_TOKEN,
    )
    model = AutoModelForCausalLM.from_pretrained(model_name)
    # Grow the embedding table to cover the newly added tokens.
    model.resize_token_embeddings(len(tokenizer))
    data = get_txt(data_path)
    dataset = SeqTxtDataset(
        data,
        tokenizer,
        # Longest raw line sets the padding length (genexp: no temp list).
        max_length=max(len(i) for i in data),
    )
    # Random split is deterministic thanks to the module-level seed.
    val_size = int(valid_ratio * len(dataset))
    train_dataset, val_dataset = random_split(
        dataset, [len(dataset) - val_size, val_size])
    model = train_seq_model(model, _training_args, train_dataset, val_dataset)
    # NOTE(review): pickling HF objects via joblib works, but
    # save_pretrained() is the conventional route — confirm downstream
    # loaders expect joblib before changing.
    joblib.dump(model, model_path)
    joblib.dump(tokenizer, tokenizer_path)
|
from rest_framework.permissions import BasePermission
class OnlyWxUserCreate(BasePermission):
    """Allow POST only to authenticated WeChat users; all other methods pass."""

    def has_permission(self, request, view):
        # Non-creation requests are not restricted by this permission.
        if request.method != 'POST':
            return True
        user = request.user
        return bool(user and getattr(user, 'is_wechat', False))
|
"""
Parts of this code from:
http://arcade.academy/examples/procedural_caves_cellular.html#procedural-caves-cellular
"""
import random
import arcade
from level import create_grid
from level import Level
from constants import *
from randomly_place_sprite import randomly_place_sprite
from wander_sprite import DragonSprite
# Parameters for cellular automata
CHANCE_TO_START_ALIVE = 0.3
DEATH_LIMIT = 3
BIRTH_LIMIT = 4
NUMBER_OF_STEPS = 4
def initialize_grid(grid):
    """ Randomly set grid locations to on/off based on chance. """
    # One random draw per cell, row-major, so results are reproducible
    # for a given random seed.
    for row in grid:
        for column in range(len(row)):
            if random.random() <= CHANCE_TO_START_ALIVE:
                row[column] = 1
def count_alive_neighbors(grid, x, y):
    """ Count neighbors that are alive. """
    height = len(grid)
    width = len(grid[0])
    alive_count = 0
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            if dx == 0 and dy == 0:
                continue
            nx = x + dx
            ny = y + dy
            if not (0 <= nx < width and 0 <= ny < height):
                # Off-grid cells count as alive so the map tends to close.
                alive_count += 1
            elif grid[ny][nx] == 1:
                alive_count += 1
    return alive_count
def do_simulation_step(old_grid):
    """ Run a step of the cellular automaton. """
    height = len(old_grid)
    width = len(old_grid[0])
    new_grid = create_grid(width, height)
    for x in range(width):
        for y in range(height):
            neighbors = count_alive_neighbors(old_grid, x, y)
            if old_grid[y][x] == 1:
                # Live cell survives unless under-populated.
                new_grid[y][x] = 0 if neighbors < DEATH_LIMIT else 1
            else:
                # Dead cell is born only when crowded enough.
                new_grid[y][x] = 1 if neighbors > BIRTH_LIMIT else 0
    return new_grid
def get_level_3_array():
    """Generate the level-3 cave grid via cellular automata, walled in."""
    grid = create_grid(GRID_WIDTH, GRID_HEIGHT)
    initialize_grid(grid)
    for _ in range(NUMBER_OF_STEPS):
        grid = do_simulation_step(grid)
    # Force a solid border around the outside of the map.
    for x in range(GRID_WIDTH):
        grid[0][x] = 1
        grid[GRID_HEIGHT - 1][x] = 1
    for y in range(GRID_HEIGHT):
        grid[y][0] = 1
        grid[y][GRID_WIDTH - 1] = 1
    return grid
def add_level_3_creatures(level: Level, player_sprite: arcade.Sprite):
    """Populate level 3 with its object (scepter) and creature (dragon)."""
    level.creature_list = arcade.SpriteList()
    # Quest object: a scepter dropped at a random open tile.
    scepter = arcade.Sprite("images/scepter.png", OBJECT_SPRITE_SCALING)
    scepter.tag = "scepter"
    randomly_place_sprite(scepter, level.wall_list)
    level.objects_list.append(scepter)
    # Dragon creature; receives the player sprite (presumably to pursue
    # it — confirm in DragonSprite) and its own simple physics engine.
    dragon = DragonSprite("images/dragon.png", CREATURE_SPRITE_SCALING, player_sprite)
    dragon.tag = "dragon"
    dragon.physics_engine = arcade.PhysicsEngineSimple(dragon, level.all_obstacles)
    randomly_place_sprite(dragon, level.wall_list)
    level.creature_list.append(dragon)
|
# coding: utf-8
import logging
import pathlib
import os
import datetime
import ast
import random
import numpy as np
import pandas
import plotly.express as px
import plotly.offline.offline as poff
from io import BytesIO
from matplotlib import pyplot
from matplotlib import dates as mdates
from operator import or_
from flask_sqlalchemy import SQLAlchemy
from flask import request, url_for, send_file, make_response
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy import or_, func
from sqlalchemy.sql.operators import endswith_op
# Shared SQLAlchemy handle (bound to the Flask app elsewhere).
db = SQLAlchemy()
# Root directory for generated chart/plot artifacts.
viz_dir = pathlib.PurePath("/viz")
logger = logging.getLogger("gunicorn.error")
# Maximum number of saved plot images kept on disk (oldest is evicted).
max_plots = 5
class Plotter(object):
    """Static helpers that build Plotly charts from waterway readings."""

    @staticmethod
    def get_current_chart():
        """Serve the most recent chart HTML, or a blank placeholder."""
        path = viz_dir / "chart"
        if os.path.isfile(str(path / "current.html")):
            out = str(path / "current.html")
        else:
            out = str(path / "blank-chart.html")
        with open(out, "r") as f:
            response = make_response(f.read())
        response.mimetype = "text/html"
        return response

    @staticmethod
    def chart_types():
        """Chart types offered to the UI."""
        return [
            {"id": "scatter", "display": "Scatterplot"},
            {"id": "grad_scatter", "display": "Bubble Chart"},
            {"id": "bar", "display": "Bar Chart"},
            {"id": "histogram", "display": "Histogram"},
        ]

    @staticmethod
    def call(*args, **kwargs):
        """Dispatch to the static method named after the HTTP verb."""
        verb = request.method.lower()
        if hasattr(Plotter, verb):
            return getattr(Plotter, verb)(*args, **kwargs)
        else:
            return {"message": "Method not allowed."}, 405

    @staticmethod
    def retrieve_data(**kwargs):
        """Query readings for the requested measures/locations/date range.

        Returns:
            dict: 'dataframe' (combined pandas frame), plus the resolved
            'locations' and 'measures' model objects.
        """
        kwargs = Plotter.transform_input(**kwargs)
        measure_objects = kwargs.get("measures")
        location_objects = kwargs.get("locations")
        start_date = kwargs.get("start_date")
        end_date = kwargs.get("end_date")
        frames = []
        # One query per (measure, location) pair; results are concatenated.
        # (Unused enumerate indices removed.)
        for measure in measure_objects:
            for location in location_objects:
                query = db.session.query(
                    WaterwayReading.id,
                    Location.display.label("location"),
                    Location.longitude,
                    Location.latitude,
                    WaterwayReading.sample_date.label("sample_date"),
                    WaterwayReading.value.label("value"),
                    Chemical.display + "(" + UnitOfMeasure.unit_name + ")",
                    UnitOfMeasure.unit_name.label("unit"),
                ).join(
                    Chemical, WaterwayReading.chemical_id == Chemical.id
                ).join(
                    Location, WaterwayReading.location_id == Location.id
                ).join(
                    UnitOfMeasure, Chemical.unit_of_measure_id == UnitOfMeasure.id
                ).filter(
                    WaterwayReading.sample_date.between(start_date, end_date)
                )
                if location:
                    query = query.filter(WaterwayReading.location == location)
                if measure:
                    query = query.filter(WaterwayReading.chemical == measure)
                frames.append(
                    pandas.read_sql(
                        query.statement.compile(compile_kwargs={"literal_binds": True}),
                        db.session.bind
                    )
                )
        df = pandas.concat(frames)
        df.columns = ["id", "location", "longitude", "latitude", "sample_date", "value", "measure", "unit"]
        return {
            "dataframe": df,
            "locations": location_objects,
            "measures": measure_objects
        }

    @staticmethod
    def post(*args, **kwargs):
        """Handle a chart-generation POST."""
        chart_type = request.form.get("chart_type")
        if not chart_type:
            # NOTE(review): 401 (unauthorized) looks wrong for a missing
            # field — 400 seems intended; kept for client compatibility.
            return {"message": "You must specify a chart type."}, 401
        else:
            return Plotter.plot(**dict(request.form))

    @staticmethod
    def save_plot(directory):
        """Save the current matplotlib figure, keeping at most max_plots files."""
        path = str(viz_dir / directory)
        files = [os.path.join(path, f) for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))]
        files.sort(key=lambda x: os.path.getmtime(x))
        # Evict the oldest once the cap is reached ('>=' so an overfull
        # directory still gets trimmed; the old '==' could skip eviction).
        if len(files) >= max_plots:
            os.remove(files[0])
        # %H is the hour: the original '%h' is a nonstandard alias for the
        # abbreviated month name, so filenames collided within a day.
        pyplot.savefig(os.path.join(path, f"plot-{datetime.datetime.now().strftime('%Y%m%d%H%M')}.png"))

    @staticmethod
    def transform_input(**kwargs):
        """Normalize raw form kwargs into model objects and datetimes.

        Keys named in kwargs['exclude'] are skipped.  Empty selections
        fall back to "all measures"/"all locations", and missing dates to
        a wide default window.
        """
        input_args = kwargs
        exclude = kwargs.pop("exclude", [])
        if exclude:
            for arg in exclude:
                input_args.pop(arg, None)
        out_args = {}
        if "measures" not in exclude:
            measures = ast.literal_eval(kwargs.get("measures"))
            measure_obj_list = []
            for m in measures:
                measure_obj_list.append(
                    Chemical.query.filter(Chemical.id == m).first()
                )
            if not measure_obj_list:
                out_args["measures"] = Chemical.query.all()
            else:
                out_args["measures"] = measure_obj_list
        if "locations" not in exclude:
            locations = ast.literal_eval(kwargs.get("locations"))
            location_obj_list = []
            for loc_id in locations:
                location_obj_list.append(
                    Location.query.filter(Location.id == loc_id).first()
                )
            # Heatmaps always cover every location.
            if not location_obj_list or kwargs.get("chart_type") == "density_heatmap":
                out_args["locations"] = Location.query.all()
            else:
                out_args["locations"] = location_obj_list
        if "start_date" not in exclude:
            start_date = kwargs.get("start_date")
            if not start_date:
                start_date = datetime.datetime(1998, 1, 1)
            else:
                start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
            out_args["start_date"] = start_date
        if "end_date" not in exclude:
            end_date = kwargs.get("end_date")
            if not end_date:
                end_date = datetime.datetime.now()
            else:
                end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d")
            out_args["end_date"] = end_date
        return out_args

    @staticmethod
    def plot(**kwargs):
        """Build the requested Plotly chart and return it as an HTML div."""
        data = Plotter.retrieve_data(**kwargs)
        df = data["dataframe"]
        ct = kwargs["chart_type"]
        if not data.get("locations"):
            return {"message": "You must select at least one location."}, 400
        if not data.get("measures"):
            return {"message": "You must select at least one measure."}, 400
        condense = kwargs.get("condenser")
        # Base keyword arguments shared by all chart types.
        in_kwargs = dict(
            x="sample_date",
            y="value",
            facet_col="location",
            facet_row="measure",
            height=800,
            width=1000,
        )
        if ct == "grad_scatter":
            # Bubble chart: one row per location, bubble size = value.
            in_kwargs["y"] = "location"
            in_kwargs["size"] = "value"
            in_kwargs["size_max"] = 80
            in_kwargs["color"] = "measure"
            in_kwargs.pop("facet_col", None)
            in_kwargs.pop("facet_row", None)
        if ct == "histogram":
            in_kwargs.pop("y", None)
        num_measures = len(data["measures"])
        if condense and num_measures == 1:
            # Condensed: overlay locations by color instead of faceting.
            in_kwargs.pop("facet_col", None)
            in_kwargs.pop("facet_row", None)
            in_kwargs["color"] = "location"
        elif num_measures != 1 and condense:
            return {"message": "You can only specify one measure type for a condensed chart."}, 400
        func = px.scatter if ct == "grad_scatter" else getattr(px, ct)
        fig = func(
            df,
            **in_kwargs
        )
        # Strip the "column=" prefix Plotly puts on facet annotations.
        fig.for_each_annotation(lambda a: a.update(text=a.text.split("=")[-1]))
        html = poff.plot(
            fig,
            include_plotlyjs=False,
            output_type="div"
        )
        response = make_response(html)
        response.mimetype = "text/html"
        return response, 201
class Chemical(db.Model):
    """A measurable chemical/analyte with its unit of measure."""
    __tablename__ = 'chemical'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False, unique=True)
    display = db.Column(db.String(100), nullable=False)
    unit_of_measure_id = db.Column(db.ForeignKey('unit_of_measure.id'))
    unit_of_measure = db.relationship('UnitOfMeasure', primaryjoin='Chemical.unit_of_measure_id == UnitOfMeasure.id', backref='chemicals')

    @property
    def json(self):
        # REST representation of this measure.
        return {
            "id": int(self.id),
            "name": self.name,
            "display": self.display,
            # NOTE(review): unit_name comes from a String column, normally
            # str not bytes — the .decode() looks suspect; confirm the
            # driver actually returns bytes here.
            "unit": str(self.unit_of_measure.unit_name.decode()),
            "uri": f"/rest/measure/{self.id}"
        }

    @staticmethod
    def get(id):
        """Return (json, 200) for the measure with *id*, else a 404 payload."""
        if not id:
            return {"message": "Not found"}, 404
        else:
            result = Chemical.query.filter_by(id=id).first()
            if result:
                return result.json, 200
            else:
                return {"message": "Measure not found."}, 404

    @staticmethod
    def search():
        """Case-insensitive prefix search over name/display for autocomplete."""
        term = request.args.get("term")
        if not term:
            return {"results": []}, 200
        else:
            term = str(term).upper()
            results = Chemical.query.filter(
                or_(
                    Chemical.name.like(f"{term}%"),
                    func.upper(Chemical.display).like(f"{term}%")
                )
            ).order_by(Chemical.name).all()
            return {"results": [{"id": r.id, "text": f"{r.display} ({str(r.unit_of_measure)})"} for r in results]}, 200
class Location(db.Model):
    """A sampling site (sensor or waste outlet) with geographic coordinates."""
    __tablename__ = 'location'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False, unique=True)
    display = db.Column(db.String(100), nullable=False)
    longitude = db.Column(db.Numeric, nullable=False)
    latitude = db.Column(db.Numeric, nullable=False)
    location_type_id = db.Column(db.ForeignKey('location_type.id'))
    location_type = db.relationship('LocationType', primaryjoin='Location.location_type_id == LocationType.id', backref='locations')

    @staticmethod
    def all_sensors():
        """All locations of type SENSOR, ordered by name."""
        sensor_type = LocationType.query.filter_by(name="SENSOR").first()
        return Location.query.filter_by(location_type=sensor_type).order_by(Location.name).all()

    @staticmethod
    def get(id=None):
        """Look up one location (by id or name) or list all as GeoJSON."""
        if not id:
            # No id: return the full FeatureCollection; ?nowaste=Y hides waste sites.
            include_waste = not (request.args.get("nowaste") == "Y")
            return Location.all_locations_geojson(include_waste=include_waste), 200
        if isinstance(id, str):
            match = Location.query.filter_by(name=id.upper()).first()
        else:
            match = Location.query.filter_by(id=id).first()
        if match:
            return match.geojson, 200
        return {"message": "Not found."}, 404

    @hybrid_property
    def lat_long(self):
        """[latitude, longitude] floats (Leaflet ordering)."""
        return [float(self.latitude), float(self.longitude)]

    @hybrid_property
    def long_lat(self):
        """[longitude, latitude] floats (GeoJSON ordering)."""
        return [float(self.longitude), float(self.latitude)]

    @hybrid_property
    def geojson(self):
        """GeoJSON Feature for this location plus icon/style hints for the map."""
        icon_file = "waste.png" if self.location_type.name == "WASTE" else "sensor.png"
        return {
            "type": "Feature",
            "properties": {
                "id": self.id,
                "name": self.name,
                "display": self.display,
                "type": {
                    "id": self.location_type.id,
                    "name": self.location_type.name,
                    "description": self.location_type.description,
                },
                "uri": f"/rest/location/{int(self.id)}",
            },
            "geometry": {
                "type": "Point",
                "coordinates": self.long_lat,
            },
            "icon": {
                "iconUrl": url_for("static", filename="icons/" + icon_file),
                "iconSize": [30, 30],
                "latlng": self.lat_long,
            },
            "style": {
                "color": "red" if self.location_type.name == "SENSOR" else "green",
            },
        }

    @property
    def json(self):
        """Alias: a location's JSON form is its GeoJSON feature."""
        return self.geojson

    @staticmethod
    def all_locations_geojson(include_waste=True):
        """FeatureCollection of every location, optionally excluding WASTE sites."""
        if include_waste:
            rows = Location.query.all()
        else:
            sensor_type = LocationType.query.filter_by(name="SENSOR").first()
            rows = Location.query.filter_by(location_type=sensor_type).all()
        return {
            "type": "FeatureCollection",
            "name": "sensor-locations",
            "features": [row.geojson for row in rows],
        }
class LocationType(db.Model):
    """Lookup table classifying a Location (code uses "SENSOR" and "WASTE")."""
    __tablename__ = 'location_type'
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Machine name compared against in code (e.g. "SENSOR", "WASTE").
    name = db.Column(db.String(100), unique=True)
    # Human-readable description surfaced in the GeoJSON output.
    description = db.Column(db.String(100))
class UnitOfMeasure(db.Model):
    """Unit of measure attached to a Chemical."""
    __tablename__ = 'unit_of_measure'
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Short unit label. NOTE(review): the .decode() calls below imply the
    # driver returns this column as bytes — confirm against the DB config.
    unit_name = db.Column(db.String(10), unique=True)
    @property
    def decoded(self):
        """Text form of unit_name."""
        return str(self.unit_name.decode())
    def __str__(self):
        # Same decode as `decoded`, without the redundant str() wrapper.
        return self.unit_name.decode()
    def __repr__(self):
        return str(self)
class WaterwayReading(db.Model):
    """One chemical measurement taken at a location on a sample date."""
    __tablename__ = 'waterway_reading'
    id = db.Column(db.Integer, primary_key=True)
    value = db.Column(db.Numeric, nullable=False)
    chemical_id = db.Column(db.ForeignKey('chemical.id'), nullable=False)
    location_id = db.Column(db.ForeignKey('location.id'), nullable=False)
    sample_date = db.Column(db.Date, nullable=False)
    chemical = db.relationship('Chemical', primaryjoin='WaterwayReading.chemical_id == Chemical.id', backref='waterway_readings')
    location = db.relationship('Location', primaryjoin='WaterwayReading.location_id == Location.id', backref='waterway_readings')
    @hybrid_property
    def location_measure(self):
        """Combined "<location> - <measure>" display label."""
        return f"{self.location.display} - {self.chemical.display}"
    @property
    def json(self):
        """Serialise one reading, embedding its location and measure objects."""
        return {
            "id": int(self.id),
            "value": float(self.value),
            "sample_date": self.sample_date.strftime("%Y-%m-%d"),
            "location": self.location.json,
            "measure": self.chemical.json
        }
    @staticmethod
    def min_sample_date():
        """Earliest sample_date in the table (None when empty)."""
        return db.session.query(
            db.func.min(WaterwayReading.sample_date)
        ).scalar()
    @staticmethod
    def max_sample_date():
        """Latest sample_date in the table (None when empty)."""
        return db.session.query(
            db.func.max(WaterwayReading.sample_date)
        ).scalar()
    @staticmethod
    def search():
        """Filter readings by date range and optional location/measure id lists.

        Filters come from the request query string via transform_input().
        Always responds 200; "message" distinguishes an empty result set.
        """
        args = dict(request.args)
        args = WaterwayReading.transform_input(**args)
        location_ids = [loc.id for loc in args["locations"]]
        measure_ids = [ms.id for ms in args["measures"]]
        query = WaterwayReading.query.filter(
            db.between(
                WaterwayReading.sample_date,
                args["start_date"],
                args["end_date"]
            )
        )
        # Empty id lists mean "no restriction" rather than "match nothing".
        if location_ids:
            query = query.filter(WaterwayReading.location_id.in_(location_ids))
        if measure_ids:
            query = query.filter(WaterwayReading.chemical_id.in_(measure_ids))
        data = query.all()
        if data:
            response = {"results": [row.json for row in data], "message": "Ok."}, 200
        else:
            response = {"results": [], "message": "None found."}, 200
        return response
    @staticmethod
    def transform_input(**kwargs):
        """Normalise raw query-string kwargs into ORM objects and datetimes.

        Recognised keys: "measures"/"locations" (stringified id lists such as
        "[1,2]"), "start_date"/"end_date" ("%Y-%m-%d" strings), and "exclude"
        (an iterable of keys to skip entirely).
        """
        # NOTE(review): input_args aliases kwargs (no copy), so the pops below
        # also mutate kwargs; the later "not in exclude" guards make that
        # harmless, but confirm the aliasing is intentional.
        input_args = kwargs
        exclude = kwargs.pop("exclude", [])
        if exclude:
            for arg in exclude:
                input_args.pop(arg, None)
        out_args = {}
        if "measures" not in exclude and kwargs.get("measures"):
            # Ids arrive as a literal list string; literal_eval parses safely.
            measures = ast.literal_eval(kwargs.get("measures"))
            out_args["measures"] = Chemical.query.filter(Chemical.id.in_(measures)).all()
        else:
            out_args["measures"] = []
        if "locations" not in exclude and kwargs.get("locations"):
            locations = ast.literal_eval(kwargs.get("locations"))
            out_args["locations"] = Location.query.filter(Location.id.in_(locations)).all()
        else:
            out_args["locations"] = []
        if "start_date" not in exclude:
            start_date = kwargs.get("start_date")
            if not start_date:
                # Default window start predates all recorded samples.
                start_date = datetime.datetime(1998, 1, 1)
            else:
                start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
            out_args["start_date"] = start_date
        if "end_date" not in exclude:
            end_date = kwargs.get("end_date")
            if not end_date:
                end_date = datetime.datetime.now()
            else:
                end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d")
            out_args["end_date"] = end_date
        return out_args
class WaterwayReadingMaster(db.Model):
    """Denormalised (flat) reading record; columns mirror the joined tables.

    NOTE(review): presumably backed by a reporting view/table — all columns
    are plain values and there are no relationships. Confirm in the schema.
    """
    __tablename__ = 'waterway_reading_master'
    id = db.Column(db.Integer, primary_key=True)
    value = db.Column(db.Numeric)
    unit_name = db.Column(db.String)
    chemical = db.Column(db.String(100))
    location = db.Column(db.String(100))
    sample_date = db.Column(db.Date)
|
file_to_skip = [] |
import random

# Pool of student names to draw from.
students = ['Paulo', 'Ana', 'Pedro', 'Maria']
# Pick one uniformly at random and announce the choice (message in Portuguese).
chosen = random.choice(students)
print('O aluno escolhido foi {}'.format(chosen))
|
"""
==============================================================
Deep Belief Network features for digit classification
==============================================================
Adapted from http://scikit-learn.org/stable/auto_examples/neural_networks/plot_rbm_logistic_classification.html#sphx-glr-auto-examples-neural-networks-plot-rbm-logistic-classification-py
This example shows how to build a classification pipeline with a UnsupervisedDBN
feature extractor and a :class:`LogisticRegression
<sklearn.linear_model.LogisticRegression>` classifier. The hyperparameters
of the entire model (learning rate, hidden layer size, regularization)
were optimized by grid search, but the search is not reproduced here because
of runtime constraints.
Logistic regression on raw pixel values is presented for comparison. The
example shows that the features extracted by the UnsupervisedDBN help improve the
classification accuracy.
"""
from __future__ import print_function
print(__doc__)
import numpy as np
from scipy.ndimage import convolve
from sklearn import linear_model, datasets, metrics
from sklearn.cross_validation import train_test_split
from sklearn.pipeline import Pipeline
from dbn.models import UnsupervisedDBN # use "from dbn.tensorflow import SupervisedDBNClassification" for computations on TensorFlow
#from matplotlib.pyplot import pyplot as plt
import pickle
import io
###############################################################################
# Setting up
def nudge_dataset(X, Y):
    """
    This produces a dataset 5 times bigger than the original one,
    by moving the 8x8 images in X around by 1px to left, right, down, up
    """
    # One 3x3 one-hot convolution kernel per shift direction.
    direction_vectors = [
        [[0, 1, 0], [0, 0, 0], [0, 0, 0]],
        [[0, 0, 0], [1, 0, 0], [0, 0, 0]],
        [[0, 0, 0], [0, 0, 1], [0, 0, 0]],
        [[0, 0, 0], [0, 0, 0], [0, 1, 0]],
    ]

    def shift(image, kernel):
        # Convolving with a one-hot kernel translates the image by one pixel;
        # mode='constant' zero-fills the edge that scrolls in.
        return convolve(image.reshape((8, 8)), mode='constant', weights=kernel).ravel()

    shifted = [np.apply_along_axis(shift, 1, X, kernel)
               for kernel in direction_vectors]
    # Stack the original on top of its four shifted copies; repeat labels 5x.
    X = np.concatenate([X] + shifted)
    Y = np.concatenate([Y for _ in range(5)], axis=0)
    return X, Y
# Load Data
digits = datasets.load_digits()
X = np.asarray(digits.data, 'float32')
# Quintuple the dataset with 1px-shifted copies of each 8x8 digit.
X, Y = nudge_dataset(X, digits.target)
# Scale each feature to [0, 1]; the +0.0001 guards against division by zero
# for constant (always-zero) pixels.
X = (X - np.min(X, 0)) / (np.max(X, 0) + 0.0001)  # 0-1 scaling
X_train, X_test, Y_train, Y_test = train_test_split(X, Y,
                                                    test_size=0.2,
                                                    random_state=0)
print('X_train size : {0} \n'.format(X_train.shape))
print('X_test size : {0} \n'.format(X_test.shape))
# Models we will use
# Two hidden layers (256, 512), pretrained with CD-1 for 50 epochs per RBM.
dbn = UnsupervisedDBN(hidden_layers_structure=[256, 512],
                      batch_size=32,
                      learning_rate_rbm=0.06,
                      learning_rate_backprop = 1e-3,
                      n_epochs_rbm=50,
                      n_epochs_fine_tune = 500,
                      activation_function='sigmoid',
                      contrastive_divergence_iter = 1)
###############################################################################
# Training RBM-Logistic Pipeline
dbn.fit(X_train)

# Save the per-layer reconstruction-error curves gathered during pretraining,
# one pickle file per RBM layer.
# (Was: zip(dbn.layer_wise_error, range(len(...))) — enumerate is the idiom.)
for index, layer_wise_error in enumerate(dbn.layer_wise_error):
    with io.open("layer_" + str(index), 'wb') as f:
        pickle.dump(layer_wise_error, f)

# Fine tune the DBN using the reconstruction MSE (over pixels)
recon_error_test, recon_error_train = dbn.fine_tune(X_train, X_test)

# Save fine tuned parameters
with io.open("test_recon_finetune", 'wb') as f:
    pickle.dump(recon_error_test, f)
with io.open("train_recon_finetune", 'wb') as f:
    pickle.dump(recon_error_train, f)
|
#!/usr/bin/env python
import os, json, argparse, ConfigParser
from twisted.internet import reactor, defer
from twisted.internet.task import deferLater
from twisted.web.resource import Resource
from twisted.web.server import Site, NOT_DONE_YET
from twisted.web import static
THIS_DIR=os.path.dirname(os.path.realpath(__file__))
from paxoscore.proposer import Proposer
class MainPage(Resource):
    """Root web resource: serves the static index page.

    NOTE: Python 2 code (print statements); keep py2 semantics when editing.
    """
    def getChild(self, name, request):
        # An empty path segment (trailing "/") maps back to this resource.
        if name == '':
            return self
        else:
            print name, request
            return Resource.getChild(self, name, request)
    def render_GET(self, request):
        # Serve the page from disk on every request.
        # NOTE(review): the handle is never closed — relies on GC; consider
        # reading within a with-block.
        f = open('%s/web/index.html' % THIS_DIR, 'r')
        return f.read()
class WebServer(Resource):
    """Async HTTP bridge that forwards GET/POST requests to the Paxos proposer.

    Requests are answered later via Twisted's NOT_DONE_YET mechanism, once
    the proposer's deferred fires.
    """
    isLeaf = True
    def __init__(self, proposer):
        Resource.__init__(self)
        self.proposer = proposer
    def _waitResponse(self, result, request):
        # Deferred callback: strip trailing padding/NULs and complete the
        # still-open HTTP request.
        result = result.rstrip('\t\r\n\0')
        request.write(result)
        request.finish()
    def render_GET(self, request):
        print request
        # Tag the request as a read before submitting it to the proposer.
        request.args['action'] = 'get'
        data = json.dumps(request.args)
        d = self.proposer.submit(data)
        d.addCallback(self._waitResponse, request)
        return NOT_DONE_YET
    def render_POST(self, request):
        print request
        # Tag the request as a write.
        request.args['action'] = 'put'
        data = json.dumps(request.args)
        d = self.proposer.submit(data)
        d.addCallback(self._waitResponse, request)
        return NOT_DONE_YET
if __name__=='__main__':
    # Read the node configuration file given on the command line.
    parser = argparse.ArgumentParser(description='Paxos Proposer.')
    parser.add_argument('--cfg', required=True)
    args = parser.parse_args()
    config = ConfigParser.ConfigParser()
    config.read(args.cfg)
    # The proposer speaks the Paxos protocol over UDP on the configured port.
    proposer = Proposer(config, 0)
    reactor.listenUDP(config.getint('proposer', 'port'), proposer)
    # HTTP front end: index page plus /get and /put endpoints that both route
    # through the same WebServer resource.
    root = MainPage()
    server = WebServer(proposer)
    root.putChild('jquery.min.js', static.File('%s/web/jquery.min.js' % THIS_DIR))
    root.putChild('get', server)
    root.putChild('put', server)
    factory = Site(root)
    reactor.listenTCP(8080, factory)
    reactor.run()
|
#!/usr/bin/env python3
from os import urandom
from hashlib import sha1
from Crypto.Util.number import GCD, getPrime, inverse, long_to_bytes, bytes_to_long
class RSA(object):
    """Textbook RSA with an OAEP-like padding scheme built on SHA-1."""

    def __init__(self, key):
        # key is the (n, e, d) triple.
        self._n, self._e, self._d = key
        # Hash of the empty label, as in OAEP's lHash.
        self._digest = sha1(b'').digest()

    @staticmethod
    def generate(bits, e):
        """Create a key pair from two `bits`-bit primes and public exponent e."""
        p, q = getPrime(bits), getPrime(bits)
        modulus = p * q
        phi = (p - 1) * (q - 1)
        assert GCD(e, phi) == 1
        return RSA((modulus, e, inverse(e, phi)))

    @property
    def publickey(self):
        """The (n, e) pair."""
        return self._n, self._e

    @property
    def size(self):
        """Modulus size in bytes."""
        return (self._n.bit_length() + 7) // 8

    def encrypt(self, plaintext):
        """Apply the OAEP-style padding, then raw-RSA encrypt.

        Input longer than the padding allows is silently truncated.
        """
        size = 20  # SHA-1 digest length.
        max_length = self.size - 2 * size - 2
        plaintext = plaintext[:max_length]
        seed = urandom(size)
        # Data block: lHash || zero padding || 0x01 || message.
        salt = self._digest + b'\x00' * (max_length - len(plaintext)) + b'\x01' + plaintext
        salted_seed = self._xor(salt, self._expand(seed, self.size - size - 1))
        xored_seed = self._xor(seed, self._expand(salted_seed, size))
        return self._encrypt(b'\x00' + xored_seed + salted_seed)

    def decrypt(self, ciphertext):
        """Invert encrypt(): unmask seed and data block, strip the padding."""
        size = 20
        message = self._decrypt(ciphertext)
        xored_seed, salted_seed = message[:size], message[size:]
        seed = self._xor(xored_seed, self._expand(salted_seed, size))
        salt = self._xor(salted_seed, self._expand(seed, self.size - size))
        # Message starts right after the 0x00 0x01 marker following lHash.
        return salt[salt.find(b'\x00\x01', salt.find(self._digest)) + 2:]

    def _as_bytes(inner_func):
        # Decorator: bytes -> int, run the modular operation, int -> bytes.
        def new_func(self, raw_data):
            return long_to_bytes(inner_func(self, bytes_to_long(raw_data)))
        return new_func

    @_as_bytes
    def _encrypt(self, plaintext):
        return pow(plaintext, self._e, self._n)

    @_as_bytes
    def _decrypt(self, ciphertext):
        return pow(ciphertext, self._d, self._n)

    def _xor(self, data1, data2):
        # XOR truncated to the shorter argument's length.
        return bytes(a ^ b for a, b in zip(data1, data2))

    def _expand(self, data, length):
        # MGF1-style mask generation: SHA-1 over data || 4-byte counter.
        blocks = []
        counter = 0
        while sum(map(len, blocks)) < length:
            blocks.append(sha1(data + counter.to_bytes(4, 'big')).digest())
            counter += 1
        return b''.join(blocks)[:length]
|
from .grasp_cifar10_dber import GraspCifar10
class GripCifar10(GraspCifar10):
    """CIFAR10-format dataset wrapper for grip samples.

    Thin subclass of GraspCifar10: it only forwards constructor arguments;
    all loading logic lives in the parent class.
    """
    def __init__(self, root=None, train=True,
                 transform=None, target_transform=None,
                 download=False, im_shape=(32, 32, 3), data=None, indexing=False, base_folder='cifar10'):
        super(GripCifar10, self).__init__(root=root, train=train,
                                          transform=transform, target_transform=target_transform,
                                          download=download, im_shape=im_shape, data=data, indexing=indexing, base_folder=base_folder)

    # Historical per-dataset normalisation statistics, kept for reference
    # (each pair is the channel means/stds of a particular data dump):
    # db_mean = (0.61799215, 0.29761591, 0.37573633, 0.52014014)#(0.60075633,0.30408495, 0.37778071, 0.22482509)
    # db_std = (0.10109334, 0.1636474, 0.14329318, 0.0625928)#(0.11525005,0.2022998, 0.17382977, 0.09038225)
    # db_mean = (0.60754877, 0.38245198, 0.44624274, 0.43154834)
    # db_std = (0.14569339, 0.18942552, 0.17363705, 0.02090128)
    # db_mean = (0.42988802, 0.42988802, 0.42988802) #grip_evaluator_depth_20200327
    # db_std = (0.02043479, 0.02043479, 0.02043479)
    # db_mean = (0.60747145, 0.3981848, 0.45978323) # grip_evaluator_rgb_20200327
    # db_std = (0.14146864, 0.18522996, 0.17041358)
    # db_mean = (0.55005549, 0.55005549, 0.55005549) # grip_evaluator_depth_20200329
    # db_std = (0.03288065, 0.03288065, 0.03288065)
    # db_mean = (0.55184516, 0.55184516, 0.55184516) # grip_evaluator_depth_20200408
    # db_std = (0.03759594, 0.03759594, 0.03759594)
    # db_mean = (0.54323941, 0.54323941, 0.54323941) # grip_evaluator_depth_20200409
    # db_std = (0.04577378, 0.04577378, 0.04577378)
    # db_mean = (0.54602665, 0.54602665, 0.54602665) # grip_evaluator_depth_20200423
    # db_std = (0.03714684, 0.03714684, 0.03714684)
    # db_mean = (0.35978138, 0.41708053, 0.5188343, 0.53351979) # grip_evaluator_depth_20200424
    # db_std = (0.11914077, 0.10032271, 0.12174645, 0.04099747)
    # db_mean = (0.33610585, 0.38967911, 0.49009963, 0.52970528) # grip_evaluator_depth_20200427
    # db_std = (0.12106411, 0.10579008, 0.12965363, 0.04399332)
    # db_mean = (0.42963929 0.44455258 0.42168464 0.50111799) # 20200512, or use 202200427
    # db_std = (0.145406 0.137699 0.1424108 0.03611386)
    # db_mean = (0.41280124, 0.42837454, 0.42626716, 0.31356105) # 20200514
    # db_std = (0.15362443, 0.15150588, 0.15615994, 0.03650546)
    # db_mean = (0.36361247, 0.40355713, 0.46720627, 0.45218567) # 20200518
    # db_std = (0.13274179, 0.12218594, 0.13916006, 0.04130782)
    # db_mean = (0.76513104, 0.74994571, 0.74697943, 0.4860831) # 20200610
    # db_std = (0.17024282, 0.1789022, 0.1872297, 0.09504798)
    # db_mean = (0.41828456, 0.40237707, 0.41067797, 0.24735339, 0.1663269, 0.85723261) # 20200624
    # db_std = (0.14510322, 0.14341601, 0.14637822, 0.25664706, 0.21761108, 0.16026509)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn import ensemble
import random
# Load the cleaned housing data (semicolon-separated).
df_all = pd.read_csv("data/CTrainTestNAsRemoved.csv", sep=";")

# Split columns into categorical (non-numeric dtype) and continuous.
cat_keys = [key for key in df_all.keys() if not np.issubdtype(df_all[key].dtype, np.number)]
cont_keys = [key for key in df_all.keys() if key not in cat_keys]
print(f"Cat Columns: {len(cat_keys)}")
# FIX: this previously printed the "Cat Columns" label again while counting
# the continuous columns.
print(f"Cont Columns: {len(cont_keys)}")
print(cont_keys)

# For these numeric columns a missing value means "feature absent" -> 0.
for key in ['LotFrontage', 'MasVnrArea', 'BsmtFinSF1', 'BsmtFinSF2', 'BsmtUnfSF', 'TotalBsmtSF', 'BsmtFullBath',
            'BsmtHalfBath', 'GarageYrBlt', 'GarageCars', 'GarageArea']:
    df_all[key] = df_all[key].fillna(0.0)

# Show missing
# df_all_cont = df_all[cont_keys]
# nan = (len(df_all_cont.index) - df_all_cont.count())
# print(f"Columns: {len(df_all.keys())}")
# print(f"Columns: {dsall.keys()}")

# One-hot encode the categoricals and rejoin with the continuous columns.
df_cat = df_all[cat_keys]
df_cont = df_all[cont_keys]
df_cat_dummies = pd.get_dummies(df_cat)
# print(df_cat_dummies)
df_final = df_cat_dummies.join(df_cont)
df_final.to_csv("data/CTrain01.csv")
#! coding: utf-8
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import FormView, CreateView, UpdateView, DeleteView
from django.contrib.contenttypes.models import ContentType
from django.contrib.admin.models import LogEntry
from django.shortcuts import render, render_to_response
from django.template import RequestContext
from django.db.models import F, Q, Func, Count
from django.db.models.functions import Substr
from utils.views import ACTIONS
from utils.context_processors import additional_user_info
from help.models import get_help_fields
from utils.views import LoginRequiredView, GenericUpdateWithOneFormset
from operator import itemgetter, attrgetter
from datetime import datetime
from forms import *
import re
class InstGenericListView(LoginRequiredView, ListView):
    """
    Base list view for institution records: permission gate, fielded and
    free-text search, owner/status/type/category/country filters, ordering.
    (Previous docstring said "legislation records" — copy/paste remnant.)
    NOTE: Python 2 codebase (the file uses `unicode` and print statements).
    """
    paginate_by = settings.ITEMS_PER_PAGE
    search_field = 'name'
    def dispatch(self, *args, **kwargs):
        # Entry gate: only advanced users holding the DirIns role may list.
        user_data = additional_user_info(self.request)
        user_role = user_data['service_role'].get('DirIns')
        user_type = user_data.get('user_type')
        # save current filter in session
        self.request.session["filtered_list"] = self.request.get_full_path()
        # restrict institution module to advanced users with DirIns permission
        if user_type != 'advanced' or not user_role:
            return HttpResponseForbidden()
        return super(InstGenericListView, self).dispatch(*args, **kwargs)
    def get_queryset(self):
        """Build the filtered, ordered queryset from the GET parameters."""
        user_data = additional_user_info(self.request)
        user_role = user_data['service_role'].get('DirIns')
        user_cc = user_data.get('user_cc')
        user_type = user_data.get('user_type')
        # getting action parameter
        self.actions = {}
        for key in ACTIONS.keys():
            self.actions[key] = self.request.GET.get(key, ACTIONS[key])
        # search filter
        search = self.actions['s']
        if search:
            # "field:value" syntax targets one field; some fields are aliased.
            if ':' in search:
                search_parts = search.split(':')
                search_field = search_parts[0]
                if search_field == 'user':
                    search_field = 'adm__type_history'
                elif search_field == 'cat':
                    search_field = 'adm__category_history'
                search_field, search = "%s%s" % (search_field,'__icontains'), search_parts[1]
                object_list = self.model.objects.filter(**{search_field: search})
            # search by cc code
            elif bool(re.match(r"^[A-Za-z]{2}[0-9]+", search)):
                object_list = self.model.objects.filter(cc_code__istartswith=search)
            # search by name or acronym
            else:
                if settings.FULLTEXT_SEARCH:
                    query_search = Q(unitlevel__unit__name__search=search) | Q(unitlevel__unit__acronym__search=search) | Q(name__search=search) | Q(acronym__search=search)
                else:
                    query_search = Q(unitlevel__unit__name__icontains=search) | Q(unitlevel__unit__acronym__icontains=search) | Q(name__icontains=search) | Q(acronym__icontains=search)
                object_list = self.model.objects.filter(query_search).distinct()
        else:
            object_list = self.model.objects.all()
        # filter by user institution
        # Non-BR1.1 users (or explicit owner filter) only see their own centre.
        if self.actions['filter_owner'] != "*" or user_cc != 'BR1.1':
            object_list = object_list.filter(cc_code=user_cc)
        else:
            if self.actions['filter_status'] != '':
                object_list = object_list.filter(status=self.actions['filter_status'])
            if self.actions['filter_type'] != '':
                object_list = object_list.filter(adm__type=self.actions['filter_type'])
            if self.actions['filter_category'] != '':
                object_list = object_list.filter(adm__category=self.actions['filter_category'])
            if self.actions['filter_country'] != '':
                object_list = object_list.filter(country=self.actions['filter_country'])
                # when user sort by country order the result by a numeric value of center code
                object_list = object_list.annotate(center_code=Func(Substr('cc_code',3),
                                                   template='%(function)s(%(expressions)s AS %(type)s)',
                                                   function='Cast', type='decimal')).annotate(n_code=F('center_code')).order_by('-n_code')
            else:
                # by default order by reverse order of id's
                object_list = object_list.order_by('-id')
        return object_list
    def get_context_data(self, **kwargs):
        """Expose filters, lookup lists and user info to the template."""
        context = super(InstGenericListView, self).get_context_data(**kwargs)
        user_data = additional_user_info(self.request)
        user_role = user_data['service_role'].get('DirIns')
        type_list = Type.objects.all()
        category_list = Category.objects.all()
        country_id_list = Institution.objects.values('country_id').distinct()
        country_objects = Country.objects.filter(id__in=country_id_list)
        # get countries and sort by name
        country_list = [(c.pk, unicode(c)) for c in country_objects]
        country_list.sort(key=lambda c: c[1])
        context['actions'] = self.actions
        context['user_role'] = user_role
        context['user_cc'] = user_data.get('user_cc')
        context['country_list'] = country_list
        context['type_list'] = type_list
        context['category_list'] = category_list
        return context
# ========================= Institution ========================================================
class InstListView(InstGenericListView, ListView):
    """
    Extend InstGenericListView to list Institution records; all search,
    filter and permission logic is inherited from the base class.
    """
    model = Institution
class UnitListView(ListView):
    """Paginated Unit list with name/acronym search and country filter."""
    model = Unit
    template_name = "institution/institution_unit.html"
    paginate_by = 10
    def get_queryset(self):
        """Filter units by search term (fulltext or icontains) and country."""
        # getting action parameter
        self.actions = {}
        for key in ACTIONS.keys():
            self.actions[key] = self.request.GET.get(key, ACTIONS[key])
        param_country = self.request.GET.get('country')
        search = self.actions['s']
        if search:
            search_method = 'search' if settings.FULLTEXT_SEARCH else 'icontains'
            search_field1 = 'name__' + search_method
            search_field2 = 'acronym__' + search_method
            if settings.FULLTEXT_SEARCH:
                # search using boolean AND
                search = u"+{}".format(search.replace(' ', ' +'))
            object_list = self.model.objects.filter(Q(**{search_field1: search}) | Q(**{search_field2: search}))
        else:
            object_list = self.model.objects.all()
        if param_country:
            object_list = object_list.filter(country=param_country)
        return object_list
    def get_context_data(self, **kwargs):
        """Add the add-unit form, country parameter and actions to the context."""
        context = super(UnitListView, self).get_context_data(**kwargs)
        context['form'] = UnitForm()
        context['param_country'] = self.request.GET.get('country')
        context['actions'] = self.actions
        return context
class InstUpdate(LoginRequiredView):
    """
    Shared creation/update logic for Institution; subclassed by
    InstCreateView and InstUpdateView. Manages the four inline formsets
    (contacts, URLs, unit levels, adm data) alongside the main form.
    """
    model = Institution
    success_url = reverse_lazy('list_institution')
    form_class = InstitutionForm
    def get_object(self, *args, **kwargs):
        """Return the Institution, or None when the user may not edit it."""
        obj = super(InstUpdate, self).get_object(*args, **kwargs)
        user_data = additional_user_info(self.request)
        user_role = user_data['service_role'].get('DirIns')
        user_cc = user_data.get('user_cc')
        user_type = user_data.get('user_type')
        # restrict edition to BR1.1 users or advanced users with same CC code
        if user_cc != 'BR1.1':
            if user_cc != obj.cc_code or user_type != 'advanced':
                return None
        return obj
    def form_valid(self, form):
        """Validate the form and all formsets together, then save everything."""
        formset_contact = ContactFormSet(self.request.POST, instance=self.object)
        formset_url = URLFormSet(self.request.POST, instance=self.object)
        formset_unitlevel = UnitLevelFormSet(self.request.POST, instance=self.object)
        formset_adm = AdmFormSet(self.request.POST, instance=self.object)
        # run all validation before for display formset errors at form
        form_valid = form.is_valid()
        formset_contact_valid = formset_contact.is_valid()
        formset_url_valid = formset_url.is_valid()
        formset_unitlevel_valid = formset_unitlevel.is_valid()
        formset_adm_valid = formset_adm.is_valid()
        user_data = additional_user_info(self.request)
        if (form_valid and formset_contact_valid and formset_url_valid and
            formset_unitlevel_valid and formset_adm_valid):
            self.object = form.save()
            formset_contact.instance = self.object
            formset_contact.save()
            formset_url.instance = self.object
            formset_url.save()
            formset_unitlevel.instance = self.object
            formset_unitlevel.save()
            formset_adm.instance = self.object
            formset_adm.save()
            # update solr index
            # NOTE(review): deliberate second save after the formsets, so the
            # index sees the related rows — confirm this is still required.
            form.save()
            return HttpResponseRedirect(self.get_success_url())
        else:
            return self.render_to_response(
                self.get_context_data(form=form,
                                      formset_contact=formset_contact,
                                      formset_url=formset_url,
                                      formset_adm=formset_adm,
                                      formset_unitlevel=formset_unitlevel))
    def form_invalid(self, form):
        # force use of form_valid method to run all validations
        return self.form_valid(form)
    def get_form_kwargs(self):
        """Pass the current user's data into the form constructor."""
        kwargs = super(InstUpdate, self).get_form_kwargs()
        user_data = additional_user_info(self.request)
        additional_form_parameters = {}
        additional_form_parameters['user_data'] = user_data
        kwargs.update(additional_form_parameters)
        return kwargs
    def get_context_data(self, **kwargs):
        """Add permission flags, user info and (on GET) blank formsets."""
        context = super(InstUpdate, self).get_context_data(**kwargs)
        user_data = additional_user_info(self.request)
        user_role = user_data['service_role'].get('DirIns')
        user_cc = user_data['user_cc']
        user_id = self.request.user.id
        if self.object:
            user_data['is_owner'] = True if self.object.created_by == self.request.user else False
        context['user_data'] = user_data
        context['user_role'] = user_role
        # create flag that control if user have permission to edit the reference
        context['user_can_edit'] = True if not self.object or self.object.cooperative_center_code in ['BR1.1', user_data['user_cc']] else False
        if user_cc == 'BR1.1':
            context['user_can_change_status'] = True
        else:
            context['user_can_change_status'] = False
        context['settings'] = settings
        context['help_fields'] = get_help_fields('institution')
        if self.object:
            c_type = ContentType.objects.get_for_model(self.get_object())
            context['c_type'] = c_type
        if self.request.method == 'GET':
            context['formset_contact'] = ContactFormSet(instance=self.object)
            context['formset_url'] = URLFormSet(instance=self.object)
            context['formset_adm'] = AdmFormSet(instance=self.object)
            context['formset_unitlevel'] = UnitLevelFormSet(instance=self.object)
        return context
    # use last filtered_url saved in session
    def get_success_url(self):
        """Redirect back to the last filtered list URL saved in the session."""
        redirect_url = self.request.session.get("filtered_list", self.success_url)
        # NOTE(review): debug print left in — consider removing or logging.
        print(redirect_url)
        return redirect_url
class InstUpdateView(InstUpdate, UpdateView):
    """
    Used as class view to update Institution
    Extend InstUpdate that do all the work
    """
class InstCreateView(InstUpdate, CreateView):
    """
    Used as class view to create Institution
    Extend InstUpdate that do all the work
    """
    def dispatch(self, *args, **kwargs):
        # Only BIREME users can create new institutions; others get 403.
        user_data = additional_user_info(self.request)
        user_cc = user_data.get('user_cc')
        # restrict create of new institution to BIREME (BR1.1)
        if user_cc != 'BR1.1':
            return HttpResponseForbidden()
        return super(InstCreateView, self).dispatch(*args, **kwargs)
class InstDeleteView(LoginRequiredView, DeleteView):
    """
    Handle deletion of an Institution and its dependent rows.
    """
    model = Institution
    success_url = reverse_lazy('list_institution')
    def dispatch(self, *args, **kwargs):
        # Only BIREME users can delete; others get 403.
        user_data = additional_user_info(self.request)
        user_cc = user_data.get('user_cc')
        # restrict delete of institution to BIREME (BR1.1)
        if user_cc != 'BR1.1':
            return HttpResponseForbidden()
        return super(InstDeleteView, self).dispatch(*args, **kwargs)
    def delete(self, request, *args, **kwargs):
        """Remove dependent rows first, then delete the institution itself."""
        obj = super(InstDeleteView, self).get_object()
        c_type = ContentType.objects.get_for_model(obj)
        # delete associated data
        Contact.objects.filter(institution_id=obj.id).delete()
        URL.objects.filter(institution_id=obj.id).delete()
        Adm.objects.filter(institution_id=obj.id).delete()
        InstitutionAdhesion.objects.filter(institution_id=obj.id).delete()
        return super(InstDeleteView, self).delete(request, *args, **kwargs)
@login_required
def add_unit(request):
    """
    Add Unit: create a Unit from a POSTed UnitForm, then redirect to the
    unit list filtered to show the new record. On validation failure the
    unit page is re-rendered with the form errors.
    NOTE: Python 2 code (print statement below).
    """
    success_url = ''
    if request.method == 'POST':
        form = UnitForm(request.POST)
        print form.errors
        if form.is_valid():
            new_unit = form.save()
            # Redirect so the new unit is visible in its country's list.
            success_url = "{0}/?s={1}&country={2}".format(reverse_lazy('list_unit'),
                                                          new_unit.name, new_unit.country.id)
        else:
            param_country = request.POST.get('country')
            return render(request, 'institution/institution_unit.html',
                          {'form': form, 'param_country': param_country})
    return HttpResponseRedirect(success_url)
@login_required
def adhesionterm(request, institution_id):
    """
    Display and update an institution's adhesion-term acceptance and its
    selected services/products. POST updates; GET shows current state.
    NOTE: "acepted" spelling is baked into model fields/params — keep as is.
    """
    serviceproduct_list = ServiceProduct.objects.all()
    # The latest term is the one institutions accept.
    adhesionterm = AdhesionTerm.objects.last()
    inst_servproduct_list = []
    acepted_status = False
    if request.POST:
        acepted_param = request.POST.get('acepted_flag')
        set_list_param = request.POST.getlist('set')
        unset_list_param = request.POST.getlist('unset')
        acepted_flag = True if acepted_param == '1' else False
        inst_adhesion, created = InstitutionAdhesion.objects.get_or_create(
            institution_id=institution_id, adhesionterm_id=adhesionterm.pk
        )
        # update acepted flag
        inst_adhesion.acepted = acepted_flag
        inst_adhesion.save()
        if set_list_param or unset_list_param:
            # remove duplicated ID's from set/unset lists
            set_list = list(set(set_list_param))
            unset_list = list(set(unset_list_param))
            for srvprod_id in set_list:
                inst_servprod, created = InstitutionServiceProduct.objects.get_or_create(institution_id=institution_id,
                                                                                        serviceproduct_id=srvprod_id)
            if unset_list:
                InstitutionServiceProduct.objects.filter(institution_id=institution_id, serviceproduct_id__in=unset_list).delete()
    else:
        # check if institution already acepted term
        inst_adhesion = InstitutionAdhesion.objects.filter(
            institution=institution_id, adhesionterm=adhesionterm.pk)
        if inst_adhesion:
            acepted_status = inst_adhesion[0].acepted
        inst_servproduct_filter = InstitutionServiceProduct.objects.filter(institution_id=institution_id)
        inst_servproduct_list = [ips.serviceproduct for ips in inst_servproduct_filter]
        user_data = additional_user_info(request)
        user_cc = user_data['user_cc']
        # get log info for BR1.1 users (administrative)
        # NOTE(review): these LogEntry querysets are built but not passed to
        # the template below — confirm whether they should be in the context.
        if user_cc == 'BR1.1':
            if inst_adhesion:
                ctype_adhesion = ContentType.objects.get_for_model(inst_adhesion[0])
                logs_adhesion = LogEntry.objects.filter(content_type=ctype_adhesion,
                                                        object_id=inst_adhesion[0].id)
            if inst_servproduct_filter:
                ctype_inst_servproduct = ContentType.objects.get_for_model(inst_servproduct_filter[0])
                inst_servproduct_id_list = [ips.id for ips in inst_servproduct_filter]
                logs_serviceproduct = LogEntry.objects.filter(content_type=ctype_inst_servproduct,
                                                              object_id__in=inst_servproduct_id_list)
    return render_to_response('institution/adhesionterm.html',
                              {'institution_id': institution_id, 'adhesionterm': adhesionterm,
                               'acepted_status': acepted_status, 'serviceproduct_list': serviceproduct_list,
                               'inst_servproduct_list': inst_servproduct_list},
                              context_instance=RequestContext(request))
|
"""
Generate Truth Tables from boolean expressions.
Example usage:
>>> tt = TruthTable('p and (~q or (p and r))', 'p or q and r', 'p -> q')
>>> tt.display()
┌───┬───┬───┬─────────────────────────┬──────────────┬────────┐
│ p │ q │ r │ p and (~q or (p and r)) │ p or q and r │ p -> q │
├───┼───┼───┼─────────────────────────┼──────────────┼────────┤
│ F │ F │ F │ F │ F │ T │
│ F │ F │ T │ F │ F │ T │
│ F │ T │ F │ F │ F │ T │
│ F │ T │ T │ F │ T │ T │
│ T │ F │ F │ T │ F │ F │
│ T │ F │ T │ T │ T │ F │
│ T │ T │ F │ F │ F │ T │
│ T │ T │ T │ T │ T │ T │
└───┴───┴───┴─────────────────────────┴──────────────┴────────┘
>>> tt.display(binary=True)
┌───┬───┬───┬─────────────────────────┬──────────────┬────────┐
│ p │ q │ r │ p and (~q or (p and r)) │ p or q and r │ p -> q │
├───┼───┼───┼─────────────────────────┼──────────────┼────────┤
│ 0 │ 0 │ 0 │ 0 │ 0 │ 1 │
│ 0 │ 0 │ 1 │ 0 │ 0 │ 1 │
│ 0 │ 1 │ 0 │ 0 │ 0 │ 1 │
│ 0 │ 1 │ 1 │ 0 │ 1 │ 1 │
│ 1 │ 0 │ 0 │ 1 │ 0 │ 0 │
│ 1 │ 0 │ 1 │ 1 │ 1 │ 0 │
│ 1 │ 1 │ 0 │ 0 │ 0 │ 1 │
│ 1 │ 1 │ 1 │ 1 │ 1 │ 1 │
└───┴───┴───┴─────────────────────────┴──────────────┴────────┘
Operator precedence is parens, negate, then left-to-right.
"""
from functools import reduce
from itertools import filterfalse, product
# Binary connectives supported in formulas, mapped to their truth functions.
# '->' is material implication and '<->' is the biconditional (equivalence).
OP_DICT = {'and': lambda p, q: p and q,
           'or': lambda p, q: p or q,
           '->': lambda p, q: not p or q,
           '<->': lambda p, q: p == q,
           'xor': lambda p, q: p != q}
# Every non-variable token: the operator names plus negation and parentheses.
TOKENS = set(OP_DICT).union('~()')
def reformat(formula):
    """Tokenize *formula*: pad '(', ')' and '~' with spaces, then split on whitespace."""
    padded = []
    for ch in formula:
        padded.append(f' {ch} ' if ch in '()~' else ch)
    return ''.join(padded).split()
def find_vars(expression):
    """Return the set of variable names in *expression* (every token not in TOKENS)."""
    return {token for token in expression if token not in TOKENS}
def evaluate(expression, vars_values):
    """Recursively evaluate a tokenized boolean expression.

    expression: list of tokens as produced by ``reformat``.
    vars_values: mapping of variable name -> 0/1 truth value.
    Returns 0 or 1.  Reduction order: innermost parens first, then the
    rightmost '~', then binary operators left-to-right (see module docstring).
    """
    if len(expression) == 1:  # Base case: a lone token is a variable or an already-reduced 0/1.
        p = expression[0]
        return vars_values.get(p, p)  # Non-variable tokens (ints) fall through unchanged.
    expression = expression.copy()  # We'll be modifying expression, so let's make a copy.
    if '(' in expression:
        # To find an innermost pair of parens, look for the first occurrence of a close
        # paren and then reverse-search from it for the matching open paren.
        last = expression.index(')')
        first = last - expression[last::-1].index('(')
        # Replace the whole '( ... )' span with its evaluated 0/1 result.
        expression[first: last + 1] = [evaluate(expression[first + 1: last], vars_values)]
        return evaluate(expression, vars_values)
    if '~' in expression:
        negate_index = len(expression) - expression[::-1].index('~') - 1  # Find last negate.
        var = expression[negate_index + 1]
        var = vars_values.get(var, var)
        # Collapse the '~ x' pair into a single 0/1 token.
        expression[negate_index: negate_index + 2] = [int(not var)]
        return evaluate(expression, vars_values)
    # No parens or negations left: fold the leftmost binary operation.
    p, op, q = expression[:3]
    p, op, q = vars_values.get(p, p), OP_DICT[op], vars_values.get(q, q)
    expression[:3] = [int(op(p, q))]
    return evaluate(expression, vars_values)
def table_maker(*rows):
    """Return an aligned, box-drawn table of the given rows.

    Each row is a sequence of strings; the first row is the header and fixes
    the column widths.  Fix: cells are padded into fresh lists instead of
    mutating the caller's row lists in place (the old version had that
    surprising side effect).  Modified from
    https://github.com/salt-die/Table-Maker
    """
    lengths = tuple(map(len, rows[0]))
    # Center-pad every cell to its column width; inputs are left untouched.
    padded = [[f'{item:^{length}}' for item, length in zip(row, lengths)]
              for row in rows]
    # Horizontal rules sized to each column (+2 for the cell side padding).
    horizontals = tuple("─" * (length + 2) for length in lengths)
    top, header_rule, bottom = (f'{l}{m.join(horizontals)}{r}'
                                for l, m, r in ('┌┬┐', '├┼┤', '└┴┘'))
    table = [f'│ {" │ ".join(row)} │' for row in padded]
    table.insert(0, top)
    table.insert(2, header_rule)  # Rule goes directly under the header row.
    table.append(bottom)
    return '\n'.join(table)
class TruthTable:
    """Truth table over one or more boolean propositions (see module docstring)."""

    def __init__(self, *props):
        self._props = list(props)
        self._update_attributes()

    @property
    def props(self):
        """The list of proposition strings in this table."""
        return self._props

    @props.setter
    def props(self, new_props):
        self._props = list(new_props)
        self._update_attributes()

    def add_prop(self, prop):
        """Append a proposition and rebuild the table."""
        self._props.append(prop)
        self._update_attributes()

    def pop(self, i=None):
        """Remove the i-th (default: last) proposition and return it as its own table."""
        removed = self._props.pop() if i is None else self._props.pop(i)
        self._update_attributes()
        return TruthTable(removed)

    def _update_attributes(self):
        """Recompute ``self.vars`` and ``self.table`` from the current propositions."""
        tokenized = tuple(reformat(prop) for prop in self.props)
        self.vars = sorted(reduce(set.union, map(find_vars, tokenized)))
        self.table = [
            list(assignment) + [evaluate(tokens, dict(zip(self.vars, assignment)))
                                for tokens in tokenized]
            for assignment in product((0, 1), repeat=len(self.vars))
        ]

    def display(self, binary=False):
        """Print the table; truth values shown as 0/1 when *binary*, else F/T."""
        symbols = '01' if binary else 'FT'
        cells = [[symbols[value] for value in row] for row in self.table]
        print(table_maker(self.vars + self.props, *cells))

    def __repr__(self):
        return ' | '.join(self.props)

    def __eq__(self, other):
        if not isinstance(other, TruthTable):
            return False
        return self.vars == other.vars and self.table == other.table

    def __add__(self, other):
        if isinstance(other, str):
            return TruthTable(*self.props, other)
        return TruthTable(*self.props, *other.props)

    def __iadd__(self, other):
        if isinstance(other, str):
            self.add_prop(other)
        else:
            self.props = self.props + other.props
        return self
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function, division
from astropy.table import Table
from ...catalog import FluxDistribution
def test_FluxDistribution():
    """Smoke test: a FluxDistribution can be built from a one-row table."""
    flux_table = Table([{'S': 42}])
    FluxDistribution(flux_table, label='dummy')
|
# -*- coding: utf-8 -*-
"""Tests for the maxentropy package:
Machine translation example -- English to French -- from the paper 'A
maximum entropy approach to natural language processing' by Berger et
al., 1996.
Consider the translation of the English word 'in' into French. We
notice in a corpus of parallel texts the following facts:
(1) p(dans) + p(en) + p(a) + p(au cours de) + p(pendant) = 1
(2) p(dans) + p(en) = 3/10
(3) p(dans) + p(a) = 1/2
This code finds the probability distribution with maximal entropy
subject to these constraints.
"""
import numpy as np
import maxentropy
from maxentropy.utils import dictsampler
def f0(x):
    """Feature 0: normalization indicator -- true whenever x lies in the sample space."""
    return x in samplespace
def f1(x):
    """Feature 1: indicator for the translations 'dans' or 'en'."""
    return x in ('dans', 'en')
def f2(x):
    """Feature 2: indicator for the translations 'dans' or 'à'."""
    return x in ('dans', 'à')
# Feature functions from Berger et al. (1996): f0 normalizes the
# distribution, f1 and f2 encode the observed corpus constraints
# (see the module docstring).
features = [f0, f1, f2]
samplespace = ['dans', 'en', 'à', 'au cours de', 'pendant']
# Now set the desired feature expectations
target_expectations = [1.0, 0.3, 0.5]
# atleast_2d gives shape (1, n_features) -- one row of constraint targets for fit().
X = np.atleast_2d(target_expectations)
def test_berger(algorithm='CG'):
    """Fit the exact-samplespace Berger model and verify the constraints hold.

    algorithm: scipy optimizer name passed through to maxentropy (default 'CG').
    """
    model = maxentropy.MinDivergenceModel(features, samplespace,
                                          vectorized=False,
                                          verbose=False,
                                          algorithm=algorithm)
    # Fit the model
    model.fit(X)
    # How well are the constraints satisfied?
    assert np.allclose(X[0, :], model.expectations())
    # Manually test if the constraints are satisfied:
    p = model.probdist()
    # Constraint (1): the distribution sums to 1.
    assert np.isclose(p.sum(), target_expectations[0])
    # Constraint (2): p(dans) + p(en) = 3/10.
    assert np.isclose(p[0] + p[1], target_expectations[1])
    # Constraint (3): p(dans) + p(a) = 1/2.
    assert np.isclose(p[0] + p[2], target_expectations[2])
    # Output the distribution
    print("\nFitted model parameters are:\n" + str(model.params))
    print("\nFitted distribution is:")
    for j, x in enumerate(model.samplespace):
        print(f"\tx = {x:15s}: p(x) = {p[j]:.4f}")
    # Now show how well the constraints are satisfied:
    print()
    print("Desired constraints:")
    print("\tp['dans'] + p['en'] = 0.3")
    print("\tp['dans'] + p['à'] = 0.5")
    print()
    print("Actual expectations under the fitted model:")
    print("\tp['dans'] + p['en'] =", p[0] + p[1])
    print("\tp['dans'] + p['à'] =", p[0] + p[2])
def test_berger_simulated(algorithm='CG'):
    """Fit the same Berger model via importance sampling; check constraints loosely.

    algorithm: scipy optimizer name passed through to maxentropy (default 'CG').
    """
    # Define a uniform instrumental distribution for sampling.
    # This can be unnormalized.
    samplefreq = {e: 1 for e in samplespace}
    # Importance-sample size.
    n = 10**5
    # Now create a function that will be used for importance sampling.
    # When called with no arguments it should return a tuple
    # (xs, log_q_xs) representing:
    #     xs: a sample x_1,...,x_n to use for importance sampling
    #
    #     log_q_xs: an array of length n containing the (natural) log
    #     probability density (pdf or pmf) of each point under the
    #     auxiliary sampling distribution.
    auxiliary_sampler = dictsampler(samplefreq, size=n)
    model = maxentropy.MCMinDivergenceModel(features, auxiliary_sampler,
                                            vectorized=False,
                                            verbose=False,
                                            algorithm=algorithm)
    model.fit(X)
    # How well does the model estimate that the constraints satisfied?
    assert np.allclose(X[0, :], model.expectations())
    # Manually test if the constraints are satisfied:
    F = model.features(samplespace)
    p = model.pdf(F)
    # Monte-Carlo estimates, so compare with a loose absolute tolerance.
    assert np.isclose(p.sum(), target_expectations[0], atol=1e-2)
    assert np.isclose(p[0] + p[1], target_expectations[1], atol=1e-2)
    assert np.isclose(p[0] + p[2], target_expectations[2], atol=1e-2)
|
from __future__ import annotations
import itertools
import typing
import aiohttp
import asyncio
import logging
import zlib
from .dispatch import GatewayDispatch
from ..exceptions import GatewayError
logger: logging.Logger = logging.getLogger("tinycord")
# Marker that terminates every complete zlib-stream message (see decompress()).
ZLIB_SUFFIX = b'\x00\x00\xff\xff'
# Streaming decompressor reused across messages by Gateway.decompress().
# NOTE(review): module-level, so it is shared by ALL Gateway instances --
# per-shard state may be intended; confirm before running multiple shards.
inflator = zlib.decompressobj()
class Gateway:
    """
    The gateway is the handler for all the events and opcodes coming from the `Discord Gateway`
    """
    def __init__(
        self,
        token: str,
        *,
        intents: typing.List[str],
        url: str,
        shard_id: int,
        shard_count: int,
        version: int = 9,
        max_retries: int = 5,
        reconnect: bool = True,
    ) -> None:
        self.token = token
        """ The token of the bot """
        self.intents = intents
        """ The intents of the bot """
        self.url = url
        """ The url of the gateway """
        self.shard_id = shard_id
        """ The shard id of the bot """
        self.shard_count = shard_count
        """ The shard count of the bot """
        self.version = version
        """ The version of the gateway """
        # NOTE(review): max_retries and reconnect are stored but never
        # consulted (start_connection retries forever) -- confirm intent.
        self.max_retries = max_retries
        """ The max retries of the gateway """
        self.reconnect = reconnect
        """ The reconnect of the gateway """
        # Fix: keys are int opcodes, not str (annotation was Dict[str, ...]).
        self.__handler: typing.Dict[int, typing.Callable] = {
            7: self.handle_reconnect,
            9: self.handle_invalid_session,
            10: self.handle_hello,
            11: self.handle_heartbeat_ack,
        }
        """ The handler for the opcodes """
        # Pre-built exception instances, raised by handle_error on close codes.
        self.__errors: typing.Dict[int, GatewayError] = {
            4000: GatewayError("Unknown Error"),
            4001: GatewayError("Unknown Opcode"),
            4002: GatewayError("Decode Error"),
            4003: GatewayError("Not Authenticated"),
            4004: GatewayError("Authentication Failed"),
            4005: GatewayError("Already Authenticated"),
            4007: GatewayError("Invalid Sequence"),
            4008: GatewayError("Rate Limited"),
            4009: GatewayError("Session Timeout"),
            4010: GatewayError("Invalid Shard"),
            4011: GatewayError("Sharding Required"),
            4012: GatewayError("Invalid API Version"),
            4013: GatewayError("Invalid Intents"),
            4014: GatewayError("Disallowed Intents"),
        }
        """ The error handler for the opcodes """
        self.session: aiohttp.ClientSession = aiohttp.ClientSession()
        """ The session of the gateway """
        self.buffer: bytearray = bytearray()
        """ The buffer for the decompression """
        self.should_reconnect: bool = False
        """ The reconnect of the gateway """
        self.sequence: int = 0
        """ The sequence of the gateway """
        self.session_id: typing.Optional[int] = None
        """ The session id of the gateway """
        self.heartbeat_task: typing.Optional[asyncio.Task] = None
        """ The heartbeat task of the gateway """
        self.heartbeat_interval: typing.Optional[int] = None
        """ The heartbeat interval of the gateway (seconds) """
        self.heartbeat_ack: bool = False
        """ The heartbeat ack of the gateway """

    def append_handler(self, handlers: typing.Dict[int, typing.Callable]) -> None:
        """
        This function is used to append a handler to the gateway.
        Parameters
        ----------
        handlers: `typing.Dict[int, typing.Callable]`
            A dictionary of opcodes and their handlers.
        """
        self.__handler = {**self.__handler, **handlers}

    def append_error(self, errors: typing.Dict[int, typing.Callable]) -> None:
        """
        This function is used to append a error handler to the gateway.
        Parameters
        ----------
        errors: `typing.Dict[int, typing.Callable]`
            A dictionary of opcodes and their handlers.
        """
        self.__errors = {**self.__errors, **errors}

    def decompress(self, data: bytes) -> typing.Optional[bytes]:
        """
        This function is used to decompress the data.
        Returns None until a complete message (terminated by ZLIB_SUFFIX)
        has been buffered; the buffer is cleared once a message is emitted.
        Parameters
        ----------
        data: `bytes`
            The data to decompress.
        """
        self.buffer.extend(data)
        if len(self.buffer) < 4 or data[-4:] != ZLIB_SUFFIX:
            return None
        msg = inflator.decompress(self.buffer)
        self.buffer.clear()
        return msg

    def make_url(self) -> str:
        """
        This function is used to make the url for the gateway.
        """
        return f"{self.url}?v={self.version}&encoding=json&compress=zlib-stream"

    async def send(self, op: int, payload: typing.Dict[str,typing.Any]):
        """
        |coro|
        Send a payload to the gateway with the given opcode.
        (Docstring fixed: this is not the hello handler.)
        """
        await self.websocket.send_json({
            "op": op,
            "d": payload,
        })

    async def voice_connect(self, self_mute: bool, self_deaf: bool, channel_id: int, guild_id: int) -> None:
        """
        |coro|
        This function is used to connect to a voice.
        Parameters
        ----------
        self_mute: `bool`
            The self mute of the voice.
        self_deaf: `bool`
            The self deaf of the voice.
        channel_id: `int`
            The channel id of the voice.
        guild_id: `int`
            The guild id of the voice.
        """
        await self.send(4,{
            "guild_id": guild_id,
            "channel_id": channel_id,
            "self_mute": self_mute,
            "self_deaf": self_deaf,
        })

    async def voice_disconnect(self, guild_id: int) -> None:
        """
        |coro|
        This function is used to disconnect from a voice.
        Parameters
        ----------
        guild_id: `int`
            The guild id of the voice.
        """
        await self.send(4,{
            "guild_id": guild_id,
            "channel_id": None,
            "self_mute": False,
            "self_deaf": False,
        })

    async def send_identify(self, payload: GatewayDispatch) -> None:
        """
        |coro|
        This function is used to send the identify opcode.
        On reconnect it sends a resume (op 6) instead, replaying from the
        last seen sequence number.
        """
        if self.should_reconnect:
            logger.debug(
                f" {self.shard_id} Reconnecting to gateway..."
            )
            await self.send(
                op=6,
                payload={
                    "token": self.token,
                    "session_id": self.session_id,
                    "seq": self.sequence,
                }
            )
            return None
        await self.send(
            op=2,
            payload={
                "token": self.token,
                "properties": {
                    "$os": "linux",
                    "$browser": "tinycord",
                    "$device": "tinycord",
                },
                "compress": True,
                "large_threshold": 250,
                "shard": [self.shard_id, self.shard_count],
                "intents": self.intents,
            },
        )
        # Hello payload carries the interval in milliseconds; we keep seconds.
        self.heartbeat_interval = int(
            payload.data['heartbeat_interval'] / 1000
        )
        if not self.heartbeat_task or self.heartbeat_task.cancelled():
            self.heartbeat_task = asyncio.ensure_future(
                self.handle_heartbeat_task()
            )

    async def start_connection(self) -> None:
        """
        |coro|
        This function is used to start the connection.
        It do connect to the gateway and start the event handling.
        """
        # NOTE(review): retries forever; self.max_retries is never applied.
        for i in itertools.count():
            try:
                self.websocket = await self.session.ws_connect(
                    self.make_url(),
                )
                break
            except aiohttp.ClientConnectorError:
                logger.warning(
                    f" {self.shard_id} Failed to connect to gateway, retrying in 5 seconds..."
                )
                await asyncio.sleep(5)
        await self.start_event_handling()

    async def handle_message(self, message: aiohttp.ClientWebSocketResponse):
        """
        |coro|
        This is the handler for the message that come from the websocket
        """
        if message.type == aiohttp.WSMsgType.TEXT:
            await self.handle_data(message.data)
        elif message.type == aiohttp.WSMsgType.BINARY:
            data = self.decompress(message.data)
            if data is None:
                # Partial zlib-stream frame; wait for the rest.
                return None
            await self.handle_data(data)
        elif message.type == aiohttp.WSMsgType.ERROR:
            logger.warning(
                f" {self.shard_id} Websocket error: {message.exception()}"
            )
            await self.handle_error(self.websocket.close_code)

    async def handle_error(self, code: int):
        """
        |coro|
        This is the handler for the error that come from the websocket
        """
        error = self.__errors.get(code, None)
        if error is not None:
            raise error

    async def start_event_handling(self):
        """
        |coro|
        This function is the responsible of handling the event and parsing the data
        """
        async for message in self.websocket:
            await self.handle_message(message)

    async def handle_data(self, data: str):
        """
        |coro|
        This function is the handler for the data that come from the websocket.
        """
        payload = GatewayDispatch.form(data)
        if payload.seq is not None:
            self.sequence = payload.seq
        handler = self.__handler.get(payload.op)
        if handler is None:
            # Fix: opcodes without a registered handler (e.g. dispatch before
            # one is appended via append_handler) previously crashed with
            # "'NoneType' object is not callable".
            return None
        asyncio.ensure_future(
            handler(payload)
        )

    async def handle_hello(self, payload: GatewayDispatch):
        """
        |coro|
        This function is the handler for the hello opcode.
        """
        logger.info(
            f" {self.shard_id} Connected to gateway"
        )
        await self.send_identify(payload)

    async def handle_reconnect(self, payload: GatewayDispatch):
        """
        |coro|
        This function is the handler for the reconnect opcode.
        """
        logger.debug(
            f" {self.shard_id} Reconnecting to gateway..."
        )
        self.should_reconnect = True
        await self.start_connection()

    async def handle_invalid_session(self, payload: GatewayDispatch):
        """
        |coro|
        This function is the handler for the invalid session opcode.
        """
        logger.warning(
            f" {self.shard_id} Invalid session, reconnecting..."
        )
        await asyncio.sleep(5)
        await self.start_connection()

    async def handle_heartbeat_ack(self, payload: GatewayDispatch):
        """
        |coro|
        This function is the handler for the heartbeat ack opcode.
        """
        if not self.heartbeat_ack:
            logger.debug(
                f" {self.shard_id} Received heartbeat ack"
            )
        self.heartbeat_ack = True

    async def handle_heartbeat_task(self):
        """
        |coro|
        This function is the handler for the heartbeat task:
        send the last sequence number every heartbeat_interval seconds.
        """
        while True:
            await asyncio.sleep(self.heartbeat_interval)
            await self.send(1, self.sequence)
            logger.debug(
                f" {self.shard_id} Heartbeat sent to gateway..."
            )

    def __repr__(self) -> str:
        return f'<GatewayShard {self.shard_id}>'
"""
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* - Neither the name of prim nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior
* written permission.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
"""
import fabric
class Eth100(fabric.Fabric):
    """
    This is the public information about 100 GbE using Tomahawk II
    """

    def __init__(self, **kwargs):
        super(Eth100, self).__init__(**kwargs)
        # Reject any option key the Fabric base class does not recognize.
        for key in kwargs:
            assert key in super(Eth100, self).using_options(), \
                'unknown option key: {}'.format(key)
        # Cable catalog: (length_m, type, cost, power) tuples, shortest first.
        # electrical
        #   http://www.fs.com/products/47096.html (Dell)
        # optical
        #   http://www.fs.com/products/65892.html
        self._options = [
            (1.0, 'pcc', 67, 0),
            (2.0, 'pcc', 90, 0),
            (3.0, 'pcc', 110, 0),
            (5.0, 'pcc', 200, 0),
            (7.0, 'aoc', 500, 3.5*2),
            (10.0, 'aoc', 510, 3.5*2),
            (15.0, 'aoc', 520, 3.5*2),
            (20.0, 'aoc', 540, 3.5*2),
            (25.0, 'aoc', 550, 3.5*2),
            (30.0, 'aoc', 570, 3.5*2),
            (50.0, 'aoc', 590, 3.5*2),  # a guess
            (75.0, 'aoc', 640, 3.5*2)]  # a guess

    def _make_router(self, minimum_radix):
        """Return a 64-port router; this technology offers no larger radix."""
        assert minimum_radix <= 64, 'Eth100 only supports 64 port routers'
        return fabric.Router(64)

    def _make_cable(self, minimum_length):
        """Return the shortest catalog cable that covers minimum_length."""
        for length, _ctype, _cost, _power in self._options:
            if length >= minimum_length:
                # Partial cables are cut to the exact requested length;
                # otherwise the full standard length is used.
                actual = minimum_length if self.partial_cables else length
                return fabric.Cable(minimum_length, actual)
        assert False, 'no cable available for length: {}'.format(minimum_length)

    def _set_router_attributes(self, router, count):
        """Stamp technology, cost and power onto a router."""
        router.tech = '100GbE'
        router.cost = 5000
        router.power = 300

    def _set_cable_attributes(self, cable, count):
        """Stamp attributes from the first catalog entry covering the cable's length."""
        for length, ctype, cost, power in self._options:
            if length >= cable.actual_length:
                cable.tech = ctype
                cable.cost = cost
                cable.power = power
                return
        assert False
|
# Transliteration strings indexed by offset within a 256-entry code-point
# block (one entry per '# 0xNN' annotation).  The romanizations ('byu...',
# 'beu...', 'bba...', 'bbya...') look like Hangul-syllable transliterations
# in a Unidecode-style table -- NOTE(review): confirm the block's base code
# point against the containing package.
data = (
    'byum', # 0x00
    'byub', # 0x01
    'byubs', # 0x02
    'byus', # 0x03
    'byuss', # 0x04
    'byung', # 0x05
    'byuj', # 0x06
    'byuc', # 0x07
    'byuk', # 0x08
    'byut', # 0x09
    'byup', # 0x0a
    'byuh', # 0x0b
    'beu', # 0x0c
    'beug', # 0x0d
    'beugg', # 0x0e
    'beugs', # 0x0f
    'beun', # 0x10
    'beunj', # 0x11
    'beunh', # 0x12
    'beud', # 0x13
    'beul', # 0x14
    'beulg', # 0x15
    'beulm', # 0x16
    'beulb', # 0x17
    'beuls', # 0x18
    'beult', # 0x19
    'beulp', # 0x1a
    'beulh', # 0x1b
    'beum', # 0x1c
    'beub', # 0x1d
    'beubs', # 0x1e
    'beus', # 0x1f
    'beuss', # 0x20
    'beung', # 0x21
    'beuj', # 0x22
    'beuc', # 0x23
    'beuk', # 0x24
    'beut', # 0x25
    'beup', # 0x26
    'beuh', # 0x27
    'byi', # 0x28
    'byig', # 0x29
    'byigg', # 0x2a
    'byigs', # 0x2b
    'byin', # 0x2c
    'byinj', # 0x2d
    'byinh', # 0x2e
    'byid', # 0x2f
    'byil', # 0x30
    'byilg', # 0x31
    'byilm', # 0x32
    'byilb', # 0x33
    'byils', # 0x34
    'byilt', # 0x35
    'byilp', # 0x36
    'byilh', # 0x37
    'byim', # 0x38
    'byib', # 0x39
    'byibs', # 0x3a
    'byis', # 0x3b
    'byiss', # 0x3c
    'bying', # 0x3d
    'byij', # 0x3e
    'byic', # 0x3f
    'byik', # 0x40
    'byit', # 0x41
    'byip', # 0x42
    'byih', # 0x43
    'bi', # 0x44
    'big', # 0x45
    'bigg', # 0x46
    'bigs', # 0x47
    'bin', # 0x48
    'binj', # 0x49
    'binh', # 0x4a
    'bid', # 0x4b
    'bil', # 0x4c
    'bilg', # 0x4d
    'bilm', # 0x4e
    'bilb', # 0x4f
    'bils', # 0x50
    'bilt', # 0x51
    'bilp', # 0x52
    'bilh', # 0x53
    'bim', # 0x54
    'bib', # 0x55
    'bibs', # 0x56
    'bis', # 0x57
    'biss', # 0x58
    'bing', # 0x59
    'bij', # 0x5a
    'bic', # 0x5b
    'bik', # 0x5c
    'bit', # 0x5d
    'bip', # 0x5e
    'bih', # 0x5f
    'bba', # 0x60
    'bbag', # 0x61
    'bbagg', # 0x62
    'bbags', # 0x63
    'bban', # 0x64
    'bbanj', # 0x65
    'bbanh', # 0x66
    'bbad', # 0x67
    'bbal', # 0x68
    'bbalg', # 0x69
    'bbalm', # 0x6a
    'bbalb', # 0x6b
    'bbals', # 0x6c
    'bbalt', # 0x6d
    'bbalp', # 0x6e
    'bbalh', # 0x6f
    'bbam', # 0x70
    'bbab', # 0x71
    'bbabs', # 0x72
    'bbas', # 0x73
    'bbass', # 0x74
    'bbang', # 0x75
    'bbaj', # 0x76
    'bbac', # 0x77
    'bbak', # 0x78
    'bbat', # 0x79
    'bbap', # 0x7a
    'bbah', # 0x7b
    'bbae', # 0x7c
    'bbaeg', # 0x7d
    'bbaegg', # 0x7e
    'bbaegs', # 0x7f
    'bbaen', # 0x80
    'bbaenj', # 0x81
    'bbaenh', # 0x82
    'bbaed', # 0x83
    'bbael', # 0x84
    'bbaelg', # 0x85
    'bbaelm', # 0x86
    'bbaelb', # 0x87
    'bbaels', # 0x88
    'bbaelt', # 0x89
    'bbaelp', # 0x8a
    'bbaelh', # 0x8b
    'bbaem', # 0x8c
    'bbaeb', # 0x8d
    'bbaebs', # 0x8e
    'bbaes', # 0x8f
    'bbaess', # 0x90
    'bbaeng', # 0x91
    'bbaej', # 0x92
    'bbaec', # 0x93
    'bbaek', # 0x94
    'bbaet', # 0x95
    'bbaep', # 0x96
    'bbaeh', # 0x97
    'bbya', # 0x98
    'bbyag', # 0x99
    'bbyagg', # 0x9a
    'bbyags', # 0x9b
    'bbyan', # 0x9c
    'bbyanj', # 0x9d
    'bbyanh', # 0x9e
    'bbyad', # 0x9f
    'bbyal', # 0xa0
    'bbyalg', # 0xa1
    'bbyalm', # 0xa2
    'bbyalb', # 0xa3
    'bbyals', # 0xa4
    'bbyalt', # 0xa5
    'bbyalp', # 0xa6
    'bbyalh', # 0xa7
    'bbyam', # 0xa8
    'bbyab', # 0xa9
    'bbyabs', # 0xaa
    'bbyas', # 0xab
    'bbyass', # 0xac
    'bbyang', # 0xad
    'bbyaj', # 0xae
    'bbyac', # 0xaf
    'bbyak', # 0xb0
    'bbyat', # 0xb1
    'bbyap', # 0xb2
    'bbyah', # 0xb3
    'bbyae', # 0xb4
    'bbyaeg', # 0xb5
    'bbyaegg', # 0xb6
    'bbyaegs', # 0xb7
    'bbyaen', # 0xb8
    'bbyaenj', # 0xb9
    'bbyaenh', # 0xba
    'bbyaed', # 0xbb
    'bbyael', # 0xbc
    'bbyaelg', # 0xbd
    'bbyaelm', # 0xbe
    'bbyaelb', # 0xbf
    'bbyaels', # 0xc0
    'bbyaelt', # 0xc1
    'bbyaelp', # 0xc2
    'bbyaelh', # 0xc3
    'bbyaem', # 0xc4
    'bbyaeb', # 0xc5
    'bbyaebs', # 0xc6
    'bbyaes', # 0xc7
    'bbyaess', # 0xc8
    'bbyaeng', # 0xc9
    'bbyaej', # 0xca
    'bbyaec', # 0xcb
    'bbyaek', # 0xcc
    'bbyaet', # 0xcd
    'bbyaep', # 0xce
    'bbyaeh', # 0xcf
    'bbeo', # 0xd0
    'bbeog', # 0xd1
    'bbeogg', # 0xd2
    'bbeogs', # 0xd3
    'bbeon', # 0xd4
    'bbeonj', # 0xd5
    'bbeonh', # 0xd6
    'bbeod', # 0xd7
    'bbeol', # 0xd8
    'bbeolg', # 0xd9
    'bbeolm', # 0xda
    'bbeolb', # 0xdb
    'bbeols', # 0xdc
    'bbeolt', # 0xdd
    'bbeolp', # 0xde
    'bbeolh', # 0xdf
    'bbeom', # 0xe0
    'bbeob', # 0xe1
    'bbeobs', # 0xe2
    'bbeos', # 0xe3
    'bbeoss', # 0xe4
    'bbeong', # 0xe5
    'bbeoj', # 0xe6
    'bbeoc', # 0xe7
    'bbeok', # 0xe8
    'bbeot', # 0xe9
    'bbeop', # 0xea
    'bbeoh', # 0xeb
    'bbe', # 0xec
    'bbeg', # 0xed
    'bbegg', # 0xee
    'bbegs', # 0xef
    'bben', # 0xf0
    'bbenj', # 0xf1
    'bbenh', # 0xf2
    'bbed', # 0xf3
    'bbel', # 0xf4
    'bbelg', # 0xf5
    'bbelm', # 0xf6
    'bbelb', # 0xf7
    'bbels', # 0xf8
    'bbelt', # 0xf9
    'bbelp', # 0xfa
    'bbelh', # 0xfb
    'bbem', # 0xfc
    'bbeb', # 0xfd
    'bbebs', # 0xfe
    'bbes', # 0xff
)
|
#coding : utf-8
import os
import json
import random
import logging
import requests
import responder
import copy
from linebot import LineBotApi, WebhookHandler
from linebot.exceptions import InvalidSignatureError
from linebot.models import (MessageEvent, FollowEvent, PostbackEvent, UnfollowEvent, TextMessage, TextSendMessage, FlexSendMessage,
RichMenu, RichMenuSize, RichMenuArea, RichMenuBounds, PostbackAction)
from db import LineCrud, SessionManager
from flex_message import password_generate, carousel_message, delete_server_contents, setting_contens, select_contents
# Discord REST API root and bot authorization header.
BASE_URL = "https://discord.com/api"
# NOTE(review): 'HADER' looks like a typo for 'HEADER'; kept as-is because
# every request in this module references it by this name.
HADER = {"Authorization":"Bot {}".format(os.environ["DISCORD_TOKEN"])}
api = responder.API()
line_bot_api = LineBotApi(os.environ["LINE_ACCESS_TOKEN"])
handler = WebhookHandler(os.environ["LINE_CHANNEL_SECRET"])
line_crud = LineCrud()
session_mng = SessionManager()
@api.route("/callback")
async def on_post(req, resp):
    """LINE webhook endpoint: verify the signature and dispatch events.

    Replies 200 immediately; the actual event handling runs as a responder
    background task.
    """
    @api.background.task
    def handles():
        handler.handle(body, signature)
    signature = req.headers['X-Line-Signature']
    body = await req.media()
    # NOTE(review): re-serializing and stripping every space also alters
    # message text containing spaces before signature validation -- verify
    # this matches what LINE signed.
    body = json.dumps(body, ensure_ascii=False).replace(' ', '')
    try:
        handles()
        resp.status_code = 200
        resp.text = 'OK'
    except InvalidSignatureError as e:
        # NOTE(review): handles() runs in the background, so an
        # InvalidSignatureError raised inside handler.handle() may never
        # reach this except block -- confirm intended behavior.
        resp.status_code = 400
        resp.text = str(e)  # Fix: was assigning the exception object, not a string.
@handler.add(MessageEvent, message=TextMessage)
def handle_message(event):
    """Relay a LINE text message to the sender's selected Discord webhook."""
    with session_mng.session_create() as session:
        webhook_id = line_crud.get_webhook_id(session, event.source.user_id)
        # Per the replies below: False appears to mean "not linked yet" and
        # None "linked but no destination server selected" -- confirm in LineCrud.
        if webhook_id == False:
            line_bot_api.reply_message(event.reply_token, TextSendMessage("LINEとDiscordを連携させましょう!"))
            return
        elif webhook_id == None:
            line_bot_api.reply_message(event.reply_token, TextSendMessage("送信先サーバーを選択してください!"))
            return
    with session_mng.session_create() as session:
        user = line_crud.get_discord_user(session, event.source.user_id)
    # Mirror the sender's Discord identity (name + avatar) on the webhook post.
    user_info = json.loads(requests.get("{}/users/{}".format(BASE_URL, user), headers=HADER).text)
    avatar_url = "https://cdn.discordapp.com/avatars/{}/{}.jpg".format(user_info["id"], user_info["avatar"])
    webhook_contents = {
        "username" : user_info["username"],
        "avatar_url" : avatar_url,
        "content" : event.message.text
    }
    webhook_info = json.loads(requests.get("{}/webhooks/{}".format(BASE_URL, webhook_id), headers=HADER).text)
    webhook_url = "{}/webhooks/{}/{}".format(BASE_URL, webhook_info["id"], webhook_info["token"])
    # NOTE(review): second positional arg = form data, not JSON -- verify the
    # Discord webhook accepts form-encoded username/avatar_url/content here.
    res = requests.post(webhook_url, webhook_contents)
    with session_mng.session_create() as session:
        line_crud.set_talk_time(session, event.source.user_id)
@handler.add(FollowEvent)
def following(event):
    """Register a user who just followed (added) the bot."""
    with session_mng.session_create() as session:
        line_crud.add_following_user(session, event.source.user_id)
@handler.add(UnfollowEvent)
def unfollow(event):
    """Remove a user's records when they block/unfollow the bot."""
    with session_mng.session_create() as session:
        line_crud.del_userinfo_block(session, event.source.user_id)
@handler.add(PostbackEvent)
def post_back(event):
    """Dispatch rich-menu postback actions.

    Fixed `data` strings select a top-level action (register / delete /
    setting / select server, register accept/deny); anything else is a
    "verb,server_id" pair produced by the carousel buttons built below.
    """
    if event.postback.data == "register_server":
        with session_mng.session_create() as session:
            if line_crud.exists_line_user(session, event.source.user_id) == True:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("既に登録用コマンドを発行済です!"))
            else:
                password = Line.password_gen()
                # Fix: deepcopy the template before mutating it.  Every other
                # branch copies its flex template first; assigning directly
                # here mutated the shared module-level `password_generate`.
                flex_message = copy.deepcopy(password_generate)
                flex_message["header"]["contents"][0]["text"] = "!dine add " + str(password)
                with session_mng.session_create() as session:
                    line_crud.add_following_to_password(session, event.source.user_id, password)
                line_bot_api.reply_message(
                    event.reply_token,
                    [
                        FlexSendMessage(alt_text="登録メッセージ", contents=flex_message),
                        TextSendMessage("上記のコマンドを登録したいサーバーのDiscordチャットに入力してください!")
                    ]
                )
    elif event.postback.data == "delete_server":
        with session_mng.session_create() as session:
            servers = line_crud.get_server_id(session, event.source.user_id)
        if len(servers) > 0:
            delete_flex_message = copy.deepcopy(carousel_message)
            delete_flex_message_contents = copy.deepcopy(delete_server_contents)
            # One carousel card per linked server, each with a "delete,<id>" action.
            for server in servers:
                res = requests.get("{}/guilds/{}".format(BASE_URL, str(server[0])), headers=HADER)
                server_info = json.loads(res.text)
                delete_flex_message_contents["hero"]["contents"][0]["url"] = "https://cdn.discordapp.com/icons/{}/{}.png".format(str(server_info["id"]), str(server_info["icon"]))
                delete_flex_message_contents["body"]["contents"][0]["text"] = server_info["name"]
                delete_flex_message_contents["footer"]["contents"][0]["action"]["data"] = "delete,{}".format(server_info["id"])
                delete_flex_message["contents"].append(copy.deepcopy(delete_flex_message_contents))
            line_bot_api.reply_message(event.reply_token, FlexSendMessage(alt_text="登録メッセージ", contents=delete_flex_message))
        else:
            line_bot_api.reply_message(event.reply_token, TextSendMessage("登録してるサーバーが1つもありません!"))
    elif event.postback.data == "setting_server":
        with session_mng.session_create() as session:
            servers = line_crud.get_server_id(session, event.source.user_id)
        if len(servers) > 0:
            setting_flex_message = copy.deepcopy(carousel_message)
            setting_flex_message_contents = copy.deepcopy(setting_contens)
            for server in servers:
                res = requests.get("{}/guilds/{}".format(BASE_URL, server[0]), headers=HADER)
                server_info = json.loads(res.text)
                # Toggle-button labels reflect the server's current text/voice
                # notification state (show the action that would flip it).
                with session_mng.session_create() as session:
                    if line_crud.get_server_text(session, event.source.user_id, server[0]) == True:
                        setting_flex_message_contents["footer"]["contents"][1]["action"]["label"] = "オフ にする"
                    elif line_crud.get_server_text(session, event.source.user_id, server[0]) == False:
                        setting_flex_message_contents["footer"]["contents"][1]["action"]["label"] = "オン にする"
                with session_mng.session_create() as session:
                    if line_crud.get_server_voice(session, event.source.user_id, server[0]) == True:
                        setting_flex_message_contents["footer"]["contents"][3]["action"]["label"] = "オフ にする"
                    elif line_crud.get_server_voice(session, event.source.user_id, server[0]) == False:
                        setting_flex_message_contents["footer"]["contents"][3]["action"]["label"] = "オン にする"
                setting_flex_message_contents["hero"]["contents"][0]["url"] = "https://cdn.discordapp.com/icons/{}/{}.png".format(str(server_info["id"]), str(server_info["icon"]))
                setting_flex_message_contents["body"]["contents"][0]["text"] = server_info["name"]
                setting_flex_message_contents["footer"]["contents"][1]["action"]["data"] = "setting_text,{}".format(server_info["id"])
                setting_flex_message_contents["footer"]["contents"][3]["action"]["data"] = "setting_vc,{}".format(server_info["id"])
                setting_flex_message["contents"].append(copy.deepcopy(setting_flex_message_contents))
            line_bot_api.reply_message(event.reply_token, FlexSendMessage(alt_text="設定メッセージ", contents=setting_flex_message))
        else:
            line_bot_api.reply_message(event.reply_token, TextSendMessage("登録してるサーバーが1つもありません!"))
    elif event.postback.data == "select_server":
        with session_mng.session_create() as session:
            servers = line_crud.get_server_id(session, event.source.user_id)
        if len(servers) > 0:
            select_flex_message = copy.deepcopy(carousel_message)
            select_flex_message_contents = copy.deepcopy(select_contents)
            for server in servers:
                res = requests.get("{}/guilds/{}".format(BASE_URL, server[0]), headers=HADER)
                server_info = json.loads(res.text)
                select_flex_message_contents["hero"]["contents"][0]["url"] = "https://cdn.discordapp.com/icons/{}/{}.png".format(str(server_info["id"]), str(server_info["icon"]))
                select_flex_message_contents["body"]["contents"][0]["text"] = server_info["name"]
                select_flex_message_contents["footer"]["contents"][0]["action"]["data"] = "select,{}".format(server_info["id"])
                select_flex_message["contents"].append(copy.deepcopy(select_flex_message_contents))
            line_bot_api.reply_message(event.reply_token, FlexSendMessage(alt_text="選択メッセージ", contents=select_flex_message))
        else:
            line_bot_api.reply_message(event.reply_token, TextSendMessage("登録してるサーバーが1つもありません!"))
    elif event.postback.data == "register_accept":
        with session_mng.session_create() as session:
            if line_crud.exists_line_user(session, event.source.user_id) == False:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("既に選択済みのメッセージです!"))
                return
        with session_mng.session_create() as session:
            line_crud.accept_user(session, event.source.user_id)
        with session_mng.session_create() as session:
            line_crud.set_user_info(session, event.source.user_id)
        line_bot_api.reply_message(event.reply_token, TextSendMessage("サーバーへの登録が完了しました!"))
    elif event.postback.data == "register_deny":
        with session_mng.session_create() as session:
            if line_crud.exists_line_user(session, event.source.user_id) == False:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("既に選択済みのメッセージです!"))
                return
        line_bot_api.reply_message(event.reply_token, TextSendMessage("サーバーへの登録を拒否しました。\n再度登録する場合はパスワードを再生成してください。"))
    else:
        # Carousel button payloads: "<verb>,<server_id>".
        data = event.postback.data.split(",")
        if data[0] == "delete":
            with session_mng.session_create() as session:
                line_crud.delete_server(session, event.source.user_id, data[1])
            line_bot_api.reply_message(event.reply_token, TextSendMessage("サーバーとの連携を解除しました!"))
        if data[0] == "setting_text":
            with session_mng.session_create() as session:
                text_notice = line_crud.setting_server_text(session, event.source.user_id, data[1])
            # True/False -> new toggle state; None -> server no longer linked.
            if text_notice == True:
                notice_message = "オン"
            elif text_notice == False:
                notice_message = "オフ"
            elif text_notice == None:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("設定したサーバーとは連携解除が解除されています!"))
                return
            line_bot_api.reply_message(event.reply_token, TextSendMessage("サーバーのメッセージ通知を {} にしました!".format(notice_message)))
        if data[0] == "setting_vc":
            with session_mng.session_create() as session:
                text_notice = line_crud.setting_server_voice(session, event.source.user_id, data[1])
            if text_notice == True:
                notice_message = "オン"
            elif text_notice == False:
                notice_message = "オフ"
            elif text_notice == None:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("設定したサーバーとは連携解除が解除されています!"))
                return
            line_bot_api.reply_message(event.reply_token, TextSendMessage("サーバーのボイスチャット通知を {} にしました!".format(notice_message)))
        if data[0] == "select":
            with session_mng.session_create() as session:
                user_res = line_crud.set_user_talk_server(session, event.source.user_id, data[1])
            if user_res == True:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("メッセージ送信先のサーバーを変更しました!"))
            else:
                line_bot_api.reply_message(event.reply_token, TextSendMessage("設定したサーバーとは連携解除が解除されています!"))
class Line():
    """LINE-bot side of the bridge: password generation, rich-menu setup, server start."""

    @staticmethod
    def password_gen():
        """Generate a 6-digit pairing password that is not already in use.

        Loops until `line_crud.exists_password` reports the candidate as free.
        (The original recursed via a bare `password_gen()`, which is a NameError
        inside a staticmethod.)

        NOTE(review): `random` is not a cryptographic source; consider
        `secrets.randbelow` for pairing codes — confirm threat model first.
        """
        while True:
            password = random.randint(100000, 999999)
            with session_mng.session_create() as session:
                if not line_crud.exists_password(session, password):
                    return password

    def __create_richmenu(self):
        """Create the 2x2 rich menu, upload its image, and set it as the default.

        The menu canvas is 2500x1686; each tappable area is one 1250x843 quadrant.
        (The original passed bottom-right coordinates as width/height, producing
        areas that extend past the canvas — the LINE API rejects such bounds.)
        """
        rich_menu_to_create = RichMenu(
            size=RichMenuSize(width=2500, height=1686),
            selected=True,
            name="dine_richmenu",
            chat_bar_text="BOT設定はここ!",
            areas=[
                # top-left quadrant
                RichMenuArea(
                    bounds=RichMenuBounds(x=0, y=0, width=1250, height=843),
                    action=PostbackAction(data="delete_server", display_text="サーバーを消したいよ!")
                ),
                # bottom-left quadrant
                RichMenuArea(
                    bounds=RichMenuBounds(x=0, y=843, width=1250, height=843),
                    action=PostbackAction(data="setting_server", display_text="サーバーを設定したいよ!")
                ),
                # top-right quadrant
                RichMenuArea(
                    bounds=RichMenuBounds(x=1250, y=0, width=1250, height=843),
                    action=PostbackAction(data="register_server", display_text="サーバを登録したいよ!")
                ),
                # bottom-right quadrant
                RichMenuArea(
                    bounds=RichMenuBounds(x=1250, y=843, width=1250, height=843),
                    action=PostbackAction(data="select_server", display_text="サーバーを選びたいよ!")
                )
            ]
        )
        richMenuId = line_bot_api.create_rich_menu(rich_menu=rich_menu_to_create)
        with open("picture/richmenu.png", 'rb') as f:
            line_bot_api.set_rich_menu_image(richMenuId, "image/png", f)
        line_bot_api.set_default_rich_menu(richMenuId)

    def begin(self):
        """Install the rich menu, then start the web server (blocking)."""
        self.__create_richmenu()
        api.run(address="0.0.0.0", port=8000, debug=True, log_config=None)
import pytest
import time
from rever import rever, ReachedMaxRetries
class TestRever:
    """Exercise the ``rever`` retry decorator.

    The specific exception types raised generally do not matter for these
    tests.  Because of the default max pause time of about one second, the
    backoff tests each take roughly a second to run.
    """

    def test_no_kwargs_raise_max_retries(self):
        @rever()
        def always_fail():
            raise OSError

        with pytest.raises(ReachedMaxRetries):
            always_fail()

    def test_try_to_catch_oserror_but_miss(self):
        # Only OSError is retried, so a TypeError escapes immediately.
        @rever(exception=OSError)
        def raise_other():
            raise TypeError

        with pytest.raises(TypeError):
            raise_other()

    def test_catch_oserror_but_ultimately_raise_no_exception(self):
        @rever(exception=OSError, raises=False)
        def always_fail():
            raise OSError

        assert always_fail() is None

    def test_function_args_kwargs(self):
        @rever()
        def fail_when_given_input(*args, **kwargs):
            if args or kwargs:
                raise OSError

        with pytest.raises(ReachedMaxRetries):
            fail_when_given_input(1, 2, fruit="apple")

    def test_oserror_call_prior(self):
        def noop():
            return None

        @rever(prior=noop)
        def always_fail():
            raise OSError

        with pytest.raises(ReachedMaxRetries):
            always_fail()

    def test_return_value_no_errors(self):
        @rever()
        def succeed():
            return "does this return anything?"

        assert succeed() == "does this return anything?"

    def test_backoff_total_pause(self):
        @rever(total_pause=2, raises=False)
        def always_fail():
            raise OSError

        started = time.time()
        always_fail()
        elapsed = time.time() - started
        assert 1 < elapsed < 3

    def test_backoff_steps(self):
        @rever(steps=10, raises=False)
        def always_fail():
            raise OSError

        started = time.time()
        always_fail()
        elapsed = time.time() - started
        # cannot test steps directly, but 10 steps corresponds to 1 second
        # given the default total pause
        assert 0 < elapsed < 2

    def test_no_backoff_pause(self):
        @rever(backoff=False, pause=2, raises=False)
        def always_fail():
            raise OSError

        started = time.time()
        always_fail()
        elapsed = time.time() - started
        assert 1 < elapsed < 3

    def test_function_args_kwargs_times(self):
        @rever(backoff=False, times=2)
        def fail_when_given_input(*args, **kwargs):
            if args or kwargs:
                raise OSError

        with pytest.raises(ReachedMaxRetries):
            fail_when_given_input(1, 2, fruit="apple")

    def test_multiple_uses_of_same_function_no_reached_max_tries_exception_raised(self):
        counter = 1

        @rever(backoff=False, times=1, raises=True)
        def fail_once():
            nonlocal counter
            if counter == 1:
                counter -= 1
                raise OSError

        fail_once()  # will catch OSError when times = 1
        fail_once()
        fail_once()
        # Prior to v0.3.0 calling fail_once() repeatedly like this would raise
        # ReachedMaxRetries: the rever_kwargs were not re-initialized for each
        # new call, so 'times' kept decreasing toward 0.
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
@login_required()
def web_ssh(request):
    """Render the web SSH terminal page, pre-filled from query-string params.

    Defaults: empty host/type, port 22, user 'root', no password.

    NOTE(review): connection credentials (user/password) are taken from the
    query string here, which typically ends up in server and proxy logs —
    consider POST or a one-time token instead; confirm with the deployment.
    """
    params = request.GET
    context = {
        'host': params.get('host', ''),
        'type': params.get('type', ''),
        'port': params.get('port', '22'),
        'user': params.get('user', 'root'),
        'password': params.get('password', None),
    }
    return render(request, 'admin/shell.html', context)
|
import re
# Matches "NAME (TENSE.ASPECT.MODALITY)"; group 1 is the name, 2-4 the parts.
tam_re = re.compile(r'(.*)\((.*)\.(.*)\.(.*)\)')


def split_TAM(TAM_tag):
    """Split a TAM tag and return its parts as a dict.

    ! DEPRECATED !
    Starting with yiqtol, this function is deprecated since tense tags
    have changed; process_TAM now fulfills the old role.

    Returns keys 'tense', 'aspect', 'modality', 'TAM' (dotted recombination)
    and 'TAMtag' (the stripped name); all values are empty strings when the
    tag does not match the expected shape.
    """
    match = tam_re.match(TAM_tag)
    if not match:
        # Unparseable tag -> all-empty feature dict.
        return {
            'tense': '',
            'aspect': '',
            'modality': '',
            'TAM': '',
            'TAMtag': '',
        }
    name, tense, aspect, modality = match.groups()
    return {
        'tense': tense,
        'aspect': aspect,
        'modality': modality,
        'TAM': f'{tense}.{aspect}.{modality}',
        'TAMtag': name.strip(),
    }
def process_TAM(full_tam):
    """Take in a TAM tag and build TAM features.

    Returns {'TAM': the original tag, 'TAMsimp': a simplified tag} where the
    simplification collapses question / wh-question / do-support / negation
    variants onto their base tense or modal tag; unknown tags pass through
    unchanged.
    """
    simplifications = {
        'PRES do not': 'PRES',
        'PRES question': 'PRES',
        'PRES wh-question': 'PRES',
        'PRES PERF question': 'PRES PERF',
        'PRES PERF wh-question': 'PRES PERF',
        'PAST did not': 'PAST',
        'PAST question': 'PAST',
        'PAST wh-question': 'PAST',
        'PAST PROG keep': 'PAST PROG',
        'FUT question': 'FUT',
        'FUT wh-question': 'FUT',
        'PRES do-support': 'PRES',
        'PAST do-support': 'PAST',
        'IMPV do not': 'IMPV',
        'MOD quest shall': 'MOD shall',
        'MOD quest should': 'MOD should',
        'MOD quest would': 'MOD would',
        'MOD quest can': 'MOD can',
        'MOD quest could': 'MOD could',
    }
    return {
        'TAM': full_tam,
        'TAMsimp': simplifications.get(full_tam, full_tam),
    }
|
import requests_mock
from pycryptoclients.request import DEFAULT_USER_AGENT
from pycryptoclients.markets.stocks_exchange.api import StocksExchangeAPI
from pycryptoclients.markets.stocks_exchange.request import STOCKS_EXCHANGE_BASE_URL
from tests import CCAPITestCase
from tests.test_markets import *
class TestStocksExchangeAPI(CCAPITestCase):
    """Tests for StocksExchangeAPI against mocked HTTP endpoints."""

    def setUp(self):
        super(TestStocksExchangeAPI, self).setUp()
        self.api = StocksExchangeAPI(api_secret=self.shared_secret, api_key=self.api_key)

    ######################################################
    # Test public API methods
    ######################################################
    def assertPublicCall(self, m, method_name, response_text, method_url=None, **call_kwargs):
        """Register a mocked GET endpoint, call a public API method, and run the
        assertions shared by every public-method test.

        :param m: requests_mock Mocker instance
        :param method_name: API method name passed to ``self.api.call``
        :param response_text: canned body returned by the mocked endpoint
        :param method_url: URL fragment when it differs from ``method_name``
        :param call_kwargs: extra keyword arguments forwarded to ``self.api.call``
        :return: decoded response data, for method-specific assertions
        """
        url = STOCKS_EXCHANGE_BASE_URL.format(method=method_url if method_url is not None else method_name)
        m.register_uri('GET', url, text=response_text)
        data = self.api.call(method_name, **call_kwargs).data
        self.assertPublicMethod(method_name, m, url, DEFAULT_USER_AGENT)
        self.assertTrue(data)
        return data

    @requests_mock.Mocker()
    def test_ticker(self, m):
        data = self.assertPublicCall(m, 'ticker', TICKER_RESPONSE)
        self.assertIsInstance(data, list)
        self.assertEqual(len(data), 1)

    @requests_mock.Mocker()
    def test_prices(self, m):
        data = self.assertPublicCall(m, 'prices', PRICES_RESPONSE)
        self.assertIsInstance(data, list)
        self.assertEqual(len(data), 3)

    @requests_mock.Mocker()
    def test_markets(self, m):
        data = self.assertPublicCall(m, 'markets', MARKETS_RESPONSE)
        self.assertIsInstance(data, list)
        self.assertEqual(len(data), 2)

    @requests_mock.Mocker()
    def test_currencies(self, m):
        data = self.assertPublicCall(m, 'currencies', CURRENCIES_RESPONSE)
        self.assertIsInstance(data, list)
        self.assertEqual(len(data), 2)

    @requests_mock.Mocker()
    def test_market_summary(self, m):
        # The URL embeds the pair as path segments, not the method name alone.
        data = self.assertPublicCall(m, 'market_summary', MARKET_SUMMARY_RESPONSE,
                                     method_url='market_summary/BTC/USD',
                                     currency1='BTC', currency2='USD')
        self.assertIsInstance(data, list)
        self.assertEqual(len(data), 1)
        with self.assertRaises(TypeError):
            self.api.call('market_summary')  # currency1 and currency2 are required arguments for request

    @requests_mock.Mocker()
    def test_trade_history(self, m):
        # API method 'trade_history' maps to the 'trades' endpoint.
        data = self.assertPublicCall(m, 'trade_history', TRADE_HISTORY_RESPONSE,
                                     method_url='trades?pair=BTC_NXT',
                                     currency1='BTC', currency2='NXT')
        self.assertEqual(data['success'], 1)
        result = data['result']
        self.assertIsInstance(result, list)
        self.assertEqual(len(result), 3)
        with self.assertRaises(TypeError):
            self.api.call('trade_history')  # currency1 and currency2 are required arguments for request

    @requests_mock.Mocker()
    def test_orderbook(self, m):
        data = self.assertPublicCall(m, 'orderbook', ORDERBOOK_RESPONSE,
                                     method_url='orderbook?pair=BTC_NXT',
                                     currency1='BTC', currency2='NXT')
        self.assertEqual(data['success'], 1)
        result = data['result']
        self.assertIsInstance(result, dict)
        self.assertIn('buy', result)
        self.assertIn('sell', result)
        with self.assertRaises(TypeError):
            self.api.call('orderbook')  # currency1 and currency2 are required arguments for request

    @requests_mock.Mocker()
    def test_public_grafic(self, m):
        # API method 'grafic' maps to the 'grafic_public' endpoint with defaults.
        data = self.assertPublicCall(m, 'grafic', PUBLIC_GRAFIC_RESPONSE,
                                     method_url='grafic_public?pair=BTC_NXT&interval=1D&order=DESC&count=50',
                                     currency1='BTC', currency2='NXT')
        self.assertEqual(data['success'], 1)
        self.assertIsInstance(data['data'], dict)
        with self.assertRaises(TypeError):
            self.api.call('grafic')  # currency1 and currency2 are required arguments for request

    ######################################################
    # Test private API methods
    ######################################################
    def assertPrivateMethod(self, method_name, response_data, m, **request_params):
        """Call a private (signed POST) API method against a mocked endpoint and
        verify authentication, headers and the response envelope.

        :return: the recorded request, for extra per-test inspection
        """
        m.register_uri('POST', STOCKS_EXCHANGE_BASE_URL.format(method=''), text=response_data)
        result = self.api.call(method_name, **request_params).data
        self.assertTrue(m.called)
        self.assertEqual(m.call_count, 1)
        self.assertAuth(m)
        self.assertTrue(result)
        self.assertIsInstance(result, dict)
        self.assertEqual(result.get('success'), 1)
        self.assertIn('data', result)
        req = m.request_history[0]
        req_headers = req.headers
        self.assertEqual(req_headers['User-Agent'], DEFAULT_USER_AGENT)
        self.assertEqual(req_headers['Content-Type'], 'application/json')
        return req

    @requests_mock.Mocker()
    def test_get_account_info(self, m):
        self.assertPrivateMethod('get_account_info', response_data=GET_ACCOUNT_INFO_RESPONSE, m=m)

    @requests_mock.Mocker()
    def test_get_active_orders(self, m):
        method_name = 'get_active_orders'
        self.assertPrivateMethod(method_name, response_data=GET_ACTIVE_ORDERS_RESPONSE, m=m)
        with self.assertRaises(ValueError):
            self.api.call(method_name, count=125)

    @requests_mock.Mocker()
    def test_trade(self, m):
        method_name = 'trade'
        self.assertPrivateMethod(method_name, response_data=TRADE_RESPONSE, m=m, _type='BUY', currency1='BTC',
                                 currency2='NXT', amount=2345, rate=1)
        # Invalid order type / negative amount / negative rate must be rejected.
        with self.assertRaises(ValueError):
            self.api.call(method_name, _type='DUMP', currency1='BTC', currency2='NXT', amount=2345, rate=1)
        with self.assertRaises(ValueError):
            self.api.call(method_name, _type='BUY', currency1='BTC', currency2='NXT', amount=-235, rate=1)
        with self.assertRaises(ValueError):
            self.api.call(method_name, _type='BUY', currency1='BTC', currency2='NXT', amount=2345, rate=-1)

    @requests_mock.Mocker()
    def test_cancel_order(self, m):
        self.assertPrivateMethod('cancel_order', response_data=CANCEL_ORDER_RESPONSE, m=m, order_id=45)

    @requests_mock.Mocker()
    def test_private_trade_history(self, m):
        self.assertPrivateMethod('private_trade_history', response_data=PRIVATE_TRADE_HISTORY_RESPONSE, m=m)

    @requests_mock.Mocker()
    def test_transactions_history(self, m):
        method_name = 'transactions_history'
        self.assertPrivateMethod(method_name, response_data=TRANSACTIONS_HISTORY_RESPONSE, m=m)
        with self.assertRaises(ValueError):
            self.api.call(method_name, count=125)

    @requests_mock.Mocker()
    def test_private_grafic(self, m):
        method_name = 'private_grafic'
        self.assertPrivateMethod(method_name, response_data=PRIVATE_GRAFIC_RESPONSE, m=m)
        with self.assertRaises(ValueError):
            self.api.call(method_name, count=125)

    @requests_mock.Mocker()
    def test_deposit(self, m):
        self.assertPrivateMethod('deposit', response_data=DEPOSIT_RESPONSE, m=m, currency='BTC')

    @requests_mock.Mocker()
    def test_withdraw(self, m):
        self.assertPrivateMethod('withdraw', response_data=WITHDRAW_RESPONSE, m=m, currency='BTC',
                                 address='XXXXXXXX',
                                 amount=457.0)

    @requests_mock.Mocker()
    def test_generate_wallets(self, m):
        self.assertPrivateMethod('generate_wallets', response_data=GENERATE_WALLETS_RESPONSE, m=m, currency='BTC')

    @requests_mock.Mocker()
    def test_ticket(self, m):
        self.assertPrivateMethod('ticket', response_data=TICKET_RESPONSE, m=m, category=1,
                                 message='Can’t get deposit to my ETH wallet', subject='Can’t get deposit')

    @requests_mock.Mocker()
    def test_get_tickets(self, m):
        self.assertPrivateMethod('get_tickets', response_data=GET_TICKETS_RESPONSE, m=m, ticket_id=1,
                                 category=2, status=1)

    @requests_mock.Mocker()
    def test_reply_ticket(self, m):
        self.assertPrivateMethod('reply_ticket', response_data=REPLY_TICKET_RESPONSE, m=m, ticket_id=1,
                                 message='Some message')
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
from web3.exceptions import TimeExhausted
from config import (
CHAIN_ID,
TX_GAS_LIMIT
)
from app.utils.contract_utils import ContractUtils
from app.model.schema import IbetSecurityTokenEscrowApproveTransfer
from app.exceptions import SendTransactionError
class IbetExchangeInterface:
    """IbetExchangeInterface model: wrapper around a deployed exchange contract."""

    def __init__(self, contract_address: str, contract_name: str = "IbetExchangeInterface"):
        # Resolve the deployed contract once; every query goes through it.
        self.exchange_contract = ContractUtils.get_contract(
            contract_name=contract_name,
            contract_address=contract_address
        )

    def get_account_balance(self, account_address: str, token_address: str):
        """Get account balance.

        :param account_address: account address
        :param token_address: token address
        :return: dict with the free 'balance' and the committed amount
            'commitment' (each defaulting to 0 when the call yields nothing)
        """
        query_args = (account_address, token_address,)
        result = {}
        # balanceOf is queried first, then commitmentOf — same order as before.
        for key, function_name in (("balance", "balanceOf"), ("commitment", "commitmentOf")):
            result[key] = ContractUtils.call_function(
                contract=self.exchange_contract,
                function_name=function_name,
                args=query_args,
                default_returns=0
            )
        return result
class IbetSecurityTokenEscrow(IbetExchangeInterface):
    """IbetSecurityTokenEscrow model"""

    def __init__(self, contract_address: str):
        super().__init__(contract_address=contract_address, contract_name="IbetSecurityTokenEscrow")

    def approve_transfer(self,
                         data: IbetSecurityTokenEscrowApproveTransfer,
                         tx_from: str,
                         private_key: str):
        """Approve Transfer

        Builds an approveTransfer transaction (zero gas price) and sends it
        signed with the given private key.

        :param data: escrow id and approval payload
        :param tx_from: transaction sender address
        :param private_key: key used to sign the transaction
        :return: (tx_hash, tx_receipt)
        :raises SendTransactionError: when waiting for the receipt times out
            or the send fails for any other reason
        """
        try:
            tx = self.exchange_contract.functions.approveTransfer(
                data.escrow_id, data.data
            ).buildTransaction({
                "chainId": CHAIN_ID,
                "from": tx_from,
                "gas": TX_GAS_LIMIT,
                "gasPrice": 0
            })
            tx_hash, tx_receipt = ContractUtils.send_transaction(transaction=tx, private_key=private_key)
            return tx_hash, tx_receipt
        except TimeExhausted as timeout_error:
            # Chain the cause explicitly so the timeout stays visible upstream.
            raise SendTransactionError(timeout_error) from timeout_error
        except Exception as err:
            raise SendTransactionError(err) from err
|
import dash_html_components as html
# User-facing copy for the problem-statement page (typos in the displayed
# English text fixed: "costumer" -> "customer", grammar in both paragraphs).
carreta1 = "When there is a problem with any piece of the energy infrastructure in a certain place, and it causes a customer or a group of them to not receive power supply, EPM generates a work order, in which they specify some tasks that a technical crew has to execute over the energy infrastructure to fix the problem."
carreta2 = "In our dashboard, we use the work order as the identifier of a failure, and we will show you several interactions of the count of failures with other variables that will allow you to understand how the failures are distributed over the region of Urabá and also identify some situations related to their occurrence."


def layout():
    """Build the problem-statement section: a centered title, an illustrative
    image, and the two explanatory paragraphs above."""
    return html.Div(
        className="container",
        children=[
            html.Div(
                className="row justify-content-center text-center mb-3",
                children=[
                    html.H2("What is the problem we are trying to solve?")
                ]
            ),
            html.Div(
                className="row",
                children=[
                    html.Div(
                        className="col-12 my-2",
                        children=[
                            html.Img(
                                src="https://1-engineer.ru/wp-content/uploads/2018/08/elektroenergetika-pryamoug-2.jpg",
                                width="100%",
                                height="auto"
                            )
                        ]
                    ),
                    html.Div(
                        className="col-8 mx-auto my-2",
                        children=[
                            html.P(carreta1),
                            html.Br(),
                            html.P(carreta2)
                        ]
                    )
                ]
            )
        ]
    )
|
from setuptools import setup
# Minimal package definition for the custom Gym environment.
setup(
    name='Custom-env',
    version='0.0.1',
    install_requires=['gym'],
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.