| max_stars_repo_path (string, length 3-269) | max_stars_repo_name (string, length 4-119) | max_stars_count (int64, 0-191k) | id (string, length 1-7) | content (string, length 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
src/__init__.py
|
molsonkiko/json_tabularize
| 0
|
12787051
|
<reponame>molsonkiko/json_tabularize
'''
An unusually powerful algorithm for converting JSON into a tabular format, defined here as an array of flat objects (all values scalar).
Every algorithm I've seen for tabularizing JSON fails to fully normalize deeply nested input; instead it produces partially normalized output in which some rows of the resulting table still contain raw, un-normalized JSON.
'''
__version__ = '1.0.3'
from tabularize import SchemaBuilder, get_schema, build_tab
__all__ = ['SchemaBuilder', 'get_schema', 'build_tab', 'test']
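# Editor's note: hypothetical usage sketch, not part of the original file; the exact
# signature of build_tab is an assumption and is only meant to illustrate the
# "array of flat objects" output described in the docstring above.
#   from json_tabularize import build_tab
#   nested = {'a': [{'b': 1, 'c': {'d': 2}}, {'b': 3, 'c': {'d': 4}}]}
#   build_tab(nested)  # e.g. [{'a.b': 1, 'a.c.d': 2}, {'a.b': 3, 'a.c.d': 4}]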
| 2.390625
| 2
|
proj/fpga/ultra96/crowd_estimation/python/crowd_counter.py
|
timebe00/Mercenary
| 3
|
12787052
|
<filename>proj/fpga/ultra96/crowd_estimation/python/crowd_counter.py<gh_stars>1-10
# In[1]:
import cv2
import imutils
from imutils.object_detection import non_max_suppression
import numpy as np
import requests
import time
import base64
from matplotlib import pyplot as plt
from IPython.display import clear_output
# In[2]:
URL = "http://industrial.api.ubidots.com"
INDUSTRIAL_USER = True
#TOKEN = "<PASSWORD>"
TOKEN = "<KEY>"
DEVICE = "camera"
VARIABLE = "people"
# HOG cv2 object
hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
# In[3]:
def detector(image):
rects, weights = hog.detectMultiScale(image, winStride=(4, 4), padding=(8, 8), scale=1.05)
for (x, y, w, h) in rects:
cv2.rectangle(image, (x, y), (x + w, y + h), (0, 0, 255), 2)
rects = np.array([[x, y, x + w, y + h] for (x, y, w, h) in rects])
result = non_max_suppression(rects, probs=None, overlapThresh=0.7)
return result
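# Note (editor): the red rectangles above are drawn on the copy of the frame passed
# into detector() and are never displayed; only the non-max-suppressed boxes returned
# here are drawn (in green) and shown by record() below.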
# In[4]:
def buildPayload(variable, value):
return {variable: {"value": value}}
# In[5]:
def send(token, device, variable, value, industrial=True):
# build endpoint
url = URL
url = "{}/api/v1.6/devices/{}".format(url, device)
payload = buildPayload(variable, value)
headers = {"X-Auth-Token": token, "Content-Type": "application/json"}
attempts = 0
status = 400
# handle bad requests
while status >= 400 and attempts <= 5:
req = requests.post(url=url, headers=headers, json=payload)
status = req.status_code
attempts += 1
time.sleep(1)
return req
# In[6]:
def record(token, device, variable, sample_time=5):
print("recording")
camera = cv2.VideoCapture(0)
init = time.time()
# ubidots sample limit
if sample_time < 1:
sample_time = 1
while(True):
print("cap frames")
ret, frame = camera.read()
frame = imutils.resize(frame, width=min(400, frame.shape[1]))
result = detector(frame.copy())
# show frame with bounding rectangle for debugging/ optimisation
for (xA, yA, xB, yB) in result:
cv2.rectangle(frame, (xA, yA), (xB, yB), (0, 255, 0), 2)
plt.imshow(frame)
plt.show()
# sends results
if time.time() - init >= sample_time:
print("sending result")
send(token, device, variable, len(result))
init = time.time()
camera.release()
cv2.destroyAllWindows()
# In[7]:
def main():
record(TOKEN, DEVICE, VARIABLE)
# In[8]:
if __name__ == '__main__':
main()
| 2.546875
| 3
|
examples/how_to/start_parameterized_build.py
|
pingod/python-jenkins_api
| 556
|
12787053
|
"""
Start a Parameterized Build
"""
from __future__ import print_function
from jenkinsapi.jenkins import Jenkins
jenkins = Jenkins('http://localhost:8080')
params = {'VERSION': '1.2.3', 'PYTHON_VER': '2.7'}
# This will start the job in non-blocking manner
jenkins.build_job('foo', params)
# This will start the job and will return a QueueItem object which
# can be used to get build results
job = jenkins['foo']
qi = job.invoke(build_params=params)
# Block this script until build is finished
if qi.is_queued() or qi.is_running():
qi.block_until_complete()
build = qi.get_build()
print(build)
| 3.046875
| 3
|
nip24/vatentity.py
|
nip24pl/nip24-python-client
| 0
|
12787054
|
#
# -*- coding: utf-8 -*-
#
# Copyright 2015-2020 NETCAT (www.netcat.pl)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author NETCAT <<EMAIL>>
# @copyright 2015-2020 NETCAT (www.netcat.pl)
# @license http://www.apache.org/licenses/LICENSE-2.0
#
class VATEntity:
"""
VAT registry entity
"""
def __init__(self):
self.name = None
self.nip = None
self.regon = None
self.krs = None
self.residenceAddress = None
self.workingAddress = None
self.vatStatus = None
self.vatResult = None
self.representatives = []
self.authorizedClerks = []
self.partners = []
self.ibans = []
self.hasVirtualAccounts = None
self.registrationLegalDate = None
self.registrationDenialDate = None
self.registrationDenialBasis = None
self.restorationDate = None
self.restorationBasis = None
self.removalDate = None
self.removalBasis = None
def __str__(self):
return 'VATEntity: [name = ' + str(self.name) \
+ ', nip = ' + str(self.nip) \
+ ', regon = ' + str(self.regon) \
+ ', krs = ' + str(self.krs) \
+ ', residenceAddress = ' + str(self.residenceAddress) \
+ ', workingAddress = ' + str(self.workingAddress) \
+ ', vatStatus = ' + str(self.vatStatus) \
+ ', vatResult = ' + str(self.vatResult) \
+ ', representatives = [' + ', '.join(str(e) for e in self.representatives) + ']' \
+ ', authorizedClerks = [' + ', '.join(str(e) for e in self.authorizedClerks) + ']' \
+ ', partners = [' + ', '.join(str(e) for e in self.partners) + ']' \
+ ', ibans = [' + ', '.join(str(e) for e in self.ibans) + ']' \
+ ', hasVirtualAccounts = ' + str(self.hasVirtualAccounts) \
+ ', registrationLegalDate = ' + str(self.registrationLegalDate) \
+ ', registrationDenialDate = ' + str(self.registrationDenialDate) \
+ ', registrationDenialBasis = ' + str(self.registrationDenialBasis) \
+ ', restorationDate = ' + str(self.restorationDate) \
+ ', restorationBasis = ' + str(self.restorationBasis) \
+ ', removalDate = ' + str(self.removalDate) \
+ ', removalBasis = ' + str(self.removalBasis) \
+ ']'
| 2.3125
| 2
|
projectautoobj.py
|
byronwongdev/project-opencv
| 0
|
12787055
|
<reponame>byronwongdev/project-opencv
"""
work in progress
"""
import cv2
import numpy as np
frameWidth = 1280
frameHeight = 720
cap = cv2.VideoCapture(1)
cap.set(3, frameWidth)
cap.set(4, frameHeight)
cap.set(10,150)
def empty(a):
pass
def hsv_to_rgb(h, s, v):
    if s == 0.0: return (v, v, v)
    i = int(h * 6.)  # XXX assume int() truncates!
    f = (h * 6.) - i
    p = v * (1. - s)
    q = v * (1. - s * f)
    t = v * (1. - s * (1. - f))
    i %= 6
    if i == 0: return (v, t, p)
    if i == 1: return (q, v, p)
    if i == 2: return (p, v, t)
    if i == 3: return (p, q, v)
    if i == 4: return (t, p, v)
    if i == 5: return (v, p, q)
# h_min,s_min,v_min,h_max,s_max,v_max
# if you want to detect more colours, add them to the list
h_min = 0
s_min = 0
v_min = 0
h_max = 179
s_max = 255
v_max = 255
mycolours = [[h_min,s_min,v_min,h_max,s_max,v_max]]
#the drawing colour BGR
mycoloursvalues = [hsv_to_rgb(h_min,s_min,v_min)]
mypoints = [] ##[x,y,colorId]
def findColour(img,mycolours,mycoloursvalues):
imgHSV = cv2.cvtColor(img,cv2.COLOR_BGR2HSV)
count = 0
newpoint = []
for color in mycolours:
lower = np.array(color[0:3])
upper = np.array(color[3:6])
mask = cv2.inRange(imgHSV,lower,upper)
x,y = getContours(mask)
#drawing the circle on the top of the object
cv2.circle(imgResult,(x,y),10,mycoloursvalues[count],cv2.FILLED)
if x != 0 and y != 0:
newpoint.append([x,y,count])
count += 1
#cv2.imshow(str(color[0]),mask)
return newpoint
# locate the object's contour and return the top-centre point of its bounding box
def getContours(img):
contours,hierarchy = cv2.findContours(img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE)
x,y,w,h = 0,0,0,0
for cnt in contours:
area = cv2.contourArea(cnt)
if area>500:
#cv2.drawContours(imgResult, cnt, -1, (255, 0, 0), 3)
peri = cv2.arcLength(cnt,True)
approx = cv2.approxPolyDP(cnt,0.02*peri,True)
x, y, w, h = cv2.boundingRect(approx)
return x+w//2,y
#### to draw on the canvas
def drawoncanvas(mypoints,mycoloursvalues):
for point in mypoints:
cv2.circle(imgResult,(point[0],point[1]),10,mycoloursvalues[point[2]],cv2.FILLED)
cv2.namedWindow("TrackBars")
cv2.resizeWindow("TrackBars",640,240)
cv2.createTrackbar("Hue Min","TrackBars",0,179,empty)
cv2.createTrackbar("Hue Max","TrackBars",179,179,empty)
cv2.createTrackbar("Sat Min","TrackBars",0,255,empty)
cv2.createTrackbar("Sat Max","TrackBars",255,255,empty)
cv2.createTrackbar("Val Min","TrackBars",0,255,empty)
cv2.createTrackbar("Val Max","TrackBars",255,255,empty)
while True:
success, img = cap.read()
imgResult = img.copy()
newpoint = findColour(img, mycolours,mycoloursvalues)
h_min = cv2.getTrackbarPos("Hue Min","TrackBars")
h_max = cv2.getTrackbarPos("Hue Max", "TrackBars")
s_min = cv2.getTrackbarPos("Sat Min", "TrackBars")
s_max = cv2.getTrackbarPos("Sat Max", "TrackBars")
v_min = cv2.getTrackbarPos("Val Min", "TrackBars")
v_max = cv2.getTrackbarPos("Val Max", "TrackBars")
if len(newpoint) != 0:
for newp in newpoint:
mypoints.append(newp)
if len(mypoints) != 0:
drawoncanvas(mypoints,mycoloursvalues)
findColour(img,mycolours,mycoloursvalues)
cv2.imshow("Result", imgResult)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
| 2.9375
| 3
|
tony-modulebasedpgm.py
|
tonythott/tonnewcode
| 0
|
12787056
|
<filename>tony-modulebasedpgm.py
age = int(input("Enter your age : "))
print(age)
nextyrage = age + 1
print("Next year age will be : ", nextyrage)
price = 1.21997
print("Price per liter %.2f" % price)
| 3.5625
| 4
|
alembic/versions/73340f5f1adf_change_schedules_repeat_cycle_column.py
|
Tharlaw/petsrus
| 0
|
12787057
|
"""change_schedules_repeat_cycle_column
Revision ID: 73340f5f1adf
Revises: acf23daeb12b
Create Date: 2020-08-23 12:09:49.948494
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "73340f5f1adf"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
op.drop_column("schedules", "repeat_cycle")
op.add_column("schedules", sa.Column("repeat_cycle", sa.Integer, nullable=True))
def downgrade():
op.drop_column("schedules", "repeat_cycle")
op.add_column("schedules", sa.Column("repeat_cycle", sa.String(10), nullable=True))
| 1.53125
| 2
|
vision/common/bounding_box.py
|
MissouriMRR/SUAS-2022
| 6
|
12787058
|
<reponame>MissouriMRR/SUAS-2022<gh_stars>1-10
"""
Bounding box objects represent an area in an image and
are used to convey information between flight and vision processes.
"""
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple
import numpy as np
class ObjectType(Enum):
"""
Type of object that a BoundingBox represents.
"""
STD_OBJECT: str = "std_object"
EMG_OBJECT: str = "emg_object"
TEXT: str = "text"
class BoundingBox:
"""
A set of 4 coordinates that distinguish a region of an image.
The order of the coordinates is (top-left, top-right, bottom-right, bottom-left).
Parameters
----------
vertices : Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]]
The main structure of the BoundingBox. Denotes the 4 coordinates
representing a box in an image. Vertices is a tuple of 4 coordinates. Each
coordinate consists of a tuple of 2 integers.
obj_type : ObjectType
Enumeration that denotes what type of object the BoundingBox represents.
attributes : Optional[Dict[str, Any]]
Any additional attributes to convey about the object in the BoundingBox.
"""
def __init__(
self,
vertices: Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]],
obj_type: ObjectType,
attributes: Optional[Dict[str, Any]] = None,
) -> None:
self._vertices: Tuple[
Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]
] = vertices
self._obj_type: ObjectType = obj_type
self._attributes: Dict[str, Any] = attributes if attributes is not None else {}
@property
def vertices(self) -> Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]]:
"""
Getter for _vertices. Gets the 4 vertices that make up the BoundingBox.
Returns
-------
_vertices : Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]]
The 4 coordinates of the BoundingBox.
"""
return self._vertices
@vertices.setter
def vertices(
self, verts: Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]]
) -> None:
"""
Setter for _vertices. Sets the 4 vertices that make up the BoundingBox.
Parameters
----------
vert : Tuple[Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int]]
The 4 coordinates to assign to the BoundingBox.
"""
self._vertices = verts
@property
def obj_type(self) -> ObjectType:
"""
Getter for _obj_type. Gets the ObjectType of the BoundingBox.
Returns
-------
_obj_type : ObjectType
The ObjectType of the BoundingBox.
"""
return self._obj_type
@obj_type.setter
def obj_type(self, o_type: ObjectType) -> None:
"""
Setter for _obj_type. Sets the value of the BoundingBox's ObjectType.
Parameters
----------
o_type : ObjectType
The ObjectType to assign to the BoundingBox.
"""
self._obj_type = o_type
@property
def attributes(self) -> Dict[str, Any]:
"""
Getter for _attributes. Gets the additional attributes of the BoundingBox.
Returns
-------
_attributes : Dict[str, Any]
Any additional attributes of the BoundingBox.
"""
return self._attributes
@attributes.setter
def attributes(self, att: Dict[str, Any]) -> None:
"""
Setter for _attributes. Sets the value of the BoundingBox's additional attributes.
Parameters
----------
att : Dict[str, Any]
The additional attributes to assign to the BoundingBox.
"""
self._attributes = att
def __repr__(self) -> str:
"""
Returns a string representation of the BoundingBox
that contains its id, object type, and vertices.
Returns
-------
str
The string representation of the BoundingBox object.
"""
return f"BoundingBox[{id(self)}, {self.obj_type}]: {str(self._vertices)}"
def get_x_vals(self) -> List[int]:
"""
Gets the x values of the 4 coordinates.
Returns
-------
x_vals : List[int]
The 4 x values of the vertices.
"""
x_vals: List[int] = [vert[0] for vert in self._vertices]
return x_vals
def get_y_vals(self) -> List[int]:
"""
Gets the y values of the 4 coordinates.
Returns
-------
y_vals : List[int]
The 4 y values of the vertices.
"""
y_vals: List[int] = [vert[1] for vert in self._vertices]
return y_vals
def get_x_extremes(self) -> Tuple[int, int]:
"""
Gets the minimum and maximum x values of the BoundingBox
Returns
-------
min_x, max_x : Tuple[int, int]
The minimum and maximum x values.
"""
x_vals: List[int] = self.get_x_vals()
min_x: int = np.amin(x_vals)
max_x: int = np.amax(x_vals)
return min_x, max_x
def get_y_extremes(self) -> Tuple[int, int]:
"""
Gets the minimum and maximum y values of the BoundingBox
Returns
-------
min_y, max_y : Tuple[int, int]
The minimum and maximum y values.
"""
y_vals: List[int] = self.get_y_vals()
min_y: int = np.amin(y_vals)
max_y: int = np.amax(y_vals)
return min_y, max_y
def get_rotation_angle(self) -> float:
"""
Calculates the angle of rotation of the BoundingBox
based on the top left and right coordinates.
Returns
-------
angle : float
The angle of rotation of the BoundingBox in degrees.
"""
tl_x: int = self.vertices[0][0]
tr_x: int = self.vertices[1][0]
tl_y: int = self.vertices[0][1]
tr_y: int = self.vertices[1][1]
angle: float = 0
if tr_x - tl_x == 0: # prevent division by 0
angle = 90.0 if (tr_y - tl_y > 0) else -90.0
else:
angle = np.rad2deg(np.arctan((tr_y - tl_y) / (tr_x - tl_x)))
return angle
if __name__ == "__main__":
pass
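# Editor's note: minimal usage sketch, not part of the original module, assuming
# vertices in the documented (top-left, top-right, bottom-right, bottom-left) order:
#   box = BoundingBox(((0, 0), (10, 10), (20, 0), (10, -10)), ObjectType.STD_OBJECT)
#   box.get_x_extremes()      # -> (0, 20)
#   box.get_rotation_angle()  # -> 45.0 (the top edge rises 10 px over a 10 px run)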
| 3.40625
| 3
|
calibration-tools/wiringpi_setpwm.py
|
mrwunderbar666/rpi-vumonitor-python
| 2
|
12787059
|
<gh_stars>1-10
#!/usr/bin/python
"""
== VU Meter Calibration Toolkit ==
==== Pulse Width Modulation and WiringPi GPIO Library ====
Hardware PWM
Manually set the PWM Value and calibrate your output!
Requires:
- Wiring Pi
MIT License
"""
from __future__ import division
import wiringpi
import time
""" Configure your pin here """
PIN_TO_PWM = 18 # Physical Pin 12
OUTPUT = 2
wiringpi.wiringPiSetupGpio()
wiringpi.pinMode(PIN_TO_PWM, OUTPUT)
wiringpi.pwmWrite(PIN_TO_PWM, 0) # Setup PWM using Pin, Initial Value
print("Press CTRL + C to exit")
try:
while True:
pwm_value = int(raw_input("Select PWM Value: "))
wiringpi.pwmWrite(PIN_TO_PWM, pwm_value)
except KeyboardInterrupt:
# manual cleanup
wiringpi.pwmWrite(PIN_TO_PWM, 0)
wiringpi.pinMode(PIN_TO_PWM, 0)
pass
| 3.28125
| 3
|
src/openprocurement/tender/openeu/procedure/models/document.py
|
ProzorroUKR/openprocurement.api
| 10
|
12787060
|
from openprocurement.tender.openua.procedure.models.document import (
PostDocument as BasePostDocument,
PatchDocument as BasePatchDocument,
Document as BaseDocument,
)
from schematics.types import StringType
class PostDocument(BasePostDocument):
language = StringType(required=True, choices=["uk", "en", "ru"], default="uk")
class PatchDocument(BasePatchDocument):
pass
class Document(BaseDocument):
pass
| 2.265625
| 2
|
covid-survive-master/player.py
|
brunouni/HealthPub
| 0
|
12787061
|
import pygame
class Player(pygame.sprite.Sprite):
def __init__(self, *groups):
super().__init__(*groups)
self.image = pygame.image.load("img/baixo1.png")
self.image = pygame.transform.scale(self.image, [45, 45])
self.rect = pygame.Rect(540, 360, 45, 45)
self.speed = 5
def update(self, *args):
keys = pygame.key.get_pressed()
if keys[pygame.K_w]:
self.rect.y -= self.speed
elif keys[pygame.K_s]:
self.rect.y += self.speed
elif keys[pygame.K_a]:
self.rect.x -= self.speed
elif keys[pygame.K_d]:
self.rect.x += self.speed
if self.rect.top < 130:
self.rect.top = 130
elif self.rect.bottom > 680:
self.rect.bottom = 680
elif self.rect.left < 10:
self.rect.left = 10
elif self.rect.right > 1070:
self.rect.right = 1070
| 3.0625
| 3
|
src/pyf/aggregator/plugins/version.py
|
collective/pyf.aggregator
| 0
|
12787062
|
from pkg_resources import parse_version
def process_version(identifier, data):
# parse version to test against:
data["version_raw"] = data["version"]
try:
version = parse_version(data["version"])
except TypeError:
return
try:
parts = version.base_version.split(".")
parts += ["0"] * (4 - len(parts))
data["version_major"] = int(parts[0])
data["version_minor"] = int(parts[1])
data["version_bugfix"] = int(parts[2])
data["version_postfix"] = parts[3]
except ValueError:
return
def load_version(settings):
return process_version
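# Editor's note: minimal usage sketch, not part of the original plugin; the package
# identifier below is purely illustrative.
if __name__ == "__main__":
    data = {"version": "1.2.3"}
    process_version("example-package", data)
    # data now also contains version_raw='1.2.3', version_major=1,
    # version_minor=2, version_bugfix=3, version_postfix='0'
    print(data)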
| 2.578125
| 3
|
recipes/Python/546543_simple_way_create_change_your_registry/recipe-546543.py
|
tdiprima/code
| 2,023
|
12787063
|
# IN THE NAME OF ALLAH
# Nickname: Pcrlth0n
# (C) 2008
# A simple way to create and change your registry on Windows
import win32api
def new_key():
reg1 = open('C:\\reg1.reg', 'w')
reg1.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example""")
reg1.close()
win32api.WinExec('reg import C:\\reg1.reg', 0)
def new_string_key():
reg2 = open('C:\\reg2.reg', 'w')
reg2.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example]\n"String Key"="C:\\\\\"""")
reg2.close()
win32api.WinExec('reg import C:\\reg2.reg', 0)
def new_dword_key():
reg3 = open('C:\\reg3.reg', 'w')
reg3.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example]\n"Dword key"=dword:00000000 """)
reg3.close()
win32api.WinExec('reg import C:\\reg3.reg', 0)
#new_key()
#new_string_key()
#new_dword_key()
| 3.0625
| 3
|
openminer/entry/__init__.py
|
zhd785576549/openminer
| 0
|
12787064
|
<filename>openminer/entry/__init__.py
from openminer.core.commands import execute_from_command
def main():
execute_from_command()
| 1.328125
| 1
|
msi/lgbm_msi.py
|
jeonghyukpark/msi_highlow
| 0
|
12787065
|
<reponame>jeonghyukpark/msi_highlow<gh_stars>0
import pandas as pd
import numpy as np
import lightgbm as lgb
import pickle5 as pickle
import scipy.stats
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
def mean_confidence_interval(data, confidence=0.95):
a = 1.0 * np.array(data)
n = len(a)
m, se = np.mean(a), scipy.stats.sem(a)
h = se * scipy.stats.t.ppf((1 + confidence) / 2., n-1)
return m, m-h, m+h
def preprocess_df(df):
def quantile_25(series):
return np.quantile(series, q=0.25)
def quantile_50(series):
return np.quantile(series, q=0.50)
def quantile_75(series):
return np.quantile(series, q=0.75)
tissue_features = ['prob_ADI', 'prob_BACK', 'prob_DEB', 'prob_LYM',
'prob_MUC', 'prob_MUS', 'prob_NORM', 'prob_STR', 'prob_TUM']
cell_features = ['no-label', 'neoplastic', 'inflammatory', 'connective', 'necrosis', 'non-neoplastic']
gland_features = ['benign_gland', 'malignant_gland']
feature_cols = tissue_features + cell_features + gland_features
df_case = df.groupby(['case_id'])[feature_cols].agg(['mean', 'std', 'max', 'min', quantile_25,quantile_50,quantile_75]).reset_index()
df_case.columns = ['_'.join(col) for col in df_case.columns]
for idx in df_case.index:
case_id = df_case.loc[idx,'case_id_']
for col in ['MSS_or_MSI', 'fold', 'train_or_test', 'cohort']:
col_val = df[df['case_id'] == case_id][col].iloc[0]
df_case.loc[idx, col] = col_val
return df_case, feature_cols
def inference(df_case, feature_cols, model_path):
result_df = {}
result_df['auc'] = []
result_df['lambda'] = []
result_df['learning_rate'] = []
result_df['feature_fraction'] = []
target_columns = []
for prefix in feature_cols:
for appendix in ['mean', 'std', 'max', 'min', 'quantile_25', 'quantile_50', 'quantile_75']:
target_columns.append(f'{prefix}_{appendix}')
with open(model_path, 'rb') as file:
models, params, scalers = pickle.load(file)
df_case_copy = df_case.copy()
for ind, model in enumerate(models):
X_all_transform = scalers[ind].transform(df_case[target_columns])
y_pred = model.predict(X_all_transform, num_iteration=model.best_iteration)
df_case_copy[f'pred_F{ind}'] = y_pred
return df_case_copy
def evaluate(df_case, feature_cols, model_path):
result_df = {}
result_df['model_path'] = []
result_df['auc'] = []
result_df['lambda'] = []
result_df['learning_rate'] = []
result_df['feature_fraction'] = []
target_columns = []
for prefix in feature_cols:
for appendix in ['mean', 'std', 'max', 'min', 'quantile_25', 'quantile_50', 'quantile_75']:
target_columns.append(f'{prefix}_{appendix}')
with open(model_path, 'rb') as file:
models, params, scalers = pickle.load(file)
df_case_copy = df_case.copy()
df_case_copy['target']= df_case['MSS_or_MSI'] == 'MSI'
for ind, model in enumerate(models):
X_all_transform = scalers[ind].transform(df_case[target_columns])
y_pred = model.predict(X_all_transform, num_iteration=model.best_iteration)
df_case_copy[f'pred_{ind}'] = y_pred
aucs = []
target_df = df_case_copy[(df_case_copy['cohort']=='CRC') & (df_case_copy['train_or_test'] == 'test')]
for fold in range(5):
y_valid = target_df[f'target']
y_pred = target_df[f'pred_{fold}']
fpr, tpr, thresholds = metrics.roc_curve(y_valid, y_pred, pos_label=1)
auc = metrics.auc(fpr, tpr)
aucs.append(auc)
cis = mean_confidence_interval(aucs)
result_df['model_path'].append(model_path)
result_df['auc'].append(f'{cis[0]:.4f} ({cis[1]:.4f}-{cis[2]:.4f})')
result_df['lambda'].append(params['lambda_l1'])
result_df['learning_rate'].append(params['learning_rate'])
result_df['feature_fraction'].append(params['feature_fraction'])
result_df=pd.DataFrame(result_df)
return result_df
| 2.125
| 2
|
infrastructor/data/SqlBuilder.py
|
ahmetcagriakca/ImageProcessingApi
| 0
|
12787066
|
<reponame>ahmetcagriakca/ImageProcessingApi
class SqlBuilder:
def __init__(self):
pass
def build(self):
pass
class DefaultInsertSqlBuilder(SqlBuilder):
def __init__(self, table_name, length):
self.table_name = table_name
self.length = length
def build(self):
value_sql = ''
for rec in range(self.length):
value_sql += f':{rec + 1}'
if rec != self.length - 1:
value_sql += ','
return f"INSERT INTO {self.table_name} values ({value_sql})"
| 2.75
| 3
|
src/son/package/tests/test_integ_Packager.py
|
dang03/son-cli
| 4
|
12787067
|
# Copyright (c) 2015 SONATA-NFV, UBIWHERE
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, UBIWHERE
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
import unittest
from son.package.package import Packager
from son.workspace.workspace import Workspace
from son.workspace.workspace import Project
class IntPDTester(unittest.TestCase):
__pfd__ = {
'version': '0.5',
'package': {
'version': '0.1',
'name': 'sonata-project-sample',
'vendor': 'com.sonata.project',
'maintainer': 'Name, Company, Contact',
'description': 'Project description',
},
'descriptor_extension': 'yml'
}
def __init__(self, *args, **kwargs):
super(IntPDTester, self).__init__(*args, **kwargs)
ws = Workspace("")
prj = Project(ws, '/')
self.pck = Packager(workspace=ws, project=prj, generate_pd=False)
def test_correct_gds(self):
""" Test the correct general description section """
gsd = self.pck.package_gds(IntPDTester.__pfd__)
self.assertNotEqual(gsd, False)
def test_incomplete_gds(self):
"""
Test the returning message when the provided
project has incomplete information.
"""
pfd = IntPDTester.__pfd__
pfd.pop('package')
gsd = self.pck.package_gds(pfd)
self.assertEqual(gsd, None)
| 1.726563
| 2
|
backend/animal_config.py
|
PH-P-H/flask-object-detection
| 0
|
12787068
|
category_index = {1: 'brown_bear',
                  2: 'cattle',
                  3: 'deer',
                  4: 'dog',
                  5: 'horse',
                  6: 'lynx',
                  7: 'mule',
                  8: 'pig',
                  9: 'raccoon',
                  10: 'sheep',
                  11: 'tiger'}
| 1.429688
| 1
|
deploy-kanboard.py
|
r4jeshwar/python-kanboard-deploy
| 0
|
12787069
|
#!/usr/bin/env python3
import subprocess as sp
def system_update():
sp.call(["sudo", "apt-get","update"])
sp.call(["sudo", "apt", "upgrade", "-y"])
def install_apache_php():
sp.call(["sudo", "apt", "install", "-y", "apache2", "libapache2-mod-php", "php-cli", "php-mbstring", "php-sqlite3", "php-opcache", "php-json", "php-mysql", "php-pgsql", "php-ldap", "php-gd", "php-xml"])
sp.call(["sudo", "systemctl", "enable", "--now", "apache2.service"])
def install_mariadb():
sp.call(["sudo", "apt", "install", "-y", "mariadb-server", "mariadb-client"])
sp.call(["sudo", "systemctl", "enable", "--now", "mariadb.service"])
sp.call(["sudo", "mysql_secure_installation"])
def install_kb():
version = input("Enter the version: ")
print(version)
kbversion = "kanboard-"+version+"/data"
filename = "v"+version+".tar.gz"
url = "https://github.com/kanboard/kanboard/archive/"+filename
print(url)
sp.call(["wget", url ])
sp.call(["tar", "xzvf", filename, "-C", "/var/www/html/"])
sp.call(["sudo", "mv", "/var/www/html/kanboard-"+version, "/var/www/html/kanboard"])
sp.call(["chown", "-R", "www-data:www-data", "/var/www/html/kanboard/data"])
sp.call(["rm", filename])
sp.call(["mysql", "-u", "root" , "-p", "-e", "CREATE DATABASE kanboard"])
sp.call(["mysql", "-u", "root", "-p", "-e", "CREATE USER 'kanboarduser'@'localhost' IDENTIFIED BY 'rajeshwar';"])
sp.call(["mysql", "-u", "root", "-p", "-e", "GRANT ALL PRIVILEGES ON kanboard.* TO 'kanboarduser'@'localhost' IDENTIFIED BY '<PASSWORD>' WITH GRANT OPTION;"])
sp.call(["mysql", "-u", "root", "-p", "-e", "FLUSH PRIVILEGES;"])
sp.call(["sudo", "sed", "-i", "s/DB_DRIVER', 'sqlite'/DB_DRIVER', 'mysql'/g", "/var/www/html/kanboard/config.default.php"])
sp.call(["sudo", "sed", "-i", "s/DB_USERNAME', 'root'/DB_USERNAME', 'kanboarduser'/g", "/var/www/html/kanboard/config.default.php"])
sp.call(["sudo", "sed", "-i", "s/DB_PASSWORD', ''/DB_PASSWORD', '<PASSWORD>'/g", "/var/www/html/kanboard/config.default.php"])
def restart_apache():
sp.call(["sudo", "touch", "/etc/php/7.4/mods-available/php.ini"])
f=open('/etc/php/7.4/mods-available/php.ini', "w")
sp.call(["echo", "extension=php.so"],stdout=f)
sp.call(["sudo", "systemctl", "restart", "apache2.service"])
sp.call(["sudo", "systemctl", "restart", "mysqld.service"])
def update_admin_passwd():
sp.call(["python3", "./update-admin-passwd.py"])
if __name__ == '__main__':
system_update()
install_apache_php()
install_mariadb()
install_kb()
restart_apache()
update_admin_passwd()
| 2.078125
| 2
|
Back/ecoreleve_server/utils/thesaurusLoad.py
|
NaturalSolutions/ecoReleve-Data
| 15
|
12787070
|
from sqlalchemy import select
from ..core import Base
thesaurusDictTraduction = {}
invertedThesaurusDict = {'en': {}, 'fr': {}}
userOAuthDict = {}
def loadThesaurusTrad(config):
session = config.registry.dbmaker()
thesTable = Base.metadata.tables['ERDThesaurusTerm']
query = select(thesTable.c)
results = session.execute(query).fetchall()
for row in results:
newTraduction = {
'en': row['nameEn'], 'fr': row['nameFr'], 'parentID': row['parentID']}
if thesaurusDictTraduction.get(row['fullPath'], None):
thesaurusDictTraduction[row['fullPath']].append(newTraduction)
else:
thesaurusDictTraduction[row['fullPath']] = [newTraduction]
invertedThesaurusDict['en'][row['nameEn']] = row['fullPath']
invertedThesaurusDict['fr'][row['nameFr']] = row['fullPath']
session.close()
| 2.328125
| 2
|
last_seen/migrations/0001_initial.py
|
Jafte/jasn.ru
| 0
|
12787071
|
<reponame>Jafte/jasn.ru
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-13 00:15
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.CreateModel(
name='LastSeen',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('module', models.CharField(default='default', max_length=20)),
('last_seen', models.DateTimeField(default=django.utils.timezone.now)),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-last_seen',),
},
),
migrations.AlterUniqueTogether(
name='lastseen',
unique_together=set([('user', 'site', 'module')]),
),
]
| 1.757813
| 2
|
main.py
|
pzmarzly/ancs4linux
| 120
|
12787072
|
<filename>main.py
#!/usr/bin/env python3
# https://developer.apple.com/library/archive/documentation/CoreBluetooth/Reference/AppleNotificationCenterServiceSpecification/Introduction/Introduction.html
import sys
import signal
import argparse
import time
from Hci import Hci
from Handler import DefaultHandler
parser = argparse.ArgumentParser()
parser.add_argument("--hci", metavar="INT", type=int, default=0,
help="use Bluetooth hciX (default 0, see `hcitool dev')")
parser.add_argument("--resolution", metavar="INT", type=int, default=20,
help="polling rate (default 20 per second)")
args = parser.parse_args()
hciID = args.hci
resolution = args.resolution
def signal_handler(sig, frame):
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
handler = DefaultHandler()
hci = Hci(hciID)
while True:
device = hci.search_for_device()
while device is None:
time.sleep(1)
device = hci.search_for_device()
handler.device_connected()
try:
device.main_loop(handler, resolution)
except Exception as e:
handler.error(exception=e)
handler.device_disconnected()
| 2.65625
| 3
|
stackable/contrib/config/conf_model_l10n.py
|
productaize/stackable
| 1
|
12787073
|
<filename>stackable/contrib/config/conf_model_l10n.py
'''
Created on Jan 13, 2015
@author: patrick
'''
from stackable.stackable import StackableSettings
class Config_ModelTranslation(object):
# http://django-modeltranslation.readthedocs.org/en/latest/installation.html#setup
USE_I18N = True
MODELTRANSLATION_DEFAULT_LANGUAGE = 'en'
_apps_ = (
'modeltranslation',
)
__patches__ = (
StackableSettings.patch_apps(_apps_, prepend=True,
at='django.contrib.admin'),
)
# fix "'Country' object has no attribute 'name_ascii_en'"
# https://bitbucket.org/neithere/django-autoslug/issues/42/setting-to-enable-disable-modeltranslation
AUTOSLUG_MODELTRANSLATION_ENABLE = False
| 1.90625
| 2
|
python/plotly/die_visual.py
|
letitgone/python-crash-course
| 0
|
12787074
|
# @Author ZhangGJ
# @Date 2021/01/13 21:59
from plotly import offline
from plotly.graph_objs import Bar, Layout
from die import Die
die = Die()
# Roll the die several times and store the results in a list
results = []
for roll_num in range(1000):
result = die.roll()
results.append(result)
# Analyze the results
frequencies = []
for value in range(1, die.num_sides + 1):
frequency = results.count(value)
frequencies.append(frequency)
# Visualize the results
x_values = list(range(1, die.num_sides + 1))
data = [Bar(x=x_values, y=frequencies)]
x_axis_config = {'title': 'Result'}
y_axis_config = {'title': 'Frequency of Result'}
my_layout = Layout(title='Results of rolling one D6 1,000 times', xaxis=x_axis_config, yaxis=y_axis_config)
offline.plot({'data': data, 'layout': my_layout}, filename='d6.html')
| 2.8125
| 3
|
opencorpora/converters.py
|
OpenCorpora/opencorpora-tools
| 3
|
12787075
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import logging
from opencorpora import reader
from opencorpora.reader import CorpusReader
from russian_tagsets import converters
from pymorphy2.opencorpora_dict.parse import parse_opencorpora_xml
class UDConverter(object):
"""
Tries to convert the data provided by CorpusReader
to Universal Dependencies 1.4 (CoNLL-U) format.
OpenCorpora currently has no syntax markup so
respective fields remain empty
Processes and returns one sentence at a time
"""
def __init__(self, reader, path_to_dict, docids=None, categories=None):
assert isinstance(reader, CorpusReader)
self.docs = reader.iter_documents(docids, categories)
self.converter = converters.converter('opencorpora-int', 'ud14')
# prepare data to normalize verbal forms to INFN
self.lemma_rewrite = {}
dictionary = parse_opencorpora_xml(path_to_dict)
for from_id, to_id, type_id in dictionary.links:
if int(type_id) in (3, 5): # INFN -> VERB, GRND
self.lemma_rewrite[to_id] = dictionary.lexemes[from_id][0][0]
def sentences(self):
for doc in self.docs:
for sent in doc.iter_parsed_sents():
yield self._convert_sentence(sent)
def _convert_token(self, token, token_no):
if len(token[1]) > 1:
raise Exception("Ambiguous parses cannot be converted to UD: {}".format(token[1]))
lemma_id = token[1][0][2]
lemma = self.lemma_rewrite.get(lemma_id, token[1][0][0])
pos, grams = self.converter(token[1][0][1], lemma).split()
return '\t'.join((
str(token_no),
token[0],
lemma.upper(),
pos,
'_', # here should be XPOSTAG (lang-specific POS)
grams,
'\t'.join(['_'] * 4) # here should be syntax and misc
))
def _convert_sentence(self, sent):
return '\n'.join(self._convert_token(token, i+1) for i, token in enumerate(sent))
if __name__ == "__main__":
reader = CorpusReader(sys.argv[1])
conv = UDConverter(reader, sys.argv[2])
for sent_str in conv.sentences():
print(sent_str.encode('utf-8') + '\n')
| 2.296875
| 2
|
SingleCORE/Canny Edge Algorithm/PrewittEdgeDetector.py
|
RjPatil27/ACA-Project
| 0
|
12787076
|
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
# Open the image
img = np.array(Image.open('house.jpg')).astype(np.uint8)
# Apply gray scale
gray_img = np.round(0.299 * img[:, :, 0] +
0.587 * img[:, :, 1] +
0.114 * img[:, :, 2]).astype(np.uint8)
# Prewitt Operator
h, w = gray_img.shape
# define filters
horizontal = np.array([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]]) # s2
vertical = np.array([[-1, -1, -1], [0, 0, 0], [1, 1, 1]]) # s1
# define images with 0s
newgradientImage = np.zeros((h, w))
# offset by 1
for i in range(1, h - 1):
for j in range(1, w - 1):
horizontalGrad = (horizontal[0, 0] * gray_img[i - 1, j - 1]) + \
(horizontal[0, 1] * gray_img[i - 1, j]) + \
(horizontal[0, 2] * gray_img[i - 1, j + 1]) + \
(horizontal[1, 0] * gray_img[i, j - 1]) + \
(horizontal[1, 1] * gray_img[i, j]) + \
(horizontal[1, 2] * gray_img[i, j + 1]) + \
(horizontal[2, 0] * gray_img[i + 1, j - 1]) + \
(horizontal[2, 1] * gray_img[i + 1, j]) + \
(horizontal[2, 2] * gray_img[i + 1, j + 1])
verticalGrad = (vertical[0, 0] * gray_img[i - 1, j - 1]) + \
(vertical[0, 1] * gray_img[i - 1, j]) + \
(vertical[0, 2] * gray_img[i - 1, j + 1]) + \
(vertical[1, 0] * gray_img[i, j - 1]) + \
(vertical[1, 1] * gray_img[i, j]) + \
(vertical[1, 2] * gray_img[i, j + 1]) + \
(vertical[2, 0] * gray_img[i + 1, j - 1]) + \
(vertical[2, 1] * gray_img[i + 1, j]) + \
(vertical[2, 2] * gray_img[i + 1, j + 1])
# Edge Magnitude
mag = np.sqrt(pow(horizontalGrad, 2.0) + pow(verticalGrad, 2.0))
newgradientImage[i - 1, j - 1] = mag
plt.figure()
plt.title('Prewitt_House')
plt.imsave('prewitt_house.jpg', newgradientImage, cmap='gray')
plt.imshow(newgradientImage, cmap='gray')
plt.show()
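# Editor's note: optional vectorized equivalent (assumes SciPy is an acceptable extra
# dependency). The nested loops above perform a plain cross-correlation with the two
# Prewitt kernels, so up to border handling the same gradient magnitude is given by:
#   from scipy.ndimage import correlate
#   gx = correlate(gray_img.astype(float), horizontal)
#   gy = correlate(gray_img.astype(float), vertical)
#   mag = np.sqrt(gx ** 2 + gy ** 2)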
| 3.21875
| 3
|
scripts/grab.py
|
scanon/kb_ModelIndexer
| 0
|
12787077
|
<gh_stars>0
from Workspace.WorkspaceClient import Workspace
import json
import os
def grab(upa, file):
if not os.path.exists(file):
d = ws.get_objects2({'objects': [{'ref': upa}]})
with open(file, 'w') as f:
f.write(json.dumps(d, indent=4))
ws = Workspace('https://ci.kbase.us/services/ws')
grab('36815/4/1', './test/mock_data/media_object.json')
grab('17335/21/2', './test/mock_data/fbamodel_object.json')
grab('4/23/1', './test/mock_data/media2_object.json')
upa = '16174/15/1'
l = ['scientific_name', 'taxonomy', 'id']
fname = './test/mock_data/genome_sub_object.json'
if not os.path.exists(fname):
d = ws.get_objects2({'objects': [{'ref': upa, 'included': l}]})
with open(fname, 'w') as f:
f.write(json.dumps(d, indent=2))
| 2.421875
| 2
|
rcsb/utils/chem/ChemCompSearchWrapper.py
|
rcsb/py-rcsb_utils_chem
| 0
|
12787078
|
##
# File: ChemCompSearchWrapper.py
# Author: jdw
# Date: 9-Mar-2020
# Version: 0.001
#
# Updates:
#
##
"""
Wrapper for chemical component search operations.
"""
__docformat__ = "restructuredtext en"
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "Apache 2.0"
import copy
import logging
import platform
import resource
import os
import time
from collections import namedtuple
from rcsb.utils.chem.ChemCompIndexProvider import ChemCompIndexProvider
from rcsb.utils.chem.ChemCompSearchIndexProvider import ChemCompSearchIndexProvider
from rcsb.utils.chem.MolecularFormula import MolecularFormula
from rcsb.utils.chem.OeSearchMoleculeProvider import OeSearchMoleculeProvider
from rcsb.utils.chem.OeIoUtils import OeIoUtils
from rcsb.utils.chem.OeSearchUtils import OeSearchUtils
from rcsb.utils.chem.OeSubStructSearchUtils import OeSubStructSearchUtils
from rcsb.utils.io.FileUtil import FileUtil
from rcsb.utils.io.MarshalUtil import MarshalUtil
from rcsb.utils.io.SftpUtil import SftpUtil
from rcsb.utils.io.SingletonClass import SingletonClass
HERE = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
logger = logging.getLogger(__name__)
MatchResults = namedtuple("MatchResults", "ccId oeMol searchType matchOpts screenType fpType fpScore oeIdx formula", defaults=(None,) * 9)
class ChemCompSearchWrapper(SingletonClass):
"""Wrapper for chemical component search operations."""
def __init__(self, **kwargs):
"""Wrapper class for chemical search/depiction operations.
Path and prefix data for wrapper class may be set as keyword arguments
as environmental variables.
Args:
cachePath (str): path to top-level cache directory used to store search index file dependencies
(default environment variable CHEM_SEARCH_CACHE_PATH or ".")
ccFileNamePrefix (str): prefix code used to distinguish different subsets of chemical definitions
(default environment variable CHEM_SEARCH_CC_PREFIX or "cc-full")
"""
self.__startTime = time.time()
#
self.__cachePath = kwargs.get("cachePath", os.environ.get("CHEM_SEARCH_CACHE_PATH", "."))
self.__ccFileNamePrefix = kwargs.get("ccFileNamePrefix", os.environ.get("CHEM_SEARCH_CC_PREFIX", "cc-full"))
#
self.__dependFileName = "ChemCompSearchWrapperData.tar.gz"
self.__dependTarFilePath = os.path.join(self.__cachePath, self.__dependFileName)
# ---
self.__mU = MarshalUtil(workPath=self.__cachePath)
# ---
self.__configD = {}
self.__ccIdxP = None
self.__siIdxP = None
self.__siIdx = {}
self.__oesmP = None
self.__oesU = None
self.__oesubsU = None
# ---
self.__statusDescriptorError = -100
self.__searchError = -200
self.__searchSuccess = 0
def setConfig(self, ccUrlTarget, birdUrlTarget, **kwargs):
"""Provide the chemical definition source path details for rebuilding search
index file dependencies.
Args:
ccUrlTarget (str): path to concatenated chemical component definition file
birdUrlTarget (str): path to the concatenated BIRD definition file
Other options are propagated to configurations of the wrapped classes in __bootstrapConfig()
"""
kwargs["ccUrlTarget"] = ccUrlTarget
kwargs["birdUrlTarget"] = birdUrlTarget
kwargs["cachePath"] = self.__cachePath
kwargs["ccFileNamePrefix"] = self.__ccFileNamePrefix
self.__configD = self.__bootstrapConfig(**kwargs)
return len(self.__configD) >= 3
def __bootstrapConfig(self, **kwargs):
"""Build on-the-fly default configuration for this wrapper class."""
# The following few options have no defaults -- and should be specified.
ccUrlTarget = kwargs.get("ccUrlTarget", None)
birdUrlTarget = kwargs.get("birdUrlTarget", None)
cachePath = kwargs.get("cachePath", None)
ccFileNamePrefix = kwargs.get("ccFileNamePrefix", None)
logger.info("Bootstrap configuration for prefix %r cc %r bird %r", ccFileNamePrefix, ccUrlTarget, birdUrlTarget)
# ---
# Reasonable values are selected for the remaining options...
oeFileNamePrefix = "oe-" + ccFileNamePrefix
try:
storeConfig = kwargs.get("storeConfig", True)
molLimit = kwargs.get("molLimit", None)
useCache = kwargs.get("useCache", False)
logSizes = kwargs.get("logSizes", False)
#
numProc = kwargs.get("numProc", 12)
maxProc = os.cpu_count()
numProc = min(numProc, maxProc)
maxChunkSize = kwargs.get("maxChunkSize", 50)
#
logger.debug("+++ >>> Assigning numProc as %d", numProc)
#
limitPerceptions = kwargs.get("limitPerceptions", False)
quietFlag = kwargs.get("quietFlag", True)
#
# fpTypeCuttoffD = {"TREE": 0.6, "MACCS": 0.9, "PATH": 0.6, "CIRCULAR": 0.6, "LINGO": 0.9}
fpTypeCuttoffD = kwargs.get("fpTypeCuttoffD", {"TREE": 0.6, "MACCS": 0.9})
buildTypeList = kwargs.get("buildTypeList", ["oe-iso-smiles", "oe-smiles", "cactvs-iso-smiles", "cactvs-smiles", "inchi"])
#
oesmpKwargs = {
"ccUrlTarget": ccUrlTarget,
"birdUrlTarget": birdUrlTarget,
"cachePath": cachePath,
"useCache": useCache,
"ccFileNamePrefix": ccFileNamePrefix,
"oeFileNamePrefix": oeFileNamePrefix,
"limitPerceptions": limitPerceptions,
"minCount": None,
"maxFpResults": 50,
"fpTypeCuttoffD": fpTypeCuttoffD,
"buildTypeList": buildTypeList,
"screenTypeList": None,
"quietFlag": quietFlag,
"numProc": numProc,
"maxChunkSize": maxChunkSize,
"molLimit": molLimit,
"logSizes": logSizes,
"suppressHydrogens": True,
}
ccsiKwargs = {
"ccUrlTarget": ccUrlTarget,
"birdUrlTarget": birdUrlTarget,
"cachePath": cachePath,
"useCache": useCache,
"ccFileNamePrefix": ccFileNamePrefix,
"oeFileNamePrefix": oeFileNamePrefix,
"limitPerceptions": limitPerceptions,
"minCount": None,
"numProc": numProc,
"quietFlag": quietFlag,
"maxChunkSize": maxChunkSize,
"molLimit": None,
"logSizes": False,
}
configD = {"versionNumber": 0.30, "ccsiKwargs": ccsiKwargs, "oesmpKwargs": oesmpKwargs}
#
if storeConfig:
configDirPath = os.path.join(cachePath, "config")
configFilePath = os.path.join(configDirPath, ccFileNamePrefix + "-config.json")
logger.info("Saving configuration bootstrap in %r", configFilePath)
self.__mU.mkdir(configDirPath)
self.__mU.doExport(configFilePath, configD, fmt="json", indent=3)
except Exception as e:
logger.exception("Failing with %s", str(e))
return configD
def readConfig(self, resetCachePath=True):
"""Read a prepared configuration file for the search wrapper class. This will override
any default configuration settings.
Args:
resetCachePath (bool): update cachePath configuration option with the current cachePath setting.
Returns:
bool : True for success or False otherwise
"""
#
#
ok = False
try:
#
configFilePath = os.path.join(self.__cachePath, "config", self.__ccFileNamePrefix + "-config.json")
configD = self.__mU.doImport(configFilePath, fmt="json")
logger.debug("ConfigD: %r", configD)
if configD and (len(configD) > 2) and float(configD["versionNumber"]) > 0.2:
logger.info("Read version %r sections %r from %s", configD["versionNumber"], list(configD.keys()), configFilePath)
ok = True
self.__configD = configD
if resetCachePath:
# Allow the configuration to be relocatable.
configD["ccsiKwargs"]["cachePath"] = self.__cachePath
configD["oesmpKwargs"]["cachePath"] = self.__cachePath
else:
logger.error("Reading config file fails from %r", configFilePath)
except Exception as e:
logger.exception("Failing with %s", str(e))
ok = False
return ok
def buildDependenices(self, ccUrlTarget, birdUrlTarget, **kwargs):
"""Convenience method to build configuration and static dependencies for the chemical search services.
Args:
ccUrlTarget (str): path to source concatenated chemical component definition file
birdUrlTarget (str): path to the source concatenated BIRD definition file
Other options are propagated to configurations of the wrapped classes in __bootstrapConfig()
"""
try:
okT = False
ok1 = self.setConfig(ccUrlTarget=ccUrlTarget, birdUrlTarget=birdUrlTarget, **kwargs)
useCache = kwargs.get("useCache", False)
ok2 = self.updateChemCompIndex(useCache=useCache)
ok3 = self.updateSearchIndex(useCache=useCache)
ok4 = self.updateSearchMoleculeProvider(useCache=useCache)
okBuild = ok1 and ok2 and ok3 and ok4
if okBuild:
fileU = FileUtil()
dirPathList = [os.path.join(self.__cachePath, subDir) for subDir in ["chem_comp", "oe_mol", "config"]]
okT = fileU.bundleTarfile(self.__dependTarFilePath, dirPathList, mode="w:gz", recursive=True)
#
return okT and okBuild
except Exception as e:
logger.exception("Failing build with %r and %r with %s", ccUrlTarget, birdUrlTarget, str(e))
return False
def stashDependencies(self, url, dirPath, bundleLabel="A", userName=None, pw=None):
"""Store a copy of the bundled search dependencies remotely -
Args:
url (str): URL string for the destination host (e.g. sftp://myserver.net or None for a local file)
dirPath (str): directory path on the remote resource
bundleLabel (str, optional): optional label prepended to the stashed dependency bundle artifact (default='A')
userName (str, optional): optional access information. Defaults to None.
pw (str, optional): optional access information. Defaults to None.
Returns:
bool: True for success or False otherwise
"""
try:
ok = False
fn = self.__makeBundleFileName(self.__dependFileName, bundleLabel=bundleLabel)
if url and url.startswith("sftp://"):
sftpU = SftpUtil()
hostName = url[7:]
ok = sftpU.connect(hostName, userName, pw=pw, port=22)
if ok:
remotePath = os.path.join("/", dirPath, fn)
ok = sftpU.put(self.__dependTarFilePath, remotePath)
elif not url:
fileU = FileUtil()
remotePath = os.path.join(dirPath, fn)
ok = fileU.put(self.__dependTarFilePath, remotePath)
else:
logger.error("Unsupported stash protocol %r", url)
return ok
except Exception as e:
logger.exception("For %r %r failing with %s", url, dirPath, str(e))
return False
def __makeBundleFileName(self, rootName, bundleLabel="A"):
fn = rootName
try:
fn = rootName
fn = "%s-%s" % (bundleLabel.upper(), rootName) if bundleLabel else rootName
except Exception as e:
logger.exception("Failing with %s", str(e))
return fn
def restoreDependencies(self, url, dirPath, bundleLabel="A", userName=None, pw=None):
"""Restore bundled dependencies from remote storage and unbundle these in the
current local cache directory.
Args:
url (str): remote URL
dirPath (str): directory path on the remote resource
bundleLabel (str, optional): optional label prepended to the stashed dependency bundle artifact (default='A')
userName (str, optional): optional access information. Defaults to None.
pw (str, optional): optional access information. Defaults to None.
"""
try:
ok = False
fileU = FileUtil()
fn = self.__makeBundleFileName(self.__dependFileName, bundleLabel=bundleLabel)
if not url:
remotePath = os.path.join(dirPath, fn)
ok = fileU.get(remotePath, self.__dependTarFilePath)
elif url and url.startswith("http://"):
remotePath = url + os.path.join("/", dirPath, fn)
ok = fileU.get(remotePath, self.__dependTarFilePath)
elif url and url.startswith("sftp://"):
sftpU = SftpUtil()
ok = sftpU.connect(url[7:], userName, pw=pw, port=22)
if ok:
remotePath = os.path.join(dirPath, fn)
ok = sftpU.get(remotePath, self.__dependTarFilePath)
else:
logger.error("Unsupported protocol %r", url)
if ok:
ok = fileU.unbundleTarfile(self.__dependTarFilePath, dirPath=self.__cachePath)
return ok
except Exception as e:
logger.exception("For %r %r Failing with %s", url, dirPath, str(e))
ok = False
return ok
def updateChemCompIndex(self, useCache=False):
"""Rebuild the basic index of source chemical component and BIRD definitions.
Update the internal state of this index in the current object instance.
Resource requirements: 94 sec 1 proc 7GB memory macbook pro
Args:
useCache (bool): False to rebuild search index and True to reload
Returns:
bool: True for success or false otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["ccsiKwargs"]) if "ccsiKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
ccIdxP = ChemCompIndexProvider(**kwargs)
ok = ccIdxP.testCache()
self.__ccIdxP = ccIdxP if ok else None
logger.info("Chemical component index status %r", ok)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def getChemCompIndex(self):
return self.__ccIdxP.getIndex() if self.__ccIdxP else {}
def getSearchMoleculeProvider(self):
return self.__oesmP if self.__oesmP else None
def updateSearchIndex(self, useCache=False):
"""Rebuild the search index from source chemical component and BIRD definitions.
Update the internal state of this index in the current object instance.
Resource requirements 771 secs 6 proc macbook pro 7GB memory.
Args:
useCache (bool): False to rebuild search index and True to reload
Returns:
bool: True for success or false otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["ccsiKwargs"]) if "ccsiKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
siIdxP = ChemCompSearchIndexProvider(**kwargs)
ok = siIdxP.testCache()
self.__siIdxP = siIdxP if siIdxP else None
self.__siIdx = siIdxP.getIndex() if siIdxP and ok else {}
logger.info("Search index status %r index len %d", ok, len(self.__siIdx) if self.__siIdx else 0)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def updateSearchMoleculeProvider(self, useCache=False):
"""Rebuild the search molecule provider.
Update the internal state of this object reference in the current object instance.
Resource requirements: 151 seconds 1 proc 0.5GB memory macbook pro
Args:
useCache (bool): False to rebuild molecule store and True to reload
Returns:
bool: True for success or false otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["oesmpKwargs"]) if "oesmpKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
oesmP = OeSearchMoleculeProvider(**kwargs)
ok = oesmP.testCache()
self.__oesmP = oesmP if oesmP and ok else None
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def reloadSearchDatabase(self):
"""Reload the in-memory search databases from the OE molecule provider.
Resource requirements: ~90sec load time 0.35 GB memory
Returns:
bool: True for success or False otherwise
"""
ok = ok1 = ok2 = False
try:
okmp = self.updateSearchMoleculeProvider(useCache=True)
if not okmp:
return ok
fpTypeCuttoffD = self.__configD["oesmpKwargs"]["fpTypeCuttoffD"] if "fpTypeCuttoffD" in self.__configD["oesmpKwargs"] else {}
fpTypeList = [k for k, v in fpTypeCuttoffD.items()]
oesU = OeSearchUtils(self.__oesmP, fpTypeList=fpTypeList)
ok1 = oesU.testCache()
self.__oesU = oesU if ok1 else None
#
oesubsU = OeSubStructSearchUtils(self.__oesmP)
ok2 = oesubsU.testCache()
self.__oesubsU = oesubsU if ok2 else None
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok1 and ok2
def searchByDescriptor(self, descriptor, descriptorType, matchOpts="graph-relaxed", searchId=None):
"""Wrapper method for descriptor match and descriptor substructure search methods.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
descriptorType (str): descriptor type (SMILES, InChI)
matchOpts (str, optional): graph match criteria (graph-relaxed, graph-relaxed-stereo, graph-strict,
fingerprint-similarity, sub-struct-graph-relaxed, sub-struct-graph-relaxed-stereo,
sub-struct-graph-strict). Defaults to "graph-relaxed".
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, graph match and finger match lists of type (MatchResults)
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
if matchOpts.startswith("sub-struct-"):
return self.subStructSearchByDescriptor(descriptor, descriptorType, matchOpts=matchOpts, searchId=searchId)
else:
return self.matchByDescriptor(descriptor, descriptorType, matchOpts=matchOpts, searchId=searchId)
def matchByDescriptor(self, descriptor, descriptorType, matchOpts="graph-relaxed", searchId=None):
Return graph match (with fingerprint pre-filtering) and fingerprint search results for the
input descriptor.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
descriptorType (str): descriptor type (SMILES, InChI)
matchOpts (str, optional): graph match criteria (graph-relaxed, graph-relaxed-stereo, graph-strict,
fingerprint-similarity). Defaults to "graph-relaxed".
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, graph match and finger match lists of type (MatchResults)
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
ssL = fpL = []
retStatus = False
statusCode = -200
try:
fpTypeCuttoffD = self.__configD["oesmpKwargs"]["fpTypeCuttoffD"] if "fpTypeCuttoffD" in self.__configD["oesmpKwargs"] else {}
maxFpResults = self.__configD["oesmpKwargs"]["maxFpResults"] if "maxFpResults" in self.__configD["oesmpKwargs"] else 50
limitPerceptions = self.__configD["oesmpKwargs"]["limitPerceptions"] if "limitPerceptions" in self.__configD["oesmpKwargs"] else False
#
searchId = searchId if searchId else "query"
messageTag = searchId + ":" + descriptorType
oeioU = OeIoUtils()
oeMol = oeioU.descriptorToMol(descriptor, descriptorType, limitPerceptions=limitPerceptions, messageTag=messageTag)
oeMol = oeioU.suppressHydrogens(oeMol)
if not oeMol:
logger.warning("descriptor type %r molecule build fails: %r", descriptorType, descriptor)
return self.__statusDescriptorError, ssL, fpL
#
retStatus, ssL, fpL = self.__oesU.searchSubStructureAndFingerPrint(oeMol, list(fpTypeCuttoffD.items())[:2], maxFpResults, matchOpts=matchOpts)
statusCode = 0 if retStatus else self.__searchError
except Exception as e:
logger.exception("Failing with %s", str(e))
#
return statusCode, ssL, fpL
def subStructSearchByDescriptor(self, descriptor, descriptorType, matchOpts="sub-struct-graph-relaxed", searchId=None):
Return substructure search results (with index pre-filtering) for the
input descriptor.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
descriptorType (str): descriptor type (SMILES, InChI)
matchOpts (str, optional): graph match criteria (sub-struct-graph-relaxed, sub-struct-graph-relaxed-stereo,
sub-struct-graph-strict). Defaults to "sub-struct-graph-relaxed".
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, substructure search results of type (MatchResults), empty list placeholder
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
ssL = []
retStatus = False
statusCode = -200
try:
limitPerceptions = self.__configD["oesmpKwargs"]["limitPerceptions"] if "limitPerceptions" in self.__configD["oesmpKwargs"] else False
numProc = self.__configD["oesmpKwargs"]["numProc"] if "numProc" in self.__configD["oesmpKwargs"] else 4
#
searchId = searchId if searchId else "query"
messageTag = searchId + ":" + descriptorType
oeioU = OeIoUtils()
oeMol = oeioU.descriptorToMol(descriptor, descriptorType, limitPerceptions=limitPerceptions, messageTag=messageTag)
oeMol = oeioU.suppressHydrogens(oeMol)
if not oeMol:
logger.warning("descriptor type %r molecule build fails: %r", descriptorType, descriptor)
return self.__statusDescriptorError, ssL, []
#
ccIdL = self.__oesubsU.prefilterIndex(oeMol, self.__siIdxP, matchOpts=matchOpts)
retStatus, ssL = self.__oesubsU.searchSubStructure(oeMol, ccIdList=ccIdL, matchOpts=matchOpts, numProc=numProc)
statusCode = 0 if retStatus else self.__searchError
except Exception as e:
logger.exception("Failing with %s", str(e))
#
return statusCode, ssL, []
def matchByFormulaRange(self, elementRangeD, matchSubset=False, searchId=None):
"""Return formula match results for input element range dictionary.
Args:
elementRangeD (dict): {'<element_name>: {'min': <int>, 'max': <int>}, ... }
matchSubset (bool, optional): query for formula subset (default: False)
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list): status, list of chemical component identifiers
"""
ok = False
rL = []
try:
startTime = time.time()
searchId = searchId if searchId else "query"
rL = self.__ccIdxP.matchMolecularFormulaRange(elementRangeD, matchSubset=matchSubset)
ok = True
logger.info("%s formula %r matched %d (%.4f seconds)", searchId, elementRangeD, len(rL), time.time() - startTime)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok, rL
def matchByFormula(self, formula, matchSubset=False, searchId=None):
"""Return formula match results for input molecular formula.
Args:
formula (str): molecular formula (ex. 'C6H6')
matchSubset (bool, optional): query for formula subset (default: False)
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list): status, list of chemical component identifiers
"""
ok = False
rL = []
try:
startTime = time.time()
searchId = searchId if searchId else "query"
mf = MolecularFormula()
eD = mf.parseFormula(formula)
elementRangeD = {k.upper(): {"min": v, "max": v} for k, v in eD.items()}
rL = self.__ccIdxP.matchMolecularFormulaRange(elementRangeD, matchSubset=matchSubset)
ok = True
logger.info("%s formula %r matched %d (%.4f seconds)", searchId, elementRangeD, len(rL), time.time() - startTime)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok, rL
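    # Illustrative sketch (assuming MolecularFormula().parseFormula("C6H6") yields
    # {"C": 6, "H": 6}): the elementRangeD passed to matchMolecularFormulaRange() above
    # would then be {"C": {"min": 6, "max": 6}, "H": {"min": 6, "max": 6}}.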
def status(self):
unitS = "MB" if platform.system() == "Darwin" else "GB"
rusageMax = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
logger.info("Maximum resident memory size %.4f %s", rusageMax / 10 ** 6, unitS)
endTime = time.time()
logger.info("Status at %s (up %.4f seconds)", time.strftime("%Y %m %d %H:%M:%S", time.localtime()), endTime - self.__startTime)
| 1.757813
| 2
|
source/client/lib/tcp_client.py
|
nsubiron/mess-engine
| 1
|
12787079
|
<filename>source/client/lib/tcp_client.py
import logging
import socket
import struct
from contextlib import contextmanager
from util import to_hex_str
class TCPClient(object):
def __init__(self, host, port, timeout):
self._host = host
self._port = port
self._timeout = timeout
self._socket = None
def connect(self):
self._log('connecting...')
self._socket = socket.create_connection(address=(self._host, self._port), timeout=self._timeout)
self._socket.settimeout(self._timeout)
self._log('connected')
def disconnect(self):
if self._socket is not None:
self._socket.close()
def write(self, message):
header = struct.pack('<L', len(message))
self._socket.sendall(header + message)
def read(self):
header = self._read_n(4)
if header == '':
return ''
length = struct.unpack('<L', header)[0]
data = self._read_n(length)
return data
def _read_n(self, length):
buf = ''
while length > 0:
data = self._socket.recv(length)
if data == '':
raise RuntimeError('connection closed')
buf += data
length -= len(data)
return buf
def _log(self, message, *args):
logging.debug('tcpclient %s:%s - ' + message, self._host, self._port, *args)
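# Illustrative usage sketch; the host, port, timeout and payload below are placeholder
# assumptions, not values from any configured deployment.
#   client = TCPClient('127.0.0.1', 9000, timeout=5.0)
#   client.connect()
#   client.write('hello')   # framed as a 4-byte little-endian length header + payload
#   reply = client.read()   # reads one length-prefixed frame back
#   client.disconnect()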
| 2.765625
| 3
|
kinto_algolia/indexer.py
|
Kinto/kinto-algolia
| 0
|
12787080
|
import logging
from copy import deepcopy
from contextlib import contextmanager
from algoliasearch.http.verb import Verb
from algoliasearch.search_client import SearchClient
from algoliasearch.exceptions import AlgoliaException
from pyramid.exceptions import ConfigurationError
logger = logging.getLogger(__name__)
class Indexer(object):
def __init__(self, application_id, api_key, prefix="kinto"):
self.client = SearchClient.create(application_id, api_key)
self.prefix = prefix
self.tasks = []
def join(self):
for indexname, taskID in self.tasks:
index = self.client.init_index(indexname)
index.wait_task(taskID)
self.tasks = []
def set_extra_headers(self, headers):
self.client._config.headers.update(headers)
def indexname(self, bucket_id, collection_id):
return "{}-{}-{}".format(self.prefix, bucket_id, collection_id)
def create_index(
self, bucket_id, collection_id, settings=None, wait_for_creation=False
):
if settings is None:
settings = {}
self.update_index(
bucket_id, collection_id, settings=settings, wait_for_task=wait_for_creation
)
def update_index(
self, bucket_id, collection_id, settings=None, wait_for_task=False
):
indexname = self.indexname(bucket_id, collection_id)
if settings is not None:
index = self.client.init_index(indexname)
res = index.set_settings(settings, {"forwardToReplicas": True})
if wait_for_task:
res.wait()
else:
self.tasks.append((indexname, res[0]["taskID"]))
def delete_index(self, bucket_id, collection_id=None):
if collection_id is None:
response = self.client.list_indices()
index_prefix = self.indexname(bucket_id, "")
collections = [
i["name"]
for i in response["items"]
if i["name"].startswith(index_prefix)
]
else:
collections = [self.indexname(bucket_id, collection_id)]
for indexname in collections:
try:
self.client.init_index(indexname).delete()
except AlgoliaException as e: # pragma: no cover
if "HTTP Code: 404" not in str(e):
raise
def search(self, bucket_id, collection_id, **kwargs):
indexname = self.indexname(bucket_id, collection_id)
index = self.client.init_index(indexname)
query = kwargs.pop("query", "")
return index.search(query, kwargs)
def flush(self):
response = self.client.list_indices()
for index in response["items"]:
indexname = index["name"]
if indexname.startswith(self.prefix):
index = self.client.init_index(indexname)
index.clear_objects().wait()
index.delete().wait()
def isalive(self):
self.client._transporter.read(Verb.GET, "1/isalive", {}, None)
@contextmanager
def bulk(self):
bulk = BulkClient(self)
yield bulk
for indexname, requests in bulk.operations.items():
index = self.client.init_index(indexname)
res = index.batch(requests)
self.tasks.append((indexname, res[0]["taskID"]))
class BulkClient:
def __init__(self, indexer):
self.indexer = indexer
self.operations = {}
def index_record(self, bucket_id, collection_id, record, id_field="id"):
indexname = self.indexer.indexname(bucket_id, collection_id)
self.operations.setdefault(indexname, [])
obj = deepcopy(record)
record_id = obj.pop(id_field)
obj["objectID"] = record_id
self.operations[indexname].append({"action": "addObject", "body": obj})
def unindex_record(self, bucket_id, collection_id, record, id_field="id"):
indexname = self.indexer.indexname(bucket_id, collection_id)
record_id = record[id_field]
self.operations.setdefault(indexname, [])
self.operations[indexname].append(
{"action": "deleteObject", "body": {"objectID": record_id}}
)
def heartbeat(request):
"""Test that Algolia is operationnal.
:param request: current request object
:type request: :class:`~pyramid:pyramid.request.Request`
:returns: ``True`` is everything is ok, ``False`` otherwise.
:rtype: bool
"""
indexer = request.registry.indexer
try:
indexer.isalive()
except Exception as e:
logger.exception(e)
return False
else:
return True
def load_from_config(config):
settings = config.get_settings()
application_id = settings.get("algolia.application_id")
api_key = settings.get("algolia.api_key")
if application_id is None or api_key is None:
message = (
"kinto-algolia needs kinto.algolia.application_id "
"and kinto.algolia.api_key settings to be set."
)
raise ConfigurationError(message)
prefix = settings.get("algolia.index_prefix", "kinto")
indexer = Indexer(application_id=application_id, api_key=api_key, prefix=prefix)
return indexer
| 2.0625
| 2
|
stationery/migrations/0005_auto_20210713_1022.py
|
shaxpakistan/e-stationery
| 1
|
12787081
|
# Generated by Django 3.1.4 on 2021-07-13 07:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stationery', '0004_auto_20210710_0307'),
]
operations = [
migrations.DeleteModel(
name='Document',
),
migrations.AddField(
model_name='booking',
name='delivery_mode',
field=models.CharField(default='self taking', max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='booking',
name='doc_cost',
field=models.PositiveIntegerField(default=500),
preserve_default=False,
),
migrations.AddField(
model_name='booking',
name='doc_type',
field=models.CharField(default='pdf', max_length=255),
preserve_default=False,
),
]
| 1.664063
| 2
|
parkinglot/admin.py
|
Amankori2307/Park-Here
| 0
|
12787082
|
<filename>parkinglot/admin.py
from django.contrib import admin
from .models import User, ParkingLot, Charges, Parking, Transaction
# Register your models here.
admin.site.register(User)
admin.site.register(ParkingLot)
admin.site.register(Charges)
admin.site.register(Parking)
admin.site.register(Transaction)
| 1.5
| 2
|
raiutils/raiutils/models/model_utils.py
|
imatiach-msft/responsible-ai-toolbox
| 0
|
12787083
|
<gh_stars>0
# Copyright (c) Microsoft Corporation
# Licensed under the MIT License.
class SKLearn(object):
"""Provide scikit-learn related constants."""
EXAMPLES = 'examples'
LABELS = 'labels'
PREDICT = 'predict'
PREDICTIONS = 'predictions'
PREDICT_PROBA = 'predict_proba'
def is_classifier(model):
"""Check if the model is a classifier.
:return: True if the model is a classifier, False otherwise.
:rtype: bool
"""
return (model is not None and
hasattr(model, SKLearn.PREDICT_PROBA) and
model.predict_proba is not None)
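# Illustrative sketch, assuming scikit-learn is installed: LogisticRegression exposes
# predict_proba, while LinearRegression does not.
#   from sklearn.linear_model import LinearRegression, LogisticRegression
#   is_classifier(LogisticRegression())  # True
#   is_classifier(LinearRegression())    # False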
| 2.84375
| 3
|
django_socio_grpc/tests/test_authentication.py
|
socotecio/django-socio-grpc
| 13
|
12787084
|
import json
import logging
import mock
from django.test import TestCase, override_settings
from grpc._cython.cygrpc import _Metadatum
from django_socio_grpc.services import Service
from django_socio_grpc.settings import grpc_settings
from .utils import FakeContext
logger = logging.getLogger()
class FakeAuthentication:
def authenticate(self, context):
return ({"email": "<EMAIL>"}, context.META.get("HTTP_AUTHORIZATION"))
class DummyService(Service):
pass
service = DummyService()
def fake_create_service(self):
return service
class TestAuthenticationUnitary(TestCase):
@override_settings(
GRPC_FRAMEWORK={
"DEFAULT_AUTHENTICATION_CLASSES": [
"django_socio_grpc.tests.test_authentication.FakeAuthentication",
],
}
)
def test_settings(self):
# test settings correctly passed to grpc_settings
self.assertEqual(grpc_settings.DEFAULT_AUTHENTICATION_CLASSES, [FakeAuthentication])
def test_perform_authentication(self):
        # Create a DummyService for unit tests
dummy_service = DummyService()
dummy_service.context = FakeContext()
# Call func
with mock.patch(
"django_socio_grpc.services.Service.resolve_user"
) as mock_resolve_user:
mock_resolve_user.return_value = ({"email": "<EMAIL>"}, {})
dummy_service.perform_authentication()
mock_resolve_user.assert_called_once_with()
self.assertEqual(dummy_service.context.user, {"email": "<EMAIL>"})
self.assertEqual(dummy_service.context.auth, {})
def test_resolve_user(self):
dummy_service = DummyService()
dummy_service.context = FakeContext()
dummy_service.context.META = {"HTTP_AUTHORIZATION": "faketoken"}
dummy_service.authentication_classes = [FakeAuthentication]
auth_user_tuple = dummy_service.resolve_user()
self.assertEqual(auth_user_tuple, ({"email": "<EMAIL>"}, "faketoken"))
@mock.patch("django_socio_grpc.services.Service.check_permissions", mock.MagicMock())
def test_perform_authentication_called_in_before_action(self):
dummy_service = DummyService()
with mock.patch(
"django_socio_grpc.services.Service.perform_authentication"
) as mock_perform_authentication:
dummy_service.before_action()
mock_perform_authentication.assert_called_once_with()
@mock.patch(
"django_socio_grpc.servicer_proxy.ServicerProxy.create_service", new=fake_create_service
)
class TestAuthenticationIntegration(TestCase):
def setUp(self):
self.service = DummyService
self.servicer = self.service.as_servicer()
self.fake_context = FakeContext()
def dummy_method(service, request, context):
pass
self.service.DummyMethod = dummy_method
def test_user_and_token_none_if_no_auth_class(self):
self.servicer.DummyMethod(None, self.fake_context)
self.assertIsNone(service.context.user)
self.assertIsNone(service.context.auth)
def test_user_and_token_set(self):
self.service.authentication_classes = [FakeAuthentication]
metadata = (("headers", json.dumps({"Authorization": "faketoken"})),)
self.fake_context._invocation_metadata.extend((_Metadatum(k, v) for k, v in metadata))
self.servicer.DummyMethod(None, self.fake_context)
self.assertEqual(service.context.META, {"HTTP_AUTHORIZATION": "faketoken"})
self.assertEqual(service.context.user, {"email": "<EMAIL>"})
self.assertEqual(service.context.auth, "faketoken")
| 2.0625
| 2
|
models/gitm/python/plot_data_2d.py
|
hkershaw-brown/feature-preprocess
| 65
|
12787085
|
<gh_stars>10-100
#plot difference
#imports needed by this fragment (it also relies on variables defined elsewhere,
#e.g. VtecED, LonD, LatD, VtecD, timeD1, tcommon)
import subprocess
import numpy as np
import matplotlib.pyplot as plt
al = 0.5 #transparency
#vi,va = 0,max(VtecED[:,:,-1].flatten()) #specify zlim and colorlimits
vi, va = 0., 20.
#levels = np.arange(vi,va)
f = open('map.txt', 'r'); elev = np.array(map(int, f.read().split())).reshape((360, 180)).T; f.close() #load the Earth map
for ti in range(len(tcommon)): #sorted(tcommon): #timeE[1:3]:#
fig = plt.figure(1)
fig.set_size_inches((8,4))
fig.clf() #in case it was already open
ax = fig.add_subplot(111)
#map
N,T= np.meshgrid(np.linspace(0,359,360),np.linspace(-90,89,180))
ma = ax.contour(N, T, elev, 0, colors='black') #the Earth map plotted on the bottom of the z-axis
#data
x = LonD[timeD1==tcommon[ti]]
y = LatD[timeD1==tcommon[ti]]
z = VtecD[timeD1==tcommon[ti]]
scatD = ax.scatter(x, y, c=z, cmap=plt.cm.jet, s=25, vmin=vi, vmax=va, alpha=al, lw=0)
cbar = plt.colorbar(scatD)
cbar.ax.set_ylabel('TEC [TECU]')
#cbar.orientation='horizontal'
#labels, etc
#ax.set_title(str(ti))
plt.grid(True)
ax.set_xlim(0,360)
ax.set_ylim(-90,90)
ax.set_xticks(np.linspace(0,360,7) );
ax.set_yticks(np.linspace(-90,90,7) );
ax.set_xlabel('Longitude [deg]');
ax.set_ylabel('Latitude [deg]');
ax.text(0, .01, str(tcommon[ti]), fontsize=10, transform=ax.transAxes)
# fig.canvas.draw()
plt.savefig('data' + str(ti).zfill(2) + '.png', bbox_inches=0)
if ti==0: plt.savefig('data00.eps', bbox_inches=0)
print ti
#plt.ion()
print subprocess.check_output('convert data*.png data00.gif', shell=True) #convert to animated gif
# DART $Id$
# from <NAME>
# <next few lines under version control, do not edit>
# $URL$
# $Revision$
# $Date$
| 2.34375
| 2
|
legodb/indev.py
|
andrejacobs/learn-openfaas
| 0
|
12787086
|
<reponame>andrejacobs/learn-openfaas
from legodb.handler import handle
import os
from pprint import pprint
class Event:
def __init__(self):
self.body = None
self.headers = None
self.method = 'GET'
self.query = None
self.path = '/legosets'
class Context:
def __init__(self):
self.hostname = 'localhost'
if __name__ == "__main__":
os.environ['queue-name'] = 'johnny5'
os.environ['redis-url'] = 'redis://192.168.64.4:9988/0'
event = Event()
context = Context()
# response = handle(event, context)
# print(f'{event.path}')
# pprint(response)
# print('')
event.method = 'PUT'
event.path = '/legosets-download-images'
response = handle(event, context)
print(f'{event.path}')
pprint(response)
print('')
| 2.578125
| 3
|
Aulas/aula7_part1.py
|
CamilaContrucci/DIO_Python
| 0
|
12787087
|
# A function is anything that returns a value; a method is what does not return one. In Python a method is called a definition.
def soma(a, b):
return a + b
print(soma(1, 2))
print(soma(3, 4))
def subtracao(a, b):
return a - b
print(subtracao(5, 3))
# Showing how to do it by creating the class
class Calculadora:
    def __init__(self, num1, num2):  # it has two underscores before and two after it
self.valor_a = num1
self.valor_b = num2
def soma(self):
return self.valor_a + self.valor_b
def subtracao(self):
return self.valor_a - self.valor_b
def multiplicacao(self):
return self.valor_a * self.valor_b
def divisao(self):
return self.valor_a / self.valor_b
if __name__ == '__main__':
calculadora = Calculadora(10, 2)
    # instantiated the Calculadora class
print(calculadora.valor_a)
print(calculadora.soma())
print(calculadora.subtracao())
print(calculadora.multiplicacao())
print(calculadora.divisao())
# Do the same thing without defining the numbers
class Calculadora2:
def __init__(self):
        # it has two underscores before and two after it
        pass
        # does nothing, just so __init__ isn't left empty
        # the def __init__ and the pass could be dropped as well
    def soma(self, valor_a, valor_b):  # they have to be declared here
        return valor_a + valor_b
    def subtracao(self, valor_a, valor_b):  # they have to be declared here
        return valor_a - valor_b
    def multiplicacao(self, valor_a, valor_b):  # they have to be declared here
        return valor_a * valor_b
    def divisao(self, valor_a, valor_b):  # they have to be declared here
        return valor_a / valor_b
calculadora = Calculadora2()
print(calculadora.soma(10, 2))
print(calculadora.subtracao(10, 5))
print(calculadora.multiplicacao(2, 3))
print(calculadora.divisao(9, 3))
| 4.21875
| 4
|
survol/sources_types/CIM_DataFile/elftools_parse_classes.py
|
AugustinMascarelli/survol
| 0
|
12787088
|
<filename>survol/sources_types/CIM_DataFile/elftools_parse_classes.py
#!/usr/bin/env python
"""
Classes in ELF files
"""
import os
import sys
import lib_elf
import lib_util
import lib_common
from lib_properties import pc
Usable = lib_util.UsableLinuxBinary
def Main():
paramkeyMaxDepth = "Maximum depth"
cgiEnv = lib_common.CgiEnv(
parameters = { paramkeyMaxDepth : 1 })
maxDepth = int(cgiEnv.get_parameters( paramkeyMaxDepth ))
fileSharedLib = cgiEnv.GetId()
grph = cgiEnv.GetGraph()
nodeSharedLib = lib_common.gUriGen.FileUri( fileSharedLib )
nodeGlobalNamespace = lib_common.gUriGen.ClassUri( "__global_namespace", fileSharedLib )
grph.add( ( nodeSharedLib, pc.property_member, nodeGlobalNamespace ) )
try:
readelf = lib_elf.ReadElf(fileSharedLib)
except Exception:
exc = sys.exc_info()[1]
lib_common.ErrorMessageHtml("Caught:"+str(exc))
listNotes = readelf.display_notes()
for pr in listNotes:
infoMsg = pr[0] + ":" + pr[1]
grph.add( ( nodeSharedLib, pc.property_information, lib_common.NodeLiteral(infoMsg) ) )
listSyms, setClasses = readelf.display_symbol_tables()
Main.nodesByClass = dict()
def ClassToNode( classSplit, idx ):
clsNam = "::".join( classSplit[ : idx ] )
try:
nodeClass = Main.nodesByClass[clsNam]
except KeyError:
nodeClass = lib_common.gUriGen.ClassUri( clsNam, fileSharedLib )
# TODO: Create base classes ?
Main.nodesByClass[clsNam] = nodeClass
if idx > 1:
nodeBaseClass = ClassToNode( classSplit, idx - 1 )
grph.add( ( nodeBaseClass, pc.property_member, nodeClass ) )
else:
grph.add( ( nodeSharedLib, pc.property_member, nodeClass ) )
return nodeClass
classAlreadyDone = set()
for sym in listSyms:
lenSplit = len(sym.m_splt)
if lenSplit > maxDepth:
spltShort = sym.m_splt[:maxDepth]
# TODO: Do the join only once.
joinShort = "::".join(spltShort)
# TODO: Should test and insert in one lookup only.
if joinShort in classAlreadyDone:
continue
classAlreadyDone.add( joinShort )
# So it cannot be a symbol but a class or a namespace.
clsNod = ClassToNode( spltShort, maxDepth )
# It is already linked to its ancestors.
else:
spltShort = sym.m_splt
# symNod = lib_common.gUriGen.SymbolUri( lib_util.EncodeUri(sym.m_name), fileSharedLib )
symNod = lib_common.gUriGen.SymbolUri( sym.m_name_demang, fileSharedLib )
grph.add( ( symNod, lib_common.MakeProp("Version"), lib_common.NodeLiteral(sym.m_vers) ) )
if lenSplit > 1:
clsNod = ClassToNode( sym.m_splt, lenSplit - 1 )
grph.add( ( clsNod, pc.property_symbol_defined, symNod ) )
else:
grph.add( ( nodeGlobalNamespace, pc.property_symbol_defined, symNod ) )
cgiEnv.OutCgiRdf("LAYOUT_RECT",[ pc.property_symbol_defined, pc.property_member ] )
if __name__ == '__main__':
Main()
| 2.15625
| 2
|
bin/fusearchd.py
|
larroy/fusearch
| 1
|
12787089
|
#!/usr/bin/env python3
"""Fusearch daemon"""
import argparse
import os
import signal
import sys
import logging
import textract
import functools
import progressbar
import tempfile
import pickle
import io
from fusearch.index import Index
from fusearch.model import Document
from fusearch.tokenizer import get_tokenizer, tokfreq, Tokenizer
from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader
from fusearch.config import Config
from multiprocessing import Process, Queue, cpu_count
import collections.abc
progressbar_index_widgets_ = [
" [",
progressbar.Timer(format="Elapsed %(elapsed)s"),
", ",
progressbar.SimpleProgress(),
" files"
#'count: ', progressbar.Counter(),
"] ",
progressbar.Bar(),
" (",
progressbar.ETA(),
") ",
]
def cleanup() -> None:
pass
def reload_config() -> None:
pass
def config_signal_handlers() -> None:
signal.signal(signal.SIGHUP, signal.SIG_IGN)
signal.signal(signal.SIGTERM, cleanup)
signal.signal(signal.SIGUSR1, reload_config)
signal.signal(signal.SIGTTIN, signal.SIG_IGN)
signal.signal(signal.SIGTSTP, signal.SIG_IGN)
signal.signal(signal.SIGTTOU, signal.SIG_IGN)
def redirect_stream(system_stream, target_stream):
""" Redirect a system stream to a specified file.
    :param system_stream: A file object representing a standard I/O
stream.
:param target_stream: The target file object for the redirected
stream, or ``None`` to specify the null device.
:return: ``None``.
`system_stream` is a standard system stream such as
``sys.stdout``. `target_stream` is an open file object that
should replace the corresponding system stream object.
If `target_stream` is ``None``, defaults to opening the
operating system's null device and using its file descriptor.
"""
if target_stream is None:
target_fd = os.open(os.devnull, os.O_RDWR)
else:
target_fd = target_stream.fileno()
os.dup2(target_fd, system_stream.fileno())
def fork_exit_parent() -> None:
pid = os.fork()
if pid > 0:
sys.exit(0)
def daemonize() -> None:
fork_exit_parent()
os.setsid()
fork_exit_parent()
os.chdir("/")
config_signal_handlers()
os.umask(0o022)
redirect_stream(sys.stdin, None)
redirect_stream(sys.stdout, open("/tmp/fusearch.out", "a"))
redirect_stream(sys.stderr, open("/tmp/fusearch.err", "a"))
fusearch_main()
def config_argparse() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description="fusearch daemon", epilog="")
parser.add_argument("-f", "--foreground", action="store_true", help="Don't daemonize")
parser.add_argument("-c", "--config", type=str, default="/etc/fusearch/config.yaml", help="config file")
return parser
def to_text(file: str) -> str:
assert os.path.isfile(file)
try:
txt_b = textract.process(file, method="pdftotext")
# TODO more intelligent decoding? there be dragons
txt = bytes_to_str(txt_b)
# print(file)
# print(len(txt))
# print(txt[:80])
# print('-------------------')
except Exception as e:
txt = ""
logging.exception("Exception while extracting text from '%s'", file)
# TODO mark it as failed instead of empty text
return txt
def document_from_file(file: str, tokenizer: Tokenizer) -> Document:
mtime_latest = mtime(file)
filename = filename_without_extension(file)
txt = filename + "\n" + to_text(file)
# Detect language and check that the document makes sense, OCR returns garbage sometimes
# TODO: add filename to content
document = Document(url=file, filename=filename, content=txt, tokfreq=tokfreq(tokenizer(txt)), mtime=mtime_latest)
return document
def needs_indexing(index: Index, file: str) -> bool:
mtime_latest = mtime(file)
# document = index.document_from_url(file)
mtime_last_known = index.mtime(file)
if not mtime_last_known or mtime_last_known and mtime_latest > mtime_last_known:
# logging.debug("needs_indexing: need '%s'", file)
return True
else:
# logging.debug("needs_indexing: NOT need '%s'", file)
return False
def get_index(path: str, config: Config) -> Index:
index_db = os.path.join(path, ".fusearch.db")
index = Index({"provider": "sqlite", "filename": index_db, "create_db": True}, tokenizer=get_tokenizer(config))
logging.debug("get_index: '%s' %d docs", index_db, index.doc_count)
return index
class NeedsIndexFileGenerator(object):
def __init__(self, path, config):
self.path = path
self.config = config
self.index = get_index(path, config)
assert os.path.isdir(path)
def __call__(self) -> collections.abc.Iterable:
""":returns a generator of files which are updated from the mtime in the index"""
file_needs_indexing = functools.partial(needs_indexing, self.index)
return filter(file_needs_indexing, file_generator_ext(self.path, self.config.include_extensions))
def file_producer(path: str, config: Config, file_queue: Queue, file_inventory: io.IOBase) -> None:
for file in pickle_loader(file_inventory):
# logging.debug("file_producer: %s", file)
file_queue.put(file)
logging.debug("file_producer is done")
def text_extract(config: Config, file_queue: Queue, document_queue: Queue):
# logging.debug("text_extract started")
tokenizer = get_tokenizer(config)
while True:
file = file_queue.get()
if file is None:
logging.debug("text_extract is done")
return
logging.debug(
"text_extract: file_queue.qsize %d document_queue.qsize %d", file_queue.qsize(), document_queue.qsize()
)
logging.debug("text_extract: '%s'", file)
# logging.debug("text_extract: %s", file)
document = document_from_file(file, tokenizer)
document_queue.put(document)
def document_consumer(path: str, config: Config, document_queue: Queue, file_count: int) -> None:
index = get_index(path, config)
if config.verbose:
pbar = progressbar.ProgressBar(max_value=file_count, widgets=progressbar_index_widgets_)
file_i = 0
while True:
doc = document_queue.get()
logging.debug("document_consumer(%d): document_queue.qsize %d", os.getpid(), document_queue.qsize())
if doc is None:
logging.debug("Document consumer, no more elements in the queue")
if config.verbose:
pbar.finish()
return
try:
index.add_document(doc)
logging.debug("document_consumer(%d): added %s", os.getpid(), doc.url)
except Exception as e:
logging.exception("document_consumer: index.add_document exception. Document[%s]", doc.url)
if config.verbose:
pbar.update(file_i)
file_i += 1
def gather_files(path, config, file_inventory) -> int:
""":returns file count"""
if not os.path.isdir(path):
logging.error("Not a directory: '%s', skipping indexing", path)
        return 0
logging.info("Indexing %s", path)
logging.info("Calculating number of files to index (.=100files)")
if config.verbose:
widgets = [
" [",
progressbar.Timer(format="Elapsed %(elapsed)s"),
" ",
"count: ",
progressbar.Counter(),
"] ",
progressbar.BouncingBar(),
]
pbar = progressbar.ProgressBar(widgets=widgets)
file_count = 0
for file in NeedsIndexFileGenerator(path, config)():
pickle.dump(file, file_inventory)
file_count += 1
# if config.verbose and (file_count % 100) == 0:
# sys.stdout.write('.')
# sys.stdout.flush()
if config.verbose:
pbar.update(file_count)
# if config.verbose:
# sys.stdout.write('\n')
if config.verbose:
pbar.finish()
file_inventory.seek(0)
return file_count
def index_do(path, config) -> None:
file_inventory = tempfile.TemporaryFile()
file_count = gather_files(path, config, file_inventory)
logging.info("%d files to process", file_count)
if config.parallel_extraction:
index_parallel(path, config, file_count, file_inventory)
else:
index_serial(path, config, file_count, file_inventory)
def index_parallel(path: str, config: Config, file_count: int, file_inventory) -> None:
#
# file_producer -> N * test_extract -> document_consumer
#
# TODO: check that processes are alive to prevent deadlocks on exceptions in children
file_queue = Queue(cpu_count() * 8)
document_queue = Queue(256)
text_extract_procs = []
file_producer_proc = Process(
name="file producer", target=file_producer, daemon=True, args=(path, config, file_queue, file_inventory)
)
file_producer_proc.start()
document_consumer_proc = Process(
name="document consumer", target=document_consumer, daemon=True, args=(path, config, document_queue, file_count)
)
for i in range(cpu_count()):
p = Process(
name="text extractor {}".format(i),
target=text_extract,
daemon=True,
args=(config, file_queue, document_queue),
)
text_extract_procs.append(p)
p.start()
document_consumer_proc.start()
logging.debug("child processes started")
logging.debug("joining producer")
file_producer_proc.join()
logging.debug("joining text_extract")
for p in text_extract_procs:
file_queue.put(None)
for p in text_extract_procs:
logging.debug("joining text_extract %s", p)
p.join()
document_queue.put(None)
logging.debug("joining document_consumer")
document_consumer_proc.join()
logging.info("Parallel indexing finished")
def index_serial(path, config, file_count, file_inventory):
if config.verbose:
pbar = progressbar.ProgressBar(max_value=file_count, widgets=progressbar_index_widgets_)
file_i = 0
tokenizer = get_tokenizer(config)
logging.info("Indexing started")
index = get_index(path, config)
for file in pickle_loader(file_inventory):
doc = document_from_file(file, tokenizer)
try:
index.add_document(doc)
except Exception as e:
logging.exception("index_serial: index.add_document exception. Document[%s]", doc.url)
if config.verbose:
pbar.update(file_i)
file_i += 1
if config.verbose:
pbar.finish()
def fusearch_main(args) -> int:
logging.info("reading config from %s", args.config)
config = Config.from_file(args.config)
logging.info("%s", config)
for path in config.index_dirs:
index_do(path, config)
def script_name() -> str:
""":returns: script name with leading paths removed"""
return os.path.split(sys.argv[0])[1]
def config_logging() -> None:
import time
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.basicConfig(format="{}: %(asctime)sZ %(name)s %(levelname)s %(message)s".format(script_name()))
logging.Formatter.converter = time.gmtime
def main() -> int:
config_logging()
parser = config_argparse()
args = parser.parse_args()
if not args.foreground:
return daemonize()
fusearch_main(args)
if __name__ == "__main__":
sys.exit(main())
| 2.171875
| 2
|
src/apps/authentication/backends.py
|
snicoper/snicoper.com
| 2
|
12787090
|
<reponame>snicoper/snicoper.com<gh_stars>1-10
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.db.models import Q
from . import settings as auth_settings
UserModel = get_user_model()
class EmailOrUsernameModelBackend(ModelBackend):
def authenticate(self, request, username=None, password=None, **kwargs):
"""Backend para autenticación.
Dependiendo de AUTH_TYPE comprobara que el tipo de autenticación del usuario:
both: Email y username.
username: username.
email: email.
Returns:
User: El User en caso de éxito, None en caso contrario.
"""
auth_type = auth_settings.AUTH_TYPE.lower()
if auth_type == 'username':
return super().authenticate(request, username, password, **kwargs)
try:
if auth_type == 'both':
user = UserModel.objects.get(
Q(username__exact=username) | Q(email__exact=username)
)
else:
user = UserModel.objects.get(email__exact=username)
if user.check_password(password):
return user
except UserModel.DoesNotExist:
return None
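# Illustrative sketch of enabling this backend in settings.py; the dotted path below is
# an assumption based on this app's layout.
#   AUTHENTICATION_BACKENDS = [
#       'apps.authentication.backends.EmailOrUsernameModelBackend',
#   ]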
| 2.40625
| 2
|
mmvec/multimodal.py
|
mortonjt/deep-mae
| 68
|
12787091
|
<gh_stars>10-100
import os
import time
from tqdm import tqdm
import numpy as np
import tensorflow as tf
from tensorflow.contrib.distributions import Multinomial, Normal
import datetime
class MMvec(object):
def __init__(self, u_mean=0, u_scale=1, v_mean=0, v_scale=1,
batch_size=50, latent_dim=3,
learning_rate=0.1, beta_1=0.8, beta_2=0.9,
clipnorm=10., device_name='/cpu:0', save_path=None):
""" Build a tensorflow model for microbe-metabolite vectors
Returns
-------
loss : tf.Tensor
The log loss of the model.
Notes
-----
To enable a GPU, set the device to '/device:GPU:x'
where x is 0 or greater
"""
p = latent_dim
self.device_name = device_name
if save_path is None:
basename = "logdir"
suffix = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
save_path = "_".join([basename, suffix])
self.p = p
self.u_mean = u_mean
self.u_scale = u_scale
self.v_mean = v_mean
self.v_scale = v_scale
self.batch_size = batch_size
self.latent_dim = latent_dim
self.learning_rate = learning_rate
self.beta_1 = beta_1
self.beta_2 = beta_2
self.clipnorm = clipnorm
self.save_path = save_path
def __call__(self, session, trainX, trainY, testX, testY):
""" Initialize the actual graph
Parameters
----------
session : tf.Session
Tensorflow session
trainX : sparse array in coo format
            Training input OTU table, where rows are samples and columns are
            observations
        trainY : np.array
            Training output metabolite table
testX : sparse array in coo format
Test input OTU table, where rows are samples and columns are
observations. This is mainly for cross validation.
testY : np.array
Test output metabolite table. This is mainly for cross validation.
"""
self.session = session
self.nnz = len(trainX.data)
self.d1 = trainX.shape[1]
self.d2 = trainY.shape[1]
self.cv_size = len(testX.data)
# keep the multinomial sampling on the cpu
# https://github.com/tensorflow/tensorflow/issues/18058
with tf.device('/cpu:0'):
X_ph = tf.SparseTensor(
indices=np.array([trainX.row, trainX.col]).T,
values=trainX.data,
dense_shape=trainX.shape)
Y_ph = tf.constant(trainY, dtype=tf.float32)
X_holdout = tf.SparseTensor(
indices=np.array([testX.row, testX.col]).T,
values=testX.data,
dense_shape=testX.shape)
Y_holdout = tf.constant(testY, dtype=tf.float32)
total_count = tf.reduce_sum(Y_ph, axis=1)
batch_ids = tf.multinomial(
tf.log(tf.reshape(X_ph.values, [1, -1])),
self.batch_size)
batch_ids = tf.squeeze(batch_ids)
X_samples = tf.gather(X_ph.indices, 0, axis=1)
X_obs = tf.gather(X_ph.indices, 1, axis=1)
sample_ids = tf.gather(X_samples, batch_ids)
Y_batch = tf.gather(Y_ph, sample_ids)
X_batch = tf.gather(X_obs, batch_ids)
with tf.device(self.device_name):
self.qUmain = tf.Variable(
tf.random_normal([self.d1, self.p]), name='qU')
self.qUbias = tf.Variable(
tf.random_normal([self.d1, 1]), name='qUbias')
self.qVmain = tf.Variable(
tf.random_normal([self.p, self.d2-1]), name='qV')
self.qVbias = tf.Variable(
tf.random_normal([1, self.d2-1]), name='qVbias')
qU = tf.concat(
[tf.ones([self.d1, 1]), self.qUbias, self.qUmain], axis=1)
qV = tf.concat(
[self.qVbias, tf.ones([1, self.d2-1]), self.qVmain], axis=0)
            # regression coefficients distribution
Umain = Normal(loc=tf.zeros([self.d1, self.p]) + self.u_mean,
scale=tf.ones([self.d1, self.p]) * self.u_scale,
name='U')
Ubias = Normal(loc=tf.zeros([self.d1, 1]) + self.u_mean,
scale=tf.ones([self.d1, 1]) * self.u_scale,
name='biasU')
Vmain = Normal(loc=tf.zeros([self.p, self.d2-1]) + self.v_mean,
scale=tf.ones([self.p, self.d2-1]) * self.v_scale,
name='V')
Vbias = Normal(loc=tf.zeros([1, self.d2-1]) + self.v_mean,
scale=tf.ones([1, self.d2-1]) * self.v_scale,
name='biasV')
du = tf.gather(qU, X_batch, axis=0, name='du')
dv = tf.concat([tf.zeros([self.batch_size, 1]),
du @ qV], axis=1, name='dv')
tc = tf.gather(total_count, sample_ids)
Y = Multinomial(total_count=tc, logits=dv, name='Y')
num_samples = trainX.shape[0]
norm = num_samples / self.batch_size
logprob_vmain = tf.reduce_sum(
Vmain.log_prob(self.qVmain), name='logprob_vmain')
logprob_vbias = tf.reduce_sum(
Vbias.log_prob(self.qVbias), name='logprob_vbias')
logprob_umain = tf.reduce_sum(
Umain.log_prob(self.qUmain), name='logprob_umain')
logprob_ubias = tf.reduce_sum(
Ubias.log_prob(self.qUbias), name='logprob_ubias')
logprob_y = tf.reduce_sum(Y.log_prob(Y_batch), name='logprob_y')
self.log_loss = - (
logprob_y * norm +
logprob_umain + logprob_ubias +
logprob_vmain + logprob_vbias
)
# keep the multinomial sampling on the cpu
# https://github.com/tensorflow/tensorflow/issues/18058
with tf.device('/cpu:0'):
# cross validation
with tf.name_scope('accuracy'):
cv_batch_ids = tf.multinomial(
tf.log(tf.reshape(X_holdout.values, [1, -1])),
self.cv_size)
cv_batch_ids = tf.squeeze(cv_batch_ids)
X_cv_samples = tf.gather(X_holdout.indices, 0, axis=1)
X_cv = tf.gather(X_holdout.indices, 1, axis=1)
cv_sample_ids = tf.gather(X_cv_samples, cv_batch_ids)
Y_cvbatch = tf.gather(Y_holdout, cv_sample_ids)
X_cvbatch = tf.gather(X_cv, cv_batch_ids)
holdout_count = tf.reduce_sum(Y_cvbatch, axis=1)
cv_du = tf.gather(qU, X_cvbatch, axis=0, name='cv_du')
pred = tf.reshape(
holdout_count, [-1, 1]) * tf.nn.softmax(
tf.concat([tf.zeros([
self.cv_size, 1]),
cv_du @ qV], axis=1, name='pred')
)
self.cv = tf.reduce_mean(
tf.squeeze(tf.abs(pred - Y_cvbatch))
)
# keep all summaries on the cpu
with tf.device('/cpu:0'):
tf.summary.scalar('logloss', self.log_loss)
tf.summary.scalar('cv_rmse', self.cv)
tf.summary.histogram('qUmain', self.qUmain)
tf.summary.histogram('qVmain', self.qVmain)
tf.summary.histogram('qUbias', self.qUbias)
tf.summary.histogram('qVbias', self.qVbias)
self.merged = tf.summary.merge_all()
self.writer = tf.summary.FileWriter(
self.save_path, self.session.graph)
with tf.device(self.device_name):
with tf.name_scope('optimize'):
optimizer = tf.train.AdamOptimizer(
self.learning_rate, beta1=self.beta_1, beta2=self.beta_2)
gradients, self.variables = zip(
*optimizer.compute_gradients(self.log_loss))
self.gradients, _ = tf.clip_by_global_norm(
gradients, self.clipnorm)
self.train = optimizer.apply_gradients(
zip(self.gradients, self.variables))
tf.global_variables_initializer().run()
def ranks(self):
modelU = np.hstack(
(np.ones((self.U.shape[0], 1)), self.Ubias, self.U))
modelV = np.vstack(
(self.Vbias, np.ones((1, self.V.shape[1])), self.V))
res = np.hstack((np.zeros((self.U.shape[0], 1)), modelU @ modelV))
res = res - res.mean(axis=1).reshape(-1, 1)
return res
def fit(self, epoch=10, summary_interval=1000, checkpoint_interval=3600,
testX=None, testY=None):
""" Fits the model.
Parameters
----------
epoch : int
Number of epochs to train
summary_interval : int
Number of seconds until a summary is recorded
checkpoint_interval : int
Number of seconds until a checkpoint is recorded
Returns
-------
loss: float
log likelihood loss.
cv : float
cross validation loss
"""
iterations = epoch * self.nnz // self.batch_size
losses, cvs = [], []
cv = None
last_checkpoint_time = 0
last_summary_time = 0
saver = tf.train.Saver()
now = time.time()
for i in tqdm(range(0, iterations)):
if now - last_summary_time > summary_interval:
res = self.session.run(
[self.train, self.merged, self.log_loss, self.cv,
self.qUmain, self.qUbias,
self.qVmain, self.qVbias]
)
train_, summary, loss, cv, rU, rUb, rV, rVb = res
self.writer.add_summary(summary, i)
last_summary_time = now
else:
res = self.session.run(
[self.train, self.log_loss,
self.qUmain, self.qUbias,
self.qVmain, self.qVbias]
)
train_, loss, rU, rUb, rV, rVb = res
losses.append(loss)
cvs.append(cv)
cv = None
# checkpoint model
now = time.time()
if now - last_checkpoint_time > checkpoint_interval:
saver.save(self.session,
os.path.join(self.save_path, "model.ckpt"),
global_step=i)
last_checkpoint_time = now
self.U = rU
self.V = rV
self.Ubias = rUb
self.Vbias = rVb
return losses, cvs
| 2.421875
| 2
|
nenupy/crosslet/__init__.py
|
AlanLoh/nenupy
| 4
|
12787092
|
<gh_stars>1-10
#! /usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. inheritance-diagram:: nenupy.crosslet.xstdata nenupy.crosslet.tvdata
:parts: 3
.. inheritance-diagram:: nenupy.crosslet.imageprod
:parts: 3
"""
from .uvw import UVW
from .imageprod import NearField, NenuFarTV
from .crosslet import Crosslet
from .xstdata import XST_Data
from .tvdata import TV_Data
| 1.171875
| 1
|
apps/account/migrations/0008_merge_20211016_0545.py
|
mobius-labs/app
| 1
|
12787093
|
<reponame>mobius-labs/app
# Generated by Django 3.2.6 on 2021-10-16 05:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('account', '0006_user_business_card_theme'),
('account', '0007_alter_user_email'),
]
operations = [
]
| 1.15625
| 1
|
vesc_driver/src/tm_heading.py
|
Taek-16/vesc_study
| 1
|
12787094
|
#!/usr/bin/env python
import rospy
from pyproj import Proj, transform
import numpy as np
from math import cos, sin, pi
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from sensor_msgs.msg import NavSatFix
from ublox_msgs.msg import NavPVT
#Projection definition
#UTM-K
proj_UTMK = Proj(init='epsg:5178')
#WGS1984
proj_WGS84 = Proj(init='epsg:4326')
class Tm_heading:
def __init__(self):
self.pubtm = rospy.Publisher('current_tm', Pose, queue_size=1)
self.pubhead = rospy.Publisher('heading', Float64, queue_size=1)
self.subtm = rospy.Subscriber("ublox_gps/fix", NavSatFix, self.tm,queue_size=1)
self.subheading = rospy.Subscriber("ublox_gps/navpvt", NavPVT, self.heading,queue_size=1)
self.run()
def tm(self,Fix):
current_tm= Pose()
lon=Fix.longitude
lat=Fix.latitude
x, y = transform(proj_WGS84, proj_UTMK, lon, lat)
current_tm.position.x = x
current_tm.position.y = y
self.pubtm.publish(current_tm)
def heading(self,head):
heading = Float64()
heading.data=5*pi/2 - np.deg2rad(float(head.heading / 100000))
self.pubhead.publish(heading)
def run(self):
rate=rospy.Rate(1)
while not rospy.is_shutdown():
rate.sleep()
def main():
rospy.init_node('tm_heading',anonymous=True)
Tm_heading()
if __name__ == "__main__":
main()
| 2.5
| 2
|
tests/test_wfs_USDASSURGO.py
|
tilmanb/OWSLib
| 0
|
12787095
|
# Test ability of OWSLib.wfs to interact with USDA SSURGO WFS 1.0.0 web service
# Contact e-mail: <EMAIL>
import unittest
from owslib.wfs import WebFeatureService
class USDASSURGOWFSTestCase(unittest.TestCase):
def runTest(self):
minX = -76.766960
minY = 39.283611
maxX = -76.684120
maxY = 39.338394
filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
wfs = WebFeatureService('http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs', version='1.0.0')
response = wfs.getfeature(typename=('MapunitPolyExtended',), filter=filter, propertyname=None)
self.assertTrue(response.read().find('<wfs:FeatureCollection') > 0,
'Unable to find feature dataset in WFS response')
| 2.5
| 2
|
Exercise 09/exercise_code/util/__init__.py
|
CornellLenard/Deep-Learning-Course-Exercises
| 0
|
12787096
|
<reponame>CornellLenard/Deep-Learning-Course-Exercises
"""Util functions"""
from .vis_utils import show_all_keypoints
from .save_model import save_model
| 1.125
| 1
|
29.py
|
zseen/practicepython-exercises
| 0
|
12787097
|
from enum import Enum
class Game(object):
class WinState(Enum):
FIRST_WINS = "First player wins"
SECOND_WINS = "Second player wins"
NOBODY_WINS = "Nobody wins"
def __init__(self, size):
self.size = size
self.numFields = self.size ** 2
self.board = None
self.counter = None
def play(self):
self.__resetBoard()
while True:
if self.__turnAndCheckGameOver('X'):
break
if self.__turnAndCheckGameOver('Y'):
break
# Private methods
def __resetBoard(self):
self.board = [[0 for _ in range(self.size)] for _ in range(self.size)]
self.counter = 0
def __turnAndCheckGameOver(self, mark):
self.__askForInput(mark)
self.__drawBoard()
if self.__isGameOver():
return True
self.counter += 1
if self.counter >= self.numFields:
print("Draw")
return True
def __drawBoard(self):
for j in range(0, self.size):
print(" ---" * self.size)
print("| ", end="")
for i in range(0, self.size):
print(self.board[j][i], "|", end=" ")
print("")
print(" ---" * self.size)
print("")
def __askForInput(self, mark):
while True:
try:
inputString = input("%s, it is your turn! Type the row and column (1-3)"
" where you would like to put your %s in this format: r,c.:" % (mark, mark))
inputList = inputString.split(",")
r = int(inputList[0])
c = int(inputList[1])
if len(inputList) != 2:
print("Ohh, too many numbers.")
elif r < 1 or c < 1:
print("Ohh, your numbers should positive integers.")
elif self.board[r - 1][c - 1] == 0:
self.board[r - 1][c - 1] = mark
break
else:
print("Ohh, this field is already marked.")
except ValueError as ve:
print("Ohh, a ValueError: " + str(ve))
except IndexError:
print("Ohh, an IndexError, please check if your numbers are <= %s" % str(self.size))
except NameError as ne:
print("Ohh, a NameError: " + str(ne))
except Exception as e:
print("Ohh, Exception: " + str(e))
def __winCheck(self):
horizontal = self.__horizontalWin()
if horizontal != Game.WinState.NOBODY_WINS:
return horizontal
vertical = self.__verticalWin()
if vertical != Game.WinState.NOBODY_WINS:
return vertical
return self.__diagonalWin()
def __isGameOver(self):
if self.__winCheck() == Game.WinState.FIRST_WINS:
print(Game.WinState.FIRST_WINS.value)
return True
if self.__winCheck() == Game.WinState.SECOND_WINS:
print(Game.WinState.SECOND_WINS.value)
return True
return False
@staticmethod
def __markToWinner(mark):
if mark == 'X':
return Game.WinState.FIRST_WINS
elif mark == 'Y':
return Game.WinState.SECOND_WINS
else:
raise ValueError("Unexpected winner")
@staticmethod
def __allSame(items):
return all(x == items[0] for x in items)
def __horizontalWin(self):
N = len(self.board)
for j in range(0, N):
rowList = []
for i in range(0, N):
rowList.append(self.board[j][i])
if (Game.__allSame(rowList)) and rowList[0] != 0:
return Game.__markToWinner(rowList[0])
return Game.WinState.NOBODY_WINS
def __verticalWin(self):
N = len(self.board)
for i in range(0, N):
columnList = []
for j in range(0, N):
columnList.append(self.board[j][i])
if (Game.__allSame(columnList)) and columnList[0] != 0:
return Game.__markToWinner(columnList[0])
return Game.WinState.NOBODY_WINS
def __diagonalWin(self):
N = len(self.board)
mainDiagonalList = []
for i in range(0, N):
mainDiagonalList.append(self.board[i][i])
if (Game.__allSame(mainDiagonalList)) and mainDiagonalList[0] != 0:
return Game.__markToWinner(mainDiagonalList[0])
secondaryDiagonalList = []
for i in range(0, N):
secondaryDiagonalList.append(self.board[i][N - 1 - i])
if (Game.__allSame(secondaryDiagonalList)) and secondaryDiagonalList[0] != 0:
return Game.__markToWinner(secondaryDiagonalList[0])
return Game.WinState.NOBODY_WINS
def main():
g = Game(3)
g.play()
if __name__ == "__main__":
main()
| 3.78125
| 4
|
tests/test_show.py
|
dmkskn/isle
| 0
|
12787098
|
import inspect
from itertools import islice
import pytest
import isle.show
from isle import Show
def test_get_latest():
show = isle.show.get_latest()
assert isinstance(show, Show)
def test_get_popular():
shows = isle.show.get_popular()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_top_rated():
shows = isle.show.get_top_rated()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_airing_today():
shows = isle.show.get_airing_today()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_on_the_air():
shows = isle.show.get_on_the_air()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
| 2.15625
| 2
|
examples/operators_plot/example_tnorm_plot.py
|
mmunar97/discrete-fuzzy-operators
| 0
|
12787099
|
import numpy
from discrete_fuzzy_operators.base.operators.binary_operators.fuzzy_discrete_binary_operator import \
FuzzyDiscreteBinaryOperator
from discrete_fuzzy_operators.builtin_operators.discrete.tnorms import TnormExamples
if __name__ == "__main__":
# EXAMPLE: Plot of some known t-norms.
lukasiewicz_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.LUKASIEWICZ, n=7)
lukasiewicz_operator.plot_operator(figure_size=(700, 700), figure_title="Lukasiewicz t-norm")
lukasiewicz_operator.plot_three_dimensional_operator(draw_diagonal=True,
figure_size=(700, 700), figure_title="Lukasiewicz tensor")
drastic_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.DRASTIC, n=7)
drastic_operator.plot_operator(figure_size=(700, 700), figure_title="Drastic t-norm")
nilpotent_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.NILPOTENT_MINIMUM, n=7)
nilpotent_operator.plot_operator(figure_size=(700, 700), figure_title="Nilpotent minimum t-norm")
| 2.140625
| 2
|
GetInfo.py
|
XarisA/nethack
| 0
|
12787100
|
<filename>GetInfo.py
#!/usr/bin/env python
import os
from socket import *
import dns.resolver
import dns.reversename
# TODO Pass arguments from terminal
def reach_host(hostname,arguments='-c 1'):
# Pinging the host
print ('[+] Pinging ' + hostname)
if os.system("ping " + arguments + ' '+ hostname) == 0:
print ("Host appears to be up ")
else:
print ("Host is down or does not reply to ping requests ")
print ("Host's ip is " , gethostbyname(hostname))
def nslookup(hostname, typ3='MX'):
answers = dns.resolver.query(hostname, typ3)
for rdata in answers:
print ('Host', rdata.exchange, 'has preference', rdata.preference)
def name_reverse(hostip):
n = dns.reversename.from_address(hostip)
print(n)
#print(dns.reversename.to_address(n))
def main():
hostname = "dnspython.org"
    print ('[+] Gathering information about host')
reach_host(hostname)
print ("Mail server lookup ")
nslookup(hostname)
if __name__ == "__main__":
main()
| 3.53125
| 4
|
aptfimleapparser/utils/create_testinput_h5.py
|
nomad-coe/nomad-parser-aptfim-leap
| 0
|
12787101
|
<reponame>nomad-coe/nomad-parser-aptfim-leap
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 3 15:26:33 2021
@author: kuehbach
A more comprehensive example with metadata for atom probe microscopy experiments, which matches the fields discussed with the
International Field Emission Society's Atom Probe Technical Committee and uses the agreed top-level hierarchy definitions
from the FAIRmat Area B discussions.
"""
import os, sys, glob
from pathlib import Path
import numpy as np
import h5py
from datetime import datetime, timezone
h5fn = 'example.h5'
#h5fn = 'attribute.test.nx5'
h5w = h5py.File( h5fn, 'w')
#dst = h5w.create_group('MeasurementID'+str(simid))
#testing the writing and reading of group/dataset attributes
#dst.attrs['Temperature'] = 273.15
#dst.attrs['Unit'] = 'K'
#h5w.close()
#h5r = h5py.File( h5fn, 'r')
#src = h5r['MeasurementID'+str(simid)]
#src.attrs.keys()
#h5r.close()
#Create the HDF5 tree of data and metadata. We populate it here with dummy data, but pack the example with
#the most interesting quantities that should ideally be reported when measuring an atom probe microscopy specimen.
#We make no attempt here to parse results coming from a processing of such measured data, such as iso-surface
#computations or other computational geometry, spatial statistics, or AI-method results applied on APT datasets;
#this should be part of another parser. There is a sophisticated open-source alternative to the community's
#commercial post-processing tool available, which I contribute as a plugin for the NOMAD analytics
#toolkit; it then makes sense to define with that tool how we should go about storing results from
#computational and data analytics tasks applied on measured atom probe data sets.
#MaRkusKuehbach, I proposed the following structure for such an HDF5 file (needs to be cast into a paper tbd):
#Measurement
#Calibration
#Reconstruction
#Ranging
#why is it smart to define these four sections?
#As of 2021 virtually all APT/FIM groups world-wide use microscopes of a single company.
#This company has legally protected its source code, so the purest results that experimentalists can get
#from an APT experiment are encoded in a proprietary file format to which experimentalists are not legally
#allowed full access for all data fields; in addition, the format is in key parts undocumented for the public.
#Most importantly, the file contains calibration values and routines which are specific to the AMETEK/Cameca
#(the manufacturer) instrument and hence are also not shared with the public.
#The only way that is legally safe, prior to future negotiations with the company, to get your results out of this cage
#is to use the likewise proprietary analysis software, formerly known as IVAS and since version 4 as AP Suite.
#During such initial processing after your microscope session the proprietary calibrations
#are applied via this software and you get a reconstructed dataset and calibrated mass-to-charge-state ratio values.
#This is what 99% of the community works with and is used to working with, so it is our first aim to get into NOMAD.
#There are very few specialists' groups currently (2021) which build their own microscope from scratch
#e.g. Rouen group (Vurpillot group), FAU Erlangen (Felfer group), Stuttgart (Schmitz group), PNNL group
#These groups have their own way of storing their data, luckily all of them are open to data sharing.
#Further work is required to get their formats supported as well in NOMAD
#The above concept of having four layers mirrors the current procedure of creating an APT/FIM experiment
#Measurement stores what is immediately relevant to the actual microscope session, i.e. data acquisition while
#evaporating the specimen, i.e. physically destroying the specimen during the measurement by removing atoms
#Calibration stores or could be used to store the calibrations, so delay-line detector settings
#Reconstruction stores or could be used to store all metadata related to the process of evaluating numerically
#a physical model of how field evaporation proceeds and the physical assumption inherent in this model
#what the initial specimen shape is to compute a model of how the atoms were likely positioned in the
#evaporated specimen, keep in mind in APT the specimen is the lens of the microscope!
#Ranging is used to store all metadata related to how a mass-to-charge-state ratio of an ion is assumed
#to translate into a so-called ion type. Because of the limited mass-to-charge resolution and ambiguous cases,
#ranging has to build on assumptions; here I use a recently proposed (Kuehbach et al. Microsc. Microanal. 2021)
#very general strategy to store information (metadata) on the ion type definitions, far more comprehensive than the
#current formats (RNG and RRNG).
#It is possible that during field evaporation not only single element isotopes are detected but also
#combinations of multiple ions or fragments hitting the detector. In the APT/FIM community such ions
#are known as molecular ions. We can represent each isotope by a unique hash value of Nprotons + 256*Nneutrons;
#if no isotope information is desired, i.e. we wish to communicate only element names, we set Nneutrons = 0 by definition.
#Furthermore, we reserve the special hash value 0, i.e. Nprotons = 0 && Nneutrons = 0, for defining a default type
#for ions for which we either have not found or are not interested in performing a ranging.
#This yields an HDF5-based data format which can handle all current ways in which APT measurements are accessible
#to experimentalists. If in the future the manufacturers are more willing to share details, this format also has
#the required flexibility to store such pieces of information in the above sections.
#I should say that currently most APT file formats (pos, epos, apt) do not store all of the (meta)data handled
#by the HDF5-based file instantiated here. So the main purpose of this example is
#to have a tool available for developing the parser and to reality-check whether the parser
#is sophisticated enough to handle the cases which scientists experience and ask for in their daily practice.
#I am a fan of keeping things organized, so each section has an ID; thereby one could, for instance,
#have (and this is common in practice) multiple ranging definitions for the same reconstructed tuple of atom positions
#and mass-to-charge values.
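#Illustrative helper (a sketch, not used by the script below): the isotope hash scheme
#described above, hashvalue = Nprotons + 256*Nneutrons, with 0 reserved for the default/unranged ion type.
def isotope_hash(n_protons, n_neutrons=0):
    return n_protons + 256 * n_neutrons
#e.g. isotope_hash(27, 32) == 8219 encodes 59Co, and isotope_hash(0, 0) == 0 is the default type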
simid = 1
#dummy number of atoms, kept unrealistically low here so that the example file stays very small;
#in real experiments N is between about 1.0e6 and 1.5e9 ions. For larger dataset volumes the
#specimen has to be physically longer and longer, because gaining volume by making wider specimens is limited:
#you cannot realize the high fields at the apex required to provoke atom removal.
#However, the longer the specimen gets, the less mechanically stable it is and the higher the chances
#of failure, i.e. rupture.
N = 10
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Name', data = np.str('My precious PtIr specimen'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/UUID', data = np.str('DummySampleUUID'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/TrivialName', data = np.str('PtIr'))
asciiList = [n.encode("ascii", "ignore") for n in ['Pt', 'Ir']]
charlength = 'S' + str(len(max(asciiList, key = len)))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Elements', (len(asciiList),1), charlength, asciiList)
f32 = [0.9, 0.1]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/NominalComposition', (len(f32),1), 'f4', f32)
dst.attrs['Unit'] = 'at.-%'
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Shape', data = np.str('tip'))
f32 = [24.0, 24.0, 100.0]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Dimensions', (len(f32),1), 'f4', f32)
dst.attrs['Unit'] = 'nm'
###MK::the following should be an array of element names! variable length string array
asciiList = [n.encode("ascii", "ignore") for n in ['Platinum', 'Ir']]
charlength = 'S' + str(len(max(asciiList, key = len)))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Constituents/0/Elements', (len(asciiList),1), charlength, asciiList)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Constituents/0/Description', data = np.str('solid solution'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Constituents/0/CrystalStructure', data = np.str('fcc'))
f32 = [0.9, 0.1]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/Constituents/0/NominalComposition', (len(f32),1), 'f4', f32)
dst.attrs['Unit'] = 'at.-%'
dst = h5w.create_group('MeasurementID'+str(simid)+'/Metadata/Sample/Material/RegionOfInterest')
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/RegionOfInterest/Shape', data = np.str(''))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/RegionOfInterest/Dimensions', data = np.str(''))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/RegionOfInterest/Offset', data = np.str(''))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Sample/Material/RegionOfInterest/Stride', data = np.str(''))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Experiment/Name', data = np.str('APTFIM-LEAP'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/Name', data = np.str('DummyComputerName'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/UUID', data = np.str('DummyComputerUUID'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/OperatingSystem', data = np.str('Win10 DummyVersion'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/MainMemory', data = np.str('DummyMainMemory'))
#asciiList = [n.encode("ascii", "ignore") for n in ['Xeon', 'Xeon']]
#charlength = 'S' + str(len(max(asciiList, key = len)))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/CPUs/0/Name', data = np.str('Xeon'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/GPGPUs/0/Name', data = np.str('Nvidia V100 32GB'))
#dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Environment/Computers/0/Storage/0/Name', data = np.str('DummySSD'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/0/Name', data = np.str('<NAME>'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/0/Affiliation', data = np.str('FHI Berlin'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/0/Email', data = np.str('<EMAIL>'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/1/Name', data = np.str('Nomad Experimentalist'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/1/Affiliation', data = np.str('NomadLand'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Author/1/Email', data = np.str('<EMAIL>'))
dtg = datetime.now(timezone.utc)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Experiment/TimeStamp/StartUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Experiment/TimeStamp/EndUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dtl = datetime.now()
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Experiment/TimeStamp/StartLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Experiment/TimeStamp/EndLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Name', data = np.str('DummyLeapMicroscope'))
####MK::for systems with laser only or hybrid systems
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Laser/Emitter/Name', data = np.str('DummyLaserEmitter'))
f32 = 100.0
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Laser/Emitter/LaserEnergy', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'pJ'
###MK::for systems with HV pulser only or hybrid systems
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/HighVoltagePulser/Name', data = np.str('DummyHVPulser'))
f32 = 250.0
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/HighVoltagePulser/PulseRateTarget', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'kHz'
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Reflectron/Name', data = np.str('DummyReflectron'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Aperture/0/Name', data = np.str('DummyAperture'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/AnalysisChamber/Name', data = np.str('DummyAnalysisChamber'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/UltraHighVacuumPump/Name', data = np.str('DummyUHVPump'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/SpecimenHolder/Name', data = np.str('DummySpecimenHolder'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/Name', data = np.str('DelayLineDetector'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/Readout', data = np.str('DelayLine'))
u32 = [1024, 1024]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/Resolution', (len(u32),1), 'u4', u32)
dst.attrs['Unit'] = 'pixel^2'
f32 = [20.0, 20.0]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/Dimensions', (len(f32),1), 'f4', f32)
dst.attrs['Unit'] = 'cm'
f32 = 300.0
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/FlightPathLength', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'mm'
f32 = 0.0025
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/DetectionRateTarget', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'ions/pulse'
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/0/GeometryOpticalEquivalent', data = np.str('DummyEquivalent'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/1/Name', data = np.str('BaseTemperatureDetector'))
f32 = 40.0
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/1/BaseTemperatureTarget', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'K'
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/2/Name', data = np.str('AnalysisChamberPressureDetector'))
f32 = 1.0e-10
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/Component/Detector/2/AnalysisChamberPressure', (1,1), 'f4', f32)
dst.attrs['Unit'] = 'Torr'
f32o = [0.0, 0.0, 0.0]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Origin', (3,1), 'f4', f32o)
f32m = [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/AnalysisChamber/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/AnalysisChamber/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/AnalysisChamber/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/AnalysisChamber/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/SpecimenHolder/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/SpecimenHolder/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/SpecimenHolder/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/SpecimenHolder/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Specimen/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Specimen/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Specimen/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Specimen/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/LaserProbe/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/LaserProbe/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/LaserProbe/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/LaserProbe/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Detector/0/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Detector/0/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Detector/0/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Detector/0/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Metadata/UserGenerated/ProcessStatus/Comment', data = np.str('Successfully created dummy file for testing'))
dst = h5w.create_group('MeasurementID'+str(simid)+'/Metadata/DataHeader')
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/FlightPath/Spatial', data = np.str('DummyFlightPathSpatial'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/FlightPath/Timing', data = np.str('DummyFlightPathTiming'))
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/Laser/Emitter/Current', data = np.float32(0.0))
#dst.attrs['Unit'] = ''
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/Laser/Emitter/Wavelength', data = np.float32(0.0))
dst.attrs['Unit'] = 'nm'
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/Laser/Emitter/Incidence', data = np.float32(0.0))
#dst.attrs['Unit'] = 'Torr'
dst = h5w.create_group('MeasurementID'+str(simid)+'/Data/HighVoltagePulser')
f32 = np.full( (N,1), 50.0, 'f4')
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/AnalysisChamber/Pressure', (N,1), 'f4', f32)
dst = h5w.create_group('MeasurementID'+str(simid)+'/Data/UltraHighVacuumPump')
f32 = np.array([np.linspace(1,N,N),]*3).transpose()
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/SpecimenHolder/Position', (N,3), 'f4', f32)
dst.attrs['Unit'] = 'cm'
dst = h5w.create_group('MeasurementID'+str(simid)+'/Data/Detector/0')
dst = h5w.create_group('MeasurementID'+str(simid)+'/Data/Detector/1')
f32 = np.full( (N,1), 40.0, 'f4')
dst = h5w.create_dataset('MeasurementID'+str(simid)+'/Data/Detector/1/SpecimenTemperature', (N,1), 'f4', f32)
dst.attrs['Unit'] = 'K'
dst = h5w.create_group('MeasurementID'+str(simid)+'/Data/Images')
#calibration is a pure computational process so we do not have a specimen or sample, only a dataset !
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Dataset/0/Name', data = np.str('My precious RHIT/HITS file for the PtIr sample'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Dataset/0/UUID', data = np.str('DummyDatasetUUID'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Name', data = np.str('APTFIM-Reconstruction'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/Name', data = np.str('DummyComputerName'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/UUID', data = np.str('DummyComputerUUID'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/OperatingSystem', data = np.str('Win10 DummyVersion'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/MainMemory', data = np.str('DummyMainMemory'))
asciiList = [n.encode("ascii", "ignore") for n in ['Xeon', 'Xeon']]
charlength = 'S' + str(len(max(asciiList, key = len)))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/CPUSocket/0/CPU/0/Name', data = np.str('Xeon'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/AcceleratorSocket/0/Accelerator/Name', data = np.str('Nvidia V100 32GB'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/Computer/0/Storage/0/Name', data = np.str('DummySSD'))
dtg = datetime.now(timezone.utc)
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dtl = datetime.now()
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/Name', data = np.str('IVAS'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/Version', data = np.str('v3.6.4'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/UUID', data = np.str('DummyInstrumentID'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/Component/HitPositionFilter/Name', data = np.str('all'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/Component/TimeOfFlightFilter/Name', data = np.str('all'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/Component/BowlCorrection/Name', data = np.str('DummyBowlCorrectionName'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/Author/0/Name', data = np.str('IVAS application'))
dst = h5w.create_dataset('CalibrationID'+str(simid)+'/Metadata/UserGenerated/ProcessStatus/Comment', data = np.str('Successful calibration inside IVAS'))
dst = h5w.create_group('CalibrationID'+str(simid)+'/Metadata/DataHeader')
dst = h5w.create_group('CalibrationID'+str(simid)+'/Data')
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Dataset/0/Name', data = np.str('My precious RHIT/HITS file for the PtIr sample'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Dataset/0/UUID', data = np.str('DummyDatasetUUID'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Name', data = np.str('APTFIM-Reconstruction'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/Name', data = np.str('DummyComputerName'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/UUID', data = np.str('DummyComputerUUID'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/OperatingSystem', data = np.str('Win10 DummyVersion'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/MainMemory', data = np.str('DummyMainMemory'))
#asciiList = [n.encode("ascii", "ignore") for n in ['Xeon', 'Xeon']]
#charlength = 'S' + str(len(max(asciiList, key = len)))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/CPUs/0/Name', data = np.str('Xeon'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/GPGPUs/0/Name', data = np.str('Nvidia V100 32GB'))
#dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/Computer/0/Storage/0/Name', data = np.str('DummySSD'))
dtg = datetime.now(timezone.utc)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dtl = datetime.now()
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Name', data = np.str('IVAS'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Version', data = np.str('v3.6.4'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/UUID', data = np.str('DummyInstrumentID'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Component/ReconstructionAlgorithm/Name', data = np.str('DummyReconstructionName'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Component/ReconstructionAlgorithm/Protocol', data = np.str('IVAS (modified Bas et al.)'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Component/ReconstructionAlgorithm/FieldFactor', (1,), 'f4', data = np.float32(3.0))
dst.attrs['Unit'] = '1'
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Component/ReconstructionAlgorithm/ImageCompressionFactor', (1,), 'f4', data = np.float32(1.01))
dst.attrs['Unit'] = '1'
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/Component/ReconstructionAlgorithm/AtomicVolume', (1,), 'f4', data = np.float32(50.0))
dst.attrs['Unit'] = 'nm^3'
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reference/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reconstruction/Origin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reconstruction/Matrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reconstruction/MapToRefOrigin', (3,1), 'f4', f32o)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Instrument/CoordinateSystem/Reconstruction/MapToRefMatrix', (4,4), 'f4', f32m)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/Author/0/Name', data = np.str('IVAS application'))
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Metadata/UserGenerated/ProcessStatus/Comment', data = np.str('Successful reconstruction with the IVAS software'))
dst = h5w.create_group('ReconstructionID'+str(simid)+'/Metadata/DataHeader')
u32 = np.linspace(1,N,N)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/PulseNumber', (N, 1), 'u4', data = u32)
dst.attrs['Unit'] = '1'
f32 = np.full( (N,1), 200e3, 'f4')
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/PulseFrequency', (N,1), 'f4', data = f32)
dst.attrs['Unit'] = 'kHz' ###???
f32 = np.array([np.linspace(1,N,N),]*2).transpose()
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/HitPositions', (N, 2), 'f4', data = f32)
dst.attrs['Unit'] = 'cm'
f32 = np.full( (N,1), 0.0, 'f4')
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/LaserEnergy', (N,1), 'f4', data = f32)
dst.attrs['Unit'] = 'pJ' ###???
f32 = np.full( (N,3), 0.0, 'f4')
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/LaserPosition', (N,3), 'f4', data = f32)
#dst.attrs['Unit'] = '' ###???
f32 = np.array(np.linspace(1,N,N)*1.0e3)
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/StandingVoltage', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'V'
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/PulseVoltage', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'V'
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/ReflectronVoltage', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'V'
f32 = np.array([np.linspace(1,N,N)*0.001,]*3).transpose()
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/SpecimenHolderPosition', (N, 3), 'f4', data = f32)
#dst.attrs['Unit'] = '' ###???
f32 = np.float32( np.linspace(1,N,N)*1.0e-9 )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/TimeOfFlight', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'ns'
f32 = np.float32( np.linspace(1,N,N)*10.0 )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/MassToChargeRatio', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'Da'
f32 = np.full( (N,1), 40.0, 'f4' )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/SpecimenTemperature', (N, 1), 'f4', data = f32)
dst.attrs['Unit'] = 'K'
u32 = np.full( (N,1), 1, 'u4' )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/Multiplicity', (N, 1), 'u4', data = u32)
dst.attrs['Unit'] = '1'
u32 = np.full( (N,1), 0, 'u4' )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/PulseSinceLastEventPulse', (N, 1), 'u4', data = u32)
dst.attrs['Unit'] = '1'
u32 = np.full( (N,1), 1, 'u4' )
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/IonsPerPulse', (N, 1), 'u4', data = u32)
dst.attrs['Unit'] = '1'
f32 = np.array([np.linspace(1,N,N),]*3).transpose()
dst = h5w.create_dataset('ReconstructionID'+str(simid)+'/Data/IonPositions', (N, 3), 'f4', data = f32)
dst.attrs['Unit'] = 'nm'
#with APSuite6 typically people combine reconstruction and ranging
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Dataset/0/Name', data = np.str('My precious RHIT/HITS file for the PtIr sample'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Dataset/0/UUID', data = np.str('DummyDatasetUUID'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/Name', data = np.str('APTFIM-Ranging'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/Environment/Computers/0/Name', data = np.str('DummyComputerName'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/UUID', data = np.str('DummyComputerUUID'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/OperatingSystem', data = np.str('Win10 DummyVersion'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/MainMemory', data = np.str('DummyMainMemory'))
#asciiList = [n.encode("ascii", "ignore") for n in ['Xeon', 'Xeon']]
#charlength = 'S' + str(len(max(asciiList, key = len)))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/CPUs/0/Name', data = np.str('Xeon'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/GPGPUs/0/Name', data = np.str('Nvidia V100 32GB'))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Environment/Computers/0/Storage/0/Name', data = np.str('DummySSD'))
dtg = datetime.now(timezone.utc)
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndUtc', data = np.str(dtg)) #np.str(datetime.now(timezone.utc).strftime("%Y-%m-%d")+'-'+datetime.now(timezone.utc).strftime("%H:%M:%S"))
dtl = datetime.now()
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/TimeStamp/StartLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Analysis/TimeStamp/EndLocal', data = np.str(dtl)) #datetime.now().strftime("%Y-%m-%d")+'-'+datetime.now().strftime("%H:%M:%S"))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Name', data = np.str('IVAS'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Version', data = np.str('v3.6.4'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/UUID', data = np.str('DummyInstrumentID'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/TimeOfFlightToMassToCharge/Comment', data = np.str('m proportional to calibrated tof with proprietary calibration factors'))
#dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Components/SubSampling')
#dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Components/SubSampling/MassToCharge')
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Components/SubSampling/MassToCharge/Type')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/MassToCharge/Name', data = np.str('DummyFilterMassToChargeName'))
f32 = [0.0, 1200.0]
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/MassToCharge/LinearRanges/0/MinMaxMassToCharge', (1,2), 'f4', data = f32)
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/Multiplicity/Name', data = np.str('DummyFilterMultiplicityName'))
dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/Multiplicity/LinearRanges/0/MinMaxMultiplicity')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/IonPosition/Name', data = np.str('DummyFilterIonPositionsName'))
f32 = [[-25.0, 25.0], [-25.0, 25.0], [0.1, 120.0]]
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/IonPosition/AABBEnsemble/0/MinMaxPositions', (3,2), 'f4', data = f32)
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/IonID/Name', data = np.str('DummyFilterIonIDName'))
u32 = [0, 1, N]
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/Filter/IonID/MinIncrMaxLinearSubSampling', (1,3), 'u4', data = u32)
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/BinningAlgorithm/MassToCharge/Type', data = np.str('0.001 Da'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/BackgroundAlgorithm/Name', data = np.str('DummyBackgroundAlgorithmName'))
dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Component/BackgroundAlgorithm/Ranges')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/PeakDeconvolutionAlgorithm/Name', data = np.str('DummyPeakDeconvolutionAlgorithmName'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/PeakDetectionAlgorithm/Name', data = np.str('DummyPeakDetectionAlgorithmName'))
dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Component/PeakDetectionAlgorithm/Ranges')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Instrument/Component/SignalSmoothingAlgorithm/Name', data = np.str('DummySignalSmoothingAlgorithmName'))
dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/Instrument/Component/SignalSmoothingAlgorithm/Ranges')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/Author/0/Name', data = np.str('IVAS application'))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Metadata/UserGenerated/ProcessStatus/Comment', data = np.str('Successful ranging using IVAS'))
dst = h5w.create_group('RangingID'+str(simid)+'/Metadata/DataHeader')
#dst = h5w.create_group('RangingID'+str(simid)+'/Data/ExecutionDetails')
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/ExecutionDetails/MaxNumberOfThreadsPerProcess', (1,), 'u4', data = np.uint32(1))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/ExecutionDetails/MaxNumberOfGPGPUsPerProcess', (1,), 'u4', data = np.uint32(0))
#dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/ExecutionDetails/MaxNumberOfProcesses', (1,), 'u4', data = np.uint32(1))
Natoms = 32
dst = h5w.create_group('RangingID'+str(simid)+'/Data/IonSpecies')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonSpecies/NumberOfDisjointSpecies', data = np.uint32(1))
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonSpecies/MaxNumberOfAtomsPerIon', data = np.uint32(Natoms))
dst.attrs['Comment'] = 'specifies the maximum length of the molecular ion isotope vector'
u16 = np.zeros([Natoms], np.uint16)
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonSpecies/0/IsotopeVector', data = u16)
dst.attrs['Comment'] = 'vector of hash values which decode hash = Nprotons + 256*Nneutrons'
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonSpecies/0/Charge', data = np.int8(0))
dst.attrs['Unit'] = 'e' #charge state in multiples of the elementary charge
f32 = [0.000, 0.001]
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonSpecies/0/MassToChargeRanges', (1,2), 'f4', data = f32)
dst.attrs['Unit'] = 'Da'
u8 = np.zeros([N,1], 'u1')
dst = h5w.create_dataset('RangingID'+str(simid)+'/Data/IonLabels', (N,1), 'u1', u8)
dst = h5w.close()
| 1.976563
| 2
|
pyarc/qcba/transforms/prune_literals.py
|
jirifilip/CBA
| 19
|
12787102
|
<reponame>jirifilip/CBA<gh_stars>10-100
import pandas
import numpy as np
from ..data_structures import QuantitativeDataFrame, Interval
class RuleLiteralPruner:
def __init__(self, quantitative_dataframe):
self.__dataframe = quantitative_dataframe
def transform(self, rules):
copied_rules = [ rule.copy() for rule in rules ]
trimmed = [ self.__trim(rule) for rule in copied_rules ]
return trimmed
def produce_combinations(self, array):
arr_len = len(array)
for i in range(arr_len):
combination = array[0:i] + array[i+1:arr_len]
yield combination
def __trim(self, rule):
"""
if type(rule) != QuantitativeCAR:
raise Exception("type of rule must be QuantClassAssociationRule")
"""
attr_removed = False
literals = rule.antecedent
consequent = rule.consequent
rule.update_properties(self.__dataframe)
dataset_len = self.__dataframe.size
if len(literals) < 1:
return rule
while True:
for literals_combination in self.produce_combinations(literals):
if not literals_combination:
continue
copied_rule = rule.copy()
copied_rule.antecedent = literals_combination
copied_rule.update_properties(self.__dataframe)
if copied_rule.confidence > rule.confidence:
rule.support = copied_rule.support
rule.confidence = copied_rule.confidence
rule.rulelen = copied_rule.rulelen
rule.antecedent = copied_rule.antecedent
attr_removed = True
break
else:
attr_removed = False
if attr_removed == False:
break
return rule
| 2.484375
| 2
|
algorithms/larrys-array.py
|
gajubadge11/HackerRank-1
| 340
|
12787103
|
#!/bin/python3
import sys
def rotate(A, pos):
A[pos], A[pos+1], A[pos+2] = A[pos+1], A[pos+2], A[pos]
def larrysArray(A):
for _ in range(len(A)):
for ind in range(1, len(A) - 1):
a, b, c = A[ind-1], A[ind], A[ind+1]
#print("ind = {} A = {} B = {} C = {}".format(ind, a, b, c))
if a > b or c < a:
#print("rotating 1")
rotate(A, ind-1)
if A == sorted(A):
return 'YES'
else:
return 'NO'
if __name__ == "__main__":
t = int(input().strip())
for a0 in range(t):
n = int(input().strip())
A = list(map(int, input().strip().split(' ')))
result = larrysArray(A)
print(result)
| 3.65625
| 4
|
tests/checker/test_master.py
|
siccegge/ctf-gameserver
| 0
|
12787104
|
import datetime
from unittest.mock import patch
from ctf_gameserver.checker.master import MasterLoop
from ctf_gameserver.checker.metrics import DummyQueue
from ctf_gameserver.lib.checkresult import CheckResult
from ctf_gameserver.lib.database import transaction_cursor
from ctf_gameserver.lib.test_util import DatabaseTestCase
class MasterTest(DatabaseTestCase):
fixtures = ['tests/checker/fixtures/master.json']
def setUp(self):
self.master_loop = MasterLoop(self.connection, None, 'service1', '/dev/null', None, 2, 8, 10,
'0.0.%s.1', b'secret', {}, DummyQueue())
def test_handle_flag_request(self):
with transaction_cursor(self.connection) as cursor:
cursor.execute('UPDATE scoring_gamecontrol SET start=NOW()')
task_info = {
'service': 'service1',
'_team_id': 2,
'team': 92,
'tick': 1
}
params1 = {'tick': 1}
resp1 = self.master_loop.handle_flag_request(task_info, params1)
params2 = {'tick': 1}
resp2 = self.master_loop.handle_flag_request(task_info, params2)
params3 = {'tick': 1, 'payload': 'TmV2ZXIgZ28='}
resp3 = self.master_loop.handle_flag_request(task_info, params3)
self.assertEqual(resp1, resp2)
self.assertNotEqual(resp1, resp3)
params4 = {'tick': 2}
resp4 = self.master_loop.handle_flag_request(task_info, params4)
params5 = {'tick': 2}
resp5 = self.master_loop.handle_flag_request(task_info, params5)
self.assertEqual(resp4, resp5)
self.assertNotEqual(resp1, resp4)
params6 = {}
self.assertIsNone(self.master_loop.handle_flag_request(task_info, params6))
# Changing the start time changes all flags
with transaction_cursor(self.connection) as cursor:
# SQLite syntax for tests
cursor.execute('UPDATE scoring_gamecontrol SET start=DATETIME("now", "+1 hour")')
resp1_again = self.master_loop.handle_flag_request(task_info, params1)
resp4_again = self.master_loop.handle_flag_request(task_info, params4)
self.assertNotEqual(resp1, resp1_again)
self.assertNotEqual(resp4, resp4_again)
def test_handle_result_request(self):
task_info = {
'service': 'service1',
'_team_id': 2,
'team': 92,
'tick': 1
}
param = CheckResult.OK.value
start_time = datetime.datetime.utcnow().replace(microsecond=0)
self.assertIsNone(self.master_loop.handle_result_request(task_info, param))
with transaction_cursor(self.connection) as cursor:
cursor.execute('SELECT COUNT(*) FROM scoring_statuscheck')
self.assertEqual(cursor.fetchone()[0], 1)
cursor.execute('SELECT status FROM scoring_statuscheck'
' WHERE service_id = 1 AND team_id = 2 AND tick = 1')
self.assertEqual(cursor.fetchone()[0], CheckResult.OK.value)
cursor.execute('SELECT placement_end FROM scoring_flag'
' WHERE service_id = 1 AND protecting_team_id = 2 AND tick = 1')
self.assertGreaterEqual(cursor.fetchone()[0], start_time)
task_info['tick'] = 2
param = CheckResult.FAULTY.value
start_time = datetime.datetime.utcnow().replace(microsecond=0)
self.assertIsNone(self.master_loop.handle_result_request(task_info, param))
with transaction_cursor(self.connection) as cursor:
cursor.execute('SELECT status FROM scoring_statuscheck'
' WHERE service_id = 1 AND team_id = 2 AND tick = 2')
self.assertEqual(cursor.fetchone()[0], CheckResult.FAULTY.value)
cursor.execute('SELECT placement_end FROM scoring_flag'
' WHERE service_id = 1 AND protecting_team_id = 2 AND tick = 2')
self.assertGreaterEqual(cursor.fetchone()[0], start_time)
task_info['tick'] = 3
param = 'Not an int'
self.assertIsNone(self.master_loop.handle_result_request(task_info, param))
with transaction_cursor(self.connection) as cursor:
cursor.execute('SELECT status FROM scoring_statuscheck'
' WHERE service_id = 1 AND team_id = 2 AND tick = 3')
self.assertIsNone(cursor.fetchone())
cursor.execute('SELECT placement_end FROM scoring_flag'
' WHERE service_id = 1 AND protecting_team_id = 2 AND tick = 3')
self.assertIsNone(cursor.fetchone()[0])
param = 1337
self.assertIsNone(self.master_loop.handle_result_request(task_info, param))
with transaction_cursor(self.connection) as cursor:
cursor.execute('SELECT status FROM scoring_statuscheck'
' WHERE service_id = 1 AND team_id = 2 AND tick = 3')
self.assertIsNone(cursor.fetchone())
cursor.execute('SELECT placement_end FROM scoring_flag'
' WHERE service_id = 1 AND protecting_team_id = 2 AND tick = 3')
self.assertIsNone(cursor.fetchone()[0])
@patch('ctf_gameserver.checker.database.get_check_duration')
def test_update_launch_params(self, check_duration_mock):
# Very short duration, but should be ignored in tick 1
check_duration_mock.return_value = 1
self.master_loop.update_launch_params(-1)
self.assertEqual(self.master_loop.tasks_per_launch, 0)
with transaction_cursor(self.connection) as cursor:
cursor.execute('UPDATE scoring_gamecontrol SET current_tick=1')
self.master_loop.update_launch_params(1)
self.assertEqual(self.master_loop.tasks_per_launch, 1)
with transaction_cursor(self.connection) as cursor:
for i in range(10, 400):
username = 'team{}'.format(i)
                email = '<EMAIL>'.format(username)
cursor.execute('INSERT INTO auth_user (id, username, first_name, last_name, email, password,'
' is_superuser, is_staff, is_active, date_joined)'
' VALUES (%s, %s, %s, %s, %s, %s, false, false, true, NOW())',
(i, username, '', '', '', 'password'))
cursor.execute('INSERT INTO registration_team (user_id, informal_email, image, affiliation,'
' country, nop_team)'
' VALUES (%s, %s, %s, %s, %s, false)', (i, email, '', '', 'World'))
cursor.execute('INSERT INTO scoring_flag (service_id, protecting_team_id, tick)'
' VALUES (1, %s, 1)', (i,))
self.master_loop.update_launch_params(1)
self.assertEqual(self.master_loop.tasks_per_launch, 49)
check_duration_mock.return_value = None
self.master_loop.update_launch_params(10)
self.assertEqual(self.master_loop.tasks_per_launch, 49)
check_duration_mock.return_value = 3600
self.master_loop.update_launch_params(10)
self.assertEqual(self.master_loop.tasks_per_launch, 49)
check_duration_mock.return_value = 90
self.master_loop.update_launch_params(10)
self.assertEqual(self.master_loop.tasks_per_launch, 7)
self.master_loop.interval = 5
self.master_loop.update_launch_params(10)
self.assertEqual(self.master_loop.tasks_per_launch, 4)
check_duration_mock.return_value = 10
self.master_loop.interval = 10
self.master_loop.tick_duration = datetime.timedelta(seconds=90)
self.master_loop.update_launch_params(10)
self.assertEqual(self.master_loop.tasks_per_launch, 7)
| 2.265625
| 2
|
run_test_sequence.py
|
MichaMucha/evolving-classifier
| 0
|
12787105
|
<filename>run_test_sequence.py
__author__ = 'michalmucha'
import csv
from datetime import datetime
from EVABCD import Classifier, SequenceOfActions, Action
# TEST_DATA_FILENAME = 'long_test_sequence2.csv'
TEST_DATA_FILENAME = 'testcase/sequence-1.csv'
USERS = 60
SEQUENCE_LENGTH = 25
SUBSEQUENCE_LENGTH = 4
PROMPT = False
RECURSIVE = True
REPETITIONS = 2
def run():
# load sequence
# assign all sequences to user objects
# add these sequences to EVABCD
# run EVABCD
# INPUT FORMAT:
# [ userID , sequenceElementValue , timestamp ]
EVABCD_Classifier = Classifier(subsequenceLength=SUBSEQUENCE_LENGTH, recursive=RECURSIVE)
sequence = [1, 'ls', datetime.now()]
sequence = []
with open(TEST_DATA_FILENAME, 'r', encoding='utf-8') as f:
r = csv.reader(f, delimiter=';')
next(r)
for row in r:
row[2] = datetime.strptime(row[2],"%d/%m/%Y %H:%M:%S")
sequence.append(row)
for i in range(REPETITIONS):
for userID in range(1,36):
actions = [Action(x[1],x[2]) for x in sequence if int(x[0]) == userID]
EVABCD_Classifier.evolve(userID, SequenceOfActions(actions))
if PROMPT:
print(EVABCD_Classifier)
input('enter to continue')
for userID in range(36,USERS+1):
actions = [Action(x[1],x[2]) for x in sequence if int(x[0]) == userID]
EVABCD_Classifier.evolve(userID, SequenceOfActions(actions[:14]))
if PROMPT:
print(EVABCD_Classifier)
input('enter to continue')
print('FINISHED EVOLUTIONS')
print(EVABCD_Classifier)
EVABCD_Classifier.classifyAll()
EVABCD_Classifier.writeReport('report_rts.txt')
if __name__ == '__main__':
run()
| 2.765625
| 3
|
moona/http/request_headers.py
|
katunilya/mona
| 2
|
12787106
|
<gh_stars>1-10
from pymon import Future
from moona.http.context import HTTPContext
from moona.http.handlers import HTTPFunc, HTTPHandler, handler, skip
def has_header(name: str) -> HTTPHandler:
"""Processes next `HTTPFunc` only when request has passed header.
Args:
name (str): to check for.
"""
raw_name = name.encode("UTF-8").lower()
@handler
def _handler(nxt: HTTPFunc, ctx: HTTPContext) -> Future[HTTPContext | None]:
match ctx.request_headers.get(raw_name, None):
case None:
return skip(ctx)
case _:
return nxt(ctx)
return _handler
def matches_header(name: str, value: str) -> HTTPHandler:
"""Processes next `HTTPFunc` only when request has valid headers.
Args:
name (str): to check header.
        value (str): value to check the header against.
"""
raw_name = name.encode("UTF-8").lower()
raw_value = value.encode("UTF-8")
@handler
def _handler(nxt: HTTPFunc, ctx: HTTPContext) -> Future[HTTPContext | None]:
match ctx.request_headers.get(raw_name, None) == raw_value:
case True:
return nxt(ctx)
case False:
return skip(ctx)
return _handler
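# Illustrative usage sketch (not part of the original module). It only exercises the two factory
# functions defined above; how the resulting handlers are wired into a moona application is left
# out on purpose, since that depends on moona's composition API.
#
#     require_json = matches_header("content-type", "application/json")
#     require_auth = has_header("authorization")
#
# Each factory returns an HTTPHandler that passes the HTTPContext on to the next HTTPFunc only
# when the expected request header is present (and, for matches_header, has the expected value);
# otherwise the context is skipped.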
| 2.515625
| 3
|
tests/unit/logs/metadata_engine/entity_id/test_me_id.py
|
dt-be/dynatrace-aws-log-forwarder
| 0
|
12787107
|
<reponame>dt-be/dynatrace-aws-log-forwarder
# Copyright 2021 Dynatrace LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logs.metadata_engine import me_id, jmespath
from logs.metadata_engine.jmespath import format_required
jmes_custom_functions = jmespath.MappingCustomFunctions()
def test_format_required():
assert format_required("{}", None) is None
assert format_required("{}", []) is None
assert format_required("{}{}", []) is None
assert format_required("{}{}", ["1"]) is None
assert format_required("{}{}_{}", ["1", "2"]) is None
assert jmes_custom_functions._func_format_arn("{}", None) is None
assert jmes_custom_functions._func_format_arn("{}", []) is None
assert jmes_custom_functions._func_format_arn("{}{}", []) is None
assert jmes_custom_functions._func_format_arn("{}{}", ["1"]) is None
assert jmes_custom_functions._func_format_arn("{}{}_{}", ["1", "2"]) is None
def test_meid_credentials_v1_legacy_md5():
input = "dynatrace-aws-logs-Lambda-1K7HG2Q2LIQKUus-east-1_000047316593"
id = me_id._legacy_entity_id_md5(input)
meid = me_id.meid_md5("AWS_LAMBDA_FUNCTION", input)
meid_from_format = me_id.meid_md5("AWS_LAMBDA_FUNCTION", format_required("{}{}_{}", ["dynatrace-aws-logs-Lambda-1K7HG2Q2LIQKU", "us-east-1", "000047316593"]))
assert id == -3464187019831048966
assert meid == "AWS_LAMBDA_FUNCTION-CFECBC426F7384FA"
assert meid == meid_from_format
def test_meid_credentials_v2_supporting_service__murmurhash():
input = "api gatewayarn:aws:apigateway:us-east-1:000047316593:/restapis/PetStore"
id = me_id._murmurhash2_64A(input)
meid = me_id.meid_murmurhash("CUSTOM_DEVICE", input)
meid_from_list = me_id.meid_murmurhash("CUSTOM_DEVICE", format_required("{}{}", ["api gateway", "arn:aws:apigateway:us-east-1:000047316593:/restapis/PetStore"]))
assert id == -364647979568170292
assert meid == "CUSTOM_DEVICE-FAF0829835C67ACC"
assert meid == meid_from_list
def test_meid_in_credentials_v2_core_services__murmurhash_awsseed():
real_long_id_from_dt_cluster = 7316649878848848536
real_meid_from_dt_cluster = "RELATIONAL_DATABASE_SERVICE-6589F64CAEB0C298"
wrong_default_long_id = 5481040344698372608
wrong_default_meid = "RELATIONAL_DATABASE_SERVICE-4C1091275954C200"
dbInstanceArn = "arn:aws:rds:us-east-1:908047316593:db:belu-metadata-database-1-instance-1"
entity_type = "RELATIONAL_DATABASE_SERVICE"
id = me_id._murmurhash2_64A(dbInstanceArn)
meid = me_id.meid_murmurhash(entity_type, dbInstanceArn)
assert meid == wrong_default_meid, "From default calculation, with seed unset/default (0xe17a1465): 3782874213:"
assert id == wrong_default_long_id
id = me_id._murmurhash2_64A(dbInstanceArn, seed=3782874213)
meid = me_id.meid_murmurhash(entity_type, dbInstanceArn)
assert meid == wrong_default_meid, "From default calculation, with seed given explicitly, same as default (0xe17a1465): 3782874213:"
assert id == wrong_default_long_id
id = me_id._murmurhash2_64A(dbInstanceArn, seed=-512093083)
meid = me_id.meid_murmurhash_awsseed(entity_type, dbInstanceArn)
assert meid == real_meid_from_dt_cluster, "From awsSeed calculation, with seed -512093083:"
assert id == real_long_id_from_dt_cluster
meid = jmes_custom_functions._func_dt_meid_rds_v2(dbInstanceArn)
assert meid == real_meid_from_dt_cluster, "From jmesPath customFunctions - seed should be -512093083:"
assert id == real_long_id_from_dt_cluster
| 1.765625
| 2
|
publications/admin_views/import_bibtex.py
|
christianglodt/django-publications
| 0
|
12787108
|
__license__ = 'MIT License <http://www.opensource.org/licenses/mit-license.php>'
__author__ = '<NAME> <<EMAIL>>'
__docformat__ = 'epytext'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.db import transaction
from publications.models import Type
from publications.utils import import_bibtex as do_import_bibtex
# mapping of months
MONTHS = {
'jan': 1, 'january': 1,
'feb': 2, 'february': 2,
'mar': 3, 'march': 3,
'apr': 4, 'april': 4,
'may': 5,
'jun': 6, 'june': 6,
'jul': 7, 'july': 7,
'aug': 8, 'august': 8,
'sep': 9, 'september': 9,
'oct': 10, 'october': 10,
'nov': 11, 'november': 11,
'dec': 12, 'december': 12}
def import_bibtex(request):
if request.method == 'POST':
# try to import BibTex
bibtex = request.POST['bibliography']
with transaction.atomic():
publications, errors = do_import_bibtex(bibtex)
status = messages.SUCCESS
if len(publications) == 0:
status = messages.ERROR
msg = 'No publications were added, %i errors occurred' % len(errors)
elif len(publications) > 1:
msg = 'Successfully added %i publications (%i skipped due to errors)' % (len(publications), len(errors))
else:
msg = 'Successfully added %i publication (%i error(s) occurred)' % (len(publications), len(errors))
# show message
messages.add_message(request, status, msg)
for error in errors:
messages.add_message(request, messages.ERROR, error)
# redirect to publication listing
return HttpResponseRedirect('../')
else:
return render_to_response(
'admin/publications/import_bibtex.html', {
'title': 'Import BibTex',
'types': Type.objects.all(),
'request': request},
RequestContext(request))
import_bibtex = staff_member_required(import_bibtex)
| 1.867188
| 2
|
manila/scheduler/weighers/host_affinity.py
|
kpawar89/manila
| 159
|
12787109
|
# Copyright 2019 NetApp, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila import context
from manila.db import api as db_api
from manila.scheduler.weighers import base_host
from manila.share import utils as share_utils
class HostAffinityWeigher(base_host.BaseHostWeigher):
def _weigh_object(self, obj, weight_properties):
"""Weigh hosts based on their proximity to the source's share pool.
If no snapshot_id was provided will return 0, otherwise, if source and
destination hosts are located on:
1. same back ends and pools: host is a perfect choice (100)
2. same back ends and different pools: host is a very good choice (75)
3. different back ends with the same AZ: host is a good choice (50)
        4. different back ends and AZs: host is not such a good choice (25)
"""
ctx = context.get_admin_context()
request_spec = weight_properties.get('request_spec')
snapshot_id = request_spec.get('snapshot_id')
snapshot_host = request_spec.get('snapshot_host')
if None in [snapshot_id, snapshot_host]:
# NOTE(silvacarlose): if the request does not contain a snapshot_id
# or a snapshot_host, the user is not creating a share from a
# snapshot and we don't need to weigh the host.
return 0
snapshot_ref = db_api.share_snapshot_get(ctx, snapshot_id)
# Source host info: pool, backend and availability zone
src_pool = share_utils.extract_host(snapshot_host, 'pool')
src_backend = share_utils.extract_host(
request_spec.get('snapshot_host'), 'backend')
src_az = snapshot_ref['share']['availability_zone']
# Destination host info: pool, backend and availability zone
dst_pool = share_utils.extract_host(obj.host, 'pool')
dst_backend = share_utils.extract_host(obj.host, 'backend')
# NOTE(dviroel): All hosts were already filtered by the availability
# zone parameter.
dst_az = None
if weight_properties['availability_zone_id']:
dst_az = db_api.availability_zone_get(
ctx, weight_properties['availability_zone_id']).name
if src_backend == dst_backend:
return 100 if (src_pool and src_pool == dst_pool) else 75
else:
return 50 if (src_az and src_az == dst_az) else 25
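# Illustrative worked example (not part of the original module); the host strings below are made up
# and follow manila's usual 'host@backend#pool' format. With a snapshot that lives on
# 'hostA@backend1#pool1', candidate destination hosts would be weighed as:
#   'hostA@backend1#pool1'                     -> same back end, same pool          -> 100
#   'hostA@backend1#pool2'                     -> same back end, different pool     -> 75
#   'hostB@backend2#pool1' in the same AZ      -> different back end, same AZ       -> 50
#   'hostB@backend2#pool1' in a different AZ   -> different back end, different AZ  -> 25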
| 1.96875
| 2
|
setup.py
|
Flowdalic/nattka
| 7
|
12787110
|
#!/usr/bin/env python
# (c) 2020 <NAME>
# 2-clause BSD license
from setuptools import setup
from nattka import __version__
setup(
name='nattka',
version=__version__,
description='A New Arch Tester Toolkit (open source replacement '
'for stable-bot)',
author='<NAME>',
author_email='<EMAIL>',
license='BSD',
url='http://github.com/mgorny/nattka',
packages=['nattka'],
entry_points={
'console_scripts': [
'nattka=nattka.__main__:setuptools_main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Testing',
]
)
| 1.140625
| 1
|
paper_plots/savgol.py
|
amanchokshi/mwa-satellites
| 1
|
12787111
|
"""
SAVGOL INTERP.
--------------
"""
import argparse
from pathlib import Path
import matplotlib
import numpy as np
from embers.rf_tools.align_data import savgol_interp
from embers.rf_tools.colormaps import spectral
from matplotlib import pyplot as plt
matplotlib.use("Agg")
_spec, _ = spectral()
parser = argparse.ArgumentParser(
description="""
Savgol Interpolation paper plot
"""
)
parser.add_argument(
"--rf_dir",
metavar="\b",
default="../../tiles_data",
    help="Directory with raw rf data. Default=../../tiles_data",
)
parser.add_argument(
"--out_dir",
metavar="\b",
default="../embers_out/paper_plots",
    help="Output Directory. Default=../embers_out/paper_plots",
)
args = parser.parse_args()
rf_dir = Path(args.rf_dir)
out_dir = Path(args.out_dir)
out_dir.mkdir(parents=True, exist_ok=True)
try:
ch = 8
(
ref_ali,
tile_ali,
time_array,
ref_power,
tile_power,
ref_time,
tile_time,
) = savgol_interp(
f"{rf_dir}/rf0XX/2019-09-15/rf0XX_2019-09-15-11:00.txt",
f"{rf_dir}/S06XX/2019-09-15/S06XX_2019-09-15-11:00.txt",
savgol_window_1=11,
savgol_window_2=15,
polyorder=2,
interp_type="cubic",
interp_freq=1,
)
plt.style.use("seaborn")
nice_fonts = {
# Use LaTeX to write all text
# "text.usetex": True,
"font.family": "sans-serif",
# Use 10pt font in plots, to match 10pt font in document
"axes.labelsize": 10,
"font.size": 10,
# Make the legend/label fonts a little smaller
"legend.fontsize": 6,
"xtick.labelsize": 8,
"ytick.labelsize": 8,
}
plt.rcParams.update(nice_fonts)
fig = plt.figure(figsize=(3.6, 2.4))
colors = _spec([0.14, 0.28])
tile_t = tile_time - tile_time[0]
time_array = time_array - time_array[0]
med = np.median(tile_power)
tile_p = tile_power - med
tile_p_aligned = tile_ali - med
plt.plot(
time_array,
tile_p_aligned[::, ch],
linewidth=1,
color=colors[0],
# color="#2c5d63",
alpha=0.9,
label="SavGol",
)
plt.scatter(
tile_t,
tile_p[::, ch],
color=colors[1],
# color="#7fa998",
marker=".",
s=3,
alpha=0.2,
label="AUT raw",
)
leg = plt.legend(loc="upper right", frameon=True, markerscale=4, handlelength=1)
leg.get_frame().set_facecolor("white")
for le in leg.legendHandles:
le.set_alpha(1)
plt.ylabel("Power [dB]")
plt.xlabel("Time [s]")
plt.tight_layout()
plt.savefig(f"{out_dir}/savgol.pdf", bbox_inches="tight")
print(f"SAVGOL INTERP saved to {out_dir}")
except Exception as e:
print(e)
print("Missing input rf files. Check path to rf_dir")
| 2.203125
| 2
|
optimum/onnxruntime/preprocessors/passes/fully_connected.py
|
techthiyanes/optimum
| 414
|
12787112
|
<reponame>techthiyanes/optimum
# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Set, Tuple
from onnx import ModelProto
from onnxruntime.transformers.onnx_model import OnnxModel
from optimum.onnxruntime.preprocessors import PreprocessorPass
class IncludeFullyConnectedNodes(PreprocessorPass):
def __init__(self):
super().__init__()
def __call__(self, graph: ModelProto, model: OnnxModel) -> Tuple[Set[str], Set[str]]:
fc_subgraphs = []
for add_node in model.get_nodes_by_op_type("Add"):
fc_components = model.match_parent_path(add_node, ["MatMul"], [1])
if fc_components is not None:
fc_components.append(add_node)
fc_subgraphs.append(fc_components)
fc_components = {node.name for fc in fc_subgraphs for node in fc}
return fc_components, set()
| 2.15625
| 2
|
spockbot/plugins/core/auth.py
|
SpockBotMC/SpockBot
| 171
|
12787113
|
"""
Provides authorization functions for Mojang's login and session servers
"""
import hashlib
import json
# This is for python2 compatibility
try:
import urllib.request as request
from urllib.error import URLError
except ImportError:
import urllib2 as request
from urllib2 import URLError
import logging
import os
from spockbot.mcp.yggdrasil import YggdrasilCore
from spockbot.plugins.base import PluginBase, pl_announce
logger = logging.getLogger('spockbot')
# This function courtesy of barneygale
def java_hex_digest(digest):
d = int(digest.hexdigest(), 16)
if d >> 39 * 4 & 0x8:
d = "-%x" % ((-d) & (2 ** (40 * 4) - 1))
else:
d = "%x" % d
return d
class AuthCore(object):
def __init__(self, event, online_mode, auth_timeout):
self.online_mode = online_mode
self.auth_timeout = auth_timeout
self.__event = event
self.ygg = YggdrasilCore()
self._shared_secret = None
self._username = None
def get_username(self):
return self._username
def set_username(self, username):
self.ygg.username = username
username = property(get_username, set_username)
def set_password(self, password):
if password and not self.online_mode:
logger.warning("PASSWORD PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.password = password
password = property(lambda x: bool(x.ygg.password), set_password)
def set_client_token(self, client_token):
if not self.online_mode:
logger.warning("CLIENT TOKEN PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.client_token = client_token
client_token = property(
lambda x: bool(x.ygg.client_token), set_client_token
)
def set_auth_token(self, auth_token):
if not self.online_mode:
logger.warning("AUTH TOKEN PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.auth_token = auth_token
auth_token = property(
lambda x: bool(x.ygg.auth_token), set_auth_token
)
def get_shared_secret(self):
self._shared_secret = self._shared_secret or os.urandom(16)
return self._shared_secret
shared_secret = property(get_shared_secret)
def start_session(self):
if not self.online_mode:
self._username = self.ygg.username
return True
if self.ygg.login():
self._username = self.ygg.selected_profile['name']
return True
self.__event.emit('auth_session_error')
return False
def send_session_auth(self, pubkey_raw, server_id_raw):
server_id = java_hex_digest(hashlib.sha1(
server_id_raw.encode('ascii') + self.shared_secret + pubkey_raw
))
logger.info('Attempting to authenticate with Mojang session server')
url = "https://sessionserver.mojang.com/session/minecraft/join"
data = json.dumps({
'accessToken': self.ygg.access_token,
'selectedProfile': self.ygg.selected_profile,
'serverId': server_id,
}).encode('utf-8')
headers = {'Content-Type': 'application/json'}
req = request.Request(url, data, headers)
try:
rep = request.urlopen(
req, timeout=self.auth_timeout
).read().decode('ascii')
except URLError:
rep = "Couldn't connect to sessionserver.mojang.com"
if rep:
logger.warning('Mojang session auth response: %s', rep)
logger.info('Session authentication successful')
@pl_announce('Auth')
class AuthPlugin(PluginBase):
requires = 'Event'
defaults = {
'online_mode': True,
'auth_timeout': 3, # No idea how long this should be, 3s seems good
'auth_quit': True,
'sess_quit': True,
}
events = {
'auth_login_error': 'handle_auth_error',
'auth_session_error': 'handle_session_error',
}
def __init__(self, ploader, settings):
super(AuthPlugin, self).__init__(ploader, settings)
self.sess_quit = self.settings['sess_quit']
self.auth_quit = self.settings['auth_quit']
ploader.provides('Auth', AuthCore(
self.event,
self.settings['online_mode'],
self.settings['auth_timeout']
))
def handle_auth_error(self, name, data):
if self.auth_quit:
logger.error('AUTH: Session authentication error, calling kill')
self.event.kill()
def handle_session_error(self, name, data):
if self.sess_quit:
logger.error('AUTH: Session start error, calling kill')
self.event.kill()
| 2.703125
| 3
|
src/ti_dbscan/dbscan.py
|
rafal0502/ti-dbscan
| 0
|
12787114
|
<gh_stars>0
import numpy
def basicDBSCAN(D, eps, MinPts):
"""
    Cluster dataset 'D' using the DBSCAN algorithm.
    This implementation takes a dataset 'D' (a list
    of vectors), a threshold distance 'eps', and a
    required number of points 'MinPts'.
    It returns the label -1 for noise; clusters are
    numbered starting from 1.
    Args:
        D (list): List of vectors to cluster
        eps (float): Threshold distance
        MinPts (int): Minimum required number of neighbors
"""
labels = len(D) * [0] # initially all labels are 0
C = 0 # C is the ID of the current cluster
# Picking new seed points - a point from which
# to grow a new cluster
    # If a valid seed point is found, a new cluster is created
    # and the cluster growth is all handled by the 'expandCluster'
    # routine
# For each point P in the dataset D
    # (X - index of datapoint, not datapoint itself)
for X in range(0, len(D)):
# Only points that have not already been claimed
        # can be picked as new seed points
# If the point's label is not 0, continue to the next point
if not (labels[X] == 0):
continue
# Find all of X's neighboring points
NeighborPts = markRegion(D, X, eps)
# If the number is below MinPts, this point is noise
# This is the only condition under which a point is labeled
# NOISE - may be later picked up by another cluster as boundary
# point (this is only condition under which a cluster label
# can be changed -- from NOISE to something else)
if len(NeighborPts) < MinPts:
labels[X] = -1
        # Otherwise, if there are at least MinPts nearby,
# use this point as the seed for a new cluster
else:
C += 1
expandCluster(D, labels, X, NeighborPts, C, eps, MinPts)
    # All data has been clustered!
return labels
def expandCluster(D, labels, X, NeighborPts, C, eps, MinPts):
"""
    Expand the cluster based on seed point X with label C.
    This function searches through the dataset to find all
    points that belong to this new cluster. When this function
    returns, cluster 'C' is complete.
    Args:
        D (list): List of vectors
        labels (list): List storing the cluster labels
                       for all dataset points
        X (int): Index of the seed point for the new cluster
        NeighborPts (list): All of the neighbors of 'X'
        C (int): Label for the new cluster
        eps (float): Threshold distance
        MinPts (int): Minimum required number of neighbors
"""
    # Assign the cluster label to the seed point
labels[X] = C
# Look at each neighbor of X (Xn - group of n neighbors)
# NeighborPts will be used as a FIFO queue of points to
# search - that is, it will grow as we discover new branch
    # points for the cluster. FIFO behavior - accomplished by
# using while-loop instead of for-loop
# In NeighborPts, the points are represented by their index
# in the original dataset
i = 0
while i < len(NeighborPts):
# get next point from the queue
Xn = NeighborPts[i]
# If Xn was labelled NOISE during the seed search,
# then we know it's not a branch point (it doesn't have
# enough neighbors), so make it a leaf point of cluster C
# and move on
if labels[Xn] == -1:
labels[Xn] = C
        # Otherwise, if Xn is not already claimed, claim it as part of C
elif labels[Xn] == 0:
# Add Xn to cluster C (Assign cluster label C)
labels[Xn] = C
# Find all the neighbors of Xn
XnNeighborsPts = markRegion(D, Xn, eps)
# If Xn has at least MinPts neighbors, it's a branch point
# Add all of its neighbors to the FIFO queue to be searched
if len(XnNeighborsPts) >= MinPts:
NeighborPts = NeighborPts + XnNeighborsPts
# If Xn doesn't have enough neighbors, then it's a leaf point
            # Don't queue up its neighbors as expansion points
# Advance to the next point in the FIFO queue
i += 1
# Growing of cluster C ended
def markRegion(D, X, eps):
"""
Find all points in dataset 'D' within distance 'eps' of point 'X'
This function calculates the distance between a point X and every
    other point in the dataset, and then returns only those points
    which are within the threshold distance 'eps'.
    Args:
        D (list): List of vectors
        X (int): Index of the point whose neighborhood is queried
        eps (float): Threshold distance
"""
neighbors = []
# For each point in the dataset...
for Xn in range(0, len(D)):
# If the distance is below the threshold, add it to the neighbors list
if numpy.linalg.norm(D[X] - D[Xn]) < eps:
neighbors.append(Xn)
return neighbors
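# A minimal usage sketch (not part of the original module), assuming the
# functions above are used as-is on two tiny synthetic blobs plus one outlier.
# Under those assumptions this implementation labels the blobs 1 and 2 and
# marks the isolated point as noise (-1).
if __name__ == "__main__":
    D = [numpy.array(p) for p in [(1.0, 1.0), (1.1, 0.9), (0.9, 1.1),   # blob 1
                                  (8.0, 8.0), (8.1, 7.9), (7.9, 8.1),   # blob 2
                                  (4.5, 4.5)]]                          # isolated point
    print(basicDBSCAN(D, eps=0.5, MinPts=3))  # expected: [1, 1, 1, 2, 2, 2, -1]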
| 3.46875
| 3
|
spock/addons/s3/__init__.py
|
gbmarc1/spock
| 58
|
12787115
|
# -*- coding: utf-8 -*-
# Copyright FMR LLC <<EMAIL>>
# SPDX-License-Identifier: Apache-2.0
"""
Spock is a framework that helps manage complex parameter configurations for Python applications
Please refer to the documentation provided in the README.md
"""
from spock.addons.s3.configs import S3Config, S3DownloadConfig, S3UploadConfig
__all__ = ["configs", "utils", "S3Config", "S3DownloadConfig", "S3UploadConfig"]
| 1.578125
| 2
|
pyhubtel_sms/exceptions.py
|
idadzie/pyhubtel-sms
| 0
|
12787116
|
<gh_stars>0
# -*- coding: utf-8 -*-
def _explain_response(response):
reasons = {
400: {
3: "The message body was too long.",
4: "The message is not routable on the Hubtel gateway.",
6: "The message content was rejected or is invalid.",
7: "One or more parameters are not allowed in the message.",
8: "One or more parameters are not valid for the message.",
},
402: "Your account does not have enough messaging credits to send "
"the message.",
403: "Recipient has not given his/her approval to receive messages.",
404: "The specified message was not found.",
}
sub_code = response.json().get("Status", False)
try:
return reasons.get(response.status_code).get(sub_code, None)
except (AttributeError, KeyError):
return reasons.get(response.status_code, None)
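# Illustrative example (not part of the original module): for a response with
# status code 400 and a JSON body of {"Status": 3}, the helper above returns
# "The message body was too long."; for a bare 402 it falls back to the
# account-credits message, and it returns None for any unmapped status code.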
class HubtelAPIException(Exception):
"""An ambiguous exception occurred."""
def __init__(self, *args):
super(HubtelAPIException, self).__init__(*args)
if len(self.args) == 0:
self.reason = ""
if len(self.args) == 1:
self.reason = str(self.args[0])
if len(self.args) == 2:
error_message = self.args[0]
response = self.args[1]
reason = _explain_response(response)
self.reason = str("{}: {}".format(error_message, reason))
if len(self.args) >= 3:
self.reason = str(self.args)
def __str__(self):
if len(self.args) == 0:
return ""
if self.reason:
return str(self.reason)
return str(self.args)
class SMSError(HubtelAPIException):
"""An SMS error occurred."""
class InvalidPhoneNumberError(HubtelAPIException):
"""An invalid phone number error occurred."""
class InvalidTimeStringError(HubtelAPIException):
"""An invalid time string error occured."""
class InvalidMessageError(HubtelAPIException):
"""An invalid message error occurred."""
| 2.5625
| 3
|
make_plot.py
|
jelena-markovic/compare-selection
| 0
|
12787117
|
<reponame>jelena-markovic/compare-selection
import os, glob
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from utils import summarize
from statistics import FDR_summary
def feature_plot(param, power, color='r', label='foo', ylim=None, horiz=None):
ax = plt.gca()
ax.plot(param, power, 'o--', color=color, label=label)
ax.set_xticks(sorted(np.unique(param)))
if ylim is not None:
ax.set_ylim(ylim)
if horiz is not None:
ax.plot(ax.get_xticks(), horiz * np.ones(len(ax.get_xticks())), 'k--')
def plot(df,
fixed,
param,
feature,
outbase,
methods=None):
# results, rho_results, signal_results = extract_results(df)
methods = methods or np.unique(df['class_name'])
df['Method'] = df['method_name']
# plot with rho on x axis
g_plot = sns.FacetGrid(df, col=fixed, hue='Method', sharex=True, sharey=True, col_wrap=2, size=5, legend_out=False)
if feature == 'Full model power':
rendered_plot = g_plot.map(feature_plot, param, feature, ylim=(0,1))
elif feature == 'Full model FDR':
rendered_plot = g_plot.map(feature_plot, param, feature, ylim=(0,0.3), horiz=0.2)
rendered_plot.add_legend()
rendered_plot.savefig(outbase + '.pdf')
rendered_plot.savefig(outbase + '.png')
return df
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser(
description='''
Make plots for
Try:
python make_plot.py --methods lee_theory liu_theory --csvfile indep.csv
''')
parser.add_argument('--methods', nargs='+',
dest='methods',
help='Names of methods in plot (i.e. class name). Defaults to all methods.')
parser.add_argument('--param',
dest='param',
default='rho',
help='Make a plot with param on x-axis for varying fixed')
parser.add_argument('--fixed',
dest='fixed',
default='signal',
                        help='Which value is fixed for each facet')
parser.add_argument('--feature',
dest='feature',
default='power',
help='Variable for y-axis')
parser.add_argument('--csvfile', help='csvfile.', dest='csvfile')
parser.add_argument('--csvbase', help='csvfile.', dest='csvbase')
parser.add_argument('--outbase', help='Base of name of pdf file where results are plotted.')
opts = parser.parse_args()
if opts.csvbase is not None:
full_df = pd.concat([pd.read_csv(f) for f in glob.glob(opts.csvbase + '*signal*csv')])
full_df.to_csv(opts.csvbase + '.csv')
csvfile = opts.csvbase + '.csv'
else:
csvfile = opts.csvfile
if opts.param == opts.fixed:
raise ValueError('one should be rho, the other signal')
df = pd.read_csv(csvfile)
summary_df = summarize(['method_param',
opts.param,
opts.fixed],
df,
FDR_summary)
plot(summary_df,
opts.fixed,
opts.param,
{'power':'Full model power', 'fdr': 'Full model FDR'}[opts.feature],
opts.outbase,
methods=opts.methods)
| 2.21875
| 2
|
relay/api/fields.py
|
weilbith/relay
| 0
|
12787118
|
<filename>relay/api/fields.py
import hexbytes
from eth_utils import is_address, to_checksum_address
from marshmallow import fields
from webargs import ValidationError
from relay.network_graph.payment_path import FeePayer
class Address(fields.Field):
def _serialize(self, value, attr, obj, **kwargs):
return super()._serialize(value, attr, obj, **kwargs)
def _deserialize(self, value, attr, data, **kwargs):
if not is_address(value):
raise ValidationError(
f"Could not parse attribute {attr}: Invalid address {value}"
)
return to_checksum_address(value)
class BigInteger(fields.Field):
def _serialize(self, value, attr, obj, **kwargs):
assert isinstance(value, int)
value = str(value)
return super()._serialize(value, attr, obj, **kwargs)
def _deserialize(self, value, attr, data, **kwargs):
if not isinstance(value, str):
raise ValidationError(f"{attr} has to be a string")
try:
int_value = int(value)
except ValueError:
raise ValidationError("Could not parse integer")
return int_value
class HexBytes(fields.Field):
def _serialize(self, value, attr, obj, **kwargs):
return "0x{:064X}".format(int.from_bytes(value, "big")).lower()
def _deserialize(self, value, attr, data, **kwargs):
try:
hex_bytes = hexbytes.HexBytes(value)
except ValueError:
raise ValidationError("Could not parse Hex number")
return hex_bytes
class HexEncodedBytes(fields.Field):
"""hex encoded bytes field, correctly round-trips. was needed because
HexBytes doesn't round trip correctly """
def _serialize(self, value, attr, obj, **kwargs):
if isinstance(value, hexbytes.HexBytes):
return value.hex()
elif isinstance(value, bytes):
return "0x" + value.hex()
else:
raise ValueError("Value must be of type bytes or HexBytes")
def _deserialize(self, value, attr, data, **kwargs):
if not (isinstance(value, str) and value.startswith("0x")):
raise ValidationError(
f"Could not parse hex-encoded bytes objects of attribute {attr}: {value}"
)
try:
# Create bytes first, to not use weird conversion done by hexbytes constructor
return hexbytes.HexBytes(bytes.fromhex(value[2:]))
except ValueError:
raise ValidationError(
f"Could not parse hex-encoded bytes objects of attribute {attr}: {value}"
)
class FeePayerField(fields.Field):
def _serialize(self, value, attr, obj, **kwargs):
if isinstance(value, FeePayer):
# serialises into the value of the FeePayer enum
return value.value
else:
raise ValidationError("Value must be of type FeePayer")
def _deserialize(self, value, attr, data, **kwargs):
# deserialize into the FeePayer enum instance corresponding to the value
try:
return FeePayer(value)
except ValueError:
raise ValidationError(
f"Could not parse attribute {attr}: {value} has to be one of "
f"{[fee_payer.value for fee_payer in FeePayer]}"
)
| 2.40625
| 2
|
exam/migrations/0005_alter_marks_marks_mx_alter_marks_marks_ob.py
|
HarshNarayanJha/School-Management-Project
| 2
|
12787119
|
# Generated by Django 4.0.1 on 2022-03-15 04:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('exam', '0004_remove_exam_name_exam_exam_name_alter_exam_cls_and_more'),
]
operations = [
migrations.AlterField(
model_name='marks',
name='marks_mx',
field=models.IntegerField(blank=True, help_text='Maximum marks in the subject', null=True, verbose_name='Maximum Marks'),
),
migrations.AlterField(
model_name='marks',
name='marks_ob',
field=models.IntegerField(blank=True, help_text='Marks obtained in the subject', null=True, verbose_name='Marks Obtained'),
),
]
| 1.5
| 2
|
test/unit/test_xlwtwrapper.py
|
muchu1983/104_mops
| 0
|
12787120
|
"""
Copyright (C) 2015, <NAME>
Contributed by <NAME> (<EMAIL>)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import unittest
import logging
import time
from mops.xlwtwrapper import XlwtWrapper
"""
Tests for writing Excel files
"""
class XlwtWrapperTest(unittest.TestCase):
    # Setup
def setUp(self):
logging.basicConfig(level=logging.INFO)
pass
    # Teardown
def tearDown(self):
pass
    # Test writing row data
def test_addRowData(self):
logging.info("XlwtWrapperTest.test_addRowData")
wrapper = XlwtWrapper()
for i in range(5000):
wrapper.addRowData(("20160208", "中文字", "B", "DEF", "123", "TWD", "456", "789", "DEF"))
wrapper.saveExcelFile()
# Run the tests
if __name__ == "__main__":
unittest.main(exit=False)
| 2.21875
| 2
|
labs/model-test/test_2_practice.py
|
ioanabirsan/python
| 0
|
12787121
|
# Write a function named problema1 that returns a list, sorted in ascending order, containing all the words
# in the character string s given as a parameter. A word consists of: lowercase and uppercase letters, digits and
# the underscore character '_'.
import re
import os
import urllib
from urllib import request
import hashlib
import zipfile
import socket
def problema1(s):
    word_pattern = r'(\w+)'
words = re.findall(word_pattern, s)
words.sort()
return words
# print(problema1('@c3sta 3st3, un cuvant_.'))
# Write a function named problema2 that receives as parameters a character string s and a
# character string url representing an http link.
# Return True if s is found in the content at the given http link, or False otherwise.
def problema2(s, url):
response = urllib.request.urlopen(url)
content = response.read()
return s.encode() in content
# print(problema2("facebook", "https://mbasic.facebook.com/"))
# print(problema2(s="2014 hackaday.com. All Rights Reserved.", url="http://retro.hackaday.com/"))
# print(problema2(s="google", url="https://www.google.com.hk"))
# print(problema2(s="gooogli", url="https://www.google.com.hk"))
# Write a function named problema3 that receives as a parameter a character string path that
# represents the path of a directory.
# Return a list, sorted in ascending order, of the md5 hashes of all the files in the directory (non-recursive).
def problema3(path):
def hash(filepath, block_size=4096):
try:
hash = hashlib.md5()
f = open(filepath, 'rb')
while True:
data = f.read(block_size)
                if len(data) == 0:
break
hash.update(data)
f.close()
return hash.hexdigest()
except:
return ''
files = os.listdir(path)
md5 = []
for file in files:
file_path = os.path.join(path, file)
if os.path.isfile(file_path):
md5.append(hash(file_path))
md5.sort()
return md5
# print(problema3('C:\\facultate\\an3\\sem1\\python\\python\\labs'))
# Write a function named problema4 that receives as a parameter a character string path that
# represents the path of a zip archive.
# Return a list, sorted in ascending order, of the names of the files whose size after compression is larger than 1 KB
# (1000 bytes).
def problema4(path):
list = []
z = zipfile.ZipFile(path)
for i in z.infolist():
if i.compress_size > 1000:
name = os.path.basename(i.filename)
list.append(name)
list.sort()
return list
# print(problema4('C:\\facultate\\an3\\sem1\\Introduction-to-.Net\\project\\CLMS\\CLMS\\clms.zip'))
# Write a function named problema5 that receives as arguments a character string host,
# a number port and a character string text.
# Return the final response from the server, as a string, following this defined protocol:
# - the client sends the content of the text argument to the server
# - the client receives another character string (of length 32) from the server
# - the client sends the server the sha256 hash of the previously received string
# - the client receives the final response from the server (of length 32) and returns it
def problema5(host, port, text):
def get_sha256(text):
hash = hashlib.sha256()
hash.update(text.encode())
return hash.hexdigest()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(text.encode())
message = s.recv(32).decode()
hash_message = get_sha256(message)
s.send(hash_message.encode())
final_message = s.recv(32).decode()
s.close()
return final_message
| 3.234375
| 3
|
v1/music/views/album.py
|
lawiz22/PLOUC-Backend-master
| 0
|
12787122
|
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from v1.filters.albums.album import album_filter
from v1.music.models.album import Album
from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate
# albums
class AlbumView(APIView):
@staticmethod
def get(request):
"""
List albums
"""
albums = Album.objects.all()
albums = album_filter(request, albums)
if type(albums) == Response:
return albums
return Response(AlbumSerializer(albums, many=True).data)
@staticmethod
def post(request):
"""
Create album
"""
serializer = AlbumSerializerCreate(data=request.data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(AlbumSerializer(serializer.instance).data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# albums/{album_id}
class AlbumDetail(APIView):
@staticmethod
def get(request, album_id):
"""
View individual album
"""
album = get_object_or_404(Album, pk=album_id)
return Response(AlbumSerializerFull(album).data)
@staticmethod
def patch(request, album_id):
"""
Update album
"""
album = get_object_or_404(Album, pk=album_id)
serializer = AlbumSerializerUpdate(album, data=request.data, context={'request': request}, partial=True)
if serializer.is_valid():
serializer.save()
return Response(AlbumSerializerFull(serializer.instance).data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@staticmethod
def delete(request, album_id):
"""
Delete album
"""
album = get_object_or_404(Album, pk=album_id)
if album.user != request.user:
return Response(status=status.HTTP_401_UNAUTHORIZED)
album.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
| 2.21875
| 2
|
yfinance/version.py
|
le-minhphuc/yfinance
| 3
|
12787123
|
<filename>yfinance/version.py<gh_stars>1-10
version = "0.1.63"
| 1.0625
| 1
|
buffers/__init__.py
|
MCPS-team/onboard
| 0
|
12787124
|
from .interface import SensorsData, PotholeEvent, PotholeEventHistory
from .sensors_buffer import SensorsBuffer
from .frame_buffer import FrameBuffer
from .swapper import Swapper
| 1.0625
| 1
|
matroids/construct/nulity_function.py
|
PotassiumIodide/matroid-theory-in-python
| 2
|
12787125
|
from typing import Callable, TypeVar
from matroids.core.set_operator import powset
from matroids.construct import independent_sets
T = TypeVar('T')
def from_independent_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by independent sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by independent sets
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
_, Is = matroid
# n(X) = |X| - max{|I|: I ∈ Is, I ⊆ X}, ∀X ⊆ E.
return lambda X: len(X) - max(map(len, (I for I in Is if I <= X)))
def from_dependent_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by dependent sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by dependent sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_dependent_matroid(matroid)))
def from_bases_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by bases.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by dependent sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_bases_matroid(matroid)))
def from_circuits_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by circuits.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by circuits.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_circuits_matroid(matroid)))
def from_rank_matroid(matroid: tuple[set[T], Callable[[set[T]], int]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by a rank function.
Args:
matroid (tuple[set[T], Callable[[set[T]], int]]): A matroid defined by a rank function
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, r = matroid
return lambda X: len(X) - r(X)
def from_closure_matroid(matroid: tuple[set[T], Callable[[set[T]], set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by a closure function.
Args:
matroid (tuple[set[T], Callable[[set[T]], set[T]]]): A matroid defined by a closure function
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, cl = matroid
# n(X) = |X| - min{ |I| : X ⊆ cl(I) }.
return lambda X: len(X) - min(len(I) for I in powset(E) if X <= cl(I))
def from_flats_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by flats.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by flats.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_flats_matroid(matroid)))
def from_open_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by open sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by open sets.
Returns:
Callable[[set[T]], int]: The nulity function of a matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_open_matroid(matroid)))
def from_hyperplanes_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by hyperplanes
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by hyperplanes
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_hyperplanes_matroid(matroid)))
def from_spanning_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by spanning sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by spanning sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_spanning_matroid(matroid)))
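# A minimal usage sketch (not part of the original module), assuming only the
# helpers above: the uniform matroid U_{1,2} on E = {1, 2} has independent sets
# {}, {1} and {2}, so its nulity is 0 on singletons and 1 on the whole ground set.
if __name__ == "__main__":
    E = {1, 2}
    Is = [set(), {1}, {2}]
    n = from_independent_matroid((E, Is))
    assert n(set()) == 0
    assert n({1}) == 0 and n({2}) == 0
    assert n({1, 2}) == 1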
| 2.890625
| 3
|
addons/account/tests/test_transfer_wizard.py
|
SHIVJITH/Odoo_Machine_Test
| 0
|
12787126
|
# -*- coding: utf-8 -*-
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged, Form
import time
@tagged('post_install', '-at_install')
class TestTransferWizard(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.company = cls.company_data['company']
cls.receivable_account = cls.company_data['default_account_receivable']
cls.payable_account = cls.company_data['default_account_payable']
cls.accounts = cls.env['account.account'].search([('reconcile', '=', False), ('company_id', '=', cls.company.id)], limit=5)
cls.journal = cls.company_data['default_journal_misc']
# Set rate for base currency to 1
cls.env['res.currency.rate'].search([('company_id', '=', cls.company.id), ('currency_id', '=', cls.company.currency_id.id)]).write({'rate': 1})
# Create test currencies
cls.test_currency_1 = cls.env['res.currency'].create({
'name': "PMK",
'symbol':'P',
})
cls.test_currency_2 = cls.env['res.currency'].create({
'name': "toto",
'symbol':'To',
})
cls.test_currency_3 = cls.env['res.currency'].create({
'name': "titi",
'symbol':'Ti',
})
# Create test rates
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 0.5,
'currency_id': cls.test_currency_1.id,
'company_id': cls.company.id
})
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 2,
'currency_id': cls.test_currency_2.id,
'company_id': cls.company.id
})
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 10,
'currency_id': cls.test_currency_3.id,
'company_id': cls.company.id
})
# Create an account using a foreign currency
cls.test_currency_account = cls.env['account.account'].create({
'name': 'test destination account',
'code': 'test_dest_acc',
'user_type_id': cls.env['ir.model.data'].xmlid_to_res_id('account.data_account_type_current_assets'),
'currency_id': cls.test_currency_3.id,
})
# Create test account.move
cls.move_1 = cls.env['account.move'].create({
'journal_id': cls.journal.id,
'line_ids': [
(0, 0, {
'name': "test1_1",
'account_id': cls.receivable_account.id,
'debit': 500,
}),
(0, 0, {
'name': "test1_2",
'account_id': cls.accounts[0].id,
'credit': 500,
}),
(0, 0, {
'name': "test1_3",
'account_id': cls.accounts[0].id,
'debit': 800,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test1_4",
'account_id': cls.accounts[1].id,
'credit': 500,
}),
(0, 0, {
'name': "test1_5",
'account_id': cls.accounts[2].id,
'credit': 300,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test1_6",
'account_id': cls.accounts[0].id,
'debit': 270,
'currency_id': cls.test_currency_1.id,
'amount_currency': 540,
}),
(0, 0, {
'name': "test1_7",
'account_id': cls.accounts[1].id,
'credit': 140,
}),
(0, 0, {
'name': "test1_8",
'account_id': cls.accounts[2].id,
'credit': 160,
}),
(0, 0, {
'name': "test1_9",
'account_id': cls.accounts[2].id,
'debit': 30,
'currency_id': cls.test_currency_2.id,
'amount_currency': 15,
}),
]
})
cls.move_1.action_post()
cls.move_2 = cls.env['account.move'].create({
'journal_id': cls.journal.id,
'line_ids': [
(0, 0, {
'name': "test2_1",
'account_id': cls.accounts[1].id,
'debit': 400,
}),
(0, 0, {
'name': "test2_2",
'account_id': cls.payable_account.id,
'credit': 400,
}),
(0, 0, {
'name': "test2_3",
'account_id': cls.accounts[3].id,
'debit': 250,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test2_4",
'account_id': cls.accounts[1].id,
'debit': 480,
'partner_id': cls.partner_b.id,
}),
(0, 0, {
'name': "test2_5",
'account_id': cls.accounts[2].id,
'credit': 730,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test2_6",
'account_id': cls.accounts[2].id,
'credit': 412,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_2.id,
'amount_currency': -633,
}),
(0, 0, {
'name': "test2_7",
'account_id': cls.accounts[1].id,
'debit': 572,
}),
(0, 0, {
'name': "test2_8",
'account_id': cls.accounts[2].id,
'credit': 100,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_2.id,
'amount_currency': -123,
}),
(0, 0, {
'name': "test2_9",
'account_id': cls.accounts[2].id,
'credit': 60,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_1.id,
'amount_currency': -10,
}),
]
})
cls.move_2.action_post()
def test_transfer_wizard_reconcile(self):
""" Tests reconciliation when doing a transfer with the wizard
"""
active_move_lines = (self.move_1 + self.move_2).mapped('line_ids').filtered(lambda x: x.account_id.user_type_id.type in ('receivable', 'payable'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.receivable_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
payable_transfer = transfer_move.line_ids.filtered(lambda x: x.account_id == self.payable_account)
receivable_transfer = transfer_move.line_ids.filtered(lambda x: x.account_id == self.receivable_account)
self.assertTrue(payable_transfer.reconciled, "Payable line of the transfer move should be fully reconciled")
self.assertAlmostEqual(self.move_1.line_ids.filtered(lambda x: x.account_id == self.receivable_account).amount_residual, 100, self.company.currency_id.decimal_places, "Receivable line of the original move should be partially reconciled, and still have a residual amount of 100 (500 - 400 from payable account)")
self.assertTrue(self.move_2.line_ids.filtered(lambda x: x.account_id == self.payable_account).reconciled, "Payable line of the original move should be fully reconciled")
self.assertAlmostEqual(receivable_transfer.amount_residual, 0, self.company.currency_id.decimal_places, "Receivable line from the transfer move should have nothing left to reconcile")
self.assertAlmostEqual(payable_transfer.debit, 400, self.company.currency_id.decimal_places, "400 should have been debited from payable account to apply the transfer")
self.assertAlmostEqual(receivable_transfer.credit, 400, self.company.currency_id.decimal_places, "400 should have been credited to receivable account to apply the transfer")
def test_transfer_wizard_grouping(self):
""" Tests grouping (by account and partner) when doing a transfer with the wizard
"""
active_move_lines = (self.move_1 + self.move_2).mapped('line_ids').filtered(lambda x: x.name in ('test1_3', 'test1_4', 'test1_5', 'test2_3', 'test2_4', 'test2_5', 'test2_6', 'test2_8'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.accounts[4]
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
groups = {}
for line in transfer_move.line_ids:
key = (line.account_id, line.partner_id or None, line.currency_id)
self.assertFalse(groups.get(key), "There should be only one line per (account, partner, currency) group in the transfer move.")
groups[key] = line
self.assertAlmostEqual(groups[(self.accounts[0], self.partner_a, self.company_data['currency'])].balance, -800, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[1], None, self.company_data['currency'])].balance, 500, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[1], self.partner_b, self.company_data['currency'])].balance, -480, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[2], self.partner_a, self.company_data['currency'])].balance, 1030, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[2], self.partner_a, self.test_currency_2)].balance, 512, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[3], self.partner_a, self.company_data['currency'])].balance, -250, self.company.currency_id.decimal_places)
def test_transfer_wizard_currency_conversion(self):
""" Tests multi currency use of the transfer wizard, checking the conversion
        is properly done when using a destination account with a currency_id set.
"""
active_move_lines = self.move_1.mapped('line_ids').filtered(lambda x: x.name in ('test1_6', 'test1_9'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.test_currency_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
destination_line = transfer_move.line_ids.filtered(lambda x: x.account_id == self.test_currency_account)
self.assertEqual(destination_line.currency_id, self.test_currency_3, "Transferring to an account with a currency set should keep this currency on the transfer line.")
self.assertAlmostEqual(destination_line.amount_currency, 3000, self.company.currency_id.decimal_places, "Transferring two lines with different currencies (and the same partner) on an account with a currency set should convert the balance of these lines into this account's currency (here (270 + 30) * 10 = 3000)")
def test_transfer_wizard_no_currency_conversion(self):
""" Tests multi currency use of the transfer wizard, verifying that
currency amounts are kept on distinct lines when transferring to an
account without any currency specified.
"""
active_move_lines = self.move_2.mapped('line_ids').filtered(lambda x: x.name in ('test2_9', 'test2_6', 'test2_8'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.receivable_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
destination_lines = transfer_move.line_ids.filtered(lambda x: x.account_id == self.receivable_account)
self.assertEqual(len(destination_lines), 2, "Two lines should have been created on destination account: one for each currency (the lines with same partner and currency should have been aggregated)")
self.assertAlmostEqual(destination_lines.filtered(lambda x: x.currency_id == self.test_currency_1).amount_currency, -10, self.test_currency_1.decimal_places)
self.assertAlmostEqual(destination_lines.filtered(lambda x: x.currency_id == self.test_currency_2).amount_currency, -756, self.test_currency_2.decimal_places)
| 1.867188
| 2
|
osrsmath/general/player.py
|
Palfore/OSRSmath
| 5
|
12787127
|
<gh_stars>1-10
from typing import Dict, List
class Player:
def __init__(self, levels: Dict[str, int]):
self.levels = levels
| 2.890625
| 3
|
dns_shark/errors/dns_zero_counter_error.py
|
jmiiller/dns_shark
| 3
|
12787128
|
from dns_shark.errors.dns_shark_error import DNSSharkError
class DNSZeroCounterError(DNSSharkError):
"""
An error that occurs when the resolver counter is set to 0. Indicates that either an abnormally long resolution
occurred or the resolver is in an infinite loop.
"""
| 2.203125
| 2
|
python/paddle/incubate/complex/tensor_op_patch.py
|
joey12300/Paddle
| 2
|
12787129
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from ...fluid import framework
from . import tensor
def monkey_patch_math_complex():
    # ComplexVariable does not support scalar types yet, so this does not contain
    # reverse methods, such as "__radd__", "__rsub__", "__rmul__", "__rdiv__",
# "__rtruediv__", "__rmatmul__".
complex_methods = [
('__add__', _binary_creator_('__add__', "elementwise_add", False)),
('__sub__', _binary_creator_('__sub__', "elementwise_sub", False)),
('__mul__', _binary_creator_('__mul__', "elementwise_mul", False)),
('__div__', _binary_creator_('__div__', "elementwise_div", False)),
('__truediv__', _binary_creator_('__truediv__', "elementwise_div",
False)),
('__matmul__', _binary_creator_('__matmul__', "matmul", False)),
]
for method in complex_methods:
method_name = method[0]
method_impl = method[1]
if method_impl:
setattr(framework.ComplexVariable, method_name, method_impl)
for method in tensor.__all__:
method_impl = getattr(tensor, method)
if method_impl:
setattr(framework.ComplexVariable, method, method_impl)
# for binary operator such as elementwise
def _binary_creator_(method_name, op_type, reverse=False):
def __impl__(self, other_var):
math_op = getattr(tensor, op_type)
return math_op(self, other_var)
__impl__.__name__ = method_name
return __impl__
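# Illustrative note (not part of the original module): once
# monkey_patch_math_complex() has run, an expression such as `a + b` on two
# ComplexVariable instances dispatches through the generated `__add__`, which
# simply forwards to tensor.elementwise_add(a, b); the same closure pattern
# applies to the other operators listed above.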
| 1.835938
| 2
|
tests/unit/metrics_tests/test_object_keypoint_similarity.py
|
Joeper214/blueoil
| 248
|
12787130
|
<filename>tests/unit/metrics_tests/test_object_keypoint_similarity.py<gh_stars>100-1000
# -*- coding: utf-8 -*-
# Copyright 2018 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import pytest
import numpy as np
from blueoil.metrics.object_keypoint_similarity import compute_object_keypoint_similarity, _compute_oks
# Apply set_test_environment() in conftest.py to all tests in this file.
pytestmark = pytest.mark.usefixtures("set_test_environment")
def test_compute_oks():
# case1
joints_gt = np.zeros((17, 3))
joints_pred = np.zeros((17, 3))
image_size = (160, 160)
joints_gt[0, 0] = 80
joints_gt[0, 1] = 80
joints_gt[0, 2] = 1
joints_pred[0, 0] = 70
joints_pred[0, 1] = 70
joints_pred[0, 2] = 1
joints_pred[2, 0] = 1000
joints_pred[2, 1] = 1000
joints_pred[2, 2] = 1
expected = 0.2358359
result = _compute_oks(joints_gt, joints_pred, image_size)
assert np.allclose(result, expected)
# case2
joints_gt = np.zeros((17, 3))
joints_pred = np.zeros((17, 3))
image_size = (160, 160)
joints_gt[0, 0] = 80
joints_gt[0, 1] = 80
joints_gt[0, 2] = 0
joints_pred[0, 0] = 70
joints_pred[0, 1] = 70
joints_pred[0, 2] = 1
joints_pred[2, 0] = 1000
joints_pred[2, 1] = 1000
joints_pred[2, 2] = 1
expected = -1
result = _compute_oks(joints_gt, joints_pred, image_size)
assert np.allclose(result, expected)
# case3
joints_gt = np.zeros((17, 3))
joints_pred1 = np.zeros((17, 3))
joints_pred2 = np.zeros((17, 3))
image_size = (160, 160)
joints_gt[0, 0] = 80
joints_gt[0, 1] = 80
joints_gt[0, 2] = 1
joints_pred1[0, 0] = 70
joints_pred1[0, 1] = 70
joints_pred1[0, 2] = 1
joints_pred2[0, 0] = 78
joints_pred2[0, 1] = 78
joints_pred2[0, 2] = 1
result1 = _compute_oks(joints_gt, joints_pred1, image_size)
result2 = _compute_oks(joints_gt, joints_pred2, image_size)
assert result2 > result1
def test_compute_object_keypoint_similarity():
# case1
joints_gt = np.zeros((1, 17, 3))
joints_pred = np.zeros((1, 17, 3))
image_size = (160, 160)
joints_gt[0, 0, 0] = 80
joints_gt[0, 0, 1] = 80
joints_gt[0, 0, 2] = 1
joints_pred[0, 0, 0] = 70
joints_pred[0, 0, 1] = 70
joints_pred[0, 0, 2] = 1
expected = 0.2358359
result = compute_object_keypoint_similarity(joints_gt, joints_pred, image_size)
assert np.allclose(result, expected)
# case2
joints_gt = np.zeros((2, 17, 3))
joints_pred = np.zeros((2, 17, 3))
image_size = (160, 160)
joints_gt[0, 0, 0] = 80
joints_gt[0, 0, 1] = 80
joints_gt[0, 0, 2] = 1
joints_pred[0, 0, 0] = 70
joints_pred[0, 0, 1] = 70
joints_pred[0, 0, 2] = 1
joints_gt[1, 0, 0] = 50
joints_gt[1, 0, 1] = 50
joints_gt[1, 0, 2] = 1
joints_pred[1, 0, 0] = 50
joints_pred[1, 0, 1] = 50
joints_pred[1, 0, 2] = 1
expected = 0.61791795
result = compute_object_keypoint_similarity(joints_gt, joints_pred, image_size)
assert np.allclose(result, expected)
# case3
joints_gt = np.zeros((2, 17, 3))
joints_pred = np.zeros((2, 17, 3))
image_size = (160, 160)
joints_gt[0, 0, 0] = 80
joints_gt[0, 0, 1] = 80
joints_pred[0, 0, 0] = 70
joints_pred[0, 0, 1] = 70
joints_pred[0, 0, 2] = 1
joints_gt[1, 0, 0] = 50
joints_gt[1, 0, 1] = 50
joints_pred[1, 0, 0] = 50
joints_pred[1, 0, 1] = 50
joints_pred[1, 0, 2] = 1
try:
compute_object_keypoint_similarity(joints_gt, joints_pred, image_size)
except ValueError:
pass
if __name__ == '__main__':
test_compute_oks()
test_compute_object_keypoint_similarity()
| 2.328125
| 2
|
minecraft_py/game.py
|
plave0/petnica-oop-ex
| 2
|
12787131
|
from world import World
from player import Player
import mc_objects as mco
class Game:
def __init__(self) -> None:
self.world = World(20, 20)
self.player = Player(self.world)
print("Game started.")
def play(self):
stone_block = mco.Block(mco.Blocks.Stone)
pick = mco.Tool(mco.Tools.Pickaxe)
shovel = mco.Tool(mco.Tools.Shovel)
self.player.add_to_inv(stone_block, 0, 1)
self.player.use(0, 16, 0)
self.player.add_to_inv(pick, 1, 1)
self.player.select_object(1)
self.player.hit(0, 16, 0)
self.player.hit(0, 15, 0)
self.player.hit(0, 14, 0)
self.player.hit(0, 13, 0)
self.player.hit(0, 12, 0)
self.player.hit(0, 11, 0)
| 2.859375
| 3
|
tests/test_extractor.py
|
SiggiGue/sigfeat
| 8
|
12787132
|
<filename>tests/test_extractor.py
import pytest
from sigfeat.base import Feature
from sigfeat.extractor import Extractor
from sigfeat.source.array import ArraySource
from sigfeat.sink import DefaultDictSink
class A(Feature):
_started = False
def on_start(self, *args):
self._started = True
def process(self, data, fdata):
return int(data[0])
def test_extractor_with_sink():
ex = Extractor(A(), A(name='hidden_a').hide())
sc = ArraySource(
list(range(10)),
blocksize=1,
overlap=0,
samplerate=1)
sk = DefaultDictSink()
ex.extract(sc, sk)
assert list(sk['results']['A']) == list(range(10))
assert any(sk['hiddenfeatures'])
assert any(sk['features'])
def test_extractor_without_sink():
ex = Extractor(A())
sc = ArraySource(
list(range(10)),
blocksize=1,
overlap=0,
samplerate=1)
for i, res in enumerate(ex.extract(sc)):
assert i == res['A']
ex.reset()
assert ex.featureset['A']._started is False
if __name__ == '__main__':
pytest.main() # pragma: no coverage
| 2.125
| 2
|
Q37/sol.py
|
shivamT95/projecteuler
| 0
|
12787133
|
<filename>Q37/sol.py
import math
def is_prime(n):
if n == 1:
return False
if n % 2 == 0 and n > 2:
return False
return all(n % i for i in range(3, int(math.sqrt(n))+1,2))
def check(n):
tn = n
tp = 1
while(n != 0):
if(is_prime(n) == False):
return False
n = n//10
tp = tp*10
while(tn != 0):
if(is_prime(tn) == False):
return False
tn = tn % tp
tp = tp//10
return True
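# Example (illustrative): check(3797) is True, since truncating from the right
# gives 3797, 379, 37, 3 and truncating from the left gives 797, 97, 7,
# and every truncation is prime.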
ans = 0
for i in range(11,1000000):
if(is_prime(i)):
if(check(i)):
ans = ans+i
print(ans)
| 3.671875
| 4
|
Project Code 1.0/Power of soldiers/power_of_soldiers.py
|
mishrakeshav/Competitive-Programming
| 2
|
12787134
|
<reponame>mishrakeshav/Competitive-Programming
import math
from collections import deque
class Vertex:
def __init__(self, val):
self.val = val
self.connections = dict()
self.visited = False
self.previous = None
def setVisited(self, visited):
self.visited = visited
def setPrevious(self, val):
self.previous = val
def addConnection(self, node, w=0):
self.connections[node] = w
def isVisited(self):
return self.visited
def getConnections(self):
return self.connections
def __repr__(self):
return str(self.val)
class Graph:
def __init__(self, undirected=True):
self.g = dict()
self.undirected = undirected
self.size = 0
def addVertex(self, val):
self.g[val] = Vertex(val)
def addEdge(self, src, dst, w=0):
if src not in self.g:
self.g[src] = Vertex(src)
if dst not in self.g:
self.g[dst] = Vertex(dst)
self.g[src].addConnection(dst, w)
if self.undirected:
self.g[dst].addConnection(src, w)
def getSize(self):
return len(self.g)
def getVertices(self):
return self.g.keys()
def __contains__(self, val):
return val in self.g
def __iter__(self):
return iter(self.g)
def getVertex(self, val):
return self.g.get(val, None)
def detect_cycles_in_the_graph(g):
def dfs(v, g):
v.setVisited(True)
for node in v.getConnections():
vertex = g.getVertex(node)
if vertex.isVisited():
return True
if dfs(vertex, g):
return True
v.setVisited(False)
return False
for node in g:
vertex = g.getVertex(node)
if dfs(vertex, g):
return True
return False
def topological_sorting(g):
def dfshelper(g, ordering, v):
for node in v.getConnections():
vertex = g.getVertex(node)
if vertex.isVisited():
continue
dfshelper(g, ordering, vertex)
v.setVisited(True)
ordering.appendleft(v.val)
def helper(g, ordering):
for node in g:
vertex = g.getVertex(node)
if vertex.isVisited():
continue
dfshelper(g, ordering, vertex)
ordering = deque()
helper(g, ordering)
return ordering
if __name__ == '__main__':
for t in range(int(input())):
n = int(input())
m = int(input())
g = Graph(undirected=False)
for i in range(m):
a, b = input().split()
g.addEdge(b, a)
        if detect_cycles_in_the_graph(g):
print("NO")
else:
print("YES")
order = topological_sorting(g)
for i in order:
print(i)
| 3.609375
| 4
|
api/resources/data/building/room.py
|
PiWatcher/pci-backend
| 0
|
12787135
|
<gh_stars>0
from datetime import *
from flask import jsonify, request
from flask_restful import Resource
from api.services.MongoManagerService import MongoManagerService as mms
from api.services.LoginAuthenticationService import LoginAuthenticationService as las
class ApiDataBuildingRoomLive(Resource):
def get(self):
'''
Grabs the data for the past hour for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_live_data(building_name, room)
return response
class ApiDataBuildingRoomDaily(Resource):
def get(self):
'''
Grabs the data for the past day for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_daily_data(building_name, room)
return response
class ApiDataBuildingRoomWeekly(Resource):
def get(self):
'''
Grabs the data for the past week for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_weekly_data(building_name, room)
return response
class ApiDataBuildingRoomMonthly(Resource):
def get(self):
'''
Grabs the data for the past month for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_monthly_data(building_name, room)
return response
class ApiDataBuildingRoomQuarterly(Resource):
def get(self):
'''
Grabs the data for the past quarter for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_quarterly_data(building_name, room)
return response
class ApiDataBuildingRoomYearly(Resource):
def get(self):
'''
Grabs the data for the past year for a room
@returns a response object
'''
building_name = request.args.get('building_name', type=str)
room = request.args.get('room', type=str)
response = mms().get_yearly_data(building_name, room)
return response
| 2.734375
| 3
|
Chapter06/src/features.py
|
jvstinian/Python-Reinforcement-Learning-Projects
| 114
|
12787136
|
import numpy as np
from config import GOPARAMETERS
def stone_features(board_state):
# 16 planes, where every other plane represents the stones of a particular color
# which means we track the stones of the last 8 moves.
features = np.zeros([16, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.uint8)
num_deltas_avail = board_state.board_deltas.shape[0]
cumulative_deltas = np.cumsum(board_state.board_deltas, axis=0)
last_eight = np.tile(board_state.board, [8, 1, 1])
last_eight[1:num_deltas_avail + 1] -= cumulative_deltas
last_eight[num_deltas_avail +1:] = last_eight[num_deltas_avail].reshape(1, GOPARAMETERS.N, GOPARAMETERS.N)
features[::2] = last_eight == board_state.to_play
features[1::2] = last_eight == -board_state.to_play
return np.rollaxis(features, 0, 3)
def color_to_play_feature(board_state):
# 1 plane representing which color is to play
# The plane is filled with 1's if the color to play is black; 0's otherwise
if board_state.to_play == GOPARAMETERS.BLACK:
return np.ones([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)
else:
return np.zeros([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)
def extract_features(board_state):
stone_feat = stone_features(board_state=board_state)
turn_feat = color_to_play_feature(board_state=board_state)
all_features = np.concatenate([stone_feat, turn_feat], axis=2)
return all_features
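# A minimal shape-check sketch (not part of the original module). It stands in
# for the real board_state with a fake object exposing the three attributes the
# functions above read (board, board_deltas, to_play); treat that stand-in as
# an assumption about how the real position object looks.
if __name__ == "__main__":
    class _FakeBoardState:
        board = np.zeros([GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.int8)
        board_deltas = np.zeros([0, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.int8)
        to_play = GOPARAMETERS.BLACK

    feats = extract_features(board_state=_FakeBoardState())
    print(feats.shape)  # expected: (N, N, 17), i.e. 16 stone planes + 1 color plane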
| 2.59375
| 3
|
tests/test_dnf.py
|
Quansight-Labs/python-moa
| 22
|
12787137
|
<filename>tests/test_dnf.py
import copy
import pytest
from moa import ast, dnf, testing
def test_add_indexing_node():
tree = ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ())
symbol_table = {'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6))}
expected_tree = ast.Node((ast.NodeSymbol.PSI,), (2, 3), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ())))
expected_symbol_table = {
'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 2, 1)),
'_i2': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2,), None, (ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i1',), ()), ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i2',), ()))),
}
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf.add_indexing_node)
def test_matches_rule_simple():
tree = ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ())
symbol_table = {'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6))}
context = ast.create_context(ast=tree, symbol_table=symbol_table)
rule = ((ast.NodeSymbol.ARRAY,),)
assert dnf.matches_rule(rule, context)
def test_not_matches_rule_simple():
tree = ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ())
symbol_table = {'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6))}
context = ast.create_context(ast=tree, symbol_table=symbol_table)
rule = ((ast.NodeSymbol.PSI,),)
assert not dnf.matches_rule(rule, context)
def test_matches_rule_nested():
tree = ast.Node((ast.NodeSymbol.TRANSPOSE,), None, (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ()),))
symbol_table = {'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6))}
context = ast.create_context(ast=tree, symbol_table=symbol_table)
rule = ((ast.NodeSymbol.TRANSPOSE,), (((ast.NodeSymbol.ARRAY,),),))
assert dnf.matches_rule(rule, context)
def test_not_matches_rule_nested():
tree = ast.Node((ast.NodeSymbol.TRANSPOSE,), None, (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2, 3), ('A',), ()),))
symbol_table = {'A': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3), None, (1, 2, 3, 4, 5, 6))}
context = ast.create_context(ast=tree, symbol_table=symbol_table)
rule = ((ast.NodeSymbol.TRANSPOSE,), (((ast.NodeSymbol.TRANSPOSE,),),))
assert not dnf.matches_rule(rule, context)
def test_reduce_psi_psi():
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_a1': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1,), None, (ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ()),)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (3,), None, (1, 2, 3)),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (1,), ('_a1',), ()),
ast.Node((ast.NodeSymbol.PSI,), (4,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ())))))
expected_symbol_table = {
**symbol_table,
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (1, 2, 3, ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ())))
}
expected_tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a4',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ())))
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_psi)
def test_reduce_psi_assign():
symbol_table = {
'_a1': ast.SymbolNode((ast.NodeSymbol.ARRAY,), (1,), None, (0, 1, 2)),
'_a2': ast.SymbolNode((ast.NodeSymbol.ARRAY,), (1, 2, 3), None, None),
'_a3': ast.SymbolNode((ast.NodeSymbol.ARRAY,), (1, 2, 3), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a1',), ()),
ast.Node((ast.NodeSymbol.ASSIGN,), (1, 2, 3), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3), ('_a2',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3), ('_a3',), ())))))
expected_tree = ast.Node((ast.NodeSymbol.ASSIGN,), (), (), (
ast.Node((ast.NodeSymbol.PSI,), (), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a1',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3), ('_a2',), ()))),
ast.Node((ast.NodeSymbol.PSI,), (), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a1',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3), ('_a3',), ())))))
testing.assert_transformation(tree, symbol_table, expected_tree, symbol_table, dnf._reduce_psi_assign)
def test_reduce_psi_transpose():
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 2, 1)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (
ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i0',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i1',), ()), 1, 0)),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.TRANSPOSE,), (4, 3, 2, 1), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ()),))))
expected_symbol_table = {
**symbol_table,
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (0, 1, ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i1',), ()), ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i0',), ()))),
}
expected_tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a4',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ())))
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_transpose)
def test_reduce_psi_transposev():
symbol_table = {
'_a0': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (3, 2, 1, 1)),
'_a1': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (4, 2, 3, 1)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2, 3, 5, 7), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a0',), ()),
ast.Node((ast.NodeSymbol.TRANSPOSEV,), (7, 3, 5, 2), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a1',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (2, 3, 5, 7), ('_a2',), ())))))
expected_symbol_table = {
**symbol_table,
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (1, 2, 1, 3))
}
expected_tree = ast.Node((ast.NodeSymbol.PSI,), (), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (2, 3, 5, 7), ('_a2',), ())))
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_transposev)
@pytest.mark.parametrize("operation", [
ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS,
ast.NodeSymbol.DIVIDE, ast.NodeSymbol.TIMES,
])
def test_reduce_psi_outer_plus_minus_times_divide_equal_shape(operation):
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 4, 1)),
'_i2': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 5, 1)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ()),
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()),
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i2',), ()))),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (3,), None, None),
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4, 5), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.DOT, operation), (3, 4, 5), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (4, 5), ('_a4',), ())))))
expected_symbol_table = {
**symbol_table,
'_a6': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1,), None, (ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ()),)),
'_a7': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2,), None, (
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()),
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i2',), ()),
)),
}
expected_tree = ast.Node((operation,), (0,), (), (
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (1,), ('_a6',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a3',), ()))),
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a7',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (4, 5), ('_a4',), ())))))
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_outer_plus_minus_times_divide)
@pytest.mark.parametrize("operation", [
(ast.NodeSymbol.PLUS, ast.NodeSymbol.PLUS),
(ast.NodeSymbol.MINUS, ast.NodeSymbol.PLUS),
(ast.NodeSymbol.DIVIDE,ast.NodeSymbol.PLUS),
(ast.NodeSymbol.TIMES, ast.NodeSymbol.PLUS),
(ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS),
(ast.NodeSymbol.MINUS, ast.NodeSymbol.MINUS),
(ast.NodeSymbol.DIVIDE,ast.NodeSymbol.MINUS),
(ast.NodeSymbol.TIMES, ast.NodeSymbol.MINUS),
(ast.NodeSymbol.PLUS, ast.NodeSymbol.TIMES),
(ast.NodeSymbol.MINUS, ast.NodeSymbol.TIMES),
(ast.NodeSymbol.DIVIDE,ast.NodeSymbol.TIMES),
(ast.NodeSymbol.TIMES, ast.NodeSymbol.TIMES),
(ast.NodeSymbol.PLUS, ast.NodeSymbol.DIVIDE),
(ast.NodeSymbol.MINUS, ast.NodeSymbol.DIVIDE),
(ast.NodeSymbol.DIVIDE,ast.NodeSymbol.DIVIDE),
(ast.NodeSymbol.TIMES, ast.NodeSymbol.DIVIDE),
])
def test_reduce_psi_inner_plus_minus_times_divide_equal_shape(operation):
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 5, 1)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ()),
ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()))),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (3, 4), None, None),
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4, 5), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.DOT, *operation), (3, 5), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3, 4), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (4, 5), ('_a4',), ())))))
expected_symbol_table = {
**symbol_table,
'_i5': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 4, 1)),
'_a6': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2,), None, (ast.Node((ast.NodeSymbol.INDEX,), (), ('_i0',), ()), ast.Node((ast.NodeSymbol.INDEX,), (), ('_i5',), ()))),
'_a7': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2,), None, (ast.Node((ast.NodeSymbol.INDEX,), (), ('_i5',), ()), ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()))),
}
expected_tree = ast.Node((ast.NodeSymbol.REDUCE, operation[0]), (0,), ('_i5',), (
ast.Node((operation[1],), (0,), (), (
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a6',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (3, 4), ('_a3',), ()))),
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a7',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (4, 5), ('_a4',), ()))))),))
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_inner_plus_minus_times_divide)
@pytest.mark.parametrize("operation", [
ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS,
ast.NodeSymbol.DIVIDE, ast.NodeSymbol.TIMES,
])
def test_reduce_psi_reduce_plus_minus_times_divide_equal_shape(operation):
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 4, 1)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 5, 1)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (2,), None, (ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i0',), ()), ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i1',), ()))),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (3, 4, 5), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (2,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.REDUCE, operation), (4, 5), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3, 4, 5), ('_a3',), ()),))))
expected_tree = ast.Node((ast.NodeSymbol.REDUCE, operation), (0,), ('_i4',), (
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (3,), ('_a5',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (3, 4, 5), ('_a3',), ()))),))
expected_symbol_table = {
**symbol_table,
'_i4': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3, 1)),
'_a5': ast.SymbolNode(ast.NodeSymbol.ARRAY, (3,), None, (ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i4',), ()), ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i0',), ()), ast.Node((ast.NodeSymbol.ARRAY,), (), ('_i1',), ()))),
}
testing.assert_transformation(tree, symbol_table, expected_tree, expected_symbol_table, dnf._reduce_psi_reduce_plus_minus_times_divide)
@pytest.mark.parametrize("operation", [
ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS,
ast.NodeSymbol.DIVIDE, ast.NodeSymbol.TIMES,
])
def test_reduce_psi_plus_minus_times_divide_equal_shape(operation):
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 4)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (1, 2, ast.Node(ast.NodeSymbol.INDEX, (), ('_i0',), ()), ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()))),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None, None),
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((operation,), (1, 2, 3, 4), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a4',), ())))))
expected_tree = ast.Node((operation,), (0,), (), (
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a3',), ()))),
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a4',), ())))))
testing.assert_transformation(tree, symbol_table, expected_tree, symbol_table, dnf._reduce_psi_plus_minus_times_divide)
@pytest.mark.parametrize("operation", [
ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS,
ast.NodeSymbol.DIVIDE, ast.NodeSymbol.TIMES,
])
def test_reduce_psi_plus_minus_times_divide_scalar(operation):
symbol_table = {
'_i0': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 3)),
'_i1': ast.SymbolNode(ast.NodeSymbol.INDEX, (), None, (0, 4)),
'_a2': ast.SymbolNode(ast.NodeSymbol.ARRAY, (4,), None, (1, 2, ast.Node(ast.NodeSymbol.INDEX, (), ('_i0',), ()), ast.Node((ast.NodeSymbol.INDEX,), (), ('_i1',), ()))),
'_a3': ast.SymbolNode(ast.NodeSymbol.ARRAY, (), None, None),
'_a4': ast.SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None, None),
}
tree = ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((operation,), (1, 2, 3, 4), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (), ('_a3',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a4',), ())))))
expected_tree = ast.Node((operation,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (), ('_a3',), ()),
ast.Node((ast.NodeSymbol.PSI,), (0,), (), (
ast.Node((ast.NodeSymbol.ARRAY,), (4,), ('_a2',), ()),
ast.Node((ast.NodeSymbol.ARRAY,), (1, 2, 3, 4), ('_a4',), ())))))
testing.assert_transformation(tree, symbol_table, expected_tree, symbol_table, dnf._reduce_psi_plus_minus_times_divide)
# @pytest.mark.parametrize("operation", [
# ast.NodeSymbol.PLUS, ast.NodeSymbol.MINUS,
# ast.NodeSymbol.DIVIDE, ast.NodeSymbol.TIMES,
# ])
# def test_reduce_psi_plus_minus_times_divide_scalar(operation):
# symbol_table = {
# '_i0': SymbolNode(ast.NodeSymbol.INDEX, (), (0, 3)),
# '_i1': SymbolNode(ast.NodeSymbol.INDEX, (), (0, 4)),
# '_a2': SymbolNode(ast.NodeSymbol.ARRAY, (4,), (1, 2, '_i0', '_i1')),
# '_a3': SymbolNode(ast.NodeSymbol.ARRAY, (), None),
# '_a4': SymbolNode(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), None),
# }
# symbol_table_copy = copy.deepcopy(symbol_table)
# tree = Node(ast.NodeSymbol.PSI, (0,),
# Node(ast.NodeSymbol.ARRAY, (4,), '_a2'),
# Node(operation, (1, 2, 3, 4),
# Node(ast.NodeSymbol.ARRAY, (), '_a3'),
# Node(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), '_a4')))
# expected_tree = Node(operation, (0,),
# Node(ast.NodeSymbol.ARRAY, (), '_a3'),
# Node(ast.NodeSymbol.PSI, (0,),
# Node(ast.NodeSymbol.ARRAY, (4,), '_a2'),
# Node(ast.NodeSymbol.ARRAY, (1, 2, 3, 4), '_a4')))
# new_symbol_table, new_tree = _reduce_psi_plus_minus_times_divide(symbol_table, tree)
# assert symbol_table_copy == symbol_table
# assert new_tree == expected_tree
# assert new_symbol_table == symbol_table
| 2.25
| 2
|
python/python/collections/queue_with_stacks.py
|
othonreyes/code_problems
| 0
|
12787138
|
<reponame>othonreyes/code_problems
class Queue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def add(self, n):
if len(self.stack2) > 0:
self.empty(self.stack2, self.stack1)
self.stack1.append(n)
def remove(self):
if len(self.stack1) > 0:
self.empty(self.stack1, self.stack2)
return self.stack2.pop()
def empty(self, source, target):
while len(source) > 0:
target.append(source.pop())
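# Minimal usage sketch (added for illustration; not part of the original file):
# demonstrates the FIFO ordering produced by the two internal LIFO stacks.
if __name__ == "__main__":
    q = Queue()
    q.add(1)
    q.add(2)
    q.add(3)
    assert q.remove() == 1  # the oldest element comes out first
    assert q.remove() == 2
    q.add(4)                # adding again moves the leftovers back to stack1
    assert q.remove() == 3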
| 3.828125
| 4
|
preprocessing.py
|
shreyagummadi/Traffic-Sign-Detection-and-Recognition
| 0
|
12787139
|
<filename>preprocessing.py
import cv2
import numpy as np
def preprocess(img):
# img = cv2.fastNlMeansDenoisingColored(img)
R = img[:,:,2]
R = cv2.medianBlur(R,5)
G = img[:,:,1]
G = cv2.medianBlur(G,5)
B = img[:,:,0]
B = cv2.medianBlur(B,5)
im = np.zeros((600,600))
R = cv2.normalize(R, im , 0, 255, cv2.NORM_MINMAX).astype(float)
G = cv2.normalize(G, im, 0, 255, cv2.NORM_MINMAX).astype(float)
B = cv2.normalize(B, im, 0, 255, cv2.NORM_MINMAX).astype(float)
x1 = R-B
y1 = R-G
x2 = B-R
y2 = B-G
z1 = []
z2 = []
for j in range(x1.shape[0]):
for k in range(x1.shape[1]):
if x1[j,k] < y1[j,k]:
z1.append(x1[j,k])
else:
z1.append(y1[j,k])
z1 = np.reshape(z1,(600,600))
red_norm = []
for j in range(z1.shape[0]):
for k in range(z1.shape[1]):
if z1[j,k]>0:
red_norm.append(z1[j,k])
else:
red_norm.append(0)
red_norm = np.reshape(red_norm,(600,600)).astype(np.uint8)
for j in range(x2.shape[0]):
for k in range(x2.shape[1]):
if x2[j,k] < y2[j,k]:
z2.append(x2[j,k])
else:
z2.append(y2[j,k])
z2 = np.reshape(z2,(600,600))
blue_norm = []
for j in range(z2.shape[0]):
for k in range(z2.shape[1]):
if z2[j,k]>0:
blue_norm.append(z2[j,k])
else:
blue_norm.append(0)
blue_norm = np.reshape(blue_norm,(600,600)).astype(np.uint8)
return red_norm,blue_norm
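# Vectorized sketch (added for illustration; not part of the original file).
# The nested loops above compute an elementwise minimum followed by clamping
# at zero, which NumPy can express directly and independently of frame size:
def preprocess_vectorized(img):
    # median-blur and min-max normalize each channel, then threshold the
    # colour differences to highlight red and blue regions
    B, G, R = (cv2.normalize(cv2.medianBlur(img[:, :, i], 5), None, 0, 255,
                             cv2.NORM_MINMAX).astype(float) for i in range(3))
    red_norm = np.clip(np.minimum(R - B, R - G), 0, None).astype(np.uint8)
    blue_norm = np.clip(np.minimum(B - R, B - G), 0, None).astype(np.uint8)
    return red_norm, blue_norm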
| 2.75
| 3
|
Utils/py/naoth/naoth/utils/_debug.py
|
BerlinUnited/NaoTH
| 15
|
12787140
|
import sys
import struct
import asyncio
import threading
import functools
from concurrent.futures import Future
from .. import pb
__all__ = ['DebugProxy', 'DebugCommand', 'AgentController']
class DebugProxy(threading.Thread):
"""
Debugging class, creating a connection between RobotControl and the Nao (naoth agent) which can be used to
* print out the communication between the naoth agent and the controlling instance (eg. RobotControl) -- set
:param:`print_cmd` to True
* create a connection which never 'dies'; even when the naoth agent restarts/dies, the connection to the controlling
instance is kept
* accept multiple connections from controlling instances to the naoth agent, allowing -- for example -- two
RobotControl instances to be connected to the robot; each command is distributed accordingly
An extra module is also available to start the proxy directly from the command line::
python -m naoth.utils.DebugProxy host [port] [--target port] [--print]
It is also possible to instantiate the proxy in the interactive shell or another python script to get slightly more
control over when the proxy is started and stopped::
>>> import naoth
>>> p = naoth.utils.DebugProxy('localhost', print_cmd=True, start=False)
>>> # ...
>>> p.start()
>>> # ...
>>> p.stop()
"""
def __init__(self, agent_host, agent_port=5401, dest_port=7777, print_cmd=False, start=True):
"""
Initializes the class variables and starts the thread immediately if :param:`start` is set to True (default).
:param agent_host: the host name or ip address of naoth agent (robot, dummysimulator, ...); eg. "localhost"
:param agent_port: the debug port of the naoth agent; default is 5401
:param dest_port: the port which should be opened to allow controlling applications to connect to (eg. RC)
:param print_cmd: set to True to print out all commands and responses going through the proxy.
:param start: whether the thread should start immediately (default) or not
"""
super().__init__()
# the agent thread is only started, if there's at least one connected host
self._robot = None
self._robot_host = agent_host
self._robot_port = agent_port
self._host_host = 'localhost'
self._host_port = dest_port
self._print = print_cmd
self._loop = asyncio.new_event_loop()
self._hosts = []
self._host_listener = None
self._host_connection_cnt = 0
# start thread immediately
if start: self.start()
def run(self) -> None:
"""
The thread main loop.
Sets the asyncio loop of the thread, starts a listener server on the dest_port and runs until cancelled.
"""
# set the event loop to this thread
asyncio.set_event_loop(self._loop)
# start host listener server and 'wait' until the server is started
self._host_listener = self._loop.run_until_complete(start_server(self._host, self._host_host, self._host_port))
# run until cancelled
self._loop.run_forever()
def stop(self, timeout=None) -> None:
"""
Stops the (running) thread and blocks until finished or until the optional timeout occurs.
Since this method is called from another thread, the stop request is scheduled as task on the main loop and
executed some ms later. Also `join` is called on the thread to wait until the thread is actually finished.
The timeout can be used to make sure the main program continues, if an error prevents terminating this
thread -- though that shouldn't happen.
"""
if self._loop.is_running():
self._loop.call_soon_threadsafe(lambda: self._loop.create_task(self._stop_internal()))
self.join(timeout)
async def _stop_internal(self) -> None:
"""
The (internal) scheduled stop request task called by `stop()`.
It stops the listener server, closes all open connections and stops the main loop, which causes the thread to
finish.
"""
# shutdown host listener server to prevent new connections
if self._host_listener:
self._host_listener.close()
await self._host_listener.wait_closed()
# cancel all established connections
# NOTE: the connection to the agent is stopped with the last host connection
for task in self._hosts:
task.cancel()
await task
self._loop.stop()
def _register_host(self) -> None:
"""
Registers a new host connection and sets the name of the task.
The connection to the naoth instance is only started if a host is connected to the proxy. This prevents
blocking the naoth instance unnecessarily with a direct connection when the proxy has nothing to do.
"""
self._host_connection_cnt += 1
#asyncio.Task.current_task().set_name('Host-{}'.format(self._host_connection_cnt)) # 3.8+
self._hosts.append(asyncio.Task.current_task())
if self._robot is None:
self._robot = AgentController(self._robot_host, self._robot_port)
self._robot.wait_connected() # TODO: is this reasonable???
def _unregister_host(self) -> None:
"""
Unregisters the host from the proxy.
If there are no other active host connections, the naoth agent controller/connection is stopped -- to prevent
blocking unused resources (naoth instance).
"""
self._hosts.remove(asyncio.Task.current_task())
if len(self._hosts) == 0:
if self._robot is not None:
self._robot.stop()
self._robot = None
async def _host(self, stream_reader, stream_writer) -> None:
"""
The actual task, which handles the host connection to the proxy.
It reads all debug commands sent from the host and relays them to the connected naoth instance. The
response is returned to the host.
If enabled, the received command and the response are printed to the terminal.
The task runs as long as the connection to the host is active or until the proxy thread is stopped.
"""
self._register_host()
while True:
try:
raw_id = await stream_reader.read(4)
# connection is closed/lost
if raw_id == b'': break
cmd_id = struct.unpack('=l', raw_id)[0]
# check if command is not just a heart beat
if cmd_id != -1:
raw_length = await stream_reader.read(4)
cmd_length = struct.unpack('=l', raw_length)[0]
raw_data = await stream_reader.read(cmd_length)
cmd = DebugCommand.deserialize(raw_data)
cmd.id = cmd_id
# NOTE: the callback is executed in the agent thread!
cmd.add_done_callback(functools.partial(self._response_handler, stream_writer))
self._robot.send_command(cmd)
if self._print:
print(cmd)
except asyncio.CancelledError: # task cancelled
break
except Exception as e:
print('Host-Task:', e)
# close the connection to the host before exiting
stream_writer.close()
await stream_writer._protocol._get_close_waiter(stream_writer) # HACK: in order to work with < 3.7
self._unregister_host()
def _response_handler(self, stream, cmd) -> None:
"""
Helper method in order to transfer the command from agent thread back to 'this' thread -- since the callback is
called in the agent thread. This can cause a delay of ~0.5 ms.
"""
self._loop.call_soon_threadsafe(lambda: self._response_writer(stream, cmd))
def _response_writer(self, stream, cmd) -> None:
"""Writes the response of the command back to the requesting host."""
# TODO: what to todo, if the command got cancelled?!?
if stream and not cmd.cancelled():
stream.write(struct.pack("<I", cmd.id) + struct.pack("<I", len(cmd.result())) + cmd.result())
if self._print:
print(cmd)
class DebugCommand(Future):
"""
Class representing a debug command for a naoth agent.
It is a Future and can be waited for the response.
"""
def __init__(self, name: str, args=None):
"""
Constructor for the command.
:param name: the name of the command
:param args: additional argument(s) of the command as string or list of string/tuples (name and value)
"""
super().__init__()
self._id = 0
self._name = name
self._args = []
# args can be a string, tuple or a list
if isinstance(args, (str, tuple)):
self._args.append(args)
elif isinstance(args, list):
self._args.extend(args)
@property
def id(self) -> int:
"""Returns command id."""
return self._id
@id.setter
def id(self, value: int) -> None:
"""Sets the command id."""
self._id = value
@property
def name(self) -> str:
"""Returns the name of this command."""
return self._name
def add_arg(self, arg) -> None:
"""
Adds an argument to this command.
:param arg: this can be a simple string or a tuple of two strings (argument name and value).
"""
self._args.append(arg)
def serialize(self) -> bytes:
"""
Serializes the command to a byte representation in order to send it to the agent.
:return: returns the bytes representation of this command
"""
cmd_args = []
if self._args:
for a in self._args:
if isinstance(a, str):
cmd_args.append(pb.Messages_pb2.CMDArg(name=a))
else:
cmd_args.append(pb.Messages_pb2.CMDArg(name=a[0], bytes=a[1].encode()))
proto = pb.Messages_pb2.CMD(name=self.name, args=cmd_args)
return struct.pack("<I", self.id) + struct.pack("<I", proto.ByteSize()) + proto.SerializeToString()
@staticmethod
def deserialize(data) -> 'DebugCommand':
"""
Parses the given data and returns an instance of DebugCommand.
:param data: byte string of a serialized debug command
"""
proto = pb.Messages_pb2.CMD()
proto.ParseFromString(data)
return DebugCommand(proto.name, [(arg.name, arg.bytes.decode()) for arg in proto.args])
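# Illustrative round trip (added sketch; not part of the original file):
#   cmd = DebugCommand('help', 'ping')
#   raw = cmd.serialize()                      # <4-byte id><4-byte size><CMD protobuf>
#   same = DebugCommand.deserialize(raw[8:])   # deserialize expects only the protobuf part
#   assert same.name == 'help'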
def __str__(self) -> str:
"""Returns the string representation of this command."""
str_builder = [self.__class__.__name__, '-', str(self.id), ' [', self._state, ']: ', self.name]
str_args = ", ".join(map(lambda a: self._str_args_helper(a), self._args))
if str_args:
str_builder.append(' ( ')
str_builder.append(str_args)
str_builder.append(' )')
if self.done() and len(self.result()) > 0:
str_builder.append(' {\n')
try:
str_builder.append(self.result().decode('utf-8').strip())
except:
str_builder.append(str(self.result()))
str_builder.append('\n}')
return ''.join(str_builder)
@staticmethod
def _str_args_helper(arg) -> str:
"""Helper method to format the command arguments for the `__str__` method."""
if isinstance(arg, str):
return arg
return arg[0] + ('' if len(arg[1]) == 0 else ': ' + repr(arg[1]))
class AgentController(threading.Thread):
"""
Class to establish a connection to a naoth agent and send DebugRequests to it - just like RobotControl does.
An instance can be created in an interactive shell or script and used to send debug requests to the robot:
>>> import naoth, time
>>> a = naoth.utils.AgentController('localhost', 5401)
>>> a.wait_connected()
>>> a.representation('PlayerInfo').add_done_callback(print)
>>> c = a.agent('soccer_agent')
>>> c.add_done_callback(print)
>>> c = a.debugrequest('gamecontroller:blow_whistle', True) # debug request for cognition
>>> r = c.result() # blocks until result is available
>>> a.debugrequest('Plot:Motion.Cycle', True, 'motion') # debug request for motion
>>> a.debugrequests([ \
('gamecontroller:gamephase:normal', True), \
('gamecontroller:game_state:penalized', True), \
('gamecontroller:set_play:pushing_free_kick', True), \
('gamecontroller:secondaryde:30', True) \
])
>>> a.behavior() # BehaviorStateSparse
>>> a.behavior(True) # BehaviorStateComplete
>>> a.module('FakeBallDetector', True)
>>> a.module('ArmCollisionDetector2018', True, 'motion')
>>> a.send_command(naoth.utils.DebugCommand('help', 'ping')).add_done_callback(print)
>>> c = naoth.utils.DebugCommand('Cognition:representation:list')
>>> a.send_command(c)
>>> c.result() # blocks until result is available
>>> a.stop() # stop the agent gracefully
"""
def __init__(self, host, port=5401, start=True):
"""
Initializes the class variables and starts the thread immediately if :param:`start` is set to True (default).
:param host: the host name or ip address of naoth agent (robot, dummysimulator, ...); eg. "localhost"
:param port: the debug port of the naoth agent; default is 5401
:param start: whether the thread should start immediately (default) or not
"""
super().__init__()
self._host = host
self._port = port
self._stream_reader = None
self._stream_writer = None
self._tasks = []
self._loop = asyncio.new_event_loop()
self._cmd_id = 1
self._cmd_q = asyncio.Queue(loop=self._loop)
self._cmd_m = {}
self._connected = threading.Event()
self._connected_internal = asyncio.Event(loop=self._loop)
# start thread immediately
if start: self.start()
def run(self) -> None:
"""
The thread main loop.
Sets the asyncio loop of the thread, starts all necessary tasks and runs until cancelled.
"""
# set the event loop to this thread
asyncio.set_event_loop(self._loop)
# schedule tasks
self._tasks.append(self._loop.create_task(self._connect())) # name='Connection listener'
self._tasks.append(self._loop.create_task(self._send_heart_beat())) # name='Heart beat'
self._tasks.append(self._loop.create_task(self._poll_answers())) # name='Poll answers'
self._tasks.append(self._loop.create_task(self._send_commands())) # name='Send commands'
# run tasks cooperatively and wait 'till loop is stopped
self._loop.run_forever()
self._set_connected(False)
def stop(self, timeout=None) -> None:
"""
Stops the (running) thread and blocks until finished or until the optional timeout occurs.
Since this method is called from another thread, the stop request is scheduled as task on the main loop and
executed some ms later. Also `join` is called on the thread to wait until the thread is actually finished.
The timeout can be used to make sure the main program continues, if an error prevents terminating this
thread -- though that shouldn't happen.
"""
if self._loop.is_running():
self._loop.call_soon_threadsafe(lambda: self._loop.create_task(self._stop_internal()))
self.join(timeout)
async def _stop_internal(self) -> None:
"""
The (internal) scheduled stop request task called by `stop()`.
It stops all scheduled tasks in reverse order and stops the main loop, which causes the thread to finish.
"""
for task in reversed(self._tasks):
try:
task.cancel()
await task
except Exception as e:
print('Stop agent:', task.get_name() if hasattr(task, 'get_name') else task, e, file=sys.stderr)
self._loop.stop()
def is_connected(self) -> bool:
"""Returns True, if the thread is connected to the naoth instance, False otherwise."""
return self._connected.is_set()
def _assert_is_alive(self):
"""Asserts, that this thread is alive, otherwise a runtime error is raised."""
if not self.is_alive():
raise RuntimeError(self.__class__.__name__ + " must be alive and running!")
def wait_connected(self, timeout=None) -> None:
"""
Blocks until the thread is connected to the naoth agent or until the optional timeout occurs.
If the thread wasn't started or isn't alive anymore, a runtime error is raised.
"""
self._assert_is_alive()
self._connected.wait(timeout)
def _set_connected(self, state: bool) -> None:
"""Internal helper method to handle the connection state."""
if state:
self._connected_internal.set()
self._connected.set()
else:
self._connected.clear()
self._connected_internal.clear()
if self._stream_writer:
self._stream_writer.close()
async def _connect(self) -> None:
"""Connection task, which is used to (re-)establish the connection to the naoth agent."""
while True:
try:
# (try to) establish connection or raise exception
self._stream_reader, \
self._stream_writer = await open_connection(host=self._host, port=self._port)
# update internal & external connection state
self._set_connected(True)
# wait 'till the connection is 'closed' (lost?)
await self._stream_writer._protocol._get_close_waiter(self._stream_writer) # HACK: in order to work with < 3.7
# reset the streams
self._stream_reader = None
self._stream_writer = None
except asyncio.CancelledError:
break
except OSError:
# task can be cancelled while sleeping ...
try:
# connection failed, wait before next connection attempt
await asyncio.sleep(1)
except asyncio.CancelledError:
break
except Exception as e: # unexpected exception
print('Connection listener:', e, file=sys.stderr)
except Exception as e: # unexpected exception
print('Connection listener:', e, file=sys.stderr)
finally:
# empty the queue and set an exception, since we don't have a connection
while not self._cmd_q.empty():
self._cmd_q.get_nowait().set_exception(Exception('Not connected to the agent!'))
if self._stream_writer:
self._stream_writer.close()
#await self._stream_writer.wait_closed() # NOTE: this doesn't complete?!
#await self._stream_writer._protocol._get_close_waiter(self._stream_writer) # HACK: in order to work with < 3.7
async def _send_heart_beat(self) -> None:
"""Task to regularly (1s) send a heart beat to the agent."""
while True:
try:
await self._connected_internal.wait()
self._stream_writer.write(struct.pack('!i', -1))
await self._stream_writer.drain()
await asyncio.sleep(1)
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Heart beat:', e, file=sys.stderr)
async def _poll_answers(self) -> None:
"""Task to receive the response of a previous command and set the result to that command."""
def lost_connection(d):
"""Helper function to determine, if the connection was lost."""
if d == b'':
self._set_connected(False)
return True
return False
while True:
try:
await self._connected_internal.wait()
raw_id = await self._stream_reader.read(4)
if lost_connection(raw_id): continue
cmd_id = struct.unpack('=l', raw_id)[0]
raw_size = await self._stream_reader.read(4)
if lost_connection(raw_size): continue
size = struct.unpack('=l', raw_size)[0]
raw_data = await self._stream_reader.read(size)
if size > 0 and lost_connection(raw_data): continue
while len(raw_data) < size:
new_data = await self._stream_reader.read(size - len(raw_data))
if lost_connection(new_data):
break
raw_data += new_data
if not self._connected.is_set():
continue
if cmd_id in self._cmd_m:
cmd, _id = self._cmd_m.pop(cmd_id)
if not cmd.cancelled():
cmd.id = _id
cmd.set_result(raw_data)
else:
print('Unknown command id:', cmd_id, file=sys.stderr)
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Poll answers:', e, file=sys.stderr)
async def _send_commands(self) -> None:
"""Task to send scheduled commands."""
def cancel_cmd(cmd, ex=None):
"""Helper function, if an exception occurred and the command couldn't be send."""
_, _id = self._cmd_m.pop(cmd.id)
cmd.set_exception(ex if ex else Exception('Lost connection to the agent!'))
cmd.id = _id # replace internal id with the original
while True:
try:
await self._connected_internal.wait()
# get next command
cmd = await self._cmd_q.get()
# set command to running
if cmd.set_running_or_notify_cancel():
self._store_cmd(cmd)
try:
# send command
self._stream_writer.write(cmd.serialize())
await self._stream_writer.drain()
except asyncio.CancelledError: # task cancelled
cancel_cmd(cmd)
break
except OSError: # connection lost
self._set_connected(False)
cancel_cmd(cmd)
except Exception as e: # unexpected exception
print('Send commands:', e, file=sys.stderr)
cancel_cmd(cmd, e)
finally:
self._cmd_q.task_done() # mark as done
else:
self._cmd_q.task_done() # mark as done
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Send commands:', e, file=sys.stderr)
def _store_cmd(self, cmd) -> None:
"""Replaces the command id with an internal id and store command+id for later response."""
self._cmd_m[self._cmd_id] = (cmd, cmd.id)
cmd.id = self._cmd_id
self._cmd_id += 1
def send_command(self, cmd: DebugCommand) -> DebugCommand:
"""
Schedules the given command in the command queue and returns the command.
:raises Exception: if not connected to a naoth agent or the given command was already executed
"""
if not self.is_connected():
raise Exception('Not connected to the agent!')
if cmd.done():
raise Exception('This command has already been executed!')
# command queue is not thread safe - make sure we're add it in the correct thread
# this can cause a delay of ~0.5 ms
self._loop.call_soon_threadsafe(functools.partial(self._cmd_q.put_nowait, cmd))
return cmd
def debugrequest(self, request: str, enable: bool, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a debug request of the agent.
:param request: the debug request which should be en-/disabled
:param enable: True, if debug request should be enabled, False if it should be disabled
:param type: the type of the debug request ('cognition' or 'motion')
:return: Returns the scheduled command (future)
"""
return self.debugrequests([(request, enable)], type)
def debugrequests(self, requests: list, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a list of debug requests on the agent.
:param requests: a list of tuples ('debug request', True|False) of debug requests which should be en-/disabled
:param type: the type of the debug request ('cognition' or 'motion')
:return: Returns the scheduled command (future)
"""
dbg = pb.Messages_pb2.DebugRequest(requests=[
pb.Messages_pb2.DebugRequest.Item(name=request, value=enable) for request, enable in requests
])
if type == 'cognition':
return self.send_command(DebugCommand('Cognition:representation:set', [('DebugRequest', dbg.SerializeToString().decode())]))
elif type == 'motion':
return self.send_command(DebugCommand('Motion:representation:set', [('DebugRequest', dbg.SerializeToString().decode())]))
raise Exception('Unknown debug request type! Allowed: "cognition", "motion"')
def module(self, name: str, enable: bool, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a module of the agent instance.
:param name: the module which should be en-/disabled
:param enable: True, if module should be enabled, False if it should be disabled
:param type: the type of the module ('cognition' or 'motion')
:return: Returns the scheduled command (future)
"""
if type == 'cognition':
return self.send_command(DebugCommand('Cognition:modules:set', [(name, ('on' if enable else 'off'))]))
elif type == 'motion':
return self.send_command(DebugCommand('Motion:modules:set', [(name, ('on' if enable else 'off'))]))
raise Exception('Unknown module type! Allowed: "cognition", "motion"')
def representation(self, name: str, type: str = 'cognition', binary: bool = False) -> DebugCommand:
"""
Schedules a command for retrieving a representation.
:param name: the name of the representation which should be retrieved.
:param type: the type of the representation ('cognition' or 'motion')
:param binary: whether the result should be binary (protobuf) or as string
:return: Returns the scheduled command (future)
"""
if type == 'cognition':
if binary:
return self.send_command(DebugCommand('Cognition:representation:get', [name]))
else:
return self.send_command(DebugCommand('Cognition:representation:print', [name]))
elif type == 'motion':
if binary:
return self.send_command(DebugCommand('Motion:representation:get', [name]))
else:
return self.send_command(DebugCommand('Motion:representation:print', [name]))
raise Exception('Unknown representation type! Allowed: "cognition", "motion"')
def agent(self, name: str = None) -> DebugCommand:
"""
Get or set a named agent for execution.
:param name: the name of the agent (behavior), which should be executed or None if the current agent should
be returned
:return: Returns the scheduled command (future)
"""
if name is None:
return self.send_command(DebugCommand('Cognition:behavior:get_agent'))
return self.send_command(DebugCommand('Cognition:behavior:set_agent', [('agent', name)]))
def behavior(self, complete=False) -> DebugCommand:
"""
Schedules a command for retrieving the current behavior of the agent.
:param complete: True, if the complete behavior tree should be retrieved, False otherwise (sparse)
:return: Returns the scheduled command (future)
"""
if complete:
return self.representation('BehaviorStateComplete', binary=True)
else:
return self.representation('BehaviorStateSparse', binary=True)
if sys.version_info < (3, 7):
# python < 3.7
@asyncio.coroutine
def open_connection(host=None, port=None, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
reader = asyncio.StreamReader()
protocol = StreamReaderProtocolCompat(reader)
transport, _ = yield from loop.create_connection(lambda: protocol, host, port)
writer = asyncio.StreamWriter(transport, protocol, reader, loop)
return reader, writer
@asyncio.coroutine
def start_server(client_connected_cb, host=None, port=None, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
def factory():
reader = asyncio.StreamReader(loop=loop)
protocol = StreamReaderProtocolCompat(reader, client_connected_cb, loop=loop)
return protocol
return (yield from loop.create_server(factory, host, port))
class StreamReaderProtocolCompat(asyncio.StreamReaderProtocol):
def __init__(self, stream_reader, client_connected_cb=None, loop=None):
super().__init__(stream_reader, client_connected_cb, loop)
self._closed = self._loop.create_future()
def connection_lost(self, exc) -> None:
super().connection_lost(exc)
if not self._closed.done():
if exc is None:
self._closed.set_result(None)
else:
self._closed.set_exception(exc)
def _get_close_waiter(self, stream):
return self._closed
def __del__(self):
if self._closed.done() and not self._closed.cancelled():
self._closed.exception()
else:
# python >= 3.7
open_connection = asyncio.open_connection
start_server = asyncio.start_server
| 2.703125
| 3
|
python/subtitles/subtitlesview.py
|
chirimenmonster/wotmods-subtitles
| 0
|
12787141
|
<reponame>chirimenmonster/wotmods-subtitles
# -*- coding: utf-8 -*-
import logging
import BigWorld
import GUI
from gui.Scaleform.framework import ViewSettings, ViewTypes, ScopeTemplates
from gui.Scaleform.framework.entities.View import View
from modsettings import MOD_NAME
for name in [ 'gui.Scaleform.framework.entities.View', 'gui.Scaleform.Flash' ]:
logging.getLogger(name).setLevel(logging.DEBUG)
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.DEBUG)
class SubtitlesView(View):
def _populate(self):
BigWorld.logInfo(MOD_NAME, '_populate', None)
super(SubtitlesView, self)._populate()
screen = GUI.screenResolution()
center = ( screen[0] / 2, screen[1] / 2)
BigWorld.logInfo(MOD_NAME, '({}, {}), width={}, height={}'.format(center[0], center[1], screen[0], screen[1]), None)
self.as_setPositionS(center[0] - 100, center[1] + 280)
def as_setMessageS(self, message):
BigWorld.logInfo(MOD_NAME, 'as_setMessageS: {}'.format(message), None)
self.flashObject.as_setMessage(message)
def as_setPositionS(self, x, y):
BigWorld.logInfo(MOD_NAME, 'as_setPositionS: ({}, {})'.format(x, y), None)
self.flashObject.as_setPosition(x, y)
| 2.015625
| 2
|
elf2dol.py
|
stblr/mkw-sp
| 24
|
12787142
|
#!/usr/bin/env python3
from argparse import ArgumentParser
from elftools.elf.constants import P_FLAGS
from elftools.elf.elffile import ELFFile
import io
def segment_is_text(segment):
return segment['p_flags'] & P_FLAGS.PF_X == P_FLAGS.PF_X
def segment_is_data(segment):
return not segment_is_text(segment) and not segment_is_bss(segment)
def segment_is_bss(segment):
return segment['p_filesz'] == 0
def write_to_dol_header(file, offset, val):
file.seek(offset)
file.write(val.to_bytes(4, byteorder = 'big'))
file.seek(0, io.SEEK_END)
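# For reference (added comment): the header offsets written below follow the
# standard GameCube/Wii DOL layout -- 0x00..0x44 hold the file offsets of up to
# 7 text and 11 data sections, 0x48..0x8C their load addresses, 0x90..0xD4
# their sizes, 0xD8/0xDC the bss address and size, and 0xE0 the entry point.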
def write_segment_to_dol(idx, segment, dol_file):
write_to_dol_header(dol_file, 0x00 + 0x04 * idx, dol_file.tell())
write_to_dol_header(dol_file, 0x48 + 0x04 * idx, segment['p_vaddr'])
# align filesz to 0x20
filesz = ((segment['p_filesz'] + 0x1F) >> 5) << 5
write_to_dol_header(dol_file, 0x90 + 0x04 * idx, filesz)
dol_file.write(segment.data())
# align current dol size to 0x20
size = (0x20 - dol_file.tell()) & 0x1F
dol_file.write(bytes([0x00] * size))
parser = ArgumentParser()
parser.add_argument('in_path')
parser.add_argument('out_path')
args = parser.parse_args()
with open(args.in_path, 'rb') as elf_file, open(args.out_path, 'wb') as dol_file:
elf_file = ELFFile(elf_file)
num_segments = elf_file.num_segments()
dol_file.write(bytes([0x00] * 0x100))
idx = 0
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_text(segment):
continue
write_segment_to_dol(idx, segment, dol_file)
idx += 1
idx = 7
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_data(segment):
continue
write_segment_to_dol(idx, segment, dol_file)
idx += 1
bss_start = 0
bss_end = 0
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_bss(segment):
continue
if bss_start == 0:
bss_start = segment['p_vaddr']
bss_end = segment['p_vaddr'] + segment['p_memsz']
write_to_dol_header(dol_file, 0xD8, bss_start)
bss_size = bss_end - bss_start
write_to_dol_header(dol_file, 0xDC, bss_size)
write_to_dol_header(dol_file, 0xE0, elf_file['e_entry'])
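# Illustrative invocation (added note; file names are placeholders):
#   python3 elf2dol.py main.elf main.dol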
| 2.609375
| 3
|
django/views/list_modules.py
|
Kh4n/django-exploit
| 1
|
12787143
|
import importlib
from pprint import pprint
import ast
def get_imports_from_file(file_name):
with open(file_name, "r") as source:
tree = ast.parse(source.read())
analyzer = Analyzer()
analyzer.visit(tree)
imports = analyzer.report()
ret = []
for i in imports:
print(i)
try:
a = importlib.util.find_spec(i)
if a.origin:
ret.append(a.origin)
except Exception as e:
print(e)
return ret
class Analyzer(ast.NodeVisitor):
def __init__(self):
self.import_list = []
def visit_Import(self, node):
for alias in node.names:
# print("import", alias.name)
self.import_list.append(alias.name)
self.generic_visit(node)
def visit_ImportFrom(self, node):
for alias in node.names:
# print("from", node.module, "import", alias.name)
if alias.name == '*':
self.import_list.append(node.module)
else:
self.import_list.append(node.module + '.' + alias.name)
self.generic_visit(node)
def generic_visit(self, node):
# print(type(node).__name__)
# print(node._fields)
return super().generic_visit(node)
def report(self):
return self.import_list
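# Illustrative behaviour of the visitor above (added note): for a source file
# containing
#   import os
#   from collections import OrderedDict
#   from foo import *
# report() returns ['os', 'collections.OrderedDict', 'foo'].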
if __name__ == "__main__":
print(get_imports_from_file("backdoor.py"))
print(get_imports_from_file("defaults.py"))
| 2.625
| 3
|
_unittests/ut_automation_students/test_repository_little_aspect.py
|
mohamedelkansouli/Ensae_py
| 0
|
12787144
|
"""
@brief test log(time=2s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.ensae_teaching_cs.automation_students import ProjectsRepository
class TestRepositoryLittleAspect(unittest.TestCase):
def test_regular_expression(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
text = """<br /></div>
<div><div dir="ltr">Pourriez-vous vous ajouter sur le doodle suivant ?<div><br></div><div>
<p style="margin:0in;font-family:Calibri;font-size:11pt" lang="fr">
<a href="http://doodle.com/poll/xxxxxxxxc9w8">http://doodle.com/poll/xxxxxxsyz7c9w8</a></p></div></div><div class
"""
f = ProjectsRepository._link_regex.findall(text)
fLOG(f)
self.assertEqual(len(f), 2)
self.assertEqual(f[0], "http://doodle.com/poll/xxxxxxxxc9w8")
if __name__ == "__main__":
unittest.main()
| 2.546875
| 3
|
lambo/acquisitions/monte_carlo.py
|
samuelstanton/lambo
| 10
|
12787145
|
from numpy import array, copy, concatenate
from torch import Tensor
from botorch.acquisition.multi_objective.monte_carlo import (
qExpectedHypervolumeImprovement, qNoisyExpectedHypervolumeImprovement
)
from botorch.posteriors import GPyTorchPosterior, Posterior, DeterministicPosterior
from gpytorch.distributions import MultitaskMultivariateNormal
from gpytorch.lazy import BlockDiagLazyTensor
import torch
# TODO: replace these with the non-mocked versions once botorch #991 comes in
# will need to update to botorch master
class qDiscreteEHVI(qExpectedHypervolumeImprovement):
def forward(self, X: array) -> Tensor:
# mocks the qEHVI call
# assumes that X is an array of shape batch x q rather than a tensor of shape batch x q x d
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
return self._compute_qehvi(samples=samples)
class qDiscreteNEHVI(qNoisyExpectedHypervolumeImprovement):
# TODO: figure out how to remove
def __init__(
self,
model,
ref_point,
X_baseline,
sampler = None,
objective = None,
constraints = None,
X_pending = None,
eta: float = 1e-3,
prune_baseline: bool = False,
alpha: float = 0.0,
cache_pending: bool = True,
max_iep: int = 0,
incremental_nehvi: bool = True,
**kwargs,
):
model.eval()
mocked_features = model.get_features(X_baseline, model.bs)
# for string kernels
if mocked_features.ndim > 2:
mocked_features = mocked_features[..., 0].to(ref_point) # don't let this fail
super().__init__(
model=model,
ref_point=ref_point,
X_baseline=mocked_features,
sampler=sampler,
objective=objective,
constraints=constraints,
X_pending=X_pending,
eta=eta,
prune_baseline=prune_baseline,
alpha=alpha,
cache_pending=cache_pending,
max_iep=max_iep,
incremental_nehvi=incremental_nehvi,
**kwargs
)
self.X_baseline_string = X_baseline
def forward(self, X: array) -> Tensor:
if isinstance(X, Tensor):
baseline_X = self._X_baseline
baseline_X = baseline_X.expand(*X.shape[:-2], -1, -1)
X_full = torch.cat([baseline_X, X], dim=-2)
else:
baseline_X = copy(self.X_baseline_string) # ensure contiguity
baseline_X.resize(
baseline_X.shape[:-(X.ndim)] + X.shape[:-1] + baseline_X.shape[-1:]
)
X_full = concatenate([baseline_X, X], axis=-1)
# Note: it is important to compute the full posterior over `(X_baseline, X)`
# to ensure that we properly sample `f(X)` from the joint distribution
# `f(X_baseline, X) ~ P(f | D)`, given that we have already fixed the sampled
# function values for `f(X_baseline)`
posterior = self.model.posterior(X_full)
q = X.shape[-2]
self._set_sampler(q=q, posterior=posterior)
samples = self.sampler(posterior)[..., -q:, :]
# add previous nehvi from pending points
return self._compute_qehvi(samples=samples) + self._prev_nehvi
def _cache_root_decomposition(self, posterior: GPyTorchPosterior) -> None:
if posterior.mvn._interleaved:
if hasattr(posterior.mvn.lazy_covariance_matrix, 'base_lazy_tensor'):
posterior_lc_base = posterior.mvn.lazy_covariance_matrix.base_lazy_tensor
else:
posterior_lc_base = posterior.mvn.lazy_covariance_matrix
new_lazy_covariance = BlockDiagLazyTensor(posterior_lc_base)
posterior.mvn = MultitaskMultivariateNormal(posterior.mvn.mean, new_lazy_covariance, interleaved=False)
return super()._cache_root_decomposition(posterior=posterior)
class qMTGPDiscreteNEHVI(qDiscreteNEHVI):
# TODO: remove when botorch #1037 goes in
# this is copied over from that diff
_uses_matheron = True
def __init__(self, *args, **kwargs):
super().__init__(cache_root = False, *args, **kwargs)
def _set_sampler(
self,
q: int,
posterior: Posterior,
) -> None:
r"""Update the sampler to use the original base samples for X_baseline.
Args:
q: the batch size
posterior: the posterior
TODO: refactor some/all of this into the MCSampler.
"""
if self.q != q:
# create new base_samples
base_sample_shape = self.sampler._get_base_sample_shape(posterior=posterior)
self.sampler._construct_base_samples(
posterior=posterior, shape=base_sample_shape
)
if (
self.X_baseline.shape[0] > 0
and self.base_sampler.base_samples is not None
and not isinstance(posterior, DeterministicPosterior)
):
current_base_samples = self.base_sampler.base_samples.detach().clone()
# This is the # of non-`sample_shape` dimensions.
base_ndims = current_base_samples.dim() - 1
# Unsqueeze as many dimensions as needed to match base_sample_shape.
view_shape = (
self.sampler.sample_shape
+ torch.Size(
[1] * (len(base_sample_shape) - current_base_samples.dim())
)
+ current_base_samples.shape[-base_ndims:]
)
expanded_shape = (
base_sample_shape[:-base_ndims]
+ current_base_samples.shape[-base_ndims:]
)
# Use stored base samples:
# Use all base_samples from the current sampler
# this includes the base_samples from the base_sampler
# and any base_samples for the new points in the sampler.
# For example, when using sequential greedy candidate generation
# then generate the new candidate point using last (-1) base_sample
# in sampler. This copies that base sample.
end_idx = current_base_samples.shape[-1 if self._uses_matheron else -2]
expanded_samples = current_base_samples.view(view_shape).expand(
expanded_shape
)
if self._uses_matheron:
self.sampler.base_samples[..., :end_idx] = expanded_samples
else:
self.sampler.base_samples[..., :end_idx, :] = expanded_samples
# update cached subset indices
# Note: this also stores self.q = q
self._cache_q_subset_indices(q=q)
| 1.882813
| 2
|
tessia/server/auth/ldap.py
|
tessia-project/tessia
| 5
|
12787146
|
<filename>tessia/server/auth/ldap.py
# Copyright 2016, 2017 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Enable LDAP based authentication and user info retrieval
"""
#
# IMPORTS
#
from jsonschema import validate
from tessia.server.config import CONF
from tessia.server.auth.base import BaseLoginManager
import ldap3
import logging
#
# CONSTANTS AND DEFINITIONS
#
CONFIG_SCHEMA = {
'type': 'object',
'properties': {
'host': {'type': 'string'},
'port': {'type': 'number'},
'ssl': {'type': 'boolean'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'timeout': {'type': 'number'},
'user_base': {'type': 'string'},
'user_filter': {'type': 'string'},
'user_attributes': {
'type': 'object',
'properties': {
'fullname': {'type': ['string', 'null']},
'login': {'type': ['string', 'null']},
'title': {'type': ['string', 'null']},
},
},
'group_base': {
'type': ['string', 'null'],
},
'group_filter': {
'type': ['string', 'null'],
},
'group_membership_attr': {
'type': ['string', 'null'],
},
},
'required': ['host', 'user_base'],
}
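# Example of a matching 'ldap' section under 'auth' (added illustration; the
# host, base DNs and attribute names below are made-up placeholder values):
#   ldap:
#     host: ldap.example.com
#     port: 636
#     ssl: true
#     user_base: ou=people,dc=example,dc=com
#     user_filter: (objectClass=inetOrgPerson)
#     group_base: ou=groups,dc=example,dc=com
#     group_filter: (cn=tessia-users)
#     group_membership_attr: uniquemember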
#
# CODE
#
class LdapLoginManager(BaseLoginManager):
"""
Implement support for login authentication against LDAP
"""
def __init__(self):
"""
Constructor
Args:
None
Raises:
RuntimeError: in case db config is missing
"""
self._logger = logging.getLogger(__name__)
# validate config file and prepare internal config values
self._parse_conf()
self._logger.debug(
'LDAP module activated with the following config: %s', self._conf)
self._server = ldap3.Server(
self._conf['host'],
port=self._conf['port'],
use_ssl=self._conf['ssl'],
get_info=ldap3.NONE
)
# __init__()
def _bind(self, user_dn, password):
"""
Perform a bind (authentication) operation with the LDAP server
Args:
user_dn (str): distinguished name of the user
password (str): password
Returns:
bool: True if authentication succeeded, False otherwise
Raises:
None
"""
try:
conn = self._connect(user_dn, password)
result = conn.bind()
except Exception as exc:
self._logger.debug(
'User %s bind failed, debug info:', user_dn, exc_info=exc)
return False
if not result:
self._logger.debug(
'User %s bind failed: %s', user_dn, conn.result)
return result
# _bind()
def _connect(self, user_dn, password):
"""
Open an LDAP connection
Args:
user_dn (str): distinguished name of the user
password (str): password
Returns:
ldap3.Connection: connection instance
Raises:
None
"""
conn = ldap3.Connection(
self._server,
user_dn,
password,
read_only=True,
receive_timeout=self._conf['timeout']
)
return conn
# _connect()
def _is_group_member(self, conn, user_dn):
"""
Verify if a given user distinguished name is part of a group (if group
verification was set in config file).
Args:
conn (ldap3.Connection): connection instance
user_dn (str): user distinguished name
Returns:
bool: True if user belongs to group (or no group checking
configured), False otherwise
Raises:
None
"""
# group verification not active: nothing to do
if self._conf['group_filter'] is None:
return True
# ask ldap to return the group member list
search_filter = '(&{group_filter}({member_attr}={user_dn}))'.format(
group_filter=self._conf['group_filter'],
member_attr=self._conf['group_membership_attr'],
user_dn=user_dn
)
self._logger.debug(
"perform membership search with filter: '%s'", search_filter)
# perform operation
result = conn.search(
search_base=self._conf['group_base'],
search_filter=search_filter,
attributes=[self._conf['group_membership_attr']],
)
# operation failed or list is empty: user is not part of the group
result = result and len(conn.response) > 0
if not result:
self._logger.debug('group membership failed: %s', conn.result)
return result
# _is_group_member()
def _parse_conf(self):
"""
Verify if mandatory values were set in config file with appropriate
types and define defaults for optional values not provided.
Args:
None
Raises:
RuntimeError: in case ldap config is missing or wrong
"""
# make sure the config section is available
try:
self._conf = CONF.get_config().get('auth')['ldap']
except (TypeError, KeyError):
raise RuntimeError('No ldap configuration section found')
# apply schema validation
validate(self._conf, CONFIG_SCHEMA)
# set default values for optional configuration fields
self._conf['port'] = self._conf.get('port', 636)
self._conf['ssl'] = self._conf.get('ssl', True)
self._conf['username'] = self._conf.get('username')
self._conf['password'] = self._conf.get('password')
self._conf['timeout'] = self._conf.get('timeout', 10)
# optional user filter: default to None so _search_user does not raise KeyError
self._conf['user_filter'] = self._conf.get('user_filter')
user_attributes = self._conf.get('user_attributes', {})
self._conf['user_attributes'] = {}
self._conf['user_attributes']['login'] = (
user_attributes.get('login', 'mail'))
self._conf['user_attributes']['fullname'] = (
user_attributes.get('fullname', 'cn'))
# job's title is an optional attribute
try:
self._conf['user_attributes']['title'] = user_attributes['title']
except KeyError:
pass
if self._conf.get('group_filter') is None:
self._conf['group_filter'] = None
self._conf['group_base'] = None
self._conf['group_membership_attr'] = None
else:
if self._conf.get('group_base') is None:
raise RuntimeError(
'group_filter requires group_base parameter')
if self._conf.get('group_membership_attr') is None:
raise RuntimeError(
'group_filter requires group_membership_attr parameter')
# _parse_conf()
def _search_user(self, conn, username):
"""
        Perform a search on the LDAP server for the specified user and return
its entry.
Args:
conn (ldap3.Connection): connection instance
username (str): the username to be searched
Returns:
dict: entry containing user attributes retrieved from ldap server
None: in case no entry is found
Raises:
RuntimeError: in case one of the expected attributes is not
provided in the server's response
"""
# create the user search filter based on config file values
search_filter = '({0}={1})'.format(
self._conf['user_attributes']['login'], username)
if self._conf['user_filter'] is not None:
search_filter = '(&{0}{1})'.format(
search_filter, self._conf['user_filter'])
self._logger.debug(
"perform user search with filter: '%s'", search_filter)
# search for user entry
ret = conn.search(
search_base=self._conf['user_base'],
search_filter=search_filter,
attributes=list(self._conf['user_attributes'].values()),
)
# user not found: return nothing
if ret is False or not conn.response:
self._logger.debug('user not found, result: %s', conn.result)
return None
# build a dict of attributes we need from the user entry
user_attrs = {}
for key in self._conf['user_attributes'].keys():
ldap_key = self._conf['user_attributes'][key]
try:
value = conn.response[0]['attributes'][ldap_key]
except KeyError:
self._logger.warning(
'User attribute %s not found in server response: %s',
ldap_key, conn.response[0])
raise RuntimeError(
'User attribute {} not found in server response'.format(
ldap_key))
if isinstance(value, list):
if value:
value = value[0]
else:
value = ''
user_attrs[key] = value
        # save the dn (distinguished name) for further operations
user_attrs['dn'] = conn.response[0]['dn']
return user_attrs
# _search_user()
def authenticate(self, username, password):
"""
        Perform a search and bind operation to authenticate the user with the
ldap server.
Args:
username (str): username
password (str): password
Returns:
dict: entry containing the attributes defined in section
user_attributes of config file
None: in case authentication fails
Raises:
None
"""
# TODO: enable caching
# open connection to ldap server and perform the operations
with self._connect(self._conf['username'],
self._conf['password']) as conn:
entry = self._search_user(conn, username)
if entry is None:
self._logger.warning('user %s not found', username)
return None
# verify group membership if activated
if not self._is_group_member(conn, entry['dn']):
self._logger.warning(
'user %s not member of allowed group(s)', username)
return None
# password invalid: user is not authorized
if not self._bind(entry['dn'], password):
self._logger.warning(
'authentication failed for user %s (invalid password)',
username)
return None
# 'dn' is ldap specific and should not be returned
entry.pop('dn')
self._logger.info('authentication successful for user %s', username)
return entry
# authenticate()
# LdapLoginManager
MANAGER = LdapLoginManager
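# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hypothetical example of how a caller might use this login manager.
# It assumes the constructor takes no arguments and reads the auth/ldap section
# of the application config by itself; the credentials below are placeholders.
#
#     manager = MANAGER()
#     entry = manager.authenticate('user@example.com', 'secret')
#     if entry is None:
#         print('authentication failed')
#     else:
#         print('authenticated:', entry['login'], entry['fullname'])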
| 1.59375
| 2
|
blog_site/terminal_blog/app.py
|
jesh-anand/PythonMasterClass
| 0
|
12787147
|
from blog_site.common.database import Database
from blog_site.terminal_blog.model.menu import Menu
__author__ = '<NAME>'
Database.initialize()
menu = Menu()
menu.run_menu()
| 1.53125
| 2
|
Pass/views.py
|
franklinwagbara/Brookstone-Pastoral-Management-System
| 0
|
12787148
|
from django.shortcuts import render, redirect
from StudentManager.functions import viewStudents
from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers
import concurrent.futures
import threading
from django.utils import timezone
import datetime
from Manager.functions import incrementTotalCheckIn, decrementTotalCheckIn
from django.http import HttpResponseRedirect
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.contrib import messages
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from Dashboard.decorators import management
@login_required(login_url='login')
def viewStudentsList(request):
#if bool(Group.objects.get(name="accounts") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="principal") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="administrator") in User.objects.get(username=request.user).groups.all()) == False:
# return render(request, 'dashboard/dashboard.html',
# {'CheckStat': CheckStat.objects.get(id=1),
# 'students': Students.objects.all().filter(CheckedOut="Yes").order_by('LastName') | Students.objects.all().filter(CheckedIn="Yes").order_by('LastName'),
# 'mode': 'viewCheckIn'})
return viewStudents(request, "viewStudentsPass.html")
@login_required(login_url='login')
@management
def viewStudentsListAdmin(request):
#if bool(Group.objects.get(name="accounts") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="principal") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="administrator") in User.objects.get(username=request.user).groups.all()) == False:
# return render(request, 'dashboard/dashboard.html',
# {'CheckStat': CheckStat.objects.get(id=1),
# 'students': Students.objects.all().filter(CheckedOut="Yes").order_by('LastName') | Students.objects.all().filter(CheckedIn="Yes").order_by('LastName'),
# 'mode': 'viewCheckIn'})
return viewStudents(request, "viewStudentsPassAdmin.html")
@login_required(login_url='login')
@management
def viewCheckInProfileAdmin(request, pk):
student = Students.objects.get(pk=pk)
season = CurrentSeason.objects.get(pk=1).Season
checkin = ""
allowed = ""
if CheckIn.objects.filter(Student=student, Season=season).exists():
checkedIn = "Yes"
checkin = CheckIn.objects.get(Student=student, Season=season)
else:
checkedIn = "No"
if Allowed.objects.filter(Student=student, Season=season).exists():
allowed = Allowed.objects.get(Student=student, Season=season)
else:
allowed = ""
return render(request, "checkInProfilePassAdmin.html", {'student': student, 'checkedIn': checkedIn, 'checkin': checkin,
'allowed': allowed})
@login_required(login_url='login')
def viewCheckInProfile(request, pk):
student = Students.objects.get(pk=pk)
season = CurrentSeason.objects.get(pk=1).Season
checkin = ""
allowed = ""
if CheckIn.objects.filter(Student=student, Season=season).exists():
checkedIn = "Yes"
checkin = CheckIn.objects.get(Student=student, Season=season)
else:
checkedIn = "No"
if Allowed.objects.filter(Student=student, Season=season).exists():
allowed = Allowed.objects.get(Student=student, Season=season)
else:
allowed = ""
return render(request, "checkInProfilePass.html", {'student': student, 'checkedIn': checkedIn, 'checkin': checkin,
'allowed': allowed})
def Pass_helperAdmin(request, id):
if request.method == "POST":
reason = request.POST.getlist("reason")[0]
current_season = CurrentSeason.objects.get(pk=1)
season = Seasons.objects.get(SeasonName=current_season)
student = Students.objects.get(pk=id)
        if Pointers.objects.filter(id=1).exists():
            pass_code = Pointers.objects.get(id=1).PassCodePointer + 1
            # QuerySet.update() writes directly to the database; no explicit save() needed
            Pointers.objects.filter(id=1).update(PassCodePointer=pass_code)
        else:
            pass_code = CheckIn.objects.all().count() + 1
            Pointers.objects.create(id=1, Season=season, PassCodePointer=pass_code)
        pass_code = str(pass_code).zfill(4)
        print("here")
        # update the existing clearance record if there is one, otherwise create it
        if Allowed.objects.filter(Student=student, Season=season).exists():
            Allowed.objects.filter(Student=student, Season=season).update(Clear="Yes")
        else:
            Allowed.objects.create(Student=student, Season=season, Clear="Yes")
if CheckIn.objects.filter(Student=student, Season=season).exists():
CheckIn.objects.filter(Student=student,
Season=season).update(Passed="Yes", PassCode=pass_code,
ReasonPass=reason, DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
else:
CheckIn.objects.create(Student=student,
Season=season, Passed="Yes", PassCode=pass_code,
ReasonPass=reason,
DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
print("checked in----")
def Pass_helper(request, id):
if request.method == "POST":
current_season = CurrentSeason.objects.get(pk=1)
season = Seasons.objects.get(SeasonName=current_season)
student = Students.objects.get(pk=id)
        if Pointers.objects.filter(id=1).exists():
            pass_code = Pointers.objects.get(id=1).PassCodePointer + 1
            # QuerySet.update() writes directly to the database; calling save()
            # on the model class is unnecessary (and would raise a TypeError)
            Pointers.objects.filter(id=1).update(PassCodePointer=pass_code)
        else:
            pass_code = CheckIn.objects.all().count() + 1
            Pointers.objects.create(id=1, Season=season, PassCodePointer=pass_code)
pass_code = str(pass_code).zfill(4)
if CheckIn.objects.filter(Student=student, Season=season).exists():
CheckIn.objects.filter(Student=student,
Season=season).update(Passed="Yes", PassCode=pass_code,
ReasonPass="Fulfilled all requirements.", DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
else:
CheckIn.objects.create(Student=student,
Season=season, Passed="Yes", PassCode=pass_code,
ReasonPass="Fulfilled all requirements.",
DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
print("checked in----")
def sendEMail(request, mailHead, recipient, template, context):
msg=""
if recipient != "None":
html_message = render_to_string("" + template, {
'context': context})
plain_message = strip_tags(html_message)
try:
send_mail(mailHead,
plain_message,
'<EMAIL>',
[recipient],
html_message=html_message,
fail_silently=False)
msg = "Email sent Successfully!"
return msg
        except Exception:
msg = "Email failed!"
return msg
else:
msg = "Operation Failed! No recipient provided."
return msg
def wardCheckedInEmail(request, pk):
student = Students.objects.get(pk=pk)
    mailHead = "Your Ward has been Checked-in to the Brookstone Secondary Boarding Facility"
#recipient = student.ParentEmail
recipient = "<EMAIL>"
context = student
template = "EmailPassSuccess.html"
message = sendEMail(request, mailHead, recipient, template, context)
return message
@login_required(login_url='login')
def Pass(request, pk):
#with concurrent.futures.ThreadPoolExecutor() as executor:
# results = [executor.submit(checkin_helper, request, id), executor.submit(wardCheckedInEmail, request, id)]
# for f in concurrent.futures.as_completed(results):
# if f.result() != "EmailNoneResult":
# message = f.result()
# return message
t1 = threading.Thread(target=Pass_helper, args=[request, pk])
t2 = threading.Thread(target=wardCheckedInEmail, args=[request, pk])
    # Thread.start() returns None, so the status message is set explicitly here
    t1.start()
    t2.start()
    message = "Verification Successful! Student is cleared to pass."
    if "Successful" in message:
messages.success(request, message)
else:
messages.error(request, message)
return redirect("/Pass/viewCheckInProfile/" + str(pk))
#return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
#return redirect("/Pass/viewCheckInProfile/" + str(pk))
@login_required(login_url='login')
@management
def PassAdmin(request, pk):
# with concurrent.futures.ThreadPoolExecutor() as executor:
# results = [executor.submit(checkin_helper, request, id), executor.submit(wardCheckedInEmail, request, id)]
# for f in concurrent.futures.as_completed(results):
# if f.result() != "EmailNoneResult":
# message = f.result()
# return message
t1 = threading.Thread(target=Pass_helperAdmin, args=[request, pk])
t2 = threading.Thread(target=wardCheckedInEmail, args=[request, pk])
    # Thread.start() returns None, so the status message is set explicitly here
    t1.start()
    t2.start()
    message = "Verification Successful! Student is cleared to pass."
    if "Successful" in message:
print("here " + message)
messages.success(request, message)
else:
messages.error(request, message)
return redirect("/Pass/viewCheckInProfileAdmin/" + str(pk))
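# Illustrative sketch (not part of the original views): if the email outcome is
# ever needed in the HTTP response, a thread pool (as hinted at by the
# commented-out code above) can return it instead of a fire-and-forget thread:
#
#     with concurrent.futures.ThreadPoolExecutor() as executor:
#         future = executor.submit(wardCheckedInEmail, request, pk)
#         message = future.result()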
| 1.976563
| 2
|
set4/ctr_bitflipping.py
|
adbforlife/cryptopals
| 0
|
12787149
|
import sys
sys.path.append('/Users/ADB/Desktop/ /cryptopals')
from cryptotools import *
key = generate_key()
def enc_oracle(m):
m = b''.join(m.split(b';'))
m = b''.join(m.split(b'='))
prefix = b'comment1=cooking%20MCs;userdata='
suffix = b';comment2=%20like%20a%20pound%20of%20bacon'
plaintext = prefix + m + suffix
return aes_ctr_encrypt(plaintext, key)
def dec_oracle(c):
admin_string = b';admin=true;'
m = aes_ctr_decrypt(c, key)
return m.find(admin_string) >= 0
if __name__ == '__main__':
test_string = b'AadminAtrue'
c = list(enc_oracle(test_string))
c[32] = c[32] ^ ord('A') ^ ord(';')
c[38] = c[38] ^ ord('A') ^ ord('=')
c = bytes(c)
print(dec_oracle(c))
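    # Why the flip works (illustrative sketch): CTR is a stream cipher, so
    # ciphertext[i] = plaintext[i] ^ keystream[i]; XORing a ciphertext byte with
    # old_byte ^ new_byte therefore makes it decrypt to new_byte. The toy
    # keystream below is made up for demonstration and is unrelated to `key`.
    demo_keystream = bytes(range(11))
    old, new = b'AadminAtrue', b';admin=true'
    demo_ct = bytes(p ^ k for p, k in zip(old, demo_keystream))
    forged = bytes(x ^ o ^ n for x, o, n in zip(demo_ct, old, new))
    assert bytes(f ^ k for f, k in zip(forged, demo_keystream)) == new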
| 2.515625
| 3
|
AutoEncoders/denoising_AE/main.py
|
imhgchoi/pytorch_implementations
| 3
|
12787150
|
from AutoEncoder.denoising_AE.denoising_ae import DAE
from AutoEncoder.denoising_AE.data import Data
from AutoEncoder.denoising_AE.learner import Learner
from AutoEncoder.denoising_AE.visualizer import Visualizer
if __name__ == '__main__':
DATA_DIR = 'D:/rawDataFiles/digit_train.csv'
LEARNING_RATE = 3e-5
EPOCHS = 500
BATCH_SIZE = 3000
data = Data(DATA_DIR)
target, input = data.import_data()
dae = DAE(LEARNING_RATE)
target = target.to(dae.device)
input = input.to(dae.device)
print(target.shape)
print(input.shape)
learner = Learner(dae, input, target, batch_size=BATCH_SIZE, epochs=EPOCHS)
model = learner.learn()
viz = Visualizer(target, model)
viz.viz()
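# Illustrative note (an assumption, not taken from this repository's code): a
# denoising autoencoder is typically trained on a corrupted copy of the clean
# data while the loss compares the reconstruction against the clean target,
# roughly:
#
#     import torch
#     noisy = clean + 0.1 * torch.randn_like(clean)
#     loss = torch.nn.functional.mse_loss(model(noisy), clean)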
| 2.3125
| 2
|